hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
01147e4a82a4a696af3efd6740c7aef0e620cc34
| 129
|
py
|
Python
|
rBN/utils/eval.py
|
hiroki-kyoto/LogicNet
|
a9105c38c60fec8e74c7d039e58625c9b3b3e691
|
[
"MIT"
] | null | null | null |
rBN/utils/eval.py
|
hiroki-kyoto/LogicNet
|
a9105c38c60fec8e74c7d039e58625c9b3b3e691
|
[
"MIT"
] | null | null | null |
rBN/utils/eval.py
|
hiroki-kyoto/LogicNet
|
a9105c38c60fec8e74c7d039e58625c9b3b3e691
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
def eval(model: nn.Module, data, weight_path: str):
pass
| 18.428571
| 51
| 0.744186
| 23
| 129
| 4.130435
| 0.652174
| 0.347368
| 0.273684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170543
| 129
| 7
| 52
| 18.428571
| 0.88785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
0146fb0691528b5c331553be5368a038bea5f39a
| 46
|
py
|
Python
|
Release/Tests/AnalysisTest/Python.VS.TestData/Grammar/ExecStmt.py
|
rsumner33/PTVS
|
f5d67cff8c7bb32992dd4f77c0dfddaca6071250
|
[
"Apache-2.0"
] | null | null | null |
Release/Tests/AnalysisTest/Python.VS.TestData/Grammar/ExecStmt.py
|
rsumner33/PTVS
|
f5d67cff8c7bb32992dd4f77c0dfddaca6071250
|
[
"Apache-2.0"
] | null | null | null |
Release/Tests/AnalysisTest/Python.VS.TestData/Grammar/ExecStmt.py
|
rsumner33/PTVS
|
f5d67cff8c7bb32992dd4f77c0dfddaca6071250
|
[
"Apache-2.0"
] | 1
|
2020-12-09T10:16:23.000Z
|
2020-12-09T10:16:23.000Z
|
exec foo
exec foo in bar
exec foo in bar, baz
| 15.333333
| 20
| 0.73913
| 12
| 46
| 2.916667
| 0.5
| 0.6
| 0.514286
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 46
| 3
| 20
| 15.333333
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0149bd1e4b96d3898353cfff34dce891d13a9168
| 269
|
py
|
Python
|
notebook/random_random.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 174
|
2018-05-30T21:14:50.000Z
|
2022-03-25T07:59:37.000Z
|
notebook/random_random.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 5
|
2019-08-10T03:22:02.000Z
|
2021-07-12T20:31:17.000Z
|
notebook/random_random.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 53
|
2018-04-27T05:26:35.000Z
|
2022-03-25T07:59:37.000Z
|
import random
print(random.random())
# 0.4496839011176701
random.seed(0)
print(random.random())
# 0.8444218515250481
print(random.random())
# 0.7579544029403025
random.seed(0)
print(random.random())
# 0.8444218515250481
print(random.random())
# 0.7579544029403025
| 14.157895
| 22
| 0.758364
| 33
| 269
| 6.181818
| 0.242424
| 0.269608
| 0.416667
| 0.441176
| 0.77451
| 0.77451
| 0.77451
| 0.77451
| 0.77451
| 0.77451
| 0
| 0.355102
| 0.089219
| 269
| 18
| 23
| 14.944444
| 0.477551
| 0.349442
| 0
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.125
| 0
| 0.125
| 0.625
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 13
|
6dd2b1fb20e0bfa3e682005711696fdb77563e53
| 6,393
|
py
|
Python
|
tests/test_magics.py
|
davidesarra/jupyter_spaces
|
3152b226e14f5c9b21ae9e997efca50ff10b7757
|
[
"MIT"
] | 24
|
2018-05-24T16:50:43.000Z
|
2021-09-07T00:34:33.000Z
|
tests/test_magics.py
|
davidesarra/jupyter_spaces
|
3152b226e14f5c9b21ae9e997efca50ff10b7757
|
[
"MIT"
] | 15
|
2020-04-20T08:45:04.000Z
|
2021-03-26T07:14:53.000Z
|
tests/test_magics.py
|
davidesarra/jupyter_spaces
|
3152b226e14f5c9b21ae9e997efca50ff10b7757
|
[
"MIT"
] | 2
|
2018-07-02T16:03:07.000Z
|
2022-03-30T22:40:45.000Z
|
from pytest import raises
def test_space_can_access_user_namespace_references(ip):
ip.run_cell(raw_cell="x = 100")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x")
def test_space_references_prioritized_over_user_namespace_references(ip):
ip.run_cell(raw_cell="x = 100")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99; assert x == 99")
def test_space_cannot_alter_user_namespace_immutable_references(ip):
ip.run_cell(raw_cell="x = 100")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
assert ip.user_global_ns["x"] == 100
def test_space_can_alter_user_namespace_mutable_references(ip):
ip.run_cell(raw_cell="x = [1, 2, 3]")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x[-1] = 10")
assert ip.user_global_ns["x"] == [1, 2, 10]
def test_space_cannot_alter_user_namespace_references_using_global(ip):
ip.run_cell(raw_cell="x = 100")
ip.run_cell_magic(magic_name="space", line="tomato", cell="global x; x = 99")
assert ip.user_global_ns["x"] == 100
def test_space_cannot_remove_user_namespace_references(ip):
ip.run_cell(raw_cell="x = 100")
with raises(NameError):
ip.run_cell_magic(magic_name="space", line="tomato", cell="del x")
assert ip.user_global_ns["x"] == 100
def test_space_cannot_remove_user_namespace_references_using_global(ip):
ip.run_cell(raw_cell="x = 100")
with raises(NameError):
ip.run_cell_magic(magic_name="space", line="tomato", cell="global x; del x")
assert "x" in ip.user_global_ns
def test_space_cannot_add_user_namespace_references(ip):
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
assert "x" not in ip.user_global_ns
def test_space_cannot_add_user_namespace_references_using_global(ip):
ip.run_cell_magic(magic_name="space", line="tomato", cell="global x; x = 99")
assert "x" not in ip.user_global_ns
def test_space_reference_assignments_persist_in_new_magic_call(ip):
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell_magic(magic_name="space", line="tomato", cell="assert x == 99")
def test_space_reference_deletions_persist_in_new_magic_call(ip):
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell_magic(magic_name="space", line="tomato", cell="del x")
with raises(NameError):
ip.run_cell_magic(magic_name="space", line="tomato", cell="x")
def test_space_references_assignments_are_confined_in_one_space_only(ip):
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell_magic(magic_name="space", line="potato", cell="x = 100")
ip.run_cell_magic(magic_name="space", line="tomato", cell="assert x == 99")
def test_space_references_deletions_are_confined_in_one_space_only(ip):
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
with raises(NameError):
ip.run_cell_magic(magic_name="space", line="potato", cell="del x")
ip.run_cell_magic(magic_name="space", line="tomato", cell="assert x == 99")
def test_space_can_execute_newly_defined_lambda_functions(ip):
ip.run_cell_magic(
magic_name="space",
line="tomato",
cell="f = lambda x: x + 1; y = f(x=2); assert y == 3",
)
def test_space_can_execute_newly_defined_functions(ip):
ip.run_cell_magic(
magic_name="space",
line="tomato",
cell="def f(x): return x + 1; y = f(x=2); assert y == 3",
)
def test_space_can_execute_top_level_non_closure_functions(ip):
ip.run_cell_magic(
magic_name="space",
line="tomato",
cell="def f(x): return x + 1\ndef g(x): return f(x=x) * 2\ny = g(x=3)",
)
ip.run_cell_magic(magic_name="space", line="tomato", cell="assert y == 8")
def test_get_spaces_can_access_space_references(ip):
ip.run_cell(raw_cell="from jupyter_spaces import get_spaces")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell(raw_cell="spaces = get_spaces()")
assert ip.user_global_ns["spaces"]["tomato"].namespace["x"] == 99
def test_get_spaces_can_alter_space_references(ip):
ip.run_cell(raw_cell="from jupyter_spaces import get_spaces")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell(raw_cell="spaces = get_spaces()")
ip.run_cell(raw_cell='spaces["tomato"].namespace["x"] = 101')
assert ip.user_global_ns["spaces"]["tomato"].namespace["x"] == 101
def test_get_spaces_can_remove_space_references(ip):
ip.run_cell(raw_cell="from jupyter_spaces import get_spaces")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell(raw_cell="spaces = get_spaces()")
ip.run_cell(raw_cell='del spaces["tomato"].namespace["x"]')
assert "x" not in ip.user_global_ns["spaces"]["tomato"].namespace
def test_get_spaces_reflects_space_references_changes(ip):
ip.run_cell(raw_cell="from jupyter_spaces import get_spaces")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell(raw_cell="spaces = get_spaces()")
ip.run_cell(raw_cell='spaces["tomato"].namespace["x"] = 101')
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 11")
assert ip.user_global_ns["spaces"]["tomato"].namespace["x"] == 11
def test_get_spaces_reflects_space_removal(ip):
ip.run_cell(raw_cell="from jupyter_spaces import get_spaces")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell(raw_cell="spaces = get_spaces()")
ip.run_line_magic(magic_name="remove_space", line="tomato")
assert "tomato" not in ip.user_global_ns["spaces"]
def test_get_spaces_reflects_extension_reload(ip):
ip.run_cell(raw_cell="from jupyter_spaces import get_spaces")
ip.run_cell_magic(magic_name="space", line="tomato", cell="x = 99")
ip.run_cell(raw_cell="spaces = get_spaces()")
ip.run_line_magic(magic_name="reload_ext", line="jupyter_spaces")
assert not ip.user_global_ns["spaces"]
def test_space_outputs_to_console(ip, capsys):
ip.run_cell_magic(magic_name="space", line="tomato", cell="100")
assert capsys.readouterr().out == "100\n"
def test_space_can_print_to_console(ip, capsys):
ip.run_cell_magic(magic_name="space", line="tomato", cell="print(100)")
assert capsys.readouterr().out == "100\n"
| 39.462963
| 87
| 0.713906
| 1,033
| 6,393
| 4.072604
| 0.087125
| 0.067744
| 0.117661
| 0.109817
| 0.897076
| 0.878773
| 0.85952
| 0.803185
| 0.790112
| 0.757547
| 0
| 0.020784
| 0.134522
| 6,393
| 161
| 88
| 39.708075
| 0.739563
| 0
| 0
| 0.539823
| 0
| 0.026549
| 0.22071
| 0.014547
| 0
| 0
| 0
| 0
| 0.19469
| 1
| 0.212389
| false
| 0
| 0.061947
| 0
| 0.274336
| 0.017699
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3048b47315de4a38a708e652e230bf44746bfeb
| 10,355
|
py
|
Python
|
snake_game.py
|
JehunYoo/SnakeRL
|
3635e2e5bcd6d3147cafa90e57471dbc5587d49a
|
[
"MIT"
] | null | null | null |
snake_game.py
|
JehunYoo/SnakeRL
|
3635e2e5bcd6d3147cafa90e57471dbc5587d49a
|
[
"MIT"
] | null | null | null |
snake_game.py
|
JehunYoo/SnakeRL
|
3635e2e5bcd6d3147cafa90e57471dbc5587d49a
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD
'''
Game Description
valid coordinate : ([0, 380], [0, 380])
pygame 좌표가 x축 방향은 똑같은데 y축 방향이 반대에 주의!
'''
import pygame
from pygame.locals import *
import random, time
class Snake():
def __init__(self, x, y):
self.x = x # head
self.y = y # head
self.length = 1
self.go = 100
self.gone = 100
self.edible = False
self.space = [(self.x, self.y)] # queue
self.rect = []
self.surf = []
self.gene = [] # FIXME
self.wall = None
self.prey = None
def update(self, delta_x=None, delta_y=None, prey_x=None, prey_y=None):
if delta_x is not None:
self.x += delta_x
if delta_y is not None:
self.y += delta_y
self.space.insert(0, (self.x, self.y))
if not self.edible:
self.space.pop()
elif self.edible:
self.edible = False
self.wall = ((self.x - 0) / STEP, (self.x - SCREEN_SIZE + SIZE) / STEP,
(self.y - 0) / STEP, (self.y - SCREEN_SIZE + SIZE) / STEP)
self.prey = ((prey_x - self.x) / STEP, (prey_y - self.y) / STEP)
def where(self):
print(self.length, self.space, self.wall)
def move(self):
self.rect = []
self.surf = []
for idx, sp in enumerate(self.space):
self.rect.append(pygame.Rect((sp[0], sp[1]), (SIZE, SIZE)))
self.surf.append(pygame.Surface((SIZE, SIZE)))
if idx == 0:
self.surf[idx].fill(GREEN)
elif idx == len(self.space) - 1:
self.surf[idx].fill(BLUE)
else:
self.surf[idx].fill(WHITE)
def eat(self):
self.length += 1
self.edible = True
# hyperparameter control (GAME)
## screen and objects
SCREEN_SIZE = 400
FPS = 60 # how many frames we update per second.
SIZE = 20
STEP = 20
INIT_POS_X = 20 * random.randint(0, 19)
INIT_POS_Y = 20 * random.randint(0, 19)
## colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
## others
START = True
# hyperparameter control (Neural Network)
## TODO
# pygame initialization
successes, failures = pygame.init()
print("{0} successes and {1} failures".format(successes, failures))
assert not failures, "pygame initialization error"
# screen
screen = pygame.display.set_mode((SCREEN_SIZE, SCREEN_SIZE))
pygame.display.set_caption('snake game')
clock = pygame.time.Clock()
# objects
snake = Snake(INIT_POS_X, INIT_POS_Y)
snakeRect = pygame.Rect((INIT_POS_X, INIT_POS_Y), (SIZE, SIZE))
snakeSurf = pygame.Surface((SIZE, SIZE))
snakeSurf.fill(GREEN)
preySurf = pygame.Surface((SIZE, SIZE))
preySurf.fill(RED)
# game operation
while True:
clock.tick(FPS)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_UP:
snakeRect.move_ip(0, -STEP) # changes the snakeRect's position
snake.update(delta_y=-STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = 1
snake.where()
elif event.key == pygame.K_DOWN:
snakeRect.move_ip(0, STEP)
snake.update(delta_y=STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = -1
snake.where()
elif event.key == pygame.K_LEFT:
snakeRect.move_ip(-STEP, 0)
snake.update(delta_x=-STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = -2
snake.where()
elif event.key == pygame.K_RIGHT:
snakeRect.move_ip(STEP, 0)
snake.update(delta_x=STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = 2
snake.where()
elif event.key == pygame.K_ESCAPE:
quit()
# game end rule
if (not 0 <= snake.x <= (SCREEN_SIZE - SIZE)) or (not 0 <= snake.y <= (SCREEN_SIZE - SIZE)):
print('out of boundary')
quit()
elif (snake.x, snake.y) in snake.space[1:] or (snake.length > 1 and snake.go + snake.gone == 0):
print('it\'s you!')
quit()
elif snake.length == 400:
print('you win!')
quit()
# prey
if START == True or (prey_x, prey_y) == (snake.x, snake.y):
prey_x = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
prey_y = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
while (prey_x, prey_y) in snake.space:
prey_x = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
prey_y = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
preyRect = pygame.Rect((prey_x, prey_y), (SIZE, SIZE))
if START:
START = False
elif not START:
snake.eat()
# update direction of snake
snake.gone = snake.go
screen.fill(BLACK)
screen.blit(snakeSurf, snakeRect)
snake.move()
for surf, rect in zip(snake.surf, snake.rect):
screen.blit(surf, rect)
screen.blit(preySurf, preyRect)
=======
'''
Game Description
valid coordinate : ([0, 380], [0, 380])
pygame 좌표가 x축 방향은 똑같은데 y축 방향이 반대에 주의!
'''
import pygame
from pygame.locals import *
import random, time
class Snake():
def __init__(self, x, y):
self.x = x # head
self.y = y # head
self.length = 1
self.go = 100
self.gone = 100
self.edible = False
self.space = [(self.x, self.y)] # queue
self.rect = []
self.surf = []
self.gene = [] # FIXME
self.wall = None
self.prey = None
def update(self, delta_x=None, delta_y=None, prey_x=None, prey_y=None):
if delta_x is not None:
self.x += delta_x
if delta_y is not None:
self.y += delta_y
self.space.insert(0, (self.x, self.y))
if not self.edible:
self.space.pop()
elif self.edible:
self.edible = False
self.wall = ((self.x - 0) / STEP, (self.x - SCREEN_SIZE + SIZE) / STEP,
(self.y - 0) / STEP, (self.y - SCREEN_SIZE + SIZE) / STEP)
self.prey = ((prey_x - self.x) / STEP, (prey_y - self.y) / STEP)
def where(self):
print(self.length, self.space, self.wall)
def move(self):
self.rect = []
self.surf = []
for idx, sp in enumerate(self.space):
self.rect.append(pygame.Rect((sp[0], sp[1]), (SIZE, SIZE)))
self.surf.append(pygame.Surface((SIZE, SIZE)))
if idx == 0:
self.surf[idx].fill(GREEN)
elif idx == len(self.space) - 1:
self.surf[idx].fill(BLUE)
else:
self.surf[idx].fill(WHITE)
def eat(self):
self.length += 1
self.edible = True
# hyperparameter control (GAME)
## screen and objects
SCREEN_SIZE = 400
FPS = 60 # how many frames we update per second.
SIZE = 20
STEP = 20
INIT_POS_X = 20 * random.randint(0, 19)
INIT_POS_Y = 20 * random.randint(0, 19)
## colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
## others
START = True
# hyperparameter control (Neural Network)
## TODO
# pygame initialization
successes, failures = pygame.init()
print("{0} successes and {1} failures".format(successes, failures))
assert not failures, "pygame initialization error"
# screen
screen = pygame.display.set_mode((SCREEN_SIZE, SCREEN_SIZE))
pygame.display.set_caption('snake game')
clock = pygame.time.Clock()
# objects
snake = Snake(INIT_POS_X, INIT_POS_Y)
snakeRect = pygame.Rect((INIT_POS_X, INIT_POS_Y), (SIZE, SIZE))
snakeSurf = pygame.Surface((SIZE, SIZE))
snakeSurf.fill(GREEN)
preySurf = pygame.Surface((SIZE, SIZE))
preySurf.fill(RED)
# game operation
while True:
clock.tick(FPS)
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_UP:
snakeRect.move_ip(0, -STEP) # changes the snakeRect's position
snake.update(delta_y=-STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = 1
snake.where()
elif event.key == pygame.K_DOWN:
snakeRect.move_ip(0, STEP)
snake.update(delta_y=STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = -1
snake.where()
elif event.key == pygame.K_LEFT:
snakeRect.move_ip(-STEP, 0)
snake.update(delta_x=-STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = -2
snake.where()
elif event.key == pygame.K_RIGHT:
snakeRect.move_ip(STEP, 0)
snake.update(delta_x=STEP, prey_x=prey_x, prey_y=prey_y)
snake.go = 2
snake.where()
elif event.key == pygame.K_ESCAPE:
quit()
# game end rule
if (not 0 <= snake.x <= (SCREEN_SIZE - SIZE)) or (not 0 <= snake.y <= (SCREEN_SIZE - SIZE)):
print('out of boundary')
quit()
elif (snake.x, snake.y) in snake.space[1:] or (snake.length > 1 and snake.go + snake.gone == 0):
print('it\'s you!')
quit()
elif snake.length == 400:
print('you win!')
quit()
# prey
if START == True or (prey_x, prey_y) == (snake.x, snake.y):
prey_x = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
prey_y = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
while (prey_x, prey_y) in snake.space:
prey_x = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
prey_y = SIZE * random.randint(0, int((SCREEN_SIZE - SIZE) / STEP))
preyRect = pygame.Rect((prey_x, prey_y), (SIZE, SIZE))
if START:
START = False
elif not START:
snake.eat()
# update direction of snake
snake.gone = snake.go
screen.fill(BLACK)
screen.blit(snakeSurf, snakeRect)
snake.move()
for surf, rect in zip(snake.surf, snake.rect):
screen.blit(surf, rect)
screen.blit(preySurf, preyRect)
>>>>>>> ff35870c235c677bd6e367cfedf2974cac4a6e8a
pygame.display.update()
| 30.455882
| 101
| 0.561758
| 1,421
| 10,355
| 3.983814
| 0.104856
| 0.026497
| 0.034976
| 0.024731
| 0.988871
| 0.988871
| 0.988871
| 0.988871
| 0.988871
| 0.988871
| 0
| 0.028302
| 0.303911
| 10,355
| 340
| 102
| 30.455882
| 0.757075
| 0.05746
| 0
| 0.984375
| 0
| 0.027344
| 0.019558
| 0
| 0
| 0
| 0
| 0.005882
| 0.007813
| 0
| null | null | 0
| 0.023438
| null | null | 0.039063
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
096ca815f3ba365f32e9b9f794d729151f466436
| 13,337
|
py
|
Python
|
utils/cloud/cloud_client/api/packages_api.py
|
alexfdo/asr_eval
|
d1573cc3113ce9df1ae64c3b91b5f495e2cff9a3
|
[
"MIT"
] | 3
|
2020-03-06T17:20:34.000Z
|
2021-09-09T09:18:48.000Z
|
utils/cloud/cloud_client/api/packages_api.py
|
alexfdo/asr_eval
|
d1573cc3113ce9df1ae64c3b91b5f495e2cff9a3
|
[
"MIT"
] | 1
|
2020-02-03T18:25:08.000Z
|
2020-02-03T18:25:08.000Z
|
utils/cloud/cloud_client/api/packages_api.py
|
alexfdo/asr_eval
|
d1573cc3113ce9df1ae64c3b91b5f495e2cff9a3
|
[
"MIT"
] | 1
|
2020-01-29T19:47:54.000Z
|
2020-01-29T19:47:54.000Z
|
# coding: utf-8
"""
ASR documentation
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from cloud_client.cloud_api_client import CloudApiClient
class PackagesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = CloudApiClient()
self.api_client = api_client
def get_available_packages(self, x_session_id, **kwargs): # noqa: E501
"""Get all available packages # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_available_packages(x_session_id, async=True)
>>> result = thread.get()
:param async bool
:param str x_session_id: Session identifier (required)
:return: list[PackageDto]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_available_packages_with_http_info(x_session_id, **kwargs) # noqa: E501
else:
(data) = self.get_available_packages_with_http_info(x_session_id, **kwargs) # noqa: E501
return data
def get_available_packages_with_http_info(self, x_session_id, **kwargs): # noqa: E501
"""Get all available packages # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_available_packages_with_http_info(x_session_id, async=True)
>>> result = thread.get()
:param async bool
:param str x_session_id: Session identifier (required)
:return: list[PackageDto]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['x_session_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_available_packages" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'x_session_id' is set
if ('x_session_id' not in params or
params['x_session_id'] is None):
raise ValueError("Missing the required parameter `x_session_id` when calling `get_available_packages`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_session_id' in params:
header_params['X-Session-ID'] = params['x_session_id'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json;charset=UTF-8']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/vkasr/rest/v1/packages/available', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[PackageDto]', # noqa: E501
auth_settings=auth_settings,
async_=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def load(self, x_session_id, package_id, **kwargs): # noqa: E501
"""Load package # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.load(x_session_id, package_id, async=True)
>>> result = thread.get()
:param async bool
:param str x_session_id: Session identifier (required)
:param str package_id: Package name (required)
:return: StatusDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.load_with_http_info(x_session_id, package_id, **kwargs) # noqa: E501
else:
(data) = self.load_with_http_info(x_session_id, package_id, **kwargs) # noqa: E501
return data
def load_with_http_info(self, x_session_id, package_id, **kwargs): # noqa: E501
"""Load package # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.load_with_http_info(x_session_id, package_id, async=True)
>>> result = thread.get()
:param async bool
:param str x_session_id: Session identifier (required)
:param str package_id: Package name (required)
:return: StatusDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['x_session_id', 'package_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method load" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'x_session_id' is set
if ('x_session_id' not in params or
params['x_session_id'] is None):
raise ValueError("Missing the required parameter `x_session_id` when calling `load`") # noqa: E501
# verify the required parameter 'package_id' is set
if ('package_id' not in params or
params['package_id'] is None):
raise ValueError("Missing the required parameter `package_id` when calling `load`") # noqa: E501
collection_formats = {}
path_params = {}
if 'package_id' in params:
path_params['packageId'] = params['package_id'] # noqa: E501
query_params = []
header_params = {}
if 'x_session_id' in params:
header_params['X-Session-ID'] = params['x_session_id'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json;charset=UTF-8']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/vkasr/rest/v1/packages/{packageId}/load', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='StatusDto', # noqa: E501
auth_settings=auth_settings,
async_=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def unload(self, x_session_id, package_id, **kwargs):  # noqa: E501
    """Unload package  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.unload(x_session_id, package_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param str x_session_id: Session identifier (required)
    :param str package_id: Package name (required)
    :return: StatusDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always asks for the payload only, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With _return_http_data_only forced, the delegate returns either the
    # deserialized data (sync) or the request thread (async) -- in both
    # cases exactly what the caller should receive, so no branching needed.
    result = self.unload_with_http_info(x_session_id, package_id, **kwargs)  # noqa: E501
    return result
def unload_with_http_info(self, x_session_id, package_id, **kwargs):  # noqa: E501
    """Unload package  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.unload_with_http_info(x_session_id, package_id, async=True)
    >>> result = thread.get()

    :param async bool
    :param str x_session_id: Session identifier (required)
    :param str package_id: Package name (required)
    :return: StatusDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every argument this endpoint accepts, including the client-internal
    # control parameters handled by call_api().
    all_params = ['x_session_id', 'package_id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the named arguments.  NOTE: locals() must be taken before
    # any other local variable is created, otherwise stray locals would be
    # mistaken for request parameters.
    params = locals()
    # Fold **kwargs into the snapshot, rejecting anything unexpected.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unload" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'x_session_id' is set
    if ('x_session_id' not in params or
            params['x_session_id'] is None):
        raise ValueError("Missing the required parameter `x_session_id` when calling `unload`")  # noqa: E501
    # verify the required parameter 'package_id' is set
    if ('package_id' not in params or
            params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `unload`")  # noqa: E501

    collection_formats = {}

    # 'package_id' is interpolated into the URL template below.
    path_params = {}
    if 'package_id' in params:
        path_params['packageId'] = params['package_id']  # noqa: E501

    query_params = []

    # The session is carried in a request header, not in the URL.
    header_params = {}
    if 'x_session_id' in params:
        header_params['X-Session-ID'] = params['x_session_id']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no request body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json;charset=UTF-8'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json;charset=UTF-8'])  # noqa: E501

    # Authentication setting: this endpoint uses none beyond the session header.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/vkasr/rest/v1/packages/{packageId}/unload', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StatusDto',  # noqa: E501
        auth_settings=auth_settings,
        async_=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 38.324713
| 129
| 0.610032
| 1,570
| 13,337
| 4.928662
| 0.101911
| 0.054795
| 0.062032
| 0.030757
| 0.925433
| 0.925433
| 0.915741
| 0.896743
| 0.896743
| 0.895193
| 0
| 0.018747
| 0.296094
| 13,337
| 347
| 130
| 38.435159
| 0.805496
| 0.316113
| 0
| 0.79558
| 1
| 0
| 0.20444
| 0.063991
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038674
| false
| 0
| 0.022099
| 0
| 0.116022
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
097748848d694a8ecad028ff57b9e7498b1f917a
| 10,488
|
py
|
Python
|
tests/ocd_backend/items/ra_nijmegen.py
|
openstate/open-cultuur-data
|
57cad9d221594cc8dddb34124e19dfbbde73e129
|
[
"CC-BY-4.0"
] | 13
|
2015-02-27T01:46:31.000Z
|
2021-12-27T18:08:31.000Z
|
tests/ocd_backend/items/ra_nijmegen.py
|
openstate/open-archaeology-data
|
c16b36f7ac90f90e6ffa5c839783b6ff34969a08
|
[
"CC-BY-4.0"
] | 68
|
2015-01-05T15:15:30.000Z
|
2022-03-11T23:13:05.000Z
|
tests/ocd_backend/items/ra_nijmegen.py
|
openstate/open-cultuur-data
|
57cad9d221594cc8dddb34124e19dfbbde73e129
|
[
"CC-BY-4.0"
] | 10
|
2015-01-09T14:01:07.000Z
|
2018-04-13T14:27:07.000Z
|
import json
import os
from ocd_backend.items.ra_nijmegen import (
NijmegenGrintenItem, NijmegenDoornroosjeItem, NijmegenVierdaagseItem)
from . import ItemTestCase
class NijmegenGrintenItemTestCase(ItemTestCase):
    """Tests for NijmegenGrintenItem (Van der Grinten photo collection)."""

    def setUp(self):
        """Load the static JSON dump and the expected field values."""
        super(NijmegenGrintenItemTestCase, self).setUp()
        self.PWD = os.path.dirname(__file__)
        self.source_definition = {
            'id': 'test_definition',
            'extractor': (
                'ocd_backend.extractors.staticfile.'
                'StaticJSONExtractor'
            ),
            'transformer': 'ocd_backend.transformers.BaseTransformer',
            # Fixed copy-paste error: the item class lives in
            # ocd_backend.items.ra_nijmegen (see module import), not in
            # ocd_backend.items.gemeente_ede.
            'item': 'ocd_backend.items.ra_nijmegen.NijmegenGrintenItem',
            'loader': 'ocd_backend.loaders.ElasticsearchLoader',
            'file_url': ''
        }

        # Read the dump once and parse the same bytes, instead of opening
        # the file a second time just to json.load() it.
        dump_path = os.path.abspath(os.path.join(
            self.PWD, '../test_dumps/ra_nijmegen_grinten_item.json'))
        with open(dump_path, 'r') as f:
            self.raw_item = f.read()
        self.item = json.loads(self.raw_item)

        # Expected values extracted from the dump.
        self.collection = (
            u'Regionaal Archief Nijmegen - Fotocollectie Prof. dr. E.F. '
            u'van der Grinten')
        self.rights = u'https://creativecommons.org/licenses/by-sa/4.0/'
        self.original_object_id = u'F26235'
        self.original_object_urls = {
            u'html': (
                u'http://studiezaal.nijmegen.nl/ran/_detail.aspx?xmldescid='
                u'2113744396')}
        self.media_urls = [{
            'original_url': (
                u'http://www.nijmegen.nl/opendata/archief/vandergrinten/'
                u'F26235.jpg'),
            'content_type': 'image/jpeg'}]

    def _instantiate_item(self):
        """Construct the item under test exactly as the transformer would."""
        return NijmegenGrintenItem(
            self.source_definition, 'application/json',
            self.raw_item, self.item)

    def test_item_collection(self):
        item = self._instantiate_item()
        self.assertEqual(item.get_collection(), self.collection)

    def test_get_rights(self):
        item = self._instantiate_item()
        self.assertEqual(item.get_rights(), self.rights)

    def test_get_original_object_id(self):
        item = self._instantiate_item()
        self.assertEqual(
            item.get_original_object_id(), self.original_object_id)

    def test_get_original_object_urls(self):
        item = self._instantiate_item()
        self.assertDictEqual(
            item.get_original_object_urls(), self.original_object_urls)

    def test_get_combined_index_data(self):
        item = self._instantiate_item()
        self.assertIsInstance(item.get_combined_index_data(), dict)

    def test_get_index_data(self):
        item = self._instantiate_item()
        self.assertIsInstance(item.get_index_data(), dict)

    def test_get_all_text(self):
        # NOTE: `unicode` makes this file Python 2 only, consistent with
        # the iteritems() usage below.
        item = self._instantiate_item()
        self.assertEqual(type(item.get_all_text()), unicode)
        self.assertTrue(len(item.get_all_text()) > 0)

    def test_media_urls(self):
        item = self._instantiate_item()
        data = item.get_combined_index_data()
        self.assertEqual(data['media_urls'], self.media_urls)

    def test_combined_index_data_types(self):
        # Every declared combined-index field must be present and, when
        # set, of its declared type.
        item = self._instantiate_item()
        data = item.get_combined_index_data()
        for field, field_type in item.combined_index_fields.iteritems():
            self.assertIn(field, data)
            if data[field] is not None:
                self.assertIsInstance(data[field], field_type)
class NijmegenDoornroosjeItemTestCase(ItemTestCase):
    """Tests for NijmegenDoornroosjeItem (Doornroosje poster collection)."""

    def setUp(self):
        """Load the static JSON dump and the expected field values."""
        super(NijmegenDoornroosjeItemTestCase, self).setUp()
        self.PWD = os.path.dirname(__file__)
        self.source_definition = {
            'id': 'test_definition',
            'extractor': (
                'ocd_backend.extractors.staticfile.'
                'StaticJSONExtractor'
            ),
            'transformer': 'ocd_backend.transformers.BaseTransformer',
            # Fixed copy-paste error: the item class lives in
            # ocd_backend.items.ra_nijmegen (see module import), not in
            # ocd_backend.items.gemeente_ede.
            'item': 'ocd_backend.items.ra_nijmegen.NijmegenDoornroosjeItem',
            'loader': 'ocd_backend.loaders.ElasticsearchLoader',
            'file_url': ''
        }

        # Read the dump once and parse the same bytes, instead of opening
        # the file a second time just to json.load() it.
        dump_path = os.path.abspath(os.path.join(
            self.PWD, '../test_dumps/ra_nijmegen_doornroosje_item.json'))
        with open(dump_path, 'r') as f:
            self.raw_item = f.read()
        self.item = json.loads(self.raw_item)

        # Expected values extracted from the dump.
        self.collection = (
            u'Regionaal Archief Nijmegen - Affichecollectie Doornroosje')
        self.rights = u'https://creativecommons.org/licenses/by-sa/3.0/'
        self.original_object_id = u'AF1000.1000'
        self.original_object_urls = {
            u'html': (
                u'http://studiezaal.nijmegen.nl/ran/_detail.aspx?xmldescid='
                u'2010305578')}
        self.media_urls = [{
            'original_url': (
                u'http://www.nijmegen.nl/opendata/archief/AF1000.1000.JPG'),
            'content_type': 'image/jpeg'}]

    def _instantiate_item(self):
        """Construct the item under test exactly as the transformer would."""
        return NijmegenDoornroosjeItem(
            self.source_definition, 'application/json',
            self.raw_item, self.item)

    def test_item_collection(self):
        item = self._instantiate_item()
        self.assertEqual(item.get_collection(), self.collection)

    def test_get_rights(self):
        item = self._instantiate_item()
        self.assertEqual(item.get_rights(), self.rights)

    def test_get_original_object_id(self):
        item = self._instantiate_item()
        self.assertEqual(
            item.get_original_object_id(), self.original_object_id)

    def test_get_original_object_urls(self):
        item = self._instantiate_item()
        self.assertDictEqual(
            item.get_original_object_urls(), self.original_object_urls)

    def test_get_combined_index_data(self):
        item = self._instantiate_item()
        self.assertIsInstance(item.get_combined_index_data(), dict)

    def test_get_index_data(self):
        item = self._instantiate_item()
        self.assertIsInstance(item.get_index_data(), dict)

    def test_get_all_text(self):
        # NOTE: `unicode` makes this file Python 2 only, consistent with
        # the iteritems() usage below.
        item = self._instantiate_item()
        self.assertEqual(type(item.get_all_text()), unicode)
        self.assertTrue(len(item.get_all_text()) > 0)

    def test_media_urls(self):
        item = self._instantiate_item()
        data = item.get_combined_index_data()
        self.assertEqual(data['media_urls'], self.media_urls)

    def test_combined_index_data_types(self):
        # Every declared combined-index field must be present and, when
        # set, of its declared type.
        item = self._instantiate_item()
        data = item.get_combined_index_data()
        for field, field_type in item.combined_index_fields.iteritems():
            self.assertIn(field, data)
            if data[field] is not None:
                self.assertIsInstance(data[field], field_type)
class NijmegenVierdaagseItemTestCase(ItemTestCase):
    """Tests for NijmegenVierdaagseItem (Vierdaagse photo collection)."""

    def setUp(self):
        """Load the static JSON dump and the expected field values."""
        super(NijmegenVierdaagseItemTestCase, self).setUp()
        self.PWD = os.path.dirname(__file__)
        self.source_definition = {
            'id': 'test_definition',
            'extractor': (
                'ocd_backend.extractors.staticfile.'
                'StaticJSONExtractor'
            ),
            'transformer': 'ocd_backend.transformers.BaseTransformer',
            # Fixed copy-paste error: the item class lives in
            # ocd_backend.items.ra_nijmegen (see module import), not in
            # ocd_backend.items.gemeente_ede.
            'item': 'ocd_backend.items.ra_nijmegen.NijmegenVierdaagseItem',
            'loader': 'ocd_backend.loaders.ElasticsearchLoader',
            'file_url': ''
        }

        # Read the dump once and parse the same bytes, instead of opening
        # the file a second time just to json.load() it.
        dump_path = os.path.abspath(os.path.join(
            self.PWD, '../test_dumps/ra_nijmegen_vierdaagse_item.json'))
        with open(dump_path, 'r') as f:
            self.raw_item = f.read()
        self.item = json.loads(self.raw_item)

        # Expected values extracted from the dump.
        self.collection = (
            u'Fotocollectie Regionaal Archief Nijmegen - Vierdaagsefeesten'
            u' / Zomerfeesten')
        self.rights = u'https://creativecommons.org/licenses/by-sa/3.0/'
        self.original_object_id = u'F20706'
        self.original_object_urls = {
            u'html': (
                u'http://studiezaal.nijmegen.nl/ran/_detail.aspx?xmldescid='
                u'277219')}
        self.media_urls = [{
            'original_url': (
                u'http://www.nijmegen.nl/opendata/archief/F20706.jpg'),
            'content_type': 'image/jpeg'}]

    def _instantiate_item(self):
        """Construct the item under test exactly as the transformer would."""
        return NijmegenVierdaagseItem(
            self.source_definition, 'application/json',
            self.raw_item, self.item)

    def test_item_collection(self):
        item = self._instantiate_item()
        self.assertEqual(item.get_collection(), self.collection)

    def test_get_rights(self):
        item = self._instantiate_item()
        self.assertEqual(item.get_rights(), self.rights)

    def test_get_original_object_id(self):
        item = self._instantiate_item()
        self.assertEqual(
            item.get_original_object_id(), self.original_object_id)

    def test_get_original_object_urls(self):
        item = self._instantiate_item()
        self.assertDictEqual(
            item.get_original_object_urls(), self.original_object_urls)

    def test_get_combined_index_data(self):
        item = self._instantiate_item()
        self.assertIsInstance(item.get_combined_index_data(), dict)

    def test_get_index_data(self):
        item = self._instantiate_item()
        self.assertIsInstance(item.get_index_data(), dict)

    def test_get_all_text(self):
        # NOTE: `unicode` makes this file Python 2 only, consistent with
        # the iteritems() usage below.
        item = self._instantiate_item()
        self.assertEqual(type(item.get_all_text()), unicode)
        self.assertTrue(len(item.get_all_text()) > 0)

    def test_media_urls(self):
        item = self._instantiate_item()
        data = item.get_combined_index_data()
        self.assertEqual(data['media_urls'], self.media_urls)

    def test_combined_index_data_types(self):
        # Every declared combined-index field must be present and, when
        # set, of its declared type.
        item = self._instantiate_item()
        data = item.get_combined_index_data()
        for field, field_type in item.combined_index_fields.iteritems():
            self.assertIn(field, data)
            if data[field] is not None:
                self.assertIsInstance(data[field], field_type)
| 37.591398
| 77
| 0.624428
| 1,161
| 10,488
| 5.349699
| 0.115418
| 0.069554
| 0.052166
| 0.099984
| 0.893415
| 0.879408
| 0.875865
| 0.875865
| 0.875865
| 0.868459
| 0
| 0.009191
| 0.263444
| 10,488
| 278
| 78
| 37.726619
| 0.794822
| 0
| 0
| 0.846154
| 0
| 0
| 0.183352
| 0.073227
| 0
| 0
| 0
| 0
| 0.141026
| 1
| 0.141026
| false
| 0
| 0.017094
| 0.012821
| 0.183761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09881cdd9093da7b0af71f2e7298a1f4174943ef
| 13,157
|
py
|
Python
|
SLIT/mk_SLIT_plot.py
|
aymgal/SLIT
|
cd41433a3302e3090617c15016bb6552f27804d7
|
[
"MIT"
] | null | null | null |
SLIT/mk_SLIT_plot.py
|
aymgal/SLIT
|
cd41433a3302e3090617c15016bb6552f27804d7
|
[
"MIT"
] | null | null | null |
SLIT/mk_SLIT_plot.py
|
aymgal/SLIT
|
cd41433a3302e3090617c15016bb6552f27804d7
|
[
"MIT"
] | null | null | null |
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
from SLIT import Lens
import scipy.signal as scp
import scipy.misc as misc
def plot_critical(kappa, Fkappa, n1, n2, size, extra=1):
    """Locate the critical curve of a lens and its caustic in the source plane.

    :param kappa: convergence map of the lens.
    :param Fkappa: lensing operator, as used by Lens.image_to_source.
    :param n1, n2: image-plane dimensions in pixels.
    :param size: source-plane oversampling factor relative to the image plane.
    :param extra: border padding (pixels); half of it is trimmed from each
        edge of the Jacobian before detecting sign changes.
    :return: (x, y, xs, ys) where x, y are critical-curve coordinates scaled
        by 1/size and xs, ys are the corresponding caustic pixels.
    """
    # Inverse magnification: sign changes of det mark the critical curve.
    det = 1. / Lens.Jacobian_det(kappa, n1, n2)
    # Trim the padded border.  The original sliced with the float
    # `-extra/2.`, which is a TypeError on Python 3 / modern numpy (and
    # truncated to an empty slice on Python 2); integer floor division
    # implements the intended symmetric trim.
    trim = int(extra) // 2
    if trim > 0:
        det = det[trim:-trim, trim:-trim]

    # Binary mask of pixels where det < 0 (|-det + |det|| is nonzero there).
    kernel = np.array([[1, 0], [0, -1]])
    kernelT = np.array([[0, 1], [-1, 0]])
    diff = np.abs(-det + np.abs(det))
    diff[diff != 0] = 1

    # Diagonal finite differences of the mask: nonzero only on its boundary,
    # i.e. on the critical curve.
    xderiv = scp.convolve2d(diff, kernel, mode='same')
    yderiv = scp.convolve2d(diff, kernelT, mode='same')
    x, y = np.where(np.abs(xderiv) + np.abs(yderiv) > 0)

    # Map the critical curve to the source plane to obtain the caustic.
    critical = np.zeros((n1, n2))
    critical[x, y] = 1
    Splane = Lens.image_to_source(critical, Fkappa, 0)
    xs, ys = np.where(Splane != 0)

    # np.float was removed in numpy >= 1.20; the builtin float is equivalent.
    factor = float(size)
    # NOTE(review): only the image-plane coordinates are rescaled, matching
    # the original behaviour -- confirm callers expect xs, ys unscaled.
    return x / factor, y / factor, xs, ys
def mk_curve(x, y):
    # Order scattered curve pixels (x, y) into a closed polyline by
    # repeatedly hopping from the current point to its nearest unvisited
    # neighbour.
    # NOTE(review): mutates the input arrays in place -- consumed points are
    # overwritten with the sentinel value -999, which also keeps them far
    # away in subsequent distance computations.
    newx, newy = np.zeros(np.size(x)+1), np.zeros(np.size(y)+1)
    # Start and end at the first input point so the returned curve closes.
    newx[0], newy[0] = x[0], y[0]
    newx[-1], newy[-1] = x[0], y[0]
    x[0],y[0] = -999,-999
    for i in range(x.size-1):
        # Squared distance from the current curve point to every candidate.
        r = (newx[i]-x)**2+(newy[i]-y)**2
        if np.size(np.where(r == np.min(r)))>1:
            # Several candidates tie at the minimum distance: take the first
            # one at nonzero distance.
            newx[i + 1], newy[i + 1] = int(x[(r == np.min(r)) * (r != 0)][0]), int(y[(r == np.min(r)) * (r != 0)][0])
        else:
            # Unique nearest neighbour (assumed at nonzero distance).
            newx[i+1], newy[i+1] = int(x[(r == np.min(r))*(r!=0)]), int(y[(r == np.min(r))*(r!=0)])
        # Mark the point just consumed as visited (distance 0 to the new
        # curve point identifies it).
        x[(newx[i+1]-x)**2+(newy[i+1]-y)**2==0], y[(newx[i+1]-x)**2+(newy[i+1]-y)**2==0] =-999,-999
    # NOTE(review): returns (y, x) order, swapped relative to the input --
    # confirm callers rely on this.
    return (newy.astype(int)), (newx.astype(int))
def Plot_SLIT_Results(Y, S, FS, TrueS, TrueFS, sigma, TitleFont=40,
                      ColorbarFont=25, x=[0], y=[0], xs=[0], ys=[0],
                      delta_pix=0):
    """Display SLIT reconstruction results as a set of matplotlib figures.

    Panels: reconstructed source, true source, their difference, true and
    reconstructed lensed images, image residuals, the data, and the data
    residual (clipped at +/- 5 sigma).

    :param Y: observed image (n1 x n2).
    :param S: reconstructed source (ns1 x ns2); TrueS: ground-truth source.
    :param FS: reconstructed lensed source; TrueFS: ground-truth lensed source.
    :param sigma: noise level used to clip the residual panel.
    :param x, y: critical-curve coordinates overplotted on image-plane panels.
    :param xs, ys: caustic coordinates overplotted on source-plane panels.
    :param delta_pix: pixel scale (arcsec/pixel); > 0 draws a 1" scale bar.
    :return: 0 after plt.show() returns.
    """
    ns1, ns2 = S.shape
    n1, n2 = Y.shape
    # Unused locals from the original (size, Ls, XXarcs..YXarcs) removed;
    # np.float (removed in numpy >= 1.20) is no longer referenced.
    if delta_pix > 0:
        L = 1. / delta_pix  # length of 1 arcsecond in image-plane pixels
        XXarc = [n1 / 10, n1 / 10 + L]
        XYarc = [n2 / 10, n2 / 10]
        YYarc = [n2 / 10, n2 / 10 + L]
        YXarc = [n1 / 10, n1 / 10]

    def _finish(nxmax, nymax):
        # Shared panel decoration: hide axes, pin the limits, add a
        # colorbar with enlarged tick labels.
        plt.axis('off')
        # NOTE(review): xmax=/ymax= were removed in matplotlib 3.0
        # (use right=/top=); kept for compatibility with the file's
        # original matplotlib target.
        plt.xlim(xmax=nxmax)
        plt.ylim(ymax=nymax)
        plt.xlim(xmin=0)
        plt.ylim(ymin=0)
        cbar = plt.colorbar()
        cbar.ax.tick_params(labelsize=ColorbarFont)

    # Reconstructed source with the caustic overlaid.
    plt.figure(0)
    plt.title('$\~{S}$', fontsize=TitleFont)
    plt.imshow((S), vmin=np.min(TrueS), vmax=np.max(TrueS),
               cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(ys, xs, 'w.', ms=2)
    if delta_pix > 0:
        plt.plot(XXarc, XYarc, 'w', linewidth=10)
        plt.plot(YXarc, YYarc, 'w', linewidth=10)
        plt.text(n1 / 5 + 10, n2 / 5 + 10, '$1\"$', color='white', fontsize=25)
    _finish(ns1, ns2)

    # True source.
    plt.figure(1)
    plt.title('$S$', fontsize=TitleFont)
    plt.imshow(TrueS, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(ys, xs, 'w.', ms=2)
    _finish(ns1, ns2)

    # Absolute source reconstruction error.
    plt.figure(2)
    plt.title('$|S-\~S|$', fontsize=TitleFont)
    plt.imshow(np.abs(TrueS - S), cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(ys, xs, 'w.', ms=2)
    _finish(ns1, ns2)

    # True lensed source.
    plt.figure(3)
    plt.title('$HFS$', fontsize=TitleFont)
    plt.imshow(TrueFS, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _finish(n1, n2)

    # Reconstructed lensed source, on the true image's colour scale.
    plt.figure(4)
    plt.title('$HF\~S$', fontsize=TitleFont)
    plt.imshow((FS), vmin=np.min(TrueFS), vmax=np.max(TrueFS),
               cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _finish(n1, n2)

    # Lensed-image residual (signed, despite the absolute-value title,
    # matching the original behaviour).
    plt.figure(5)
    plt.title('$|HFS-HF\~S|$', fontsize=TitleFont)
    plt.imshow((TrueFS - FS), cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _finish(n1, n2)

    # Observed image.
    plt.figure(8)
    plt.title('$Y$', fontsize=TitleFont)
    plt.imshow(Y, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _finish(n1, n2)

    # Data residual, clipped at +/- 5 sigma so noise structure is visible.
    plt.figure(10)
    plt.title('$|Y-HF\~S|$', fontsize=TitleFont)
    plt.imshow(Y - FS, cmap=cm.gist_stern, interpolation='nearest',
               vmin=-5 * sigma, vmax=5 * sigma)  # slit.fft_convolve(Im,PSF)
    plt.plot(y, x, 'w.', ms=2)
    _finish(n1, n2)

    plt.show()
    return 0
def Plot_MCA_Results(Y, S, FS, G, FG, TrueS, TrueG, TrueFS, sigma,
                     TitleFont=40, ColorbarFont=25, x=[0], y=[0], xs=[0],
                     ys=[0], delta_pix=0):
    """Display MCA (source + lens galaxy separation) results as figures.

    Panels: reconstructed/true source and their difference, true and
    reconstructed lensed source and residual, true/reconstructed galaxy and
    residual, the data, the full model, and the data residual (clipped at
    +/- 5 sigma).

    :param Y: observed image (n1 x n2).
    :param S: reconstructed source (ns1 x ns2); TrueS: ground truth.
    :param FS: reconstructed lensed source; TrueFS: ground truth.
    :param G: reconstructed galaxy light; TrueG: ground truth; FG: galaxy
        model component added to FS for the full model panel.
    :param sigma: noise level used to clip the residual panel.
    :param x, y: critical-curve coordinates (image-plane overlays).
    :param xs, ys: caustic coordinates (source-plane overlays).
    :param delta_pix: pixel scale (arcsec/pixel); > 0 draws a 1" scale bar.
    :return: 0 after plt.show() returns.
    """
    ns1, ns2 = S.shape
    n1, n2 = G.shape
    # np.float was removed in numpy >= 1.20; the builtin float is equivalent.
    size = ns1 / float(n1)
    if delta_pix > 0:
        L = 1. / delta_pix  # length of 1 arcsecond in image-plane pixels
        Ls = L * size       # same length in source-plane pixels
        # Scale-bar segments: image plane (…arc) and source plane (…arcs).
        img_bar = ([n1 / 10, n1 / 10 + L], [n2 / 10, n2 / 10],
                   [n1 / 10, n1 / 10], [n2 / 10, n2 / 10 + L])
        src_bar = ([ns1 / 10, ns1 / 10 + Ls], [ns2 / 10, ns2 / 10],
                   [ns1 / 10, ns1 / 10], [ns2 / 10, ns2 / 10 + Ls])
    else:
        img_bar = src_bar = None

    def _decorate(nxmax, nymax, bar):
        # Shared panel decoration: hide axes, pin limits, optional 1" scale
        # bar, colorbar with enlarged tick labels.  (set_xlim/set_ylim
        # disable autoscaling, so drawing the bar after pinning the limits
        # leaves the final view identical to the original code.)
        plt.axis('off')
        # NOTE(review): xmax=/ymax= were removed in matplotlib 3.0
        # (use right=/top=); kept for compatibility with the file's
        # original matplotlib target.
        plt.xlim(xmax=nxmax)
        plt.ylim(ymax=nymax)
        plt.xlim(xmin=0)
        plt.ylim(ymin=0)
        if delta_pix > 0:
            xseg_x, xseg_y, yseg_x, yseg_y = bar
            plt.plot(xseg_x, xseg_y, 'w', linewidth=3)
            plt.plot(yseg_x, yseg_y, 'w', linewidth=3)
            plt.text(nxmax / 10 + 5, nymax / 10 + 10, '1 "', color='white',
                     fontsize=25)
        cbar = plt.colorbar()
        cbar.ax.tick_params(labelsize=ColorbarFont)

    # --- Source plane -----------------------------------------------------
    plt.figure(0)
    plt.title('$\~{S}$', fontsize=TitleFont)
    plt.imshow((S), vmin=np.min(TrueS), vmax=np.max(TrueS),
               cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(ys, xs, 'w.', ms=2)
    _decorate(ns1, ns2, src_bar)

    plt.figure(1)
    plt.title('$S$', fontsize=TitleFont)
    plt.imshow(TrueS, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(ys, xs, 'w.', ms=2)
    _decorate(ns1, ns2, src_bar)

    plt.figure(2)
    plt.title('$S-\~S$', fontsize=TitleFont)
    plt.imshow(TrueS - S, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(ys, xs, 'w.', ms=2)
    _decorate(ns1, ns2, src_bar)

    # --- Lensed source ----------------------------------------------------
    plt.figure(3)
    plt.title('$HFS$', fontsize=TitleFont)
    plt.imshow(TrueFS, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.figure(4)
    plt.title('$HF\~S$', fontsize=TitleFont)
    plt.imshow((FS), vmin=np.min(TrueFS), vmax=np.max(TrueFS),
               cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.figure(5)
    plt.title('$HFS-HF\~S$', fontsize=TitleFont)
    plt.imshow((TrueFS - FS), cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    # --- Galaxy -----------------------------------------------------------
    plt.figure(6)
    plt.title('$HG$', fontsize=TitleFont)
    plt.imshow((TrueG), vmin=np.min(TrueG), vmax=np.max(TrueG),
               cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.figure(12)
    plt.title('$H\~G$', fontsize=TitleFont)
    plt.imshow((G), vmin=np.min(TrueG), vmax=np.max(TrueG),
               cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.figure(7)
    plt.title('$HG-H\~G$', fontsize=TitleFont)
    plt.imshow((TrueG - G), cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    # --- Data, model, residual --------------------------------------------
    plt.figure(8)
    plt.title('$Y$', fontsize=TitleFont)
    plt.imshow(Y, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.figure(9)
    plt.title('$H\~G+HF\~S$', fontsize=TitleFont)
    plt.imshow(FS + FG, cmap=cm.gist_stern, interpolation='nearest')
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.figure(10)
    plt.title('$Y-H\~G-HF\~S$', fontsize=TitleFont)
    plt.imshow(Y - FS - FG, cmap=cm.gist_stern, interpolation='nearest',
               vmin=-5 * sigma, vmax=5 * sigma)  # slit.fft_convolve(Im,PSF)
    plt.plot(y, x, 'w.', ms=2)
    _decorate(n1, n2, img_bar)

    plt.show()
    return 0
| 35.179144
| 147
| 0.570951
| 2,061
| 13,157
| 3.609898
| 0.074721
| 0.04328
| 0.035484
| 0.045161
| 0.885484
| 0.879301
| 0.864919
| 0.864785
| 0.850941
| 0.847312
| 0
| 0.053125
| 0.23744
| 13,157
| 374
| 148
| 35.179144
| 0.688428
| 0.016417
| 0
| 0.81686
| 0
| 0
| 0.040715
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011628
| false
| 0
| 0.017442
| 0
| 0.040698
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09d5db0a99a3d24e0b1288336523d4c3ad21b405
| 2,103
|
py
|
Python
|
WriteToCSV.py
|
Michal3456/ermlab3
|
4b4f1e6775467c05875580fef4df336773b23209
|
[
"MIT"
] | null | null | null |
WriteToCSV.py
|
Michal3456/ermlab3
|
4b4f1e6775467c05875580fef4df336773b23209
|
[
"MIT"
] | null | null | null |
WriteToCSV.py
|
Michal3456/ermlab3
|
4b4f1e6775467c05875580fef4df336773b23209
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module WriteToCSV
# from ./WriteToCSV.py
# by written 2.00
# no doc
# imports
import csv
import re
# no Variables
# class
class WriteTiCSV:
    """
    open_file_count_word() -> bool.
    """

    @staticmethod
    def open_file_count_word(dict_words_quantity) -> bool:
        """Write rows to 'words.csv' using the quote-everything dialect.

        The class previously defined two methods with this name; the first
        definition (which wrote the literal characters of 'd' for every
        item) was dead code, silently shadowed by this one, and has been
        removed.

        Parameters
        -----------
        dict_words_quantity:
            Iterable of rows, each row an iterable of fields
            (e.g. dict.items() for word/count pairs).

        Returns
        -------
        return:
            True on success, False if writing failed.
        """
        # Registering the dialect is idempotent and cannot raise here, so
        # it stays outside the try block.
        csv.register_dialect('myDialect',
                             quoting=csv.QUOTE_ALL,
                             skipinitialspace=True)
        try:
            # 'with' closes the file even on error; the old
            # 'finally: f.close()' raised NameError when open() itself
            # failed and was redundant otherwise.
            with open('words.csv', 'w') as f:
                writer = csv.writer(f, dialect='myDialect')
                for row in dict_words_quantity:
                    writer.writerow(row)
        except Exception as e:
            # Fixed format spec: '%e' expects a number and crashed when
            # given the exception instance.
            print("Error in write csv method: %s" % e)
            return False
        return True
| 25.337349
| 110
| 0.503091
| 211
| 2,103
| 4.867299
| 0.36019
| 0.05258
| 0.099318
| 0.082765
| 0.836417
| 0.815969
| 0.815969
| 0.815969
| 0.815969
| 0.720545
| 0
| 0.003175
| 0.400856
| 2,103
| 82
| 111
| 25.646341
| 0.811905
| 0.266286
| 0
| 0.742857
| 0
| 0
| 0.085629
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.057143
| 0
| 0.257143
| 0.057143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fece586f7c879565e293bd688e97ebed878c9afc
| 137
|
py
|
Python
|
jack/beanstalk/shortcuts.py
|
andreisavu/django-jack
|
7a20eeebe3cb6535c0d3734124acf7171713fdc5
|
[
"Apache-2.0"
] | 10
|
2015-04-13T10:09:08.000Z
|
2021-12-17T01:29:11.000Z
|
jack/beanstalk/shortcuts.py
|
andreisavu/django-jack
|
7a20eeebe3cb6535c0d3734124acf7171713fdc5
|
[
"Apache-2.0"
] | null | null | null |
jack/beanstalk/shortcuts.py
|
andreisavu/django-jack
|
7a20eeebe3cb6535c0d3734124acf7171713fdc5
|
[
"Apache-2.0"
] | 3
|
2016-02-12T01:48:38.000Z
|
2021-12-17T01:29:10.000Z
|
from django.shortcuts import render_to_response
def render_unavailable():
    """Render the generic 'beanstalk unavailable' error page."""
    template = 'beanstalk/unavailable.html'
    return render_to_response(template)
| 19.571429
| 59
| 0.817518
| 17
| 137
| 6.294118
| 0.705882
| 0.149533
| 0.299065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109489
| 137
| 6
| 60
| 22.833333
| 0.877049
| 0
| 0
| 0
| 0
| 0
| 0.192593
| 0.192593
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
28df32a590f831dc58732e958794fa48e5c4ce05
| 11,754
|
py
|
Python
|
tests/unit/test_word2vec.py
|
juliob29/BitcoinTalk-Insights
|
73033791698c67bb1f6268dc0b762832f0e097dd
|
[
"MIT"
] | null | null | null |
tests/unit/test_word2vec.py
|
juliob29/BitcoinTalk-Insights
|
73033791698c67bb1f6268dc0b762832f0e097dd
|
[
"MIT"
] | 13
|
2020-01-28T22:29:05.000Z
|
2022-02-09T23:52:43.000Z
|
tests/unit/test_word2vec.py
|
juliob29/BitcoinTalk-Insights
|
73033791698c67bb1f6268dc0b762832f0e097dd
|
[
"MIT"
] | null | null | null |
"""
Unit tests for the Word2Vec class.
"""
import os
import gensim
import unittest
import psycopg2
from isoweek import Week
from pathlib import Path
from skill.skill import Crypto
from skill.word2vec import Model
from skill.storage import Storage
from skill.bitcointalk import BitcoinTalk
class Word2VecTestCase(unittest.TestCase):
"""
Test case for the Model() class.
"""
@classmethod
def setUpClass(self):
sql = """
INSERT INTO "public"."message"("sid","topic","topic_position","member","post_time","subject","link","content","content_no_html","content_no_quote","content_no_quote_no_html","db_update_time")
VALUES
(1000000000,34,1,28,E'2010-02-01 02:17:02',E'Questions about Addresses',E'https://bitcointalk.org/index.php?topic=34.msg185#msg185',E'\\x3129205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f20616e204950206164647265737320746861742069736e27742072756e6e696e67207468652070726f6772616d3f3c62723e3229205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120696e76616c696420426974636f696e20416464726573733f3c62723e3329205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120426974636f696e20416464726573732074686174206e6f626f6479206f776e733f3c62723e3429205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120426974636f696e20416464726573732074686174207573656420746f206265206f776e65642c20627574206861732073696e6365206265656e206162616e646f6e65642028726563697069656e742073746f707065642072756e6e696e67207468652070726f6772616d2c206f722064656c657465642074686569722077616c6c6574293f3c62723e3c62723e49207468616e6b20616e7377657265727320666f7220657870616e64696e67206d79206b6e6f776c656467652e',E'1) What would happen if one attempts to send an amount of coins to an IP address that isn t running the program?2) What would happen if one attempts to send an amount of coins to a invalid Bitcoin Address?3) What would happen if one attempts to send an amount of coins to a Bitcoin Address that nobody owns?4) What would happen if one attempts to send an amount of coins to a Bitcoin Address that used to be owned, but has since been abandoned (recipient stopped running the program, or deleted their wallet)?I thank answerers for expanding my 
knowledge.',E'\\x3129205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f20616e204950206164647265737320746861742069736e27742072756e6e696e67207468652070726f6772616d3f3c62723e3229205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120696e76616c696420426974636f696e20416464726573733f3c62723e3329205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120426974636f696e20416464726573732074686174206e6f626f6479206f776e733f3c62723e3429205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120426974636f696e20416464726573732074686174207573656420746f206265206f776e65642c20627574206861732073696e6365206265656e206162616e646f6e65642028726563697069656e742073746f707065642072756e6e696e67207468652070726f6772616d2c206f722064656c657465642074686569722077616c6c6574293f3c62723e3c62723e49207468616e6b20616e7377657265727320666f7220657870616e64696e67206d79206b6e6f776c656467652e',E'1) What would happen if one attempts to send an amount of coins to an IP address that isn t running the program?2) What would happen if one attempts to send an amount of coins to a invalid Bitcoin Address?3) What would happen if one attempts to send an amount of coins to a Bitcoin Address that nobody owns?4) What would happen if one attempts to send an amount of coins to a Bitcoin Address that used to be owned, but has since been abandoned (recipient stopped running the program, or deleted their wallet)?I thank answerers for expanding my knowledge.',NULL),
(1000000001,34,2,4,E'2010-02-01 10:44:43',E'Re: Questions about Addresses',E'https://bitcointalk.org/index.php?topic=34.msg187#msg187',E'\\x3c64697620636c6173733d2271756f7465686561646572223e3c6120687265663d2268747470733a2f2f626974636f696e74616c6b2e6f72672f696e6465782e7068703f746f7069633d33342e6d7367313835236d7367313835223e51756f74652066726f6d3a20536162756e6972206f6e2046656272756172792030312c20323031302c2030323a31373a303220414d3c2f613e3c2f6469763e3c64697620636c6173733d2271756f7465223e3129205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f20616e204950206164647265737320746861742069736e27742072756e6e696e67207468652070726f6772616d3f3c62723e3229205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120696e76616c696420426974636f696e20416464726573733f3c62723e3c2f6469763e3c62723e4e6f7420706f737369626c652c2074686520736f66747761726520646f65736e277420616c6c6f7720746861742e3c62723e3c62723e3c64697620636c6173733d2271756f7465686561646572223e51756f74653c2f6469763e3c64697620636c6173733d2271756f7465223e3329205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120426974636f696e20416464726573732074686174206e6f626f6479206f776e733f3c62723e3429205768617420776f756c642068617070656e206966206f6e6520617474656d70747320746f2073656e6420616e20616d6f756e74206f6620636f696e7320746f206120426974636f696e20416464726573732074686174207573656420746f206265206f776e65642c20627574206861732073696e6365206265656e206162616e646f6e65642028726563697069656e742073746f707065642072756e6e696e67207468652070726f6772616d2c206f722064656c657465642074686569722077616c6c6574293f3c62723e3c2f6469763e3c62723e4920677565737320746865206d6f6e65792077696c6c206a757374206265206c6f737420696e207468617420636173652e',E'Quote from: Sabunir on February 01, 2010, 02:17:02 AM1) What would happen if one attempts to send an 
amount of coins to an IP address that isn t running the program?2) What would happen if one attempts to send an amount of coins to a invalid Bitcoin Address?Not possible, the software doesn t allow that.Quote3) What would happen if one attempts to send an amount of coins to a Bitcoin Address that nobody owns?4) What would happen if one attempts to send an amount of coins to a Bitcoin Address that used to be owned, but has since been abandoned (recipient stopped running the program, or deleted their wallet)?I guess the money will just be lost in that case.',E'\\x3c62723e4e6f7420706f737369626c652c2074686520736f66747761726520646f65736e277420616c6c6f7720746861742e3c62723e3c62723e3c62723e4920677565737320746865206d6f6e65792077696c6c206a757374206265206c6f737420696e207468617420636173652e',E'Not possible, the software doesn t allow that.I guess the money will just be lost in that case.',NULL),
(1000000002,34,3,30,E'2010-02-01 23:10:07',E'Re: Questions about Addresses',E'https://bitcointalk.org/index.php?topic=34.msg196#msg196',E'\\x576861742068617070656e7320746f20426974636f696e732073656e7420746f20616e204950206164647265737320776865726520746865207375622d6e657420636f6d7075746572732061726520656163682072756e6e696e6720426974636f696e20736f667477617265203f203c696d67207372633d2268747470733a2f2f626974636f696e74616c6b2e6f72672f536d696c6579732f64656661756c742f6875682e6769662220616c743d224875682220626f726465723d2230223e3c62723e3c62723e446f207468657920616c6c207265636569766520616e20657175616c207368617265206f6620426974636f696e73203f206c6f6c203c696d67207372633d2268747470733a2f2f626974636f696e74616c6b2e6f72672f536d696c6579732f64656661756c742f726f6c6c657965732e6769662220616c743d22526f6c6c20457965732220626f726465723d2230223e',E'What happens to Bitcoins sent to an IP address where the sub-net computers are each running Bitcoin software ? Do they all receive an equal share of Bitcoins ? lol ',E'\\x576861742068617070656e7320746f20426974636f696e732073656e7420746f20616e204950206164647265737320776865726520746865207375622d6e657420636f6d7075746572732061726520656163682072756e6e696e6720426974636f696e20736f667477617265203f203c696d67207372633d2268747470733a2f2f626974636f696e74616c6b2e6f72672f536d696c6579732f64656661756c742f6875682e6769662220616c743d224875682220626f726465723d2230223e3c62723e3c62723e446f207468657920616c6c207265636569766520616e20657175616c207368617265206f6620426974636f696e73203f206c6f6c203c696d67207372633d2268747470733a2f2f626974636f696e74616c6b2e6f72672f536d696c6579732f64656661756c742f726f6c6c657965732e6769662220616c743d22526f6c6c20457965732220626f726465723d2230223e',E'What happens to Bitcoins sent to an IP address where the sub-net computers are each running Bitcoin software ? Do they all receive an equal share of Bitcoins ? lol ',NULL),
(1000000003,34,4,26,E'2010-02-01 23:41:24',E'Re: Questions about Addresses',E'https://bitcointalk.org/index.php?topic=34.msg197#msg197',E'\\x496620492072656d656d62657220636f72726563746c792049207468696e6b20492077617320746f6c642074686174207468657920776f756c642062652073656e7420746f2074686520666972737420626974636f696e206170706c69636174696f6e20776974682077686f6d207468652073656e64657220636f6e6e656374732e',E'If I remember correctly I think I was told that they would be sent to the first bitcoin application with whom the sender connects.',E'\\x496620492072656d656d62657220636f72726563746c792049207468696e6b20492077617320746f6c642074686174207468657920776f756c642062652073656e7420746f2074686520666972737420626974636f696e206170706c69636174696f6e20776974682077686f6d207468652073656e64657220636f6e6e656374732e',E'If I remember correctly I think I was told that they would be sent to the first bitcoin application with whom the sender connects.',NULL)
"""
with Storage(os.getenv('POSTGRES_URI')) as S:
try:
S.execute(sql)
except psycopg2.IntegrityError:
S.execute("""
DELETE FROM message
WHERE sid IN (1000000000, 1000000001, 1000000002, 1000000003)
""")
@classmethod
def tearDownClass(self):
with Storage(os.getenv('POSTGRES_URI')) as S:
S.execute("""
DELETE FROM message
WHERE sid IN (1000000000, 1000000001, 1000000002, 1000000003)
""")
def test_corpus_create(self):
"""
Model()._corpus_create() can create a properly cleaned corpus from an
input from the BitcoinTalk() class.
"""
unclean = BitcoinTalk().all()
results = Model()._corpus_create(unclean)
for result in results:
assert type(result) == list
assert len(result) >= 3
def test_training_pipeline(self):
"""
Model().train is able to train a model and place it
in the proper directory.
"""
#
# The original Word2Vec requires that words
# appear at least 5 times by default. Here
# we pass the argument min_count=1 because
# our test test does not contain words that
# repeat 5 times.
#
path = os.getenv('MODELS_PATH')
trained = Model().train(
directory=path, data_last_week=False, epochs=1,
min_count=1)
last_week = str(Week.thisweek() - 1)
my_trained_file = Path(f'{path}/{last_week}.model')
assert my_trained_file.is_file()
assert type(trained) == gensim.models.word2vec.Word2Vec
| 138.282353
| 3,576
| 0.852646
| 868
| 11,754
| 11.506912
| 0.279954
| 0.010813
| 0.018022
| 0.020425
| 0.660292
| 0.65809
| 0.65809
| 0.65809
| 0.644073
| 0.644073
| 0
| 0.54246
| 0.106347
| 11,754
| 84
| 3,577
| 139.928571
| 0.408416
| 0.037519
| 0
| 0.235294
| 0
| 0.098039
| 0.880057
| 0.244698
| 0
| 1
| 0
| 0
| 0.078431
| 1
| 0.078431
| false
| 0
| 0.196078
| 0
| 0.294118
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e90141a0ecf7af89f8bdd4046f70b52f3a1c59b1
| 331
|
py
|
Python
|
solardatatools/__init__.py
|
tadatoshi/solar-data-tools
|
51b3e1ac2d1daefef88b8f110c42db42da3ba952
|
[
"BSD-2-Clause"
] | null | null | null |
solardatatools/__init__.py
|
tadatoshi/solar-data-tools
|
51b3e1ac2d1daefef88b8f110c42db42da3ba952
|
[
"BSD-2-Clause"
] | null | null | null |
solardatatools/__init__.py
|
tadatoshi/solar-data-tools
|
51b3e1ac2d1daefef88b8f110c42db42da3ba952
|
[
"BSD-2-Clause"
] | null | null | null |
from solardatatools.data_transforms import standardize_time_axis
from solardatatools.data_transforms import make_2d
from solardatatools.data_transforms import fix_time_shifts
from solardatatools.clear_day_detection import find_clear_days
from solardatatools.dataio import get_pvdaq_data
from solardatatools.plotting import plot_2d
| 47.285714
| 64
| 0.909366
| 45
| 331
| 6.355556
| 0.488889
| 0.377622
| 0.230769
| 0.335664
| 0.398601
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006515
| 0.072508
| 331
| 6
| 65
| 55.166667
| 0.925081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3aa99d611090a847ab67f1c2b086e2ebc3283e58
| 367
|
py
|
Python
|
pycessing/pycessing.py
|
kitao/pycessing
|
ab59dfbef7f52712e894f5b4d869c27f68bf3685
|
[
"MIT"
] | null | null | null |
pycessing/pycessing.py
|
kitao/pycessing
|
ab59dfbef7f52712e894f5b4d869c27f68bf3685
|
[
"MIT"
] | null | null | null |
pycessing/pycessing.py
|
kitao/pycessing
|
ab59dfbef7f52712e894f5b4d869c27f68bf3685
|
[
"MIT"
] | null | null | null |
from sketch_runner import load_library # NOQA
from sketch_runner import load_all_jars # NOQA
from sketch_runner import load_jar # NOQA
from sketch_runner import complete_path # NOQA
from sketch_runner import reload_sketch # NOQA
from sketch_runner import run_app # NOQA
from app import App # NOQA
# from sketch_info import filename, dirname, basename # NOQA
| 36.7
| 61
| 0.80654
| 56
| 367
| 5.035714
| 0.339286
| 0.248227
| 0.340426
| 0.468085
| 0.58156
| 0.212766
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163488
| 367
| 9
| 62
| 40.777778
| 0.918567
| 0.253406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3aec019d39dd4990a71833138a1afd94240bc790
| 116
|
py
|
Python
|
gym-tic-tac-toe/gym_tic_tac_toe/envs/__init__.py
|
Akhilez/gyms
|
bf16cf7fa4024cf9f0124e73293ea7e37421478e
|
[
"MIT"
] | null | null | null |
gym-tic-tac-toe/gym_tic_tac_toe/envs/__init__.py
|
Akhilez/gyms
|
bf16cf7fa4024cf9f0124e73293ea7e37421478e
|
[
"MIT"
] | null | null | null |
gym-tic-tac-toe/gym_tic_tac_toe/envs/__init__.py
|
Akhilez/gyms
|
bf16cf7fa4024cf9f0124e73293ea7e37421478e
|
[
"MIT"
] | null | null | null |
from gym_tic_tac_toe.envs.tic_tac_toe_env import TicTacToeEnv
from gym_tic_tac_toe.envs.t3_v2 import TicTacToeEnvV2
| 38.666667
| 61
| 0.896552
| 22
| 116
| 4.272727
| 0.545455
| 0.191489
| 0.287234
| 0.276596
| 0.425532
| 0.425532
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.068966
| 116
| 2
| 62
| 58
| 0.842593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3af4bbb0f43dcd060407da0cfc4ccbf66473f524
| 5,045
|
py
|
Python
|
test/units/service_framework_test.py
|
praktikos/praktikos-template-python
|
324c7640ee05469fba87a7ec64d6ac61675b259e
|
[
"MIT"
] | null | null | null |
test/units/service_framework_test.py
|
praktikos/praktikos-template-python
|
324c7640ee05469fba87a7ec64d6ac61675b259e
|
[
"MIT"
] | null | null | null |
test/units/service_framework_test.py
|
praktikos/praktikos-template-python
|
324c7640ee05469fba87a7ec64d6ac61675b259e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from api.rdb.utils.service_framework import get_api_gateway_request
# https://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-api-gateway-request
# noinspection PyPep8,PyPep8,PyPep8
api_gateway_proxy_request_event_get = {
"path": "/test/hello",
"headers": {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, lzma, sdch, br",
"Accept-Language": "en-US,en;q=0.8",
"CloudFront-Forwarded-Proto": "https",
"CloudFront-Is-Desktop-Viewer": "true",
"CloudFront-Is-Mobile-Viewer": "false",
"CloudFront-Is-SmartTV-Viewer": "false",
"CloudFront-Is-Tablet-Viewer": "false",
"CloudFront-Viewer-Country": "US",
"Host": "wt6mne2s9k.execute-api.us-west-2.amazonaws.com",
"Upgrade-Insecure-Requests": "1",
"Subscription-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48",
"Via": "1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)",
"X-Amz-Cf-Id": "nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==",
"X-Forwarded-For": "192.168.100.1, 192.168.1.1",
"X-Forwarded-Port": "443",
"X-Forwarded-Proto": "https"
},
"pathParameters": {
"proxy": "hello"
},
"requestContext": {
"accountId": "123456789012",
"resourceId": "us4z18",
"stage": "test",
"requestId": "41b45ea3-70b5-11e6-b7bd-69b5aaebc7d9",
"identity": {
"cognitoIdentityPoolId": "",
"accountId": "",
"cognitoIdentityId": "",
"caller": "",
"apiKey": "",
"sourceIp": "192.168.100.1",
"cognitoAuthenticationType": "",
"cognitoAuthenticationProvider": "",
"userArn": "",
"userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48",
"user": ""
},
"resourcePath": "/{proxy+}",
"httpMethod": "GET",
"apiId": "wt6mne2s9k"
},
"resource": "/{proxy+}",
"httpMethod": "GET",
"queryStringParameters": {
"name": "me"
},
"stageVariables": {
"stageVarName": "stageVarValue"
}
}
# noinspection PyPep8,PyPep8
api_gateway_proxy_request_event_put = {
"path": "/test/hello",
"headers": {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, lzma, sdch, br",
"Accept-Language": "en-US,en;q=0.8",
"CloudFront-Forwarded-Proto": "https",
"CloudFront-Is-Desktop-Viewer": "true",
"CloudFront-Is-Mobile-Viewer": "false",
"CloudFront-Is-SmartTV-Viewer": "false",
"CloudFront-Is-Tablet-Viewer": "false",
"CloudFront-Viewer-Country": "US",
"Host": "wt6mne2s9k.execute-api.us-west-2.amazonaws.com",
"Upgrade-Insecure-Requests": "1",
"Subscription-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48",
"Via": "1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)",
"X-Amz-Cf-Id": "nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==",
"X-Forwarded-For": "192.168.100.1, 192.168.1.1",
"X-Forwarded-Port": "443",
"X-Forwarded-Proto": "https"
},
"pathParameters": {
"proxy": "hello"
},
"requestContext": {
"accountId": "123456789012",
"resourceId": "us4z18",
"stage": "test",
"requestId": "41b45ea3-70b5-11e6-b7bd-69b5aaebc7d9",
"identity": {
"cognitoIdentityPoolId": "",
"accountId": "",
"cognitoIdentityId": "",
"caller": "",
"apiKey": "",
"sourceIp": "192.168.100.1",
"cognitoAuthenticationType": "",
"cognitoAuthenticationProvider": "",
"userArn": "",
"userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48",
"user": ""
},
"resourcePath": "/{proxy+}",
"httpMethod": "PUT",
"apiId": "wt6mne2s9k"
},
"resource": "/{proxy+}",
"httpMethod": "PUT",
"queryStringParameters": {
"name": "me"
},
"stageVariables": {
"stageVarName": "stageVarValue"
}
}
def test_log_api_gateway_request():
"""Test request_header
"""
result = get_api_gateway_request(api_gateway_proxy_request_event_get)
assert result.startswith('curl https://')
result = get_api_gateway_request(api_gateway_proxy_request_event_put)
assert result.startswith('curl -H "Content-Type: application/json" -X PUT https://')
| 39.414063
| 170
| 0.589296
| 531
| 5,045
| 5.514124
| 0.299435
| 0.030738
| 0.043033
| 0.030055
| 0.888661
| 0.862705
| 0.813525
| 0.786885
| 0.786885
| 0.786885
| 0
| 0.089406
| 0.232904
| 5,045
| 127
| 171
| 39.724409
| 0.667183
| 0.040238
| 0
| 0.810345
| 0
| 0.051724
| 0.586507
| 0.223924
| 0
| 0
| 0
| 0
| 0.017241
| 1
| 0.008621
| false
| 0
| 0.008621
| 0
| 0.017241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c910af6873447b004fdd58ca734317e049889b33
| 336
|
py
|
Python
|
temporalio/bridge/proto/activity_result/__init__.py
|
cretz/temporal-sdk-python
|
431ca1967d365556a9cf5aa9aac00243b71059f8
|
[
"MIT"
] | 55
|
2022-01-31T22:02:22.000Z
|
2022-03-30T11:17:21.000Z
|
temporalio/bridge/proto/activity_result/__init__.py
|
cretz/temporal-sdk-python
|
431ca1967d365556a9cf5aa9aac00243b71059f8
|
[
"MIT"
] | 7
|
2022-02-04T14:08:46.000Z
|
2022-03-22T13:27:30.000Z
|
temporalio/bridge/proto/activity_result/__init__.py
|
cretz/temporal-sdk-python
|
431ca1967d365556a9cf5aa9aac00243b71059f8
|
[
"MIT"
] | 4
|
2022-01-31T17:31:49.000Z
|
2022-03-29T01:04:46.000Z
|
from .activity_result_pb2 import (
ActivityExecutionResult,
ActivityResolution,
Cancellation,
DoBackoff,
Failure,
Success,
WillCompleteAsync,
)
__all__ = [
"ActivityExecutionResult",
"ActivityResolution",
"Cancellation",
"DoBackoff",
"Failure",
"Success",
"WillCompleteAsync",
]
| 16.8
| 34
| 0.660714
| 20
| 336
| 10.8
| 0.65
| 0.37963
| 0.490741
| 0.574074
| 0.861111
| 0.861111
| 0.861111
| 0
| 0
| 0
| 0
| 0.003906
| 0.238095
| 336
| 19
| 35
| 17.684211
| 0.839844
| 0
| 0
| 0
| 0
| 0
| 0.276786
| 0.068452
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a30b2181125014ebeb41a0a5caaefb58593a434c
| 11,756
|
py
|
Python
|
commit_checker/download_security_classified_crbug_numbers.py
|
freingruber/JavaScript-Raider
|
d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0
|
[
"Apache-2.0"
] | 91
|
2022-01-24T07:32:34.000Z
|
2022-03-31T23:37:15.000Z
|
commit_checker/download_security_classified_crbug_numbers.py
|
zeusguy/JavaScript-Raider
|
d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0
|
[
"Apache-2.0"
] | null | null | null |
commit_checker/download_security_classified_crbug_numbers.py
|
zeusguy/JavaScript-Raider
|
d1c1fff2fcfc60f210b93dbe063216fa1a83c1d0
|
[
"Apache-2.0"
] | 11
|
2022-01-24T14:21:12.000Z
|
2022-03-31T23:37:23.000Z
|
# Copyright 2022 @ReneFreingruber
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We can use the "Chrome update news" to detect which bugs were security related.
# This information can be used to label older CRBugs (e.g. "security relevant" or "not security relevant")
# The labels are important when the data is passed to some learning logic.
# For example, a neuronal network can be used to detect if a commit fixes an interesting CRBug (an exploitable 1day).
# Data like "CRBug public viewable or not", authors of the commit patch, reviewers of the commit,
# how long did it take to fix the bug, date of the fix, referenced persons, which files were modified, which words occur in the commit message,
# which words are used in the discussion-messages, assigned tags, ... can be used as input to the neuronal network.
# With the correct labels (which this script downloads), the neuronal network can learn to detect if a commit is
# interesting or not.
import requests
import sys
import pickle
import re
import utils
import os
all_urls = """https://chromereleases.googleblog.com/2021/08/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/08/the-stable-channel-has-been-updated-to.html
https://chromereleases.googleblog.com/2021/07/stable-channel-update-for-desktop_20.html
https://chromereleases.googleblog.com/2021/07/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/06/stable-channel-update-for-desktop_17.html
https://chromereleases.googleblog.com/2021/06/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/05/stable-channel-update-for-desktop_25.html
https://chromereleases.googleblog.com/2021/05/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/04/stable-channel-update-for-desktop_26.html
https://chromereleases.googleblog.com/2021/04/stable-channel-update-for-desktop_20.html
https://chromereleases.googleblog.com/2021/04/stable-channel-update-for-desktop_14.html
https://chromereleases.googleblog.com/2021/04/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/03/stable-channel-update-for-desktop_30.html
https://chromereleases.googleblog.com/2021/03/stable-channel-update-for-desktop_12.html
https://chromereleases.googleblog.com/2021/03/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/02/stable-channel-update-for-desktop_16.html
https://chromereleases.googleblog.com/2021/02/stable-channel-update-for-desktop_4.html
https://chromereleases.googleblog.com/2021/02/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2021/01/stable-channel-update-for-desktop_19.html
https://chromereleases.googleblog.com/2021/01/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/12/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/11/stable-channel-update-for-desktop_17.html
https://chromereleases.googleblog.com/2020/11/stable-channel-update-for-desktop_11.html
https://chromereleases.googleblog.com/2020/11/stable-channel-update-for-desktop_9.html
https://chromereleases.googleblog.com/2020/11/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/10/stable-channel-update-for-desktop_20.html
https://chromereleases.googleblog.com/2020/10/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/10/beta-channel-update-for-desktop_6.html
https://chromereleases.googleblog.com/2020/09/stable-channel-update-for-desktop_21.html
https://chromereleases.googleblog.com/2020/09/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/08/stable-channel-update-for-desktop_25.html
https://chromereleases.googleblog.com/2020/08/stable-channel-update-for-desktop_18.html
https://chromereleases.googleblog.com/2020/08/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/07/stable-channel-update-for-desktop_27.html
https://chromereleases.googleblog.com/2020/07/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/06/stable-channel-update-for-desktop_22.html
https://chromereleases.googleblog.com/2020/06/stable-channel-update-for-desktop_15.html
https://chromereleases.googleblog.com/2020/06/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/05/stable-channel-update-for-desktop_19.html
https://chromereleases.googleblog.com/2020/05/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/04/stable-channel-update-for-desktop_27.html
https://chromereleases.googleblog.com/2020/04/stable-channel-update-for-desktop_21.html
https://chromereleases.googleblog.com/2020/04/stable-channel-update-for-desktop_15.html
https://chromereleases.googleblog.com/2020/04/stable-channel-update-for-desktop_7.html
https://chromereleases.googleblog.com/2020/03/stable-channel-update-for-desktop_31.html
https://chromereleases.googleblog.com/2020/03/stable-channel-update-for-desktop_18.html
https://chromereleases.googleblog.com/2020/03/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/02/stable-channel-update-for-desktop_24.html
https://chromereleases.googleblog.com/2020/02/stable-channel-update-for-desktop_18.html
https://chromereleases.googleblog.com/2020/02/stable-channel-update-for-desktop_13.html
https://chromereleases.googleblog.com/2020/02/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2020/01/stable-channel-update-for-desktop_16.html
https://chromereleases.googleblog.com/2020/01/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/12/stable-channel-update-for-desktop_17.html
https://chromereleases.googleblog.com/2019/12/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/11/stable-channel-update-for-desktop_18.html
https://chromereleases.googleblog.com/2019/10/stable-channel-update-for-desktop_31.html
https://chromereleases.googleblog.com/2019/10/stable-channel-update-for-desktop_22.html
https://chromereleases.googleblog.com/2019/10/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/09/stable-channel-update-for-desktop_18.html
https://chromereleases.googleblog.com/2019/09/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/08/stable-channel-update-for-desktop_26.html
https://chromereleases.googleblog.com/2019/08/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/07/stable-channel-update-for-desktop_30.html
https://chromereleases.googleblog.com/2019/07/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/06/stable-channel-update-for-desktop_13.html
https://chromereleases.googleblog.com/2019/06/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/05/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/04/stable-channel-update-for-desktop_30.html
https://chromereleases.googleblog.com/2019/04/stable-channel-update-for-desktop_23.html
https://chromereleases.googleblog.com/2019/04/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/03/stable-channel-update-for-desktop_12.html
https://chromereleases.googleblog.com/2019/03/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/02/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2019/01/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/12/stable-channel-update-for-desktop_12.html
https://chromereleases.googleblog.com/2018/12/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/11/stable-channel-update-for-desktop_19.html
https://chromereleases.googleblog.com/2018/11/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/10/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/09/stable-channel-update-for-desktop_17.html
https://chromereleases.googleblog.com/2018/09/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/07/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/06/stable-channel-update-for-desktop_12.html
https://chromereleases.googleblog.com/2018/06/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/05/stable-channel-update-for-desktop_58.html
https://chromereleases.googleblog.com/2018/05/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/04/stable-channel-update-for-desktop_26.html
https://chromereleases.googleblog.com/2018/04/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/03/stable-channel-update-for-desktop_20.html
https://chromereleases.googleblog.com/2018/03/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/02/stable-channel-update-for-desktop_13.html
https://chromereleases.googleblog.com/2018/02/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2018/01/stable-channel-update-for-desktop_24.html
https://chromereleases.googleblog.com/2017/12/stable-channel-update-for-desktop_14.html
https://chromereleases.googleblog.com/2017/12/stable-channel-update-for-desktop.html
https://chromereleases.googleblog.com/2017/11/stable-channel-update-for-desktop_13.html
https://chromereleases.googleblog.com/2017/11/stable-channel-update-for-desktop.html"""
def get_response(arg_url):
    """Download *arg_url* and return the response body as text.

    A timeout is supplied so a stalled server cannot hang the crawl
    indefinitely (``requests`` waits forever by default).
    """
    # NOTE(review): no retry/backoff — any network error propagates to the
    # caller, matching the original behavior.
    return requests.get(arg_url, timeout=60).text
def load_database():
    """Return the cached set of security bug ids.

    Reads the pickle cache if present; otherwise returns an empty set so
    first runs start from scratch.
    """
    db_path = 'database_security_bugs_from_rewards.pickle'
    if not os.path.exists(db_path):
        return set()
    with open(db_path, 'rb') as handle:
        return pickle.load(handle)
def get_all_referenced_bugs(arg_url):
    """Fetch *arg_url* and return the set of chromium bug ids it links to.

    Both short-form (crbug.com) and long-form (bugs.chromium.org) links
    are recognized.
    """
    page = get_response(arg_url)
    # V8-tracker links are not handled yet; fail loudly when one shows up.
    if "https://bugs.chromium.org/p/v8/issues/detail?id=" in arg_url:
        utils.perror("TODO: %s" % arg_url)
    sources = (
        (r'https://crbug.com/[0-9]+',
         "https://crbug.com/"),
        (r'https://bugs.chromium.org/p/chromium/issues/detail\?id=[0-9]+',
         "https://bugs.chromium.org/p/chromium/issues/detail?id="),
    )
    bug_ids = set()
    for pattern, prefix in sources:
        bug_ids.update(
            int(hit[len(prefix):], 10) for hit in re.findall(pattern, page))
    return bug_ids
def main():
    """Crawl every release-notes URL and persist the referenced bug ids."""
    collected = set()
    for raw_url in all_urls.split():
        url = raw_url.strip()
        if not url:
            continue
        utils.msg("[i] Going to download from: %s" % url)
        collected.update(get_all_referenced_bugs(url))
    # Persist the aggregate so later runs can diff against it.
    with open('database_security_bugs_from_rewards.pickle', 'wb') as handle:
        pickle.dump(collected, handle, protocol=pickle.HIGHEST_PROTOCOL)


if __name__ == '__main__':
    main()
| 65.311111
| 144
| 0.795509
| 1,701
| 11,756
| 5.427984
| 0.13933
| 0.201668
| 0.307809
| 0.339651
| 0.811112
| 0.79844
| 0.785877
| 0.78057
| 0.770822
| 0.762049
| 0
| 0.065172
| 0.070687
| 11,756
| 179
| 145
| 65.675978
| 0.779954
| 0.119684
| 0
| 0
| 0
| 0.722628
| 0.874544
| 0.012417
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029197
| false
| 0
| 0.043796
| 0
| 0.094891
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
a37c3d078d0c41f2c931c17db0983ad739358991
| 17,025
|
py
|
Python
|
morerandom.py
|
Wolframoviy/morerandom-py
|
3159aa3fa70db0c79e2a3fe2aed8803aa58fc966
|
[
"MIT"
] | null | null | null |
morerandom.py
|
Wolframoviy/morerandom-py
|
3159aa3fa70db0c79e2a3fe2aed8803aa58fc966
|
[
"MIT"
] | null | null | null |
morerandom.py
|
Wolframoviy/morerandom-py
|
3159aa3fa70db0c79e2a3fe2aed8803aa58fc966
|
[
"MIT"
] | null | null | null |
import random
def randbool(chance):
    """Return True with probability ``chance`` out of 1000.

    ``chance=0`` never returns True; ``chance>=1000`` always does.

    Bug fixed: the original drew from ``randint(0, 1000)`` — 1001 equally
    likely values — so the probability of True was ``chance/1001`` and even
    ``chance=1000`` could return False.  Drawing from exactly 1000 values
    (0..999) makes the per-mille semantics exact.
    """
    return random.randint(0, 999) < chance
def randobj(dict):
    """Return a uniformly random element of the sequence ``dict``.

    NOTE(review): the parameter name shadows the ``dict`` builtin; it is
    kept for backward compatibility with keyword callers, but the argument
    is really any non-empty indexable sequence (a list works fine).

    Raises IndexError on an empty sequence (``random.choice`` semantics,
    clearer than the ValueError the old ``randint(0, -1)`` call produced).
    """
    return random.choice(dict)
def randname(lang=None, gender=None):
    """Return a random given name.

    Parameters:
        lang: "en" for English names, "ru" for Russian names, or None to
            draw from both languages combined.
        gender: "m" for male names, "f" for female names, or None for
            either gender.

    Returns None (implicitly, as before) for unrecognized ``lang`` or
    ``gender`` values.

    Refactor: the original repeated every name list verbatim in each
    branch (most of the file was duplicated data).  The four lists are now
    defined once and concatenated as needed; the drawn distributions are
    unchanged.  Quirk preserved from the original: when ``lang`` is None
    the ``gender`` argument is ignored and a name is drawn from all four
    lists.
    """
    # NOTE(review): a few entries ("InnogeIona", "IriIsla", "Aliso", ...)
    # look like truncation artifacts in the source data; preserved as-is.
    en_male = [
        "Abraham", "Addison", "Adrian", "Albert", "Alec", "Alfred", "Alvin",
        "Andrew", "Andy", "Archibald", "Archie", "Arlo", "Arthur", "Arthur",
        "Austen", "Barnabe", "Bartholomew", "Bertram", "Bramwell", "Byam",
        "Cardew", "Chad", "Chance", "Colin", "Coloman", "Curtis", "Cuthbert",
        "Daniel", "Darryl", "David", "Dickon", "Donald", "Dougie", "Douglas",
        "Earl", "Ebenezer", "Edgar", "Edmund", "Edward", "Edwin", "Elliot",
        "Emil", "Floyd", "Franklin", "Frederick", "Gabriel", "Galton",
        "Gareth", "George", "Gerard", "Gilbert", "Gorden", "Gordon",
        "Graham", "Grant", "Henry", "Hervey", "Hudson", "Hugh", "Ian",
        "Jack", "Jaime", "James", "Jason", "Jeffrey", "Joey", "John",
        "Jolyon", "Jonas", "Joseph", "Joshua", "Julian", "Justin", "Kurt",
        "Lanny", "Larry", "Laurence", "Lawton", "Lester", "Malcolm",
        "Marcus", "Mark", "Marshall", "Martin", "Marvin", "Matt",
        "Maximilian", "Michael", "Miles", "Murray", "Myron", "Nate",
        "Nathan", "Neil", "Nicholas", "Nicolas", "Norman", "Oliver",
        "Oscar", "Osric", "Owen", "Patrick", "Paul", "Peleg", "Philip",
        "Phillipps", "Raymond", "Reginald", "Rhys", "Richard", "Robert",
        "Roderick", "Rodger", "Roger", "Ronald", "Rowland", "Rufus",
        "Russell", "Sebastian", "Shahaf", "Simon", "Stephen", "Swaine",
        "Thomas", "Tobias", "Travis", "Victor", "Vincent", "Vincent",
        "Vivian", "Wayne", "Wilfred", "William", "Winston", "Zadoc"]
    en_female = [
        "Abigail", "Ada", "Addy", "Adelaide", "Adele", "Agatha", "Agnes",
        "Alaina", "Alanna", "Alberta", "Alice", "Aliso", "Alvina", "Amanda",
        "Ambe", "Amelia", "Amy", "Andrea", "Ange", "Angela", "Anna",
        "Annabe", "Arda", "Audrey", "Augusta", "Autum", "Avi", "Barbara",
        "Beatrice", "Belinda", "Bella", "Berenice", "Bertha", "Brenda",
        "Bridget", "Bronwen", "Cadence", "Carmelita", "Caroline", "Carolyn",
        "Carolynn", "Cassandra", "Cecilia", "Cecily", "Celia", "Charis",
        "Charisse", "Charity", "Charlotte", "Charmaine", "Cheryl", "Chloe",
        "Christabel", "Claribel", "Clarissa", "Clementine", "Cleo",
        "Cordelia", "Cristalyn", "Crystal", "Cynthia", "Daisy", "Daphne",
        "Darryl", "Davina", "Dawn", "Deanna", "Deanne", "Deborah", "Dede",
        "Delia", "Denise", "Destiny", "Diana", "Dolores", "Dora", "Doreen",
        "Dorothy", "Drusilla", "Dulcie", "Edith", "Edna", "Edwina", "Effie",
        "Eileen", "Eleanor", "Elektra", "Elizabeth", "Ella", "Ellen",
        "Emma", "Enid", "Estelle", "Ethel", "Eudora", "Eunice", "Eva",
        "Faith", "Felicity", "Fiona", "Flora", "Galenka", "Gaynor",
        "Gemma", "Genevieve", "Georgiana", "Gertie", "Gertrude", "Gia",
        "Glenda", "Gwen", "Gwenda", "Gwendolen", "Gwendoline", "Gwendolyn",
        "Hannah", "Harriet", "Helen", "Henrietta", "Hero", "Hester",
        "Honor", "Hope", "Ida", "Imelda", "Imogen", "InnogeIona", "Irene",
        "IriIsla", "Ivy", "Jacqueline", "Jacqui", "Jaime", "Jane",
        "Jemima", "Jenna", "Jennifer", "Jessica", "Jessie", "Joanna",
        "Joanne", "Joelle", "Joey", "Josephine", "Judith", "Julianne",
        "June", "Karen", "Kathleen", "Kaylee", "Kierra", "Lara", "Laura",
        "Lauren", "Leah", "Lettice", "Liana", "Lilla", "Lillia", "Lois",
        "Lorelei", "Loretta", "Lorna", "Lorraine", "Louisa", "Lucinda",
        "Lucy", "Lynnette", "Mabel", "Madge", "Maggie", "Marcia", "Marcie",
        "Margaret", "Marian", "Marianne", "Marilyn", "Marissa", "Marjorie",
        "Marsha", "Matilda", "Maud", "Maude", "Mavis", "May", "Medea",
        "Mehitable", "Melanie", "Melissa", "Michele", "Millicent", "Minna",
        "Moira", "Myra", "Myrna", "Myrtle", "Nadine", "Naila"]
    ru_male = [
        'Иван', 'Арсений', 'Дмитрий', 'Никита', 'Константин', 'Артемий',
        'Илья', 'Владимир', 'Андрей', 'Лев', 'Егор', 'Александр', 'Степан',
        'Лука', 'Сергей', 'Артём', 'Кирилл', 'Максим', 'Дамир', 'Демид',
        'Григорий', 'Тимофей', 'Василий', 'Валерий', 'Фёдор', 'Марк',
        'Вадим', 'Матвей', 'Павел', 'Георгий', 'Платон', 'Михаил',
        'Богдан', 'Даниил', 'Ярослав', 'Алексей', 'Леонид', 'Али', 'Роман',
        'Елисей', 'Савелий', 'Борис', 'Марат', 'Роберт', 'Данила', 'Антон',
        'Адам', 'Семён', 'Даниэль', 'Давид', 'Николай', 'Филипп',
        'Владислав', 'Глеб', 'Мирон', 'Ян', 'Святослав', 'Тимур', 'Артур',
        'Серафим', 'Денис', 'Назар', 'Игорь', 'Руслан', 'Данил', 'Эмир',
        'Евгений', 'Пётр', 'Макар', 'Юрий', 'Станислав', 'Захар', 'Гордей',
        'Родион', 'Всеволод', 'Ростислав', 'Вячеслав', 'Герман', 'Олег',
        'Леон', 'Ибрагим', 'Виктор', 'Виталий', 'Мирослав', 'Демьян',
        'Савва', 'Даниль', 'Рафаэль', 'Эрик', 'Альберт', 'Рустам', 'Эмиль',
        'Яков', 'Тихон', 'Анатолий', 'Мартин', 'Билал', 'Тигран', 'Камиль',
        'Марсель']
    ru_female = [
        'Мирослава', 'Кира', 'Анастасия', 'Ирина', 'Василиса', 'Екатерина',
        'Евгения', 'Алиса', 'Арина', 'Вероника', 'Ника', 'Елизавета',
        'Анна', 'Мария', 'Варвара', 'Марьяна', 'Валерия', 'Элина',
        'Виктория', 'Сафия', 'Ярослава', 'Есения', 'Юлия', 'Светлана',
        'Милана', 'Ульяна', 'Николь', 'Софья', 'София', 'Эмилия', 'Сабина',
        'Полина', 'Надежда', 'Камила', 'Таисия', 'Дарья', 'Эвелина',
        'Амина', 'Алисия', 'Айлин', 'Дарина', 'Антонина', 'Александра',
        'Ксения', 'Камилла', 'Ясмина', 'Мадина', 'Диана', 'Малика',
        'Оливия', 'Яна', 'Татьяна', 'Ева', 'Аделина', 'Ангелина', 'Вера',
        'Лея', 'Злата', 'Асия', 'Аврора', 'Ольга', 'Кристина', 'Марина',
        'Зоя', 'Наталья', 'Мира', 'Марьям', 'Фатима', 'Алина', 'Маргарита',
        'Лидия', 'Амира', 'Лиана', 'Алия', 'Мила', 'Алёна', 'Юлиана',
        'Ариана', 'Любовь', 'Аиша', 'Дария', 'Сара', 'Аяна', 'Агата',
        'Майя', 'Мелания', 'Амелия', 'Марианна', 'Анфиса', 'Марта',
        'Элеонора', 'Милослава', 'Регина', 'Серафима', 'Владислава',
        'Елена', 'Нина', 'Лилия', 'Карина', 'Олеся']
    if lang == "en":
        if gender == "m":
            names = en_male
        elif gender == "f":
            names = en_female
        elif gender is None:
            names = en_male + en_female
        else:
            return None
    elif lang == "ru":
        if gender == "m":
            names = ru_male
        elif gender == "f":
            names = ru_female
        elif gender is None:
            names = ru_female + ru_male
        else:
            return None
    elif lang is None:
        # Original behavior: with no language, gender is ignored.
        names = ru_female + ru_male + en_male + en_female
    else:
        return None
    return random.choice(names)
| 448.026316
| 5,346
| 0.595125
| 1,724
| 17,025
| 5.87703
| 0.321926
| 0.011548
| 0.012436
| 0.013423
| 0.980853
| 0.980853
| 0.980853
| 0.980853
| 0.980853
| 0.980853
| 0
| 0.001393
| 0.114537
| 17,025
| 37
| 5,347
| 460.135135
| 0.670713
| 0
| 0
| 0.393939
| 0
| 0
| 0.56438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.030303
| 0.030303
| 0.424242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a3a5c4b7d896d4ee0da3e7d1cb1b5a6b67fa8fa0
| 73,140
|
py
|
Python
|
tests/fast_tests/test_multi_fcnet.py
|
jesbu1/h-baselines
|
f6f775bb18de22527f2d01d73bd733ed2e435ba3
|
[
"MIT"
] | null | null | null |
tests/fast_tests/test_multi_fcnet.py
|
jesbu1/h-baselines
|
f6f775bb18de22527f2d01d73bd733ed2e435ba3
|
[
"MIT"
] | null | null | null |
tests/fast_tests/test_multi_fcnet.py
|
jesbu1/h-baselines
|
f6f775bb18de22527f2d01d73bd733ed2e435ba3
|
[
"MIT"
] | null | null | null |
"""Tests for the policies in the hbaselines/multi_fcnet subdirectory."""
import unittest
import numpy as np
import tensorflow as tf
from gym.spaces import Box
from hbaselines.utils.tf_util import get_trainable_vars
from hbaselines.multi_fcnet.td3 import MultiFeedForwardPolicy as \
TD3MultiFeedForwardPolicy
from hbaselines.multi_fcnet.sac import MultiFeedForwardPolicy as \
SACMultiFeedForwardPolicy
from hbaselines.algorithms.off_policy import SAC_PARAMS, TD3_PARAMS
from hbaselines.algorithms.off_policy import MULTI_FEEDFORWARD_PARAMS
class TestMultiActorCriticPolicy(unittest.TestCase):
    """Test MultiActorCriticPolicy in hbaselines/multi_fcnet/base.py."""

    def setUp(self):
        """Create the shared and independent policy parameter dicts.

        Both start from the TD3 defaults plus the multi-agent feed-forward
        defaults; they differ only in the space definitions (single spaces
        vs. per-agent dicts keyed "a"/"b") and the ``shared`` flag.
        """
        self.sess = tf.compat.v1.Session()

        # Shared policy parameters
        self.policy_params_shared = {
            'sess': self.sess,
            'ac_space': Box(low=-1, high=1, shape=(1,)),
            'co_space': Box(low=-2, high=2, shape=(2,)),
            'ob_space': Box(low=-3, high=3, shape=(3,)),
            'all_ob_space': Box(low=-3, high=3, shape=(10,)),
            'layers': [256, 256],
            'verbose': 0,
        }
        self.policy_params_shared.update(TD3_PARAMS.copy())
        self.policy_params_shared.update(MULTI_FEEDFORWARD_PARAMS.copy())
        self.policy_params_shared['shared'] = True

        # Independent policy parameters
        self.policy_params_independent = {
            'sess': self.sess,
            'ac_space': {
                'a': Box(low=-1, high=1, shape=(1,)),
                'b': Box(low=-2, high=2, shape=(2,)),
            },
            'co_space': {
                'a': Box(low=-3, high=3, shape=(3,)),
                'b': Box(low=-4, high=4, shape=(4,)),
            },
            'ob_space': {
                'a': Box(low=-5, high=5, shape=(5,)),
                'b': Box(low=-6, high=6, shape=(6,)),
            },
            'all_ob_space': Box(low=-6, high=6, shape=(18,)),
            'layers': [256, 256],
            'verbose': 0,
        }
        self.policy_params_independent.update(TD3_PARAMS.copy())
        self.policy_params_independent.update(MULTI_FEEDFORWARD_PARAMS.copy())
        self.policy_params_independent['shared'] = False

    def tearDown(self):
        """Close the session and reset the TF graph between tests."""
        self.sess.close()
        del self.policy_params_shared
        del self.policy_params_independent

        # Clear the graph.
        tf.compat.v1.reset_default_graph()

    def test_store_transition_1(self):
        """Check the functionality of the store_transition() method.

        This test checks for the following cases:

        1. maddpg = False, shared = False
        2. maddpg = False, shared = True
        """
        policy_params = self.policy_params_independent.copy()
        policy_params["maddpg"] = False
        policy = TD3MultiFeedForwardPolicy(**policy_params)

        # Initialize the variables of the policy.
        policy.sess.run(tf.compat.v1.global_variables_initializer())

        # Run the initialize method.
        policy.initialize()

        # Store four transitions; per-agent sample sizes match the spaces
        # declared in setUp (agent "a": ob 5 / co 3 / ac 1, "b": 6 / 4 / 2).
        for i in range(4):
            action_0 = np.array([i for _ in range(1)])
            action_1 = np.array([i for _ in range(2)])
            context0_0 = np.array([i for _ in range(3)])
            context0_1 = np.array([i for _ in range(4)])
            obs0_0 = np.array([i for _ in range(5)])
            obs0_1 = np.array([i for _ in range(6)])
            reward = i
            obs1_0 = np.array([i+1 for _ in range(5)])
            obs1_1 = np.array([i+1 for _ in range(6)])
            context1_0 = np.array([i for _ in range(3)])
            context1_1 = np.array([i for _ in range(4)])
            done = False
            is_final_step = False
            evaluate = False
            policy.store_transition(
                obs0={"a": obs0_0, "b": obs0_1},
                context0={"a": context0_0, "b": context0_1},
                action={"a": action_0, "b": action_1},
                reward={"a": reward, "b": reward},
                obs1={"a": obs1_0, "b": obs1_1},
                context1={"a": context1_0, "b": context1_1},
                done=done,
                is_final_step=is_final_step,
                evaluate=evaluate,
                env_num=0,
            )

        # =================================================================== #
        #                         test for agent a                            #
        # =================================================================== #

        obs_t = policy.agents["a"].replay_buffer.obs_t
        action_t = policy.agents["a"].replay_buffer.action_t
        reward = policy.agents["a"].replay_buffer.reward
        done = policy.agents["a"].replay_buffer.done

        # check the various attributes
        # (stored observation = obs concatenated with context: 5 + 3 = 8)
        np.testing.assert_almost_equal(
            obs_t[:4, :],
            np.array([[0., 0., 0., 0., 0., 0., 0., 0.],
                      [1., 1., 1., 1., 1., 1., 1., 1.],
                      [2., 2., 2., 2., 2., 2., 2., 2.],
                      [3., 3., 3., 3., 3., 3., 3., 3.]])
        )
        np.testing.assert_almost_equal(
            action_t[:4, :],
            np.array([[0.], [1.], [2.], [3.]])
        )
        np.testing.assert_almost_equal(
            reward[:4],
            np.array([0., 1., 2., 3.])
        )
        np.testing.assert_almost_equal(
            done[:4],
            [0., 0., 0., 0.]
        )

        # =================================================================== #
        #                         test for agent b                            #
        # =================================================================== #

        obs_t = policy.agents["b"].replay_buffer.obs_t
        action_t = policy.agents["b"].replay_buffer.action_t
        reward = policy.agents["b"].replay_buffer.reward
        done = policy.agents["b"].replay_buffer.done

        # check the various attributes
        # (stored observation = obs concatenated with context: 6 + 4 = 10)
        np.testing.assert_almost_equal(
            obs_t[:4, :],
            np.array([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
                      [1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
                      [2., 2., 2., 2., 2., 2., 2., 2., 2., 2.],
                      [3., 3., 3., 3., 3., 3., 3., 3., 3., 3.]])
        )
        np.testing.assert_almost_equal(
            action_t[:4, :],
            np.array([[0., 0.], [1., 1.], [2., 2.], [3., 3.]])
        )
        np.testing.assert_almost_equal(
            reward[:4],
            np.array([0., 1., 2., 3.])
        )
        np.testing.assert_almost_equal(
            done[:4],
            [0., 0., 0., 0.]
        )

    def test_store_transition_2(self):
        """Check store_transition() for maddpg = False, shared = True.

        With a shared policy both agents' samples land in the single
        "policy" replay buffer; the expected arrays below show agent a's
        and agent b's (offset by +1) samples interleaved in insertion
        order.
        """
        policy_params = self.policy_params_shared.copy()
        policy_params["maddpg"] = False
        policy = TD3MultiFeedForwardPolicy(**policy_params)

        # Initialize the variables of the policy.
        policy.sess.run(tf.compat.v1.global_variables_initializer())

        # Run the initialize method.
        policy.initialize()

        for i in range(4):
            obs0 = np.array([i for _ in range(2)])
            context0 = np.array([i for _ in range(3)])
            action = np.array([i for _ in range(1)])
            reward = i
            obs1 = np.array([i+1 for _ in range(2)])
            context1 = np.array([i for _ in range(3)])
            is_final_step = False
            evaluate = False
            policy.store_transition(
                obs0={"a": obs0, "b": obs0 + 1},
                context0={"a": context0, "b": context0 + 1},
                action={"a": action, "b": action + 1},
                reward={"a": reward, "b": reward + 1},
                obs1={"a": obs1, "b": obs1 + 1},
                context1={"a": context1, "b": context1 + 1},
                done=0.,
                is_final_step=is_final_step,
                evaluate=evaluate,
                env_num=0,
            )

        # extract the attributes
        obs_t = policy.agents["policy"].replay_buffer.obs_t
        action_t = policy.agents["policy"].replay_buffer.action_t
        reward = policy.agents["policy"].replay_buffer.reward
        done = policy.agents["policy"].replay_buffer.done

        # check the various attributes
        np.testing.assert_almost_equal(
            obs_t[:8, :],
            np.array([[0., 0., 0., 0., 0.],
                      [1., 1., 1., 1., 1.],
                      [1., 1., 1., 1., 1.],
                      [2., 2., 2., 2., 2.],
                      [2., 2., 2., 2., 2.],
                      [3., 3., 3., 3., 3.],
                      [3., 3., 3., 3., 3.],
                      [4., 4., 4., 4., 4.]])
        )
        np.testing.assert_almost_equal(
            action_t[:8, :],
            np.array([[0.], [1.], [1.], [2.], [2.], [3.], [3.], [4.]])
        )
        np.testing.assert_almost_equal(
            reward[:8],
            np.array([0., 1., 1., 2., 2., 3., 3., 4.])
        )
        np.testing.assert_almost_equal(
            done[:8],
            [0., 0., 0., 0., 0., 0., 0., 0.]
        )
class TestTD3MultiFeedForwardPolicy(unittest.TestCase):
"""Test MultiFeedForwardPolicy in hbaselines/multi_fcnet/td3.py."""
def setUp(self):
    """Create the shared and independent policy parameter dicts.

    Identical setup to TestMultiActorCriticPolicy: TD3 defaults plus the
    multi-agent feed-forward defaults, differing only in the space
    definitions and the ``shared`` flag.
    """
    self.sess = tf.compat.v1.Session()

    # Shared policy parameters
    self.policy_params_shared = {
        'sess': self.sess,
        'ac_space': Box(low=-1, high=1, shape=(1,)),
        'co_space': Box(low=-2, high=2, shape=(2,)),
        'ob_space': Box(low=-3, high=3, shape=(3,)),
        'all_ob_space': Box(low=-3, high=3, shape=(10,)),
        'layers': [256, 256],
        'verbose': 0,
    }
    self.policy_params_shared.update(TD3_PARAMS.copy())
    self.policy_params_shared.update(MULTI_FEEDFORWARD_PARAMS.copy())
    self.policy_params_shared['shared'] = True

    # Independent policy parameters
    self.policy_params_independent = {
        'sess': self.sess,
        'ac_space': {
            'a': Box(low=-1, high=1, shape=(1,)),
            'b': Box(low=-2, high=2, shape=(2,)),
        },
        'co_space': {
            'a': Box(low=-3, high=3, shape=(3,)),
            'b': Box(low=-4, high=4, shape=(4,)),
        },
        'ob_space': {
            'a': Box(low=-5, high=5, shape=(5,)),
            'b': Box(low=-6, high=6, shape=(6,)),
        },
        'all_ob_space': Box(low=-6, high=6, shape=(18,)),
        'layers': [256, 256],
        'verbose': 0,
    }
    self.policy_params_independent.update(TD3_PARAMS.copy())
    self.policy_params_independent.update(MULTI_FEEDFORWARD_PARAMS.copy())
    self.policy_params_independent['shared'] = False
def tearDown(self):
    """Close the session and reset the TF graph between tests."""
    self.sess.close()
    del self.policy_params_shared
    del self.policy_params_independent

    # Clear the graph.
    tf.compat.v1.reset_default_graph()
def test_init_1(self):
    """Check the functionality of the __init__() method.

    This method is tested for the following features:

    1. The proper structure graph was generated.
    2. All input placeholders are correct.

    This is done for the following cases:

    1. maddpg = False, shared = False
    2. maddpg = False, shared = True
    3. maddpg = True, shared = False
    4. maddpg = True, shared = True
    """
    policy_params = self.policy_params_independent.copy()
    policy_params["maddpg"] = False
    policy = TD3MultiFeedForwardPolicy(**policy_params)

    # Independent (non-shared) agents get one actor ("pi") and twin
    # critics ("qf_0"/"qf_1"), each with model and target copies, under
    # per-agent scopes "a" and "b".
    self.assertListEqual(
        sorted([var.name for var in get_trainable_vars()]),
        ['a/model/pi/fc0/bias:0',
         'a/model/pi/fc0/kernel:0',
         'a/model/pi/fc1/bias:0',
         'a/model/pi/fc1/kernel:0',
         'a/model/pi/output/bias:0',
         'a/model/pi/output/kernel:0',
         'a/model/qf_0/fc0/bias:0',
         'a/model/qf_0/fc0/kernel:0',
         'a/model/qf_0/fc1/bias:0',
         'a/model/qf_0/fc1/kernel:0',
         'a/model/qf_0/qf_output/bias:0',
         'a/model/qf_0/qf_output/kernel:0',
         'a/model/qf_1/fc0/bias:0',
         'a/model/qf_1/fc0/kernel:0',
         'a/model/qf_1/fc1/bias:0',
         'a/model/qf_1/fc1/kernel:0',
         'a/model/qf_1/qf_output/bias:0',
         'a/model/qf_1/qf_output/kernel:0',
         'a/target/pi/fc0/bias:0',
         'a/target/pi/fc0/kernel:0',
         'a/target/pi/fc1/bias:0',
         'a/target/pi/fc1/kernel:0',
         'a/target/pi/output/bias:0',
         'a/target/pi/output/kernel:0',
         'a/target/qf_0/fc0/bias:0',
         'a/target/qf_0/fc0/kernel:0',
         'a/target/qf_0/fc1/bias:0',
         'a/target/qf_0/fc1/kernel:0',
         'a/target/qf_0/qf_output/bias:0',
         'a/target/qf_0/qf_output/kernel:0',
         'a/target/qf_1/fc0/bias:0',
         'a/target/qf_1/fc0/kernel:0',
         'a/target/qf_1/fc1/bias:0',
         'a/target/qf_1/fc1/kernel:0',
         'a/target/qf_1/qf_output/bias:0',
         'a/target/qf_1/qf_output/kernel:0',
         'b/model/pi/fc0/bias:0',
         'b/model/pi/fc0/kernel:0',
         'b/model/pi/fc1/bias:0',
         'b/model/pi/fc1/kernel:0',
         'b/model/pi/output/bias:0',
         'b/model/pi/output/kernel:0',
         'b/model/qf_0/fc0/bias:0',
         'b/model/qf_0/fc0/kernel:0',
         'b/model/qf_0/fc1/bias:0',
         'b/model/qf_0/fc1/kernel:0',
         'b/model/qf_0/qf_output/bias:0',
         'b/model/qf_0/qf_output/kernel:0',
         'b/model/qf_1/fc0/bias:0',
         'b/model/qf_1/fc0/kernel:0',
         'b/model/qf_1/fc1/bias:0',
         'b/model/qf_1/fc1/kernel:0',
         'b/model/qf_1/qf_output/bias:0',
         'b/model/qf_1/qf_output/kernel:0',
         'b/target/pi/fc0/bias:0',
         'b/target/pi/fc0/kernel:0',
         'b/target/pi/fc1/bias:0',
         'b/target/pi/fc1/kernel:0',
         'b/target/pi/output/bias:0',
         'b/target/pi/output/kernel:0',
         'b/target/qf_0/fc0/bias:0',
         'b/target/qf_0/fc0/kernel:0',
         'b/target/qf_0/fc1/bias:0',
         'b/target/qf_0/fc1/kernel:0',
         'b/target/qf_0/qf_output/bias:0',
         'b/target/qf_0/qf_output/kernel:0',
         'b/target/qf_1/fc0/bias:0',
         'b/target/qf_1/fc0/kernel:0',
         'b/target/qf_1/fc1/bias:0',
         'b/target/qf_1/fc1/kernel:0',
         'b/target/qf_1/qf_output/bias:0',
         'b/target/qf_1/qf_output/kernel:0']
    )

    # Check observation/action/context spaces of the agents
    self.assertEqual(policy.agents['a'].ac_space,
                     self.policy_params_independent['ac_space']['a'])
    self.assertEqual(policy.agents['a'].ob_space,
                     self.policy_params_independent['ob_space']['a'])
    self.assertEqual(policy.agents['a'].co_space,
                     self.policy_params_independent['co_space']['a'])
    self.assertEqual(policy.agents['b'].ac_space,
                     self.policy_params_independent['ac_space']['b'])
    self.assertEqual(policy.agents['b'].ob_space,
                     self.policy_params_independent['ob_space']['b'])
    self.assertEqual(policy.agents['b'].co_space,
                     self.policy_params_independent['co_space']['b'])

    # Check the instantiation of the class attributes.
    self.assertTrue(not policy.shared)
    self.assertTrue(not policy.maddpg)
def test_init_2(self):
policy_params = self.policy_params_shared.copy()
policy_params["maddpg"] = False
policy = TD3MultiFeedForwardPolicy(**policy_params)
self.assertListEqual(
sorted([var.name for var in get_trainable_vars()]),
['model/pi/fc0/bias:0',
'model/pi/fc0/kernel:0',
'model/pi/fc1/bias:0',
'model/pi/fc1/kernel:0',
'model/pi/output/bias:0',
'model/pi/output/kernel:0',
'model/qf_0/fc0/bias:0',
'model/qf_0/fc0/kernel:0',
'model/qf_0/fc1/bias:0',
'model/qf_0/fc1/kernel:0',
'model/qf_0/qf_output/bias:0',
'model/qf_0/qf_output/kernel:0',
'model/qf_1/fc0/bias:0',
'model/qf_1/fc0/kernel:0',
'model/qf_1/fc1/bias:0',
'model/qf_1/fc1/kernel:0',
'model/qf_1/qf_output/bias:0',
'model/qf_1/qf_output/kernel:0',
'target/pi/fc0/bias:0',
'target/pi/fc0/kernel:0',
'target/pi/fc1/bias:0',
'target/pi/fc1/kernel:0',
'target/pi/output/bias:0',
'target/pi/output/kernel:0',
'target/qf_0/fc0/bias:0',
'target/qf_0/fc0/kernel:0',
'target/qf_0/fc1/bias:0',
'target/qf_0/fc1/kernel:0',
'target/qf_0/qf_output/bias:0',
'target/qf_0/qf_output/kernel:0',
'target/qf_1/fc0/bias:0',
'target/qf_1/fc0/kernel:0',
'target/qf_1/fc1/bias:0',
'target/qf_1/fc1/kernel:0',
'target/qf_1/qf_output/bias:0',
'target/qf_1/qf_output/kernel:0']
)
# Check observation/action/context spaces of the agents
self.assertEqual(policy.agents['policy'].ac_space,
self.policy_params_shared['ac_space'])
self.assertEqual(policy.agents['policy'].ob_space,
self.policy_params_shared['ob_space'])
self.assertEqual(policy.agents['policy'].co_space,
self.policy_params_shared['co_space'])
# Check the instantiation of the class attributes.
self.assertTrue(policy.shared)
self.assertTrue(not policy.maddpg)
def test_init_3(self):
policy_params = self.policy_params_independent.copy()
policy_params["maddpg"] = True
policy = TD3MultiFeedForwardPolicy(**policy_params)
self.assertListEqual(
sorted([var.name for var in get_trainable_vars()]),
['a/model/centralized_qf_0/fc0/bias:0',
'a/model/centralized_qf_0/fc0/kernel:0',
'a/model/centralized_qf_0/fc1/bias:0',
'a/model/centralized_qf_0/fc1/kernel:0',
'a/model/centralized_qf_0/qf_output/bias:0',
'a/model/centralized_qf_0/qf_output/kernel:0',
'a/model/centralized_qf_1/fc0/bias:0',
'a/model/centralized_qf_1/fc0/kernel:0',
'a/model/centralized_qf_1/fc1/bias:0',
'a/model/centralized_qf_1/fc1/kernel:0',
'a/model/centralized_qf_1/qf_output/bias:0',
'a/model/centralized_qf_1/qf_output/kernel:0',
'a/model/pi/fc0/bias:0',
'a/model/pi/fc0/kernel:0',
'a/model/pi/fc1/bias:0',
'a/model/pi/fc1/kernel:0',
'a/model/pi/output/bias:0',
'a/model/pi/output/kernel:0',
'a/target/centralized_qf_0/fc0/bias:0',
'a/target/centralized_qf_0/fc0/kernel:0',
'a/target/centralized_qf_0/fc1/bias:0',
'a/target/centralized_qf_0/fc1/kernel:0',
'a/target/centralized_qf_0/qf_output/bias:0',
'a/target/centralized_qf_0/qf_output/kernel:0',
'a/target/centralized_qf_1/fc0/bias:0',
'a/target/centralized_qf_1/fc0/kernel:0',
'a/target/centralized_qf_1/fc1/bias:0',
'a/target/centralized_qf_1/fc1/kernel:0',
'a/target/centralized_qf_1/qf_output/bias:0',
'a/target/centralized_qf_1/qf_output/kernel:0',
'a/target/pi/fc0/bias:0',
'a/target/pi/fc0/kernel:0',
'a/target/pi/fc1/bias:0',
'a/target/pi/fc1/kernel:0',
'a/target/pi/output/bias:0',
'a/target/pi/output/kernel:0',
'b/model/centralized_qf_0/fc0/bias:0',
'b/model/centralized_qf_0/fc0/kernel:0',
'b/model/centralized_qf_0/fc1/bias:0',
'b/model/centralized_qf_0/fc1/kernel:0',
'b/model/centralized_qf_0/qf_output/bias:0',
'b/model/centralized_qf_0/qf_output/kernel:0',
'b/model/centralized_qf_1/fc0/bias:0',
'b/model/centralized_qf_1/fc0/kernel:0',
'b/model/centralized_qf_1/fc1/bias:0',
'b/model/centralized_qf_1/fc1/kernel:0',
'b/model/centralized_qf_1/qf_output/bias:0',
'b/model/centralized_qf_1/qf_output/kernel:0',
'b/model/pi/fc0/bias:0',
'b/model/pi/fc0/kernel:0',
'b/model/pi/fc1/bias:0',
'b/model/pi/fc1/kernel:0',
'b/model/pi/output/bias:0',
'b/model/pi/output/kernel:0',
'b/target/centralized_qf_0/fc0/bias:0',
'b/target/centralized_qf_0/fc0/kernel:0',
'b/target/centralized_qf_0/fc1/bias:0',
'b/target/centralized_qf_0/fc1/kernel:0',
'b/target/centralized_qf_0/qf_output/bias:0',
'b/target/centralized_qf_0/qf_output/kernel:0',
'b/target/centralized_qf_1/fc0/bias:0',
'b/target/centralized_qf_1/fc0/kernel:0',
'b/target/centralized_qf_1/fc1/bias:0',
'b/target/centralized_qf_1/fc1/kernel:0',
'b/target/centralized_qf_1/qf_output/bias:0',
'b/target/centralized_qf_1/qf_output/kernel:0',
'b/target/pi/fc0/bias:0',
'b/target/pi/fc0/kernel:0',
'b/target/pi/fc1/bias:0',
'b/target/pi/fc1/kernel:0',
'b/target/pi/output/bias:0',
'b/target/pi/output/kernel:0']
)
# Check observation/action/context spaces of the agents
for key in policy.ac_space.keys():
self.assertEqual(int(policy.all_obs_ph[key].shape[-1]),
policy.all_ob_space.shape[0])
self.assertEqual(int(policy.all_obs1_ph[key].shape[-1]),
policy.all_ob_space.shape[0])
self.assertEqual(int(policy.all_action_ph[key].shape[-1]),
sum(policy.ac_space[key].shape[0]
for key in policy.ac_space.keys()))
self.assertEqual(int(policy.action_ph[key].shape[-1]),
policy.ac_space[key].shape[0])
self.assertEqual(int(policy.obs_ph[key].shape[-1]),
int(policy.ob_space[key].shape[0]
+ policy.co_space[key].shape[0]))
self.assertEqual(int(policy.obs1_ph[key].shape[-1]),
int(policy.ob_space[key].shape[0]
+ policy.co_space[key].shape[0]))
# Check the instantiation of the class attributes.
self.assertTrue(not policy.shared)
self.assertTrue(policy.maddpg)
def test_init_4(self):
policy_params = self.policy_params_shared.copy()
policy_params["maddpg"] = True
policy = TD3MultiFeedForwardPolicy(**policy_params)
self.assertListEqual(
sorted([var.name for var in get_trainable_vars()]),
['model/centralized_qf_0/fc0/bias:0',
'model/centralized_qf_0/fc0/kernel:0',
'model/centralized_qf_0/fc1/bias:0',
'model/centralized_qf_0/fc1/kernel:0',
'model/centralized_qf_0/qf_output/bias:0',
'model/centralized_qf_0/qf_output/kernel:0',
'model/centralized_qf_1/fc0/bias:0',
'model/centralized_qf_1/fc0/kernel:0',
'model/centralized_qf_1/fc1/bias:0',
'model/centralized_qf_1/fc1/kernel:0',
'model/centralized_qf_1/qf_output/bias:0',
'model/centralized_qf_1/qf_output/kernel:0',
'model/pi/fc0/bias:0',
'model/pi/fc0/kernel:0',
'model/pi/fc1/bias:0',
'model/pi/fc1/kernel:0',
'model/pi/output/bias:0',
'model/pi/output/kernel:0',
'target/centralized_qf_0/fc0/bias:0',
'target/centralized_qf_0/fc0/kernel:0',
'target/centralized_qf_0/fc1/bias:0',
'target/centralized_qf_0/fc1/kernel:0',
'target/centralized_qf_0/qf_output/bias:0',
'target/centralized_qf_0/qf_output/kernel:0',
'target/centralized_qf_1/fc0/bias:0',
'target/centralized_qf_1/fc0/kernel:0',
'target/centralized_qf_1/fc1/bias:0',
'target/centralized_qf_1/fc1/kernel:0',
'target/centralized_qf_1/qf_output/bias:0',
'target/centralized_qf_1/qf_output/kernel:0',
'target/pi/fc0/bias:0',
'target/pi/fc0/kernel:0',
'target/pi/fc1/bias:0',
'target/pi/fc1/kernel:0',
'target/pi/output/bias:0',
'target/pi/output/kernel:0']
)
# Check observation/action/context spaces of the agents
self.assertEqual(int(policy.all_obs_ph.shape[-1]),
policy.all_ob_space.shape[0])
self.assertEqual(int(policy.all_obs1_ph.shape[-1]),
policy.all_ob_space.shape[0])
self.assertEqual(int(policy.all_action_ph.shape[-1]),
policy.n_agents * policy.ac_space.shape[0])
self.assertEqual(int(policy.action_ph[0].shape[-1]),
policy.ac_space.shape[0])
self.assertEqual(int(policy.obs_ph[0].shape[-1]),
int(policy.ob_space.shape[0]
+ policy.co_space.shape[0]))
self.assertEqual(int(policy.obs1_ph[0].shape[-1]),
int(policy.ob_space.shape[0]
+ policy.co_space.shape[0]))
# Check the instantiation of the class attributes.
self.assertTrue(policy.shared)
self.assertTrue(policy.maddpg)
def test_initialize_1(self):
"""Check the functionality of the initialize() method.
This test validates that the target variables are properly initialized
when initialize is called.
This is done for the following cases:
1. maddpg = False, shared = False
2. maddpg = False, shared = True
3. maddpg = True, shared = False
4. maddpg = True, shared = True
"""
policy_params = self.policy_params_independent.copy()
policy_params["maddpg"] = False
policy = TD3MultiFeedForwardPolicy(**policy_params)
# Initialize the variables of the policy.
policy.sess.run(tf.compat.v1.global_variables_initializer())
# Run the initialize method.
policy.initialize()
model_var_list = [
'a/model/pi/fc0/bias:0',
'a/model/pi/fc0/kernel:0',
'a/model/pi/fc1/bias:0',
'a/model/pi/fc1/kernel:0',
'a/model/pi/output/bias:0',
'a/model/pi/output/kernel:0',
'a/model/qf_0/fc0/bias:0',
'a/model/qf_0/fc0/kernel:0',
'a/model/qf_0/fc1/bias:0',
'a/model/qf_0/fc1/kernel:0',
'a/model/qf_0/qf_output/bias:0',
'a/model/qf_0/qf_output/kernel:0',
'a/model/qf_1/fc0/bias:0',
'a/model/qf_1/fc0/kernel:0',
'a/model/qf_1/fc1/bias:0',
'a/model/qf_1/fc1/kernel:0',
'a/model/qf_1/qf_output/bias:0',
'a/model/qf_1/qf_output/kernel:0',
'b/model/pi/fc0/bias:0',
'b/model/pi/fc0/kernel:0',
'b/model/pi/fc1/bias:0',
'b/model/pi/fc1/kernel:0',
'b/model/pi/output/bias:0',
'b/model/pi/output/kernel:0',
'b/model/qf_0/fc0/bias:0',
'b/model/qf_0/fc0/kernel:0',
'b/model/qf_0/fc1/bias:0',
'b/model/qf_0/fc1/kernel:0',
'b/model/qf_0/qf_output/bias:0',
'b/model/qf_0/qf_output/kernel:0',
'b/model/qf_1/fc0/bias:0',
'b/model/qf_1/fc0/kernel:0',
'b/model/qf_1/fc1/bias:0',
'b/model/qf_1/fc1/kernel:0',
'b/model/qf_1/qf_output/bias:0',
'b/model/qf_1/qf_output/kernel:0',
]
target_var_list = [
'a/target/pi/fc0/bias:0',
'a/target/pi/fc0/kernel:0',
'a/target/pi/fc1/bias:0',
'a/target/pi/fc1/kernel:0',
'a/target/pi/output/bias:0',
'a/target/pi/output/kernel:0',
'a/target/qf_0/fc0/bias:0',
'a/target/qf_0/fc0/kernel:0',
'a/target/qf_0/fc1/bias:0',
'a/target/qf_0/fc1/kernel:0',
'a/target/qf_0/qf_output/bias:0',
'a/target/qf_0/qf_output/kernel:0',
'a/target/qf_1/fc0/bias:0',
'a/target/qf_1/fc0/kernel:0',
'a/target/qf_1/fc1/bias:0',
'a/target/qf_1/fc1/kernel:0',
'a/target/qf_1/qf_output/bias:0',
'a/target/qf_1/qf_output/kernel:0',
'b/target/pi/fc0/bias:0',
'b/target/pi/fc0/kernel:0',
'b/target/pi/fc1/bias:0',
'b/target/pi/fc1/kernel:0',
'b/target/pi/output/bias:0',
'b/target/pi/output/kernel:0',
'b/target/qf_0/fc0/bias:0',
'b/target/qf_0/fc0/kernel:0',
'b/target/qf_0/fc1/bias:0',
'b/target/qf_0/fc1/kernel:0',
'b/target/qf_0/qf_output/bias:0',
'b/target/qf_0/qf_output/kernel:0',
'b/target/qf_1/fc0/bias:0',
'b/target/qf_1/fc0/kernel:0',
'b/target/qf_1/fc1/bias:0',
'b/target/qf_1/fc1/kernel:0',
'b/target/qf_1/qf_output/bias:0',
'b/target/qf_1/qf_output/kernel:0',
]
for model, target in zip(model_var_list, target_var_list):
with tf.compat.v1.variable_scope(
tf.compat.v1.get_variable_scope(), reuse=True):
model_val = policy.sess.run(model)
target_val = policy.sess.run(target)
np.testing.assert_almost_equal(model_val, target_val)
def test_initialize_2(self):
policy_params = self.policy_params_shared.copy()
policy_params["maddpg"] = False
policy = TD3MultiFeedForwardPolicy(**policy_params)
# Initialize the variables of the policy.
policy.sess.run(tf.compat.v1.global_variables_initializer())
# Run the initialize method.
policy.initialize()
model_var_list = [
'model/pi/fc0/bias:0',
'model/pi/fc0/kernel:0',
'model/pi/fc1/bias:0',
'model/pi/fc1/kernel:0',
'model/pi/output/bias:0',
'model/pi/output/kernel:0',
'model/qf_0/fc0/bias:0',
'model/qf_0/fc0/kernel:0',
'model/qf_0/fc1/bias:0',
'model/qf_0/fc1/kernel:0',
'model/qf_0/qf_output/bias:0',
'model/qf_0/qf_output/kernel:0',
'model/qf_1/fc0/bias:0',
'model/qf_1/fc0/kernel:0',
'model/qf_1/fc1/bias:0',
'model/qf_1/fc1/kernel:0',
'model/qf_1/qf_output/bias:0',
'model/qf_1/qf_output/kernel:0',
]
target_var_list = [
'target/pi/fc0/bias:0',
'target/pi/fc0/kernel:0',
'target/pi/fc1/bias:0',
'target/pi/fc1/kernel:0',
'target/pi/output/bias:0',
'target/pi/output/kernel:0',
'target/qf_0/fc0/bias:0',
'target/qf_0/fc0/kernel:0',
'target/qf_0/fc1/bias:0',
'target/qf_0/fc1/kernel:0',
'target/qf_0/qf_output/bias:0',
'target/qf_0/qf_output/kernel:0',
'target/qf_1/fc0/bias:0',
'target/qf_1/fc0/kernel:0',
'target/qf_1/fc1/bias:0',
'target/qf_1/fc1/kernel:0',
'target/qf_1/qf_output/bias:0',
'target/qf_1/qf_output/kernel:0'
]
for model, target in zip(model_var_list, target_var_list):
with tf.compat.v1.variable_scope(
tf.compat.v1.get_variable_scope(), reuse=True):
model_val = policy.sess.run(model)
target_val = policy.sess.run(target)
np.testing.assert_almost_equal(model_val, target_val)
def test_initialize_3(self):
policy_params = self.policy_params_independent.copy()
policy_params["maddpg"] = True
policy = TD3MultiFeedForwardPolicy(**policy_params)
# Initialize the variables of the policy.
policy.sess.run(tf.compat.v1.global_variables_initializer())
# Run the initialize method.
policy.initialize()
model_var_list = [
'a/model/centralized_qf_0/fc0/bias:0',
'a/model/centralized_qf_0/fc0/kernel:0',
'a/model/centralized_qf_0/fc1/bias:0',
'a/model/centralized_qf_0/fc1/kernel:0',
'a/model/centralized_qf_0/qf_output/bias:0',
'a/model/centralized_qf_0/qf_output/kernel:0',
'a/model/centralized_qf_1/fc0/bias:0',
'a/model/centralized_qf_1/fc0/kernel:0',
'a/model/centralized_qf_1/fc1/bias:0',
'a/model/centralized_qf_1/fc1/kernel:0',
'a/model/centralized_qf_1/qf_output/bias:0',
'a/model/centralized_qf_1/qf_output/kernel:0',
'a/model/pi/fc0/bias:0',
'a/model/pi/fc0/kernel:0',
'a/model/pi/fc1/bias:0',
'a/model/pi/fc1/kernel:0',
'a/model/pi/output/bias:0',
'a/model/pi/output/kernel:0',
'b/model/centralized_qf_0/fc0/bias:0',
'b/model/centralized_qf_0/fc0/kernel:0',
'b/model/centralized_qf_0/fc1/bias:0',
'b/model/centralized_qf_0/fc1/kernel:0',
'b/model/centralized_qf_0/qf_output/bias:0',
'b/model/centralized_qf_0/qf_output/kernel:0',
'b/model/centralized_qf_1/fc0/bias:0',
'b/model/centralized_qf_1/fc0/kernel:0',
'b/model/centralized_qf_1/fc1/bias:0',
'b/model/centralized_qf_1/fc1/kernel:0',
'b/model/centralized_qf_1/qf_output/bias:0',
'b/model/centralized_qf_1/qf_output/kernel:0',
'b/model/pi/fc0/bias:0',
'b/model/pi/fc0/kernel:0',
'b/model/pi/fc1/bias:0',
'b/model/pi/fc1/kernel:0',
'b/model/pi/output/bias:0',
'b/model/pi/output/kernel:0',
]
target_var_list = [
'a/target/centralized_qf_0/fc0/bias:0',
'a/target/centralized_qf_0/fc0/kernel:0',
'a/target/centralized_qf_0/fc1/bias:0',
'a/target/centralized_qf_0/fc1/kernel:0',
'a/target/centralized_qf_0/qf_output/bias:0',
'a/target/centralized_qf_0/qf_output/kernel:0',
'a/target/centralized_qf_1/fc0/bias:0',
'a/target/centralized_qf_1/fc0/kernel:0',
'a/target/centralized_qf_1/fc1/bias:0',
'a/target/centralized_qf_1/fc1/kernel:0',
'a/target/centralized_qf_1/qf_output/bias:0',
'a/target/centralized_qf_1/qf_output/kernel:0',
'a/target/pi/fc0/bias:0',
'a/target/pi/fc0/kernel:0',
'a/target/pi/fc1/bias:0',
'a/target/pi/fc1/kernel:0',
'a/target/pi/output/bias:0',
'a/target/pi/output/kernel:0',
'b/target/centralized_qf_0/fc0/bias:0',
'b/target/centralized_qf_0/fc0/kernel:0',
'b/target/centralized_qf_0/fc1/bias:0',
'b/target/centralized_qf_0/fc1/kernel:0',
'b/target/centralized_qf_0/qf_output/bias:0',
'b/target/centralized_qf_0/qf_output/kernel:0',
'b/target/centralized_qf_1/fc0/bias:0',
'b/target/centralized_qf_1/fc0/kernel:0',
'b/target/centralized_qf_1/fc1/bias:0',
'b/target/centralized_qf_1/fc1/kernel:0',
'b/target/centralized_qf_1/qf_output/bias:0',
'b/target/centralized_qf_1/qf_output/kernel:0',
'b/target/pi/fc0/bias:0',
'b/target/pi/fc0/kernel:0',
'b/target/pi/fc1/bias:0',
'b/target/pi/fc1/kernel:0',
'b/target/pi/output/bias:0',
'b/target/pi/output/kernel:0',
]
for model, target in zip(model_var_list, target_var_list):
with tf.compat.v1.variable_scope(
tf.compat.v1.get_variable_scope(), reuse=True):
model_val = policy.sess.run(model)
target_val = policy.sess.run(target)
np.testing.assert_almost_equal(model_val, target_val)
def test_initialize_4(self):
policy_params = self.policy_params_shared.copy()
policy_params["maddpg"] = True
policy = TD3MultiFeedForwardPolicy(**policy_params)
# Initialize the variables of the policy.
policy.sess.run(tf.compat.v1.global_variables_initializer())
# Run the initialize method.
policy.initialize()
model_var_list = [
'model/centralized_qf_0/fc0/bias:0',
'model/centralized_qf_0/fc0/kernel:0',
'model/centralized_qf_0/fc1/bias:0',
'model/centralized_qf_0/fc1/kernel:0',
'model/centralized_qf_0/qf_output/bias:0',
'model/centralized_qf_0/qf_output/kernel:0',
'model/centralized_qf_1/fc0/bias:0',
'model/centralized_qf_1/fc0/kernel:0',
'model/centralized_qf_1/fc1/bias:0',
'model/centralized_qf_1/fc1/kernel:0',
'model/centralized_qf_1/qf_output/bias:0',
'model/centralized_qf_1/qf_output/kernel:0',
'model/pi/fc0/bias:0',
'model/pi/fc0/kernel:0',
'model/pi/fc1/bias:0',
'model/pi/fc1/kernel:0',
'model/pi/output/bias:0',
'model/pi/output/kernel:0',
]
target_var_list = [
'target/centralized_qf_0/fc0/bias:0',
'target/centralized_qf_0/fc0/kernel:0',
'target/centralized_qf_0/fc1/bias:0',
'target/centralized_qf_0/fc1/kernel:0',
'target/centralized_qf_0/qf_output/bias:0',
'target/centralized_qf_0/qf_output/kernel:0',
'target/centralized_qf_1/fc0/bias:0',
'target/centralized_qf_1/fc0/kernel:0',
'target/centralized_qf_1/fc1/bias:0',
'target/centralized_qf_1/fc1/kernel:0',
'target/centralized_qf_1/qf_output/bias:0',
'target/centralized_qf_1/qf_output/kernel:0',
'target/pi/fc0/bias:0',
'target/pi/fc0/kernel:0',
'target/pi/fc1/bias:0',
'target/pi/fc1/kernel:0',
'target/pi/output/bias:0',
'target/pi/output/kernel:0',
]
for model, target in zip(model_var_list, target_var_list):
with tf.compat.v1.variable_scope(
tf.compat.v1.get_variable_scope(), reuse=True):
model_val = policy.sess.run(model)
target_val = policy.sess.run(target)
np.testing.assert_almost_equal(model_val, target_val)
def test_store_transition_1(self):
"""Check the functionality of the store_transition() method.
This test checks for the following cases:
1. maddpg = True, shared = False
2. maddpg = True, shared = True
"""
policy_params = self.policy_params_independent.copy()
policy_params["maddpg"] = True
policy = TD3MultiFeedForwardPolicy(**policy_params)
# Initialize the variables of the policy.
policy.sess.run(tf.compat.v1.global_variables_initializer())
# Run the initialize method.
policy.initialize()
for i in range(4):
action_0 = np.array([i for _ in range(1)])
action_1 = np.array([i for _ in range(2)])
context0_0 = np.array([i for _ in range(3)])
context0_1 = np.array([i for _ in range(4)])
obs0_0 = np.array([i for _ in range(5)])
obs0_1 = np.array([i for _ in range(6)])
reward = i
obs1_0 = np.array([i+1 for _ in range(5)])
obs1_1 = np.array([i+1 for _ in range(6)])
context1_0 = np.array([i for _ in range(3)])
context1_1 = np.array([i for _ in range(4)])
done = False
is_final_step = False
evaluate = False
all_obs0 = np.array([i for _ in range(18)])
all_obs1 = np.array([i+1 for _ in range(18)])
policy.store_transition(
obs0={"a": obs0_0, "b": obs0_1},
context0={"a": context0_0, "b": context0_1},
action={"a": action_0, "b": action_1},
reward={"a": reward, "b": reward},
obs1={"a": obs1_0, "b": obs1_1},
context1={"a": context1_0, "b": context1_1},
done=done,
is_final_step=is_final_step,
evaluate=evaluate,
env_num=0,
all_obs0=all_obs0,
all_obs1=all_obs1,
)
# =================================================================== #
# test for agent a #
# =================================================================== #
obs_t = policy.replay_buffer["a"].obs_t
action_t = policy.replay_buffer["a"].action_t
reward = policy.replay_buffer["a"].reward
done = policy.replay_buffer["a"].done
all_obs_t = policy.replay_buffer["a"].all_obs_t
# check the various attributes
np.testing.assert_almost_equal(
obs_t[:4, :],
np.array([[0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1.],
[2., 2., 2., 2., 2., 2., 2., 2.],
[3., 3., 3., 3., 3., 3., 3., 3.]])
)
np.testing.assert_almost_equal(
action_t[:4, :],
np.array([[0.], [1.], [2.], [3.]])
)
np.testing.assert_almost_equal(
reward[:4],
np.array([0., 1., 2., 3.])
)
np.testing.assert_almost_equal(
done[:4],
[0., 0., 0., 0.]
)
np.testing.assert_almost_equal(
all_obs_t[:4, :],
np.array([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1.],
[2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.,
2., 2., 2., 2.],
[3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3.,
3., 3., 3., 3.]])
)
# =================================================================== #
# test for agent b #
# =================================================================== #
obs_t = policy.replay_buffer["b"].obs_t
action_t = policy.replay_buffer["b"].action_t
reward = policy.replay_buffer["b"].reward
done = policy.replay_buffer["b"].done
all_obs_t = policy.replay_buffer["b"].all_obs_t
# check the various attributes
np.testing.assert_almost_equal(
obs_t[:4, :],
np.array([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
[2., 2., 2., 2., 2., 2., 2., 2., 2., 2.],
[3., 3., 3., 3., 3., 3., 3., 3., 3., 3.]])
)
np.testing.assert_almost_equal(
action_t[:4, :],
np.array([[0., 0.], [1., 1.], [2., 2.], [3., 3.]])
)
np.testing.assert_almost_equal(
reward[:4],
np.array([0., 1., 2., 3.])
)
np.testing.assert_almost_equal(
done[:4],
[0., 0., 0., 0.]
)
np.testing.assert_almost_equal(
all_obs_t[:4, :],
np.array([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1.],
[2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.,
2., 2., 2., 2.],
[3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3.,
3., 3., 3., 3.]])
)
def test_store_transition_2(self):
policy_params = self.policy_params_shared.copy()
policy_params["maddpg"] = True
policy_params["n_agents"] = 2
policy = TD3MultiFeedForwardPolicy(**policy_params)
# Initialize the variables of the policy.
policy.sess.run(tf.compat.v1.global_variables_initializer())
# Run the initialize method.
policy.initialize()
for i in range(4):
obs0 = np.array([i for _ in range(2)])
context0 = np.array([i for _ in range(3)])
action = np.array([i for _ in range(1)])
reward = i
obs1 = np.array([i+1 for _ in range(2)])
context1 = np.array([i for _ in range(3)])
is_final_step = False
evaluate = False
all_obs0 = np.array([i for _ in range(10)])
all_obs1 = np.array([i+1 for _ in range(10)])
policy.store_transition(
obs0={"a": obs0, "b": obs0 + 1},
context0={"a": context0, "b": context0 + 1},
action={"a": action, "b": action + 1},
reward={"a": reward, "b": reward + 1},
obs1={"a": obs1, "b": obs1 + 1},
context1={"a": context1, "b": context1 + 1},
done=0.,
is_final_step=is_final_step,
evaluate=evaluate,
env_num=0,
all_obs0=all_obs0,
all_obs1=all_obs1,
)
# extract the attributes
obs_t = policy.replay_buffer.obs_t
action_t = policy.replay_buffer.action
reward = policy.replay_buffer.reward
done = policy.replay_buffer.done
all_obs_t = policy.replay_buffer.all_obs_t
# check the various attributes
np.testing.assert_almost_equal(
obs_t[0][:4, :],
np.array([[0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1.],
[2., 2., 2., 2., 2.],
[3., 3., 3., 3., 3.]])
)
np.testing.assert_almost_equal(
action_t[0][:4, :],
np.array([[0.], [1.], [2.], [3.]])
)
np.testing.assert_almost_equal(
reward[:4],
np.array([0., 1., 2., 3.])
)
np.testing.assert_almost_equal(
done[:4],
[0., 0., 0., 0.]
)
np.testing.assert_almost_equal(
all_obs_t[:4, :],
np.array([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
[2., 2., 2., 2., 2., 2., 2., 2., 2., 2.],
[3., 3., 3., 3., 3., 3., 3., 3., 3., 3.]])
)
class TestSACMultiFeedForwardPolicy(unittest.TestCase):
"""Test MultiFeedForwardPolicy in hbaselines/multi_fcnet/sac.py."""
def setUp(self):
self.sess = tf.compat.v1.Session()
# Shared policy parameters
self.policy_params_shared = {
'sess': self.sess,
'ac_space': Box(low=-1, high=1, shape=(1,)),
'co_space': Box(low=-2, high=2, shape=(2,)),
'ob_space': Box(low=-3, high=3, shape=(3,)),
'all_ob_space': Box(low=-3, high=3, shape=(10,)),
'layers': [256, 256],
'verbose': 0,
}
self.policy_params_shared.update(SAC_PARAMS.copy())
self.policy_params_shared.update(MULTI_FEEDFORWARD_PARAMS.copy())
self.policy_params_shared['shared'] = True
# Independent policy parameters
self.policy_params_independent = {
'sess': self.sess,
'ac_space': {
'a': Box(low=-1, high=1, shape=(1,)),
'b': Box(low=-2, high=2, shape=(2,)),
},
'co_space': {
'a': Box(low=-3, high=3, shape=(3,)),
'b': Box(low=-4, high=4, shape=(4,)),
},
'ob_space': {
'a': Box(low=-5, high=5, shape=(5,)),
'b': Box(low=-6, high=6, shape=(6,)),
},
'all_ob_space': Box(low=-6, high=6, shape=(18,)),
'layers': [256, 256],
'verbose': 0,
}
self.policy_params_independent.update(SAC_PARAMS.copy())
self.policy_params_independent.update(MULTI_FEEDFORWARD_PARAMS.copy())
self.policy_params_independent['shared'] = False
def tearDown(self):
self.sess.close()
del self.policy_params_shared
del self.policy_params_independent
# Clear the graph.
tf.compat.v1.reset_default_graph()
def test_init_1(self):
"""Check the functionality of the __init__() method.
This method is tested for the following features:
1. The proper structure graph was generated.
2. All input placeholders are correct.
This is done for the following cases:
1. maddpg = False, shared = False
2. maddpg = False, shared = True
3. maddpg = True, shared = False
4. maddpg = True, shared = True
"""
policy_params = self.policy_params_independent.copy()
policy_params["maddpg"] = False
policy = SACMultiFeedForwardPolicy(**policy_params)
self.assertListEqual(
sorted([var.name for var in get_trainable_vars()]),
['a/model/log_alpha:0',
'a/model/pi/fc0/bias:0',
'a/model/pi/fc0/kernel:0',
'a/model/pi/fc1/bias:0',
'a/model/pi/fc1/kernel:0',
'a/model/pi/log_std/bias:0',
'a/model/pi/log_std/kernel:0',
'a/model/pi/mean/bias:0',
'a/model/pi/mean/kernel:0',
'a/model/value_fns/qf1/fc0/bias:0',
'a/model/value_fns/qf1/fc0/kernel:0',
'a/model/value_fns/qf1/fc1/bias:0',
'a/model/value_fns/qf1/fc1/kernel:0',
'a/model/value_fns/qf1/qf_output/bias:0',
'a/model/value_fns/qf1/qf_output/kernel:0',
'a/model/value_fns/qf2/fc0/bias:0',
'a/model/value_fns/qf2/fc0/kernel:0',
'a/model/value_fns/qf2/fc1/bias:0',
'a/model/value_fns/qf2/fc1/kernel:0',
'a/model/value_fns/qf2/qf_output/bias:0',
'a/model/value_fns/qf2/qf_output/kernel:0',
'a/model/value_fns/vf/fc0/bias:0',
'a/model/value_fns/vf/fc0/kernel:0',
'a/model/value_fns/vf/fc1/bias:0',
'a/model/value_fns/vf/fc1/kernel:0',
'a/model/value_fns/vf/vf_output/bias:0',
'a/model/value_fns/vf/vf_output/kernel:0',
'a/target/value_fns/vf/fc0/bias:0',
'a/target/value_fns/vf/fc0/kernel:0',
'a/target/value_fns/vf/fc1/bias:0',
'a/target/value_fns/vf/fc1/kernel:0',
'a/target/value_fns/vf/vf_output/bias:0',
'a/target/value_fns/vf/vf_output/kernel:0',
'b/model/log_alpha:0',
'b/model/pi/fc0/bias:0',
'b/model/pi/fc0/kernel:0',
'b/model/pi/fc1/bias:0',
'b/model/pi/fc1/kernel:0',
'b/model/pi/log_std/bias:0',
'b/model/pi/log_std/kernel:0',
'b/model/pi/mean/bias:0',
'b/model/pi/mean/kernel:0',
'b/model/value_fns/qf1/fc0/bias:0',
'b/model/value_fns/qf1/fc0/kernel:0',
'b/model/value_fns/qf1/fc1/bias:0',
'b/model/value_fns/qf1/fc1/kernel:0',
'b/model/value_fns/qf1/qf_output/bias:0',
'b/model/value_fns/qf1/qf_output/kernel:0',
'b/model/value_fns/qf2/fc0/bias:0',
'b/model/value_fns/qf2/fc0/kernel:0',
'b/model/value_fns/qf2/fc1/bias:0',
'b/model/value_fns/qf2/fc1/kernel:0',
'b/model/value_fns/qf2/qf_output/bias:0',
'b/model/value_fns/qf2/qf_output/kernel:0',
'b/model/value_fns/vf/fc0/bias:0',
'b/model/value_fns/vf/fc0/kernel:0',
'b/model/value_fns/vf/fc1/bias:0',
'b/model/value_fns/vf/fc1/kernel:0',
'b/model/value_fns/vf/vf_output/bias:0',
'b/model/value_fns/vf/vf_output/kernel:0',
'b/target/value_fns/vf/fc0/bias:0',
'b/target/value_fns/vf/fc0/kernel:0',
'b/target/value_fns/vf/fc1/bias:0',
'b/target/value_fns/vf/fc1/kernel:0',
'b/target/value_fns/vf/vf_output/bias:0',
'b/target/value_fns/vf/vf_output/kernel:0']
)
# Check observation/action/context spaces of the agents
self.assertEqual(policy.agents['a'].ac_space,
self.policy_params_independent['ac_space']['a'])
self.assertEqual(policy.agents['a'].ob_space,
self.policy_params_independent['ob_space']['a'])
self.assertEqual(policy.agents['a'].co_space,
self.policy_params_independent['co_space']['a'])
self.assertEqual(policy.agents['b'].ac_space,
self.policy_params_independent['ac_space']['b'])
self.assertEqual(policy.agents['b'].ob_space,
self.policy_params_independent['ob_space']['b'])
self.assertEqual(policy.agents['b'].co_space,
self.policy_params_independent['co_space']['b'])
# Check the instantiation of the class attributes.
self.assertTrue(not policy.shared)
self.assertTrue(not policy.maddpg)
def test_init_2(self):
policy_params = self.policy_params_shared.copy()
policy_params["maddpg"] = False
policy = SACMultiFeedForwardPolicy(**policy_params)
self.assertListEqual(
sorted([var.name for var in get_trainable_vars()]),
['model/log_alpha:0',
'model/pi/fc0/bias:0',
'model/pi/fc0/kernel:0',
'model/pi/fc1/bias:0',
'model/pi/fc1/kernel:0',
'model/pi/log_std/bias:0',
'model/pi/log_std/kernel:0',
'model/pi/mean/bias:0',
'model/pi/mean/kernel:0',
'model/value_fns/qf1/fc0/bias:0',
'model/value_fns/qf1/fc0/kernel:0',
'model/value_fns/qf1/fc1/bias:0',
'model/value_fns/qf1/fc1/kernel:0',
'model/value_fns/qf1/qf_output/bias:0',
'model/value_fns/qf1/qf_output/kernel:0',
'model/value_fns/qf2/fc0/bias:0',
'model/value_fns/qf2/fc0/kernel:0',
'model/value_fns/qf2/fc1/bias:0',
'model/value_fns/qf2/fc1/kernel:0',
'model/value_fns/qf2/qf_output/bias:0',
'model/value_fns/qf2/qf_output/kernel:0',
'model/value_fns/vf/fc0/bias:0',
'model/value_fns/vf/fc0/kernel:0',
'model/value_fns/vf/fc1/bias:0',
'model/value_fns/vf/fc1/kernel:0',
'model/value_fns/vf/vf_output/bias:0',
'model/value_fns/vf/vf_output/kernel:0',
'target/value_fns/vf/fc0/bias:0',
'target/value_fns/vf/fc0/kernel:0',
'target/value_fns/vf/fc1/bias:0',
'target/value_fns/vf/fc1/kernel:0',
'target/value_fns/vf/vf_output/bias:0',
'target/value_fns/vf/vf_output/kernel:0']
)
# Check observation/action/context spaces of the agents
self.assertEqual(policy.agents['policy'].ac_space,
self.policy_params_shared['ac_space'])
self.assertEqual(policy.agents['policy'].ob_space,
self.policy_params_shared['ob_space'])
self.assertEqual(policy.agents['policy'].co_space,
self.policy_params_shared['co_space'])
# Check the instantiation of the class attributes.
self.assertTrue(policy.shared)
self.assertTrue(not policy.maddpg)
def test_init_3(self):
    """Check the policy when maddpg=True and shared=False.

    Validates that each agent ("a" and "b") owns its own actor and
    centralized critic under an agent-prefixed name scope, that the
    placeholder shapes are consistent with the per-agent observation/
    action/context spaces, and that ``shared``/``maddpg`` are set.
    """
    policy_params = self.policy_params_independent.copy()
    policy_params["maddpg"] = True
    policy = SACMultiFeedForwardPolicy(**policy_params)
    self.assertListEqual(
        sorted([var.name for var in get_trainable_vars()]),
        ['a/model/centralized_value_fns/qf1/fc0/bias:0',
         'a/model/centralized_value_fns/qf1/fc0/kernel:0',
         'a/model/centralized_value_fns/qf1/fc1/bias:0',
         'a/model/centralized_value_fns/qf1/fc1/kernel:0',
         'a/model/centralized_value_fns/qf1/qf_output/bias:0',
         'a/model/centralized_value_fns/qf1/qf_output/kernel:0',
         'a/model/centralized_value_fns/qf2/fc0/bias:0',
         'a/model/centralized_value_fns/qf2/fc0/kernel:0',
         'a/model/centralized_value_fns/qf2/fc1/bias:0',
         'a/model/centralized_value_fns/qf2/fc1/kernel:0',
         'a/model/centralized_value_fns/qf2/qf_output/bias:0',
         'a/model/centralized_value_fns/qf2/qf_output/kernel:0',
         'a/model/centralized_value_fns/vf/fc0/bias:0',
         'a/model/centralized_value_fns/vf/fc0/kernel:0',
         'a/model/centralized_value_fns/vf/fc1/bias:0',
         'a/model/centralized_value_fns/vf/fc1/kernel:0',
         'a/model/centralized_value_fns/vf/vf_output/bias:0',
         'a/model/centralized_value_fns/vf/vf_output/kernel:0',
         'a/model/log_alpha:0',
         'a/model/pi/fc0/bias:0',
         'a/model/pi/fc0/kernel:0',
         'a/model/pi/fc1/bias:0',
         'a/model/pi/fc1/kernel:0',
         'a/model/pi/log_std/bias:0',
         'a/model/pi/log_std/kernel:0',
         'a/model/pi/mean/bias:0',
         'a/model/pi/mean/kernel:0',
         'a/target/centralized_value_fns/vf/fc0/bias:0',
         'a/target/centralized_value_fns/vf/fc0/kernel:0',
         'a/target/centralized_value_fns/vf/fc1/bias:0',
         'a/target/centralized_value_fns/vf/fc1/kernel:0',
         'a/target/centralized_value_fns/vf/vf_output/bias:0',
         'a/target/centralized_value_fns/vf/vf_output/kernel:0',
         'b/model/centralized_value_fns/qf1/fc0/bias:0',
         'b/model/centralized_value_fns/qf1/fc0/kernel:0',
         'b/model/centralized_value_fns/qf1/fc1/bias:0',
         'b/model/centralized_value_fns/qf1/fc1/kernel:0',
         'b/model/centralized_value_fns/qf1/qf_output/bias:0',
         'b/model/centralized_value_fns/qf1/qf_output/kernel:0',
         'b/model/centralized_value_fns/qf2/fc0/bias:0',
         'b/model/centralized_value_fns/qf2/fc0/kernel:0',
         'b/model/centralized_value_fns/qf2/fc1/bias:0',
         'b/model/centralized_value_fns/qf2/fc1/kernel:0',
         'b/model/centralized_value_fns/qf2/qf_output/bias:0',
         'b/model/centralized_value_fns/qf2/qf_output/kernel:0',
         'b/model/centralized_value_fns/vf/fc0/bias:0',
         'b/model/centralized_value_fns/vf/fc0/kernel:0',
         'b/model/centralized_value_fns/vf/fc1/bias:0',
         'b/model/centralized_value_fns/vf/fc1/kernel:0',
         'b/model/centralized_value_fns/vf/vf_output/bias:0',
         'b/model/centralized_value_fns/vf/vf_output/kernel:0',
         'b/model/log_alpha:0',
         'b/model/pi/fc0/bias:0',
         'b/model/pi/fc0/kernel:0',
         'b/model/pi/fc1/bias:0',
         'b/model/pi/fc1/kernel:0',
         'b/model/pi/log_std/bias:0',
         'b/model/pi/log_std/kernel:0',
         'b/model/pi/mean/bias:0',
         'b/model/pi/mean/kernel:0',
         'b/target/centralized_value_fns/vf/fc0/bias:0',
         'b/target/centralized_value_fns/vf/fc0/kernel:0',
         'b/target/centralized_value_fns/vf/fc1/bias:0',
         'b/target/centralized_value_fns/vf/fc1/kernel:0',
         'b/target/centralized_value_fns/vf/vf_output/bias:0',
         'b/target/centralized_value_fns/vf/vf_output/kernel:0']
    )
    # Total action dimension across every agent. Hoisted out of the loop
    # below: the original recomputed this sum on each iteration, with a
    # generator variable that shadowed the outer loop variable ``key``.
    total_ac_dim = sum(policy.ac_space[k].shape[0]
                       for k in policy.ac_space.keys())
    # Check observation/action/context spaces of the agents
    for key in policy.ac_space.keys():
        self.assertEqual(int(policy.all_obs_ph[key].shape[-1]),
                         int(policy.all_ob_space.shape[0]))
        self.assertEqual(int(policy.all_obs1_ph[key].shape[-1]),
                         int(policy.all_ob_space.shape[0]))
        self.assertEqual(int(policy.all_action_ph[key].shape[-1]),
                         total_ac_dim)
        self.assertEqual(int(policy.action_ph[key].shape[-1]),
                         int(policy.ac_space[key].shape[0]))
        self.assertEqual(int(policy.obs_ph[key].shape[-1]),
                         int(policy.ob_space[key].shape[0]
                             + policy.co_space[key].shape[0]))
        self.assertEqual(int(policy.obs1_ph[key].shape[-1]),
                         int(policy.ob_space[key].shape[0]
                             + policy.co_space[key].shape[0]))
    # Check the instantiation of the class attributes.
    self.assertTrue(not policy.shared)
    self.assertTrue(policy.maddpg)
def test_init_4(self):
    """Check the policy when maddpg=True and shared=True.

    Validates that a single shared actor and centralized critic are
    created (no per-agent prefix), that the MADDPG placeholders match the
    shared observation/action/context spaces, and that the
    ``shared``/``maddpg`` attributes are set accordingly.
    """
    policy_params = self.policy_params_shared.copy()
    policy_params["maddpg"] = True
    policy = SACMultiFeedForwardPolicy(**policy_params)
    # A single centralized critic is shared by all agents.
    self.assertListEqual(
        sorted([var.name for var in get_trainable_vars()]),
        ['model/centralized_value_fns/qf1/fc0/bias:0',
         'model/centralized_value_fns/qf1/fc0/kernel:0',
         'model/centralized_value_fns/qf1/fc1/bias:0',
         'model/centralized_value_fns/qf1/fc1/kernel:0',
         'model/centralized_value_fns/qf1/qf_output/bias:0',
         'model/centralized_value_fns/qf1/qf_output/kernel:0',
         'model/centralized_value_fns/qf2/fc0/bias:0',
         'model/centralized_value_fns/qf2/fc0/kernel:0',
         'model/centralized_value_fns/qf2/fc1/bias:0',
         'model/centralized_value_fns/qf2/fc1/kernel:0',
         'model/centralized_value_fns/qf2/qf_output/bias:0',
         'model/centralized_value_fns/qf2/qf_output/kernel:0',
         'model/centralized_value_fns/vf/fc0/bias:0',
         'model/centralized_value_fns/vf/fc0/kernel:0',
         'model/centralized_value_fns/vf/fc1/bias:0',
         'model/centralized_value_fns/vf/fc1/kernel:0',
         'model/centralized_value_fns/vf/vf_output/bias:0',
         'model/centralized_value_fns/vf/vf_output/kernel:0',
         'model/log_alpha:0',
         'model/pi/fc0/bias:0',
         'model/pi/fc0/kernel:0',
         'model/pi/fc1/bias:0',
         'model/pi/fc1/kernel:0',
         'model/pi/log_std/bias:0',
         'model/pi/log_std/kernel:0',
         'model/pi/mean/bias:0',
         'model/pi/mean/kernel:0',
         'target/centralized_value_fns/vf/fc0/bias:0',
         'target/centralized_value_fns/vf/fc0/kernel:0',
         'target/centralized_value_fns/vf/fc1/bias:0',
         'target/centralized_value_fns/vf/fc1/kernel:0',
         'target/centralized_value_fns/vf/vf_output/bias:0',
         'target/centralized_value_fns/vf/vf_output/kernel:0']
    )
    # Check observation/action/context spaces of the agents
    self.assertEqual(int(policy.all_obs_ph.shape[-1]),
                     policy.all_ob_space.shape[0])
    self.assertEqual(int(policy.all_obs1_ph.shape[-1]),
                     policy.all_ob_space.shape[0])
    # The joint-action placeholder concatenates every agent's action.
    self.assertEqual(int(policy.all_action_ph.shape[-1]),
                     policy.n_agents * policy.ac_space.shape[0])
    self.assertEqual(int(policy.action_ph[0].shape[-1]),
                     policy.ac_space.shape[0])
    # Per-agent observations are concatenated with their context.
    self.assertEqual(int(policy.obs_ph[0].shape[-1]),
                     int(policy.ob_space.shape[0]
                         + policy.co_space.shape[0]))
    self.assertEqual(int(policy.obs1_ph[0].shape[-1]),
                     int(policy.ob_space.shape[0]
                         + policy.co_space.shape[0]))
    # Check the instantiation of the class attributes.
    self.assertTrue(policy.shared)
    self.assertTrue(policy.maddpg)
def test_initialize_1(self):
    """Check the functionality of the initialize() method.

    This test validates that the target variables are properly initialized
    when initialize is called.

    This is done for the following cases:

    1. maddpg = False, shared = False
    2. maddpg = False, shared = True
    3. maddpg = True,  shared = False
    4. maddpg = True,  shared = True
    """
    policy_params = self.policy_params_independent.copy()
    policy_params["maddpg"] = False
    policy = SACMultiFeedForwardPolicy(**policy_params)

    # Initialize the variables of the policy.
    policy.sess.run(tf.compat.v1.global_variables_initializer())

    # Run the initialize method.
    policy.initialize()

    # Enumerate the (model, target) value-function variable pairs for
    # both independent agents.
    vf_layers = ['fc0/kernel', 'fc0/bias', 'fc1/kernel', 'fc1/bias',
                 'vf_output/kernel', 'vf_output/bias']
    name_pairs = [
        ('{}/model/value_fns/vf/{}:0'.format(agent, layer),
         '{}/target/value_fns/vf/{}:0'.format(agent, layer))
        for agent in ('a', 'b')
        for layer in vf_layers
    ]

    # After initialize(), every target variable must equal its model twin.
    with tf.compat.v1.variable_scope(
            tf.compat.v1.get_variable_scope(), reuse=True):
        for model_name, target_name in name_pairs:
            np.testing.assert_almost_equal(
                policy.sess.run(model_name),
                policy.sess.run(target_name))
def test_initialize_2(self):
    """Check initialize() for the maddpg=False, shared=True case."""
    policy_params = self.policy_params_shared.copy()
    policy_params["maddpg"] = False
    policy = SACMultiFeedForwardPolicy(**policy_params)

    # Initialize the variables of the policy.
    policy.sess.run(tf.compat.v1.global_variables_initializer())

    # Run the initialize method.
    policy.initialize()

    # Layers of the (shared) value function whose model and target copies
    # must agree after initialize().
    vf_layers = ['fc0/kernel', 'fc0/bias', 'fc1/kernel', 'fc1/bias',
                 'vf_output/kernel', 'vf_output/bias']

    with tf.compat.v1.variable_scope(
            tf.compat.v1.get_variable_scope(), reuse=True):
        for layer in vf_layers:
            np.testing.assert_almost_equal(
                policy.sess.run('model/value_fns/vf/{}:0'.format(layer)),
                policy.sess.run('target/value_fns/vf/{}:0'.format(layer)))
def test_initialize_3(self):
    """Check initialize() for the maddpg=True, shared=False case.

    Each agent's target centralized value function must equal its model
    counterpart after initialize() is called.
    """
    policy_params = self.policy_params_independent.copy()
    policy_params["maddpg"] = True
    policy = SACMultiFeedForwardPolicy(**policy_params)
    # Initialize the variables of the policy.
    policy.sess.run(tf.compat.v1.global_variables_initializer())
    # Run the initialize method.
    policy.initialize()
    model_var_list = [
        'a/model/centralized_value_fns/vf/fc0/kernel:0',
        'a/model/centralized_value_fns/vf/fc0/bias:0',
        'a/model/centralized_value_fns/vf/fc1/kernel:0',
        'a/model/centralized_value_fns/vf/fc1/bias:0',
        'a/model/centralized_value_fns/vf/vf_output/kernel:0',
        'a/model/centralized_value_fns/vf/vf_output/bias:0',
        'b/model/centralized_value_fns/vf/fc0/kernel:0',
        'b/model/centralized_value_fns/vf/fc0/bias:0',
        'b/model/centralized_value_fns/vf/fc1/kernel:0',
        'b/model/centralized_value_fns/vf/fc1/bias:0',
        'b/model/centralized_value_fns/vf/vf_output/kernel:0',
        'b/model/centralized_value_fns/vf/vf_output/bias:0',
    ]
    target_var_list = [
        'a/target/centralized_value_fns/vf/fc0/kernel:0',
        'a/target/centralized_value_fns/vf/fc0/bias:0',
        'a/target/centralized_value_fns/vf/fc1/kernel:0',
        'a/target/centralized_value_fns/vf/fc1/bias:0',
        'a/target/centralized_value_fns/vf/vf_output/kernel:0',
        'a/target/centralized_value_fns/vf/vf_output/bias:0',
        'b/target/centralized_value_fns/vf/fc0/kernel:0',
        'b/target/centralized_value_fns/vf/fc0/bias:0',
        'b/target/centralized_value_fns/vf/fc1/kernel:0',
        'b/target/centralized_value_fns/vf/fc1/bias:0',
        'b/target/centralized_value_fns/vf/vf_output/kernel:0',
        'b/target/centralized_value_fns/vf/vf_output/bias:0',
    ]
    # Both lists are index-aligned: entry i of one is the twin of entry i
    # of the other.
    for model, target in zip(model_var_list, target_var_list):
        with tf.compat.v1.variable_scope(
                tf.compat.v1.get_variable_scope(), reuse=True):
            model_val = policy.sess.run(model)
            target_val = policy.sess.run(target)
        np.testing.assert_almost_equal(model_val, target_val)
def test_initialize_4(self):
    """Check initialize() for the maddpg=True, shared=True case."""
    policy_params = self.policy_params_shared.copy()
    policy_params["maddpg"] = True
    policy = SACMultiFeedForwardPolicy(**policy_params)

    # Initialize the variables of the policy.
    policy.sess.run(tf.compat.v1.global_variables_initializer())

    # Run the initialize method.
    policy.initialize()

    # Layers of the shared centralized value function; each model copy
    # must match its target copy after initialize().
    vf_layers = ['fc0/bias', 'fc0/kernel', 'fc1/bias', 'fc1/kernel',
                 'vf_output/bias', 'vf_output/kernel']

    with tf.compat.v1.variable_scope(
            tf.compat.v1.get_variable_scope(), reuse=True):
        for layer in vf_layers:
            model_name = 'model/centralized_value_fns/vf/{}:0'.format(layer)
            target_name = 'target/centralized_value_fns/vf/{}:0'.format(layer)
            np.testing.assert_almost_equal(
                policy.sess.run(model_name),
                policy.sess.run(target_name))
# Run the full test suite when this module is executed as a script.
if __name__ == '__main__':
    unittest.main()
| 42.034483
| 79
| 0.537886
| 9,750
| 73,140
| 3.848615
| 0.019897
| 0.04637
| 0.038375
| 0.007995
| 0.983157
| 0.980999
| 0.970126
| 0.954029
| 0.91091
| 0.889218
| 0
| 0.049981
| 0.306276
| 73,140
| 1,739
| 80
| 42.058654
| 0.689568
| 0.066366
| 0
| 0.84064
| 0
| 0
| 0.340525
| 0.320764
| 0
| 0
| 0
| 0
| 0.070285
| 1
| 0.018093
| false
| 0
| 0.006263
| 0
| 0.026444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6edf893416b6de9a97e703dfd9b23e442bdc67d9
| 29,434
|
py
|
Python
|
dlkit/abstract_osid/mapping/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/mapping/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/mapping/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of mapping abstract base class queries."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class LocationQuery:
    """This is the query for searching locations.

    Each method match specifies an ``AND`` term while multiple
    invocations of the same method produce a nested ``OR``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def match_coordinate(self, coordinate, match):
        """Matches locations at the specified ``Coordinate``.

        :param coordinate: a coordinate
        :type coordinate: ``osid.mapping.Coordinate``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``coordinate`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def match_any_coordinate(self, match):
        """Matches locations that have any coordinate assignment.

        :param match: ``true`` to match locations with any coordinate, ``false`` to match locations with no coordinates
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_coordinate_terms(self):
        """Clears the coordinate query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    coordinate_terms = abc.abstractproperty(fdel=clear_coordinate_terms)

    @abc.abstractmethod
    def match_contained_spatial_unit(self, spatial_unit, match):
        """Matches locations containing the specified ``SpatialUnit``.

        :param spatial_unit: a spatial unit
        :type spatial_unit: ``osid.mapping.SpatialUnit``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``spatial_unit`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_contained_spatial_unit_terms(self):
        """Clears the spatial unit terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    contained_spatial_unit_terms = abc.abstractproperty(fdel=clear_contained_spatial_unit_terms)

    @abc.abstractmethod
    def match_overlapping_spatial_unit(self, spatial_unit, match):
        """Matches locations overlapping with the specified ``SpatialUnit``.

        :param spatial_unit: a spatial unit
        :type spatial_unit: ``osid.mapping.SpatialUnit``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``spatial_unit`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_overlapping_spatial_unit_terms(self):
        """Clears the overlapping spatial unit terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    overlapping_spatial_unit_terms = abc.abstractproperty(fdel=clear_overlapping_spatial_unit_terms)

    @abc.abstractmethod
    def match_any_spatial_unit(self, match):
        """Matches locations that have any spatial unit assignment.

        :param match: ``true`` to match locations with any boundary, ``false`` to match locations with no boundaries
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_spatial_unit_terms(self):
        """Clears the spatial unit query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    spatial_unit_terms = abc.abstractproperty(fdel=clear_spatial_unit_terms)

    @abc.abstractmethod
    def match_route_id(self, route_id, match):
        """Sets the route ``Id`` for this query to match locations along the given route.

        :param route_id: the route ``Id``
        :type route_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``route_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_route_id_terms(self):
        """Clears the route ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    route_id_terms = abc.abstractproperty(fdel=clear_route_id_terms)

    @abc.abstractmethod
    def supports_route_query(self):
        """Tests if a ``RouteQuery`` is available.

        :return: ``true`` if a route query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_route_query(self):
        """Gets the query for a route.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the route query
        :rtype: ``osid.mapping.route.RouteQuery``
        :raise: ``Unimplemented`` -- ``supports_route_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_route_query()`` is ``true``.*

        """
        return  # osid.mapping.route.RouteQuery

    route_query = abc.abstractproperty(fget=get_route_query)

    @abc.abstractmethod
    def match_any_route(self, match):
        """Matches locations that are used on any route.

        :param match: ``true`` to match locations on any route, ``false`` to match locations on no routes
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_route_terms(self):
        """Clears the route query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    route_terms = abc.abstractproperty(fdel=clear_route_terms)

    @abc.abstractmethod
    def match_path_id(self, path_id, match):
        """Sets the path ``Id`` for this query to match locations along the given path.

        :param path_id: the path ``Id``
        :type path_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``path_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_path_id_terms(self):
        """Clears the path ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    path_id_terms = abc.abstractproperty(fdel=clear_path_id_terms)

    @abc.abstractmethod
    def supports_path_query(self):
        """Tests if a ``PathQuery`` is available.

        :return: ``true`` if a path query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_path_query(self):
        """Gets the query for a path.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the path query
        :rtype: ``osid.mapping.path.PathQuery``
        :raise: ``Unimplemented`` -- ``supports_path_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_path_query()`` is ``true``.*

        """
        return  # osid.mapping.path.PathQuery

    path_query = abc.abstractproperty(fget=get_path_query)

    @abc.abstractmethod
    def match_any_path(self, match):
        """Matches locations that exist along any path.

        :param match: ``true`` to match locations on any path, ``false`` to match locations on no path
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_path_terms(self):
        """Clears the path query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    path_terms = abc.abstractproperty(fdel=clear_path_terms)

    @abc.abstractmethod
    def match_containing_location_id(self, location_id, match):
        """Sets the location ``Id`` for this query to match locations contained within the given location.

        :param location_id: a location ``Id``
        :type location_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``location_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_containing_location_id_terms(self):
        """Clears the containing location ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    containing_location_id_terms = abc.abstractproperty(fdel=clear_containing_location_id_terms)

    @abc.abstractmethod
    def supports_containing_location_query(self):
        """Tests if a ``LocationQuery`` is available.

        :return: ``true`` if a location query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_containing_location_query(self):
        """Gets the query for a parent location.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the location query
        :rtype: ``osid.mapping.LocationQuery``
        :raise: ``Unimplemented`` -- ``supports_containing_location_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_containing_location_query()`` is ``true``.*

        """
        return  # osid.mapping.LocationQuery

    containing_location_query = abc.abstractproperty(fget=get_containing_location_query)

    @abc.abstractmethod
    def match_any_containing_location(self, match):
        """Matches locations that have any ancestor.

        :param match: ``true`` to match locations with any parent location, ``false`` to match root locations
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_containing_location_terms(self):
        """Clears the containing location query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    containing_location_terms = abc.abstractproperty(fdel=clear_containing_location_terms)

    @abc.abstractmethod
    def match_contained_location_id(self, location_id, match):
        """Sets the location ``Id`` for this query to match locations containing the given location.

        :param location_id: a location ``Id``
        :type location_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``location_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_contained_location_id_terms(self):
        """Clears the contained location ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    contained_location_id_terms = abc.abstractproperty(fdel=clear_contained_location_id_terms)

    @abc.abstractmethod
    def supports_contained_location_query(self):
        """Tests if a ``LocationQuery`` is available.

        :return: ``true`` if a location query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_contained_location_query(self):
        """Gets the query for a contained location.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the location query
        :rtype: ``osid.mapping.LocationQuery``
        :raise: ``Unimplemented`` -- ``supports_contained_location_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_contained_location_query()`` is ``true``.*

        """
        return  # osid.mapping.LocationQuery

    contained_location_query = abc.abstractproperty(fget=get_contained_location_query)

    @abc.abstractmethod
    def match_any_contained_location(self, match):
        """Matches locations that have any children.

        :param match: ``true`` to match locations containing any other location, ``false`` to match empty locations
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_contained_location_terms(self):
        """Clears the contained location query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    contained_location_terms = abc.abstractproperty(fdel=clear_contained_location_terms)

    @abc.abstractmethod
    def match_map_id(self, map_id, match):
        """Sets the map ``Id`` for this query.

        :param map_id: the map ``Id``
        :type map_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_map_id_terms(self):
        """Clears the map ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    map_id_terms = abc.abstractproperty(fdel=clear_map_id_terms)

    @abc.abstractmethod
    def supports_map_query(self):
        """Tests if a ``MapQuery`` is available.

        :return: ``true`` if a map query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_map_query(self):
        """Gets the query for a map.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the map query
        :rtype: ``osid.mapping.MapQuery``
        :raise: ``Unimplemented`` -- ``supports_map_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_query()`` is ``true``.*

        """
        return  # osid.mapping.MapQuery

    map_query = abc.abstractproperty(fget=get_map_query)

    @abc.abstractmethod
    def clear_map_terms(self):
        """Clears the map query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    map_terms = abc.abstractproperty(fdel=clear_map_terms)

    @abc.abstractmethod
    def get_location_query_record(self, location_record_type):
        """Gets the location query record corresponding to the given ``Location`` record ``Type``.

        Multiple record retrievals produce a nested ``OR`` term.

        :param location_record_type: a location record type
        :type location_record_type: ``osid.type.Type``
        :return: the location query record
        :rtype: ``osid.mapping.records.LocationQueryRecord``
        :raise: ``NullArgument`` -- ``location_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(location_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.mapping.records.LocationQueryRecord
class MapQuery:
"""This is the query for searching maps.
Each method match specifies an ``AND`` term while multiple
invocations of the same method produce a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_location_id(self, location_id, match):
"""Sets the location ``Id`` for this query to match maps that have a related location.
:param location_id: a location ``Id``
:type location_id: ``osid.id.Id``
:param match: ``true`` if a positive match, ``false`` for negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``location_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_id_terms(self):
"""Clears the location ``Id`` query terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_id_terms = abc.abstractproperty(fdel=clear_location_id_terms)
@abc.abstractmethod
def supports_location_query(self):
"""Tests if a ``LocationQuery`` is available.
:return: ``true`` if a location query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_location_query(self):
"""Gets the query for a location.
Multiple retrievals produce a nested ``OR`` term.
:return: the location query
:rtype: ``osid.mapping.LocationQuery``
:raise: ``Unimplemented`` -- ``supports_location_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_query()`` is ``true``.*
"""
return # osid.mapping.LocationQuery
location_query = abc.abstractproperty(fget=get_location_query)
@abc.abstractmethod
def match_any_location(self, match):
"""Matches maps that have any location.
:param match: ``true`` to match maps with any location, ``false`` to match maps with no location
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_terms(self):
"""Clears the location query terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_terms = abc.abstractproperty(fdel=clear_location_terms)
@abc.abstractmethod
def match_path_id(self, path_id, match):
"""Sets the path ``Id`` for this query to match maps containing paths.
:param path_id: the path ``Id``
:type path_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``path_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_path_id_terms(self):
"""Clears the path ``Id`` query terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
path_id_terms = abc.abstractproperty(fdel=clear_path_id_terms)
@abc.abstractmethod
def supports_path_query(self):
"""Tests if a ``PathQuery`` is available.
:return: ``true`` if a path query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_path_query(self):
"""Gets the query for a path.
Multiple retrievals produce a nested ``OR`` term.
:return: the path query
:rtype: ``osid.mapping.path.PathQuery``
:raise: ``Unimplemented`` -- ``supports_path_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_path_query()`` is ``true``.*
"""
return # osid.mapping.path.PathQuery
path_query = abc.abstractproperty(fget=get_path_query)
@abc.abstractmethod
def match_any_path(self, match):
"""Matches maps that have any path.
:param match: ``true`` to match maps with any path, ``false`` to match maps with no path
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_path_terms(self):
"""Clears the path query terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
path_terms = abc.abstractproperty(fdel=clear_path_terms)
@abc.abstractmethod
def match_route_id(self, path_id, match):
    """Sets the route ``Id`` for this query to match maps containing the given route.

    NOTE(review): the original docstring was a copy of ``match_path_id``
    ("Sets the path ``Id`` ..."). Given the method name and the adjacent
    ``clear_route_id_terms``, the argument presumably carries a route
    ``Id`` even though the parameter is named ``path_id`` -- confirm
    against the OSID specification before renaming the parameter.

    :param path_id: the route ``Id``
    :type path_id: ``osid.id.Id``
    :param match: ``true`` for a positive match, ``false`` for a negative match
    :type match: ``boolean``
    :raise: ``NullArgument`` -- ``path_id`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def clear_route_id_terms(self):
    """Clears the route ``Id`` query terms.

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
route_id_terms = abc.abstractproperty(fdel=clear_route_id_terms)  # delete-only: ``del obj.route_id_terms`` clears the terms
@abc.abstractmethod
def supports_route_query(self):
    """Tests if a ``RouteQuery`` is available.

    :return: ``true`` if a route query is available, ``false`` otherwise
    :rtype: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def get_route_query(self):
    """Gets the query for a route.

    Multiple retrievals produce a nested ``OR`` term.

    :return: the route query
    :rtype: ``osid.mapping.route.RouteQuery``
    :raise: ``Unimplemented`` -- ``supports_route_query()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_route_query()`` is ``true``.*

    """
    return  # osid.mapping.route.RouteQuery
route_query = abc.abstractproperty(fget=get_route_query)  # read-only property view of get_route_query
@abc.abstractmethod
def match_any_route(self, match):
    """Matches maps that have any route.

    :param match: ``true`` to match maps with any route, ``false`` to match maps with no route
    :type match: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def clear_route_terms(self):
    """Clears the route query terms.

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
route_terms = abc.abstractproperty(fdel=clear_route_terms)  # delete-only: ``del obj.route_terms`` clears the terms
@abc.abstractmethod
def match_ancestor_map_id(self, map_id, match):
    """Sets the map ``Id`` for this query to match maps that have the specified map as an ancestor.

    :param map_id: a map ``Id``
    :type map_id: ``osid.id.Id``
    :param match: ``true`` for a positive match, ``false`` for negative match
    :type match: ``boolean``
    :raise: ``NullArgument`` -- ``map_id`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def clear_ancestor_map_id_terms(self):
    """Clears the ancestor map ``Id`` query terms.

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
ancestor_map_id_terms = abc.abstractproperty(fdel=clear_ancestor_map_id_terms)  # delete-only property
@abc.abstractmethod
def supports_ancestor_map_query(self):
    """Tests if a ``MapQuery`` is available.

    :return: ``true`` if a map query is available, ``false`` otherwise
    :rtype: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def get_ancestor_map_query(self):
    """Gets the query for a map.

    Multiple retrievals produce a nested ``OR`` term.

    :return: the map query
    :rtype: ``osid.mapping.MapQuery``
    :raise: ``Unimplemented`` -- ``supports_ancestor_map_query()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_ancestor_map_query()`` is ``true``.*

    """
    return  # osid.mapping.MapQuery
ancestor_map_query = abc.abstractproperty(fget=get_ancestor_map_query)  # read-only property view of get_ancestor_map_query
@abc.abstractmethod
def match_any_ancestor_map(self, match):
    """Matches maps with any ancestor.

    :param match: ``true`` to match maps with any ancestor, ``false`` to match root maps
    :type match: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def clear_ancestor_map_terms(self):
    """Clears the ancestor map query terms.

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
ancestor_map_terms = abc.abstractproperty(fdel=clear_ancestor_map_terms)  # delete-only property
@abc.abstractmethod
def match_descendant_map_id(self, map_id, match):
    """Sets the map ``Id`` for this query to match maps that have the specified map as a descendant.

    :param map_id: a map ``Id``
    :type map_id: ``osid.id.Id``
    :param match: ``true`` for a positive match, ``false`` for negative match
    :type match: ``boolean``
    :raise: ``NullArgument`` -- ``map_id`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def clear_descendant_map_id_terms(self):
    """Clears the descendant map ``Id`` query terms.

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
descendant_map_id_terms = abc.abstractproperty(fdel=clear_descendant_map_id_terms)  # delete-only property
@abc.abstractmethod
def supports_descendant_map_query(self):
    """Tests if a ``MapQuery`` is available.

    :return: ``true`` if a map query is available, ``false`` otherwise
    :rtype: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def get_descendant_map_query(self):
    """Gets the query for a map.

    Multiple retrievals produce a nested ``OR`` term.

    :return: the map query
    :rtype: ``osid.mapping.MapQuery``
    :raise: ``Unimplemented`` -- ``supports_descendant_map_query()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_descendant_map_query()`` is ``true``.*

    """
    return  # osid.mapping.MapQuery
descendant_map_query = abc.abstractproperty(fget=get_descendant_map_query)  # read-only property view of get_descendant_map_query
@abc.abstractmethod
def match_any_descendant_map(self, match):
    """Matches maps with any descendant.

    :param match: ``true`` to match maps with any descendant, ``false`` to match leaf maps
    :type match: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def clear_descendant_map_terms(self):
    """Clears the descendant map query terms.

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
descendant_map_terms = abc.abstractproperty(fdel=clear_descendant_map_terms)  # delete-only property
@abc.abstractmethod
def get_map_query_record(self, map_record_type):
    """Gets the map query record corresponding to the given ``Map`` record ``Type``.

    Multiple record retrievals produce a nested ``OR`` term.

    :param map_record_type: a map record type
    :type map_record_type: ``osid.type.Type``
    :return: the map query record
    :rtype: ``osid.mapping.records.MapQueryRecord``
    :raise: ``NullArgument`` -- ``map_record_type`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unsupported`` -- ``has_record_type(map_record_type)`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.mapping.records.MapQueryRecord
| 28.438647
| 119
| 0.631549
| 3,339
| 29,434
| 5.426176
| 0.051512
| 0.065681
| 0.077271
| 0.061817
| 0.907606
| 0.888288
| 0.825698
| 0.744011
| 0.692847
| 0.686996
| 0
| 0
| 0.256676
| 29,434
| 1,034
| 120
| 28.466151
| 0.8281
| 0.587246
| 0
| 0.706827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.281125
| false
| 0.192771
| 0.004016
| 0
| 0.526104
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
42d9369c99af7adb8cfd02c0100c8ba5acd2c9f1
| 228
|
py
|
Python
|
solarforecast/__init__.py
|
arminalgln/solar
|
b23b0c1820f997c4ff1f99ffdb8a4ecc68007e5f
|
[
"MIT"
] | null | null | null |
solarforecast/__init__.py
|
arminalgln/solar
|
b23b0c1820f997c4ff1f99ffdb8a4ecc68007e5f
|
[
"MIT"
] | null | null | null |
solarforecast/__init__.py
|
arminalgln/solar
|
b23b0c1820f997c4ff1f99ffdb8a4ecc68007e5f
|
[
"MIT"
] | null | null | null |
from .readingdata import FileInf
from .readingdata import SolcastDataForecast
from .forecastmodel import SolarF
from .readingdata import EtapData
from .readingdata import SolcastHistorical
from .readingdata import OpenWeatherAPI
| 38
| 44
| 0.872807
| 24
| 228
| 8.291667
| 0.416667
| 0.376884
| 0.527638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100877
| 228
| 6
| 45
| 38
| 0.970732
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2826d8cf86573d5302da420e8ef00f20a41e5782
| 6,624
|
py
|
Python
|
app/filterMarketData_test.py
|
kyoungd/material-stock-finder-app
|
60b4a274ddb304ae8257f6a53a0d91b65975b649
|
[
"MIT"
] | null | null | null |
app/filterMarketData_test.py
|
kyoungd/material-stock-finder-app
|
60b4a274ddb304ae8257f6a53a0d91b65975b649
|
[
"MIT"
] | null | null | null |
app/filterMarketData_test.py
|
kyoungd/material-stock-finder-app
|
60b4a274ddb304ae8257f6a53a0d91b65975b649
|
[
"MIT"
] | 1
|
2022-03-26T06:50:59.000Z
|
2022-03-26T06:50:59.000Z
|
from unittest import mock, TestCase, main
import pandas as pd
import random
import datetime
from util import RedisTimeFrame
from dbase import MarketDataDb
class TestMarketData(TestCase):
    """Round-trip tests for MarketDataDb: merge minute bars and write/read them back.

    The original file duplicated the same 21-row bar list verbatim in both
    tests (and carried a stray ``];`` and a misleading "random number"
    comment); the fixture now lives once on the class.
    """

    # 21 one-minute OHLCV bars spanning 5 distinct calendar days
    # (2022-05-07 .. 2022-05-11). Keys follow the Alpaca-style bar schema:
    # t=timestamp, o/h/l/c=prices, v=volume, n=trade count, vw=VWAP.
    SAMPLE_BARS = [
        {"t": "2022-05-11T12:54:00Z", "o": 60.99, "h": 60.99, "l": 60.99, "c": 60.99, "v": 300, "n": 2, "vw": 60.9901},
        {"t": "2022-05-11T12:39:00Z", "o": 62.5, "h": 62.5, "l": 61.14, "c": 61.14, "v": 203, "n": 3, "vw": 61.806995},
        {"t": "2022-05-10T20:49:00Z", "o": 63.14, "h": 63.14, "l": 63.14, "c": 63.14, "v": 738, "n": 3, "vw": 63.14},
        {"t": "2022-05-10T20:37:00Z", "o": 63.14, "h": 63.14, "l": 63.14, "c": 63.14, "v": 367, "n": 12, "vw": 63.150981},
        {"t": "2022-05-10T20:34:00Z", "o": 63.05, "h": 63.05, "l": 63.05, "c": 63.05, "v": 1870, "n": 1, "vw": 63.05},
        {"t": "2022-05-10T20:01:00Z", "o": 63.05, "h": 63.05, "l": 63.05, "c": 63.05, "v": 166, "n": 1, "vw": 63.05},
        {"t": "2022-05-10T20:00:00Z", "o": 63.05, "h": 63.05, "l": 63.05, "c": 63.05, "v": 86352, "n": 17, "vw": 63.05},
        {"t": "2022-05-10T19:59:00Z", "o": 63.21, "h": 63.26, "l": 63.06, "c": 63.17, "v": 48768, "n": 577, "vw": 63.196629},
        {"t": "2022-05-10T19:58:00Z", "o": 63.125, "h": 63.23, "l": 63.12, "c": 63.19, "v": 32905, "n": 434, "vw": 63.176757},
        {"t": "2022-05-10T19:57:00Z", "o": 63.08, "h": 63.155, "l": 63.08, "c": 63.125, "v": 24075, "n": 335, "vw": 63.11993},
        {"t": "2022-05-10T19:56:00Z", "o": 62.915, "h": 63.095, "l": 62.915, "c": 63.08, "v": 23586, "n": 356, "vw": 63.016562},
        {"t": "2022-05-10T19:55:00Z", "o": 63.02, "h": 63.07, "l": 62.91, "c": 62.915, "v": 16345, "n": 236, "vw": 62.970845},
        {"t": "2022-05-10T19:54:00Z", "o": 63.18, "h": 63.19, "l": 63.03, "c": 63.04, "v": 15894, "n": 304, "vw": 63.100413},
        {"t": "2022-05-10T19:53:00Z", "o": 63.13, "h": 63.23, "l": 63.13, "c": 63.19, "v": 17371, "n": 256, "vw": 63.166113},
        {"t": "2022-05-10T19:52:00Z", "o": 63.075, "h": 63.16, "l": 63.05, "c": 63.15, "v": 16097, "n": 230, "vw": 63.100952},
        {"t": "2022-05-10T19:51:00Z", "o": 63.045, "h": 63.095, "l": 63.01, "c": 63.08, "v": 15041, "n": 217, "vw": 63.059537},
        {"t": "2022-05-09T19:50:00Z", "o": 63.13, "h": 63.13, "l": 63, "c": 63.06, "v": 9846, "n": 157, "vw": 63.059741},
        {"t": "2022-05-09T19:49:00Z", "o": 63.08, "h": 63.21, "l": 63.07, "c": 63.145, "v": 11489, "n": 188, "vw": 63.139188},
        {"t": "2022-05-09T19:48:00Z", "o": 63.16, "h": 63.18, "l": 63.06, "c": 63.095, "v": 11551, "n": 181, "vw": 63.106101},
        {"t": "2022-05-08T19:47:00Z", "o": 63.11, "h": 63.155, "l": 63.065, "c": 63.14, "v": 7863, "n": 118, "vw": 63.109396},
        {"t": "2022-05-07T19:46:00Z", "o": 63.095, "h": 63.12, "l": 62.96, "c": 63.102, "v": 8571, "n": 143, "vw": 63.039032}
    ]

    def _fresh_bars(self):
        """Return a deep copy of SAMPLE_BARS so one test cannot mutate another's input."""
        import copy
        return copy.deepcopy(self.SAMPLE_BARS)

    def setUp(self):
        self.app = MarketDataDb()
        self.symbol = 'TEST01'

    def tearDown(self):
        pass

    def test_marketdatadb_01(self):
        """Merging minute bars at DAILY resolution yields one row per calendar day (5)."""
        merged = self.app.MergeData(RedisTimeFrame.DAILY, self._fresh_bars())
        self.assertEqual(len(merged), 5)

    def test_marketdatadb_02(self):
        """Write bars for a symbol, read them back, and spot-check open prices."""
        datatype = 'stock'
        timeframe = '1Day'
        result = self.app.WriteMarket(self.symbol, self._fresh_bars(), datatype, timeframe)
        self.assertTrue(result)
        is_read_ok, values = self.app.ReadMarket(self.symbol, datatype, timeframe)
        self.assertTrue(is_read_ok)
        rows = values[0]
        # Rows come back newest-first, matching the fixture's ordering.
        self.assertEqual(rows[2]['o'], 63.13)
        self.assertEqual(rows[0]['o'], 60.99)
| 80.780488
| 137
| 0.447162
| 1,165
| 6,624
| 2.539056
| 0.149356
| 0.070994
| 0.099391
| 0.073022
| 0.801893
| 0.801893
| 0.787018
| 0.787018
| 0.787018
| 0.787018
| 0
| 0.383232
| 0.252717
| 6,624
| 81
| 138
| 81.777778
| 0.214343
| 0.006793
| 0
| 0.591549
| 0
| 0
| 0.187804
| 0
| 0
| 0
| 0
| 0
| 0.070423
| 1
| 0.056338
| false
| 0.014085
| 0.084507
| 0
| 0.15493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
95a5ea494772e2c1be7ed55991eae84280b0cf54
| 8,250
|
py
|
Python
|
docker/docker-lambda/python/Word2pdf/app/testdata.py
|
BigMountainTiger/aws-cdk-examples
|
e954a44544ee9921c305e460a4582fb6f6c09bc2
|
[
"MIT"
] | 2
|
2021-05-12T10:05:45.000Z
|
2021-09-26T21:32:25.000Z
|
docker/docker-lambda/python/Word2pdf/app/testdata.py
|
BigMountainTiger/aws-cdk-examples
|
e954a44544ee9921c305e460a4582fb6f6c09bc2
|
[
"MIT"
] | 6
|
2020-09-28T21:57:19.000Z
|
2022-03-15T22:21:18.000Z
|
docker/docker-lambda/python/Word2pdf/app/testdata.py
|
BigMountainTiger/aws-cdk-examples
|
e954a44544ee9921c305e460a4582fb6f6c09bc2
|
[
"MIT"
] | null | null | null |
def get_test_data():
    """Return a canned invoice payload for rendering tests.

    The invoice holds one irregular first line item followed by 50
    generated items ("Item description No.0" .. "No.49"), replacing the
    original's 300+ lines of hand-copied literals with a loop while
    producing an identical structure.

    :return: dict with ``contact`` and ``invoice`` keys;
        ``invoice["items"]`` has 51 entries and ``invoice["total"]``
        carries the grand total.
    """
    # First row deliberately differs (large unit price, tiny line total)
    # to exercise column formatting edge cases.
    items = [{
        "quantity": 12,
        "description": "Item description No.0",
        "unitprice": 12000.3,
        "linetotal": 20
    }]
    # 50 uniform rows: No.0 .. No.49 (No.0 intentionally appears twice).
    items += [
        {
            "quantity": 1290,
            "description": "Item description No.{}".format(i),
            "unitprice": 12.3,
            "linetotal": 20000
        }
        for i in range(50)
    ]
    return {
        "contact": {
            "name": "Paul Kempa",
            "company": "Baltimore Steel Factory"
        },
        "invoice": {
            "items": items,
            "total": 50000000.01
        }
    }
| 25.943396
| 50
| 0.430545
| 630
| 8,250
| 5.634921
| 0.122222
| 0.215493
| 0.373521
| 0.402254
| 0.940563
| 0.933239
| 0.910986
| 0.910986
| 0.910986
| 0.910986
| 0
| 0.150955
| 0.429091
| 8,250
| 318
| 51
| 25.943396
| 0.60276
| 0
| 0
| 0.477987
| 0
| 0
| 0.371591
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003145
| true
| 0
| 0
| 0.003145
| 0.006289
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
252afafd97ab151c4fad12f140bd0aeb9bc78099
| 15,646
|
py
|
Python
|
presqt/targets/gitlab/tests/views/download/test_download.py
|
djordjetrajkovic/presqt
|
8424b61b1c5b8d29de74c7a333889d9e9eb7aee8
|
[
"Apache-2.0"
] | 3
|
2019-01-29T19:45:25.000Z
|
2020-12-01T18:24:51.000Z
|
presqt/targets/gitlab/tests/views/download/test_download.py
|
djordjetrajkovic/presqt
|
8424b61b1c5b8d29de74c7a333889d9e9eb7aee8
|
[
"Apache-2.0"
] | 419
|
2018-09-13T23:11:15.000Z
|
2021-09-22T17:49:00.000Z
|
presqt/targets/gitlab/tests/views/download/test_download.py
|
djordjetrajkovic/presqt
|
8424b61b1c5b8d29de74c7a333889d9e9eb7aee8
|
[
"Apache-2.0"
] | 2
|
2020-04-10T08:19:41.000Z
|
2021-01-04T15:29:42.000Z
|
import io
import json
import shutil
import zipfile
from django.test import SimpleTestCase
from rest_framework.reverse import reverse
from rest_framework.test import APIClient
from presqt.utilities import read_file
from presqt.targets.utilities import shared_call_get_resource_zip
from presqt.api_v1.utilities import hash_tokens
from config.settings.base import GITLAB_TEST_USER_TOKEN
class TestDownload(SimpleTestCase):
"""
Test the 'api_v1/downloads/<ticket_id>/' endpoint's GET method.
Testing only GitLab download function.
"""
def setUp(self):
    """Build a fresh API client, auth headers, and target metadata per test."""
    self.client = APIClient()
    self.header = {
        'HTTP_PRESQT_SOURCE_TOKEN': GITLAB_TEST_USER_TOKEN,
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.target_name = 'gitlab'
    self.token = GITLAB_TEST_USER_TOKEN
def test_success_download_private_repo(self):
    """
    Return a 200 along with a zip file of the private project requested.
    """
    resource_id = '17990894'
    # Kicks off the async download job.
    # NOTE(review): presumably this helper also sets self.ticket_number
    # (used in the cleanup below) -- confirm in presqt.targets.utilities.
    shared_call_get_resource_zip(self, resource_id)
    url = reverse('job_status', kwargs={'action': 'download',
                                        'response_format': 'zip'})
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)
    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file.
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12).
    # NOTE(review): response._headers is private API removed in Django 3.2+
    # (use response.headers) -- left as-is to match the pinned Django here.
    self.assertEqual(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    self.assertEqual(len(zip_file.namelist()), 14)
    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('gitlab_download_{}/fixity_info.json'.format(resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 2)
    with zip_file.open('gitlab_download_{}/PRESQT_FTS_METADATA.json'.format(resource_id)) as metadatafile:
        metadata = json.load(metadatafile)
        self.assertEqual(metadata['extra_metadata']['description'],
                         "Welcome to the show, kid.")
        self.assertEqual(metadata['extra_metadata']['title'], 'Test Project')
    file_path = "{}_download_{}/data/Test Project/README.md".format(
        self.target_name, resource_id)
    # Verify that the folder exists
    self.assertIn(file_path, zip_file.namelist())
    # Verify there is only one entry that contains this folder
    count_of_file_references = zip_file.namelist().count(file_path)
    self.assertEqual(count_of_file_references, 1)
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_success_download_public_repo(self):
    """
    Return a 200 along with a zip file of the public project requested.
    """
    resource_id = '17993206'
    shared_call_get_resource_zip(self, resource_id)
    url = reverse('job_status', kwargs={'action': 'download',
                                        'response_format': 'zip'})
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)
    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file.
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12).
    # NOTE(review): response._headers is private API removed in Django 3.2+.
    self.assertEqual(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    self.assertEqual(len(zip_file.namelist()), 14)
    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('gitlab_download_{}/fixity_info.json'.format(resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 1)
    file_path = "{}_download_{}/data/ProjectTwo/README.md".format(
        self.target_name, resource_id)
    # Verify that the file exists
    self.assertIn(file_path, zip_file.namelist())
    # Verify there is only one entry that contains this file
    count_of_file_references = zip_file.namelist().count(file_path)
    self.assertEqual(count_of_file_references, 1)
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_success_download_unowned_public_repo(self):
    """
    Return a 200 along with a zip file of the unowned public project requested.
    """
    resource_id = '17433066'
    shared_call_get_resource_zip(self, resource_id)
    url = reverse('job_status', kwargs={'action': 'download',
                                        'response_format': 'zip'})
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)
    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file.
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12).
    # NOTE(review): response._headers is private API removed in Django 3.2+.
    self.assertEqual(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    # self.assertEqual(len(zip_file.namelist()), 13)
    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('gitlab_download_{}/fixity_info.json'.format(resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 72)
    file_path = "gitlab_download_17433066/data/Eggs-Flutter/lib/themes/theme.dart"
    # Verify that the file exists
    self.assertIn(file_path, zip_file.namelist())
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_success_download_directory(self):
    """
    Return a 200 along with a zip file of the unowned public directory requested.
    """
    # 'project_id:path' form addresses a directory inside the project.
    resource_id = '17433066:test'
    shared_call_get_resource_zip(self, resource_id)
    url = reverse('job_status', kwargs={'action': 'download',
                                        'response_format': 'zip'})
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)
    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file.
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12).
    # NOTE(review): response._headers is private API removed in Django 3.2+.
    self.assertEqual(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    # self.assertEqual(len(zip_file.namelist()), 13)
    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('gitlab_download_{}/fixity_info.json'.format(resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 1)
    file_path = 'gitlab_download_17433066:test/data/test/widget_test.dart'
    # Verify that the file exists
    self.assertIn(file_path, zip_file.namelist())
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_success_download_single_file(self):
    """
    Return a 200 along with a zip file of the single file requested.
    """
    # 'project_id:encoded_path' form addresses a single file ('.' -> %2E).
    resource_id = '17993268:README%2Emd'
    shared_call_get_resource_zip(self, resource_id)
    url = reverse('job_status', kwargs={'action': 'download',
                                        'response_format': 'zip'})
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)
    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file.
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12).
    # NOTE(review): response._headers is private API removed in Django 3.2+.
    self.assertEqual(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    # self.assertEqual(len(zip_file.namelist()), 13)
    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('gitlab_download_{}/fixity_info.json'.format(resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 1)
    file_path = 'gitlab_download_17993268:README%2Emd/data/README.md'
    # Verify that the file exists
    self.assertIn(file_path, zip_file.namelist())
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_error_500_401(self):
    """
    Return a 500 if an invalid token is provided.
    """
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': '209373160',
                                      'resource_format': 'zip'})
    response = self.client.get(url, **{'HTTP_PRESQT_SOURCE_TOKEN': 'eggs', 'HTTP_PRESQT_EMAIL_OPT_IN': ''})
    # Job folders are keyed by the hashed token, so hash the same bad token.
    ticket_number = hash_tokens('eggs')
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)
    # Busy-wait poll until the async download job leaves 'in_progress'.
    # NOTE(review): no timeout -- a hung job makes this loop forever.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **{'HTTP_PRESQT_SOURCE_TOKEN': 'eggs'})
    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 401)
    self.assertEqual(download_response.data['message'],
                     "Token is invalid. Response returned a 401 status code.")
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))
def test_error_500_404_project(self):
    """
    Return a 500 if an invalid resource_id is provided.
    """
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': 'bad',
                                      'resource_format': 'zip'})
    response = self.client.get(url, **self.header)
    # Job folders are keyed by the hashed token of the requesting user.
    ticket_number = hash_tokens(self.token)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)
    # Busy-wait poll until the async download job leaves 'in_progress'.
    # NOTE(review): no timeout -- a hung job makes this loop forever.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)
    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(download_response.data['message'],
                     "The resource with id, bad, does not exist for this user.")
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))
def test_error_500_404_directory(self):
    """
    Return a 500 if an invalid resource_id (directory) is provided.

    Mirrors test_error_500_404_project but with a 'container:name' style id.
    """
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': '16682224:Danglesauce',
                                      'resource_format': 'zip'})
    response = self.client.get(url, **self.header)
    ticket_number = hash_tokens(self.token)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)
    # Poll until the background download job leaves 'in_progress'.
    # NOTE(review): unbounded busy-wait without sleep — see sibling tests.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)
    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(download_response.data['message'],
                     "The resource with id, 16682224:Danglesauce, does not exist for this user.")
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))
def test_error_500_404_file(self):
    """
    Return a 500 if an invalid resource_id (file) is provided.

    Same flow as the project/directory variants, using a file-style id with
    a percent-encoded dot.
    """
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': '17993268:TheEggBasketMetaphor%2Emp4',
                                      'resource_format': 'zip'})
    response = self.client.get(url, **self.header)
    ticket_number = hash_tokens(self.token)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)
    # Poll until the background download job leaves 'in_progress'.
    # NOTE(review): unbounded busy-wait without sleep — see sibling tests.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)
    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(download_response.data['message'],
                     "The resource with id, 17993268:TheEggBasketMetaphor%2Emp4, does not exist for this user.")
    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))
| 45.482558
| 116
| 0.639205
| 1,830
| 15,646
| 5.253552
| 0.104372
| 0.056168
| 0.02434
| 0.028396
| 0.877054
| 0.845642
| 0.832224
| 0.829623
| 0.825359
| 0.814437
| 0
| 0.021227
| 0.2593
| 15,646
| 343
| 117
| 45.61516
| 0.808353
| 0.173207
| 0
| 0.721393
| 0
| 0
| 0.19676
| 0.074595
| 0
| 0
| 0
| 0
| 0.21393
| 1
| 0.049751
| false
| 0.019901
| 0.054726
| 0
| 0.109453
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
254a1e75993d0dd92aac408fc911ab82057d8dba
| 11,002
|
py
|
Python
|
3DShit.py
|
moaz-eldefrawy/Graphical-Linear-Transformation
|
ca2cf30fc22d33c51ac65f7e714bf4e59b54ffc4
|
[
"MIT"
] | null | null | null |
3DShit.py
|
moaz-eldefrawy/Graphical-Linear-Transformation
|
ca2cf30fc22d33c51ac65f7e714bf4e59b54ffc4
|
[
"MIT"
] | 3
|
2021-03-19T08:34:05.000Z
|
2022-01-13T01:52:08.000Z
|
3DShit.py
|
moaz-eldefrawy/Graphical-Linear-Transformation
|
ca2cf30fc22d33c51ac65f7e714bf4e59b54ffc4
|
[
"MIT"
] | 1
|
2019-11-20T18:01:37.000Z
|
2019-11-20T18:01:37.000Z
|
from manimlib.imports import *
import os
import pyclbr
from tkinter import *
##import numpy as np
# Module-level counter; written once here and never read or updated anywhere
# in this file — dead state, candidate for removal.
n = 0
def get_cube(b1, b2, b3, b4, h1, h2, h3, h4, **kwargs):
    """Build the 12 edges of a box from its two sets of four corners.

    h1..h4 form one square face, b1..b4 the other; the remaining four
    edges connect corresponding corners.  Extra keyword arguments are
    forwarded to every ``Line``.  Returns a manim ``Group`` of 12 lines
    (base square, top square, then the connectors).
    """
    endpoint_pairs = (
        # base face
        (h1, h2), (h2, h3), (h3, h4), (h4, h1),
        # top face
        (b1, b2), (b2, b3), (b3, b4), (b4, b1),
        # vertical connectors
        (b1, h1), (b2, h2), (b3, h3), (b4, h4),
    )
    edges = [Line(start, end, **kwargs) for start, end in endpoint_pairs]
    return Group(*edges)
def get_pyramid(p1, p2, p3, p4, p5, **kwargs):
    """Build the 8 edges of a square pyramid.

    p1..p4 are the base corners, p5 the apex.  Extra keyword arguments
    are forwarded to every ``Line``.  Returns a plain list of 8 lines:
    the four base edges followed by the four edges rising to the apex.
    """
    base_corners = (p1, p2, p3, p4)
    # Base edges pair each corner with the next one, wrapping around.
    edge_pairs = [(p1, p2), (p2, p3), (p3, p4), (p4, p1)]
    # Slant edges all meet at the apex.
    edge_pairs += [(corner, p5) for corner in base_corners]
    return [Line(start, end, **kwargs) for start, end in edge_pairs]
class Shapes(ThreeDScene):
    """Draw a 3D grid scene, build a square pyramid, and animate its
    reflection about the Y axis (vertices mapped via ``TransformMatrix``).

    Cleanup vs. the original: removed the dead local CONFIG dict (assigned
    but never read — manim only honors CONFIG as a class attribute), the
    no-op triple-quoted vertex list, and large blocks of commented-out
    experimentation code; behavior is unchanged.
    """

    def construct(self):
        # phi tilts the camera vertically, theta spins it horizontally.
        self.set_camera_orientation(phi=70 * DEGREES, theta=-90 * DEGREES)
        self.wait(2)

        # Green grid lines covering the XY plane.
        for i in range(-10, 10):
            y_line = Line(np.array([i, 10, 0]), np.array([i, -10, 0]))
            x_line = Line(np.array([10, i, 0]), np.array([-10, i, 0]))
            x_line.set_color(GREEN)
            y_line.set_color(GREEN)
            self.add(y_line)
            self.add(x_line)

        # Principal X/Y axes drawn red with a heavier stroke than the grid.
        y_axis = Line(np.array([0, 10, 0]), np.array([0, -10, 0]))
        y_axis.set_fill(RED, opacity=0.5)
        y_axis.stroke_width = 10
        x_axis = Line(np.array([10, 0, 0]), np.array([-10, 0, 0]))
        x_axis.set_fill(RED)
        x_axis.stroke_width = 10
        self.add(y_axis)
        self.add(x_axis)

        # Z axis in white.
        z_axis = Line(np.array([0, 0, -10]), np.array([0, 0, 10]))
        z_axis.set_color(WHITE)
        self.add(z_axis)

        # Pyramid vertices: four base corners plus the apex.
        b = [np.array([1, 1, 0]), np.array([1, 2, 0]), np.array([2, 2, 0]),
             np.array([2, 1, 0]), np.array([1.5, 1.5, 3])]

        # Grey dots marking the integer lattice points of the plane.
        for x in range(-10, 10):
            for y in range(-10, 10):
                self.add(Dot(np.array([x, y, 0]), color=DARK_GREY))

        # Reflection about the Y axis: negates the x coordinate.
        T_Y_axis_reflection = np.array([[-1, 0, 0],
                                        [0, 1, 0],
                                        [0, 0, 1]])

        # Draw the pyramid edge by edge.
        a = get_pyramid(*b)
        for i in range(len(a)):
            self.play(ShowCreation(a[i], run_time=0.5))

        # Reflect every vertex, then morph each edge to its new position.
        print(b)
        for i in range(len(b)):
            b[i] = TransformMatrix(T_Y_axis_reflection, b[i])
        print(b)
        self.wait(1)
        self.move_camera(phi=70 * DEGREES, theta=-270 * DEGREES)
        self.wait(0.5)
        z = get_pyramid(*b)
        for i in range(len(z)):
            self.play(Transform(a[i], z[i]), run_time=0.5)
        self.move_camera(phi=85 * DEGREES, theta=-270 * DEGREES)
        self.wait(2)
class Shapes2(ThreeDScene):
    """Draw the same 3D grid scene as ``Shapes``, but build a unit cube
    and animate its reflection about the Y axis.

    Cleanup vs. the original: removed the dead local CONFIG dict (assigned
    but never read), the no-op triple-quoted vertex list, and the blocks of
    commented-out experimentation code; behavior is unchanged.
    """

    def construct(self):
        # phi tilts the camera vertically, theta spins it horizontally.
        self.set_camera_orientation(phi=70 * DEGREES, theta=-90 * DEGREES)
        self.wait(2)

        # Green grid lines covering the XY plane.
        for i in range(-10, 10):
            y_line = Line(np.array([i, 10, 0]), np.array([i, -10, 0]))
            x_line = Line(np.array([10, i, 0]), np.array([-10, i, 0]))
            x_line.set_color(GREEN)
            y_line.set_color(GREEN)
            self.add(y_line)
            self.add(x_line)

        # Principal X/Y axes drawn red with a heavier stroke than the grid.
        y_axis = Line(np.array([0, 10, 0]), np.array([0, -10, 0]))
        y_axis.set_fill(RED, opacity=0.5)
        y_axis.stroke_width = 10
        x_axis = Line(np.array([10, 0, 0]), np.array([-10, 0, 0]))
        x_axis.set_fill(RED)
        x_axis.stroke_width = 10
        self.add(y_axis)
        self.add(x_axis)

        # Z axis in white.
        z_axis = Line(np.array([0, 0, -10]), np.array([0, 0, 10]))
        z_axis.set_color(WHITE)
        self.add(z_axis)

        # Cube vertices: four corners in the z=0 plane, then four at z=1.
        b = [np.array([1, 1, 0]), np.array([1, 2, 0]), np.array([2, 2, 0]), np.array([2, 1, 0]),
             np.array([1, 1, 1]), np.array([1, 2, 1]), np.array([2, 2, 1]), np.array([2, 1, 1])]

        # Grey dots marking the integer lattice points of the plane.
        for x in range(-10, 10):
            for y in range(-10, 10):
                self.add(Dot(np.array([x, y, 0]), color=DARK_GREY))

        # Reflection about the Y axis: negates the x coordinate.
        T_Y_axis_reflection = np.array([[-1, 0, 0],
                                        [0, 1, 0],
                                        [0, 0, 1]])

        # Draw the cube edge by edge.
        a = get_cube(*b)
        for i in range(len(a)):
            self.play(ShowCreation(a[i], run_time=0.5))

        # Reflect every vertex, then morph each edge to its new position.
        print(b)
        for i in range(len(b)):
            b[i] = TransformMatrix(T_Y_axis_reflection, b[i])
        print(b)
        self.wait(1)
        self.move_camera(phi=70 * DEGREES, theta=-270 * DEGREES)
        self.wait(0.5)
        z = get_cube(*b)
        for i in range(len(z)):
            self.play(Transform(a[i], z[i]), run_time=0.5)
        self.move_camera(phi=85 * DEGREES, theta=-270 * DEGREES)
        self.wait(2)
def TransformMatrix(TransMatrix, point):
    """Apply a 3x3 linear transformation to a 3-component point.

    Parameters
    ----------
    TransMatrix : array-like of shape (3, 3)
        The linear transformation to apply.
    point : np.ndarray with 3 elements
        The point to transform; any shape that flattens to 3 elements
        (e.g. (3,), (3, 1), (1, 3)) is accepted.

    Returns
    -------
    np.ndarray of shape (3,)
        The transformed point.
    """
    # The original reshaped to a (3, 1) column, multiplied, and reshaped
    # back; flattening once and using np.dot yields the identical (3,)
    # result without the round trip.
    return np.dot(TransMatrix, point.reshape(3))
| 30.991549
| 108
| 0.459644
| 1,515
| 11,002
| 3.244884
| 0.09835
| 0.133849
| 0.061839
| 0.014646
| 0.868592
| 0.868592
| 0.868592
| 0.868592
| 0.858421
| 0.858421
| 0
| 0.091257
| 0.358571
| 11,002
| 354
| 109
| 31.079096
| 0.605356
| 0.183149
| 0
| 0.73224
| 0
| 0
| 0.031838
| 0.015793
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027322
| false
| 0
| 0.021858
| 0
| 0.076503
| 0.021858
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2547a4d681f18f341916973a5c674d975a3c901
| 165
|
py
|
Python
|
backend/home/admin.py
|
crowdbotics-apps/testing-django-app-31743
|
4361e3a1e84437a59bb330547e4d4dc16fb6fbe2
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/admin.py
|
crowdbotics-apps/testing-django-app-31743
|
4361e3a1e84437a59bb330547e4d4dc16fb6fbe2
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/admin.py
|
crowdbotics-apps/testing-django-app-31743
|
4361e3a1e84437a59bb330547e4d4dc16fb6fbe2
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.contrib import admin

from .models import Sample, Userdetails

# Expose the project models in the Django admin interface.
for _model in (Userdetails, Sample):
    admin.site.register(_model)
| 20.625
| 39
| 0.812121
| 22
| 165
| 6.090909
| 0.545455
| 0.238806
| 0.298507
| 0.41791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 165
| 7
| 40
| 23.571429
| 0.911565
| 0.157576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c2dc922722ff4c452cdca4f04e4a6646f287cf32
| 26,220
|
py
|
Python
|
tests/ope/test_dr_estimators.py
|
usaito/zr-obp
|
57b4bc79d21301703a2b7e6e0c284a308194a795
|
[
"Apache-2.0"
] | null | null | null |
tests/ope/test_dr_estimators.py
|
usaito/zr-obp
|
57b4bc79d21301703a2b7e6e0c284a308194a795
|
[
"Apache-2.0"
] | null | null | null |
tests/ope/test_dr_estimators.py
|
usaito/zr-obp
|
57b4bc79d21301703a2b7e6e0c284a308194a795
|
[
"Apache-2.0"
] | null | null | null |
import re
import pytest
import numpy as np
import torch
from obp.types import BanditFeedback
from obp.ope import (
DirectMethod,
DoublyRobust,
DoublyRobustWithShrinkage,
SwitchDoublyRobust,
SelfNormalizedDoublyRobust,
)
from conftest import generate_action_dist
# prepare instances
# Module-level estimator fixtures shared by every test below.
dm = DirectMethod()
dr = DoublyRobust()
# Shrinkage extremes: lambda_=0 is asserted to match DM, lambda_=1e10 to
# (almost) match plain DR in test_dr_shrinkage_using_random_evaluation_policy.
dr_shrink_0 = DoublyRobustWithShrinkage(lambda_=0.0)
dr_shrink_max = DoublyRobustWithShrinkage(lambda_=1e10)
sndr = SelfNormalizedDoublyRobust()
# Switch-DR extremes: tau=0 and an effectively infinite tau.
switch_dr_0 = SwitchDoublyRobust(tau=0.0)
switch_dr_max = SwitchDoublyRobust(tau=1e10)
# The subset exercised by the format/robustness tests further down.
dr_estimators = [dr, dr_shrink_0, sndr, switch_dr_0]
# dr and self-normalized dr
# action_dist, action, reward, pscore, position, estimated_rewards_by_reg_model, description


def _make_invalid_dr_case(description, **overrides):
    """Return one invalid-input tuple for the DR estimators.

    Starts from a fully valid set of ndarray arguments and replaces the
    fields named in *overrides* with the invalid value under test.  The
    base fields are evaluated in the same order as the original literal
    tuples, so the module's random draws are unchanged.
    """
    args = {
        "action_dist": generate_action_dist(5, 4, 3),
        "action": np.zeros(5, dtype=int),
        "reward": np.zeros(5, dtype=int),
        "pscore": np.ones(5),
        "position": np.random.choice(3, size=5),
        "estimated_rewards_by_reg_model": np.zeros((5, 4, 3)),
    }
    args.update(overrides)
    return (
        args["action_dist"],
        args["action"],
        args["reward"],
        args["pscore"],
        args["position"],
        args["estimated_rewards_by_reg_model"],
        description,
    )


invalid_input_of_dr = [
    _make_invalid_dr_case("action must be ndarray", action=None),
    _make_invalid_dr_case("reward must be ndarray", reward=None),
    _make_invalid_dr_case("pscore must be ndarray", pscore=None),
    _make_invalid_dr_case(
        "estimated_rewards_by_reg_model must be ndarray",
        estimated_rewards_by_reg_model=None,
    ),
    _make_invalid_dr_case(
        "action elements must be non-negative integers",
        action=np.zeros(5, dtype=float),
    ),
    _make_invalid_dr_case(
        "action elements must be non-negative integers",
        action=np.zeros(5, dtype=int) - 1,
    ),
    _make_invalid_dr_case("action must be ndarray", action="4"),
    _make_invalid_dr_case(
        "action must be 1-dimensional", action=np.zeros((3, 2), dtype=int)
    ),
    _make_invalid_dr_case(
        "action elements must be smaller than the second dimension of action_dist",
        action=np.zeros(5, dtype=int) + 8,
    ),
    _make_invalid_dr_case("reward must be ndarray", reward="4"),
    _make_invalid_dr_case(
        "reward must be 1-dimensional", reward=np.zeros((3, 2), dtype=int)
    ),
    _make_invalid_dr_case(
        "action and reward must be the same size.", reward=np.zeros(4, dtype=int)
    ),
    _make_invalid_dr_case("pscore must be ndarray", pscore="4"),
    _make_invalid_dr_case("pscore must be 1-dimensional", pscore=np.ones((5, 3))),
    _make_invalid_dr_case(
        "action, reward, and pscore must be the same size.", pscore=np.ones(4)
    ),
    _make_invalid_dr_case("pscore must be positive", pscore=np.arange(5)),
    _make_invalid_dr_case(
        "estimated_rewards_by_reg_model.shape must be the same as action_dist.shape",
        estimated_rewards_by_reg_model=np.zeros((5, 4, 2)),
    ),
    _make_invalid_dr_case(
        "estimated_rewards_by_reg_model must be ndarray",
        estimated_rewards_by_reg_model="4",
    ),
]
@pytest.mark.parametrize(
    "action_dist, action, reward, pscore, position, estimated_rewards_by_reg_model, description",
    invalid_input_of_dr,
)
def test_dr_using_invalid_input_data(
    action_dist: np.ndarray,
    action: np.ndarray,
    reward: np.ndarray,
    pscore: np.ndarray,
    position: np.ndarray,
    estimated_rewards_by_reg_model: np.ndarray,
    description: str,
) -> None:
    """DR and SNDR must reject each malformed input with a ValueError."""
    call_kwargs = dict(
        action_dist=action_dist,
        action=action,
        reward=reward,
        pscore=pscore,
        position=position,
        estimated_rewards_by_reg_model=estimated_rewards_by_reg_model,
    )
    for estimator in [dr, sndr]:
        # Both the point estimate and the interval estimate must validate.
        with pytest.raises(ValueError, match=f"{description}*"):
            estimator.estimate_policy_value(**call_kwargs)
        with pytest.raises(ValueError, match=f"{description}*"):
            estimator.estimate_interval(**call_kwargs)
# dr and self-normalized dr
# action_dist, action, reward, pscore, position, estimated_rewards_by_reg_model, description


def _make_invalid_dr_tensor_case(description, **overrides):
    """Return one invalid-input tuple of the torch.Tensor flavor.

    Starts from a fully valid set of tensor arguments and replaces the
    fields named in *overrides* with the invalid value under test.  The
    base fields are evaluated in the same order as the original literal
    tuples, so the module's random draws are unchanged.
    """
    args = {
        "action_dist": torch.from_numpy(generate_action_dist(5, 4, 3)),
        "action": torch.zeros(5, dtype=torch.int64),
        "reward": torch.zeros(5, dtype=torch.int64),
        "pscore": torch.ones(5),
        "position": torch.from_numpy(np.random.choice(3, size=5)),
        "estimated_rewards_by_reg_model": torch.zeros((5, 4, 3)),
    }
    args.update(overrides)
    return (
        args["action_dist"],
        args["action"],
        args["reward"],
        args["pscore"],
        args["position"],
        args["estimated_rewards_by_reg_model"],
        description,
    )


invalid_input_tensor_of_dr = [
    _make_invalid_dr_tensor_case("action must be Tensor", action=None),
    _make_invalid_dr_tensor_case("reward must be Tensor", reward=None),
    _make_invalid_dr_tensor_case("pscore must be Tensor", pscore=None),
    _make_invalid_dr_tensor_case(
        "estimated_rewards_by_reg_model must be Tensor",
        estimated_rewards_by_reg_model=None,
    ),
    _make_invalid_dr_tensor_case(
        "action elements must be non-negative integers",
        action=torch.zeros(5, dtype=torch.float32),
    ),
    _make_invalid_dr_tensor_case(
        "action elements must be non-negative integers",
        action=torch.zeros(5, dtype=torch.int64) - 1,
    ),
    _make_invalid_dr_tensor_case("action must be Tensor", action="4"),
    _make_invalid_dr_tensor_case(
        "action must be 1-dimensional",
        action=torch.zeros((3, 2), dtype=torch.int64),
    ),
    _make_invalid_dr_tensor_case(
        "action elements must be smaller than the second dimension of action_dist",
        action=torch.zeros(5, dtype=torch.int64) + 8,
    ),
    _make_invalid_dr_tensor_case("reward must be Tensor", reward="4"),
    _make_invalid_dr_tensor_case(
        "reward must be 1-dimensional",
        reward=torch.zeros((3, 2), dtype=torch.int64),
    ),
    _make_invalid_dr_tensor_case(
        "action and reward must be the same size.",
        reward=torch.zeros(4, dtype=torch.int64),
    ),
    _make_invalid_dr_tensor_case("pscore must be Tensor", pscore="4"),
    _make_invalid_dr_tensor_case(
        "pscore must be 1-dimensional", pscore=torch.ones((5, 3))
    ),
    _make_invalid_dr_tensor_case(
        "action, reward, and pscore must be the same size.", pscore=torch.ones(4)
    ),
    _make_invalid_dr_tensor_case(
        "pscore must be positive", pscore=torch.from_numpy(np.arange(5))
    ),
    _make_invalid_dr_tensor_case(
        "estimated_rewards_by_reg_model.shape must be the same as action_dist.shape",
        estimated_rewards_by_reg_model=torch.zeros((5, 4, 2)),
    ),
    _make_invalid_dr_tensor_case(
        "estimated_rewards_by_reg_model must be Tensor",
        estimated_rewards_by_reg_model="4",
    ),
]
@pytest.mark.parametrize(
    "action_dist, action, reward, pscore, position, estimated_rewards_by_reg_model, description",
    invalid_input_tensor_of_dr,
)
def test_dr_using_invalid_input_tensor_data(
    action_dist: torch.Tensor,
    action: torch.Tensor,
    reward: torch.Tensor,
    pscore: torch.Tensor,
    position: torch.Tensor,
    estimated_rewards_by_reg_model: torch.Tensor,
    description: str,
) -> None:
    """DR and SNDR must reject each malformed tensor input with a ValueError."""
    call_kwargs = dict(
        action_dist=action_dist,
        action=action,
        reward=reward,
        pscore=pscore,
        position=position,
        estimated_rewards_by_reg_model=estimated_rewards_by_reg_model,
    )
    for estimator in [dr, sndr]:
        with pytest.raises(ValueError, match=f"{description}*"):
            estimator.estimate_policy_value_tensor(**call_kwargs)
# switch-dr
# (tau, expected error message) pairs for SwitchDoublyRobust's constructor.
invalid_input_of_switch = [
    ("a", "switching hyperparameter must be float or integer"),
    (-1.0, "switching hyperparameter must be larger than or equal to zero"),
    (np.nan, "switching hyperparameter must not be nan"),
]
@pytest.mark.parametrize("tau, description", invalid_input_of_switch)
def test_switch_using_invalid_input_data(tau: float, description: str) -> None:
    """An invalid tau must be rejected at construction time."""
    with pytest.raises(ValueError, match=f"{description}*"):
        SwitchDoublyRobust(tau=tau)
# (tau, description) pairs that the constructor must accept.
valid_input_of_switch = [
    (3.0, "float tau"),
    (2, "integer tau"),
]
@pytest.mark.parametrize("tau, description", valid_input_of_switch)
def test_switch_using_valid_input_data(tau: float, description: str) -> None:
    """A non-negative float or int tau must construct without error."""
    SwitchDoublyRobust(tau=tau)
# dr-os
# (lambda_, expected error message) pairs for DoublyRobustWithShrinkage's constructor.
invalid_input_of_shrinkage = [
    ("a", "shrinkage hyperparameter must be float or integer"),
    (-1.0, "shrinkage hyperparameter must be larger than or equal to zero"),
    (np.nan, "shrinkage hyperparameter must not be nan"),
]
@pytest.mark.parametrize("lambda_, description", invalid_input_of_shrinkage)
def test_shrinkage_using_invalid_input_data(lambda_: float, description: str) -> None:
    """An invalid lambda_ must be rejected at construction time."""
    with pytest.raises(ValueError, match=f"{description}*"):
        DoublyRobustWithShrinkage(lambda_=lambda_)
# (lambda_, description) pairs that the constructor must accept.
valid_input_of_shrinkage = [
    (3.0, "float lambda_"),
    (2, "integer lambda_"),
]
@pytest.mark.parametrize("lambda_, description", valid_input_of_shrinkage)
def test_shrinkage_using_valid_input_data(lambda_: float, description: str) -> None:
    """A non-negative float or int lambda_ must construct without error."""
    DoublyRobustWithShrinkage(lambda_=lambda_)
# dr variants
# One fully specified, valid argument set shared by the dr-variant tests:
# rewards are all zero, so the estimated policy value must be exactly 0.
valid_input_of_dr_variants = [
    (
        generate_action_dist(5, 4, 3),
        np.random.choice(4, size=5),
        np.zeros(5, dtype=int),
        np.random.uniform(low=0.5, high=1.0, size=5),
        np.random.choice(3, size=5),
        np.zeros((5, 4, 3)),
        0.5,
        "all arguments are given and len_list > 1",  # fixed "argumnents" typo
    )
]
@pytest.mark.parametrize(
    "action_dist, action, reward, pscore, position, estimated_rewards_by_reg_model, hyperparameter, description",
    valid_input_of_dr_variants,
)
def test_dr_variants_using_valid_input_data(
    action_dist: np.ndarray,
    action: np.ndarray,
    reward: np.ndarray,
    pscore: np.ndarray,
    position: np.ndarray,
    estimated_rewards_by_reg_model: np.ndarray,
    hyperparameter: float,
    description: str,
) -> None:
    """Switch-DR and DR-os must accept valid input; zero rewards give value 0."""
    variants = [
        SwitchDoublyRobust(tau=hyperparameter),
        DoublyRobustWithShrinkage(lambda_=hyperparameter),
    ]
    for estimator in variants:
        est = estimator.estimate_policy_value(
            action_dist=action_dist,
            action=action,
            reward=reward,
            pscore=pscore,
            position=position,
            estimated_rewards_by_reg_model=estimated_rewards_by_reg_model,
        )
        assert est == 0.0, f"policy value must be 0, but {est}"
# dr variants
# torch.Tensor flavor of the valid dr-variant input above; zero rewards,
# so the estimated policy value must be exactly 0.
valid_input_tensor_of_dr_variants = [
    (
        torch.from_numpy(generate_action_dist(5, 4, 3)),
        torch.from_numpy(np.random.choice(4, size=5)),
        torch.zeros(5, dtype=torch.int64),
        torch.from_numpy(np.random.uniform(low=0.5, high=1.0, size=5)),
        torch.from_numpy(np.random.choice(3, size=5)),
        torch.zeros((5, 4, 3)),
        0.5,
        "all arguments are given and len_list > 1",  # fixed "argumnents" typo
    )
]
@pytest.mark.parametrize(
    "action_dist, action, reward, pscore, position, estimated_rewards_by_reg_model, hyperparameter, description",
    valid_input_tensor_of_dr_variants,
)
def test_dr_variants_using_valid_input_tensor_data(
    action_dist: torch.Tensor,
    action: torch.Tensor,
    reward: torch.Tensor,
    pscore: torch.Tensor,
    position: torch.Tensor,
    estimated_rewards_by_reg_model: torch.Tensor,
    hyperparameter: float,
    description: str,
) -> None:
    """DR-os must accept valid tensor input; zero rewards give value 0."""
    # Switch-DR is skipped here: its tensor path is not implemented.
    dr_os = DoublyRobustWithShrinkage(lambda_=hyperparameter)
    est = dr_os.estimate_policy_value_tensor(
        action_dist=action_dist,
        action=action,
        reward=reward,
        pscore=pscore,
        position=position,
        estimated_rewards_by_reg_model=estimated_rewards_by_reg_model,
    )
    assert est.item() == 0.0, f"policy value must be 0, but {est.item()}"
def test_dr_using_random_evaluation_policy(
    synthetic_bandit_feedback: BanditFeedback, random_action_dist: np.ndarray
) -> None:
    """
    Test the format of dr variants using synthetic bandit data and random evaluation policy
    """
    # Reuse the feedback's expected reward as the regression-model output,
    # adding a trailing axis so it matches action_dist's 3-D shape.
    expected_reward = synthetic_bandit_feedback["expected_reward"][:, :, np.newaxis]
    action_dist = random_action_dist
    # prepare input dict
    input_dict = {
        k: v
        for k, v in synthetic_bandit_feedback.items()
        if k in ["reward", "action", "pscore", "position"]
    }
    input_dict["action_dist"] = action_dist
    input_dict["estimated_rewards_by_reg_model"] = expected_reward
    # dr estimators require all arguments; each must return a plain float
    for estimator in dr_estimators:
        estimated_policy_value = estimator.estimate_policy_value(**input_dict)
        assert isinstance(
            estimated_policy_value, float
        ), f"invalid type response: {estimator}"
    # remove necessary keys: the call must then fail with a TypeError
    del input_dict["reward"]
    del input_dict["pscore"]
    del input_dict["action"]
    del input_dict["estimated_rewards_by_reg_model"]
    for estimator in dr_estimators:
        with pytest.raises(
            TypeError,
            match=re.escape(
                "estimate_policy_value() missing 4 required positional arguments: 'reward', 'action', 'pscore', and 'estimated_rewards_by_reg_model'"
            ),
        ):
            _ = estimator.estimate_policy_value(**input_dict)
    # prepare input dict (torch.Tensor variant of the same feedback)
    input_tensor_dict = {
        k: v if v is None else torch.from_numpy(v)
        for k, v in synthetic_bandit_feedback.items()
        if k in ["reward", "action", "pscore", "position"]
    }
    input_tensor_dict["action_dist"] = torch.from_numpy(action_dist)
    input_tensor_dict["estimated_rewards_by_reg_model"] = torch.from_numpy(
        expected_reward
    )
    # dr estimators require all arguments; switch-dr has no tensor implementation
    for estimator in dr_estimators:
        if estimator.estimator_name == "switch-dr":
            # NOTE: "Swtich-DR" spelling below mirrors the library's actual message.
            with pytest.raises(
                NotImplementedError,
                match=re.escape(
                    "This is not implemented for Swtich-DR because it is indifferentiable."
                ),
            ):
                _ = estimator.estimate_policy_value_tensor(**input_tensor_dict)
        else:
            estimated_policy_value = estimator.estimate_policy_value_tensor(
                **input_tensor_dict
            )
            assert isinstance(
                estimated_policy_value, torch.Tensor
            ), f"invalid type response: {estimator}"
    # remove necessary keys: missing-argument TypeError for the tensor path too
    del input_tensor_dict["reward"]
    del input_tensor_dict["pscore"]
    del input_tensor_dict["action"]
    del input_tensor_dict["estimated_rewards_by_reg_model"]
    for estimator in dr_estimators:
        if estimator.estimator_name == "switch-dr":
            # switch-dr raises NotImplementedError before argument validation
            with pytest.raises(
                NotImplementedError,
                match=re.escape(
                    "This is not implemented for Swtich-DR because it is indifferentiable."
                ),
            ):
                _ = estimator.estimate_policy_value_tensor(**input_tensor_dict)
        else:
            with pytest.raises(
                TypeError,
                match=re.escape(
                    "estimate_policy_value_tensor() missing 4 required positional arguments: 'reward', 'action', 'pscore', and 'estimated_rewards_by_reg_model'"
                ),
            ):
                _ = estimator.estimate_policy_value_tensor(**input_tensor_dict)
def test_boundedness_of_sndr_using_random_evaluation_policy(
    synthetic_bandit_feedback: BanditFeedback, random_action_dist: np.ndarray
) -> None:
    """
    Test the boundedness of sndr estimators using synthetic bandit data and random evaluation policy
    """
    # Reuse the feedback's expected reward as the regression-model output,
    # adding a trailing axis so it matches action_dist's 3-D shape.
    expected_reward = synthetic_bandit_feedback["expected_reward"][:, :, np.newaxis]
    action_dist = random_action_dist
    # prepare input dict
    input_dict = {
        k: v
        for k, v in synthetic_bandit_feedback.items()
        if k in ["reward", "action", "pscore", "position"]
    }
    input_dict["action_dist"] = action_dist
    input_dict["estimated_rewards_by_reg_model"] = expected_reward
    # make pscore too small (to check the boundedness of sndr)
    input_dict["pscore"] = input_dict["pscore"] ** 3
    estimated_policy_value = sndr.estimate_policy_value(**input_dict)
    assert (
        estimated_policy_value <= 2
    ), f"estimated policy value of sndr should be smaller than or equal to 2 (because of its 2-boundedness), but the value is: {estimated_policy_value}"
    # prepare input dict (torch.Tensor variant of the same check)
    input_tensor_dict = {
        k: v if v is None else torch.from_numpy(v)
        for k, v in synthetic_bandit_feedback.items()
        if k in ["reward", "action", "pscore", "position"]
    }
    input_tensor_dict["action_dist"] = torch.from_numpy(action_dist)
    input_tensor_dict["estimated_rewards_by_reg_model"] = torch.from_numpy(
        expected_reward
    )
    # make pscore too small (to check the boundedness of sndr)
    input_tensor_dict["pscore"] = input_tensor_dict["pscore"] ** 3
    estimated_policy_value = sndr.estimate_policy_value_tensor(**input_tensor_dict)
    assert (
        estimated_policy_value.item() <= 2
    ), f"estimated policy value of sndr should be smaller than or equal to 2 (because of its 2-boundedness), but the value is: {estimated_policy_value.item()}"
def test_dr_shrinkage_using_random_evaluation_policy(
    synthetic_bandit_feedback: BanditFeedback, random_action_dist: np.ndarray
) -> None:
    """
    Test the dr shrinkage estimators using synthetic bandit data and random evaluation policy
    """
    expected_reward = synthetic_bandit_feedback["expected_reward"][:, :, np.newaxis]
    action_dist = random_action_dist
    relevant_keys = ("reward", "action", "pscore", "position")
    estimators = (dm, dr, dr_shrink_0, dr_shrink_max)
    # prepare input dict (ndarray version)
    input_dict = {
        key: value
        for key, value in synthetic_bandit_feedback.items()
        if key in relevant_keys
    }
    input_dict["action_dist"] = action_dist
    input_dict["estimated_rewards_by_reg_model"] = expected_reward
    dm_value, dr_value, dr_shrink_0_value, dr_shrink_max_value = (
        estimator.estimate_policy_value(**input_dict) for estimator in estimators
    )
    assert (
        dm_value == dr_shrink_0_value
    ), "DoublyRobustWithShrinkage (lambda=0) should be the same as DirectMethod"
    assert (
        np.abs(dr_value - dr_shrink_max_value) < 1e-5
    ), "DoublyRobustWithShrinkage (lambda=inf) should be almost the same as DoublyRobust"
    # prepare input dict (tensor version)
    input_tensor_dict = {
        key: None if value is None else torch.from_numpy(value)
        for key, value in synthetic_bandit_feedback.items()
        if key in relevant_keys
    }
    input_tensor_dict["action_dist"] = torch.from_numpy(action_dist)
    input_tensor_dict["estimated_rewards_by_reg_model"] = torch.from_numpy(
        expected_reward
    )
    dm_value, dr_value, dr_shrink_0_value, dr_shrink_max_value = (
        estimator.estimate_policy_value_tensor(**input_tensor_dict)
        for estimator in estimators
    )
    assert (
        dm_value.item() == dr_shrink_0_value.item()
    ), "DoublyRobustWithShrinkage (lambda=0) should be the same as DirectMethod"
    assert (
        np.abs(dr_value.item() - dr_shrink_max_value.item()) < 1e-5
    ), "DoublyRobustWithShrinkage (lambda=inf) should be almost the same as DoublyRobust"
def test_switch_dr_using_random_evaluation_policy(
    synthetic_bandit_feedback: BanditFeedback, random_action_dist: np.ndarray
) -> None:
    """
    Test the switch_dr using synthetic bandit data and random evaluation policy
    """
    expected_reward = synthetic_bandit_feedback["expected_reward"][:, :, np.newaxis]
    action_dist = random_action_dist
    relevant_keys = ("reward", "action", "pscore", "position")
    # prepare input dict
    input_dict = {
        key: value
        for key, value in synthetic_bandit_feedback.items()
        if key in relevant_keys
    }
    input_dict["action_dist"] = action_dist
    input_dict["estimated_rewards_by_reg_model"] = expected_reward
    dm_value, dr_value, switch_dr_0_value, switch_dr_max_value = (
        estimator.estimate_policy_value(**input_dict)
        for estimator in (dm, dr, switch_dr_0, switch_dr_max)
    )
    assert (
        dm_value == switch_dr_0_value
    ), "SwitchDR (tau=0) should be the same as DirectMethod"
    assert (
        dr_value == switch_dr_max_value
    ), "SwitchDR (tau=1e10) should be the same as DoublyRobust"
| 33.572343
| 160
| 0.625172
| 3,417
| 26,220
| 4.56892
| 0.053263
| 0.036126
| 0.013451
| 0.046246
| 0.906034
| 0.894312
| 0.883167
| 0.853062
| 0.807712
| 0.794005
| 0
| 0.028618
| 0.255034
| 26,220
| 780
| 161
| 33.615385
| 0.770645
| 0.044546
| 0
| 0.710375
| 0
| 0.005764
| 0.167868
| 0.034367
| 0
| 0
| 0
| 0
| 0.017291
| 1
| 0.017291
| false
| 0
| 0.010086
| 0
| 0.027378
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c7c2ad93bfa581fdf940b462c82e8735b236465
| 2,885
|
py
|
Python
|
automata/neighbour_functions.py
|
javiruiz/cellular-automata
|
b83c41d5eacb6a3bb36324efa5d899e310e4a500
|
[
"Apache-2.0"
] | 2
|
2019-11-08T23:34:29.000Z
|
2019-11-17T23:26:05.000Z
|
automata/neighbour_functions.py
|
javiruiz/cellular-automata
|
b83c41d5eacb6a3bb36324efa5d899e310e4a500
|
[
"Apache-2.0"
] | null | null | null |
automata/neighbour_functions.py
|
javiruiz/cellular-automata
|
b83c41d5eacb6a3bb36324efa5d899e310e4a500
|
[
"Apache-2.0"
] | 1
|
2019-11-08T23:35:08.000Z
|
2019-11-08T23:35:08.000Z
|
import numpy as np
def side_neighbours(board, x, y, size_x, size_y):
    """Count the alive orthogonal neighbours (up/down/left/right) of cell (x, y).

    Out-of-bounds positions are skipped, so edge and corner cells simply
    have fewer neighbours to sum.
    """
    alive = 0
    for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1)):
        nx, ny = x + dx, y + dy
        if 0 <= nx < size_x and 0 <= ny < size_y:
            alive += board[nx][ny]
    return alive
def corner_neighbours(board, x, y, size_x, size_y):
    """Count the alive diagonal neighbours of cell (x, y).

    Iterates over the four diagonal offsets and skips any position that
    falls outside the board, so edge/corner cells (and degenerate boards
    with a single row or column) are handled uniformly.

    Bug fixed: the previous if/elif chain assumed size_x >= 2 and
    size_y >= 2; on a 1-wide board it indexed past the end of the grid
    (e.g. board[x + 1] when x == 0 == size_x - 1) and raised IndexError.
    """
    alive = 0
    for dx in (-1, 1):
        for dy in (-1, 1):
            nx, ny = x + dx, y + dy
            if 0 <= nx < size_x and 0 <= ny < size_y:
                alive += board[nx][ny]
    return alive
def all_neighbours(board, x, y, size_x, size_y):
alive_neighbours = 0
if x == 0 and y == 0:
alive_neighbours += board[x + 1][y + 1] + board[x + 1][y] + board[x][y + 1]
elif x == 0 and y == size_y - 1:
alive_neighbours += board[x + 1][y - 1] + board[x + 1][y] + board[x][y - 1]
elif x == size_x - 1 and y == 0:
alive_neighbours += board[x - 1][y + 1] + board[x - 1][y] + board[x][y + 1]
elif x == size_x - 1 and y == size_y - 1:
alive_neighbours += board[x - 1][y - 1] + board[x - 1][y] + board[x][y - 1]
elif x == 0:
alive_neighbours += board[x + 1][y + 1] + board[x + 1][y - 1] + board[x + 1][y] \
+ board[x][y + 1] + board[x][y - 1]
elif x == size_x - 1:
alive_neighbours += board[x - 1][y + 1] + board[x - 1][y - 1] + board[x - 1][y] \
+ board[x][y + 1] + board[x][y - 1]
elif y == 0:
alive_neighbours += board[x + 1][y + 1] + board[x - 1][y + 1] + board[x + 1][y] \
+ board[x][y + 1] + board[x - 1][y]
elif y == size_y - 1:
alive_neighbours += board[x - 1][y - 1] + board[x + 1][y - 1] + board[x + 1][y] \
+ board[x][y - 1] + board[x - 1][y]
else:
alive_neighbours += board[x - 1][y - 1] + board[x + 1][y - 1] + board[x + 1][y + 1] \
+ board[x - 1][y + 1] + board[x][y + 1] + board[x][y - 1] \
+ board[x + 1][y] + board[x - 1][y]
return alive_neighbours
| 41.214286
| 93
| 0.47279
| 484
| 2,885
| 2.710744
| 0.043388
| 0.28811
| 0.245427
| 0.280488
| 0.957317
| 0.957317
| 0.900152
| 0.897104
| 0.838415
| 0.838415
| 0
| 0.064703
| 0.341075
| 2,885
| 69
| 94
| 41.811594
| 0.62546
| 0
| 0
| 0.491803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04918
| false
| 0
| 0.016393
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6c7ec498ed371b1657dd5b8076f3fc11d937fd2b
| 47,571
|
py
|
Python
|
model/road_transformer.py
|
wgcban/spin_roadmapper
|
2c1c8f22073d989753dc6f95d1f547198a76414b
|
[
"Apache-2.0"
] | 24
|
2021-09-15T00:20:52.000Z
|
2022-03-27T05:01:23.000Z
|
model/road_transformer.py
|
wgcban/spin_roadmapper
|
2c1c8f22073d989753dc6f95d1f547198a76414b
|
[
"Apache-2.0"
] | 2
|
2021-12-27T13:45:02.000Z
|
2022-03-25T13:33:20.000Z
|
model/road_transformer.py
|
wgcban/spin_roadmapper
|
2c1c8f22073d989753dc6f95d1f547198a76414b
|
[
"Apache-2.0"
] | 3
|
2021-12-27T03:11:56.000Z
|
2022-03-10T10:24:42.000Z
|
from __future__ import print_function
import math
import pdb
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
#from model.glore import GloRe_Unit_2D, GloRe_Unit_SE_2D
from model.SPIN import spin
# Flag commonly used to toggle the affine parameters of BatchNorm layers in
# ResNet-style backbones; not referenced by the code visible in this file —
# presumably kept for compatibility with related modules (verify before removing).
affine_par = True
############### Road Transformer ###############
class BasicResnetBlock(nn.Module):
    """Basic residual block: two 3x3 conv+BN stages with a ReLU between,
    an (optional) downsample path on the shortcut, and a final ReLU.

    Note: both convolutions receive the same ``stride``/``padding``.
    """

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, padding=1, downsample=None):
        super(BasicResnetBlock, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride,
                               padding=padding, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=padding, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Shortcut branch: identity unless a downsample module was supplied.
        shortcut = x if self.downsample is None else self.downsample(x)
        out = self.bn1(self.conv1(x))
        out = self.relu(out)
        out = self.bn2(self.conv2(out))
        out += shortcut
        return self.relu(out)
class DecoderBlock(nn.Module):
    """Bottlenecked decoder stage: 1x1 reduce to C/4, 3x3 transposed-conv
    upsample (stride 2), then 1x1 expand to ``n_filters``; each stage is
    followed by BatchNorm + ReLU.
    """

    def __init__(self, in_channels, n_filters, group=1):
        super(DecoderBlock, self).__init__()
        bottleneck = in_channels // 4
        # B, C, H, W -> B, C/4, H, W
        self.conv1 = nn.Conv2d(in_channels, bottleneck, 1, groups=group)
        self.norm1 = nn.BatchNorm2d(bottleneck)
        self.relu1 = nn.ReLU(inplace=True)
        # B, C/4, H, W -> B, C/4, 2H, 2W (stride-2 transposed conv)
        self.deconv2 = nn.ConvTranspose2d(bottleneck, bottleneck, 3, stride=2,
                                          padding=1, output_padding=1,
                                          groups=group)
        self.norm2 = nn.BatchNorm2d(bottleneck)
        self.relu2 = nn.ReLU(inplace=True)
        # B, C/4, 2H, 2W -> B, n_filters, 2H, 2W
        self.conv3 = nn.Conv2d(bottleneck, n_filters, 1, groups=group)
        self.norm3 = nn.BatchNorm2d(n_filters)
        self.relu3 = nn.ReLU(inplace=True)
        # He-style normal init for conv weights; unit scale / zero shift for BN.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                fan = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
                module.weight.data.normal_(0, math.sqrt(2.0 / fan))
            if isinstance(module, nn.ConvTranspose2d):
                fan = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
                module.weight.data.normal_(0, math.sqrt(2.0 / fan))
            elif isinstance(module, nn.BatchNorm2d):
                module.weight.data.fill_(1)
                module.bias.data.zero_()

    def forward(self, x):
        out = self.relu1(self.norm1(self.conv1(x)))
        out = self.relu2(self.norm2(self.deconv2(out)))
        return self.relu3(self.norm3(self.conv3(out)))
def conv1x1(in_planes, out_planes, stride=1):
    """Return a bias-free 1x1 convolution from ``in_planes`` to ``out_planes``."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
class AxialAttention(nn.Module):
    """Self-attention applied along a single spatial axis.

    Attends along the height axis by default, or along the width axis when
    ``width=True`` (implemented by permuting the tensor before/after).
    Uses multi-head attention with relative position embeddings for the
    query, key and value terms, normalised by BatchNorm layers.
    """

    def __init__(self, in_planes, out_planes, groups=8, kernel_size=56,
                 stride=1, bias=False, width=False):
        # kernel_size is the length of the attended axis (all relative
        # position indices are precomputed for exactly this length).
        assert (in_planes % groups == 0) and (out_planes % groups == 0)
        super(AxialAttention, self).__init__()
        self.in_planes = in_planes
        self.out_planes = out_planes
        self.groups = groups
        self.group_planes = out_planes // groups  # channels per attention head
        self.kernel_size = kernel_size
        self.stride = stride
        self.bias = bias  # stored but not used by any layer below
        self.width = width

        # Multi-head self attention: one 1x1 conv produces q, k and v
        # (q and k use group_planes/2 channels each, v uses group_planes).
        self.qkv_transform = qkv_transform(in_planes, out_planes * 2, kernel_size=1, stride=1,
                                           padding=0, bias=False)
        self.bn_qkv = nn.BatchNorm1d(out_planes * 2)
        self.bn_similarity = nn.BatchNorm2d(groups * 3)
        self.bn_output = nn.BatchNorm1d(out_planes * 2)

        # Position embedding: learnable relative encodings for every
        # (query, key) offset along the attended axis.
        self.relative = nn.Parameter(torch.randn(self.group_planes * 2, kernel_size * 2 - 1), requires_grad=True)
        query_index = torch.arange(kernel_size).unsqueeze(0)
        key_index = torch.arange(kernel_size).unsqueeze(1)
        # Maps each (i, j) pair to its relative-offset slot in `relative`.
        relative_index = key_index - query_index + kernel_size - 1
        self.register_buffer('flatten_index', relative_index.view(-1))
        if stride > 1:
            # Spatial downsampling applied to the attention output.
            self.pooling = nn.AvgPool2d(stride, stride=stride)

        self.reset_parameters()

    def forward(self, x):
        # pdb.set_trace()
        # Move the attended axis last and fold the other spatial axis into
        # the batch dimension, so attention runs independently per column/row.
        if self.width:
            x = x.permute(0, 2, 1, 3)
        else:
            x = x.permute(0, 3, 1, 2)  # N, W, C, H
        N, W, C, H = x.shape
        x = x.contiguous().view(N * W, C, H)

        # Transformations: joint q/k/v projection, then split per head.
        qkv = self.bn_qkv(self.qkv_transform(x))
        q, k, v = torch.split(qkv.reshape(N * W, self.groups, self.group_planes * 2, H), [self.group_planes // 2, self.group_planes // 2, self.group_planes], dim=2)

        # Calculate position embedding: gather the relative encodings into
        # (channels, H, H) tables and split into q/k/v components.
        all_embeddings = torch.index_select(self.relative, 1, self.flatten_index).view(self.group_planes * 2, self.kernel_size, self.kernel_size)
        q_embedding, k_embedding, v_embedding = torch.split(all_embeddings, [self.group_planes // 2, self.group_planes // 2, self.group_planes], dim=0)

        qr = torch.einsum('bgci,cij->bgij', q, q_embedding)
        kr = torch.einsum('bgci,cij->bgij', k, k_embedding).transpose(2, 3)
        qk = torch.einsum('bgci, bgcj->bgij', q, k)

        # The three similarity terms are batch-normalised jointly, then summed.
        stacked_similarity = torch.cat([qk, qr, kr], dim=1)
        stacked_similarity = self.bn_similarity(stacked_similarity).view(N * W, 3, self.groups, H, H).sum(dim=1)
        #stacked_similarity = self.bn_qr(qr) + self.bn_kr(kr) + self.bn_qk(qk)
        # (N, groups, H, H, W)
        similarity = F.softmax(stacked_similarity, dim=3)
        sv = torch.einsum('bgij,bgcj->bgci', similarity, v)
        sve = torch.einsum('bgij,cij->bgci', similarity, v_embedding)
        stacked_output = torch.cat([sv, sve], dim=-1).view(N * W, self.out_planes * 2, H)
        output = self.bn_output(stacked_output).view(N, W, self.out_planes, 2, H).sum(dim=-2)

        # Undo the axis permutation applied at the top of forward().
        if self.width:
            output = output.permute(0, 2, 1, 3)
        else:
            output = output.permute(0, 2, 3, 1)

        if self.stride > 1:
            output = self.pooling(output)

        return output

    def reset_parameters(self):
        """Re-initialise the qkv projection and relative position embeddings."""
        self.qkv_transform.weight.data.normal_(0, math.sqrt(1. / self.in_planes))
        #nn.init.uniform_(self.relative, -0.1, 0.1)
        nn.init.normal_(self.relative, 0., math.sqrt(1. / self.group_planes))
class AxialBlock(nn.Module):
    """Bottleneck residual block built from axial attention: 1x1 reduce,
    attention along height then width (the width pass carries the stride),
    1x1 expand by ``expansion``, with an optional downsample shortcut.
    """

    expansion = 2

    def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
                 base_width=64, dilation=1, norm_layer=None, kernel_size=56):
        super(AxialBlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        width = int(planes * (base_width / 64.))
        # Both self.conv2 and self.downsample layers downsample the input when stride != 1
        self.conv_down = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.hight_block = AxialAttention(width, width, groups=groups,
                                          kernel_size=kernel_size)
        self.width_block = AxialAttention(width, width, groups=groups,
                                          kernel_size=kernel_size,
                                          stride=stride, width=True)
        self.conv_up = conv1x1(width, planes * self.expansion)
        self.bn2 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Shortcut branch: identity unless a downsample module was supplied.
        shortcut = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.bn1(self.conv_down(x)))
        # print(out.shape)
        out = self.width_block(self.hight_block(out))
        out = self.relu(out)
        out = self.bn2(self.conv_up(out))
        out += shortcut
        return self.relu(out)
class qkv_transform(nn.Conv1d):
    """Conv1d for qkv_transform.

    A plain ``nn.Conv1d`` under a distinct name so the joint query/key/value
    projection inside ``AxialAttention`` is identifiable (e.g. in parameter
    names and module printouts).
    """
class roadtransformer(nn.Module):
    """Dual-head road extraction network.

    Pipeline: convolutional ResNet stem (conv1 + layer1..3) -> axial-attention
    encoder (trlayer1..4) -> two transposed-conv decoder branches producing a
    segmentation output (``task1_classes``) and an orientation/angle output
    (``task2_classes``). ``forward`` returns two lists of predictions, one per
    branch (each holding a coarse score map and a full-resolution map).

    The large commented-out sections are remnants of a stacked-hourglass
    variant this class was derived from; they are kept for reference.
    """

    def __init__(
        self,
        task1_classes=2,
        task2_classes=37,
        block=AxialBlock,
        s=1,  # channel-width scale factor for the transformer layers
        layers=[2,1,1,1],  # NOTE(review): mutable default — only indexed here, never mutated
        block_base=BasicResnetBlock,
        groups=8,
        width_per_group=64,
        in_channels=3,
        num_stacks=2,  # kept from the hourglass version; unused in the active path
        num_blocks=1,
        hg_num_blocks=3,  # hourglass depth parameters, unused in the active path
        depth=3,
    ):
        super(roadtransformer, self).__init__()
        self.dilation = 1
        self.groups = groups
        self.base_width = width_per_group
        replace_stride_with_dilation = [False, False, False]
        self.inplanes = 64
        self.num_feats = 128
        self.num_stacks = num_stacks
        # Convolutional stem: stride-2 7x7 conv then residual layers.
        self.conv1 = nn.Conv2d(
            in_channels, self.inplanes, kernel_size=7, stride=2, padding=3, bias=True
        )
        self.bn1 = nn.BatchNorm2d(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.layer1 = self._make_residual(block_base, self.inplanes, 1)
        self.layer2 = self._make_residual(block_base, self.inplanes, num_blocks)
        self.layer3 = self._make_residual(block_base, self.num_feats, num_blocks)
        self.maxpool = nn.MaxPool2d(2, stride=2, ceil_mode=True)

        # # build hourglass modules
        # ch = self.num_feats * block.expansion
        # hg = []
        # res_1, fc_1, score_1, _fc_1, _score_1 = [], [], [], [], []
        # res_2, fc_2, score_2, _fc_2, _score_2 = [], [], [], [], []

        ###add transformer encoder
        # img_size fixes the attended axis length; presumably inputs are
        # sized so that the stem output matches img_size//2 — verify at call site.
        img_size = 128
        self.trlayer1 = self._make_layer(block, int(128 * s), layers[0], kernel_size= (img_size//2))
        self.trlayer2 = self._make_layer(block, int(128 * s), layers[1], stride=2, kernel_size=(img_size//2),
                                         dilate=replace_stride_with_dilation[0])
        self.trlayer3 = self._make_layer(block, int(128 * s), layers[2], stride=2, kernel_size=(img_size//4),
                                         dilate=replace_stride_with_dilation[1])
        self.trlayer4 = self._make_layer(block, int(128 * s), layers[3], stride=2, kernel_size=(img_size//8),
                                         dilate=replace_stride_with_dilation[2])

        ###add conv decoder
        # Segmentation-branch decoder: four stride-2 transposed convs.
        self.trdecoder1 = nn.ConvTranspose2d(int(256*s), int(256*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.trdecoder2 = nn.ConvTranspose2d(int(256*s), int(256*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.trdecoder3 = nn.ConvTranspose2d(int(256*s), int(256*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.trdecoder4 = nn.ConvTranspose2d(int(256*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        # self.trdecoder5 = nn.Conv2d(int(256*s) , int(128*s) , kernel_size=3, stride=1, padding=1)

        # Angle-branch decoder: mirrors the segmentation decoder.
        self.tr2decoder1 = nn.ConvTranspose2d(int(256*s), int(256*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder2 = nn.ConvTranspose2d(int(256*s), int(256*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder3 = nn.ConvTranspose2d(int(256*s), int(256*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder4 = nn.ConvTranspose2d(int(256*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        # self.tr2decoder5 = nn.Conv2d(int(256*s) , int(128*s) , kernel_size=3, stride=1, padding=1)

        # for i in range(num_stacks):
        #     hg.append(HourglassModuleMTL(block, hg_num_blocks, self.num_feats, depth))
        #     res_1.append(self._make_residual(block, self.num_feats, hg_num_blocks))
        #     res_2.append(self._make_residual(block, self.num_feats, hg_num_blocks))
        #     fc_1.append(self._make_fc(ch, ch))
        #     fc_2.append(self._make_fc(ch, ch))
        #     score_1.append(nn.Conv2d(ch, task1_classes, kernel_size=1, bias=True))
        #     score_2.append(nn.Conv2d(ch, task2_classes, kernel_size=1, bias=True))
        #     if i < num_stacks - 1:
        #         _fc_1.append(nn.Conv2d(ch, ch, kernel_size=1, bias=True))
        #         _fc_2.append(nn.Conv2d(ch, ch, kernel_size=1, bias=True))
        #         _score_1.append(nn.Conv2d(task1_classes, ch, kernel_size=1, bias=True))
        #         _score_2.append(nn.Conv2d(task2_classes, ch, kernel_size=1, bias=True))
        # self.hg = nn.ModuleList(hg)
        # self.res_1 = nn.ModuleList(res_1)
        # self.fc_1 = nn.ModuleList(fc_1)
        # self.score_1 = nn.ModuleList(score_1)
        # self._fc_1 = nn.ModuleList(_fc_1)
        # self._score_1 = nn.ModuleList(_score_1)
        # self.res_2 = nn.ModuleList(res_2)
        # self.fc_2 = nn.ModuleList(fc_2)
        # self.score_2 = nn.ModuleList(score_2)
        # self._fc_2 = nn.ModuleList(_fc_2)
        # self._score_2 = nn.ModuleList(_score_2)

        # Final Classifier (segmentation head)
        self.decoder1 = DecoderBlock(self.num_feats, self.inplanes)
        self.decoder1_score = nn.Conv2d(
            self.inplanes, task1_classes, kernel_size=1, bias=True
        )
        self.finaldeconv1 = nn.ConvTranspose2d(self.inplanes, 32, 3, stride=2)
        self.finalrelu1 = nn.ReLU(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nn.ReLU(inplace=True)
        self.finalconv3 = nn.Conv2d(32, task1_classes, 2, padding=1)

        # Final Classifier (angle head)
        self.angle_decoder1 = DecoderBlock(self.num_feats, self.inplanes)
        self.angle_decoder1_score = nn.Conv2d(
            self.inplanes, task2_classes, kernel_size=1, bias=True
        )
        self.angle_finaldeconv1 = nn.ConvTranspose2d(self.inplanes, 32, 3, stride=2)
        self.angle_finalrelu1 = nn.ReLU(inplace=True)
        self.angle_finalconv2 = nn.Conv2d(32, 32, 3)
        self.angle_finalrelu2 = nn.ReLU(inplace=True)
        self.angle_finalconv3 = nn.Conv2d(32, task2_classes, 2, padding=1)

    def _make_layer(self, block, planes, blocks, kernel_size=56, stride=1, dilate=False):
        """Stack ``blocks`` axial-attention blocks; the first carries the
        stride (and downsample shortcut), later ones attend at half the
        kernel_size when a stride was applied."""
        norm_layer = nn.BatchNorm2d
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample, groups=self.groups,
                            base_width=self.base_width, dilation=previous_dilation,
                            norm_layer=norm_layer, kernel_size=kernel_size))
        self.inplanes = planes * block.expansion
        if stride != 1:
            kernel_size = kernel_size // 2
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups=self.groups,
                                base_width=self.base_width, dilation=self.dilation,
                                norm_layer=norm_layer, kernel_size=kernel_size))
        return nn.Sequential(*layers)

    def _make_residual(self, block_base, planes, blocks, stride=1):
        """Stack ``blocks`` convolutional residual blocks, adding a 1x1-conv
        downsample shortcut to the first when shape changes."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block_base.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(
                    self.inplanes,
                    planes * block_base.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=True,
                )
            )
        layers = []
        layers.append(block_base(self.inplanes, planes, stride, downsample=downsample))
        self.inplanes = planes * block_base.expansion
        for i in range(1, blocks):
            layers.append(block_base(self.inplanes, planes))
        return nn.Sequential(*layers)

    def _make_fc(self, inplanes, outplanes):
        """1x1 conv -> BN -> ReLU head (hourglass leftover; unused in forward)."""
        bn = nn.BatchNorm2d(inplanes)
        conv = nn.Conv2d(inplanes, outplanes, kernel_size=1, bias=True)
        return nn.Sequential(conv, bn, self.relu)

    def forward(self, x):
        """Return ``(out_1, out_2)``: lists of segmentation and angle
        predictions, each containing a coarse score map followed by a
        full-resolution prediction."""
        out_1 = []
        out_2 = []
        rows = x.size(2)
        cols = x.size(3)
        # Convolutional stem.
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.maxpool(x)
        x = self.layer2(x)
        x = self.layer3(x)
        # import pdb
        # pdb.set_trace()
        ## Add transformer encoder
        x1 = self.trlayer1(x)
        #print(x1.shape) # 256, 64, 64
        x2 = self.trlayer2(x1)
        #print(x2.shape) #512, 32, 32
        x3 = self.trlayer3(x2)
        #print(x3.shape) #1024, 16, 16
        x4 = self.trlayer4(x3)
        #print(x4.shape) #2048, 8, 8
        ## Add conv decoder -seg branch (upsample with skip additions from the encoder)
        y1 = F.relu(self.trdecoder1(x4))
        #print(y1.shape)
        y1 = torch.add(y1, x3)
        y1 = F.relu(self.trdecoder2(y1))
        #print(y1.shape)
        y1 = torch.add(y1, x2)
        y1 = F.relu(self.trdecoder3(y1))
        #print(y1.shape)
        y1 = torch.add(y1, x1)
        y1 = F.relu(self.trdecoder4(y1))
        #print(y1.shape)
        #print(y1.shape)
        # y1 = F.relu(F.interpolate(self.trdecoder5(y1) , scale_factor=(2,2), mode ='bilinear'))
        ## Add angle decoder (same skip structure as the seg branch)
        y2 = F.relu(self.tr2decoder1(x4))
        y2 = torch.add(y2, x3)
        y2 = F.relu(self.tr2decoder2(y2))
        y2 = torch.add(y2, x2)
        y2 = F.relu(self.tr2decoder3(y2))
        y2 = torch.add(y2, x1)
        y2 = F.relu(self.tr2decoder4(y2))
        # y2 = F.relu(F.interpolate(self.tr2decoder5(y2) , scale_factor=(2,2), mode ='bilinear'))

        # for i in range(self.num_stacks):
        #     y1, y2 = self.hg[i](x)
        #     y1, y2 = self.res_1[i](y1), self.res_2[i](y2)
        #     y1, y2 = self.fc_1[i](y1), self.fc_2[i](y2)
        #     score1, score2 = self.score_1[i](y1), self.score_2[i](y2)
        #     out_1.append(
        #         score1[:, :, : int(math.ceil(rows / 4.0)), : int(math.ceil(cols / 4.0))]
        #     )
        #     # out_2.append(
        #     #     score2[:, :, : int(math.ceil(rows / 4.0)), : int(math.ceil(cols / 4.0))]
        #     # )
        #     if i < self.num_stacks - 1:
        #         _fc_1, _fc_2 = self._fc_1[i](y1), self._fc_2[i](y2)
        #         _score_1, _score_2 = self._score_1[i](score1), self._score_2[i](score2)
        #         x = x + _fc_1 + _score_1 + _fc_2 + _score_2

        # Final Classification (segmentation branch); output is cropped to half
        # the input resolution before scoring.
        # import pdb
        # pdb.set_trace()
        d1 = self.decoder1(y1)[
            :, :, : int(math.ceil(rows / 2.0)), : int(math.ceil(cols / 2.0))
        ]
        d1_score = F.interpolate(self.decoder1_score(d1), scale_factor=(0.5,0.5), mode ='bilinear')
        out_1.append(d1_score)
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        out_1.append(f5)
        # Final Classification (angle branch)
        a_d1 = self.angle_decoder1(y2)[
            :, :, : int(math.ceil(rows / 2.0)), : int(math.ceil(cols / 2.0))
        ]
        a_d1_score = F.interpolate(self.angle_decoder1_score(a_d1), scale_factor=(0.5,0.5), mode ='bilinear')
        out_2.append(a_d1_score)
        a_f1 = self.angle_finaldeconv1(a_d1)
        a_f2 = self.angle_finalrelu1(a_f1)
        a_f3 = self.angle_finalconv2(a_f2)
        a_f4 = self.angle_finalrelu2(a_f3)
        a_f5 = self.angle_finalconv3(a_f4)
        out_2.append(a_f5)
        # print(out_1.shape, out_2.shape)
        # import pdb
        # pdb.set_trace()
        return out_1, out_2
class roadtransformerv1(nn.Module):
    """Single-head (segmentation only) variant of ``roadtransformer``.

    As written, ``forward`` only uses ``trlayer1`` of the encoder — the
    deeper encoder layers and the ``trdecoder*`` modules are constructed
    but bypassed (the corresponding calls are commented out), so this is
    presumably an ablation/experimental configuration.
    """

    def __init__(
        self,
        task1_classes=2,
        block=AxialBlock,
        s=1,  # channel-width scale factor for the transformer layers
        layers=[1,1,1,1],  # NOTE(review): mutable default — only indexed here, never mutated
        block_base=BasicResnetBlock,
        groups=8,
        width_per_group=64,
        in_channels=3,
        num_stacks=2,
        num_blocks=1,
        hg_num_blocks=3,
        depth=3,
    ):
        super(roadtransformerv1, self).__init__()
        self.dilation = 1
        self.groups = groups
        self.base_width = width_per_group
        replace_stride_with_dilation = [False, False, False]
        self.inplanes = 64
        self.num_feats = 128
        self.num_stacks = num_stacks
        # Convolutional stem.
        self.conv1 = nn.Conv2d(
            in_channels, self.inplanes, kernel_size=7, stride=2, padding=3, bias=True
        )
        self.bn1 = nn.BatchNorm2d(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.layer1 = self._make_residual(block_base, self.inplanes, 1)
        self.layer2 = self._make_residual(block_base, self.inplanes, num_blocks)
        self.layer3 = self._make_residual(block_base, self.num_feats, num_blocks)
        self.maxpool = nn.MaxPool2d(2, stride=2, ceil_mode=True)
        ###add transformer encoder
        img_size = 128
        self.trlayer1 = self._make_layer(block, int(64 * s), layers[0], kernel_size= (img_size//2))
        self.trlayer2 = self._make_layer(block, int(64 * s), layers[1], stride=2, kernel_size=(img_size//2),
                                         dilate=replace_stride_with_dilation[0])
        self.trlayer3 = self._make_layer(block, int(64 * s), layers[2], stride=2, kernel_size=(img_size//4),
                                         dilate=replace_stride_with_dilation[1])
        self.trlayer4 = self._make_layer(block, int(64 * s), layers[3], stride=2, kernel_size=(img_size//8),
                                         dilate=replace_stride_with_dilation[2])
        #Skip connection
        self.skip128 = nn.Conv2d(64, 128, 1, 1, 0)
        ###add conv decoder (currently unused in forward — see class docstring)
        self.trdecoder1 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.trdecoder2 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.trdecoder3 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.trdecoder4 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        # Final Classifier
        self.decoder1 = DecoderBlock(self.num_feats, self.inplanes)
        self.decoder1_score = nn.Conv2d(
            self.inplanes, task1_classes, kernel_size=1, bias=True
        )
        self.finaldeconv1 = nn.ConvTranspose2d(self.inplanes, 32, 3, stride=2)
        self.finalrelu1 = nn.ReLU(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nn.ReLU(inplace=True)
        self.finalconv3 = nn.Conv2d(32, task1_classes, 2, padding=1)

    def _make_layer(self, block, planes, blocks, kernel_size=56, stride=1, dilate=False):
        """Stack ``blocks`` axial-attention blocks (same logic as roadtransformer)."""
        norm_layer = nn.BatchNorm2d
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample, groups=self.groups,
                            base_width=self.base_width, dilation=previous_dilation,
                            norm_layer=norm_layer, kernel_size=kernel_size))
        self.inplanes = planes * block.expansion
        if stride != 1:
            kernel_size = kernel_size // 2
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups=self.groups,
                                base_width=self.base_width, dilation=self.dilation,
                                norm_layer=norm_layer, kernel_size=kernel_size))
        return nn.Sequential(*layers)

    def _make_residual(self, block_base, planes, blocks, stride=1):
        """Stack ``blocks`` convolutional residual blocks."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block_base.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(
                    self.inplanes,
                    planes * block_base.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=True,
                )
            )
        layers = []
        layers.append(block_base(self.inplanes, planes, stride, downsample=downsample))
        self.inplanes = planes * block_base.expansion
        for i in range(1, blocks):
            layers.append(block_base(self.inplanes, planes))
        return nn.Sequential(*layers)

    def _make_fc(self, inplanes, outplanes):
        """1x1 conv -> BN -> ReLU head (unused in forward)."""
        bn = nn.BatchNorm2d(inplanes)
        conv = nn.Conv2d(inplanes, outplanes, kernel_size=1, bias=True)
        return nn.Sequential(conv, bn, self.relu)

    def forward(self, x):
        """Return a single full-resolution segmentation prediction tensor."""
        rows = x.size(2)
        cols = x.size(3)
        # Convolutional stem.
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.maxpool(x)
        x = self.layer2(x)
        x = self.layer3(x)
        ## Add transformer encoder (only the first layer is active here)
        #print(x.shape)
        x1 = self.trlayer1(x)
        #print(x1.shape)
        #x2 = self.trlayer2(x1)
        #x3 = self.trlayer3(x2)
        #x4 = self.trlayer4(x3)
        ## Add conv decoder -seg branch
        #y1 = F.relu(self.trdecoder1(x4))
        #y1 = torch.add(y1, x3)
        #y1 = F.relu(self.trdecoder2(y1))
        #y1 = torch.add(y1, x2)
        #y1 = F.relu(self.trdecoder3(x2))
        #y1 = torch.add(y1, x1)
        #y1 = F.relu(self.trdecoder4(x1))
        #y1 = torch.add(y1, x128)
        y1 = x1
        # Final Classification; output is cropped to half the input resolution.
        d1 = self.decoder1(y1)[
            :, :, : int(math.ceil(rows / 2.0)), : int(math.ceil(cols / 2.0))
        ]
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
################### ROAD TRANSFORMER V2 ######################
# one block - three axial attention layers and three transpose convolution layers
class roadtransformerv2(nn.Module):
def __init__(
self,
task1_classes=2,
block=AxialBlock,
s=1,
layers=[1,1,1,1],
block_base=BasicResnetBlock,
groups=8,
width_per_group=64,
in_channels=3,
num_stacks=2,
num_blocks=1,
hg_num_blocks=3,
depth=3,
):
super(roadtransformerv2, self).__init__()
self.dilation = 1
self.groups = groups
self.base_width = width_per_group
replace_stride_with_dilation = [False, False, False]
self.inplanes = 64
self.num_feats = 128
self.num_stacks = num_stacks
self.conv1 = nn.Conv2d(
in_channels, self.inplanes, kernel_size=7, stride=2, padding=3, bias=True
)
self.bn1 = nn.BatchNorm2d(self.inplanes)
self.relu = nn.ReLU(inplace=True)
self.layer1 = self._make_residual(block_base, self.inplanes, 1)
self.layer2 = self._make_residual(block_base, self.inplanes, num_blocks)
self.layer3 = self._make_residual(block_base, self.num_feats, num_blocks)
self.maxpool = nn.MaxPool2d(2, stride=2, ceil_mode=True)
###add transformer encoder
img_size = 128
#FIRST TRANSFORMER LAYER
self.trlayer1 = self._make_layer(block, int(64 * s), layers[0], kernel_size= (img_size//2))
self.trlayer2 = self._make_layer(block, int(64 * s), layers[1], stride=2, kernel_size=(img_size//2), dilate=replace_stride_with_dilation[0])
self.trlayer3 = self._make_layer(block, int(64 * s), layers[2], stride=2, kernel_size=(img_size//4), dilate=replace_stride_with_dilation[1])
self.trlayer4 = self._make_layer(block, int(64 * s), layers[3], stride=2, kernel_size=(img_size//8), dilate=replace_stride_with_dilation[2])
#FIRST DECORDER LAYER
self.trdecoder1 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
self.trdecoder2 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
self.trdecoder3 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
self.trdecoder4 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
# Final Classifier
self.decoder1 = DecoderBlock(self.num_feats, self.inplanes)
self.decoder1_score = nn.Conv2d(
self.inplanes, task1_classes, kernel_size=1, bias=True
)
self.finaldeconv1 = nn.ConvTranspose2d(self.inplanes, 32, 3, stride=2)
self.finalrelu1 = nn.ReLU(inplace=True)
self.finalconv2 = nn.Conv2d(32, 32, 3)
self.finalrelu2 = nn.ReLU(inplace=True)
self.finalconv3 = nn.Conv2d(32, task1_classes, 2, padding=1)
def _make_layer(self, block, planes, blocks, kernel_size=56, stride=1, dilate=False):
norm_layer = nn.BatchNorm2d
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
conv1x1(self.inplanes, planes * block.expansion, stride),
norm_layer(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample, groups=self.groups,
base_width=self.base_width, dilation=previous_dilation,
norm_layer=norm_layer, kernel_size=kernel_size))
self.inplanes = planes * block.expansion
if stride != 1:
kernel_size = kernel_size // 2
for _ in range(1, blocks):
layers.append(block(self.inplanes, planes, groups=self.groups,
base_width=self.base_width, dilation=self.dilation,
norm_layer=norm_layer, kernel_size=kernel_size))
return nn.Sequential(*layers)
    def _make_residual(self, block_base, planes, blocks, stride=1):
        """Build one stage of plain residual blocks.

        Side effect: advances ``self.inplanes`` to ``planes * block_base.expansion``,
        so the order of ``_make_residual`` calls in ``__init__`` matters.
        """
        downsample = None
        # 1x1 conv shortcut when the residual path changes shape.
        if stride != 1 or self.inplanes != planes * block_base.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(
                    self.inplanes,
                    planes * block_base.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=True,
                )
            )
        layers = []
        layers.append(block_base(self.inplanes, planes, stride, downsample=downsample))
        self.inplanes = planes * block_base.expansion
        for i in range(1, blocks):
            layers.append(block_base(self.inplanes, planes))
        return nn.Sequential(*layers)
def _make_fc(self, inplanes, outplanes):
bn = nn.BatchNorm2d(inplanes)
conv = nn.Conv2d(inplanes, outplanes, kernel_size=1, bias=True)
return nn.Sequential(conv, bn, self.relu)
def forward(self, x):
rows = x.size(2)
cols = x.size(3)
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.layer1(x)
x = self.maxpool(x)
x = self.layer2(x)
x = self.layer3(x)
## Add transformer encoder
#print(x.shape)
x1 = self.trlayer1(x) #64
x2 = self.trlayer2(x1) #32
x3 = self.trlayer3(x2) #16
x4 = self.trlayer4(x3) #8
## Add conv decoder -seg branch
y1 = F.relu(self.trdecoder1(x4)) #16
y1 = torch.add(y1, x3)
y1 = F.relu(self.trdecoder2(y1)) #32
y1 = torch.add(y1, x2)
y1 = F.relu(self.trdecoder3(y1)) #64
y1 = torch.add(y1, x1)
# Final Classification
d1 = self.decoder1(y1)[
:, :, : int(math.ceil(rows / 2.0)), : int(math.ceil(cols / 2.0))
]
f1 = self.finaldeconv1(d1)
f2 = self.finalrelu1(f1)
f3 = self.finalconv2(f2)
f4 = self.finalrelu2(f3)
f5 = self.finalconv3(f4)
return f5
################### ROAD TRANSFORMER V3 ######################
# one block - three axial attention layers and three transpose convolution layers
class roadtransformerv3(nn.Module):
    """Road-segmentation network: residual stem, two stacked axial-attention
    encoder/decoder stages joined by an additive skip from the stem, and a
    convolutional classification head emitting ``task1_classes`` channels.
    """
    def __init__(
        self,
        task1_classes=2,
        block=AxialBlock,
        s=1,  # width multiplier for the axial stages
        layers=[1,1,1,1],  # NOTE(review): mutable default; only read here, so benign
        block_base=BasicResnetBlock,
        groups=8,
        width_per_group=64,
        in_channels=3,
        num_stacks=2,
        num_blocks=1,
        hg_num_blocks=3,  # unused in this class
        depth=3,  # unused in this class
    ):
        super(roadtransformerv3, self).__init__()
        self.dilation = 1
        self.groups = groups
        self.base_width = width_per_group
        replace_stride_with_dilation = [False, False, False]  # keep real strides everywhere
        self.inplanes = 64
        self.num_feats = 128
        self.num_stacks = num_stacks
        # Stem: 7x7 stride-2 conv -> BN -> ReLU.
        self.conv1 = nn.Conv2d(
            in_channels, self.inplanes, kernel_size=7, stride=2, padding=3, bias=True
        )
        self.bn1 = nn.BatchNorm2d(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        # Residual stem stages; _make_residual advances self.inplanes as it goes.
        self.layer1 = self._make_residual(block_base, self.inplanes, 1)
        self.layer2 = self._make_residual(block_base, self.inplanes, num_blocks)
        self.layer3 = self._make_residual(block_base, self.num_feats, num_blocks)
        self.maxpool = nn.MaxPool2d(2, stride=2, ceil_mode=True)
        ### add transformer encoder
        img_size = 128  # assumes 128x128 features entering the axial stages -- TODO confirm
        # FIRST TRANSFORMER LAYER (axial-attention encoder; strided stages halve spatial size)
        self.tr1layer1 = self._make_layer(block, int(64 * s), layers[0], kernel_size= (img_size//2))
        self.tr1layer2 = self._make_layer(block, int(64 * s), layers[1], stride=2, kernel_size=(img_size//2), dilate=replace_stride_with_dilation[0])
        self.tr1layer3 = self._make_layer(block, int(64 * s), layers[2], stride=2, kernel_size=(img_size//4), dilate=replace_stride_with_dilation[1])
        self.tr1layer4 = self._make_layer(block, int(64 * s), layers[3], stride=2, kernel_size=(img_size//8), dilate=replace_stride_with_dilation[2])
        # FIRST DECODER LAYER (transpose convs; each doubles the spatial size)
        self.tr1decoder1 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr1decoder2 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr1decoder3 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        # SECOND TRANSFORMER LAYER
        self.tr2layer1 = self._make_layer(block, int(64 * s), layers[0], kernel_size= (img_size//2))
        self.tr2layer2 = self._make_layer(block, int(64 * s), layers[1], stride=2, kernel_size=(img_size//2), dilate=replace_stride_with_dilation[0])
        self.tr2layer3 = self._make_layer(block, int(64 * s), layers[2], stride=2, kernel_size=(img_size//4), dilate=replace_stride_with_dilation[1])
        self.tr2layer4 = self._make_layer(block, int(64 * s), layers[3], stride=2, kernel_size=(img_size//8), dilate=replace_stride_with_dilation[2])
        # SECOND DECODER LAYER
        self.tr2decoder1 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder2 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder3 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        # Final Classifier
        self.decoder1 = DecoderBlock(self.num_feats, self.inplanes)
        # NOTE(review): decoder1_score is constructed but never used in forward().
        self.decoder1_score = nn.Conv2d(
            self.inplanes, task1_classes, kernel_size=1, bias=True
        )
        self.finaldeconv1 = nn.ConvTranspose2d(self.inplanes, 32, 3, stride=2)
        self.finalrelu1 = nn.ReLU(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nn.ReLU(inplace=True)
        self.finalconv3 = nn.Conv2d(32, task1_classes, 2, padding=1)

    def _make_layer(self, block, planes, blocks, kernel_size=56, stride=1, dilate=False):
        """Build one stage of axial-attention blocks.

        Side effects: advances ``self.inplanes`` to ``planes * block.expansion``
        and, when ``dilate`` is true, converts the stride into dilation.
        """
        norm_layer = nn.BatchNorm2d
        downsample = None
        previous_dilation = self.dilation  # first block uses the pre-update dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        # Project the identity path whenever shape changes (stride or channels).
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample, groups=self.groups,
                            base_width=self.base_width, dilation=previous_dilation,
                            norm_layer=norm_layer, kernel_size=kernel_size))
        self.inplanes = planes * block.expansion  # subsequent blocks see the expanded width
        if stride != 1:
            kernel_size = kernel_size // 2  # the strided first block halved the feature map
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups=self.groups,
                                base_width=self.base_width, dilation=self.dilation,
                                norm_layer=norm_layer, kernel_size=kernel_size))
        return nn.Sequential(*layers)

    def _make_residual(self, block_base, planes, blocks, stride=1):
        """Build one stage of plain residual blocks; advances ``self.inplanes``."""
        downsample = None
        # 1x1 conv shortcut when the residual path changes shape.
        if stride != 1 or self.inplanes != planes * block_base.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(
                    self.inplanes,
                    planes * block_base.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=True,
                )
            )
        layers = []
        layers.append(block_base(self.inplanes, planes, stride, downsample=downsample))
        self.inplanes = planes * block_base.expansion
        for i in range(1, blocks):
            layers.append(block_base(self.inplanes, planes))
        return nn.Sequential(*layers)

    def _make_fc(self, inplanes, outplanes):
        """1x1 conv + BN + shared ReLU.

        NOTE(review): BN is sized for ``inplanes`` but runs after a conv that
        outputs ``outplanes`` channels; only valid when they are equal.
        """
        bn = nn.BatchNorm2d(inplanes)
        conv = nn.Conv2d(inplanes, outplanes, kernel_size=1, bias=True)
        return nn.Sequential(conv, bn, self.relu)

    def forward(self, x):
        """Stem -> axial stage 1 -> decode -> skip-add -> axial stage 2 ->
        decode -> classification head cropped to half the input size.
        Inline # numbers are nominal spatial sizes."""
        rows = x.size(2)
        cols = x.size(3)
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.maxpool(x)
        x = self.layer2(x)
        x = self.layer3(x)
        ## FIRST TRANSFORMER LAYER
        x1 = self.tr1layer1(x) #64
        x2 = self.tr1layer2(x1) #32
        x3 = self.tr1layer3(x2) #16
        x4 = self.tr1layer4(x3) #8
        y1 = F.relu(self.tr1decoder1(x4)) #16
        y1 = torch.add(y1, x3)
        y1 = F.relu(self.tr1decoder2(y1)) #32
        y1 = torch.add(y1, x2)
        y1 = F.relu(self.tr1decoder3(y1)) #64
        ## SECOND TRANSFORMER LAYER
        y1 = torch.add(y1, x)  # skip from the stem features into stage 2
        x1 = self.tr2layer1(y1) #64
        x2 = self.tr2layer2(x1) #32
        x3 = self.tr2layer3(x2) #16
        x4 = self.tr2layer4(x3) #8
        y1 = F.relu(self.tr2decoder1(x4)) #16
        y1 = torch.add(y1, x3)
        y1 = F.relu(self.tr2decoder2(y1)) #32
        y1 = torch.add(y1, x2)
        y1 = F.relu(self.tr2decoder3(y1)) #64
        # Final Classification (crop to half the input resolution)
        d1 = self.decoder1(y1)[
            :, :, : int(math.ceil(rows / 2.0)), : int(math.ceil(cols / 2.0))
        ]
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
################### ROAD TRANSFORMER V4 ######################
# one block - three axial attention layers and three transpose convolution layers
class roadtransformerv4(nn.Module):
    """Like roadtransformerv3 but with a wider default multiplier (s=5) and
    decoders that project back to 128 channels at the end of each stage so the
    second axial stage restarts from ``self.inplanes = 128``.
    """
    def __init__(
        self,
        task1_classes=2,
        block=AxialBlock,
        s=5,  # width multiplier for the axial stages
        layers=[1,1,1,1],  # NOTE(review): mutable default; only read here, so benign
        block_base=BasicResnetBlock,
        groups=8,
        width_per_group=64,
        in_channels=3,
        num_stacks=2,
        num_blocks=1,
        hg_num_blocks=3,  # unused in this class
        depth=3,  # unused in this class
    ):
        super(roadtransformerv4, self).__init__()
        self.dilation = 1
        self.groups = groups
        self.base_width = width_per_group
        replace_stride_with_dilation = [False, False, False]  # keep real strides everywhere
        self.inplanes = 64
        self.num_feats = 128
        self.num_stacks = num_stacks
        # Stem: 7x7 stride-2 conv -> BN -> ReLU.
        self.conv1 = nn.Conv2d(
            in_channels, self.inplanes, kernel_size=7, stride=2, padding=3, bias=True
        )
        self.bn1 = nn.BatchNorm2d(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        # Residual stem stages; _make_residual advances self.inplanes as it goes.
        self.layer1 = self._make_residual(block_base, self.inplanes, 1)
        self.layer2 = self._make_residual(block_base, self.inplanes, num_blocks)
        self.layer3 = self._make_residual(block_base, self.num_feats, num_blocks)
        self.maxpool = nn.MaxPool2d(2, stride=2, ceil_mode=True)
        ### add transformer encoder
        img_size = 128  # assumes 128x128 features entering the axial stages -- TODO confirm
        # FIRST TRANSFORMER LAYER
        self.tr1layer1 = self._make_layer(block, int(64 * s), layers[0], kernel_size= (img_size//2))
        self.tr1layer2 = self._make_layer(block, int(64 * s), layers[1], stride=2, kernel_size=(img_size//2), dilate=replace_stride_with_dilation[0])
        self.tr1layer3 = self._make_layer(block, int(64 * s), layers[2], stride=2, kernel_size=(img_size//4), dilate=replace_stride_with_dilation[1])
        self.tr1layer4 = self._make_layer(block, int(64 * s), layers[3], stride=2, kernel_size=(img_size//8), dilate=replace_stride_with_dilation[2])
        # FIRST DECODER LAYER; the last decoder projects back down to 128 channels.
        self.tr1decoder1 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr1decoder2 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr1decoder3 = nn.ConvTranspose2d(int(128*s), int(128), kernel_size=3, stride=2, padding=1, output_padding=1)
        # Reset so the second axial stage starts from 128-channel inputs
        # (the _make_layer calls above advanced self.inplanes).
        self.inplanes = 128
        # SECOND TRANSFORMER LAYER
        self.tr2layer1 = self._make_layer(block, int(64 * s), layers[0], kernel_size= (img_size//2))
        self.tr2layer2 = self._make_layer(block, int(64 * s), layers[1], stride=2, kernel_size=(img_size//2), dilate=replace_stride_with_dilation[0])
        self.tr2layer3 = self._make_layer(block, int(64 * s), layers[2], stride=2, kernel_size=(img_size//4), dilate=replace_stride_with_dilation[1])
        self.tr2layer4 = self._make_layer(block, int(64 * s), layers[3], stride=2, kernel_size=(img_size//8), dilate=replace_stride_with_dilation[2])
        # SECOND DECODER LAYER; again ends at 128 channels for the classifier.
        self.tr2decoder1 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder2 = nn.ConvTranspose2d(int(128*s), int(128*s), kernel_size=3, stride=2, padding=1, output_padding=1)
        self.tr2decoder3 = nn.ConvTranspose2d(int(128*s), int(128), kernel_size=3, stride=2, padding=1, output_padding=1)
        # Final Classifier
        self.decoder1 = DecoderBlock(self.num_feats, self.inplanes)
        # NOTE(review): decoder1_score is constructed but never used in forward().
        self.decoder1_score = nn.Conv2d(
            self.inplanes, task1_classes, kernel_size=1, bias=True
        )
        self.finaldeconv1 = nn.ConvTranspose2d(self.inplanes, 32, 3, stride=2)
        self.finalrelu1 = nn.ReLU(inplace=True)
        self.finalconv2 = nn.Conv2d(32, 32, 3)
        self.finalrelu2 = nn.ReLU(inplace=True)
        self.finalconv3 = nn.Conv2d(32, task1_classes, 2, padding=1)

    def _make_layer(self, block, planes, blocks, kernel_size=56, stride=1, dilate=False):
        """Build one stage of axial-attention blocks.

        Side effects: advances ``self.inplanes`` to ``planes * block.expansion``
        and, when ``dilate`` is true, converts the stride into dilation.
        """
        norm_layer = nn.BatchNorm2d
        downsample = None
        previous_dilation = self.dilation  # first block uses the pre-update dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        # Project the identity path whenever shape changes (stride or channels).
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample, groups=self.groups,
                            base_width=self.base_width, dilation=previous_dilation,
                            norm_layer=norm_layer, kernel_size=kernel_size))
        self.inplanes = planes * block.expansion  # subsequent blocks see the expanded width
        if stride != 1:
            kernel_size = kernel_size // 2  # the strided first block halved the feature map
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups=self.groups,
                                base_width=self.base_width, dilation=self.dilation,
                                norm_layer=norm_layer, kernel_size=kernel_size))
        return nn.Sequential(*layers)

    def _make_residual(self, block_base, planes, blocks, stride=1):
        """Build one stage of plain residual blocks; advances ``self.inplanes``."""
        downsample = None
        # 1x1 conv shortcut when the residual path changes shape.
        if stride != 1 or self.inplanes != planes * block_base.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(
                    self.inplanes,
                    planes * block_base.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=True,
                )
            )
        layers = []
        layers.append(block_base(self.inplanes, planes, stride, downsample=downsample))
        self.inplanes = planes * block_base.expansion
        for i in range(1, blocks):
            layers.append(block_base(self.inplanes, planes))
        return nn.Sequential(*layers)

    def _make_fc(self, inplanes, outplanes):
        """1x1 conv + BN + shared ReLU.

        NOTE(review): BN is sized for ``inplanes`` but runs after a conv that
        outputs ``outplanes`` channels; only valid when they are equal.
        """
        bn = nn.BatchNorm2d(inplanes)
        conv = nn.Conv2d(inplanes, outplanes, kernel_size=1, bias=True)
        return nn.Sequential(conv, bn, self.relu)

    def forward(self, x):
        """Stem -> axial stage 1 -> decode -> skip-add -> axial stage 2 ->
        decode -> classification head cropped to half the input size.
        Inline # numbers are nominal spatial sizes."""
        rows = x.size(2)
        cols = x.size(3)
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.layer1(x)
        x = self.maxpool(x)
        x = self.layer2(x)
        x = self.layer3(x)
        ## FIRST TRANSFORMER LAYER
        x1 = self.tr1layer1(x) #64
        x2 = self.tr1layer2(x1) #32
        x3 = self.tr1layer3(x2) #16
        x4 = self.tr1layer4(x3) #8
        y1 = F.relu(self.tr1decoder1(x4)) #16
        y1 = torch.add(y1, x3)
        y1 = F.relu(self.tr1decoder2(y1)) #32
        y1 = torch.add(y1, x2)
        y1 = F.relu(self.tr1decoder3(y1)) #64
        ## SECOND TRANSFORMER LAYER
        y1 = torch.add(y1, x)  # skip from the stem features into stage 2
        x1 = self.tr2layer1(y1) #64
        x2 = self.tr2layer2(x1) #32
        x3 = self.tr2layer3(x2) #16
        x4 = self.tr2layer4(x3) #8
        y1 = F.relu(self.tr2decoder1(x4)) #16
        y1 = torch.add(y1, x3)
        y1 = F.relu(self.tr2decoder2(y1)) #32
        y1 = torch.add(y1, x2)
        y1 = F.relu(self.tr2decoder3(y1)) #64
        # Final Classification (crop to half the input resolution)
        d1 = self.decoder1(y1)[
            :, :, : int(math.ceil(rows / 2.0)), : int(math.ceil(cols / 2.0))
        ]
        f1 = self.finaldeconv1(d1)
        f2 = self.finalrelu1(f1)
        f3 = self.finalconv2(f2)
        f4 = self.finalrelu2(f3)
        f5 = self.finalconv3(f4)
        return f5
| 40.348601
| 164
| 0.592231
| 6,214
| 47,571
| 4.374155
| 0.050209
| 0.052242
| 0.034436
| 0.020014
| 0.83242
| 0.807145
| 0.787204
| 0.771421
| 0.762297
| 0.760237
| 0
| 0.05601
| 0.282777
| 47,571
| 1,178
| 165
| 40.382852
| 0.740643
| 0.101617
| 0
| 0.732411
| 0
| 0
| 0.002408
| 0
| 0
| 0
| 0
| 0
| 0.001153
| 1
| 0.040369
| false
| 0
| 0.009227
| 0
| 0.092272
| 0.001153
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6663c693aef1fba9be33c1808e96b0df3c6175ce
| 90
|
py
|
Python
|
rio_tiler_pds/cbers/aws/__init__.py
|
cogeotiff/rio-tiler-pds
|
52482c80baf7fd26cf06cd2af2961cca396b20e0
|
[
"BSD-3-Clause"
] | 30
|
2020-07-21T23:32:14.000Z
|
2022-02-21T23:35:35.000Z
|
rio_tiler_pds/cbers/aws/__init__.py
|
cogeotiff/rio-tiler-pds
|
52482c80baf7fd26cf06cd2af2961cca396b20e0
|
[
"BSD-3-Clause"
] | 36
|
2020-07-21T20:48:51.000Z
|
2021-10-06T08:15:00.000Z
|
rio_tiler_pds/cbers/aws/__init__.py
|
cogeotiff/rio-tiler-pds
|
52482c80baf7fd26cf06cd2af2961cca396b20e0
|
[
"BSD-3-Clause"
] | 4
|
2020-07-23T06:19:30.000Z
|
2021-11-18T03:27:04.000Z
|
"""rio-tiler-pds.cbers"""
from rio_tiler_pds.cbers.aws.cbers4 import CBERSReader # noqa
| 22.5
| 62
| 0.755556
| 14
| 90
| 4.714286
| 0.714286
| 0.242424
| 0.333333
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 0.1
| 90
| 3
| 63
| 30
| 0.802469
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6674345667a61c3fa1d4449402dbaaf5e0b6240e
| 4,287
|
py
|
Python
|
prun_table.py
|
artec-kk/Studth
|
52cfa9c61355852163e1e4b42832480f51fda410
|
[
"MIT"
] | 3
|
2020-12-20T11:43:00.000Z
|
2020-12-20T17:32:19.000Z
|
prun_table.py
|
Nyanyan/VirtualRubiksCubeSolver
|
9096f1ebe48b880ade01ee8e22324c9017e41d4d
|
[
"MIT"
] | 1
|
2021-04-19T09:08:02.000Z
|
2021-04-19T09:08:02.000Z
|
prun_table.py
|
Nyanyan/VirtualRubiksCubeSolver
|
9096f1ebe48b880ade01ee8e22324c9017e41d4d
|
[
"MIT"
] | 1
|
2021-03-11T09:26:49.000Z
|
2021-03-11T09:26:49.000Z
|
from basic_functions import *
import csv
from collections import deque
inf = 1000  # effectively-infinite BFS cost marking unvisited states in the tables
def table_phase0():
    """Generate the phase-0 pruning tables via BFS and write them to CSV.

    Reads:  trans_ep_phase0.csv, trans_co.csv, trans_eo.csv
    Writes: prun_phase0_co_ep.csv (495 x 2187), prun_phase0_eo_ep.csv (495 x 2048)

    Each table entry [ep_idx][co_or_eo_idx] is the twist distance from the
    solved state, or ``inf`` for unreachable combinations.
    """
    def read_trans(path):
        # Each CSV row is a comma-separated transition row of ints.
        with open(path, mode='r') as f:
            return [[int(i) for i in line.split(',')] for line in map(str.strip, f)]

    def bfs(trans_ep, trans, size2, solved1, solved2):
        # Breadth-first search outward from the solved state; the first time a
        # (idx1, idx2) pair is reached gives its minimal twist count.
        table = [[inf for _ in range(size2)] for _ in range(495)]
        table[solved1][solved2] = 0
        que = deque([[solved1, solved2, 0]])
        while que:
            idx1, idx2, cost = que.popleft()
            cost += 1
            for twist_idx, _ in enumerate(candidate[0]):
                n_idx1 = trans_ep[idx1][twist_idx]
                n_idx2 = trans[idx2][twist_idx]
                if table[n_idx1][n_idx2] > cost:
                    table[n_idx1][n_idx2] = cost
                    que.append([n_idx1, n_idx2, cost])
        return table

    def write_table(path, table):
        with open(path, mode='w') as f:
            writer = csv.writer(f, lineterminator='\n')
            for arr in table:
                writer.writerow(arr)

    trans_ep = read_trans('trans_ep_phase0.csv')
    solved_ep = ep2idx_phase0(list(range(12)))

    # Corner-orientation x edge-position table.
    table = bfs(trans_ep, read_trans('trans_co.csv'), 2187,
                solved_ep, co2idx([0 for _ in range(8)]))
    write_table('prun_phase0_co_ep.csv', table)

    # Edge-orientation x edge-position table.
    table = bfs(trans_ep, read_trans('trans_eo.csv'), 2048,
                solved_ep, eo2idx([0 for _ in range(12)]))
    write_table('prun_phase0_eo_ep.csv', table)
def table_phase1():
    """Generate the phase-1 pruning tables via BFS and write them to CSV.

    Reads:  trans_ep_phase1_2.csv, trans_cp.csv, trans_ep_phase1_1.csv
    Writes: prun_phase1_cp_ep.csv (24 x 40320), prun_phase1_ep_ep.csv (24 x 40320)

    Each table entry is the twist distance from the solved state, or ``inf``
    for unreachable combinations.
    """
    def read_trans(path):
        # Each CSV row is a comma-separated transition row of ints.
        with open(path, mode='r') as f:
            return [[int(i) for i in line.split(',')] for line in map(str.strip, f)]

    def bfs(trans_ep, trans, solved1, solved2):
        # Breadth-first search outward from the solved state using the
        # phase-1 move set (candidate[1]).
        table = [[inf for _ in range(40320)] for _ in range(24)]
        table[solved1][solved2] = 0
        que = deque([[solved1, solved2, 0]])
        while que:
            idx1, idx2, cost = que.popleft()
            cost += 1
            for twist_idx, _ in enumerate(candidate[1]):
                n_idx1 = trans_ep[idx1][twist_idx]
                n_idx2 = trans[idx2][twist_idx]
                if table[n_idx1][n_idx2] > cost:
                    table[n_idx1][n_idx2] = cost
                    que.append([n_idx1, n_idx2, cost])
        return table

    def write_table(path, table):
        with open(path, mode='w') as f:
            writer = csv.writer(f, lineterminator='\n')
            for arr in table:
                writer.writerow(arr)

    trans_ep = read_trans('trans_ep_phase1_2.csv')
    solved_ep = ep2idx_phase1_2(list(range(12)))

    # Corner-permutation x edge-position table.
    table = bfs(trans_ep, read_trans('trans_cp.csv'),
                solved_ep, cp2idx(list(range(8))))
    write_table('prun_phase1_cp_ep.csv', table)

    # Edge-position x edge-position table.
    table = bfs(trans_ep, read_trans('trans_ep_phase1_1.csv'),
                solved_ep, ep2idx_phase1_1(list(range(12))))
    write_table('prun_phase1_ep_ep.csv', table)
# Build all pruning tables when this module runs. This also executes on import;
# NOTE(review): consider guarding with ``if __name__ == '__main__':``.
table_phase0()
table_phase1()
| 37.938053
| 80
| 0.565664
| 625
| 4,287
| 3.7232
| 0.1152
| 0.055006
| 0.030941
| 0.051569
| 0.902879
| 0.896863
| 0.877954
| 0.877954
| 0.877954
| 0.847443
| 0
| 0.053722
| 0.279216
| 4,287
| 113
| 81
| 37.938053
| 0.699353
| 0
| 0
| 0.761905
| 0
| 0
| 0.050606
| 0.029384
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019048
| false
| 0
| 0.028571
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66bd86cd6dd1bc20bbc00c99834357180a14981b
| 422
|
py
|
Python
|
src/testcase/GN_Y201S/input_case/GN_Y201S_Delay_Timer.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | 28
|
2017-11-10T00:19:16.000Z
|
2022-02-19T16:42:05.000Z
|
src/testcase/GN_Y201S/input_case/GN_Y201S_Delay_Timer.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | null | null | null |
src/testcase/GN_Y201S/input_case/GN_Y201S_Delay_Timer.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | 23
|
2017-08-22T06:12:19.000Z
|
2021-09-18T05:45:41.000Z
|
# coding=utf-8
# Aggregate the GN_Y201S delay-timer test cases into this module's namespace.
# NOTE(review): wildcard imports plus an ImportError that is only printed can
# silently hide missing test cases — confirm this best-effort load is intended.
try:
    from src.testcase.GN_Y201S.case.GN_Y201S_DELAY_TIMER.GN_Y201S_DELAY_TIMER_001 import *
    from src.testcase.GN_Y201S.case.GN_Y201S_DELAY_TIMER.GN_Y201S_DELAY_TIMER_002 import *
    from src.testcase.GN_Y201S.case.GN_Y201S_DELAY_TIMER.GN_Y201S_DELAY_TIMER_003 import *
    from src.testcase.GN_Y201S.case.GN_Y201S_DELAY_TIMER.GN_Y201S_DELAY_TIMER_004 import *
except ImportError as e:
    print(e)
| 46.888889
| 90
| 0.822275
| 74
| 422
| 4.256757
| 0.297297
| 0.266667
| 0.304762
| 0.431746
| 0.819048
| 0.819048
| 0.819048
| 0.819048
| 0.819048
| 0.819048
| 0
| 0.12963
| 0.104265
| 422
| 8
| 91
| 52.75
| 0.703704
| 0.028436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.714286
| 0
| 0.714286
| 0.142857
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 13
|
dd0f4eb8c4f1dc20e0fd25780cbdd6e07f04d05b
| 5,488
|
py
|
Python
|
tests/test_metadata.py
|
awst-baum/c3s_sm
|
b94ebe98766bd4a48d663c2bf0c8b690abcb5cce
|
[
"MIT"
] | null | null | null |
tests/test_metadata.py
|
awst-baum/c3s_sm
|
b94ebe98766bd4a48d663c2bf0c8b690abcb5cce
|
[
"MIT"
] | null | null | null |
tests/test_metadata.py
|
awst-baum/c3s_sm
|
b94ebe98766bd4a48d663c2bf0c8b690abcb5cce
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from c3s_sm.metadata import C3S_daily_tsatt_nc, C3S_SM_TS_Attrs, C3S_dekmon_tsatt_nc
def test_daily_metadata_default_active():
    """Default attribute values and flag tables for the 'active' product."""
    attrs = C3S_SM_TS_Attrs('active')

    assert attrs.version == 'v0000'
    assert attrs.product_sensor_type == 'active'
    assert attrs.version_sub_string == '.0.0'
    assert attrs.sm_full_name == 'Percent of Saturation Soil Moisture Uncertainty'
    assert attrs.sm_units == "percentage (%)"

    attrs.flag()
    meanings = attrs.flag_meanings.split(' ')
    assert attrs.flag_values[0] == 0
    assert meanings[0] == 'no_data_inconsistency_detected'
    assert attrs.flag_values[10] == 17
    assert meanings[10] == 'combination_of_flag_values_1_and_16'

    attrs.freqbandID_flag()
    meanings = attrs.freqbandID_flag_meanings.split(' ')
    assert attrs.freqbandID_flag_values[0] == 0
    assert meanings[0] == 'NaN'
    assert attrs.freqbandID_flag_values[10] == 34
    assert meanings[10] == 'C53+C73'

    attrs.sensor_flag()
    meanings = attrs.sensor_flag_meanings.split(' ')
    assert attrs.sensor_flag_values[0] == 0
    assert meanings[0] == 'NaN'
    assert attrs.sensor_flag_values[10] == 132
    assert meanings[10] == 'TMI+AMIWS'

    attrs.mode_flag()
    meanings = attrs.mode_flag_meanings.split(' ')
    assert attrs.mode_flag_values[0] == 0
    assert meanings[0] == 'nan'
    assert attrs.mode_flag_values[3] == 3
    assert meanings[3] == 'ascending_descending_combination'
def test_daily_metadata_default_passive_and_combined():
    """Default attribute values and flag tables for 'passive' and 'combined'."""
    for sensor in ['passive', 'combined']:
        default_attr = C3S_SM_TS_Attrs(sensor)
        assert (default_attr.version == 'v0000')
        assert (default_attr.product_sensor_type == sensor)
        assert (default_attr.version_sub_string == '.0.0')
        assert (default_attr.sm_full_name == 'Volumetric Soil Moisture Uncertainty')
        assert (default_attr.sm_units == "m3 m-3")

        default_attr.flag()
        assert (default_attr.flag_values[0] == 0)
        assert (default_attr.flag_meanings.split(' ')[0] == 'no_data_inconsistency_detected')
        assert (default_attr.flag_values[10] == 17)
        assert (default_attr.flag_meanings.split(' ')[10] == 'combination_of_flag_values_1_and_16')

        default_attr.freqbandID_flag()
        assert (default_attr.freqbandID_flag_values[0] == 0)
        assert (default_attr.freqbandID_flag_meanings.split(' ')[0] == 'NaN')
        assert (default_attr.freqbandID_flag_values[10] == 34)
        assert (default_attr.freqbandID_flag_meanings.split(' ')[10] == 'C53+C73')

        default_attr.sensor_flag()
        assert (default_attr.sensor_flag_values[0] == 0)
        assert (default_attr.sensor_flag_meanings.split(' ')[0] == 'NaN')
        assert (default_attr.sensor_flag_values[10] == 132)
        assert (default_attr.sensor_flag_meanings.split(' ')[10] == 'TMI+AMIWS')

        default_attr.mode_flag()
        assert (default_attr.mode_flag_values[0] == 0)
        assert (default_attr.mode_flag_meanings.split(' ')[0] == 'nan')
        # Fixed: previously asserted ``flag_values[3]`` here; the parallel
        # 'active' test checks ``mode_flag_values[3]``, matching the
        # ``mode_flag_meanings`` assertion on the next line.
        assert (default_attr.mode_flag_values[3] == 3)
        assert (default_attr.mode_flag_meanings.split(' ')[3] == 'ascending_descending_combination')
def test_C3s_daily_tsatt_nc():
    """Global and time-series attributes of the daily 'active' TCDR product."""
    product_sub_type = 'TCDR'
    product_sensor = 'active'
    dob = C3S_daily_tsatt_nc(C3S_SM_TS_Attrs, product_sub_type=product_sub_type,
                             product_sensor_type=product_sensor, sub_version='.9.9')
    expected_global = {
        'product': 'C3S-SOILMOISTURE-L3S-SSMS-%s-%s-%s-v0000.9.9'
                   % (product_sensor.upper(), 'DAILY', product_sub_type),
        'resolution': '0.25 degree',
        'temporalspacing': 'daily',
    }
    assert dob.global_attr == expected_global
    assert dob.ts_attributes['flag']['flag_values'].size == 18
    assert dob.ts_attributes['sm'] == {
        'units': 'percentage (%)',
        'full_name': 'Percent of Saturation Soil Moisture Uncertainty',
    }
    assert dob.ts_attributes['mode']['flag_values'].size == 4
    assert dob.ts_attributes['t0'] == {
        'units': 'days since 1970-01-01 00:00:00 UTC',
        'full_name': 'Observation Timestamp',
    }
def test_C3s_dekmon_tsatt_nc():
    """Global and time-series attributes of the monthly 'passive' TCDR product."""
    subtype = 'TCDR'
    sensor = 'passive'
    dob = C3S_dekmon_tsatt_nc(C3S_SM_TS_Attrs, product_sub_type=subtype,
                              product_sensor_type=sensor, sub_version='.9.9')
    glob = dob.global_attr
    assert glob == {'product': 'C3S-SOILMOISTURE-L3S-SSMV-%s-%s-%s-v0000.9.9'
                               % (sensor.upper(), 'MONTHLY', subtype),
                    'resolution': '0.25 degree',
                    'temporalspacing': 'monthly'}
    assert dob.ts_attributes['freqbandID']['flag_values'].size == 19
    sm_should = {'units': 'm3 m-3', 'full_name': 'Volumetric Soil Moisture Uncertainty'}
    assert dob.ts_attributes['sm'] == sm_should
    assert dob.ts_attributes['nobs'] == {'full_name': 'Number of valid observation'}
    t0_should = {'units': 'days since 1970-01-01 00:00:00 UTC', 'full_name': 'Observation Timestamp'}
    # Fixed: t0_should was computed but never asserted (cf. the daily test above).
    assert dob.ts_attributes['t0'] == t0_should
    assert dob.ts_attributes['sensor']['flag_values'].size == 27
if __name__ == '__main__':
    # Allow running the tests directly, without pytest.
    test_C3s_dekmon_tsatt_nc()
    test_C3s_daily_tsatt_nc()
    test_daily_metadata_default_passive_and_combined()
    test_daily_metadata_default_active()
| 42.215385
| 107
| 0.674927
| 713
| 5,488
| 4.834502
| 0.152875
| 0.165941
| 0.207137
| 0.043516
| 0.89063
| 0.834059
| 0.795764
| 0.764433
| 0.723238
| 0.70148
| 0
| 0.03975
| 0.184038
| 5,488
| 130
| 108
| 42.215385
| 0.730013
| 0.003827
| 0
| 0.580645
| 0
| 0
| 0.187706
| 0.051592
| 0
| 0
| 0
| 0
| 0.55914
| 1
| 0.043011
| false
| 0.043011
| 0.010753
| 0
| 0.053763
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd1925a25f05d0c1e383dff972c880006c02219d
| 6,210
|
py
|
Python
|
tests/test_consume_handler.py
|
LaVita-GmbH/celery-message-consumer
|
cbead6a9cb8774caa883e6338083e073e09427ad
|
[
"Apache-2.0"
] | 40
|
2018-06-06T05:16:01.000Z
|
2022-03-29T11:21:31.000Z
|
tests/test_consume_handler.py
|
LaVita-GmbH/celery-message-consumer
|
cbead6a9cb8774caa883e6338083e073e09427ad
|
[
"Apache-2.0"
] | 9
|
2018-10-03T05:40:45.000Z
|
2021-10-01T07:49:41.000Z
|
tests/test_consume_handler.py
|
LaVita-GmbH/celery-message-consumer
|
cbead6a9cb8774caa883e6338083e073e09427ad
|
[
"Apache-2.0"
] | 15
|
2018-01-26T15:20:37.000Z
|
2022-03-01T13:38:53.000Z
|
import socket
from flaky import flaky
import mock
from event_consumer import message_handler
from event_consumer import handlers as ec
from .base import BaseConsumerIntegrationTest
class ConsumeMessageHandlerTest(BaseConsumerIntegrationTest):
    """Integration tests: published messages reach the registered handlers
    exactly once (no retries / requeues, no extra calls)."""

    def _assert_not_retried(self):
        """Drain once more and require a socket.timeout, i.e. no message was requeued."""
        e = None
        try:
            self.connection.drain_events(timeout=0.3)
        except socket.timeout as exc:
            e = exc
        self.assertIsNotNone(e, msg="e=None here means task was unexpectedly retried")

    @flaky(max_runs=5, min_passes=5)
    def test_consume_basic(self):
        """
        Should run the wrapped function when a message arrives with its routing key.
        """
        with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:
            f1 = message_handler('my.routing.key1')(
                mock.MagicMock(__name__='mock_handler1')
            )
            f2 = message_handler('my.routing.key2')(
                mock.MagicMock(__name__='mock_handler2')
            )
            assert len(reg) == 2

            self.configure_handlers()
            assert len(self.handlers) == len(reg)

            h1 = self.get_handlers_for_key('my.routing.key1')[0]
            h2 = self.get_handlers_for_key('my.routing.key2')[0]
            p1 = self.get_producer(h1)
            p2 = self.get_producer(h2)
            body1 = self.body()
            body2 = self.body()
            p1.publish(body1)
            p2.publish(body2)
            for _ in range(2):
                self.connection.drain_events(timeout=0.3)
            f1.assert_called_once_with(body1)
            f2.assert_called_once_with(body2)

            # no retries:
            self._assert_not_retried()

            # no further calls
            # Fixed: these were assignments (``f1.call_count = 1``), which
            # silently overwrote the mock's counter instead of asserting it.
            self.assertEqual(f1.call_count, 1)
            self.assertEqual(f2.call_count, 1)

    @flaky(max_runs=5, min_passes=5)
    def test_consume_custom_queue_name(self):
        """
        Should run the wrapped function when a message arrives with its routing key.
        Test that we can connect multiple routing keys on the same queue and the
        appropriate handler will be called in each case.
        """
        with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:
            # we have to use a named exchange to be able to bind a custom queue name
            f1 = message_handler('my.routing.key1', queue='custom_queue', exchange='custom')(
                mock.MagicMock(__name__='mock_handler1')
            )
            assert len(reg) == 1

            self.configure_handlers()
            assert len(self.handlers) == len(reg)

            h1 = self.get_handlers_for_key('my.routing.key1')[0]
            p1 = self.get_producer(h1)
            body1 = self.body()
            p1.publish(body1)
            self.connection.drain_events(timeout=0.3)
            f1.assert_called_once_with(body1)

            # no retries:
            self._assert_not_retried()

            # no further calls (fixed: was an assignment, not an assertion)
            self.assertEqual(f1.call_count, 1)

    @flaky(max_runs=5, min_passes=5)
    def test_consume_wildcard_route(self):
        """
        Should run the wrapped function when a message arrives with its routing key.
        Test that we can connect multiple routing keys on the same queue and the
        appropriate handler will be called in each case.
        """
        with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:
            f1 = message_handler('my.routing.*', exchange='custom')(
                mock.MagicMock(__name__='mock_handler1')
            )
            assert len(reg) == 1

            self.configure_handlers()
            assert len(self.handlers) == len(reg)

            h1 = self.get_handlers_for_key('my.routing.*')[0]
            p1 = self.get_producer(h1, 'my.routing.key1')
            p2 = self.get_producer(h1, 'my.routing.key2')
            body1 = self.body()
            body2 = self.body()
            p1.publish(body1)
            p2.publish(body2)
            for _ in range(2):
                self.connection.drain_events(timeout=0.3)
            f1.assert_has_calls([mock.call(body1), mock.call(body2)], any_order=True)

            # no retries:
            self._assert_not_retried()

            # no further calls (fixed: was an assignment, not an assertion)
            self.assertEqual(f1.call_count, 2)

    @flaky(max_runs=5, min_passes=5)
    def test_consume_multiple_routes(self):
        """
        Should run the wrapped function when a message arrives with its routing key.
        Test that we can connect multiple routing keys on the same queue and the
        appropriate handler will be called in each case.
        """
        with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:
            decorator = message_handler(
                ['my.routing.key1', 'my.routing.key2'],
                exchange='custom',
            )
            f1 = decorator(mock.MagicMock(__name__='mock_handler1'))
            assert len(reg) == 2

            self.configure_handlers()
            assert len(self.handlers) == len(reg)

            h1 = self.get_handlers_for_key('my.routing.key1')[0]
            h2 = self.get_handlers_for_key('my.routing.key2')[0]
            p1 = self.get_producer(h1)
            p2 = self.get_producer(h2)
            body1 = self.body()
            body2 = self.body()
            p1.publish(body1)
            p2.publish(body2)
            for _ in range(2):
                self.connection.drain_events(timeout=0.3)
            f1.assert_has_calls([mock.call(body1), mock.call(body2)], any_order=True)

            # no retries:
            self._assert_not_retried()

            # no further calls (fixed: was an assignment, not an assertion)
            self.assertEqual(f1.call_count, 2)
| 33.031915
| 93
| 0.56876
| 756
| 6,210
| 4.521164
| 0.170635
| 0.036864
| 0.04447
| 0.058514
| 0.85986
| 0.837039
| 0.805734
| 0.805734
| 0.79725
| 0.79725
| 0
| 0.029576
| 0.335749
| 6,210
| 187
| 94
| 33.208556
| 0.79903
| 0.138647
| 0
| 0.741379
| 0
| 0
| 0.099808
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 1
| 0.034483
| false
| 0.034483
| 0.051724
| 0
| 0.094828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd2d731db5a41e82c6c83e0e33cbab084191eea9
| 6,373
|
py
|
Python
|
tests/unit/bucket/bucket_test.py
|
abeja-inc/abeja-platform-cli
|
bed60642ee46656a5bfec5577d876e54c88bfd3f
|
[
"Apache-2.0"
] | 2
|
2020-06-19T23:07:38.000Z
|
2021-06-03T10:44:39.000Z
|
tests/unit/bucket/bucket_test.py
|
abeja-inc/abeja-platform-cli
|
bed60642ee46656a5bfec5577d876e54c88bfd3f
|
[
"Apache-2.0"
] | 20
|
2020-04-07T07:48:42.000Z
|
2020-09-07T09:18:43.000Z
|
tests/unit/bucket/bucket_test.py
|
abeja-inc/abeja-platform-cli
|
bed60642ee46656a5bfec5577d876e54c88bfd3f
|
[
"Apache-2.0"
] | 1
|
2021-06-01T13:38:19.000Z
|
2021-06-01T13:38:19.000Z
|
from unittest import TestCase
import requests
import requests_mock
from abejacli.bucket import (
generate_bucket_file_iter,
generate_bucket_file_iter_by_id
)
from abejacli.config import DATALAKE_ITEMS_PER_PAGE, ORGANIZATION_ENDPOINT
# Canned API responses used as fixtures by the tests below.
# FILE mirrors a single entry of the bucket "get file" endpoint; the
# download_uri is a captured (long-expired) pre-signed S3 URL.
FILE = {
    "expires": "2017-11-21T02:18:16+00:00",
    "metadata": {
        "x-abeja-meta-filename": "file2/file2-2.txt"
    },
    "file_id": "file2/file2-2.txt",
    "is_file": True,
    "size": 4,
    "etag": "etag",
    "download_uri": "https://abeja-storage-bucket-dev.s3.amazonaws.com/1122334455667/1981155819522/file2/file2-2.txt?AWSAccessKeyId=ASIAIS6VOBREHPTWAQDA&Signature=Riaqm%2B4sJz9fc2J0GIsvIIAROG8%3D&x-amz-security-token=FQoDYXdzEN7%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaDGO9U9SxYJDxdzmbMyKCAnNauIDasGDp9mNIHaSbhG8PIXZWA193DiNcFqRvd4BlfA9VB2ZjohVJNnMLssOQBLkrK5Tgc7ixxgTuon2pkeew9IEiyxHjDm8T3jjLbUCUWqUDuy0JKdYjTqYGQ4SJBUSEGsFOfyUIDW1VqXPAdmgHC3p%2BMOOBI07uW6%2BThG50EjCttzrCYX9ka73R3Tj6Iqe4bnj3ogl909o9%2Fen1yRJ6uEGGkbfXCMJsAGrDrRY5bJxcjS4uCQLidqxQM1nbumNc%2F2WipjF7AK1wQQl50eEO%2FG9%2F%2Fc81Bjv767GazeCraSnukGggMTcqEOeUQEAlxgTo7lh6ykbl0JU%2BMs0Hks08DiiHhc3QBQ%3D%3D&Expires=1511230696", # noqa
}
# FILES mirrors the "files" array of the bucket listing endpoint
# (two entries: FILE above plus a second file, file1.txt).
FILES = [
    {
        "expires": "2017-11-21T02:18:16+00:00",
        "metadata": {
            "x-abeja-meta-filename": "file2/file2-2.txt"
        },
        "file_id": "file2/file2-2.txt",
        "is_file": True,
        "size": 4,
        "etag": "etag",
        "download_uri": "https://abeja-storage-bucket-dev.s3.amazonaws.com/1122334455667/1981155819522/file2/file2-2.txt?AWSAccessKeyId=ASIAIS6VOBREHPTWAQDA&Signature=Riaqm%2B4sJz9fc2J0GIsvIIAROG8%3D&x-amz-security-token=FQoDYXdzEN7%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaDGO9U9SxYJDxdzmbMyKCAnNauIDasGDp9mNIHaSbhG8PIXZWA193DiNcFqRvd4BlfA9VB2ZjohVJNnMLssOQBLkrK5Tgc7ixxgTuon2pkeew9IEiyxHjDm8T3jjLbUCUWqUDuy0JKdYjTqYGQ4SJBUSEGsFOfyUIDW1VqXPAdmgHC3p%2BMOOBI07uW6%2BThG50EjCttzrCYX9ka73R3Tj6Iqe4bnj3ogl909o9%2Fen1yRJ6uEGGkbfXCMJsAGrDrRY5bJxcjS4uCQLidqxQM1nbumNc%2F2WipjF7AK1wQQl50eEO%2FG9%2F%2Fc81Bjv767GazeCraSnukGggMTcqEOeUQEAlxgTo7lh6ykbl0JU%2BMs0Hks08DiiHhc3QBQ%3D%3D&Expires=1511230696", # noqa
    },
    {
        "expires": "2017-11-21T02:18:16+00:00",
        "metadata": {
            "x-abeja-meta-filename": "file1.txt"
        },
        "file_id": "file1.txt",
        "is_file": True,
        "size": 4,
        "etag": "etag",
        "download_uri": "https://abeja-storage-bucket-dev.s3.amazonaws.com/1122334455667/1981155819522/file1.txt?AWSAccessKeyId=ASIAIS6VOBREHPTWAQDA&Signature=Riaqm%2B4sJz9fc2J0GIsvIIAROG8%3D&x-amz-security-token=FQoDYXdzEN7%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaDGO9U9SxYJDxdzmbMyKCAnNauIDasGDp9mNIHaSbhG8PIXZWA193DiNcFqRvd4BlfA9VB2ZjohVJNnMLssOQBLkrK5Tgc7ixxgTuon2pkeew9IEiyxHjDm8T3jjLbUCUWqUDuy0JKdYjTqYGQ4SJBUSEGsFOfyUIDW1VqXPAdmgHC3p%2BMOOBI07uW6%2BThG50EjCttzrCYX9ka73R3Tj6Iqe4bnj3ogl909o9%2Fen1yRJ6uEGGkbfXCMJsAGrDrRY5bJxcjS4uCQLidqxQM1nbumNc%2F2WipjF7AK1wQQl50eEO%2FG9%2F%2Fc81Bjv767GazeCraSnukGggMTcqEOeUQEAlxgTo7lh6ykbl0JU%2BMs0Hks08DiiHhc3QBQ%3D%3D&Expires=1511230696", # noqa
    },
]
class GenerateFilePeriodIterTest(TestCase):
    """Tests for generate_bucket_file_iter (page-by-page bucket listing)."""

    @requests_mock.Mocker()
    def test_iter_file_period(self, mock):
        """Yields every listed file, stopping once an empty page is returned."""
        bucket_id = '1282495447337'
        first_page_url = '{}/buckets/{}/files?items_per_page={}'.format(
            ORGANIZATION_ENDPOINT, bucket_id, DATALAKE_ITEMS_PER_PAGE)
        mock.register_uri('GET', first_page_url, json={
            'files': FILES,
            'next_start_after': 'file1.txt',
        })
        empty_page_url = '{}/buckets/{}/files?items_per_page={}&start_after=file1.txt'.format(
            ORGANIZATION_ENDPOINT, bucket_id, DATALAKE_ITEMS_PER_PAGE)
        mock.register_uri('GET', empty_page_url, json={
            'files': [],
            'next_start_after': 'file1.txt',
        })
        assert list(generate_bucket_file_iter(bucket_id)) == FILES

    @requests_mock.Mocker()
    def test_iter_file_period_empty(self, mock):
        """Yields nothing when the very first page is already empty."""
        bucket_id = '1282495447337'
        listing_url = '{}/buckets/{}/files?items_per_page={}'.format(
            ORGANIZATION_ENDPOINT, bucket_id, DATALAKE_ITEMS_PER_PAGE)
        mock.register_uri('GET', listing_url, json={
            'files': [],
            'next_start_after': 'file1.txt',
        })
        yielded = list(generate_bucket_file_iter(bucket_id))
        assert 0 == len(yielded)

    @requests_mock.Mocker()
    def test_iter_file_period_pagination(self, mock):
        """Follows next_start_after cursors across pages and concatenates the results."""
        bucket_id = '1282495447337'
        first_cursor = 'file1.txt'  # noqa
        second_cursor = 'file2/file2-2.txt'
        first_page_url = '{}/buckets/{}/files'.format(ORGANIZATION_ENDPOINT, bucket_id)
        second_page_url = '{}/buckets/{}/files?start_after={}'.format(
            ORGANIZATION_ENDPOINT, bucket_id, first_cursor)
        third_page_url = '{}/buckets/{}/files?start_after={}'.format(
            ORGANIZATION_ENDPOINT, bucket_id, second_cursor)
        mock.register_uri('GET', first_page_url,
                          json={'files': FILES[:1], 'next_start_after': first_cursor})
        mock.register_uri('GET', second_page_url,
                          json={'files': FILES[1:], 'next_start_after': second_cursor})
        mock.register_uri('GET', third_page_url,
                          json={'files': [], 'next_start_after': 'dummy'})
        assert list(generate_bucket_file_iter(bucket_id)) == FILES
class GenerateFileIdIterTest(TestCase):
    """Tests for generate_bucket_file_iter_by_id (per-file-id lookup)."""

    @requests_mock.Mocker()
    def test_iter_file_id(self, mock):
        """Yields the file metadata for each requested file id."""
        bucket_id = '1282495447337'
        file_id = '20171116T071056/9e6463607e6e'
        detail_url = '{}/buckets/{}/files/{}'.format(ORGANIZATION_ENDPOINT, bucket_id, file_id)
        mock.register_uri('GET', detail_url, json=FILE)
        yielded = list(generate_bucket_file_iter_by_id(bucket_id, file_id))
        assert yielded == [FILE]

    @requests_mock.Mocker()
    def test_iter_file_id_not_found(self, mock):
        """Raises requests.HTTPError during iteration when the API answers 404."""
        bucket_id = '1282495447337'
        file_id = '20171116T071056/9e6463607e6e'
        detail_url = '{}/buckets/{}/files/{}'.format(ORGANIZATION_ENDPOINT, bucket_id, file_id)
        mock.register_uri('GET', detail_url, json=FILE, status_code=404)
        with self.assertRaises(requests.HTTPError):
            list(generate_bucket_file_iter_by_id(bucket_id, file_id))
| 45.198582
| 686
| 0.708928
| 668
| 6,373
| 6.508982
| 0.17515
| 0.022079
| 0.028979
| 0.033119
| 0.870745
| 0.844296
| 0.838316
| 0.822447
| 0.785189
| 0.74977
| 0
| 0.110291
| 0.170563
| 6,373
| 140
| 687
| 45.521429
| 0.712259
| 0.002981
| 0
| 0.483607
| 1
| 0.02459
| 0.461963
| 0.069145
| 0
| 0
| 0
| 0
| 0.040984
| 1
| 0.040984
| false
| 0
| 0.040984
| 0
| 0.098361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd468423d481de6165d78402654a7b7f01f919c3
| 7,352
|
py
|
Python
|
app/laboratorio/migrations/0001_initial.py
|
echosisdev/siecho
|
ce9dd5ce9090e9e96c2c6e149c84fb26894058f9
|
[
"MIT"
] | null | null | null |
app/laboratorio/migrations/0001_initial.py
|
echosisdev/siecho
|
ce9dd5ce9090e9e96c2c6e149c84fb26894058f9
|
[
"MIT"
] | null | null | null |
app/laboratorio/migrations/0001_initial.py
|
echosisdev/siecho
|
ce9dd5ce9090e9e96c2c6e149c84fb26894058f9
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.7 on 2021-10-06 21:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the `laboratorio` app.

    Creates the ``ExameClinico`` model holding a patient's clinical lab
    results (hemogram, leukocyte differential, HIV/TB markers, biochemistry,
    electrolytes), linked to ``core.location`` and ``core.paciente``.
    """

    # First migration of this app.
    initial = True

    # `core` must be migrated first: ExameClinico has FKs to core.location/core.paciente.
    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='ExameClinico',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Request / collection / result timestamps (all required).
                ('data_pedido', models.DateTimeField()),
                ('data_colheita', models.DateTimeField()),
                ('data_resultado', models.DateTimeField()),
                # Hemogram — all measurements are optional decimals.
                ('globulos_brancos', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('globulos_vermelhos', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('hemoglobina', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('hematorcito', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('vol_corpuscular_med', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('conc_media_hemoglob_corp', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('plaquetas', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('larg_distr_gv', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('velc_sedime_gv', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('vol_medio_plaquetas', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('tipagem_sanguinea', models.CharField(blank=True, max_length=255, null=True)),
                # Leukocyte differential: percentage (_perc) and absolute (_abs) pairs.
                ('linfocitos_perc', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('linfocitos_abs', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('neutrofilos_perc', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('neutrofilos_abs', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('eosinofilo_perc', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('eosinofilo_abs', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('basofio_perc', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('basofio_abs', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('monocito_perc', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('monocito_abs', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                # Serology / HIV / TB markers — free-text result fields.
                ('teste_vdr', models.CharField(blank=True, max_length=50, null=True)),
                ('rpr', models.CharField(blank=True, max_length=50, null=True)),
                ('cd4_abs', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('cd4_perc', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('geneexpert', models.CharField(blank=True, max_length=50, null=True)),
                ('xpert_mtb', models.CharField(blank=True, max_length=50, null=True)),
                ('nivel_mtb_detetado', models.CharField(blank=True, max_length=50, null=True)),
                ('resistencia_rifampin', models.CharField(blank=True, max_length=50, null=True)),
                ('cultura', models.CharField(blank=True, max_length=50, null=True)),
                ('tb_lam', models.CharField(blank=True, max_length=50, null=True)),
                ('carga_viral', models.CharField(blank=True, max_length=50, null=True)),
                ('baciloscopial', models.CharField(blank=True, max_length=50, null=True)),
                # NOTE(review): unlike the sibling CharFields, this one lacks null=True,
                # so empty values are stored as '' — presumably unintentional; confirm
                # against the model before changing (would require a new migration).
                ('nivel_positividade', models.CharField(blank=True, max_length=50)),
                # Biochemistry panel.
                ('albumina', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('asparato_aminotransferiase', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('alanina_aminotransferiase', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('amilase', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('birrubina', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('birrubina_direita', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('colesterol_total', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('hdl_colesterol', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('ldl_colesterol', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('creatina', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('creatina_quinase', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('fosfatase_alcalina', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('gama_glutamil', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('glucose', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('lactato_desidrogenase', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('lactato', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('lipase', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('total_proteina', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('triglicerides', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('ureia', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                # Electrolytes.
                ('cloreto', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('potassio', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('sodio', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('globulinas', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('pcr', models.CharField(blank=True, max_length=50, null=True)),
                # Row is deleted when its location/patient is deleted (CASCADE).
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.location')),
                ('paciente', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.paciente')),
            ],
            options={
                'verbose_name': 'Exame Clinico',
                'verbose_name_plural': 'Exames Clinicos',
            },
        ),
    ]
| 80.791209
| 124
| 0.649075
| 872
| 7,352
| 5.286697
| 0.162844
| 0.115184
| 0.229501
| 0.269414
| 0.796746
| 0.796746
| 0.789588
| 0.781996
| 0.781996
| 0.698048
| 0
| 0.031909
| 0.207155
| 7,352
| 90
| 125
| 81.688889
| 0.758964
| 0.006121
| 0
| 0
| 1
| 0
| 0.125257
| 0.013142
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.024096
| 0
| 0.072289
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd4883ce326da35c2f6b5aa8b9c9f6620149c38c
| 8,593
|
py
|
Python
|
tests/test_season.py
|
paulgoetze/whattime
|
f45114e4dddfef91dfc2068a0ad41c5cb8d9c4b2
|
[
"MIT"
] | null | null | null |
tests/test_season.py
|
paulgoetze/whattime
|
f45114e4dddfef91dfc2068a0ad41c5cb8d9c4b2
|
[
"MIT"
] | 2
|
2021-04-27T21:43:24.000Z
|
2021-04-27T21:46:21.000Z
|
tests/test_season.py
|
paulgoetze/whattime
|
f45114e4dddfef91dfc2068a0ad41c5cb8d9c4b2
|
[
"MIT"
] | null | null | null |
from whattime import Hemisphere, season_info
# Spring:
def test_is_spring_for_northern_hemisphere(months):
    """Test returns true for spring months on the northern hemisphere"""
    spring = ('march', 'april', 'may')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.NORTHERN)
        assert info.is_spring is (name in spring)
def test_is_spring_for_southern_hemisphere(months):
    """Test returns true for spring months on the southern hemisphere"""
    spring = ('september', 'october', 'november')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.SOUTHERN)
        assert info.is_spring is (name in spring)
# Summer:
def test_is_summer_for_northern_hemisphere(months):
    """Test returns true for summer months on the northern hemisphere"""
    summer = ('june', 'july', 'august')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.NORTHERN)
        assert info.is_summer is (name in summer)
def test_is_summer_for_southern_hemisphere(months):
    """Test returns true for summer months on the southern hemisphere"""
    summer = ('january', 'february', 'december')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.SOUTHERN)
        assert info.is_summer is (name in summer)
# Autumn:
def test_is_autumn_for_northern_hemisphere(months):
    """Test returns true for autumn months on the northern hemisphere"""
    autumn = ('september', 'october', 'november')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.NORTHERN)
        assert info.is_autumn is (name in autumn)
def test_is_autumn_for_southern_hemisphere(months):
    """Test returns true for autumn months on the southern hemisphere"""
    autumn = ('march', 'april', 'may')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.SOUTHERN)
        assert info.is_autumn is (name in autumn)
# Winter:
def test_is_winter_for_northern_hemisphere(months):
    """Test returns true for winter months on the northern hemisphere"""
    winter = ('january', 'february', 'december')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.NORTHERN)
        assert info.is_winter is (name in winter)
def test_is_winter_for_southern_hemisphere(months):
    """Test returns true for winter months on the southern hemisphere"""
    winter = ('june', 'july', 'august')
    for name in ('january', 'february', 'march', 'april', 'may', 'june', 'july',
                 'august', 'september', 'october', 'november', 'december'):
        info = season_info(getattr(months, name), Hemisphere.SOUTHERN)
        assert info.is_winter is (name in winter)
| 58.856164
| 80
| 0.803328
| 1,210
| 8,593
| 5.512397
| 0.028926
| 0.145427
| 0.230285
| 0.316642
| 0.990705
| 0.972714
| 0.957421
| 0.957421
| 0.918141
| 0.918141
| 0
| 0
| 0.120214
| 8,593
| 145
| 81
| 59.262069
| 0.882275
| 0.062376
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.914286
| 1
| 0.07619
| false
| 0
| 0.009524
| 0
| 0.085714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
dd50bfdbdf5736f319a03fdbf27e7f414b62b8f6
| 342
|
py
|
Python
|
platform/core/polyaxon/polyaxon/config_settings/commands/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/polyaxon/config_settings/commands/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/polyaxon/config_settings/commands/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
from polyaxon.config_settings.cors import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.labels import *
from polyaxon.config_settings.middlewares import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.stores import *
from .apps import *
| 34.2
| 50
| 0.836257
| 45
| 342
| 6.2
| 0.288889
| 0.301075
| 0.451613
| 0.65233
| 0.688172
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003236
| 0.096491
| 342
| 9
| 51
| 38
| 0.899676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dd5694366c72c6dddc8a67197d3693774e6a58fa
| 217,450
|
py
|
Python
|
dlkit/abstract_osid/resource/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/resource/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/resource/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of resource abstract base class sessions."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class ResourceLookupSession:
"""This session defines methods for retrieving resources.
A ``Resource`` is an arbitrary entity that may represent a person,
place or thing used to identify an object used in various services.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
* isolated bin view: All resource methods in this session operate,
retrieve and pertain to resources defined explicitly in the
current bin. Using an isolated view is useful for managing
``Resources`` with the ``ResourceAdminSession.``
* federated bin view: All resource methods in this session
operate, retrieve and pertain to all resources defined in this
bin and any other resources implicitly available in this bin
through bin inheritence.
The methods ``use_federated_bin_view()`` and
``use_isolated_bin_view()`` behave as a radio group and one should
be selected before invoking any lookup methods.
Resources may have an additional records indicated by their
respective record types. The record may not be accessed through a
cast of the ``Resource``.
"""
__metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    # Read-only property alias mandated by the OSID spec.
    bin_id = property(fget=get_bin_id)
    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    # Read-only property alias; note it shadows the builtin `bin` inside
    # this class body (OSID spec-mandated name).
    bin = property(fget=get_bin)
    @abc.abstractmethod
    def can_lookup_resources(self):
        """Tests if this user can perform ``Resource`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean
    @abc.abstractmethod
    def use_comparative_resource_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
    @abc.abstractmethod
    def use_plenary_resource_view(self):
        """A complete view of the ``Resource`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
    @abc.abstractmethod
    def get_resource(self, resource_id):
        """Gets the ``Resource`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``Resource`` may have a
        different ``Id`` than requested, such as the case where a
        duplicate ``Id`` was assigned to a ``Resource`` and retained for
        compatibility.

        :param resource_id: the ``Id`` of the ``Resource`` to retrieve
        :type resource_id: ``osid.id.Id``
        :return: the returned ``Resource``
        :rtype: ``osid.resource.Resource``
        :raise: ``NotFound`` -- no ``Resource`` found with the given ``Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Resource
    @abc.abstractmethod
    def get_resources_by_ids(self, resource_ids):
        """Gets a ``ResourceList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the resources
        specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible ``Resources`` may be omitted from the list and may
        present the elements in any order including returning a unique
        set.

        :param resource_ids: the list of ``Ids`` to retrieve
        :type resource_ids: ``osid.id.IdList``
        :return: the returned ``Resource`` list
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NotFound`` -- an ``Id was`` not found
        :raise: ``NullArgument`` -- ``resource_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList
@abc.abstractmethod
def get_resources_by_genus_type(self, resource_genus_type):
    """Gets a ``ResourceList`` corresponding to the given resource genus ``Type`` which does not include resources of types derived from the specified ``Type``.

    In plenary mode, the returned list contains all known resources
    or an error results. Otherwise, the returned list may contain
    only those resources that are accessible through this session.

    :param resource_genus_type: a resource genus type
    :type resource_genus_type: ``osid.type.Type``
    :return: the returned ``Resource`` list
    :rtype: ``osid.resource.ResourceList``
    :raise: ``NullArgument`` -- ``resource_genus_type`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.resource.ResourceList
@abc.abstractmethod
def get_resources_by_parent_genus_type(self, resource_genus_type):
    """Gets a ``ResourceList`` corresponding to the given resource genus ``Type`` and include any additional resources with genus types derived from the specified ``Type``.

    In plenary mode, the returned list contains all known resources
    or an error results. Otherwise, the returned list may contain
    only those resources that are accessible through this session.

    :param resource_genus_type: a resource genus type
    :type resource_genus_type: ``osid.type.Type``
    :return: the returned ``Resource`` list
    :rtype: ``osid.resource.ResourceList``
    :raise: ``NullArgument`` -- ``resource_genus_type`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.resource.ResourceList
@abc.abstractmethod
def get_resources_by_record_type(self, resource_record_type):
    """Gets a ``ResourceList`` containing the given resource record ``Type``.

    In plenary mode, the returned list contains all known resources
    or an error results. Otherwise, the returned list may contain
    only those resources that are accessible through this session.

    :param resource_record_type: a resource record type
    :type resource_record_type: ``osid.type.Type``
    :return: the returned ``Resource`` list
    :rtype: ``osid.resource.ResourceList``
    :raise: ``NullArgument`` -- ``resource_record_type`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.resource.ResourceList
@abc.abstractmethod
def get_resources(self):
    """Gets all ``Resources``.

    In plenary mode, the returned list contains all known resources
    or an error results. Otherwise, the returned list may contain
    only those resources that are accessible through this session.

    :return: a list of ``Resources``
    :rtype: ``osid.resource.ResourceList``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.resource.ResourceList

resources = property(fget=get_resources)
class ResourceQuerySession:
    """This session provides methods for searching among ``Resource`` objects.

    The search query is constructed using the ``ResourceQuery``.

    This session defines views that offer differing behaviors for
    searching.

      * federated bin view: searches include resources in bins of which
        this bin is an ancestor in the bin hierarchy
      * isolated bin view: searches are restricted to resources in this
        bin

    Resources may have a resource record indicated by their respective
    record types. The resource query record is accessed via the
    ``ResourceQuery``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_search_resources(self):
        """Tests if this user can perform ``Resource`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_query(self):
        """Gets a resource query.

        The returned query will not have an extension query.

        :return: the resource query
        :rtype: ``osid.resource.ResourceQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQuery

    resource_query = property(fget=get_resource_query)

    @abc.abstractmethod
    def get_resources_by_query(self, resource_query):
        """Gets a list of ``Resources`` matching the given resource query.

        :param resource_query: the resource query
        :type resource_query: ``osid.resource.ResourceQuery``
        :return: the returned ``ResourceList``
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NullArgument`` -- ``resource_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList
class ResourceSearchSession:
    """This session provides methods for searching among ``Resource`` objects.

    The search query is constructed using the ``ResourceQuery``.

    ``get_resources_by_query()`` is the basic search method and returns
    a list of ``Resources``. A more advanced search may be performed
    with ``getResourcesBySearch()``. It accepts a ``ResourceSearch`` in
    addition to the query for the purpose of specifying additional
    options affecting the entire search, such as ordering.
    ``get_resources_by_search()`` returns a ``ResourceSearchResults``
    that can be used to access the resulting ``ResourceList`` or be used
    to perform a search within the result set through ``ResourceList``.

    This session defines views that offer differing behaviors for
    searching.

      * federated bin view: searches include resources in bins of which
        this bin is an ancestor in the bin hierarchy
      * isolated bin view: searches are restricted to resources in this
        bin

    Resources may have a resource query record indicated by their
    respective record types. The resource query record is accessed via
    the ``ResourceQuery``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_resource_search(self):
        """Gets a resource search.

        :return: the resource search
        :rtype: ``osid.resource.ResourceSearch``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceSearch

    resource_search = property(fget=get_resource_search)

    @abc.abstractmethod
    def get_resource_search_order(self):
        """Gets a resource search order.

        The ``ResourceSearchOrder`` is supplied to a ``ResourceSearch``
        to specify the ordering of results.

        :return: the resource search order
        :rtype: ``osid.resource.ResourceSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceSearchOrder

    resource_search_order = property(fget=get_resource_search_order)

    @abc.abstractmethod
    def get_resources_by_search(self, resource_query, resource_search):
        """Gets the search results matching the given search query using the given search.

        :param resource_query: the resource query
        :type resource_query: ``osid.resource.ResourceQuery``
        :param resource_search: the resource search
        :type resource_search: ``osid.resource.ResourceSearch``
        :return: the resource search results
        :rtype: ``osid.resource.ResourceSearchResults``
        :raise: ``NullArgument`` -- ``resource_query`` or ``resource_search`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_query`` or ``resource_search`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceSearchResults

    @abc.abstractmethod
    def get_resource_query_from_inspector(self, resource_query_inspector):
        """Gets a resource query from an inspector.

        The inspector is available from a ``ResourceSearchResults``.

        :param resource_query_inspector: a resource query inspector
        :type resource_query_inspector: ``osid.resource.ResourceQueryInspector``
        :return: the resource query
        :rtype: ``osid.resource.ResourceQuery``
        :raise: ``NullArgument`` -- ``resource_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``resource_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQuery
class ResourceAdminSession:
    """This session creates, updates, and deletes ``Resources``.

    The data for create and update is provided by the consumer via the
    form object. ``OsidForms`` are requested for each create or update
    and may not be reused.

    Create and update operations differ in their usage. To create a
    ``Resource,`` a ``ResourceForm`` is requested using
    ``get_resource_form_for_create()`` specifying desired record
    ``Types`` or none if no record ``Types`` are needed. The returned
    ``ResourceForm`` will indicate that it is to be used with a create
    operation and can be used to examine metadata or validate data prior
    to creation. Once the ``ResourceForm`` is submitted to a create
    operation, it cannot be reused with another create operation unless
    the first operation was unsuccessful. Each ``ResourceForm``
    corresponds to an attempted transaction.

    For updates, ``ResourceForms`` are requested for the ``Resource``
    ``Id`` that is to be updated using ``getResourceFormForUpdate()``.
    Similarly, the ``ResourceForm`` has metadata about the data that can
    be updated and it can perform validation before submitting the
    update. The ``ResourceForm`` can only be used once for a successful
    update and cannot be reused.

    The delete operations delete ``Resources``. To unmap a ``Resource``
    from the current ``Bin,`` the ``ResourceBinAssignmentSession``
    should be used. These delete operations attempt to remove the
    ``Resource`` itself thus removing it from all known ``Bin``
    catalogs.

    This session includes an ``Id`` aliasing mechanism to assign an
    external ``Id`` to an internally assigned Id.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_create_resources(self):
        """Tests if this user can create ``Resources``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a
        ``Resource`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        create operations to an unauthorized user.

        :return: ``false`` if ``Resource`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_create_resource_with_record_types(self, resource_record_types):
        """Tests if this user can create a single ``Resource`` using the desired record types.

        While ``ResourceManager.getResourceRecordTypes()`` can be used
        to examine which records are supported, this method tests which
        record(s) are required for creating a specific ``Resource``.
        Providing an empty array tests if a ``Resource`` can be created
        with no records.

        :param resource_record_types: array of resource record types
        :type resource_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Resource`` creation using the specified ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_form_for_create(self, resource_record_types):
        """Gets the resource form for creating new resources.

        A new form should be requested for each create transaction.

        :param resource_record_types: array of resource record types
        :type resource_record_types: ``osid.type.Type[]``
        :return: the resource form
        :rtype: ``osid.resource.ResourceForm``
        :raise: ``NullArgument`` -- ``resource_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form with requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceForm

    @abc.abstractmethod
    def create_resource(self, resource_form):
        """Creates a new ``Resource``.

        :param resource_form: the form for this ``Resource``
        :type resource_form: ``osid.resource.ResourceForm``
        :return: the new ``Resource``
        :rtype: ``osid.resource.Resource``
        :raise: ``IllegalState`` -- ``resource_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``resource_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_form`` did not originate from ``get_resource_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Resource

    @abc.abstractmethod
    def can_update_resources(self):
        """Tests if this user can update ``Resources``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a
        ``Resource`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        update operations to an unauthorized user.

        :return: ``false`` if ``Resource`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_form_for_update(self, resource_id):
        """Gets the resource form for updating an existing resource.

        A new resource form should be requested for each update
        transaction.

        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: the resource form
        :rtype: ``osid.resource.ResourceForm``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceForm

    @abc.abstractmethod
    def update_resource(self, resource_form):
        """Updates an existing resource.

        :param resource_form: the form containing the elements to be updated
        :type resource_form: ``osid.resource.ResourceForm``
        :raise: ``IllegalState`` -- ``resource_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``resource_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_form`` did not originate from ``get_resource_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_delete_resources(self):
        """Tests if this user can delete ``Resources``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``Resource`` will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        delete operations to an unauthorized user.

        :return: ``false`` if ``Resource`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def delete_resource(self, resource_id):
        """Deletes a ``Resource``.

        :param resource_id: the ``Id`` of the ``Resource`` to remove
        :type resource_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``resource_id`` not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_manage_resource_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Resources``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``Resource`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def alias_resource(self, resource_id, alias_id):
        """Adds an ``Id`` to a ``Resource`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Resource`` is determined by the
        provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another resource it is
        reassigned to the given resource ``Id``.

        :param resource_id: the ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``resource_id`` not found
        :raise: ``NullArgument`` -- ``alias_id`` or ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ResourceNotificationSession:
    """This session defines methods to receive notifications on adds/changes to ``Resource`` objects in this ``Bin``.

    This also includes existing resources that may appear or disappear
    due to changes in the ``Bin`` hierarchy. This session is intended
    for consumers needing to synchronize their state with this service
    without the use of polling. Notifications are cancelled when this
    session is closed.

    The two views defined in this session correspond to the views in the
    ``ResourceLookupSession``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_register_for_resource_notifications(self):
        """Tests if this user can register for ``Resource`` notifications.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer
        notification operations.

        :return: ``false`` if notification methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts notifications to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_resources(self):
        """Register for notifications of new resources.

        ``ResourceReceiver.newResources()`` is invoked when a new
        ``Resource`` appears in this bin.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resources(self):
        """Registers for notification of updated resources.

        ``ResourceReceiver.changedResources()`` is invoked when a
        resource in this bin is changed.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resource(self, resource_id):
        """Registers for notification of an updated resource.

        ``ResourceReceiver.changedResources()`` is invoked when the
        specified resource in this bin is changed.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resources(self):
        """Registers for notification of deleted resources.

        ``ResourceReceiver.deletedResources()`` is invoked when a
        resource is deleted or removed from this bin.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resource(self, resource_id):
        """Registers for notification of a deleted resource.

        ``ResourceReceiver.deletedResources()`` is invoked when the
        specified resource is deleted or removed from this bin.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def reliable_resource_notifications(self):
        """Reliable notifications are desired.

        In reliable mode, notifications are to be acknowledged using
        ``acknowledge_item_notification()`` .

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unreliable_resource_notifications(self):
        """Unreliable notifications are desired.

        In unreliable mode, notifications do not need to be
        acknowledged.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def acknowledge_resource_notification(self, notification_id):
        """Acknowledges a resource notification.

        :param notification_id: the ``Id`` of the notification
        :type notification_id: ``osid.id.Id``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ResourceBinSession:
    """This session provides methods to retrieve ``Resource`` to ``Bin`` mappings.

    A ``Resource`` may appear in multiple ``Bins``. Each ``Bin`` may
    have its own authorizations governing who is allowed to look at it.

    This lookup session defines several views:

      * comparative view: elements may be silently omitted or re-ordered
      * plenary view: provides a complete result set or is an error
        condition

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def use_comparative_bin_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_bin_view(self):
        """A complete view of the ``Resource`` and ``Bin`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_lookup_resource_bin_mappings(self):
        """Tests if this user can perform lookups of resource/bin mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known lookup methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        lookup operations to unauthorized users.

        :return: ``false`` if looking up mappings is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_ids_by_bin(self, bin_id):
        """Gets the list of ``Resource`` ``Ids`` associated with a ``Bin``.

        :param bin_id: ``Id`` of a ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: list of related resource ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_resources_by_bin(self, bin_id):
        """Gets the list of ``Resources`` associated with a ``Bin``.

        :param bin_id: ``Id`` of a ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: list of related resources
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList

    @abc.abstractmethod
    def get_resource_ids_by_bins(self, bin_ids):
        """Gets the list of ``Resource Ids`` corresponding to a list of ``Bin`` objects.

        :param bin_ids: list of bin ``Ids``
        :type bin_ids: ``osid.id.IdList``
        :return: list of resource ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``bin_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_resources_by_bins(self, bin_ids):
        """Gets the list of ``Resources`` corresponding to a list of ``Bins``.

        :param bin_ids: list of bin ``Ids``
        :type bin_ids: ``osid.id.IdList``
        :return: list of resources
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NullArgument`` -- ``bin_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList

    @abc.abstractmethod
    def get_bin_ids_by_resource(self, resource_id):
        """Gets the list of ``Bin`` ``Ids`` mapped to a ``Resource``.

        :param resource_id: ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: list of bin ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_bins_by_resource(self, resource_id):
        """Gets the list of ``Bin`` objects mapped to a ``Resource``.

        :param resource_id: ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: list of bins
        :rtype: ``osid.resource.BinList``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList
class ResourceBinAssignmentSession:
"""This session provides methods to re-assign ``Resources`` to ``Bins``.
A ``Resource`` may map to multiple ``Bin`` objects and removing the
last reference to a ``Resource`` is the equivalent of deleting it.
Each ``Bin`` may have its own authorizations governing who is
allowed to operate on it.
Moving or adding a reference of a ``Resource`` to another ``Bin`` is
not a copy operation (eg: does not change its ``Id`` ).
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def can_assign_resources(self):
    """Tests if this user can alter resource/bin mappings.

    A return of true does not guarantee successful authorization. A
    return of false indicates that it is known mapping methods in
    this session will result in a ``PermissionDenied``. This is
    intended as a hint to an application that may opt not to offer
    assignment operations to unauthorized users.

    :return: ``false`` if mapping is not authorized, ``true`` otherwise
    :rtype: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def can_assign_resources_to_bin(self, bin_id):
"""Tests if this user can alter resource/bin mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied`` . This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
:param bin_id: the ``Id`` of the ``Bin``
:type bin_id: ``osid.id.Id``
:return: ``false`` if mapping is not authorized, ``true`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assignable_bin_ids(self, bin_id):
"""Gets a list of bins including and under the given bin node in which any resource can be assigned.
:param bin_id: the ``Id`` of the ``Bin``
:type bin_id: ``osid.id.Id``
:return: list of assignable bin ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_assignable_bin_ids_for_resource(self, bin_id, resource_id):
"""Gets a list of bins including and under the given bin node in which a specific resource can be assigned.
:param bin_id: the ``Id`` of the ``Bin``
:type bin_id: ``osid.id.Id``
:param resource_id: the ``Id`` of the ``Resource``
:type resource_id: ``osid.id.Id``
:return: list of assignable bin ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``bin_id`` or ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def assign_resource_to_bin(self, resource_id, bin_id):
"""Adds an existing ``Resource`` to a ``Bin``.
:param resource_id: the ``Id`` of the ``Resource``
:type resource_id: ``osid.id.Id``
:param bin_id: the ``Id`` of the ``Bin``
:type bin_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``resource_id`` is already assigned to ``bin_id``
:raise: ``NotFound`` -- ``resource_id`` or ``bin_id`` not found
:raise: ``NullArgument`` -- ``resource_id`` or ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def unassign_resource_from_bin(self, resource_id, bin_id):
"""Removes a ``Resource`` from a ``Bin``.
:param resource_id: the ``Id`` of the ``Resource``
:type resource_id: ``osid.id.Id``
:param bin_id: the ``Id`` of the ``Bin``
:type bin_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``resource_id`` or ``bin_id`` not found or ``resource_id`` not assigned to ``bin_id``
:raise: ``NullArgument`` -- ``resource_id`` or ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class ResourceSmartBinSession:
    """This session manages queries and sequencing to create "smart" dynamic catalogs.

    A ``ResourceQuery`` can be retrieved from this session and mapped to
    this ``Bin`` to create a virtual collection of ``Resources``. The
    resources may be sequenced using the ``ResourceSearchOrder`` from
    this session.

    This ``Bin`` has a default query that matches any resource and a
    default search order that specifies no sequencing. The queries may
    be examined using a ``ResourceQueryInspector``. The query may be
    modified by converting the inspector back to a ``ResourceQuery``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_manage_smart_bins(self):
        """Tests if this user can manage smart bins.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer operations
        to unauthorized users.

        :return: ``false`` if smart bin management is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_query(self):
        """Gets a resource query.

        :return: the resource query
        :rtype: ``osid.resource.ResourceQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQuery

    resource_query = property(fget=get_resource_query)

    @abc.abstractmethod
    def get_resource_search_order(self):
        """Gets a resource search order.

        :return: the resource search order
        :rtype: ``osid.resource.ResourceSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceSearchOrder

    resource_search_order = property(fget=get_resource_search_order)

    @abc.abstractmethod
    def apply_resource_query(self, resource_query):
        """Applies a resource query to this bin.

        :param resource_query: the resource query
        :type resource_query: ``osid.resource.ResourceQuery``
        :raise: ``NullArgument`` -- ``resource_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure occurred
        :raise: ``Unsupported`` -- ``resource_query`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def inspect_resource_query(self):
        """Gets a resource query inspector for this bin.

        :return: the resource query inspector
        :rtype: ``osid.resource.ResourceQueryInspector``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure occurred

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQueryInspector

    @abc.abstractmethod
    def apply_resource_sequencing(self, resource_search_order):
        """Applies a resource search order to this bin.

        :param resource_search_order: the resource search order
        :type resource_search_order: ``osid.resource.ResourceSearchOrder``
        :raise: ``NullArgument`` -- ``resource_search_order`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure occurred
        :raise: ``Unsupported`` -- ``resource_search_order`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_query_from_inspector(self, resource_query_inspector):
        """Gets a resource query from an inspector.

        :param resource_query_inspector: a resource query inspector
        :type resource_query_inspector: ``osid.resource.ResourceQueryInspector``
        :return: the resource query
        :rtype: ``osid.resource.ResourceQuery``
        :raise: ``NullArgument`` -- ``resource_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``resource_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQuery
class MembershipSession:
    """This session provides methods to query if a ``Resource`` is a member of another ``Resource``.

    ``Resources`` may represent groups of ``Resources`` or generated
    ``Demographics`` of ``Resources``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_query_membership(self):
        """Tests if this user can perform membership queries.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known lookup methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        lookup operations to unauthorized users.

        :return: ``false`` if membership queries are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts resources to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def is_member(self, member_resource_id, resource_id):
        """Tests if a ``Resource`` is a member of another ``Resource``.

        :param member_resource_id: ``Id`` of the ``Resource`` member
        :type member_resource_id: ``osid.id.Id``
        :param resource_id: ``Id`` of the ``Resource`` representing the group or demographic
        :type resource_id: ``osid.id.Id``
        :return: true if ``member_resource_id`` is a member of the ``resource_id``, false otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``member_resource_id`` or ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean
class GroupSession:
    """This session provides methods to retrieve ``Resource`` to ``Group`` mappings.

    A ``Resource`` may appear in multiple resource groups. A group is
    also represented by a resource itself.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_lookup_resource_members(self):
        """Tests if this user can perform lookups of resource members.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known lookup methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        lookup operations to unauthorized users.

        :return: ``false`` if looking up members is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_resource_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_resource_view(self):
        """A complete view of the ``Resource`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts resources to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_group_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in groups which are
        children of the specified group in the group hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_group_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to the specified group only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_ids_by_group(self, group_resource_id):
        """Gets the list of ``Resource`` ``Ids`` associated with a ``Resource``.

        In a federated view, resources for child groups are included.

        :param group_resource_id: ``Id`` of the ``Resource``
        :type group_resource_id: ``osid.id.Id``
        :return: list of member resource ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``group_resource_id`` is not found
        :raise: ``NullArgument`` -- ``group_resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_resources_by_group(self, group_resource_id):
        """Gets the list of ``Resources`` associated with a ``Resource``.

        In a federated view, resources for child groups are included.

        :param group_resource_id: ``Id`` of the ``Resource``
        :type group_resource_id: ``osid.id.Id``
        :return: list of resource members
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NotFound`` -- ``group_resource_id`` is not found
        :raise: ``NullArgument`` -- ``group_resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList

    @abc.abstractmethod
    def get_resource_ids_by_groups(self, group_resource_ids):
        """Gets the list of ``Resource Ids`` corresponding to a list of ``Resource`` objects.

        :param group_resource_ids: list of resource ``Ids``
        :type group_resource_ids: ``osid.id.IdList``
        :return: list of resource ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``group_resource_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_resources_by_groups(self, group_resource_ids):
        """Gets the list of ``Resources`` corresponding to a list of ``Resource`` objects.

        :param group_resource_ids: list of resource ``Ids``
        :type group_resource_ids: ``osid.id.IdList``
        :return: list of resources
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NullArgument`` -- ``group_resource_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList

    @abc.abstractmethod
    def get_group_ids_by_resource(self, resource_id):
        """Gets the list of group ``Resource`` ``Ids`` mapped to a ``Resource``.

        :param resource_id: ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: list of group resource ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_groups_by_resource(self, resource_id):
        """Gets the list of group ``Resource`` objects mapped to a ``Resource``.

        :param resource_id: ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: list of group resources
        :rtype: ``osid.resource.ResourceList``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceList
class GroupAssignmentSession:
    """This session provides methods to re-assign ``Resources`` to group ``Resources``."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_assign_resources(self):
        """Tests if this user can change resource group mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may not wish to offer
        assignment operations.

        :return: ``false`` if mapping is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_assign_resources_to_group(self, resource_id):
        """Tests if this user can assign members to the given group.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known mapping methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        assignment operations to unauthorized users.

        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: ``false`` if mapping is not authorized, ``true`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def assign_resource_to_group(self, resource_id, resource_group_id):
        """Adds an existing ``Resource`` to a ``Resource`` group.

        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :param resource_group_id: the ``Id`` of the ``Resource`` group
        :type resource_group_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``resource_id`` is already part of ``resource_group_id``
        :raise: ``NotFound`` -- ``resource_id`` or ``resource_group_id`` not found
        :raise: ``NullArgument`` -- ``resource_id`` or ``resource_group_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unassign_resource_from_group(self, resource_id, resource_group_id):
        """Removes a ``Resource`` from a ``Resource`` group.

        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :param resource_group_id: the ``Id`` of the ``Resource`` group
        :type resource_group_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``resource_id`` or ``resource_group_id`` not found or ``resource_id`` not part of ``resource_group_id``
        :raise: ``NullArgument`` -- ``resource_id`` or ``resource_group_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class GroupNotificationSession:
    """This session defines methods to receive notifications on adds/changes to ``Resource`` members.

    This session is intended for consumers needing to synchronize their
    state with this service without the use of polling. Notifications
    are cancelled when this session is closed.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_register_for_group_notifications(self):
        """Tests if this user can register for group notifications.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer
        notification operations.

        :return: ``false`` if notification methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts notifications to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_members(self, resource_id):
        """Register for notifications of new resource members.

        ``GroupReceiver.newMember()`` is invoked when a new member is
        added to the specified group.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_members(self, resource_id):
        """Register for notifications of deleted resource members.

        ``GroupReceiver.deletedMember()`` is invoked when a member
        is removed from the specified group.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def reliable_group_notifications(self):
        """Reliable notifications are desired.

        In reliable mode, notifications are to be acknowledged using
        ``acknowledge_group_notification()`` .

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unreliable_group_notifications(self):
        """Unreliable notifications are desired.

        In unreliable mode, notifications do not need to be
        acknowledged.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def acknowledge_group_notification(self, notification_id):
        """Acknowledge a group notification.

        :param notification_id: the ``Id`` of the notification
        :type notification_id: ``osid.id.Id``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class GroupHierarchySession:
    """This session provides a hierarchical view of resource groups.

    Each node in the hierarchy is a unique ``Resource``. The hierarchy
    may be traversed recursively to establish the tree structure through
    ``get_parent_resources()`` and ``getChildResources()``. To relate
    these ``Ids`` to another OSID, ``get_resource_nodes()`` can be used
    for retrievals that can be used for bulk lookups in other OSIDs.

    A user may not be authorized to traverse the entire hierarchy. Parts
    of the hierarchy may be made invisible through omission from the
    returns of ``get_parent_resources()`` or ``get_child_resources()``
    in lieu of a ``PermissionDenied`` error that may disrupt the
    traversal through authorized pathways.

    This session defines views that offer differing behaviors when
    retrieving multiple objects.

    * comparative view: resource elements may be silently omitted or
      re-ordered
    * plenary view: provides a complete set or is an error condition

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_access_group_hierarchy(self):
        """Tests if this user can perform hierarchy queries.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if hierarchy traversal methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_resource_view(self):
        """The returns from the group methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_resource_view(self):
        """A complete view of the ``Resource`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def is_member_of_group(self, group_id, resource_id):
        """Tests if a resource ``Id`` is a member of a group either directly or indirectly through nested groups.

        :param group_id: a resource group ``Id``
        :type group_id: ``osid.id.Id``
        :param resource_id: the ``Id`` of a resource
        :type resource_id: ``osid.id.Id``
        :return: ``true`` if this ``resource_id`` is a member of ``group_id``, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``group_id`` is not found
        :raise: ``NullArgument`` -- ``group_id`` or ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*
        *implementation notes*: If ``resource_id`` not found return
        ``false``.

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_node_ids(self, resource_id, ancestor_levels, descendant_levels, include_siblings):
        """Gets a portion of the hierarchy for the given resource group.

        :param resource_id: the ``Id`` to query
        :type resource_id: ``osid.id.Id``
        :param ancestor_levels: the maximum number of ancestor levels to include. A value of 0 returns no parents in the node.
        :type ancestor_levels: ``cardinal``
        :param descendant_levels: the maximum number of descendant levels to include. A value of 0 returns no children in the node.
        :type descendant_levels: ``cardinal``
        :param include_siblings: ``true`` to include the siblings of the given node, ``false`` to omit the siblings
        :type include_siblings: ``boolean``
        :return: a resource node
        :rtype: ``osid.hierarchy.Node``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.Node

    @abc.abstractmethod
    def get_resource_nodes(self, resource_id, ancestor_levels, descendant_levels, include_siblings):
        """Gets a portion of the hierarchy for the given resource group.

        :param resource_id: the ``Id`` to query
        :type resource_id: ``osid.id.Id``
        :param ancestor_levels: the maximum number of ancestor levels to include. A value of 0 returns no parents in the node.
        :type ancestor_levels: ``cardinal``
        :param descendant_levels: the maximum number of descendant levels to include. A value of 0 returns no children in the node.
        :type descendant_levels: ``cardinal``
        :param include_siblings: ``true`` to include the siblings of the given node, ``false`` to omit the siblings
        :type include_siblings: ``boolean``
        :return: a resource node
        :rtype: ``osid.hierarchy.Node``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        # NOTE(review): the spec text listed ``osid.acknowledgement.BillingNode``
        # here, which appears to be a copy-paste error; the sibling
        # get_resource_node_ids() returns ``osid.hierarchy.Node`` -- confirm
        # against the OSID resource service specification.
        return  # osid.hierarchy.Node
class ResourceAgentSession:
    """This session provides methods to retrieve ``Resource`` to ``Agent`` mappings.

    An ``Agent`` may map to only one ``Resource`` while a ``Resource``
    may map to multiple ``Agents``.

    This lookup session defines several views:

    * comparative view: elements may be silently omitted or re-ordered
    * plenary view: provides a complete result set or is an error
      condition

    """
    # Python 2-style ABC declaration; kept as generated for the whole file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    # NOTE: shadows the builtin ``bin()``; the property name is mandated by the
    # OSID interface and must not be renamed.
    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_lookup_resource_agent_mappings(self):
        """Tests if this user can perform lookups of resource/agent mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known lookup methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        lookup operations to unauthorized users.

        :return: ``false`` if looking up mappings is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_agent_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_agent_view(self):
        """A complete view of the ``Agent`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include resources in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts lookups to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_id_by_agent(self, agent_id):
        """Gets the ``Resource`` ``Id`` associated with the given agent.

        :param agent_id: ``Id`` of the ``Agent``
        :type agent_id: ``osid.id.Id``
        :return: associated resource
        :rtype: ``osid.id.Id``
        :raise: ``NotFound`` -- ``agent_id`` is not found
        :raise: ``NullArgument`` -- ``agent_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    @abc.abstractmethod
    def get_resource_by_agent(self, agent_id):
        """Gets the ``Resource`` associated with the given agent.

        :param agent_id: ``Id`` of the ``Agent``
        :type agent_id: ``osid.id.Id``
        :return: associated resource
        :rtype: ``osid.resource.Resource``
        :raise: ``NotFound`` -- ``agent_id`` is not found
        :raise: ``NullArgument`` -- ``agent_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Resource

    @abc.abstractmethod
    def get_agent_ids_by_resource(self, resource_id):
        """Gets the list of ``Agent`` ``Ids`` mapped to a ``Resource``.

        :param resource_id: ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: list of agent ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_agents_by_resource(self, resource_id):
        """Gets the list of ``Agents`` mapped to a ``Resource``.

        :param resource_id: ``Id`` of a ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: list of agents
        :rtype: ``osid.authentication.AgentList``
        :raise: ``NotFound`` -- ``resource_id`` is not found
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authentication.AgentList
class ResourceAgentAssignmentSession:
    """This session provides methods to re-assign ``Resource`` to ``Agents``.

    A ``Resource`` may be associated with multiple ``Agents``. An
    ``Agent`` may map to only one ``Resource``.

    """
    # Python 2-style ABC declaration; kept as generated for the whole file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    # NOTE: shadows the builtin ``bin()``; the property name is mandated by the
    # OSID interface and must not be renamed.
    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_assign_agents(self):
        """Tests if this user can alter resource/agent mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known mapping methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        assignment operations to unauthorized users.

        :return: ``false`` if mapping is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_assign_agents_to_resource(self, resource_id):
        """Tests if this user can alter resource/agent mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known mapping methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        assignment operations to unauthorized users.

        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :return: ``false`` if mapping is not authorized, ``true`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def assign_agent_to_resource(self, agent_id, resource_id):
        """Adds an existing ``Agent`` to a ``Resource``.

        :param agent_id: the ``Id`` of the ``Agent``
        :type agent_id: ``osid.id.Id``
        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``agent_id`` is already assigned to ``resource_id``
        :raise: ``NotFound`` -- ``agent_id`` or ``resource_id`` not found
        :raise: ``NullArgument`` -- ``agent_id`` or ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unassign_agent_from_resource(self, agent_id, resource_id):
        """Removes an ``Agent`` from a ``Resource``.

        :param agent_id: the ``Id`` of the ``Agent``
        :type agent_id: ``osid.id.Id``
        :param resource_id: the ``Id`` of the ``Resource``
        :type resource_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``agent_id`` or ``resource_id`` not found or ``agent_id`` not assigned to ``resource_id``
        :raise: ``NullArgument`` -- ``agent_id`` or ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ResourceRelationshipLookupSession:
    """This session provides methods for examining resource relationships."""
    # Python 2-style ABC declaration; kept as generated for the whole file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    # NOTE: shadows the builtin ``bin()``; the property name is mandated by the
    # OSID interface and must not be renamed.
    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_lookup_resource_relationships(self):
        """Tests if this user can access resource relationships.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer
        relationship operations.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_resource_relationship_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_resource_relationship_view(self):
        """A complete view of the resource relationship returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include relationships in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts relationships to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_effective_resource_relationship_view(self):
        """Only resource relationships whose effective dates are current are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_any_effective_resource_relationship_view(self):
        """All resource relationships of any effective dates are returned by methods in this session.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_relationship(self, resource_relationship_id):
        """Gets the ``ResourceRelationship`` specified by its ``Id``.

        :param resource_relationship_id: ``Id`` of the ``Relationship``
        :type resource_relationship_id: ``osid.id.Id``
        :return: the relationship
        :rtype: ``osid.resource.ResourceRelationship``
        :raise: ``NotFound`` -- ``resource_relationship_id`` not found
        :raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationship

    @abc.abstractmethod
    def get_resource_relationships_by_ids(self, resource_relationship_ids):
        """Gets a ``ResourceRelationshipList`` corresponding to the given ``IdList``.

        :param resource_relationship_ids: the list of ``Ids`` to retrieve
        :type resource_relationship_ids: ``osid.id.IdList``
        :return: the returned ``ResourceRelationship`` list
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``resource_relationship_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type(self, relationship_genus_type):
        """Gets the resource relationships for the given resource relationship genus type.

        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_parent_genus_type(self, relationship_genus_type):
        """Gets the resource relationships for the given resource relationship genus type and include any relationships with a genus type derived from the specified genus type.

        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_record_type(self, relationship_record_type):
        """Gets the resource relationships for the given resource relationship record type.

        :param relationship_record_type: a relationship record type
        :type relationship_record_type: ``osid.type.Type``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``relationship_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_on_date(self, from_, to):
        """Gets the resource relationships effective during the entire given date range inclusive but not confined to the date range.

        :param from_: starting date
        :type from_: ``osid.calendaring.DateTime``
        :param to: ending date
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``from_`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_for_source_resource(self, source_resource_id):
        """Gets the ``ResourceRelationships`` of a resource.

        :param source_resource_id: ``Id`` of a ``Resource``
        :type source_resource_id: ``osid.id.Id``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``source_resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_for_source_resource_on_date(self, source_resource_id, from_, to):
        """Gets a list of resource relationships for a resource and effective during the entire given date range inclusive but not confined to the date range.

        :param source_resource_id: a resource ``Id``
        :type source_resource_id: ``osid.id.Id``
        :param from_: start of date range
        :type from_: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``source_resource_id, from_`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type_for_source_resource(self, source_resource_id, relationship_genus_type):
        """Gets the ``ResourceRelationships`` of a resource of relationship genus type that includes any genus type derived from the given one.

        :param source_resource_id: ``Id`` of a ``Resource``
        :type source_resource_id: ``osid.id.Id``
        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``source_resource_id`` or ``relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type_for_source_resource_on_date(self, source_resource_id, relationship_genus_type, from_, to):
        """Gets a list of resource relationships of a given genus type for a resource and effective during the entire given date range inclusive but not confined to the date range.

        :param source_resource_id: a resource ``Id``
        :type source_resource_id: ``osid.id.Id``
        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :param from_: start of date range
        :type from_: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``source_resource_id, relationship_genus_type, from_`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_for_destination_resource(self, destination_resource_id):
        """Gets the ``ResourceRelationships`` of a resource.

        :param destination_resource_id: ``Id`` of a ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``destination_resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    # NOTE(review): the first parameter is named ``source_resource_id`` but this
    # method looks up relationships by *destination* resource (the ``NullArgument``
    # clause below refers to ``destination_resource_id``). The name appears to be a
    # copy/paste slip in the generated spec; renaming it would break keyword
    # callers, so it is documented here instead -- confirm against the OSID spec.
    @abc.abstractmethod
    def get_resource_relationships_for_destination_resource_on_date(self, source_resource_id, from_, to):
        """Gets a list of resource relationships for a resource and effective during the entire given date range inclusive but not confined to the date range.

        :param source_resource_id: a resource ``Id``
        :type source_resource_id: ``osid.id.Id``
        :param from_: start of date range
        :type from_: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``destination_resource_id, from_,`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type_for_destination_resource(self, destination_resource_id, relationship_genus_type):
        """Gets the ``ResourceRelationships`` of a resource of relationship genus type that includes any genus type derived from the given one.

        :param destination_resource_id: ``Id`` of a ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``destination_resource_id`` or ``relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type_for_destination_resource_on_date(self, destination_resource_id, relationship_genus_type, from_, to):
        """Gets a list of resource relationships of a given genus type for a resource and effective during the entire given date range inclusive but not confined to the date range.

        :param destination_resource_id: a resource ``Id``
        :type destination_resource_id: ``osid.id.Id``
        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :param from_: start of date range
        :type from_: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``destination_resource_id, relationship_genus_type, from_`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_for_resources(self, source_resource_id, destination_resource_id):
        """Gets the ``ResourceRelationships`` given two resources.

        :param source_resource_id: ``Id`` of a ``Resource``
        :type source_resource_id: ``osid.id.Id``
        :param destination_resource_id: ``Id`` of another ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``source_resource_id`` or ``destination_resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_for_resources_on_date(self, source_resource_id, destination_resource_id, from_, to):
        """Gets a list of resource relationships for two peer resources and effective during the entire given date range inclusive but not confined to the date range.

        :param source_resource_id: a resource ``Id``
        :type source_resource_id: ``osid.id.Id``
        :param destination_resource_id: ``Id`` of another ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :param from_: start of date range
        :type from_: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``source_resource_id, destination_resource_id, from_`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type_for_resources(self, source_resource_id, destination_resource_id, relationship_genus_type):
        """Gets the ``ResourceRelationships`` given two resources and a relationship genus type which includes any genus types derived from the given genus type.

        :param source_resource_id: ``Id`` of a ``Resource``
        :type source_resource_id: ``osid.id.Id``
        :param destination_resource_id: ``Id`` of another ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``source_resource_id, destination_resource_id,`` or ``relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships_by_genus_type_for_resources_on_date(self, source_resource_id, destination_resource_id, relationship_genus_type, from_, to):
        """Gets a list of resource relationships of a given genus type for two peer resources and effective during the entire given date range inclusive but not confined to the date range.

        :param source_resource_id: a resource ``Id``
        :type source_resource_id: ``osid.id.Id``
        :param destination_resource_id: ``Id`` of another ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :param relationship_genus_type: a relationship genus type
        :type relationship_genus_type: ``osid.type.Type``
        :param from_: start of date range
        :type from_: ``osid.calendaring.DateTime``
        :param to: end of date range
        :type to: ``osid.calendaring.DateTime``
        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``InvalidArgument`` -- ``from_`` is greater than ``to``
        :raise: ``NullArgument`` -- ``source_resource_id, destination_resource_id, relationship_genus_type, from_`` or ``to`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    @abc.abstractmethod
    def get_resource_relationships(self):
        """Gets all ``ResourceRelationships``.

        :return: the relationships
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList

    resource_relationships = property(fget=get_resource_relationships)
class ResourceRelationshipQuerySession:
    """This session provides methods for searching ``ResourceRelationship`` objects.

    The search query is constructed using the
    ``ResourceRelationshipQuery``. The resource relationship record
    ``Type`` also specifies the record for the relationship query.

    This session defines views that offer differing behaviors for
    searching.

    * federated bin view: searches include relationships in bins of
      which this bin is an ancestor in the bin hierarchy
    * isolated bin view: searches are restricted to relationships in
      this bin only

    Relationships may have a query record indicated by their respective
    record types. The query record is accessed via the
    ``ResourceRelationshipQuery``.

    """
    # Python 2-style ABC declaration; kept as generated for the whole file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the bin
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    # NOTE: shadows the builtin ``bin()``; the property name is mandated by the
    # OSID interface and must not be renamed.
    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_search_resource_relationships(self):
        """Tests if this user can perform ``ResourceRelationship`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include relationships in bins which are
        children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts searches to this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_relationship_query(self):
        """Gets a relationship query.

        :return: the relationship query
        :rtype: ``osid.resource.ResourceRelationshipQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipQuery

    resource_relationship_query = property(fget=get_resource_relationship_query)

    @abc.abstractmethod
    def get_resource_relationships_by_query(self, resource_relationship_query):
        """Gets a list of ``ResourceRelationship`` matching the given resource relationship query.

        :param resource_relationship_query: the resource relationship query
        :type resource_relationship_query: ``osid.resource.ResourceRelationshipQuery``
        :return: the returned ``ResourceRelationshipList``
        :rtype: ``osid.resource.ResourceRelationshipList``
        :raise: ``NullArgument`` -- ``resource_relationship_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_relationship_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipList
class ResourceRelationshipSearchSession:
    """This session provides methods for searching ``ResourceRelationship`` objects.

    The search query is constructed using the
    ``ResourceRelationshipQuery``. The resource relationship record
    ``Type`` also specifies the record for the relationship query.

    ``get_resource_relationships_by_query()`` is the basic search method
    and returns a list of ``ResourceRelationships``. A more advanced
    search may be performed with ``get_resource_relationships_by_search()``.
    It accepts a ``ResourceRelationshipSearch`` in addition to the query
    for the purpose of specifying additional options affecting the
    entire search, such as ordering.
    ``get_resource_relationships_by_search()`` returns a
    ``ResourceRelationshipSearchResults`` that can be used to access the
    resulting ``ResourceRelationshipList`` or be used to perform a
    search within the result set through ``ResourceRelationshipSearch``.

    This session defines views that offer differing behaviors for
    searching.

      * federated bin view: searches include relationships in bins of
        which this bin is an ancestor in the bin hierarchy
      * isolated bin view: searches are restricted to relationships in
        this bin only

    Relationships may have a resource relationship query record
    indicated by their respective record types. The resource
    relationship query record is accessed via the
    ``ResourceRelationshipQuery``.

    """

    # Python 2 style abstract base class declaration.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_resource_relationship_search(self):
        """Gets a relationship search.

        :return: the relationship search
        :rtype: ``osid.resource.ResourceRelationshipSearch``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationshipSearch

    resource_relationship_search = property(fget=get_resource_relationship_search)

    @abc.abstractmethod
    def get_resource_relationship_search_order(self):
        """Gets a relationship search order.

        The ``ResourceRelationshipSearchOrder`` is supplied to a
        ``ResourceRelationshipSearch`` to specify the ordering of
        results.

        :return: the relationship search order
        :rtype: ``osid.resource.ResourceRelationshipSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationshipSearchOrder

    resource_relationship_search_order = property(fget=get_resource_relationship_search_order)

    @abc.abstractmethod
    def get_resource_relationships_by_search(self, resource_relationship_query, resource_relationship_search):
        """Gets the search results matching the given search query using the given search.

        :param resource_relationship_query: the resource relationship query
        :type resource_relationship_query: ``osid.resource.ResourceRelationshipQuery``
        :param resource_relationship_search: the resource relationship search
        :type resource_relationship_search: ``osid.resource.ResourceRelationshipSearch``
        :return: the returned resource relationship search results
        :rtype: ``osid.resource.ResourceRelationshipSearchResults``
        :raise: ``NullArgument`` -- ``resource_relationship_query`` or ``resource_relationship_search`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_relationship_search`` or ``resource_relationship_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationshipSearchResults

    @abc.abstractmethod
    def get_resource_relationship_query_from_inspector(self, resource_relationship_query_inspector):
        """Gets a resource relationship query from an inspector.

        The inspector is available from a
        ``ResourceRelationshipSearchResults``.

        :param resource_relationship_query_inspector: a query inspector
        :type resource_relationship_query_inspector: ``osid.resource.ResourceRelationshipQueryInspector``
        :return: the resource relationship query
        :rtype: ``osid.resource.ResourceRelationshipQuery``
        :raise: ``NullArgument`` -- ``resource_relationship_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``resource_relationship_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationshipQuery
class ResourceRelationshipAdminSession:
    """This session creates, updates, and deletes ``ResourceRelationships``.

    The data for create and update is provided by the consumer via the
    form object. ``OsidForms`` are requested for each create or update
    and may not be reused.

    Create and update operations differ in their usage. To create a
    ``ResourceRelationship,`` a ``ResourceRelationshipForm`` is
    requested using ``get_resource_relationship_form_for_create()``
    specifying the ``nodes`` and desired record ``Types`` or none if no
    record ``Types`` are needed. The returned
    ``ResourceRelationshipForm`` will indicate that it is to be used
    with a create operation and can be used to examine metadata or
    validate data prior to creation. Once the
    ``ResourceRelationshipForm`` is submitted to a create operation, it
    cannot be reused with another create operation unless the first
    operation was unsuccessful. Each ``ResourceRelationshipForm``
    corresponds to an attempted transaction.

    For updates, ``ResourceRelationshipForms`` are requested for the
    ``ResourceRelationship`` ``Id`` that is to be updated using
    ``get_resource_relationship_form_for_update()``. Similarly, the
    ``ResourceRelationshipForm`` has metadata about the data that can be
    updated and it can perform validation before submitting the update.
    The ``ResourceRelationshipForm`` can only be used once for a
    successful update and cannot be reused.

    The delete operations delete ``ResourceRelationships``. To unmap a
    ``ResourceRelationship`` from the current ``Bin,`` the
    ``ResourceRelationshipBinAssignmentSession`` should be used. These
    delete operations attempt to remove the ``ResourceRelationship``
    itself thus removing it from all known ``Bin`` catalogs.

    This session includes an ``Id`` aliasing mechanism to assign an
    external ``Id`` to an internally assigned Id.

    """

    # Python 2 style abstract base class declaration.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the bin
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_create_resource_relationships(self):
        """Tests if this user can create ``ResourceRelationships``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a
        ``ResourceRelationship`` will result in a ``PermissionDenied``.
        This is intended as a hint to an application that may opt not to
        offer create operations to an unauthorized user.

        :return: ``false`` if ``ResourceRelationship`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def can_create_resource_relationship_with_record_types(self, resource_relationship_record_types):
        """Tests if this user can create a single ``ResourceRelationship`` using the desired record types.

        While ``ResourceManager.getResourceRelationshipRecordTypes()``
        can be used to examine which records are supported, this method
        tests which record(s) are required for creating a specific
        ``ResourceRelationship``. Providing an empty array tests if a
        ``ResourceRelationship`` can be created with no records.

        :param resource_relationship_record_types: array of resource relationship types
        :type resource_relationship_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``ResourceRelationship`` creation using the specified record ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_relationship_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def get_resource_relationship_form_for_create(self, source_resource_id, destination_resource_id, resource_relationship_record_types):
        """Gets the relationship form for creating new relationships.

        A new form should be requested for each create transaction.

        :param source_resource_id: the ``Id`` of the source ``Resource``
        :type source_resource_id: ``osid.id.Id``
        :param destination_resource_id: the ``Id`` of the destination ``Resource``
        :type destination_resource_id: ``osid.id.Id``
        :param resource_relationship_record_types: array of resource relationship types
        :type resource_relationship_record_types: ``osid.type.Type[]``
        :return: the relationship form
        :rtype: ``osid.resource.ResourceRelationshipForm``
        :raise: ``NotFound`` -- ``source_resource_id`` or ``destination_resource_id`` is not found
        :raise: ``NullArgument`` -- ``source_resource_id`` or ``destination_resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form with requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationshipForm

    @abc.abstractmethod
    def create_resource_relationship(self, resource_relationship_form):
        """Creates a new ``ResourceRelationship``.

        :param resource_relationship_form: the form for this ``ResourceRelationship``
        :type resource_relationship_form: ``osid.resource.ResourceRelationshipForm``
        :return: the new ``ResourceRelationship``
        :rtype: ``osid.resource.ResourceRelationship``
        :raise: ``IllegalState`` -- ``resource_relationship_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``resource_relationship_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_relationship_form`` did not originate from ``get_resource_relationship_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationship

    @abc.abstractmethod
    def can_update_resource_relationships(self):
        """Tests if this user can update ``ResourceRelationships``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a
        ``ResourceRelationship`` will result in a ``PermissionDenied``.
        This is intended as a hint to an application that may opt not to
        offer update operations to an unauthorized user.

        :return: ``false`` if relationship modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def get_resource_relationship_form_for_update(self, resource_relationship_id):
        """Gets the relationship form for updating an existing relationship.

        A new relationship form should be requested for each update
        transaction.

        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship``
        :type resource_relationship_id: ``osid.id.Id``
        :return: the relationship form
        :rtype: ``osid.resource.ResourceRelationshipForm``
        :raise: ``NotFound`` -- ``resource_relationship_id`` not found
        :raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.ResourceRelationshipForm

    @abc.abstractmethod
    def update_resource_relationship(self, resource_relationship_form):
        """Updates an existing relationship.

        :param resource_relationship_form: the form containing the elements to be updated
        :type resource_relationship_form: ``osid.resource.ResourceRelationshipForm``
        :raise: ``IllegalState`` -- ``resource_relationship_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``resource_relationship_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``resource_relationship_form`` did not originate from ``get_resource_relationship_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_delete_resource_relationships(self):
        """Tests if this user can delete ``ResourceRelationships``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a
        ``ResourceRelationship`` will result in a ``PermissionDenied``.
        This is intended as a hint to an application that may opt not to
        offer delete operations to an unauthorized user.

        :return: ``false`` if ``ResourceRelationship`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def delete_resource_relationship(self, resource_relationship_id):
        """Deletes the ``ResourceRelationship`` identified by the given ``Id``.

        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship`` to delete
        :type resource_relationship_id: ``osid.id.Id``
        :raise: ``NotFound`` -- a ``ResourceRelationship`` was not found identified by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_manage_resource_relationship_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``ResourceRelationships``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``ResourceRelationship`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def alias_resource_relationship(self, resource_relationship_id, alias_id):
        """Adds an ``Id`` to a ``ResourceRelationship`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``ResourceRelationship`` is determined
        by the provider. The new ``Id`` performs as an alias to the
        primary ``Id`` . If the alias is a pointer to another resource
        relationship, it is reassigned to the given resource relationship
        ``Id``.

        :param resource_relationship_id: the ``Id`` of a ``ResourceRelationship``
        :type resource_relationship_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``resource_relationship_id`` not found
        :raise: ``NullArgument`` -- ``resource_relationship_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ResourceRelationshipNotificationSession:
    """This session defines methods to receive asynchronous notifications on adds/changes to resource relationships.

    This session is intended for consumers needing to synchronize their
    state with this service without the use of polling. Notifications
    are cancelled when this session is closed.

    The views defined in this session correspond to the views in the
    ``ResourceRelationshipLookupSession``.

    """

    # Python 2 style abstract base class declaration.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the bin
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_register_for_resource_relationship_notifications(self):
        """Tests if this user can register for ``ResourceRelationship`` notifications.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer
        notification operations.

        :return: ``false`` if notification methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def use_federated_bin_view(self):
        """Federates the view for methods in this session.

        A federated view will include notifications for relationships in
        bins which are children of this bin in the bin hierarchy.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_isolated_bin_view(self):
        """Isolates the view for methods in this session.

        An isolated view restricts notifications for relationships in
        this bin only.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_resource_relationships(self):
        """Register for notifications of new relationships.

        ``ResourceRelationshipReceiver.newResourceRelationships()`` is
        invoked when a new relationship is created.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_resource_relationships_by_genus_type(self, resource_relationship_genus_type):
        """Register for notifications of new relationships of the given genus type.

        ``ResourceRelationshipReceiver.newResourceRelationships()`` is
        invoked when a new relationship is created.

        :param resource_relationship_genus_type: the resource relationship genus type
        :type resource_relationship_genus_type: ``osid.type.Type``
        :raise: ``NullArgument`` -- ``resource_relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_resource_relationships_for_source_resource(self, resource_id):
        """Register for notifications of new relationships from the given resource.

        ``ResourceRelationshipReceiver.newResourceRelationships()`` is
        invoked when a new relationship is created.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_new_resource_relationships_for_destination_resource(self, resource_id):
        """Register for notifications of new relationships to the given resource.

        ``ResourceRelationshipReceiver.newResourceRelationships()`` is
        invoked when a new relationship is created.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resource_relationships(self):
        """Registers for notification of updated relationships.

        ``ResourceRelationshipReceiver.changedResourceRelationships()``
        is invoked when a relationship is changed.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resource_relationships_by_genus_type(self, resource_relationship_genus_type):
        """Register for notifications of changed relationships of the given genus type.

        ``ResourceRelationshipReceiver.changedResourceRelationships()``
        is invoked when a relationship is changed.

        :param resource_relationship_genus_type: the resource relationship genus type
        :type resource_relationship_genus_type: ``osid.type.Type``
        :raise: ``NullArgument`` -- ``resource_relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resource_relationships_for_source_resource(self, resource_id):
        """Register for notifications of changed relationships from the given resource.

        ``ResourceRelationshipReceiver.changedResourceRelationships()``
        is invoked when a relationship is changed.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resource_relationships_for_destination_resource(self, resource_id):
        """Register for notifications of changed relationships to the given resource.

        ``ResourceRelationshipReceiver.changedResourceRelationships()``
        is invoked when a relationship is changed.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_changed_resource_relationship(self, resource_relationship_id):
        """Registers for notification of an updated relationship.

        ``ResourceRelationshipReceiver.changedResourceRelationships()``
        is invoked when the specified relationship is changed.

        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship`` to monitor
        :type resource_relationship_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resource_relationships(self):
        """Registers for notification of deleted relationships.

        ``ResourceRelationshipReceiver.deletedResourceRelationships()``
        is invoked when a relationship is deleted.

        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resource_relationships_by_genus_type(self, resource_relationship_genus_type):
        """Register for notifications of deleted relationships of the given genus type.

        ``ResourceRelationshipReceiver.deletedResourceRelationships()``
        is invoked when a relationship is deleted.

        :param resource_relationship_genus_type: the resource relationship genus type
        :type resource_relationship_genus_type: ``osid.type.Type``
        :raise: ``NullArgument`` -- ``resource_relationship_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resource_relationships_for_source_resource(self, resource_id):
        """Register for notifications of deleted relationships from the given resource.

        ``ResourceRelationshipReceiver.deletedResourceRelationships()``
        is invoked when a relationship is deleted.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resource_relationships_for_destination_resource(self, resource_id):
        """Register for notifications of deleted relationships to the given resource.

        ``ResourceRelationshipReceiver.deletedResourceRelationships()``
        is invoked when a relationship is deleted.

        :param resource_id: the ``Id`` of the ``Resource`` to monitor
        :type resource_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def register_for_deleted_resource_relationship(self, resource_relationship_id):
        """Registers for notification of a deleted relationship.

        ``ResourceRelationshipReceiver.deletedResourceRelationships()``
        is invoked when the specified relationship is deleted.

        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship`` to monitor
        :type resource_relationship_id: ``osid.id.Id``
        :raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def reliable_resource_relationship_notifications(self):
        """Reliable notifications are desired.

        In reliable mode, notifications are to be acknowledged using
        ``acknowledge_resource_relationship_notification()``.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unreliable_resource_relationship_notifications(self):
        """Unreliable notifications are desired.

        In unreliable mode, notifications do not need to be
        acknowledged.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def acknowledge_resource_relationship_notification(self, notification_id):
        """Acknowledge a resource relationship notification.

        :param notification_id: the ``Id`` of the notification
        :type notification_id: ``osid.id.Id``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ResourceRelationshipBinSession:
"""This session provides methods to retrieve ``ResourceRelationship`` to ``Bin`` mappings.
A ``Resource`` may appear in multiple ``Bins``. Each ``Bin`` may
have its own authorizations governing who is allowed to look at it.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def use_comparative_bin_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def use_plenary_bin_view(self):
"""A complete view of the ``Resource`` and ``Bin`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def can_lookup_resource_relationship_bin_mappings(self):
"""Tests if this user can perform lookups of resource relationship/bin mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known lookup methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
lookup operations to unauthorized users.
:return: ``false`` if looking up mappings is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_resource_relationship_ids_by_bin(self, bin_id):
"""Gets the list of ``ResourceRelationship`` ``Ids`` associated with a ``Bin``.
:param bin_id: ``Id`` of a ``Bin``
:type bin_id: ``osid.id.Id``
:return: list of related resource relationship ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NotFound`` -- ``bin_id`` is not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_resource_relationships_by_bin(self, bin_id):
"""Gets the list of ``ResourceRelationships`` associated with a ``Bin``.
:param bin_id: ``Id`` of a ``Bin``
:type bin_id: ``osid.id.Id``
:return: list of related resource relationship
:rtype: ``osid.resource.ResourceRelationshipList``
:raise: ``NotFound`` -- ``bin_id`` is not found
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.resource.ResourceRelationshipList
@abc.abstractmethod
def get_resource_relationships_ids_by_bins(self, bin_ids):
"""Gets the list of ``ResourceRelationship Ids`` corresponding to a list of ``Bin`` objects.
:param bin_ids: list of bin ``Ids``
:type bin_ids: ``osid.id.IdList``
:return: list of resource relationship ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``bin_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_resource_relationships_by_bins(self, bin_ids):
"""Gets the list of ``ResourceRelationships`` corresponding to a list of ``Bins``.
:param bin_ids: list of bin ``Ids``
:type bin_ids: ``osid.id.IdList``
:return: list of resource relationships
:rtype: ``osid.resource.ResourceRelationshipList``
:raise: ``NullArgument`` -- ``bin_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.resource.ResourceRelationshipList
@abc.abstractmethod
def get_bin_ids_by_resource_relationship(self, resource_relationship_id):
"""Gets the list of ``Bin`` ``Ids`` mapped to a ``ResourceRelationship``.
:param resource_relationship_id: ``Id`` of a ``ResourceRelationship``
:type resource_relationship_id: ``osid.id.Id``
:return: list of bin ``Ids``
:rtype: ``osid.id.IdList``
:raise: ``NotFound`` -- ``resource_relationship_id`` is not found
:raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.IdList
@abc.abstractmethod
def get_bins_by_resource_relationship(self, resource_relationship_id):
    """Gets the list of ``Bin`` objects mapped to a ``ResourceRelationship``.

    :param resource_relationship_id: ``Id`` of a ``ResourceRelationship``
    :type resource_relationship_id: ``osid.id.Id``
    :return: list of bins
    :rtype: ``osid.resource.BinList``
    :raise: ``NotFound`` -- ``resource_relationship_id`` is not found
    :raise: ``NullArgument`` -- ``resource_relationship_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # osid.resource.BinList
class ResourceRelationshipBinAssignmentSession:
    """This session provides methods to re-assign ``ResourceRelationships`` to ``Bins``.

    A ``ResourceRelationship`` may map to multiple ``Bin`` objects and
    removing the last reference to a ``ResourceRelationship`` is the
    equivalent of deleting it. Each ``Bin`` may have its own
    authorizations governing who is allowed to operate on it.

    Moving or adding a reference of a ``ResourceRelationship`` to
    another ``Bin`` is not a copy operation (eg: does not change its
    ``Id`` ).

    """
    # NOTE(review): Python 2-style metaclass declaration; it has no effect
    # under Python 3 -- confirm the target runtime for this file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_assign_resource_relationships(self):
        """Tests if this user can alter resource relationship/bin mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known mapping methods in
        this session will result in a ``PermissionDenied``. This is
        intended as a hint to an application that may opt not to offer
        lookup operations to unauthorized users.

        :return: ``false`` if mapping is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_assign_resource_relationships_to_bin(self, bin_id):
        """Tests if this user can alter resource relationship/bin mappings.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known mapping methods in
        this session will result in a ``PermissionDenied`` . This is
        intended as a hint to an application that may opt not to offer
        lookup operations to unauthorized users.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: ``false`` if mapping is not authorized, ``true`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_assignable_bin_ids(self, bin_id):
        """Gets a list of bins including and under the given bin node in which any resource relationship can be assigned.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: list of assignable bin ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_assignable_bin_ids_for_resource_relationship(self, bin_id, resource_relationship_id):
        """Gets a list of bins including and under the given bin node in which a specific resource relationship can be assigned.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship``
        :type resource_relationship_id: ``osid.id.Id``
        :return: list of assignable bin ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``bin_id`` or ``resource_relationship_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def assign_resource_relationship_to_bin(self, resource_relationship_id, bin_id):
        """Adds an existing ``ResourceRelationship`` to a ``Bin``.

        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship``
        :type resource_relationship_id: ``osid.id.Id``
        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``resource_relationship_id`` is already assigned to ``bin_id``
        :raise: ``NotFound`` -- ``resource_relationship_id`` or ``bin_id`` not found
        :raise: ``NullArgument`` -- ``resource_relationship_id`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def unassign_resource_relationship_from_bin(self, resource_relationship_id, bin_id):
        """Removes a ``ResourceRelationship`` from a ``Bin``.

        :param resource_relationship_id: the ``Id`` of the ``ResourceRelationship``
        :type resource_relationship_id: ``osid.id.Id``
        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``resource_relationship_id`` or ``bin_id`` not found or ``resource_relationship_id`` not assigned to ``bin_id``
        :raise: ``NullArgument`` -- ``resource_relationship_id`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class ResourceRelationshipSmartBinSession:
    """This session manages queries and sequencing to create "smart" dynamic catalogs.

    A ``ResourceRelationshipQuery`` can be retrieved from this session
    and mapped to this ``Bin`` to create a virtual collection of
    ``ResourceRelationships``. The resource relationships may be
    sequenced using the ``ResourceRelationshipSearchOrder`` from this
    session.

    This ``Bin`` has a default query that matches any resource and a
    default search order that specifies no sequencing. The queries may
    be examined using a ``ResourceRelationshipQueryInspector``. The
    query may be modified by converting the inspector back to a
    ``ResourceRelationshipQuery``.

    """
    # NOTE(review): Python 2-style metaclass declaration; it has no effect
    # under Python 3 -- confirm the target runtime for this file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_id(self):
        """Gets the ``Bin`` ``Id`` associated with this session.

        :return: the ``Bin Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_id = property(fget=get_bin_id)

    @abc.abstractmethod
    def get_bin(self):
        """Gets the ``Bin`` associated with this session.

        :return: the ``Bin`` associated with this session
        :rtype: ``osid.resource.Bin``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    bin = property(fget=get_bin)

    @abc.abstractmethod
    def can_manage_smart_bins(self):
        """Tests if this user can manage smart bins.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer operations
        to unauthorized users.

        :return: ``false`` if smart bin management is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_relationship_query(self):
        """Gets a resource relationship query.

        :return: the resource relationship query
        :rtype: ``osid.resource.ResourceRelationshipQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipQuery

    resource_relationship_query = property(fget=get_resource_relationship_query)

    @abc.abstractmethod
    def get_resource_relationship_search_order(self):
        """Gets a resource relationship search order.

        :return: the resource relationship search order
        :rtype: ``osid.resource.ResourceRelationshipSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipSearchOrder

    resource_relationship_search_order = property(fget=get_resource_relationship_search_order)

    @abc.abstractmethod
    def apply_resource_relationship_query(self, resource_query):
        """Applies a resource relationship query to this bin.

        :param resource_query: the resource relationship query
        :type resource_query: ``osid.resource.ResourceRelationshipQuery``
        :raise: ``NullArgument`` -- ``resource_relationship_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure occurred
        :raise: ``Unsupported`` -- ``resource_relationship_query`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def inspect_resource_relationship_query(self):
        """Gets a resource relationship query inspector for this bin.

        :return: the resource relationship query inspector
        :rtype: ``osid.resource.ResourceRelationshipQueryInspector``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure occurred

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceRelationshipQueryInspector

    @abc.abstractmethod
    def apply_resource_relationship_sequencing(self, resource_relationship_search_order):
        """Applies a resource relationship search order to this bin.

        :param resource_relationship_search_order: the resource relationship search order
        :type resource_relationship_search_order: ``osid.resource.ResourceRelationshipSearchOrder``
        :raise: ``NullArgument`` -- ``resource_relationship_search_order`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure occurred
        :raise: ``Unsupported`` -- ``resource_relationship_search_order`` not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_resource_relationship_query_from_inspector(self, resource_relationship_query_inspector):
        """Gets a resource relationship query from an inspector.

        :param resource_relationship_query_inspector: a resource relationship query inspector
        :type resource_relationship_query_inspector: ``osid.resource.ResourceRelationshipQueryInspector``
        :return: the resource relationship query
        :rtype: ``osid.resource.ResourceQuery``
        :raise: ``NullArgument`` -- ``resource_relationship_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``resource_relationship_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQuery
class BinLookupSession:
    """This session provides methods for retrieving ``Bin`` objects.

    The ``Bin`` represents a collection of resources.

    This session defines views that offer differing behaviors when
    retrieving multiple objects.

      * comparative view: elements may be silently omitted or re-ordered
      * plenary view: provides a complete set or is an error condition

    Generally, the comparative view should be used for most applications
    as it permits operation even if there is data that cannot be
    accessed. For example, a browsing application may only need to
    examine the ``Bins`` it can access, without breaking execution.
    However, an administrative application may require all ``Bin``
    elements to be available.

    Bins may have additional records indicated by their respective
    record types. The record may not be accessed through a cast of the
    ``Bin``.

    """
    # NOTE(review): Python 2-style metaclass declaration; it has no effect
    # under Python 3 -- confirm the target runtime for this file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_lookup_bins(self):
        """Tests if this user can perform ``Bin`` lookups.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations to unauthorized users.

        :return: ``false`` if lookup methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_bin_view(self):
        """The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_bin_view(self):
        """A complete view of the ``Bin`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_bin(self, bin_id):
        """Gets the ``Bin`` specified by its ``Id``.

        In plenary mode, the exact ``Id`` is found or a ``NotFound``
        results. Otherwise, the returned ``Bin`` may have a different
        ``Id`` than requested, such as the case where a duplicate ``Id``
        was assigned to a ``Bin`` and retained for compatibility.

        :param bin_id: ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: the bin
        :rtype: ``osid.resource.Bin``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    @abc.abstractmethod
    def get_bins_by_ids(self, bin_ids):
        """Gets a ``BinList`` corresponding to the given ``IdList``.

        In plenary mode, the returned list contains all of the bins
        specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible ``Bins`` may be omitted from the list and may
        present the elements in any order including returning a unique
        set.

        :param bin_ids: the list of ``Ids`` to retrieve
        :type bin_ids: ``osid.id.IdList``
        :return: the returned ``Bin list``
        :rtype: ``osid.resource.BinList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``bin_ids`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def get_bins_by_genus_type(self, bin_genus_type):
        """Gets a ``BinList`` corresponding to the given bin genus ``Type`` which does not include bins of types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known bins or an
        error results. Otherwise, the returned list may contain only
        those bins that are accessible through this session.

        :param bin_genus_type: a bin genus type
        :type bin_genus_type: ``osid.type.Type``
        :return: the returned ``Bin list``
        :rtype: ``osid.resource.BinList``
        :raise: ``NullArgument`` -- ``bin_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def get_bins_by_parent_genus_type(self, bin_genus_type):
        """Gets a ``BinList`` corresponding to the given bin genus ``Type`` and include any additional bins with genus types derived from the specified ``Type``.

        In plenary mode, the returned list contains all known bins or an
        error results. Otherwise, the returned list may contain only
        those bins that are accessible through this session.

        :param bin_genus_type: a bin genus type
        :type bin_genus_type: ``osid.type.Type``
        :return: the returned ``Bin list``
        :rtype: ``osid.resource.BinList``
        :raise: ``NullArgument`` -- ``bin_genus_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def get_bins_by_record_type(self, bin_record_type):
        """Gets a ``BinList`` containing the given bin record ``Type``.

        In plenary mode, the returned list contains all known bins or an
        error results. Otherwise, the returned list may contain only
        those bins that are accessible through this session.

        :param bin_record_type: a bin record type
        :type bin_record_type: ``osid.type.Type``
        :return: the returned ``Bin list``
        :rtype: ``osid.resource.BinList``
        :raise: ``NullArgument`` -- ``bin_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def get_bins_by_provider(self, resource_id):
        """Gets a ``BinList`` from the given provider.

        In plenary mode, the returned list contains all known bins or an
        error results. Otherwise, the returned list may contain only
        those bins that are accessible through this session.

        :param resource_id: a resource ``Id``
        :type resource_id: ``osid.id.Id``
        :return: the returned ``Bin list``
        :rtype: ``osid.resource.BinList``
        :raise: ``NullArgument`` -- ``resource_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def get_bins(self):
        """Gets all ``Bins``.

        In plenary mode, the returned list contains all known bins or an
        error results. Otherwise, the returned list may contain only
        those bins that are accessible through this session.

        :return: a list of ``Bins``
        :rtype: ``osid.resource.BinList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    bins = property(fget=get_bins)
class BinQuerySession:
    """This session provides methods for searching among ``Bin`` objects.

    The search query is constructed using the ``BinQuery``.

    Bins may have a bin query record indicated by their respective
    record types. The bin query record is accessed via the ``BinQuery``.

    """
    # NOTE(review): Python 2-style metaclass declaration; it has no effect
    # under Python 3 -- confirm the target runtime for this file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_search_bins(self):
        """Tests if this user can perform ``Bin`` searches.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer search
        operations to unauthorized users.

        :return: ``false`` if search methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_bin_query(self):
        """Gets a bin query.

        The returned query will not have an extension query.

        :return: the bin query
        :rtype: ``osid.resource.BinQuery``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinQuery

    bin_query = property(fget=get_bin_query)

    @abc.abstractmethod
    def get_bins_by_query(self, bin_query):
        """Gets a list of ``Bins`` matching the given bin query.

        :param bin_query: the bin query
        :type bin_query: ``osid.resource.BinQuery``
        :return: the returned ``BinList``
        :rtype: ``osid.resource.BinList``
        :raise: ``NullArgument`` -- ``bin_query`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- a ``bin_query`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList
class BinSearchSession:
    """This session provides methods for searching among ``Bin`` objects.

    The search query is constructed using the ``BinQuery``.

    ``get_bins_by_query()`` is the basic search method and returns a
    list of ``Bin`` objects. A more advanced search may be performed with
    ``getBinsBySearch()``. It accepts a ``BinSearch`` in addition to the
    query for the purpose of specifying additional options affecting the
    entire search, such as ordering. ``get_bins_by_search()`` returns a
    ``BinSearchResults`` that can be used to access the resulting
    ``BinList`` or be used to perform a search within the result set
    through ``BinSearch``.

    Bins may have a bin query record indicated by their respective
    record types. The bin query record is accessed via the ``BinQuery``.

    """
    # NOTE(review): Python 2-style metaclass declaration; it has no effect
    # under Python 3 -- confirm the target runtime for this file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_search(self):
        """Gets a bin search.

        :return: the bin search
        :rtype: ``osid.resource.BinSearch``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinSearch

    bin_search = property(fget=get_bin_search)

    @abc.abstractmethod
    def get_bin_search_order(self):
        """Gets a bin search order.

        The ``BinSearchOrder`` is supplied to a ``BinSearch`` to specify
        the ordering of results.

        :return: the bin search order
        :rtype: ``osid.resource.BinSearchOrder``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinSearchOrder

    bin_search_order = property(fget=get_bin_search_order)

    @abc.abstractmethod
    def get_bins_by_search(self, bin_query, bin_search):
        """Gets the search results matching the given search query using the given search.

        :param bin_query: the bin query
        :type bin_query: ``osid.resource.BinQuery``
        :param bin_search: the bin search
        :type bin_search: ``osid.resource.BinSearch``
        :return: the bin search results
        :rtype: ``osid.resource.BinSearchResults``
        :raise: ``NullArgument`` -- ``bin_query`` or ``bin_search`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``bin_query`` or ``bin_search`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinSearchResults

    @abc.abstractmethod
    def get_bin_query_from_inspector(self, bin_query_inspector):
        """Gets a bin query from an inspector.

        The inspector is available from a ``BinSearchResults``.

        :param bin_query_inspector: a bin query inspector
        :type bin_query_inspector: ``osid.resource.BinQueryInspector``
        :return: the bin query
        :rtype: ``osid.resource.BinQuery``
        :raise: ``NullArgument`` -- ``bin_query_inspector`` is ``null``
        :raise: ``Unsupported`` -- ``bin_query_inspector`` is not of this service

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinQuery
class BinAdminSession:
    """This session creates, updates, and deletes ``Bins``.

    The data for create and update is provided by the consumer via the
    form object. ``OsidForms`` are requested for each create or update
    and may not be reused.

    Create and update operations differ in their usage. To create a
    ``Bin,`` a ``BinForm`` is requested using
    ``get_bin_form_for_create()`` specifying the desired record
    ``Types`` or none if no record ``Types`` are needed. The returned
    ``BinForm`` will indicate that it is to be used with a create
    operation and can be used to examine metadata or validate data prior
    to creation. Once the ``BinForm`` is submitted to a create
    operation, it cannot be reused with another create operation unless
    the first operation was unsuccessful. Each ``BinForm`` corresponds
    to an attempted transaction.

    For updates, ``BinForms`` are requested to the ``Bin`` ``Id`` that
    is to be updated using ``getBinFormForUpdate()``. Similarly, the
    ``BinForm`` has metadata about the data that can be updated and it
    can perform validation before submitting the update. The ``BinForm``
    can only be used once for a successful update and cannot be reused.

    The delete operations delete ``Bins``.

    This session includes an ``Id`` aliasing mechanism to assign an
    external ``Id`` to an internally assigned Id.

    """
    # NOTE(review): Python 2-style metaclass declaration; it has no effect
    # under Python 3 -- confirm the target runtime for this file.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_create_bins(self):
        """Tests if this user can create ``Bins``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known creating a ``Bin``
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may not wish to offer create
        operations to unauthorized users.

        :return: ``false`` if ``Bin`` creation is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def can_create_bin_with_record_types(self, bin_record_types):
        """Tests if this user can create a single ``Bin`` using the desired record types.

        While ``ResourceManager.getBinRecordTypes()`` can be used to
        examine which records are supported, this method tests which
        record(s) are required for creating a specific ``Bin``.
        Providing an empty array tests if a ``Bin`` can be created with
        no records.

        :param bin_record_types: array of bin record types
        :type bin_record_types: ``osid.type.Type[]``
        :return: ``true`` if ``Bin`` creation using the specified ``Types`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``bin_record_types`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_bin_form_for_create(self, bin_record_types):
        """Gets the bin form for creating new bins.

        :param bin_record_types: array of bin record types
        :type bin_record_types: ``osid.type.Type[]``
        :return: the bin form
        :rtype: ``osid.resource.BinForm``
        :raise: ``NullArgument`` -- ``bin_record_types`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- unable to get form with requested record types

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinForm

    @abc.abstractmethod
    def create_bin(self, bin_form):
        """Creates a new ``Bin``.

        :param bin_form: the form for this ``Bin``
        :type bin_form: ``osid.resource.BinForm``
        :return: the new ``Bin``
        :rtype: ``osid.resource.Bin``
        :raise: ``IllegalState`` -- ``bin_form`` already used in a create transaction
        :raise: ``InvalidArgument`` -- one or more of the form elements is invalid
        :raise: ``NullArgument`` -- ``bin_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``bin_form`` did not originate from ``get_bin_form_for_create()``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.Bin

    @abc.abstractmethod
    def can_update_bins(self):
        """Tests if this user can update ``Bins``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known updating a ``Bin``
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may not wish to offer update
        operations to unauthorized users.

        :return: ``false`` if ``Bin`` modification is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_bin_form_for_update(self, bin_id):
        """Gets the bin form for updating an existing bin.

        A new bin form should be requested for each update transaction.

        :param bin_id: the ``Id`` of the ``Bin``
        :type bin_id: ``osid.id.Id``
        :return: the bin form
        :rtype: ``osid.resource.BinForm``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinForm

    @abc.abstractmethod
    def update_bin(self, bin_form):
        """Updates an existing bin.

        :param bin_form: the form containing the elements to be updated
        :type bin_form: ``osid.resource.BinForm``
        :raise: ``IllegalState`` -- ``bin_form`` already used in an update transaction
        :raise: ``InvalidArgument`` -- the form contains an invalid value
        :raise: ``NullArgument`` -- ``bin_id`` or ``bin_form`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``bin_form`` did not originate from ``get_bin_form_for_update()``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_delete_bins(self):
        """Tests if this user can delete ``Bins``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known deleting a ``Bin``
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may not wish to offer delete
        operations to unauthorized users.

        :return: ``false`` if ``Bin`` deletion is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def delete_bin(self, bin_id):
        """Deletes a ``Bin``.

        :param bin_id: the ``Id`` of the ``Bin`` to remove
        :type bin_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def can_manage_bin_aliases(self):
        """Tests if this user can manage ``Id`` aliases for ``Bins``.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known changing an alias
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer alias
        operations to an unauthorized user.

        :return: ``false`` if ``Bin`` aliasing is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def alias_bin(self, bin_id, alias_id):
        """Adds an ``Id`` to a ``Bin`` for the purpose of creating compatibility.

        The primary ``Id`` of the ``Bin`` is determined by the provider.
        The new ``Id`` performs as an alias to the primary ``Id``. If
        the alias is a pointer to another bin, it is reassigned to the
        given bin ``Id``.

        :param bin_id: the ``Id`` of a ``Bin``
        :type bin_id: ``osid.id.Id``
        :param alias_id: the alias ``Id``
        :type alias_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``alias_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
class BinNotificationSession:
"""This session defines methods to receive notifications on adds/changes to ``Bin`` objects.
This session is intended for consumers needing to synchronize their
state with this service without the use of polling. Notifications
are cancelled when this session is closed.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def can_register_for_bin_notifications(self):
    """Tests if this user can register for ``Bin`` notifications.

    A return of true does not guarantee successful authorization. A
    return of false indicates that it is known all methods in this
    session will result in a ``PermissionDenied``. This is intended
    as a hint to an application that may opt not to offer
    notification operations.

    :return: ``false`` if notification methods are not authorized, ``true`` otherwise
    :rtype: ``boolean``

    *compliance: mandatory -- This method must be implemented.*

    """
    return  # boolean
@abc.abstractmethod
def register_for_new_bins(self):
    """Register for notifications of new bins.

    ``BinReceiver.newBins()`` is invoked when a new ``Bin`` is
    created.

    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    pass
@abc.abstractmethod
def register_for_new_bin_ancestors(self, bin_id):
"""Registers for notification if an ancestor is added to the specified bin in the bin hierarchy.
``BinReceiver.newBinAncestor()`` is invoked when the specified
bin experiences an addition in ancestry.
:param bin_id: the ``Id`` of the bin to monitor
:type bin_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_new_bin_descendants(self, bin_id):
"""Registers for notification if a descendant is added to the specified bin in the bin hierarchy.
``BinReceiver.newBinDescendant()`` is invoked when the specified
bin experiences an addition in descendants.
:param bin_id: the ``Id`` of the bin to monitor
:type bin_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_changed_bins(self):
"""Registers for notification of updated bins.
``BinReceiver.changedBins()`` is invoked when a bin is changed.
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_changed_bin(self, bin_id):
"""Registers for notification of an updated bin.
``BinReceiver.changedBins()`` is invoked when the specified bin
is changed.
:param bin_id: the Id of the Bin to monitor
:type bin_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_deleted_bins(self):
"""Registers for notification of deleted bins.
``BinReceiver.deletedBins()`` is invoked when a bin is deleted.
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_deleted_bin(self, bin_id):
"""Registers for notification of a deleted bin.
``BinReceiver.deletedBins()`` is invoked when the specified bin
is deleted.
:param bin_id: the ``Id`` of the ``Bin`` to monitor
:type bin_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_deleted_bin_ancestors(self, bin_id):
"""Registers for notification if an ancestor is removed from the specified bin in the bin hierarchy.
``BinReceiver.deletedBinAncestor()`` is invoked when the
specified bin experiences a removal of an ancestor.
:param bin_id: the ``Id`` of the bin to monitor
:type bin_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def register_for_deleted_bin_descendants(self, bin_id):
"""Registers for notification if a descendant is removed from fthe specified bin in the bin hierarchy.
``BinReceiver.deletedBinDescendnant()`` is invoked when the
specified bin experiences a removal of one of its descdendents.
:param bin_id: the ``Id`` of the bin to monitor
:type bin_id: ``osid.id.Id``
:raise: ``NullArgument`` -- ``bin_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def reliable_bin_notifications(self):
"""Reliable notifications are desired.
In reliable mode, notifications are to be acknowledged using
``acknowledge_item_notification()`` .
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def unreliable_bin_notifications(self):
"""Unreliable notifications are desired.
In unreliable mode, notifications do not need to be
acknowledged.
*compliance: mandatory -- This method is must be implemented.*
"""
pass
@abc.abstractmethod
def acknowledge_bin_notification(self, notification_id):
"""Acknowledge an bin notification.
:param notification_id: the ``Id`` of the notification
:type notification_id: ``osid.id.Id``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class BinHierarchySession:
    """This session defines methods for traversing a hierarchy of ``Bin`` objects.

    Each node in the hierarchy is a unique ``Bin``. The hierarchy may be
    traversed recursively to establish the tree structure through
    ``get_parent_bins()`` and ``get_child_bins()``. To relate these
    ``Ids`` to another OSID, ``get_bin_nodes()`` can be used for
    retrievals that can be used for bulk lookups in other OSIDs. Any
    ``Bin`` available in the Resource OSID is known to this hierarchy
    but does not appear in the hierarchy traversal until added as a root
    node or a child of another node.

    A user may not be authorized to traverse the entire hierarchy. Parts
    of the hierarchy may be made invisible through omission from the
    returns of ``get_parent_bins()`` or ``get_child_bins()`` in lieu of
    a ``PermissionDenied`` error that may disrupt the traversal through
    authorized pathways.

    This session defines views that offer differing behaviors when
    retrieving multiple objects.

      * comparative view: bin elements may be silently omitted or re-
        ordered
      * plenary view: provides a complete set or is an error condition

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_hierarchy_id(self):
        """Gets the hierarchy ``Id`` associated with this session.

        :return: the hierarchy ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_hierarchy_id = property(fget=get_bin_hierarchy_id)

    @abc.abstractmethod
    def get_bin_hierarchy(self):
        """Gets the hierarchy associated with this session.

        :return: the hierarchy associated with this session
        :rtype: ``osid.hierarchy.Hierarchy``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.Hierarchy

    bin_hierarchy = property(fget=get_bin_hierarchy)

    @abc.abstractmethod
    def can_access_bin_hierarchy(self):
        """Tests if this user can perform hierarchy queries.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may not offer traversal
        functions to unauthorized users.

        :return: ``false`` if hierarchy traversal methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def use_comparative_bin_view(self):
        """The returns from the bin methods may omit or translate elements based on this session, such as authorization, and not result in an error.

        This view is used when greater interoperability is desired at
        the expense of precision.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def use_plenary_bin_view(self):
        """A complete view of the ``Bin`` returns is desired.

        Methods will return what is requested or result in an error.
        This view is used when greater precision is desired at the
        expense of interoperability.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def get_root_bin_ids(self):
        """Gets the root bin ``Ids`` in this hierarchy.

        :return: the root bin ``Ids``
        :rtype: ``osid.id.IdList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    root_bin_ids = property(fget=get_root_bin_ids)

    @abc.abstractmethod
    def get_root_bins(self):
        """Gets the root bins in the bin hierarchy.

        A node with no parents is an orphan. While all bin ``Ids`` are
        known to the hierarchy, an orphan does not appear in the
        hierarchy unless explicitly added as a root node or child of
        another node.

        :return: the root bins
        :rtype: ``osid.resource.BinList``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    root_bins = property(fget=get_root_bins)

    @abc.abstractmethod
    def has_parent_bins(self, bin_id):
        """Tests if the ``Bin`` has any parents.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: ``true`` if the bin has parents, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def is_parent_of_bin(self, id_, bin_id):
        """Tests if an ``Id`` is a direct parent of a bin.

        :param id: an ``Id``
        :type id: ``osid.id.Id``
        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: ``true`` if this ``id`` is a parent of ``bin_id,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``id`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*
        *implementation notes*: If ``id`` not found return ``false``.

        """
        return  # boolean

    @abc.abstractmethod
    def get_parent_bin_ids(self, bin_id):
        """Gets the parent ``Ids`` of the given bin.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: the parent ``Ids`` of the bin
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_parent_bins(self, bin_id):
        """Gets the parents of the given bin.

        :param bin_id: the ``Id`` to query
        :type bin_id: ``osid.id.Id``
        :return: the parents of the bin
        :rtype: ``osid.resource.BinList``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def is_ancestor_of_bin(self, id_, bin_id):
        """Tests if an ``Id`` is an ancestor of a bin.

        :param id: an ``Id``
        :type id: ``osid.id.Id``
        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: ``true`` if this ``id`` is an ancestor of ``bin_id,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``id`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*
        *implementation notes*: If ``id`` not found return ``false``.

        """
        return  # boolean

    @abc.abstractmethod
    def has_child_bins(self, bin_id):
        """Tests if a bin has any children.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: ``true`` if the ``bin_id`` has children, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def is_child_of_bin(self, id_, bin_id):
        """Tests if a bin is a direct child of another.

        :param id: an ``Id``
        :type id: ``osid.id.Id``
        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: ``true`` if the ``id`` is a child of ``bin_id,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``id`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*
        *implementation notes*: If ``id`` not found return ``false``.

        """
        return  # boolean

    @abc.abstractmethod
    def get_child_bin_ids(self, bin_id):
        """Gets the child ``Ids`` of the given bin.

        :param bin_id: the ``Id`` to query
        :type bin_id: ``osid.id.Id``
        :return: the children of the bin
        :rtype: ``osid.id.IdList``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.IdList

    @abc.abstractmethod
    def get_child_bins(self, bin_id):
        """Gets the children of the given bin.

        :param bin_id: the ``Id`` to query
        :type bin_id: ``osid.id.Id``
        :return: the children of the bin
        :rtype: ``osid.resource.BinList``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinList

    @abc.abstractmethod
    def is_descendant_of_bin(self, id_, bin_id):
        """Tests if an ``Id`` is a descendant of a bin.

        :param id: an ``Id``
        :type id: ``osid.id.Id``
        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :return: ``true`` if the ``id`` is a descendant of the ``bin_id,`` ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NotFound`` -- ``bin_id`` is not found
        :raise: ``NullArgument`` -- ``id`` or ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*
        *implementation notes*: If ``id`` is not found return ``false``.

        """
        return  # boolean

    @abc.abstractmethod
    def get_bin_node_ids(self, bin_id, ancestor_levels, descendant_levels, include_siblings):
        """Gets a portion of the hierarchy for the given bin.

        :param bin_id: the ``Id`` to query
        :type bin_id: ``osid.id.Id``
        :param ancestor_levels: the maximum number of ancestor levels to include. A value of 0 returns no parents in the node.
        :type ancestor_levels: ``cardinal``
        :param descendant_levels: the maximum number of descendant levels to include. A value of 0 returns no children in the node.
        :type descendant_levels: ``cardinal``
        :param include_siblings: ``true`` to include the siblings of the given node, ``false`` to omit the siblings
        :type include_siblings: ``boolean``
        :return: a bin node
        :rtype: ``osid.hierarchy.Node``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.Node

    @abc.abstractmethod
    def get_bin_nodes(self, bin_id, ancestor_levels, descendant_levels, include_siblings):
        """Gets a portion of the hierarchy for the given bin.

        :param bin_id: the ``Id`` to query
        :type bin_id: ``osid.id.Id``
        :param ancestor_levels: the maximum number of ancestor levels to include. A value of 0 returns no parents in the node.
        :type ancestor_levels: ``cardinal``
        :param descendant_levels: the maximum number of descendant levels to include. A value of 0 returns no children in the node.
        :type descendant_levels: ``cardinal``
        :param include_siblings: ``true`` to include the siblings of the given node, ``false`` to omit the siblings
        :type include_siblings: ``boolean``
        :return: a bin node
        :rtype: ``osid.resource.BinNode``
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.BinNode
class BinHierarchyDesignSession:
    """This session defines methods for managing a hierarchy of ``Bin`` objects.

    Each node in the hierarchy is a unique ``Bin``.

    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_bin_hierarchy_id(self):
        """Gets the hierarchy ``Id`` associated with this session.

        :return: the hierarchy ``Id`` associated with this session
        :rtype: ``osid.id.Id``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.id.Id

    bin_hierarchy_id = property(fget=get_bin_hierarchy_id)

    @abc.abstractmethod
    def get_bin_hierarchy(self):
        """Gets the hierarchy associated with this session.

        :return: the hierarchy associated with this session
        :rtype: ``osid.hierarchy.Hierarchy``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.Hierarchy

    bin_hierarchy = property(fget=get_bin_hierarchy)

    @abc.abstractmethod
    def can_modify_bin_hierarchy(self):
        """Tests if this user can change the hierarchy.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known performing any update
        will result in a ``PermissionDenied``. This is intended as a
        hint to an application that may opt not to offer these
        operations to an unauthorized user.

        :return: ``false`` if changing this hierarchy is not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def add_root_bin(self, bin_id):
        """Adds a root bin.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``bin_id`` is already in hierarchy
        :raise: ``NotFound`` -- ``bin_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def remove_root_bin(self, bin_id):
        """Removes a root bin.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``bin_id`` not a root
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def add_child_bin(self, bin_id, child_id):
        """Adds a child to a bin.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :param child_id: the ``Id`` of the new child
        :type child_id: ``osid.id.Id``
        :raise: ``AlreadyExists`` -- ``bin_id`` is already a parent of ``child_id``
        :raise: ``NotFound`` -- ``bin_id`` or ``child_id`` not found
        :raise: ``NullArgument`` -- ``bin_id`` or ``child_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def remove_child_bin(self, bin_id, child_id):
        """Removes a child from a bin.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :param child_id: the ``Id`` of the child to remove
        :type child_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``bin_id`` not a parent of ``child_id``
        :raise: ``NullArgument`` -- ``bin_id`` or ``child_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def remove_child_bins(self, bin_id):
        """Removes all children from a bin.

        :param bin_id: the ``Id`` of a bin
        :type bin_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``bin_id`` not in hierarchy
        :raise: ``NullArgument`` -- ``bin_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        pass
| 36.86218
| 190
| 0.653608
| 24,921
| 217,450
| 5.599133
| 0.026604
| 0.022001
| 0.043573
| 0.063181
| 0.921375
| 0.902333
| 0.880604
| 0.858947
| 0.83997
| 0.825816
| 0
| 0.000049
| 0.244534
| 217,450
| 5,898
| 191
| 36.86843
| 0.849351
| 0.726093
| 0
| 0.75413
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.295432
| false
| 0.109815
| 0.000972
| 0
| 0.595724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
dd8aa46169d081ca9180741e4dcc38e50ac7bf3e
| 3,198
|
py
|
Python
|
assets/data/edge_table_rebuild.py
|
jedipuppy/reversiAI
|
b1cf1880d0019a107eff412a85816383b0e57a90
|
[
"MIT"
] | null | null | null |
assets/data/edge_table_rebuild.py
|
jedipuppy/reversiAI
|
b1cf1880d0019a107eff412a85816383b0e57a90
|
[
"MIT"
] | null | null | null |
assets/data/edge_table_rebuild.py
|
jedipuppy/reversiAI
|
b1cf1880d0019a107eff412a85816383b0e57a90
|
[
"MIT"
] | null | null | null |
# Rebuild a reversi edge-evaluation table by propagating known values to
# symmetric positions. Each edge configuration is encoded in base 3 over six
# squares (digit values 0/1/2 -- presumably empty/player/opponent; TODO confirm
# against the game code that consumes this table).
edge_array = [0,100,-1,-30,130,-150,-1,-1,-1,10,10,-120,-10,160,-180,0,10,-180,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,120,-120,-100,30,-200,30,30,-200,20,30,-200,30,220,-230,40,20,-220,0,50,-30,-200,30,-30,30,30,-200,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,60,-160,-10,130,-190,0,40,-200,-1,0,-200,-50,30,-250,5,-50,-250,-1,80,-30,-50,30,-100,0,30,-200,-1,100,-130,-1,30,-50,-30,50,0,-1,30,-200,10,250,-250,0,-50,-250,-1,30,-30,-200,-250,-130,0,-30,250,-1,70,-150,-1,100,-130,-70,30,-200,-1,-30,-200,-1,30,30,0,30,-70,-1,30,-200,-250,0,-120,0,-30,-250,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,200,0,-1,230,-30,-1,220,-30,-1,150,-20,-1,250,-80,-1,100,-100,-1,220,30,-1,220,50,-1,200,-80,-1,-1,-30,-1,30,-30,-1,50,-50,-1,30,-50,-1,180,0,-1,30,0,-1,100,0,-1,30,0,-1,0,-30,-1,-1,0,-1,180,0,-1,180,0,-1,-1,0,-1,100,10,-1,0,0,-1,180,0,-1,120,10,-1,120,-100,-1,-1,30,-1,200,0,-1,180,0,-1,-1,0,-1,120,0,-1,100,0,-1,-1,50,-1,120,0,-1,120,-30,-1,-1,0,-1,-1,50,-1,120,30,-1,-1,100,-1,300,150,-1,200,80,-1,-1,50,-1,200,50,-1,140,0,-1,-1,80,-1,-1,0,-1,120,0,-1,-1,0,-1,-1,50,-1,140,0,-1,-1,0,-1,120,0,-1,100,-60,-1,-1,30,-1,-1,0,-1,220,10,-1,-1,0,-1,-1,0,-1,0,0,-1,-1,30,-1,-1,0,-1,50,30,-1,-1,-30,-1,-1,0,-1,-1,-10,-1,-1,-30,-1,-1,50,-1,120,0,-1,-1,0,-1,-1,0,-1,100,-60,-1,-1,50,-1,-1,0,-1,-1,0,-1,-1,-10,-1,-1,0,-1,-1,-50,-1,-1,10,-1,-1,-50,-1,0,-100,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-300]

par = [0, 0, 0, 0, 0, 0]   # current edge configuration, one base-3 digit per square
i = 0                      # running enumeration index (0 .. 3**6 - 1)
par2 = [0, 0, 0, 0, 0, 0]  # colour-swapped mirror of ``par``

# Enumerate every 6-digit base-3 configuration. ``par[0]`` is the slowest-
# varying digit and ``par[5]`` the fastest, so ``index`` below is the
# digit-reversed reading of the enumeration counter ``i``.
for par[0] in range(0, 3):
    for par[1] in range(0, 3):
        for par[2] in range(0, 3):
            for par[3] in range(0, 3):
                for par[4] in range(0, 3):
                    for par[5] in range(0, 3):
                        # Table position of this configuration with par[0] as
                        # the least-significant base-3 digit.
                        index = par[0] + par[1]*3 + par[2]*9 + par[3]*27 + par[4]*81 + par[5]*243
                        # Copy the already-seen value forward to its symmetric
                        # slot (728 == 3**6 - 1 caps the range).
                        if i < index < 728:
                            try:
                                edge_array[index] = edge_array[i]
                            except IndexError:
                                # BUGFIX: the original caught ValueError, which
                                # list index assignment never raises; IndexError
                                # is the exception an out-of-range index produces.
                                pass
                        # Build the colour-swapped configuration: 1 <-> 2, 0 stays.
                        for j, par_each in enumerate(par):
                            par2[j] = 3 - par_each if par_each else 0
                        # Digit-reversed index of the swapped configuration; its
                        # value is the negation of the original (opponent's view).
                        index2 = par2[0]*243 + par2[1]*81 + par2[2]*27 + par2[3]*9 + par2[4]*3 + par2[5]
                        if i < index2 < 728:
                            try:
                                edge_array[index2] = -1 * edge_array[i]
                            except IndexError:
                                pass
                        i += 1

print(edge_array)
print(len(edge_array))
| 78
| 2,311
| 0.453096
| 894
| 3,198
| 1.60962
| 0.060403
| 0.550382
| 0.740097
| 0.972898
| 0.5205
| 0.47672
| 0.352328
| 0.306463
| 0.269632
| 0.251564
| 0
| 0.374269
| 0.090994
| 3,198
| 41
| 2,312
| 78
| 0.120743
| 0
| 0
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.058824
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
660718d57acc1be8cf7af1d28c6e4fa502f38466
| 101
|
py
|
Python
|
utils/__init__.py
|
BogdanFloris/detecting-and-addressing-change
|
b11082ebd0fc421eca31c5ba82ddb52446fa56f5
|
[
"MIT"
] | 2
|
2020-01-24T15:50:47.000Z
|
2021-04-16T21:21:25.000Z
|
utils/__init__.py
|
BogdanFloris/detecting-and-addressing-change
|
b11082ebd0fc421eca31c5ba82ddb52446fa56f5
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
BogdanFloris/detecting-and-addressing-change
|
b11082ebd0fc421eca31c5ba82ddb52446fa56f5
|
[
"MIT"
] | null | null | null |
from utils.constants import * # noqa: F401, F403
from utils.formatting import * # noqa: F401, F403
| 33.666667
| 50
| 0.722772
| 14
| 101
| 5.214286
| 0.571429
| 0.246575
| 0.383562
| 0.493151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 0.178218
| 101
| 2
| 51
| 50.5
| 0.73494
| 0.326733
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6632535a9f9ec014dd4ec360bcfe4e4e8993981a
| 129
|
py
|
Python
|
zqy_utils/result_parser/__init__.py
|
qianyizhang/zqy-utils
|
2bbd1c6b3372814227a1e9061f34a0eae100b860
|
[
"MIT"
] | null | null | null |
zqy_utils/result_parser/__init__.py
|
qianyizhang/zqy-utils
|
2bbd1c6b3372814227a1e9061f34a0eae100b860
|
[
"MIT"
] | null | null | null |
zqy_utils/result_parser/__init__.py
|
qianyizhang/zqy-utils
|
2bbd1c6b3372814227a1e9061f34a0eae100b860
|
[
"MIT"
] | null | null | null |
from .eval_recall import * # noqa
from .eval_confusion import * # noqa
from .table_renderer import * # noqa
| 32.25
| 42
| 0.627907
| 15
| 129
| 5.2
| 0.533333
| 0.384615
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.302326
| 129
| 3
| 43
| 43
| 0.866667
| 0.108527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b0a7709db2320d6ed5e312e68e5aa251c139dae2
| 4,172
|
py
|
Python
|
src/bot/tests/launcher/test_launcher.py
|
ItsCalebJones/SpaceLaunchNow_API
|
09289068465c462557649172792ab0f41f833028
|
[
"Apache-2.0"
] | 11
|
2017-06-26T05:01:31.000Z
|
2019-09-13T18:48:27.000Z
|
src/bot/tests/launcher/test_launcher.py
|
ItsCalebJones/SpaceLaunchNow_API
|
09289068465c462557649172792ab0f41f833028
|
[
"Apache-2.0"
] | 14
|
2019-01-30T23:13:34.000Z
|
2019-10-08T10:43:36.000Z
|
src/bot/tests/launcher/test_launcher.py
|
ItsCalebJones/SpaceLaunchNow_API
|
09289068465c462557649172792ab0f41f833028
|
[
"Apache-2.0"
] | 5
|
2018-04-24T16:52:59.000Z
|
2018-08-22T14:06:01.000Z
|
import unittest
from rest_framework import status
from api.models import *
from api.tests.test__base import LLAPITests
class LauncherTests(LLAPITests):
    """Tests for the ``/launcher/`` endpoints across API versions.

    Each test fetches the launcher list, compares the serialized fields
    against the ``Launcher`` model instance, and checks endpoint
    permissions. The per-version differences (URL, presence of the
    ``launcher_config`` field, and the name of the flight-count field)
    are parametrized through ``_assert_launcher_list``.
    """

    def _assert_launcher_list(self, path, *, check_config, flights_key):
        """GET ``path`` and verify the serialized launcher list.

        :param path: endpoint URL to request
        :param check_config: assert that ``launcher_config`` is serialized
        :param flights_key: field name holding ``launcher.flights`` in this version
        """
        response = self.client.get(path)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = json.loads(response.content.decode('utf-8'))
        self.assertEqual(data['count'], Launcher.objects.all().count())
        self.assertIn('id', data['results'][0])
        launcher = Launcher.objects.get(pk=data['results'][0]['id'])
        self.assertEqual(data['results'][0]['details'], launcher.details)
        self.assertEqual(data['results'][0]['flight_proven'], launcher.flight_proven)
        self.assertEqual(data['results'][0]['serial_number'], launcher.serial_number)
        if check_config:
            self.assertIn('launcher_config', data['results'][0])
        self.assertEqual(data['results'][0][flights_key], launcher.flights)
        self.check_permissions(path)

    @unittest.skipIf(settings.IS_LL, "Not supported in this configuration.")
    def test_v320_launcher(self):
        """
        Ensure Launcher endpoints work as expected.
        """
        # NOTE(review): unlike the 3.3.0/3.4.0 tests this path has no '/api'
        # prefix — preserved as-is; confirm against the URL config.
        self._assert_launcher_list('/3.2.0/launcher/',
                                   check_config=False,
                                   flights_key='previous_flights')

    @unittest.skipIf(settings.IS_LL, "Not supported in this configuration.")
    def test_v330_launcher(self):
        """
        Ensure Launcher endpoints work as expected.
        """
        self._assert_launcher_list('/api/3.3.0/launcher/',
                                   check_config=True,
                                   flights_key='previous_flights')

    @unittest.skipIf(settings.IS_LL, "Not supported in this configuration.")
    def test_v330_launcher_detailed(self):
        """
        Ensure Launcher endpoints work as expected.
        """
        self._assert_launcher_list('/api/3.3.0/launcher/?mode=detailed',
                                   check_config=True,
                                   flights_key='previous_flights')

    @unittest.skipIf(settings.IS_LL, "Not supported in this configuration.")
    def test_v340_launcher_detailed(self):
        """
        Ensure Launcher endpoints work as expected.
        """
        # 3.4.0 renamed the flight-count field from 'previous_flights' to 'flights'.
        self._assert_launcher_list('/api/3.4.0/launcher/?mode=detailed',
                                   check_config=True,
                                   flights_key='flights')
| 46.876404
| 85
| 0.661553
| 498
| 4,172
| 5.431727
| 0.146586
| 0.109797
| 0.119778
| 0.153789
| 0.949353
| 0.949353
| 0.949353
| 0.932348
| 0.932348
| 0.912754
| 0
| 0.019534
| 0.177852
| 4,172
| 88
| 86
| 47.409091
| 0.769096
| 0.041946
| 0
| 0.78125
| 0
| 0
| 0.185707
| 0.017418
| 0
| 0
| 0
| 0
| 0.484375
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.140625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b0e61e3c271ca8a58289e52c8145d6737bfa2b24
| 2,730
|
py
|
Python
|
examples/method_examples/grid_search_tuning.py
|
c60evaporator/param-tuning-utility
|
80625f875428badac37d8439195a9327a565b040
|
[
"BSD-3-Clause"
] | null | null | null |
examples/method_examples/grid_search_tuning.py
|
c60evaporator/param-tuning-utility
|
80625f875428badac37d8439195a9327a565b040
|
[
"BSD-3-Clause"
] | null | null | null |
examples/method_examples/grid_search_tuning.py
|
c60evaporator/param-tuning-utility
|
80625f875428badac37d8439195a9327a565b040
|
[
"BSD-3-Clause"
] | 1
|
2022-01-06T05:13:07.000Z
|
2022-01-06T05:13:07.000Z
|
# %% grid_search_tuning(), no argument
import parent_import
from tune_easy import RFRegressorTuning
import pandas as pd
# Load dataset (plain string literal: the original f-prefix had no placeholders)
df_reg = pd.read_csv('../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate'  # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
tuning = RFRegressorTuning(X, y, USE_EXPLANATORY)
###### Run grid_search_tuning() ######
# With no arguments, the tuner falls back to its built-in defaults.
best_params, best_score = tuning.grid_search_tuning()
# %% grid_search_tuning(), Set parameter range by 'tuning_params' argument
import parent_import
from tune_easy import RFRegressorTuning
import pandas as pd
# Load dataset (plain string literal: the original f-prefix had no placeholders)
df_reg = pd.read_csv('../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate'  # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
tuning = RFRegressorTuning(X, y, USE_EXPLANATORY)
# Set 'tuning_params' argument: explicit search grid for the random forest
CV_PARAMS_GRID = {'n_estimators': [20, 80, 160],
                  'max_depth': [2, 8, 32],
                  'min_samples_split': [2, 8, 32],
                  'min_samples_leaf': [1, 4, 16]
                  }
###### Run grid_search_tuning() ######
best_params, best_score = tuning.grid_search_tuning(tuning_params=CV_PARAMS_GRID)
# %% grid_search_tuning(), Set estimator by 'estimator' argument
import parent_import
from tune_easy import RFRegressorTuning
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestRegressor
# Load dataset (plain string literal: the original f-prefix had no placeholders)
df_reg = pd.read_csv('../sample_data/osaka_metropolis_english.csv')
TARGET_VARIABLE = 'approval_rate'  # Target variable
USE_EXPLANATORY = ['2_between_30to60', '3_male_ratio', '5_household_member', 'latitude']  # Explanatory variables
y = df_reg[TARGET_VARIABLE].values
X = df_reg[USE_EXPLANATORY].values
tuning = RFRegressorTuning(X, y, USE_EXPLANATORY)
# Set 'estimator' argument: scale features before the forest
ESTIMATOR = Pipeline([("scaler", StandardScaler()), ("rf", RandomForestRegressor())])
# Set 'tuning_params' argument
# NOTE(review): with a sklearn Pipeline, GridSearchCV normally expects
# step-prefixed keys like 'rf__n_estimators'; presumably tune_easy adds the
# prefix internally — confirm against its documentation.
CV_PARAMS_GRID = {'n_estimators': [20, 80, 160],
                  'max_features': [2, 5],
                  'max_depth': [2, 8, 32],
                  'min_samples_split': [2, 8, 32],
                  'min_samples_leaf': [1, 4, 16]
                  }
###### Run grid_search_tuning() ######
best_params, best_score = tuning.grid_search_tuning(estimator=ESTIMATOR,
                                                    tuning_params=CV_PARAMS_GRID)
# %%
| 43.333333
| 113
| 0.709158
| 349
| 2,730
| 5.223496
| 0.232092
| 0.049369
| 0.078991
| 0.048272
| 0.817883
| 0.791552
| 0.791552
| 0.791552
| 0.791552
| 0.791552
| 0
| 0.026979
| 0.171795
| 2,730
| 62
| 114
| 44.032258
| 0.779301
| 0.179121
| 0
| 0.76087
| 0
| 0
| 0.209324
| 0.058958
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.26087
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ff03fa0e71ed501dc907ca677ca6f3f80044505
| 160,718
|
py
|
Python
|
rest-client/documents-converter-file-creation.py
|
bchekuri/python101
|
b025bb52c56ee69647310a6f883c88f80697a2d0
|
[
"MIT"
] | null | null | null |
rest-client/documents-converter-file-creation.py
|
bchekuri/python101
|
b025bb52c56ee69647310a6f883c88f80697a2d0
|
[
"MIT"
] | null | null | null |
rest-client/documents-converter-file-creation.py
|
bchekuri/python101
|
b025bb52c56ee69647310a6f883c88f80697a2d0
|
[
"MIT"
] | null | null | null |
import base64
payload = "TU0AKk1NACoAALa1gD/gUDgkFg0HhEJhULhkNh0PiERiUTikCcEVjEZjUbjkdj0Qi8fkUjkklk0YkMnlUrlktjcpl0xmUzmcwmk3nE5jk2nU9n0/hM8oFDok5oVFpFJldHpVNp0dplPqVTiNRqlXrEHq1ZrlYrddsFPr9hslIsdltE/s9ptk4tdtuExt9xukqud1vEju95vk7vt/uWAwVLweFkt7w2JoOKxkvxuPiuIyGMyWTxOVy2FzGZwWbzl/z2fvmh0V40mlumn1Fw1Wrtmt11o2Gxsmz2lg223rm53Ve3uZ3m/qfB4Vi4uQ4nHpXJ5Vm5uU5+K5nRoHT6k+63XnXZ7Vu7ud7+A7nhmXj8ku83nlnp9V29um9919nxknz+ki+33j35/V+/q0v4/6MwDATIwKssCQOiUEwUkEGtxB6uwZCKGQnCiFQtC6EQzDSDQ5DqCQ/ECLRGqURRHE8QRTDsVw1FsLxfCkYwjGcHxrBsbwVHMDx3AsewFH7/yC/shv1Ir7yO+kkvi0IASdJ8nh/EqNSbKEoSlKaUI7K0ryzLSOS5KMvQNMEwgBLExwXLczTRNMHTLJ4/n+eEzzdNU4AAD6BkBNs7IbKoAADEM+z8hdAT0gZ+UJQrFzggp/UXRitTWB6DF/SU/zXSNMUNLdBU5O6NgBStQKrLdN1LDct1TUyOAOgZgVRVh/tDRBgSdOVZ1Ujk5HxJ9SV1DyPkBJ4B2DSaO19KFj2EjtbyhXNmRIjh/S5WVStDZUoURaVaWdLluWlQFjH/Udu2nUVcIEeFgXFNdXoEfl4W7Ks23nd0wU+gZD3Pb0wXaf5j37JtooEc+Bo7fSBnvhFXINhlztDLE637dCOWJhWIo9bU0UvfGLyfblr0w0J+SheeR0k0M6WKgVIXojtiZaf9FZhMtBH5T58ZTRl6oEQN155Qsm3If5wotoU/Sbbh4IFWObVEgh8adpM7NDouaIFPmoI1cKB4pZjQ4LeOwWO0lfaLX2qzc0k43Xstg2zMVabhXTQ2fPOnbrWdAXMf9ibXNMq5pUkncDMeloFV+Tb3VkmoHgVlcPLzQ3ng+WcnLNa4Wf9n8zKbZ5lz8S7PRFq8NriM2rcnGcbVOVydt/UY+jVnzkcG57DNc65l11sS2P+d3L3OzI7Yx/A/1vfVBzZ/k/lnZ91XiBG/3HibikRv975dONJQXT+vuyOzp4MrdHFFvg/6Ho+LMoH7x9nsYqw/5/p+q9fv/H8o/Jb3/6e1/56oAnngGeSAp4YDnfgSd2BZ2oGnXgedSCJ0YJnPgqc2C5yoMnHg2cWDpwoPm/hCb2EZuoSm3hObSFJsYVmuhaauF5qIYmlhmaIxA/FcjwXIIBrxBB4L3d+RR4Tw3ygDV8AEfieh4JSdi3wio+E9OMD+7iIygYkpzB+4x86NiKjwT0PgB4/g/iAD+PN4UV4lxgjBE4ig4FKjgSwzpKUaAfxLH9EB7pFRxKVGAy5eEZ4lA/JC1h5hFRhKvGCvFUkgIsSJH+0BxxFRjqvkhFcf8jIlyQkc68ioj1jLkigQKTAPxBtOjYRMB7QJQJYlGuSGpTTEAfkTH+Vkc5AyulORF5EfZaSiVHHReA4GxskIopAi68Ily+APMAi0w2VEUUVMKZEtWaSBmDM5ns0A/jwD/L2S8tosTBlyRByU3pWsWkKRNlgH5QKIlHKVzs4yHzrnhKGb81YsTwk3EEiTRx+SpkVKKcEmWfzyIcOMf4/n3x+oFPiJcfR/iDHCHeThExh0JAPMIgUq6HSCD/Hce4N6KkSYEP8AcXh/i/EAP8fso4vT/HuCekZEV+UmV8MAD4wACABWUyak4Px8ADHAB8e8eJsyoZ+6eOqgaeqBjsrge4R6ZkQUQ0AYD31R1Np4nIYC5JsNKf2/ysJ+6xlQrKf6s6A60pUrXWqtqZK3kTleZyuZwK4xtrvXKvKoa9kOrqZav5k7AnIr6m+wqmbD2IsSp2xdjLGqNse
ruyKyLJrNsqQWwZj7MmNs2dCy9mLP2gtCQOzp0rR2ktPOiz9pTL2pX9ae1hhrYmatdbMwdtjwWptweK2tvbdW+thcC0duzQXCtDcQvtyDR3GtXcyy9yi83QPhb+6lwbq3DudZW6R8rs2Tu2am7tkbvlxvGay8Nj7yltvSa+89jb1oAvbYu95sr42Jvmgi+th77m1vzYW/ZYb/oQuvce/pk3kWaKw4xSqTlXssXgsSQhG1b4IKu4xYyTljYOa0uojz8baYVScoJJ6c8GYbe4RB08Pbb4JxGnHDTf0nYqIrgqzmLHY4uxLjBvxHFlVGtyVR1of0o4vZkwAjNN8I4rwqANaof1BQ8pQntWir8D0JcPGrJOPypj8WMnxQUbsotakuq+q7TljR9HgA4ADnU9R9ilMIfCr4oZckupVPkP3cAPn/QlJ2RsAYJzOD9QUP8wt/Zoq9YicliTxHAB4AAf6cOdD/FqYS7IsZ0zi399VGQAADz26fP2AsgaBUFnHQtK15PDB+6fSDnQfaP0jGRxgH5hVDTmB8f2GdEabdxEhSuoL1aAc7oLQ+p9D4pi1H0QAftHw8b/FnPswmn6WXXl3WmnNfZ8x3fTJew2cAH2NP9nMbqg04j6ADJuzkzz/zjMKMmYyBw600OCjIeAAafx9n/bqsWcUnXDSuMG7ADxQjAMAfwAVqg/3VFnPVGchS+tJl0B+9N3b4vJsLfjNN/EEpXUPOe4OGDAy5wndTyYw0ZD8zpvKtMn8T4cMAPmv986iy3hfYmXHMYmjrrioWqwDjAziP4AgP089C5NHccAfHWadac0AQL7+HdJ5lxfJbJsnca5zjqbaUqhUf5+uzoXRNcAM6PvVeDpyBDB6d1DWvMaMdUyAnHfvWWZdbpNMKO4wKh9GAI8kBmeuTjgDgsDGI/xg0rECAcYPUe3R37hlvEfc3ZuimSAHvAAxgKx748kB3gOkBgW4sRSogY++J8X23qd5sQOx8kxTIs3JH0a8xGToQHu+j4A9MsB+eAgJodwq8QYwA/+J8TtLxvhb2erUF61LDMtwJyEDINW+qwDe2A/FCZe0eVyX6Z10QIh/i5C0/8i+Hq1X9z3+nPh0j6IeY6KAYB3ZPsazzzxyT8whAiDEHpze+v/yNuO4s1h/v0OOJLuuv2KNlbtcADAGOyIltPHDP6iBh8FBKMgfv8gBtsPxvtwAOaiBwCMpMuKNPSQElRh/QGQHJSHGG6F7mco7pMhBwMtewNsZEJNhCBQQMxODwRvpAGADo7gCAONaQLh+AGMvgDFyIxODwXgPhAvFtOPdO3vVNRwPtiv0wdk5PLE5PMPcwgQhQZgHs1o3FyGnokMYwnADv1uLQpwPQcQrQCuDvKvZBgAfMmADgAQhFbtfIyKhlPo4QBtZvSQ0qNQ1tgtuw3NTQrgAo7Ouu8snI7w8KcknJ/lYo4KVlaNcQzFRxBQ1PUxDQqREPdwrt/Nco4O8wBxIAeRJP+qht3CLIwqeM+vSAHxOwpRPw2wBxMNjKgl2OcHkufm/o7hARVGZOYovNpE5IfmcsevoxaRCRPPkxQHhk8tTsMM4xrgHuCnOg/ADxhwmlaOYowNakpB4PLKeRfxmxavHQ2CpM6CBMht/sGReRsqMqcxtxuxVNBuYp/xxvuNOovI7x0xnxbRoxcR4OOM+sFNZo+hwAfRug+APtBgePAptoosMRGPogPw1EnNHAAOZiskWx3RpyKyEOisYqlM0SHOYAPMMyJxSngk9HTwLKTBAovSNgASOyPjfRDnhtBx4k6l1HYs0PcyVljB8AOFKwtI1nhuJwsyayNRCSOMcvyxpMSxqlPsJm/lBI+h8AHOfh+SWJLykG/phNUnOk8wRyagfybycxbi8NMk5tQq/GMrZMCq9sAwbMBrmy9Lny7K8y8Ddy/K7zASQTBK4zCSdrrTFLsS+LtTDK3zECrzIiqTJjhzHq2zKkTTLq1zMjjTGrvTNq0zOinTRpYTQqzzSjlzTqyzU
ikzWjnTPrxTVqxzXiizaiiTbihzcjqzZqwzdi1Ten9zfjsTgn8zhiezjjtiJnwEzTmznTnzoTozpTpzqTqzrToSdK4MUKviehnjJmIH9JdTuCdTvDITwD8TlzxiczyjHzzqxCJIxClT2DGz3KyT4T1CcT5jGT6qzT7z5Tvn7TxT/zzUAzt0Bz20CiHz4ikz9DFT+K0UDUGUAD6z00Dz6UEiHUFikUGjE0HjHT/UJUCUKUQUN0Jzw0I0S0RUT0FT8Cb0ODDUPK2USCi0XjC0Yq3UBUQ0EUR0c0U0d0V0M0WiaUajB0bkv0e0aUTT0UZiiUiDBUjTtUWCBBH0k0VUl0BQKCEIhlyzsiOzvB8NJk+h8h/0xh+CBBPi6UoK8T/E6UtE0OmHqCVUvosk+vhRKk9Qaiw01K9T/HcJnUtlAiB08iOU5maiCAGAfhhAPhwlK1Biu09q+UpBwAwFF0ts1GXVHCNU50tiBAfgPhHgHhjhDsqi4VIFW0BBw1OQJk0OJ1MU5JLhP1OFIBHhPhDhnhhpLVS0MCGz41cAfh9qEg/lLhzmdmDh/vdg/h7nkUzCTUv1Yk21ZgPhjhHhxtCi2VTLDUDJHBgA/AMAPAHhwhAmdxLxsoxxeVFiS0vhHolhAA+A/hnoxBHgfhjhPhxhw00C41sCHi7z4iQgAAfAIAEADhwgAGdtH1jh8NmA8KhVwV0pLhnh5OiAfAfzy15BzhPh52L001diGV+owycMMHcB8NXkpM9AeAAAcPMWGiSUv2IAfVvAf18WLWMHnWN0eUDIdB/APPcgBhBhwAQA8AfAEE9WTAeAPQm2B2HB8BjhxAeA/APWYiBAHgf2Lh52a182OCFz4lfWdWeBhhwB8AcAeAfWiB+AeWjSNJ4WWJLhzhzAeA/gP24qE2oBzgP2rV8VdWb0pMmywAHhD2vh+WxAflKs9KgWjgDqL2lW2kpW4o52J26272bUgVeFclYhgVv2/hwXA2T3CFFBAWj04W1h8BxhjNVgPhPEpWE2qW7Wr283J2OlcooScW/Bhhw3AgcFzM9NX2d3QiR0vhxhnOFAPhHRyFIXIXWi219K/UKh7ywScAP3M3b3ch8WT3eMs1CJLh4hnAfAA3hk5Gj15163kVr2siFT4hBh+AOAfWj3ogcAcFcxsg8W0S5XsB8B4hHRVAPhmE5KL3wqEW8Xk3yiEz4hg30313oXAXBWpB8X5XDiT0vh4hPAcB/gPhiCBF+VpVqXxi03lLFUDBgB4AMWn4EWwWxWyVjh4B8A+X2XE3RB8YJYKYLVj4KBDhHqL4AXyW9Ugm6AeAOYWPBX5QhHh2w2jv9Gj4XAPYJtaKE2iBHhDmBYcYOYBCET4k6Xq4SAAWgygRYyWBgWk4XAfWnV0FqgHhDAHhhlX4oi0YOkK0K2uAA4f2CuiImPsyWN6Yj3fJLh+Ywk8h4maFRqcI3YN414piDz4s6ABgHYEBwojAfqIOigBRaIvY8CRUv49w6hH4/GTSmpk41Cy42LHUUCDZKUhiD2rUi5CiDUNZRzyCDqEZUYdXKCE5SCZ0OZX0n5UlHqv5aCZUOVjZcZY3YZZ5WlH0u4c3X2tZd5iCCKFUbZc5mUhZa0lT30kUm5pz7Zqih0nDAZQEMUK0dUL5g5k0LT95niB5V5rUrZqZRZ00f0r52ZtZrz+5sigZti/5urIZ4Z655UIUpZwZy5xXzTr6B6CaC6DaD6EGT5zGXaE6G6HaH6ITrZjVI4d5/0HaF1g5yaL6A4B5o5e5+UP56Cf57C+58LJZ9aR6QUZaRCfaSC+aTLKaUaW6VUcaZTu6aUj6bZl5w5kXzaPCY6XC86YLLadT16cUo6K0faeZ35/alaAae6O6NUO6MZ0Z451Zsaiz86j016Wab6r556s0Xat0+au6d6n6mak0q53Z16m61al62a052636sa265az64ZZMyiGIk1SYHiBZbiCXUGvzsmvWoCBhPu0CB7AGaafiM6hrRUBFbz1a+VM
iNzy5ebBR33rh/h32pCC7DUz7E6/iCocCY7HkQ0Kvp69tcbK1NO0iD7Mlyy6CCUqZmiCbQHnbRB/7F7SiXbTrUT/BgtIiF7KCWTyqICC4AH1CFbaogFw00be7RiCboiWbfrVZZBgp7CFbiiVzy4ZbkiCPrqv7agziC7n7GCCbebGiMbrLX1tIkh+h/k5GDmGb4kpBObWb1iIzyhCCD7lbqCC0qB+by7wiCboGC71bTaqE5YCgOAYA8VwKrB/AMA+Qfu+8JgPvd5eUvCBBGKPtJpLh/3UY/YKB+A4gP4/I+4/AcXuGaAxh+Axh/AYh/2oKEV770FKhnhxhhh4qLh5ocGDhxgMB/0qCTb21+cGX0gQA4WBs1gAA/AIWFmTNcAIW/B4cN3sB/8PF2KhlY3UN3nkg4xuox8yAQAPGcBxh+AgA/AQAf2oM1hAcDuRgAhxAAh4snocM1gAgOA/8iiS8j0K8GgABwKhcn1/gAA4RYtcSPBDhw8sbLCBAHAfhw2/4y3Tgfuh8Sh4gDgGAfVEVEZI0qX7QigfADAPgPWXg/gMIoptgwh+AYWfh4AYB/AEA+A+AIVu8+bWiKdAbghwAOACBwBghwAYWzgPABB4AgB8AYOeAB9G9H7XcaWKVphH1PhPYD4KB8B4B/X1311PAHhH9RB4lFWjhHgPAeA+AfYTocB4h4B8h8WwYUg+Wz9bWnWoddiJ9e7IgASJ3a2wX1VvhwhwYUtcJU3a6/YKAf1qVQ1pBPW31G+BlIAHA/2J1QBn0qB89x83VP9zg/AOVPbGVUg4h+dXB4B+A8B/AOeUAMA/97iT99YPgASIBhh4XNHk2jhwh4A4a+AHhghh4W21lIVP4KYKBz+Hc+7ZB4lIXiFIBn4Mh/h4h8IcXiBj9zg/8+cD80g4mjB+dtg8B/+VB/8h9TotiEeYUpFboklb+bXLtaAAeR7WBghw+gY8lIBP0qNcWqej0qSPdxg/hHNPV3+n+o+p154k+wA/+s8Tbdh+elev+w+x7cCSez4dpEokliXbNHWjlb9Xe4+56/ZN08FqgD4Ye8eoZNt7lR150qHcV3eqYkw8fE7GFbgx/GeletgOet/I+yiD/KbsJqliXNdj/NgAAweee5Ka2HfQh/tVyPfS+idx5AXOfVm6fXfDXcAOWaocfafbB/fcetgM4KcPeX8FvDJqmBXNeAAPqL+bKFBgh4fk210zdq4KGX/nk9dxl48+fmVph/hziAPw+H9HD9jh4cv9+H9Pwo/sd/uOIvx4v44P8ORcMP8Pow/v+QSGRSOSSWSveTSmVSRwSuTP6PsGFD9hv9wPwOD4PB9wv94PwPv4HsF4IeXUeVM9/vh/sdHvcPu8Pv9PDw/o+OP94wuQA4fv+DVhjv4eD9HB+DjiHQ2FxKKuOKP48Ri5xsPsSkXmT3q+S2+P+YP+ZPwfuN4OA+Bg/TtwvB4HiggdAvAHz2/3mlUynONHscPueq1esxU/x6Cs+w02yD9PB9nh4eRgf2w/3A4vwx3G5hy6n675e+SjgS6/XzA4MfuFwuAcDwcTtwOB4DiggNBvjK8Oj0p8xFH5zPMfQ1OpxWzedH6mlBwfztH7A/+ypwtwvit7mK3MeHiyD4PuK7SVuFAKTQAvLjpmeAAnAAAeAAncFHwACggAAbsMtAiTKUeKImPDxDkeB5PB8/zRn+D0UAenbUqwDgPB8B4PxeHidPmP7HHwfC4IqPg+B5HoeAwnkMwFIiSwMpEEMIfABnCAA+QeD58ACfgDKCCADqBDEjJFDZ/uWA5wAeYERB+ACPvKf5AA/Cc1oMQJ/qwBgAB8AD2zNM0bSmfgAR2fw/SfQB/QnLcuJFAdDJsy8lB+fjKkCfwBgcD5+AOf1Jn8DlKSHRKQqUeatHgD54B+cDWB/OETGAP5AB/VaDJkhpGCAfwQROH5AH+YDaUrSM/D/Kg/gGf4Ag/DlO0PZFFL+wKSUKkygWUkClUShZ/K/AgP1BaR/0RQ0kKPZqR2
ekrCW5alDWtbDhn2jiJW5b0uXAl1xJFciSHxddkXRLl1QCflrnPblu2VeaV3qkN7pHVtz07fztH4PyDYHeMjYMlWEJBhSREAj9pX5I2HuHQY/AfimC0Xj1xqPXOG2qP9rwIAGS5PZGLpTjOBr1kGdZ6v+KyJm6X5Vn2d6Lo7g5RZmiaQl2eabqCR6BDOhJLnOopJp+sajqcCaqkmr63T2xbJgmbZTssNbTreuwDr6R7DsmtbXnu2u1t6RbjsW57pmtO7wkO9a3vm+2luzh8AkHBaxwnC2Rw7gcSwGmbpxvHUTyDL8lQYAc7z3P9B0PRdH0nS9N0/UdT1XV9Zz4D6U43W9l2fadr23b9N1+z6Xy5/8t3sjcyv/N8ptff+BDPhL7tHL+P5EA+UvXid753nuH6K8+n5vrb9RPtcd6vuZ/2C9cXqPw/EvXsKR7/C/R9KkfWo/2779/4Jd+TieZ8H707/JK36OVf65h8iB3itpftAMkz/yVQBeNApLkDCUwOgRBB4MBUkwHbLAmCyyXdnGg03KDryYMLhhC3uEaBIJIFf2+6FL0ISr0hO4OF52oVpHha/WGr14YsHhm4yHZwIbkshzAKIL44PvlY8u8lI8CSk9GAhMkw+A/lAL9FQpEHIUxDJG9NQcIYnLOV1FIksVIrEgiwduI8SG/toQkvokC64wsriiVNscaIqn/jxFmNbSYkwGJ9GQkK5iQRzXtGOOxIGTR4jOUuH7vo+vqh6xgj48AISJJBIQn0TyQO6JDIuR0jWjSRfjJNnBHzESgkzHKTg/5PSKJDGaPRfItQji4SJ6YwShAOTi74eABymD8Iu74gRSzLOvJqPcfgAWYg4jyS0b4+EfD/HyY0lLWimEkHOVgkAwJMEpO6SmOE32fS3JDLksADAeTMABL+bw4AIEKAAPwMCrgQydH+ABNQfAAy/HwAiZ4/gAzSBgq4MMhiRzYcpNskIwAAlHTtOKfJUx/TkbrKZoY/yaj/AQDieYAA4AHEAmOfSfA+BAA+ICh8rk+KAnWOAAY8EzxWQlNIEFKQQ0IS6SYeFC5uRjJUqBQcmJwxxomYCizOpzEgenRsAAOAQB4AgDAA4gwDjAVyPgCAPACAPEDStLICAfAIA4gsQKDJnjwqiHyrtX6dR3JIqSbVPxjxpJKRIfwBpVLHqMthaLTalrLhBRqTocDHh4LKA8Q4gRgFLsQB4B4A1UzAB5ZUH4DxhjBOjM8eI8JpWQMlW9aZJh4xwH+wIkIx5NEkIkQsoxIa+FgtkUupL3VvtoEHJ2zyjbEjDEHFcHgHwHgHJlK4fAPlND/swOAxszxwB/mlcK4hSSTDhtNagkBY7akSKZRskFsQfsxto1CwL07cyuHAvkfBZQP2+jCPCxIB5jj4MXQ+xQ4BgyyOlMAPll75XUruVMht4QfzbK4RADgdyFxOIWahjpTBzsBE+MAGI3zu4Eu/bVeFGGrExk6gxXCNEKgHjCg0H4DjsyupkB5M4DxAAAGHfpCN/cUMba0ONSiFsSAPm2OAqeCAg4+yEB8ZlIg/rHl+OYAeFBDjAB/hhL+GnDYcbBh4P6YUGoNTqAPEk8iy41t0hNCeLsYRUorekAE0sT4pbUSUYYjx8U5CGOAQ82xgsmyBncYZQwHjICGx1Y44BBjkBDhQA4DAP3hDiSDHF5MqNww8B/LAOAOYiy5E4fAGMv4pxIm0B+ZL8swP+nwPF/bh42JMU4eA8Rwjj1dNsQ5RiIFlGOiAQ6Hhg6uK1ow5eqx4MSAfeFM5TcpLKvK2iXWn0ODwR8D8qYh4nGP1NpyLBlLFGCzNHoYAONTX/mvUYkQxxnjjHwMMeIxx4zbvaU0fhZRHjDE+ZsYY8yJLv3QfWzrEj0j+AYV8Q+xXH6PbzKgoQDx4D72ZYkX4hyW7SsuUOTtxx/slswO+5pQhwj9Hxty/1xW1KVJGZ0cY+Rhj5GOPndREookDO+VgR489zb1uzNU+0YT0j8RKI/gD/uBOBlQdT
gw+A4btB+I8QYg7G3pvjZPnAGOHjD16zAyV6eOAPEGZPb4/FhkiEeOciREOTzbMKrpYIDxx255fujmRTR8b4jDsGWAH+dwEj/BmQNwh4BwDwHAHgBxD0in13mxIAKwE4RcB8AIwbNbD8J3nUtl6VWikgkwkaFewJqACOcgvXyHIVsb5YeJbiQYv3JZ0n3cE4rX7mobY7vI3gPpqg3JdV59ISsTSqTqg06+IphTIj6FUJePTHP3b/lCReWkhSrzXYyIELQqZRCoz/Q67TUn0+yHAAHpJAI/1Wju6wmnkH8B6fE+AeAGmKeBCgCWJGBPeV0+VWKoHghYBhH6Rp8zUA8cFB9v/z8qAGIgKUKw+W84IWK+UqKm+k9EHiKw9KIqAe+0TiT462aQ9asGoEcUWG2eVIp6JAkSQMTgsa3I/GjsVMH/AoY0uo+McUOS84KM80A/AKD+jsAGZNAU12HiKNAcMI+0KvAm+8jad4aiD+1Q60byB+9CH45OGOH25SKaLWHeG+zeHvBuicHHCZB2KuUa6LA9CAe8iKaK28bUH8leWuHg3IHG3O3S508uIWO+7MO/BuipCvDO+vDcwIkW9Wgi56cUkeWQ6uwAkwIW7amq5gHmm2AfDaD+zqO+zq+kVAKAEO5iHwHyPM2Coq7kKysBD4cmawGBBQayXKugDgDjDOHCGGm2EGzwIcGG1vEkEO+kp6UG/01cRyBiUy7g0QPJC8tvCEagHAlfFCJIIWH4Dwx0l+m2m9CeIW/Q/0AfFi/sVGAO3IHwBiWIGOK+VbF3E2++hkawrk2+ZxBIXykKaYp6XzA6IcJ8ugjsZiIgm6bFAsiUeolIlLG6h9HpHqfxE4fMaglqg7HkkAe3H0SLHukpHzIIJTIC7tIHISgXH5D8abH+gtIW/BIbIcJJIrG9IvIwg9CCsHI5I6JBI1HxJDJFJJIPJNI7JQlPIRJEJDJYozJVIxJiw7JdJebNI/HnJnIdJqyrJvJfJ80hKBJPIhKJJXKNJ5ITKE4HKPJpKSf5JxJhKghdKlJHKoh1KtJzC/F9KrKtKY59KdJ7KwiNK/LIgfK1LBD7LFKXLOgrLTLcg3K1K3F7JBKjLNINJbKVIJLVE7L3H1L7H7IkYGqKd7MDIiaQmwJKoYm6pWJWY6JUAAleK4jKn0mo9ZLihEjKp8obMsJMqEjIH4JGF+nzBRMoXxMstihJLzJlLuJHHSJFMYKbNgtYIUr0jQJGK+A+VSkygOQku/MxNZJtJVHBNi3CtWJGtaYmKXNyI48kJGr4Y2MvMPKOtKm03CoqJSu4IjNwJFN0rsJUr448SJOpJUHGK+wGJgYFNFCeA+mUIVPgLAIkicWOKYQHN0IWJQOEO5MuH6+oH+EjODJ1IFNcJExwUqOwOwx6K+yBA4VGyIyMQ4HiHmnoBwGAIWA4deUoVYV0YiAEB4DAD+VWBgyOJDQDD3OFJ/PMSkpixIAPGSx+IwyCP/GcGQAG0AJ9QoDABwEAIHQyI4WuTOSeSgB4CAVwB+BBCIG+JBGtRRQHIZQKtSEeHgDiHAHGHDEZFVCeA4ASz2EfEkTGGG9EHxQoDw0yKs0TSAB8AwRIR+BgB4UmSsAEIideH80XSdK5LtKqjgM23K3TEOEe3i3YLKGeKc3EHPTE5lTJGKvpTRN0H8B8D+A8B+P26I7kRCJsofTtQFTzJ2f45AS63G5I5M5Q50JqiiKsE+HGIa5e5jO5UXTMD9UdSAKsI8H4DyNXUmM8MEsaIpU5LrU8hdBW+2GO847C505USeIY7OTiHm7VVfTKioA5BnUeNiLPVvVyIMKm3ImpNUhVMyhRWGTi69CeodCcoc+dRuTVAA+mu5WiPjWpVoAAAcAHWwKsxYAOjsIWHnW8hhRTKGe2/8+PABCe+U83GWD+88TYAHBuQ5VgKZWnTSKCo8AeSeDiLIPjMlX0D/X5V+XlDAgrYEJC+RAEtPYO+aD+K+MpAS+nYdWiIxXjYmm6B4DhYxWnE1
GYoigvX/Kae3XEMLBdZNBjYQjsAPBtZbRzUZZhYkH8mcIUP3ZtN1N6xBTxWBQIhdCMcDCQLgHO5MtPWRYQIao0KxCqK0K3TNaXPwIGMAN5YxUmKyrrOBarY/K6h1DJCO9CR03PVKM/YQO/Bi5fDlCIHiHxTMD/VmUoIGEYIwD5bdW0IimzX6htXAhpE1D6PtEK3NEREUEOM4HHc7BuMIGGs9TMB8RpYkHwMIA8PiD4sRUkA+3+IijDckh5Z5LDJNEHSpFNFQ6NFWIXFaEOEHFa+kueKEmA70BwMUU0de8IUuRQR+QbBnaMPKKJP/PJcoiBGGuhGMHwxIAHRhYRGAGATCAPGgny9gAIDwAwEAoE0lNKnyTMB4pdBmTWKmSZPpY8YtZBLlGGIUZKKBQUEeufYQVEVMVE+kK4ycH4AYD8AwIkAGLQqOVyECIWEFTQVWPm4NetNXSfItSiW4myXotMyndtLXL+S5hDPCvEovhLL9g+WlhSJTdTMRX9g7I3heWVhiimDxhpcnhbMFMSaPh0XLM9hZhtJLhwWRiGbBFAttbpT1KzLxiPJTiTLbh/h6Z9ImghPLirL5ewfPLni5K9KljFijjJi/H9jDjRMHLhivLZi9jdhOlJMCdxjrjtjvjwdtGDiefLjzj9j/kBj/j3f1brLLjPjji7MBjXiDjbinL1kTHrjLkNJxklLRilU7avjNkpkWaPi0gVkrLfkvatShjHk3kRlLKDk4aLk8gHlBf5lFj5kzknlTlPk1lpkdNblRKLlrlnl3lxOHkhjnlVizjVl5ktkPl/RVmCkjldM1lhkIsGiikfGUZG53lYf7OpmkKQiiO1OyjVkbkw7tm0JDbEJLnGJVMKW2qQj5nBlG/BnPm9nNZ0JUS2u9niJXmufvOpPHnWJMvy2LnqcDmtmLmTKHn5nuJGGCUcJdoCcVoHnbli7suKIgHOSYEOHCHiHeidP8MEKEKYEOE+KULHdWH8TUBhPeTOKwLOlVHCbhMtPYJMp+0YasXWYyTte6JsoiGILmqVKMJkoEUHGNouHi6QVKKmJkTGA+EMsiUGH8SEMoEABgGCTISemYAEuFm/pcZNpgJLplO4bBpqcpMkQkD/apfTiNnC/AMGAAUGBwAHqGAG08MElcpGEIAMSoZmAgTGpEBgsYAOAADwo/qwacatMlPjpjOTppdu+OoGQmxAH5rPp7jciiH4AgD4AQAxreMaAGAEAeEEdeJkqq8AzSTpMkGDRuBgAHs/sAqikvpYzabg7Hq4JJq8iZCPsWJCVGKY7MkzdbrRndG8ijcKR8J3ouHgDs0/AhrnaNeCEAOkAgB8AO79uMBm1jdGsPsHnxppdoJDtrsUJUSlo+XfEHt/ojnezSv6H5uKMaZK7+JBtCswEOKI2ZuKtyMa39dG42D5oftjiW65sTrAJWUoLnTFN7v9fyaCbQii2YB+HxvWHgoqEPMhvhFbvmH9uKycualc6gHwBgN9nZv6HGF5oyVAHmH8D2W0H+EcI/PQ3IHPpCd8HwHcsuIgLkW2nDwGIiXeHgAYLnq855jcJkpkVxweT4pFqOH+AMQrvkMOUvdgTIual+GGQYBhoRpbCOHwHEAZqIVWEAD8DC5eH+EQ9gSyAHDSHMAOmYoGEEP8TOLksaV1PwLm+mJ9x6mXidmhHnyFtLyLMlfHrnyVreKIHByc4YSap7fHymAByrv5yxy0HiAiERTWB8CC66TU4YEGHgCC1cHICEzSBgHxgsB+AQvCDwEQI+AIPJGKK1x3x6HwVsw3yCkCB4AZz6TCWGJkAdyX0GD8Um2gACuaEHElypw/qzCOmqH4MesqMWA/0teE6gMbCuMaDhdSDwD8BwD8P8LkLKI4ZNvD1Zt7TNPByBoLKaJkKYvXwfOzCM2S2fvn17dgoEuasU6gJ92LsItnBYOV2SYjXuLQYFc67UR0EeMaRyLl2uJgKD2sK/C5t0K0QYIVdaK3iwLzn2KWD73TdgMaKEF+G+ty2S
AeG/yZvpdgHeGD3nEk4Ouij5VAcUQqR2YiK/V0KUEOHmGeHwHONxBiJ+MAP4BiK4LlZS+2nyoH4epL4liAjZrTG93OD424J2GOmsAfqWJqKJ2bwqMcOfdgvlvY6tvyDw6AjVaz5c7MIp5jwuwLAFAYHwtUDEML55GKH8BHOz6CWwZNrH3B4iDxxNhHbnzykB3OOaOeAG8Uiduk78kQ1j5GTpdheEHBRIAOyWDg70zGjVWGZj5gWuHBUmoYEf7WtUDBBiK2TUP4oeWGLktkMoKyKZzoIEDxaphJ3LLDrWSg/MocicpGEAdeiiVc78KIQkN8ya/PRIEAyXsCT5uzBVBR8x7NUgADUnVWH+E/8+nox4K2HwAEP4n0VX9QK++z9WS/vHdb9g2Np8MAZmAAsiHABiicneZNm4TFyYT5wil+HhRIGAzmSgH9qvteJKKUIA8AG/4I/38P3+4348X4/h8Hw8P3mn3+j3ij3wx34YUO54Y+B8eH8GD+8B+/jw/4QD0e/w+/3xCXHBH4fDxM4LOZ1O55PYK959QaFBXBQ54/j/OQDFX+x3hRJLCILJpzA4I+A+/KS8Zg/oGD6NBGfMKtBIPCYXDR4f4i85fFkO+GG/DGx48/HhKH9a34H5RKoPLZfMXHOJqeHDYcVPaBi8XRcdBqTkcplbG/LLBoRCoZe3/EXjCEe82O+IUY2Hd3jeh4Pz/fpTJz/gpg/2Hhptq8ri8bu6DkMdSN9w6NY38B5zZ85DRxnx++YQh3Gx3i49M4bvCpEPg4P9hKq1tJix9weIZUuJPt76Z1wMXwvZ8ZzY5dyc3ab2foi+IQg2GYZwnCeBxni1R8JEH6Iu+k4frAwbjvKeCasm+ScvXCr3MU+EKvi+idOU/AeBwiKtH+SADnAAZ4ACcZ4LufB4QQB4AJO2J/g5ByYAAB7ynwAESw4n8goJDKww3Ibhw8+y0M6HwBkcH8SkwB54AOfABusM6GH4AwcIOQ5AD+v6EEAY76x8H7yn4AMgSHC75SKo0jyQ3clKGlp/xKfgfqwfiwJ0l6CsSnRhzk5E6H/N74zioc50QyM7LCmNH0pC0kUYoVHUqsNIqNSdN0pRT2UwoNNVAoVOqHT9TzpUT01In1TVYntUqEQNZ0RVziVgntZVwnVaqCQdfyRXTh14o8KWIn1gp9M1lw5YzfWQndfWhZtoU3aTd2pD9lWzYFwWhbbK265Nv3EsV02JcjKXMgtrWXbF12LS7KXjYl53pIN2sjd6zXRdN9X3Ct+sdf7JYJdWFVBgzH3vgNxYHhj04cxWEXxX+J4o4eLLDjGI3BjeON3jyjZBhmR5IymTKHlGFZVlbHZaoWX4JmOZMVmjf4hlOc1bezI4zXGcZ+oedp9m196Loyg6QnulXppmmsZoLg5DbOp6onenp5qN161rdLSHr+BbE+Wup3suJbO+O0vbnuYba9m3pzteRbniurPfrFr7y4m6qJuOb7/ju9w1vt5cK33AyJwel8XkvDyNxN88iyvGn/u+sp6fl6HPoGycfqWoT+nr0ILPCCq4nlAs0nZgAGfkaHgACXkGtc3cnOXK40npAUP06d9UgnWJ31yzp0YEfx32vb9zfnd0b3uiJ6AbMp31CCeIf/jUBeHtH+Z5wD4PgD+MH/oWj6VM+pWdUg/QfhJ17nvJz11aH4tZh/RNuCvsVK+5Vj8H7E6fC/Unr+CeDPL2H8cLrCDv+bRABWMAlTqdIOVwihYyKOgGeRIf4+3UlJIQJ91gz3QOgHuS8eZBR8kEhe+KBo8YIJRD+6B9bomhQWVA/BPIByvGYHircAYPxAB+BAB8rgjAPj4AePx84+A5gDVuj8YYHxHjATEDwMBSRwEIgYWuGhVwDlrKXDlILm2/E8LAldNY+AAjxAAP4AIPwAA+BARYf4iEqJWAGPEeA8gAxzAAHwQYHwPiAJODwICZCXxhD+Ph1g+Iyh+AA9GHTV2fE8IGPAMI+AcSeHgDgPgMAfA8A8AQ0Y/x
AiHHAMOTw8UBDgAwTYHgCwPA+IeXsBxLwPFgGe/qSMNQeB8JJGhDkanFEqJ0IB7skh+BxRgPwPA/AfIJiwTM6RHh5ySlgPB8pewfgcD8d6cRL4sPiHwHySLtUxPpD5OWZCGHRtgh+e0A485oj8HCPwOE1CHA/E8D88g/xDjhKcdYOIhx4j4JrA0DBwpzkqLG8udiMAATvB4SmBTepMt8Z8lcnSVyFBxLQHBMY/6Bl2oMPE0tJQH0NK1A0CI/iwUTfEP95ZWqMUao5PNOE9WzUiOSAEcAAKTDjH8HGlJERzpmoZS+f0SqHB/mEAMrxBi1qBJa7GnkxA8O2qAouoTbCBNqqOGM21S6lkqEARE6ZFSLkZn8A+bpWgARmH/RmBpLxgTATzMMsz6Q4Vif/R9xDPqzk5JKmYmYx62UmB8ACgY4yxkWIwRoOE+aHB8AEWuS4wJFmvH+IAJ9OStRjM0Dyk0mI01lbxUQmifFClcHPWwlJraBjzJbQwuJcw4jHruHgPxaySF9r6P4D5YzLh/tUQcHgcHdWIcoz4zB9jTPdH8GOaJfy10DHiRSqJppojjuHA0DhSQH3JoJai51YLp2vh2z4492J+hxI2QsvRzTQXipcPkzl5qHVMLWB4tYj7kh/EPe5Alg6Nn1sPfKTTcsIFmiUOBA5eJJEouKfpPhLzpP8HDJIeNw5hAeIjgirZBzkXNwbau4lHXAQUV7DxTaqQAABHgHCUA+AwmrmMDyVCfD+iDlfLEfFww4VbIhevFYHsWp5AA+fBwfAOTXwjMm2DnCeDASsmzMBq5LWTRIQgRAByBRwobXcPleTXiPEAX6rde8ouziVg6S08oJ3Ud5JsnY8CsnIiA+cpAgQfgBSgnsf8TE/HHIYHOhwfgB3pA+McYFWrSJltRJeYgfwAvJbdjRZOfjdwSaa5mZTljh6maNqjLcazfKra3q6+eFDfCwbnrTCbhDhjB1zqJauNlKtT1823XVINbOXN5sBb2pNlFh2PYnZOzyjbRurtPahQtrZ92xtk9WzFz7O29t/Pj09xbjJ5tvc23d0E63U+3c+7Wx4S2RrzeTVdyqZABvvfm/d/b/4BwHgXA+CcF4NwfhHCeFb+eDlpoXC+IcR4lxPinFeCcNnpvfjW+eN8dXLx7kCr+Q8jW5yTkzB+T8pZPyrljSeW8vbhzDmTCOZbt5pzXcfN+cbZ51zvZ/PefOX6B0FxfQ+iN/6N0dufSelNn6Z01rfT+oNN6l1Nn/VerMy6x1lknW+uMU6919hXYexL77J2VdfZ+0Li7V2tbPbe3LL7h3FX/c+6Kz7t3dU/ee9Kb7531Snf/AKI8F4PcHhmV+F8Rw7xbefFeNrJ5DpfkvJ+U6d5by/mOo+a835zqnnvP+g6v6L0fpOtem9P6jrvqvV+s7B671/sOx+y9n7Ts3tvb+47T7r3fvO2e+9/8Dt/wvh/E7l8b4/yO6/K+X8zvHzvn/Q739L6f1O/fW+v9jwP2vt/c8J977/4PD/iYf+Ten5vI/oqD+r9f7FR/u/T/BY/8uRf0/n/Zaf+P8/64//xdz/z/8ADlEAT8sAhj8Axi8BEA8BRl0BkBsBxnkCDl0CRqECkCsCxtUDEDMDRu0DkDsDxx0EBzUEUEcETx74TmgQAAA5rVhJZWJ8IggnB+5cImh9xZ5Z58AngY51R8IeJQIt5eDfcGQgp8IfAI4igoSFonZ1iAr9ox0FUFhvrUAo4qRb5YZ74+YqZyod4gkG6kcGEHcIgnIX4f4YMH4pgs0IR7JtUI7ckJQnUJggzjgoMKCwJTMGEHJ/wfwBZ48Ggf4cJypPELwnIeEMgncMIlQ9As4AsM51Qf0NSAwgg9AYAY8JEHQgkN4nMOLWT+IxyJpI0PAnUPUUIgpD0IYoJ7kJYnsHB1As510IAgorUU6ZkWgpof8S0Q4owrkTj94ysT5OUUkWJZUKZWggsWYnsVMOEVcMUHMV8NEGsWYqQ9BM0XAnUHAoMXcOYoM
X5RsYMGsFwoJD0a8VEJMZcSRJcZx4kWUNcWqDhZ0XQ2r84xaJoc4fgPwl4s6Ewc6fQOYg4nCFo1YHghIwLQBPCbo5CGIrgDwPIpATwP6YIpKS4D4cYXAeaGKF6F4HgrwpAY8h0jqqykxMIZ41YPELgfMHYhSMoPIf4fKSgzAD4p4D63kGUWQihPCD6fQA6F6OkSpSZzwrQT6IwlQ0gT7RQf6mIf4DAeJ8bGRXY3aJoQImogaNyP4QIYAPgICcqS6nSIwPwGCIyXQAAeAD0rYQAPA5C0SnQiAMAPAD4TC5YcAl4ACSgcYBAYCL8riLQGADxFQB4Y5Kcv5KgCK0rSUkhQoYAI7OB8yLgP4YAHAA8l60SRLOAyYrQcUyCM6OoQB8wYAvwAMSstMuMuJKYQBK4eYYzNA5BK6hoBAeKo0pr+4yiJoBCagMImAMKWICARAHgIiXQBApIQQD4AgHwEQD4QyXIDAOADwkgfwBEx4f4Awl4YYB4DwIQGIB4SAR4Z4YIsEsQIYcQBARAIgl84IAgH4EABwAYYIQ4Y4SE9hE4cBW8swEMkhYYQQA4R4QgHiMoIgH4QQDEyAAYD4ASRAAgB4QgqQrQcAMYfAEAgwCAHwBE/YAy9cz4T84MMoB87k9wQAcAYIeYYgIYeBQoeE+geAIAeACEuMeQxSJqYoOIp6QChpGIDiVCXLLE6jFAB4B4D4ByYqYoPhKIHwiIlwloR4Q6X8/AQYY4Z4Y7BYY5ALDAHiXso7FAD8/AR48gY4/1LYY4YbXweYeAeEkg0U/CRB9IDwD86ghCRFHbJqREGqB4fAfCqwPiXQD47wltJoT4R4sAY9JwQ9LhAgeYYIcIcYxNMSWQcBGMXj+s2QfC4qaQf6bohicCU5BItZBgRgP6XoR4H4R0jSYqCItgk65cW4Y9NNLI6QZ4R5QtLweDDAh4vwD5KFHdVgZ46Y6VJwc6g9SahqpgPFNgQ9T0oI1tNJKAl1VIiIB4H8JArTEYhiSKvoh65kSoD7BYR4cYD4YdXQYYfIeZ/gjwhKb1MSakbQnyJova1p1gvEew1tUAP4T4g4TgP4RyLDOSqwlJPIf9ZCnKXwH9LIiwZ4T6x40rEce9WYZ1gEvwltXNhyhlXy7oPApLStZB9KgQD9hVZKggD1eo9EWTDaSKZlPEW1PbSsW4c5NNXQmFcDAEGIhYYaaFfTxgyNdKjYpIeApKaa4tesgVeSG1ewR6IAHgrNII2tfq3o51gMpciYpojVg6RFUYY7SoloYdJw6dbKPQeapVYCEgc9PolS3RBljdVA2Z01kAeYg4mNUo6J8Ujq9oillSuIeFloyEeq4NmVc6BIfAvctomEuQhlngRwHBHanlflgAQIh0yAP01Vw4f4eAsDS4iIT8SspYAAgcHaflSCcyG1qdzIAclElFrVrglCd9r5W59IP6gcVwDjKa9IR6M6wI6yo4H4rgH8uJPjFsjrKYl0f1bJMwAA0gfIqh5dvAhlmbjNR4/IpaOgeAAlwMgVwdwtkRKAR4cAiMzYP5MImNZAAC09yFydyoZ9y4poR4fg6wg4vyqwpqgiLIEd0NgiE1Sa29YAzVr6L91Mh11l1w2ctN2QfF2grgHwANPAQAI9twT13gvxNN34eABF4S694oZ9vNFgsNm17QggQCcF6Feo5qwN7lgAfAB124taL+EAf4A4y4A98KEyD8Lt819CcyqwP4Y99oYNrA6dgl0a3F+wjFPC79sYD69NpTQEaC7I2tWQH4A5M1Jshwgi9cid37DF4Qf4pZMyl2ClmkT1vmEqLyfuDlwYpNUd6of2EYfAtY/lOhKC5Q+mFdZsHYcIZ5QNP401vlzgrQQ99tXV3+HQrlrdiQggD9r6m1/OINIjFKm2IxQYmKrYh+Jlt+J65Qcd39mOKjXwmeLF49vR1uLiSIP4fa8mMAtYX4g9Td6oX4RwHw/OMZBjBYb9VGN4R+OIT8LmOmGOBYZgH+PMHd3+PgcdiGP8hlfV
qko+QtjQiAH4b7FMo+Iw3WRglya7BdPeZIitbWSY6Yp14Qe4rgcYfoeNmOTWCoo1my/CBwfgc6aC4tTA1+UtUlewZwHAP6jacorQiIpDBcv18NbNVix9b2W4l16t9sid+A6YxIeYhgPNcwlwY+gItYT9WlNlNNJFT9gGZqngfw5oD+aNt2h5M1bNqmbFugY4fNb1cYOJFucOLUeeLmhK5189MddzFA1tUrK7FNjItaYuelIZBg5FLN8NQIZ4joptQuf4B4TyX99tVuglL9SdMSatfQB9M1T2hwB8oIl1NQR9HVe9/5CQmAPgHke472aUSoligwY4B9YYcYc4c+kTACF6htRWlN5NmuLjNpHQeIcF6CO6yYhAAEsia7OAByyaVCviO4hDKCnVNdyh4AZ7LwppFOf7OJ2yglJd0GXwcApabsuYAFfR4DOFK+hzOI5CRB2wD5HbOIl9kBHzKQHwHFK4B13QT4QFyIA+2dbIcAcYYF4RFdx6OJ2t482D/d5WsApYzAfABi/C4srKOwpKRWwyLKa5H5GYggACeTTUuN8ISAB58dPwgWf60SRSggYIcAGOggeAGNSZNYfgAVfUxCLOJWhy0W0aRI7wQG9+1IP46xNYgyOoANoQQCD00Al4cAB8zu24eADl4SSgsihspQmu4Lko9JQ69dj61sK7S7S4ggp420o5QqVkYwq5ZQTghFnImh0xwXDKZ4nqCDQQqfDUWxx0HAyDX3DZD5P5YdJ8U4yFnPEomBNq5QgxP5T/CD/pJEFo9Mao9MJpptRsTpDnI44nJI4aVZtukb8PIzYQofKQ3ygptrF78Yx3KA4fLY3dbhuYcPD3MAxfMQ33MhcvE5raxfNTZQqhtqJ3K8D0E74HPT33Pj3nPz3XQD3HQT2zwUZPQZIfNx1L4zrHIkZ/PZIfR3Q/QvSIo3Sb2jmgfxhMMgigb6ForIswf4b6nIsYd63iEUFAyorDQE1CIQA8q5FAhAZhJ0jYAYfIQ4QN60bz1TmnAnAgYgAbNQA6vPV4l4ZAHhNhH4AIfPYez/VIylP89YYAYOvAeAE1ogD4QYsAYIPAMB/QPAGAfAC4BlHnOHRHaCp4cZ/ibovI7yLBM1bygAHKaictHnS71zmlPWb7JLDdZqRCx41ZLy/BL1gHXfXgypQoi1mImB1mh9pq4WjC7XglsHZ4yKLQQAAPhdMeQO0wgYZ52o5o1YHIg4Z92PPvN/jA0p2YrjOMsnjyo+9CpXech6RXioxySgl40uBwrgAAT9Ih8WTEgfmcpnmwxwQ9PwfFaBPfnxQIZ9REP4f3kYH6Bnc3SnhAlql6aA2dOB8UXYfwGPqIwPR/QIytgihlbw0ohko9NQmckejHr4f0hmihsLxrmlXAQYcPamlEe1HgA5Qvj6agPif8hlNVU3SAyg/4YIYYYBAAYAhgHmqIQeaSUAPgHHcCF/clN3w5fxFAA/YG38R4B4Qav58R2cR5NaF+2e1HzYxwkzQGs+46k2/wccvCBggbQSF4cCLLCvsg4ccb9DoH37834L+X4j+H4z935D9nQnTEEn5j2X572H6PfH536sE360EH6b1n7Xg/6/737P7HPP8MDn7j1H8r03870n9L0TmkzsLB7yHETEPsUQM57gvclYnn+InikUtLHIgwgAjY7/gkFg0EfMEeMHgj+H7/fx/hkTikVf7AAAOiUTR8HT8WkEhg7gkUTYABg4fgjBgzne8Efbzhkqf7fgrwM8dgz+Hh5ijniz4AA/YADf4AlD4gUVfcIhUTh0QjciqchjEaik6gsfktdikkr0FjEpgiFlsDf7jmVkf80f7BnMHnk+idAitCB9jpD/cFLijjpz/hcMqMRf8PkD8qsgq+Lgtaf6frlhyl8yr/Y74qs0yDntFqmcEt0DyEEfl0hl2ij4f6HZ+ammAtETwFdw2Ii2KrzPw0cg2Ty9esGUY783Ftgmdz9rg000b/0r/08/oOYZ/G2OYiu1ku3kO6ru8x3J3/
BynDsLHqMF51ngmgtnP6PTuvVce8D/Z2cM7ki7zEvGirxKy8rzOEy71BwyJ/nOD54n4AaImeeJzpQf5ADmZ7tQYD58wgfiCAAc5HtYhxHnMnrBHefJ3tYfMRsAH71o+1j7ramhADG4qJIjCQfkAP5Pny+8KI+RiJHGhxjh8PJ+AOf5wn+1hPD+iJjj+Z54AehZ/B66TpDmtr1n+3ifNYwUpD/EjpH8AZOSwwcDIs9CvPUHkgn+AIPkAHgBy1CcKoIQA5QjC09GBPpwJVERHmAlQPxPFJ4kCfJBmAH5wBOR5xnwA8tOko0aw0/NBR1RUpU8B5mAOQA+ADIYBnicwBn4AiJAAPwfACHwgD5J9L0cTAPngA5jA8AEtSAfwYUxYYJ07TqCt4IA/0ui9m0aD5wAcAZPB+Z8gTlOcEH5O8ZAQD4CA4AZhgPQELEAOAYn4CCIXRdQBkGB6jxGQN9g+Q8UJ8eAIHyAZBA+YQD02eAhnAQ5/ngMMpLTDRHxxHRg33hpggOZBh3iMB8nEMJ4nIMJ8BBKoCB8HgIB8AgeSeQwPkEB5AEeYJBmIDAYDgB4CRkAQP30cM/CGeBh2kfwDB/hB/mEB9/EfmxggNbofk/oNxIrOiumOeA+D/GQePyB9IGHQDPngfB+DxKuy0hSCBmPEZD4gD4DmdFJ4Dxt4HgeR+FnGcJxnHpR4tY1hz4s2JxmPyEoceYZh8LxJ8nCeB48yfB4SqDwPB4PAfPyh4P8CQ5B8gcZg7bsQPA+f3AEfyBhniePCaW/JHo73m799yBP28T/Ya4r7Lowhwfn4PwPB/i9N7U954bc43medi4HqBuuLtJvW+bcP2AYBTfbGPKJ58U1uLABJ/DGPTbMHiR5w9sccH8wfHN/0zTFA8DgfjpBHume6IccYhx5jBHgP4PgfgfgPH8pB7Sm1OP3d090yIzwHvwA+/AcZkgPreX28YhjXiSvJB8H9EDzhnkqSIZ496Dw8EOMUSpi6DDMIjA+UAR4J29k+HiP4HCSoOvlHmMcwbigAAPfY+5x4jyZDHHmJ8eL9jpOYH4/tB8Kg/ieiGD557pi2ojIXFUiAPA/JqSUgyKA4x+H9N48+HZyAPjjYuOcT8eBPgfjAdGEkJiRDHHAH+FMKwfjHJU2lChn0Hhxhonhs52nuA/MAI4E4nlJRCIcg1GBAxDmDHglIobjT3xPJkhqKkVh8RYi0YoxQjkFR9IexCSgjyFv1OkDxsQj41vwHmOcfkoWlxhKA3iO0lCPmSE+nePzxpAEhOKD9O5rAOPtA+hB6RaR5j8DgUM1gn0tAfWi3URwACjDODHJiIEQjFDmB/J1C6FgAMUTIdBxwx5zHQLaPGJBgBAJDizBUxRQgHAxH/LIf4gR/TmioP4AJA1EB8iWkpToHx5kYHgbg8U5Vf0LAHOUAZKjJSYiXCSEqCIIgcYpNUA4HxgAjmyWqbk3jIgAYuMAE8OaQD/GMHOdRgp2B/ncjBDQgCUDwARPRDUkS0uPnyR1B0/R8AEoA7gfEbg/0EoNCmAZfFjgDioOACJxQBS6onIcQAJ6LEYQg0sP9HGI1fpBSIT4jqJQjpMQWZ5ICBgfE4dIP4HCJNEhecsfiYWKCfYW0R7Y5xHGIGIHNls64AVCne9FBbEQ8VKRvKWRB5Kou4bDVWgVWSIVbTUQ8fADiHy2GDPwfDLQ+ENg6INEcRyCIWTIRGx5prWD/t6vsyQzrZ15JGggtomLAAcdMA9x8iz3zbDiaaLrcntQ5huP+yNk2IyakPDtTZpCCDhfVPVUlTRjwjIGlufo4R+Wkf1K5KsX7UkQANDYeMn2wE8bERCDsEx5tKIvW4RxbqW3AJVcIT4zDwXGIJXsi1fRE2AecN8/Ljxwwwm0acf2FI5AfaU9wD47y+Dxu5AuId35Oj5GeS9TlnLzvvEeS+JEtp+xuDzG6q0WzFC+vpV01uI5bP0iQuWNIv4IyIl8OMfrEa3MXxIN+RA78oKbMkMQiIv8HY
PpQB8ZOHhD4XfPhqmUM3l2BTUB8H+IYdNKHAPAPz4IZgfhbB6JESDMXlPxPd+ONbnuGHxjmgL/IuAOx+6bJSm8iIPjQ8+CNjxjjP0Bi8gh4oMQbAeMNi+moPifHAibLZljKzRJYuV2D0HKYZGOYAec2w8Ni1NA+PYg7sO0L4PjOLA2/OycA3dw2ASZDjlCmdLIP57iHKAMMeYhxhxI1boHHSnHOpVB+6F0eaTI3pGfsx85mYAvidi4BgAx3Kj5fSbXS0TDoDHEO/CDe7TJDhgiW642ECKkYD+YChYHwAH5EOAMcAARzjBMAPAOY/KJJV36BwB4gxHlGe4IBjeuG+AANZv0QAByijg4AUYeIwJ6b4nuMBJ/HOSRIHgHEcGgqrD8ABwoHwPgAVcV+zdm4BxhiBMyHAHwD9+77cFxweAAR8AB3QRER/EiL826SXkA4g4qFH3pXnexFCxm1T0IDYwwA4jwAiOcYHBeDh+nmUcT4gBjqWDHdhRxgh8FzMFxZC9LgTjgAmsRYiUgGWmIuSie44GNgH8BygPADuWKcH8rYo8DgBVcKMopRwkAHjgBycUA/PetD+BBS4MY8AZj4VoAPo6au2+QUgo7wHUCF9a1D1UixtdacgTQcZiJMjwJPLeP9pUFiDOeInGYvgf3PD4eXOIh/tCQ++HwsMxDzDjkFN0SwiBiCBuekGJwH6ZzkM5IgDgP5g5t7hImWAzXvpBueO59LLfriKmqIscxcScdQoAJ388rZFkxlp/nH9A37iKiXnjP5P9jaEQCCh8jeiGDgDCDcBPj+wBjzP2CKD9v2oSQBQHiDEHiDB+QECPP8KNQHQLjKwIiGB+K8CLENQAwQiJnNQNLNiKQFC5DcN2QVQIDzB+OpwBwLQVOiwNOyiJwYP6q9QTQaCwwRwYwiQkPoPcCGijE5KMwkjzwoQpQpwqQawqwrwsQsrjwtQuQuwoQjQvQwwxDLwwQxwzQzlxw0Q1Q1wRK8stE5P/Q2Q5QiqTHZLAECiCwQPpiCBAgPDFh8LcCJwcNKiuolh4J5t8CCiBwUQHi1w9QpQyiKwSoGCqwFQ9D1w+w/xAjQiGBnxGCQpzRDlquXxFRPCwwACwi7RHQtRIjVjpL+v7iDRLjEAPw/CDxAQJj2CORPiQAfhwh5qMwJn4ReCQw4pAiCRVwsxWiJjWOERKiDxZjRQORkRcjRRdiugfh4CZIOiGBHxiCQRjJoQxxliGRmxYCCRLCoDEEZDHRNi2CDxvRsRAN1xuxviLRwq+RxkDCJDEBPxzCfCEjIkqx+DahPkNENB3R1iIv/RAiHhfigEGjTB/yAxvMtDEMtCPh3iZQAPsooxcyKx0CCCbENCXiZB9yFh/w3sSCgCEyAkQMmkrDDyQkFv7QkQysigHh8F9kIFFB4AGAwA/pBh/lhEtFOthBzADlCh8BBHnlOw/BAEpCIgMLcBmAHhzAPiUABnsBwFMCJAAyqknCIPQh4lLELiCSOjMC0BzkHhjSvydwmkIgBh8OQBAA/AwSdBASwlJkLh+SgShSuSuBMOfEnB5hjADhxlVh8KkwqSbh4u8EnuiyegGAgCiCHvJBwFPPRBwhyABAAuXSltjSnA/uXvnSpiBhkADhyFjqHHsFHFrAAzUPQEpOiAAh4gAkgRSS0HHxkHqS2zUTZjpTPOiTcFeTMBATZB4gAKFg+TKFrFHBAAPhIADlkABh5hiPRBkE/QfRIEEHcBxhwGlB8S7xXgOAPmaKEhDhwhBh4OCHKhgAYHOg8ALIwDDgPA/AMIVA+AfzTCLhhhwAOAMT7nsOHTzhHnKGkjBHNM3g4AOAfAMCHzdC0RgB+H4BhtaB4iSB8UNT4AMAfAcAPACu0hz0EB4AYA+A8AeAHTzJEhnhDGpNmnWKLnChgBwhwPZQpzGi1ILIZRXj8hPiOoDKpID0a0NHwvvT6g/IHOEUPLcHbL3CeHmo5JEHenKh5iEnEn0m2Fcg/iVFMNgiTh/xgB80Kn0HFB
+G2g8UtnYNVhz0rGIuEM4tjFIDoEGt2HHn7sAncHbBhphUcTuiBhzjWIgrNuEGspDjWs/qrrnH8G3FlyzgPGxHYg+H/0mqrB/AOUomLhnVDnaHcEQNWL4s4k1CjgA0viUB8hwUx1O0w1BSJohU1Fvpfo3LugeB/geNq1OJOUghztzH7U9vtTuNRnNs8qgEFVChP1OLYI3AxI6wM1GgQ1HrZnYg8VKG6KrpqA/nsHY1OJ+kQJhVQpmVSVTSJ1Un4M8GKDNIgvvCVHnUwhzpgLuiH1blvLxI6rwyJsAkhnzVgQvzuyuCxt5VjGxBPH2joL8h8Bhy+p3wMhAIZwmtqqJHYgcAOA8LcDMsXrBVtKFkn1OhwJ5h4FFkHieKSn2tgi0B8hyA51zDBp5ydRD11hwHnEIBz2PPhCHFEIu2CpeV7Td1Shjkhs71+QkzGliOrohRX2c2OH5h8DigwId2RAAIZkLNqg8N+h/gchGWK1rS1h/gMB/nnLHuXWOEJixvQOXWRAcKSiH2TDTBzWVRvCFuXTg2Xi2hAHnB4ABuwOXoggfgAE7hOWC1to7HHh+WfWgB4jXwqzGkICdEk2BIuiNoqESWmntEHqpoZuytqjUAY2srcDX2uWvWwMzjkkJi0KHKkW0CC21y0jTKexhiFqkEQjADWAPAAnnBgAJsNIggeAA2/iN3BLwh4AEoj3DjM3FTuvdGIC02jxnFvDk2ljigxtkEHnqIZ0j2qCCgYhERcM80Rh/2MgfsCj124MBEoLyW0CN3VnIjpBnAz3XkoEzh4EopqVcIEBzsBMUA/k73nExWeNyNzWfrn1A3j1hEoUJXmGxVkXn3Ph+EdS1os1Gk7z6rih/gIhkXuDM4B3wGLhjDEW4Dah91r2RYJSZSPDTAngT33tKB8LppqWvnn1ZCCMUVbIux+hx3/DM3iYBWhSbU/rYDBG3WkI9jSBw0hBh4Ho3IZ1b4J0uB/lLXuHbYBk71tBjRuYPkQLyDM4R1R31XTYUX33jEHg4IhA/kFHnQGhz1ZoFpHX94rJOLwmwYdKpYCDKHKn70+U3rNxKBPoN1EUnKrsMkHnEoZnSYJtjYnHWrcLX1As4gfHYHAINRFkJryO3NhYR5D3VtbL/INxPCFthEpJuA+AcGW1M4j5KGwg8Fykg4+t2oNlNjPRtYAokUa46Cwhwhghw0a35g4ESu9hPhDl9hBhBn635H74Hh4BwMzUuz7LmZEXuBgYjh+UAgMAPgHAH5gADl9hgkJ35GI35BgWRAfUVSZRDxuZNhj5tpbuQB8G/AcA8AMZRHnHUs3ko0S52E75fl9tNZgFNzwBgY5THTt1+jKuvW8W8Tg5ekghgF9zpFiPQNhYHh8ABMzF9tqzRiHnK5ngDhzqFupBAAnzn5tEJweOih4ACWRW+0IGIuXjZm550mIgCDpAATPy6gAXbgJhwQeTk3C58aFi+KXFITvhwAEaAChZaivB+AYvlydCIADkzkjtPCHlhPlwSlOAzkHh+H/iHJaSniCNV3uYqigCjBAgPhwhniSSuDBEJywwSh8AeA4wNkfyzmIuIXVBPokEJkpE7h/ADk2h8EogDrHhwHEy9BhnZLBaomIygk1FOGw6AB/TF0/Q5RqwpYeQMFxbLQaRyQkbKQobMinkDXjTGQ57Owk7Pk0EDPebJQ2bSwkbTwdZbP1Vg7WQtbXwnQm7Vw17WwibbEDambRw57g7hCDbN7h7jYe7j7kw1bi7lbmv57mbnbouqbpbqRlbhsmwuya7lSbxbwXidgAAADAbYRXQVkkCSxBi7iQt9uAkLp5h+AMwQGLCCP/QCxuxWLkCDbLDgKF7w7UCS1gDJpBh5xHxOCDxUEviCbTiIH22ahAb3b4ROhvidb6ECbrNR7uwfiDjFbxCvVgDnnDbzj6iCvtcFCHBhthHn8EkhxO0f758NcKwsbub87vQNN8jKcPRFKm8QjU8Z8E71RfEk05mKQ9cWocQNcYQr8ZcR8aPocbC
w8cavcdCRRBv3cScfncB/nAin8icJ8XjfcLDiSLDIkzyNIcB9iPimrqEbB/iNyBsOiPkNCZVAiZRtkviBnDSRiCSV0wiEsmkOyJcXCucyB8CX8DibJ6iHcsHectyZDfhHiJI8EFjr75imyTCO7sajiuhjScgDkIG2g4yyy6gw6760rABxlYwSjpScgPhMAfEny5S6S5y+OSt+EL9Tk2w+Cjh+BgBwA/FmKXCJFFBnh5hAFZkH2nKE9ZTjiIgPwmp59EjAdFiLgclOF9opB8BnABhGCVSkEIKHQNgchAiSBgdfGGbtQiQyhiTMui0NTbSoOZgg0fzoLqBxTau8zHhIE+qHB8TcA8TcFfOlp5hAd63C8FzlhAAgAfPNk9iH6F9hhAByTay+nYuLaaoGgfgOCUOXNqcsH4CCBgdqO8n79sABhEF9hyS5TPaZjTgABgTR+FHaO29MoT0+HWy7zHUGA/VqAFo9nTrqBwBwzvKmhwBxhAg8Awh+T4AOJdAcZRgGZDhh4+zvryAYDpOEB4AeAcADAPADGzr7vdImJfhwAwUeOEA8AcT7tGgMnFHPdoDMCdB4khzwC0nMhnhPhBmIBwHW+kAYUzh8A4AGGW0QHeci7gY6sdm3NhNcIGnYPschDFHMncNgojr8nE00M4g/e0IUlSHac7nMkPIVB/ZUgeGytTs1HetIpf3z4gnmNXk7gOT9AckQG2tqOCDMBAOPEhoLHCXEZfvdnNUig+LyA+OYtqgHsCyDckjLhhqq/RMis44XhMIHcmlOJ+coopFNh+JHe0VajWE7pI17tAQDvsyotGtTnnDSD7/VL4rNnFCeA/gHA+BPKDSouFPRfb/cnHjWB57B+7H4iAON8PF+Hh/Dxhvh+D8fH9PB9GD8Pv+KRWLReMRmNRuMuCORxjvhxvw4v4cI+CH4fv8PD9IP8HxV+H+BPFwxSbsd/o+RyUcH8eB98P8eH9HxNPvOdOOav4fwt/weVB4PseWzqks95ueewV/wt8Qc/o4+J6fxR/H9/AABuN/sdAAN/vF8uOQv95sBws9Pzx/wKewdDwo/yuHog/oeP4vGY2KR7HRaQsN+GGTB+CU5wB4/oF/gGZTSBuCV6SdI9wAA4AAcUQD0Oi0d/5ilwJ4ACnzFgDyp1UPAAD7N41quSSvPwH2Gfo4cJ48RN/sC1AAH25jo+KXRxsAAXNkOO+X6RuN/HGiA98zMf5sfoB/57I/H5ZD5SFjvwxyYH5n25z3EAlavtEeJ4OCeAHuwR7uDA1h/h8AChta2TaL+2zcH80AAB4pyrg44DhOIro8KiALlJ2HKyLkf4ALUlbrOwua6kA7p8mYMbwrcc5+PI8weAO9LCkAlpAD+YEAvlJDFvo+Jnnw+78hwA7Mh8AL/D+cDoJmmp+LkfADwSpZ/qG9sRwkiYfni2qRIqAB+AFDYfw6P6KTQ4kdDir0VxMR4cmYPzQTdFq/rfGDtRgfJGCHHB/x0c5/DmogByBB5PPa9boSTTKNSWyLJsqkxhsyn7OE4H5/JjASan+YaKGGxS/HjMR/h+fDWtiicKKYfCbooeCDTgqhjg5ALMRC40RzEgyfkeGJmH+jx4LSp1Buu7K6sUf9ECPRbxvKohwyAHgf0qRyJ1RTV0ItTjHJCwIcHHUSWIcH5fxhLSBn/XhwmOeNYVkHyTKIoykTTCt8IofgcoPDjfQCT7hq3EWEBwsRHgiYkxD+eNpRcndCrqnUxEcA9uHweZ+Dyg66JmopHB+2VsXTmV1saY54JHjY8HjUR/M4T6qXtAd+LfNN+nCfg4YCn9bD+T6Jp5NWN1ofwY4XOIPmeq6dvAcZz6OgkRuUsRPkmYiCY0hdprcR+PnGhKvg9ki+rdLdfnhcJ/gdl4HqrmW/ZoxlQnCfB4IKeFRD9n2+aCmt4L+eJhnGR/CjwP2mNaHyJImQ5hzVwqnpMflgQRoB/84Y6mHBwh8D4r/LIOiRBmDICCVNjoP7aYJ4bgB5n7YtxwwI
fA8D4Hh8Dzlg/b0R5DkfkO/XRwDFmD3XBjgfCBn4kwcZ8R++Ine6beAYZwmGR58DgfgcRGHgecwBgfuCYYDp0cCBVqP6ChwOFgAMD4HwDkxfm+UcA4AwvoD4B99TFAfLDEG4MeAfx4Dwdss83DbRwE3H4B53ogy/DgGCPgMA/AYB8ZQywHjyxjiHQQ9B6KSRkADHwAMfiJTjEHSq00uD/1UkDHgXKH6BRHj8AAWtEaD0Rg/NwXIcAD36hjIUd2Iyb0OAgf+IAicTRwATHAGCGgBIExGKcix8kRExACgqakna1hxw/KiB93ogS/DkhoAEtY/GEkzQey4R4wxgQthcpl6RHxmAPOQP417VHtA+AeJ5pp2zZw9HiPgiclB8MvH8XJEqs4ZKzGAqsf8FCdD4ihJkikNAPgcQ4T8D50pQq0BnKQmIDA/kThkU4zzqJEFfA/Gk4LbTkFRIkM8cJfhzAPl3DQfwGT1AHj4OEcBRpAyCmnNVmS0yNlumtNubhjJBzdnARebBGptThnNOCb855wTjIzOWdU75AzpnhNadhGJ3TznwpqeU+YXT1IvPeflATHT7oEumfxFqAUFoUpuhdDaHUPMfRCiVE58UEopRejCSaLUZo5R0jlG6PUhpFSCkVJaM0kpNSmiVKKVUtoXSyl1MZ+UwplTWd9NKbU5nRTqnlEKcU9qA9Cn9QaiT6qLUeedQ6kVLm9Uyp03aUAAqlVOqlVarVXqxVmrVW6uVdq9V+sFYasJznikmsVZ60VprVWutlXqyQuqjW2uVc66V1rbW+oVZq7V7r5X2v1eG/16r9YOwlhaw2AZnYKw1i7GWNsQumuNjbJWTr7Y+F6SLKWZs1XKy1RrMJzHxZu0Vo6u2dmpZ9WgAAHVUACamqZ0aqGrqpaGqYA0Z1StXVQA9tzgCPQfaS4FV7TUarMYW0NuapWtqpbCqdsqp20qlbaqdyKpW7qml+39wbtVSuGkiqIf0m2qtZa4PAABwXONVVU29tbeXUOBbwA4vL3XbtJd0+dxbw3UuUAC8t57Y3qqpdK3FVbrXVvlfS7d9j41RDyMy0J7ap37v7ei9AAL13RvbgS+GB8EXBwUZHBmDjcW8wlebCmAL2XTw1dfDmHb6zTu/DLEctRDqxEG6zEwPxBj8A+tED9U4InJLYqeaIDg8B/koMMc4vEWSfF/fPF1mcP0DuLjI9uNMbY4vPjrHmPsgB/yEAPIgf8jZIA/krJiRR/5Pyji+stmAzgwBwAAmwwxvAcB6OEYGRjujAHjE0PAcxjieyAAFrJbBDg8ByeAcAcxvAAHmIc24eNI4tzbZrKZjaoh+AGNjOmegPZ4fKP7Cag9Aj/0IH7C14DfgD0TosZ4gNHAf0idzSgh9LaXsppmptmNOaeJsMDUOeRh6kxNqZSGqdV6H1doo8Gshva0EPrbSuUNdWM14kqszYTu6g1EM/Pl50CgP0DsKqZNtmav0Zo7W+k9q7X0xjDbaYtu7C2/uEOG49ym/qlujVu6hn6NG9u2/m794a73lZgMI/IZjADznbe+Ex4jn3JoLQm/S97/2cM7QPA9K8F1xtbg9hNskfqjwvhvD87553BxLimgdB7n4zojjfHdb5M5vyLkdf+EnyAAGFJoAxgZHbjjYfyHstiDNmtHfmy7Q5iAfkUD2s9IiPJ/J/XPO7C8lo/WboHDeiJSH+IPo+OelY9Z7l+8PUOpdUeb1cf/WetWD64Ru7+9K1m3AfWC8uA+52L7rQyz/eK1d675irv9hvAkdsVWy0IDzQYXq130AF2fE9056fHy/m6weLIxZHznoas+eIv6D0Xp6qekXV431HqPVEV9N630/r6I2Y9l7L2iz0kpHqfSItPmTI+897R73+bz5fC+HRz4tcPdlrAApj5NF/l15SQqaqX0Po0T+nYH6o/ionB+z9L3NKFTff/D+L4BjsIAGRV+eiX27E/VRn+z932vx+7/nEwBg3yZGgWcIoHwXOI2GeHy+E/4HkTmH
yIwB+E+H+Ge/qIs/gsg/wAA/oHA/2/6H+/+TFAEI1AJAMH/AQWzAXAbAfAgLQ/u/kOAH2UYH+/4YQIoL4IoH2HmI2QDAIIw/5BrBGIvAZAdBPBQ/SMawgAPBYHPBcJlBjAaH/BpBtBjAUIvB0IpCgItB9BNBPAksu+ORnCLBbBeK/CVBmH5CcLxCOIuHOHPBNCoIrCtCAKjBS+O+9CYUYH4G8/BDGHwAYWwH24YgAE+BgDCHgByhYIoK2B46nBNDQGcDMB+taHmkcToE+EGGQ/BAhCys9DjBnDpDtBhDzD3D6APD/EDEGVRENEQIpEVEZEdEgVnElEpCBEutPEyG/BYHAB4GzBYK+AOHOGZCXD4HOHYHyE+BmE8DeGQDyQCK2BmHMDnESHOGcDCGZGFGIrIAAA8D4GTFzEtDgPi/LFoWfFvFyH5F3F7DFGBGnGLGPGSHPGXGbFRGfGjHRGrGvGzFhG4+DDlCMH4GwRVDHF5F8H5GAH/GGE8DAATAKIoH7D+AMDZDUE2B8DPGlGGDMrIXGABHrCxHu/VHzDpH5BhH/HMHZIHGJINIQH/IUBhIZIdIhIkBnIoTo0JIxG3CEMY/KBPH0GzCTJBCZIDJFIIDeHzJNIUBmH9IbCnIfIjGnJeVmkdJk/rFiuI+6H/JvDpJzI/HLJ5IFJ/KCQDKHKLJXKTInIrKbG1KfI1CG+8HzJxHEUZKxBpHOE9GIDeGTJMLSBmHOE+QCH8D4H9LCB5CoXHGxLK/dKgu8+aWzLXKvF8K3GDLjGLLpL0D/LvLyLRL5L9MAE9MFHtJoMWVMe0D3E3DuRKAZDNACkoAOHDGIDgByHzBMLSKDMoKjMsGetaH5MBFdErLNM4I+/LDmoVDXN0+NG7I5N/DdMKvu+7AfMGn5OBMJLPJqH9OUoXOa/POOwXMPN9DcoVOsxBOxOXO0nzO4yo+6ujPBO3OfM6+c/bPMn5PE0093PYobPc16+PPioXPm20+rPtPPN2I4+RP2nnPw5NPhQAoDQE67P1QLPbPRN5QVQXP7DJQdQDQZP9QknxQO7tQIegiItqonQ5PKoxQw8FPqhdQ+LZQ8wCozRE8ZQTQ2o5DGpDRW8/Q0b9RgoxRso7Rk9LRoZlRwopR9RVQpQiIqNvPWMjSANInyO4VQO4AAADSAMeUxADPvSEI0+RSKSTSRP+nNSWIpSbSeU3SlA7QfOE+Ci0/ARmDA7s+eLQqmJjRgVYjfTjS8qlS5RPB+Wy4YqmUxTZAAO6JkAePgnhR09W+rTOIpTTTWOg+cOpDAlBTkItSbTsRVDVT0+uTZSlT+YRUDQvSqIyPWVoOC+dTUV6+hAbCW+8TnAfDGHGKDDfB2VaHCd2MaHuItAfTGI2PuBgIoGcLYPTCpCWx4IugoJkGeEzU7QhSsNIS8KiqlVIlDVNIGLQH/VVDBVaImLTVgVXVkMdVqIrVuXYhJV5V9NsIrWC+xWIYRWNWRTK/VWWOCtodbBjDMIpAarI+8QZVWH+E4B7COHyHmH7VsaOMcViIqJ1VwI0PuKKH/V6ABV+RmaaYQsAKhU3UEnfUI9g93Xe3pUcVmhnCDXuH/XzDBX4HMWyHBYDW/YGMagiIqGCJhXCRZYYA+AYeQHyRmA/CXVmIuXOHwAFZlUHU9AXY2tDY6VoRU9/ZDZHDHZLZPZTBjZWMZZaIpZfYQIyPvZkGdZpZtZxZ0IzZ7Z+9yMZYw9q+OI8l2jVRsJXRgLTHHWnWrDGHezyB/X/aeJwMiHgUwOlasIxawTna0ABZs0kEPa8IwXOH2AEDBbEMXbI91UM/NbTDZUdba/BVTB/bjbnbqIuV5ZZb0zBZjb+x/cEHhcIV7a+IrcRcVQnWTU+HAe0ODciIot+InAVXKKjZEDBX1bkHPbpJPc3bxEqGAjgXYQFZncDNs0kHDCXT7SINAMeqlS2m7ca/JddbQO7RtdnBHdtXxdzDBd3d7bsXzeAIreFb4IuJ0JneNdHeUIpeYV7ecgsN
xdXXbCHYGMUHNeuIqBzFQlDV9WnIpVWH8AmUYVMD/W0XyEYBDZZfgGAgDeJfTa0hOHy0kADCWlcIsS4IqEIAYDBfMnpaFB7fsH/fxY7f3BbSLCg+9gAK/gFgJWzc3gTgXfJgcZreLgjdHgrS9Ym/bg3g7fm+Y+qaOB+MUHCPdRthNCPDVWmDBe7Nsm0VMA/ZeIsHCF/OoIyHhgZhoMZfRdDglgpZ0LWLlSWXPh7g8mrem93iFiJiNf1f5iVdviZVWLqToH9ijc3irhlS9i0MXi5Zni9ixjBRPjHg1g5jMmnjRiCIXfvfyIoBvXrTxhTWpcvX/duWeBgrxc5aleDeHhrgggVcEXzCWWyKTB+HeIsEIABh9aDdZAWHPbzCPiLY7kdWlETiWABgCTnDkl5injxalc9kMLfhtk/eRlCIrLxBrAflMIqEBZ/mAn7hBCrlcA/lhjZkbkflrjhlvhZlyJk+hipisIxbzfJc/k7i7lBfZFRlJmSItmZUblXfpJrCOG/COugIqVRWrBbdvbhWll0l2Irm/ZYrfargfnNmJnQWe1plKIsEwB+mPh++o+PnlnoteIpnvTxDNctX1Abn7NzoBaloFZhnLj9nPlESxmQH/mUIpoZodnfiBojBdonTvpDnxoxZFm1DHAbDXYtkyMXYKIpYPoJYYqlpJmMLxAdSmIrpXmcehkRpfnm7w/botmxXxpvWlp1imMjp9mDmBj7YbqJf5mRqRpVobqWb9qbG7COGPCO+dEropW9VRkld1B4MfXuADl6MXW9BjpDi3htqHoNlFCPmQHzpSH+QAiboe+5pfrVWaQ/fbTqH/rfbfcvpRrmWfrrruI/rzB/q5r7ePgnmLrBAdsHnYPXrKmvmhckIvZwI0tpXvTdDBBrXpsqH/hinBSbnxV/iJsBqNAJlEoFrO+DnDfgPlDHH8IvCvCCm7tuuZSfs/oONTjFaAoLuA/VuEUzuLnzW+nFcXUiu5uZr+9hkFult/tQToI0mDuJDBtlUeIrjqnzUtfc/tlZB7vPtNuxvXTmLQ+wnVvhv2ohupCHSGMdShr1Aju4MXwIopwBJrwEMbwTuRDfve99vKVmkDwSodwu/fwpejSzRfwnvnCrQtsQ/jRJxEnVwXM7xNYvw3xVxPxZxanNxQI+Bg9u9bwpxpxq9nxAIqBmnhbWB/vXPZxkI5x6nfx/yDPNyGI2Bmtcq4Imq/QqIXyRPByUI1yYq9yeq9yjyBQLyqIzLuMbCPw5xDSsK/y5QBy8IxzAMZzFwDzLyly7wpzWMXAVzHu1zfzPP3zSIvzmI+LTztEKI2Ibyk8lxyrvx2Ipz6I5z+MbgOIt0HyB0L0MrXzlymIyF/0Bd8I0B8gVyB0dyp0qMd0wMbfD0f06HP0/O1z2ItzBaBvyLnW/aOreQCHsrfCyB8APyl1TON1D1cIvp8Gf1ltT1rvast1x11zR1CKoVb1+IqHmJWB/1nuTkeIySp2Rz12UOh1fp92eVn2lwjXN03Sf092T0QH/zA1DUCGGAw8qt2BiEPev2gAOE4GCEOAGECHgHsDwSPymQeD53J2x3N1aDwGEAwCABwDGAeHCEGHgWHvMB+DyE4GcDyB4kcBsDOItit2t4BPt1Xx5COv54L4P4T4X4aQD2h4j4n4qD/4v4z3F2v472UAx5ElIAcACcmD/UoVn5T4oE8DgBsWRWn2r1z45Pj490TCOA95n4N5r5ugj514h4l575/6DkqIx2P6LyF5l5F4QAYDCcmx6IpVr6j5VGKBsMLzIIv055h6N634N676/bzVn7H554qDf7O95P/7X6zyT16D53UAwDH5sJQEIRVzqDyHB4mAH58BtWfwL7Ufz751B4F5B7+ED3X8F5uHj8LCn538SDz8X5/8dTx8h7Z618oMjO50/yP13M3ngMX0Vz8w/9XzN1R3L9eI/9iI/t8I/Sh9Z9vpcPj90I50tU/AX9r9bIz9QMj+L+Pvp9N773Nxwsn8l1Vxusz+r1
54Enx+h8n9xyJ+5+z9d+CMjyKnh+7+t+2nn/R+1+/yX/D9t4D/dyt+Mm7/Z/Hoh+F/qm5/v+V/mIA/4FA3+M4JAx/B4VC4ZDX+/B+53nDopFYtF4xGY1A38f43FXBH4LC4TIoxEIlJpVK5ZJo7LYPIY/BoUAYhBAfB0PAn8AIE94zKInMKJRaNL6NMo3NIPNpLApzBJ2/57P6DEaHRq1W4/SKLSo1TII+ZvDam/h5HmPV5TXLdb4pXqJYIzYoHZKfC7PaX/a5PWLhgcFHI9SZFdoFZBihh+CAODwC8AegH+hgg+D8Pw8AwC4LzDqFg9Fb7lMLpGMQ/8VjMdkMllMtmM1nM9F9Do9xR8LX8PC7IMDAPwC4wYeCGxg4P0CgNkGXgx0PUYrt9z1ZZpZbp4vBmAHjIzcS/Bi4UYMeIf2GxgO/0P5X8Pw+gOh0op1Ot967u7nve73/C8byvO9L1vaGL3vi+bbMA/EGI07CWO0izUsU8jzAYP5gAOykCveDwIGOQD6NBBcGxKisHpXCKKwm8UKvPDMNvczUPxDBS2xNHCGRQlUVIpFkAQs9B/nwH8OB+DA4Ogv8bxzJrCK1HqHR/F0LmHIcixlJElRsrMnSdHaTSihspmAeLiOM5DlEeA60DxJLooqAE5RJL0mzAkUxIZMkzOK47kkDNc2zfESFzkAE6TrHE7o/PKFzIfDHsiybKg/NgAJ6eLaovRFExLRaN0ahUJovQiM05TsGU+jVQoPUaLVKjFT1Q+9VIzViCVcih4M+jVZVm6taoxW6B1yh1dpVX1ftxYKL2GgVirdZNlNFZiLWckaFHywVpWmwVqpA3qOAAGAA23Jlu2A/TTXCnlx3KwNuXQ0l1Ozdjc3jeSuW+ilrtSwd8XyrV9odfqFGff9z4CweBobgrrYBhSiYYhmHOriGIpbiaF4qgeDoHLqjYvjCV40hWOIFjyBZAouRZGl16QheyF5WomW5c/MoZkhWaJhm2bwdmEU51lJ/skfDJj4A7PH8AZ/A+f5wo/n2foxkqY6GgmjaRpQf6Zp2oalhOqN1nKZoqZ5xmGDwYHi6J8DgfGnkDsOebGmGrIJk6F7RtW2bduG5bpuy3bwge9aIf+D77toH7eeGn3fXuxcHjOgx5nWDH/xe/ntp4w8FymBctMPMIPxW18Zt/On/z6N6n0MddHPGdGGEwyY9g4H9Rv+v6j13J9hl+yqWhXa9vlB/91v3Gjh3vQeDu/ZUZ0qCGeiHd+Z53f7r6CN8KgW9YN6/l7f7XJe57qM++f/D9MfB/92BkkH8DwH998/08r4aw8z6v3vxfm/V+6pngP5aq9JUD1GOjjfg+R5rT4BqxgLAYiz632vVHAAMQDSWltNgg8+CjODDNmdM6aDMG2uNeg+9uELwoRvEIo4hmsE4WkNgtAp5BW3XwGhvCR6sJYdQ0hqQuHsMIfxHK1Dt/MRX+QxK5Ep9MTC6tnifEKIZB4pGof6PBOSsCWRQe7Fk7cW4uxVfRFchUYkJH3jA9CNSK3+rwitGgnkCFVtYXNGeOiT4XxNYtHOOkb0fRskBGiQSUo4j/j0SKNrwZDpjkTIuEEeyHSPT0RSSULJKInjsraHEmX8SbLjJ1YUeCBGSH4A8cAfwDpEH+IMl7c5NSihtKRZsOHEj7HmJ8Xg+RzjPH8HwfBHg/S2H/I12EllHNnl1LyX0wJhTEmNMh0MylRSJcTIqZ0v5gj2I8tWajlJrKtmwweXcvZuB8m8VSachYrzjVxFSc8z5gkvnBO6Ic8FiTym3NCe07ZQTvmMuCHxA3jO4cTP2epHltSzlpESga/HaO2oQM+hQfCX0NlDQ+iD+4pxOotOgZgHqMB+AfRqAlAZ80RYJLhg9IZfUjpLSeSdHI+G8oLDljsx6LkvHPTWm07KPRahiPwf9PSPCfqBTafSz2z1GqRUepdHKmrYiQygfAA5VytITLAjwAKAVBjTSxhspqsValZK6rw/6wUOrFUKPtHyHQyZ7PiG
tVV/VXZZXaFteJyxmreQSv0QKdxBpVXesjFKXWAsDHWocY4gRcABF4lc4XB2DiPZKyiyK+QhswvezsFLPm4ss3a0c2Y82NsdXGolelo2hh5YljcuLX2Hr7bJk1tLGWNtO0S2xFbStjt6QS35FLgtUtOQe4pDrjs/uGQOVEqq01dljVOh9z5TgPlTVutV1a3ViuS6aZs6JoTDH/MW79QbwvVvHPSaN56w2quw8iec6Z1z3uXEu3DV6c2op3fWaF97428v23mxeAKF1wpTarBR+7+uZwRRib+A7A2noO8imE6SXwRU3bC/Vj41kHwvNnDM/iPYcItc1m966d4ZpkP6k1+MGXzmyD+qOMr5YFcNWbGuN8KVvxYyiqF5ME3oo3hXHT4KXZDvcS/I2C8c4gjhYSs93LqEellkfIGSX2Y8GfVnK0r7vZavBlx8OVK935ijmaxdhsZ5sv64iuhLcVMuyDf5kK5xAKGz5n3P2f9AaB0FoPQmhdDZ8zhEYhuc4v560Po/SGkdJaTrbayyFmYy5uIJUqpmiY/WFs1bsgWnKqaerlaBkGpLr6mtbqgg+qpaY0trq+9WrNL2FjlqnWuUpBsPXPrCUWd9Z6b13pbENV81Ek1/sWnGimZ7DIHsCTeQdkkKURtKSl87o5hrXSiCWutO68kRlTbd08xUMkntiPedxn3tnTebFFwNl7h2NlOue7ry4n3TszB2zs8TZwjOveNxt56l3FJDNE5qL8C33vTZunyGcKyJhIf4meG8G3rr0gmI6X1R4tW7dUgdbbHoNRTDHHuL6r4PJekE28X0mDzynWPI97aLoTSKkmMAH8x5BvxdecYS424mS/SuUNo8+Xr0CH/Qsm1fx/yGQ3NONaLyZhrdHPeHb94g3vquJjVcy2D1Lcdesv1oq5ufr/WOMcP1PTrKu5tu9g2n2LhFc9RVS6zz/f2cu79QoFyuZfCe+9IZjg/NEM9wdr6121vfg+89J73KfTMSeC8q4z2PUHk88+J8t2zVtpPK8z8BNfX3nPReX7rrguG1/CNC6VrnWnj/C7+NF6z2XrvIsf2hqP1rl/XyK933jxXeut3K+D36lfo5yEO2qQf23w/IdbaJuXs9axAdP966T3/1Luke+v2rzvi/Pt73xMG83yNreh7D8qeMMfy3v/R879Xc/2T78FwEj1A/n/h+J4zCCqJyLoz3gghGsAYlYeAjw9YYBoop4QZsAgYqZVLujljuzCIl8ALb72IgcAr4QlUBAf8BUBggcByAcCI/DCzkzEiqLJ8DLYgggd4eLo4lgB4T4fkGItId5jwfwDjiogYfqn5BsFB48FToaaT8Af8GEGQlcGkGwf8HEHUHjj4f8H5T0CbwLuzFznLGL7EF5bQAYf4R4qhXgjQiQfIfwf4P4T8HQP4z8IECT+qpzlrnCmbHEJQgQd8L0MEMQlcMsM8NMNcNsKsOCqz1THsIrBrDr0wd4fwPCo0MIcJKwlUPsNAT4oCdkQMIMKz0jsinkQ8OsA0O8RkRxqESIk0ScNMSwjsTEN71ECjqio8Q8FkRMDUUIP6qCo6owlQT4icM6o8SwgUVcE8TT5aubrrIsLggcRcWyqUGolcXYqjUcX6Y4g8N0YUQYxDOTMDuDMcAUDsJAjsW8ZolQD8XkaKrMYB8EL8apWkYb9ruzTUO0b8RsZglao0csXyOsYMdka6KkeEUEeUcMer4EXsSsfMakQUVsK8QrRiyr0MPAskeglQiYecM0SkQEg8TMfjwUf0b0h8gMiSRUisP8gwgkdY6zdjx0ZIeIfIeCqQc8XIkQZ8l5bQH4P8HMkggckxdMjUd7ykRUlclqpUl4lUmQfkmkm0i8kshDzzW6HLUMjjWBEJo4D4cBxsL4kwAAA4fg9YnxTQf4ykAofhQ8jMhMTbzKyclIgUqQB8qkqwlUrMrath9gp8sAqMsUMZZcdr+yP70z9cssYkQr477J2b34t7/b08pjkj0Evr+kv8dzV0Fz6D2b6S4
kwT273z3JlUy0yT3D4systMxD8Uppnczb/j6Lxj6a7S6T6sbkFseMv0xLmrtz7jK4f7LMbr+Kz0vUOLe7CK80WTFL+bbM3cQjtzgDhb/LuU4cnkhabTiadb/U4Tdc4ivK/6qJ1rLU3K0U6ifkQ87E3EwZ6a/rjjm7pp+EZE00ybxk8jErBID09E0L/r8ZzMLKmc4DeUxk5cx0vbm0+rnU+7gk/M6c5k4xg7pjqy+EI82E0UxTrkWE81AC5k6TkVAlB0Fc+FBc+U0cbLsz7s205VAc/c3khcbU1jLFEFClEU4rxsqE8KBLwy1yutAVFM2LqdAs0FDM078bvlFsy77T2jw9GUWk+NHU0abDxFIdHM9VHbyUtFHszkzEzy1MyM9Mzr/xhFGbqNCshjNNJMxtGrzE400tIlJdI1LFL0/VMD1MxdNFENNUVztz5ogkw9JVK1Jj3VHFL9Bk2U0lPNNNPdG1PtJ9KtKM1BrM1TbjolDFPVDVBpgz968zb0WdKlMlO1My9k3zq87NFyO8wriS9yddSU4NLLv9FUbCEr/DtNTdH0wlICH8Cwj0nU/FNtGlQFMLxtWAf9WVANWlLVUx4sFLjsQ9XdCVUj5NX7EVYM8tBFYghtOlRlItRyEs/yk1ZohlZ9P9RtPjvlA7r07UYFCdX1N8hVG9brBNb6Y9cNUtccsziMYzildFbAhtSVUQjId4hVWQfhp4hUmAhgb6EVdkwFd1B9BFeNdQg9eglde8jFflfYg9fohdf5707jhNEtD1gz00Dg3xbI0DoohdhcpTZ4hdiAhQbw3dhyUdLZ91Ds2tjFIdjVjlhFjp0dkEnIhhnlkgg9kwgdlCStiknrzdIcJIooB8Ew6tnaRViSTlCtI9IVSloYolopBlpAedpVlNZETlQcJEPIcCYABgh4kofy7Q3dfUB9mQizgYgUUoha8whdpAf9q1n1lVGLOkh1rlr1sAnlsYgdsrFFeohVtJzQh1ttko3duKWtplINusRTGAfkNJ+weAfwOYZ5bQYIedxzr4YYcIeAHxeklgeItAP4cZpQYCkwfgAIB4YBpo9Y9QAE7AAAD4YAY4fAGFfYkIcAnwxgAACYgUsQ6Vw52Nucp8n1oQfzmIj0osPRbUoYxJxIsg7AfMe8So97aIrKn4rDb1yh5KiAZ4Tw3dt94CjtrEs6zYk0w8Rd45xMocMN5cmBbVygfl6F6Qe96jUd61dMih0xbRUojt7t79wwi7YU0F9ESjvCpV9ou6Y7BNhAYbGD3kXt+0nKoyXTbwX8VVtgP6YF/4gd8KsdxNKceOAkNOAzr95jr5rrimBmByqWCCo962Cd/IgmC0NmDGDQgd8GANn8wOAd4y7LVSht92BWFIsZjzSuFr4AgUoeCgg+GcMa8wr2HCCuHVMWHjngyWH93uBOFF6GIowuI+F4f+JeGWC4hWJ+DYgWDqLGKY0d84fJbV/tmWE0mmBeIjaKpWFsZ+JOGGCuMls4pF32DmHOD72Fp+NydhxGBF5uLdzxlIT+O7TeMGMQgeJtjauGQGNGQV8dONMcGGN4D95N5oc9yLdF+A7AiWB2R1+jTYYaht68Mt/UaawQP6f6m+NOWWQeJFrREInoPyyarIfBS6yaDInoP8rIeAA4YAHFlEsQD90+YkquYZQ8qQAcu4YAD4zowt2GY+YBwxponwQAH4T0Hi1aLiBDalMeXYAGXsq2YAnqVWb2YoA+Y+ZOZd2GZysGaGbwH+aeaua6VYgWbSVon0dIqucGcSPmcuKVua7K7cbdE9BSy6YzbVRGh1D+iC02iWNdR9TNBNVdKFH9KRjtSEI2j1QmkFQ1V85Gjs8Ajdjwll05L+NbvlVNCNZ1g4hWl0esDEpdW1NdXFC+i9N2ntOGn8WNRdbNaNPk9iqM2810f9OtQs+dabl0LQB+mta+m62+XEIimOquq7ZVYyxGrdA2oGktSuqNS6ndczimr79OsOrWTVC2o2oNWtbVQJ8Vgjr2tr+Wt
83Wsdi02uptSc19aFMtaR/1ljtGwVUdXtdeodclFl4lSmqGk+qVujRuvs7esdP2oWu1W9ptxextY9gMx7iOzmuupOu9Jt8qRmrOv2uK6DzVLuyewtS2w+QmwmpGw1Pj2u12zW2FM+2m3W223m4O3Ozu1Oz+42p+2utG271e3y2OrdMeylVukO6GzO6W4GEG5m4e524u7kb26s8VV23G7u5G3e1W8NdDNe6e09cWx9duNm6LD+7e828W5uyutO7G0WsW+2/m4W9G4m9W++9iMOjW5e/G72/W58w2+m9u//B27O+u0k/lNnAO1G9O5W9dTiT1GHAvDqUvD/AG4/DPAfDfEFVm8m6/CW/uuHCtEfC/Eu+Gz2n23vCfCHGFFfG/F21/HU6vEm8/E27/AnIPBXAXInFHI3AyN3BHDkAmnMt4AIYAfAD2YJNkvBE2AWXXKMrHKfKvK9+pOvLeyUeIT9zoooHgPIeaYY+AR4e4cPLOnnGuonHlSnM4wpXfMQk0oyngf4oHOUsnH6Qj03PCU5rvQIi3PrG3P+WPMfJ3FLaIHzmJ+oZ8SGl/QFOZTvMloPO/SZ+AB/S1tYkQfnTIhHTfSHJbaxbUVEcQlYG6ngd4hPRMa3CO6gH/VkSvVwlXWDG3WXRxL3Tm2c15rqqAe/XYkwnI+A2vWkffW1HHYsX3ZAkXZUqnWfVG91rUacXPY4mFA0dHZsk/VPFuyYhIg3XXb3YHcMnfZ/bXc3aXdIvPdY3PYVJHctwQT4Y+E0ondXbHdvMup4hJK3fPfYk3b/U5RPetp0eOY6bOacD2ZoAJr0rN3Hf3Qb0tIfhpg/h4kxx+Z2bxpWgfR/bPgHBXeeiPknTu3Pk+jHlPYe7vli4Xce6kdHGm5PG3BLaRo8c+0e+NgW+fHHA/l3e3Gex3OmyHO3ovnvo++XpPIXm3DXnHJ+j+61K/qek3quy3SPrHFfq3res/Bm8Hr+8dF+8vVXFXsvFnmnBfrO/fcnpW/3i8vnHu3/uUyHuHF/n20voHum7Xu3GXp/o3m/OvnPEKW/Eft/wPpfwfpHwvtFTvs3xPI/IfsPIvyXJiR3me9/wXqPwnq/sHtvBvtfJHyvJXy/wxay3XbW/P0PsXs/qnrvrX1/rntPr32f0H2Pt30fyn1vy33fqHE/qXsf1n3P0Xzfxfzvxvz/snyHtX4/uPvXC3vnDH4HJP4X2/5nD3yP3/zn4Pz34ftn4v130/x/7X531f8P2v2X8n2H9X3X5/vPpnn/p3yf6v0v6/9n2n5v23/P3H9wgD/gUDgkDGcFhEJhULhg/c7zgqfhkTikVi0XjEWfx/jMVcEdgUHkEjgsOiEEiUklUrlkajktgkfkEimEZk0Rms5nUkjc7mUdmk7ik3lFCo1Hhc9nU/jNBpEJokDlNPqlGpU5pkYp1VgdRgVTgTgH8MsEwcb/fEEQNoD9chVXmtZi9bt1ef9lsVkndntMDtb4ttuhFwmFyi10rl2sr8Z96nL+Ob/T0Dfwef7nQ+CwcvrEjxFVxUFxmOmuQyWUy2YzUFwktw0Vz9U0ME0Y8P7wglnsstiD+yh/H7fc+rgmtlmvimxp+zge12+5u873u/4PD4kC40r5ET5VI5kCebPcLDT6fB6Heb4DnRnXT7HA4XX98+z3y77/8Pj8vn9Pr3aWPcf6Nuq+UBM4uL6uu+55nufJnk8P57oefJ/h+/6VwCgTgwLAz6Jm+yHpwgkGH/B8IwnCsLpVDMKm/DjspU7aGO6o8FnvEsIQkecKQs6UBJLF0CxgkkZIXGijPucAAxKiUdR5FSSQDAcgvlIaRyKhUjqFJMlmfJsUR69sfvfDchQOwsEuI+53wpE0nRTHzfTI+MqzO100tXNc2xzMEoJHKT4Os68rJBLCEy0nb7n/PYPmPPs4q/AZz0E4lCI7QyEUQnVFUZR0dzhMU5E/SVKNXSyM0wgtNJy75/AGeA8mGQ5AB+fAYHAPgHnAB7HgAB5AAGf5Hn+QB/n
AwNBzs488M1VtX1jWda1vXNd17X9g2HYtjzNDygRAk6pQ5cSGVOjFUoJVaa0Vcd2XKi9zoNb8RXZej5qXZjBXXetuXvD8FRCot93pdyLXgkN5YDgVx4Ij18LrgFw4VhdlO1hzE4gr+JYnbqm4RiONX4zt/TVjD2ZBOuOK1j2M5PlF+29f9wZZltK4pGOLNBks/Zoo+GIpgx/3SmF9Z4p+fInoGhJbomi57m0iZw2WdaazWjoZpOV5Nqiq6shesZjeetqprqFa/kmZa1sSkbIhOzTzqe1bHp8r6i5e4bjte50LurvbvvCrb1S++Rrv2/p3tiEbdZvC8Mx/A1RwckcZxqYcQgvFXzyfKJZyyY8jLfNc2lXOoHzGH7RnfRIz0iw8/RPQ9UkHWWN11N9h2PV8fc3a1Z2/cIv2fTYv1Hfpb4PeXV33iop4+R7f4nl9H3V3+RofleipPp4L6ul+v7CE+bmGz7D76MfDjuwYT8vze1hvncX6H1/ZlK56z1P5OL9uf+4lmmfwZt+hh2siAABAWA0B4EQJgVAuBkDYHQPghBEAD+mkP8JWXaAkEoNQbg5B2D0FGrwWJUXZ/782XvofHCV2UIGvQiJJCSFRFXzsqfTDGEzInxPPhs7mAJsGsw7InDN+sNYgPMhY2WFxI4YRFfBEdtsSSQRLiYayJziYoEdilFM38PTkw/i1FSLh3IvRfi3CeGkKYyRNjCQyD0bY3RvjhHGOUVXLkjjlHePEeY9Rtjo56NMf4/SAkA0CQT5ZCSFexIeRDy5FSLd/I2RzsZISRdFJOSjlJLSXcNJmTTeJOSdbVJ+UDW5RSjabKWUzPJUSpZbKuVjIJXSvYlLGWTApaS1XrLeXC7JdS7XFKJUrTTGyLaBAQHBxSxj/EG8w2xAy+kIQoQyZo/A/j+mShUgswR/zUIIAcgoxx/lnIEMAAI/gAAfHwr6bYP5rEjm4Qw4bMpnmrmKACY5lJkiDT8P6ZpAp5kDHzNEhc05qzXmuQKbU7yBTeIGOecE4h/jAAAH5X06VeD+A/QojM1EYTxIGP2f5gpCTomQhpZBb5+loISPGgRCqCTtQ1NkhNCqYECGPQ8gYxx+A+B8AeeYD6NEYmpUEglHiBD5pCW6kc86YA/pO+Cg9SaWETm5Ryg1MiEU0oPTeb5lQfjjnmI+pJFqhs2qMousZVal0lmtU8wdUaV0tpmRyqxXasGiM5RqrhBBj1eHjPMQ48SSVlIXWepB8qlo3RvNYe9bUDE9IkSkZxY0XDvLTMMf4vx/0sRudagSFJqDnH4HOxhAgeTboKdYX5jTGjcI4O+1Fgh9j/puRJOU4APK1nmMOwU4KAEbH4SWbBEhOB/mpYIf4/UOkOE+RAsdSLFT0JJOgYKxEBA/EAPgAk3hMToAeOYA6rkBAGB4HwAY8RAj4H8AEcY/gcTeGADktaS6Im2GOMAH5tgAD8CBNZYo/wQA/WOPAB45x4jGAeAO9Y8RCB/A8AAf+BBAB+DAP8AAxgDjjWrbi3Rub2gBuDOwMAeAP4EA/YIRgDx8VAAOPwAht1fkcLEJjFQBxyAHHmPybw4AcXVv/SK6Y+FiwTmsAAPF2x/iRV1jYAY/Elj8AFeUAI8QID4H4AAeN7lg3xABOYgQgLcjHEAB8HlFA+UTB/BPCwHxgZLHOOMYgBsm5YAIH4H0E8CAAB8EDCwxAB4am9hwfgeCCDxp1ftCo/ggAwzYAcR9ghEAHHhT4Ac6cYAALGMAD4kNJADHAAM9KwceZDj69skc6AcB+AxOwHwGAfVOH+IcQ4x8NBhHwGGbYMAeB4HwPAPAeKkBxH4HGwVfw4B8AwRwH4DgfiPB+A+8ofgeAMp5qsf4BgHjDAOOEQdDhgjADjUgOAJA+blB+PwPwDAPAEtoMEYI4xjjDtoP+3Og9C06BgHzRIDgBiPEeMcR5ZxAjDHAMMeGtgwXGD+AwtohgH8DHOOEcAYR5jw3lUgPGqqD1qyCHwHwfwPj+4/bktoj
xhjHHmOGlhacra7H4PDQY8R+Bw2GhSv+5Q/TsB+I7nezbyz8B+B7j5bQPiH5MMccI59sjhHzzLY/NOc7o2WW0Y4wR47wLPTrV+g9MT+rEPzfU7APgPGeI8c/AdYjjHGPGv9f7jc5V4J8Y4h94DxHCPgeY87BVI49yAzVS5+B/Ef4HnnJRxjP5T02al6geBxwjoPHIeB+BjIFX8f2hOQ7NA/swR/PwedBNssNRvARjjxHMo3tvMx+BhuDNsPnPbaDDHz4eceetzh467NudE/NE7PGOJ/rGsR5jHHwOPYdQ+c+iHHwEZ/dh+DzrBWjwKw8gan8Z4LwmsBHkQ+gPnHIf/GB4Nve6bevzcWbvV5ggQjwP8lNt0AR3oR/gfHOB/5Y8RyA/9KWn1QQx+CxuwA/uePYvZt5KIs0OuJkqMEKLTudFGpwquD0PiPjA4rgA+PBEKvlizurrRhzi0qkPePqKlMgp+FfQStmOSh5lXBxh8h4IJi0tdgHh4PyBAA+A8B4C2qWPLpzsIgHhPMStoA/hgPPv4p1P6APh5hHh8hwA/uUC0hAOaMWpvQArJwCBxhgCOJyJrN7J8LBD1wHKdABqHFhB5qJBwAAQKrjQLjGgAN+sPvjA5wzvpAeJ1PqiQPdgcQTQ8wUC7tQhzwloJrBMpMsgcJzQbB8FgwdA4QeM2hHM2QggADbBHQ8leCHQkQlCxQnJzNhMFMINBg/wqqbvZsNrrwuDsAfwvPelGh4M/pwPtoCKJAxwLA/hnpzQ3B8L2LRqJPpRKO/wSJ7vyLHLmhwAJw/jrRBA/ssL3LtA+AYr1rNh8h/PHCBAABHueRqrysowBJ7kKhzgfh5hPvZlGj0rtNhAYliLjNCRQhjvZh8NArrh8KmRvh/wvq2r7gRvlwyFhqHwLESp2uAhwAIrRA5phwQRuQ7COw8COL3KCwUj0Q/pxQYDbxCBwt0RzhwxoB/A4CBP/hHhmPPvOuMxJAcCXwjwkh8uzwnSKthNCB8NzwLx1vZplN5prR4pkRUwwAHhxwkwyKcMsw1RaKMFhBxrAw4LfSDNStTQ7vrhfvAhfPtQkh5w/h5rlPrh8QKgcPjA/AJsIyMtCOFBHhiQLyQvCDbLNFGvgPmyUj0wKNBh/QtwLwBxRBxh3usx4R5EKAOJrR7P7B4iJPthjh9hxh+uZO3xaP5h3uAtHw4BzrZwQSzRfPrPAgHvAw+TFOkvFPwJ+NhA4PLytAJvowdOMqMvBSxA/toQKxJDbFePRt4SpKby2PjwbDgOwAfhPCxy5j0SaAfh4R5PGS9qnBjgPreQUuDO7thO3tmv5hhzFOrh8A5vozHu/SECMvdvPuQvPuSNYzmTMTevwA+OXNjg8O3LzwPxog8AfKMtnlcNnuXTszrh/gHhHhDhhhhhxhzuTj0zjuwDbNzg/APrcjLx2TXTdu7pkB8TwOcy+PZQUhwh4Teg4B8Jqz/leAHuASiBwu7uUjcTHtYTqCMPdgfAHOQ0RLcpvBBgDhDhAw/h8SNh+NdPzAYNewmNJLBB8Ach/A+AONzgOFdB+Uetdg4AeAOOhURtrtstthDs3wnBwBwNhNpDbN0NqCxhwUBuTyaQ2i+i2wtgYA/Udx7MEyXQyBwUHUIuEh/AHuGJlOjBxhBhA0muKyMB8RC0RONiqPAM0udPbJlhIAHhgSBB8srkBRIg+MpgANesXhDxEC0UcKKJqxqwZxsVCgeAgNXuugQAPldtJuIwnB4ACUnsHrUPbFjUBxSJzMVCBUtp2MugAwwBAARv/wyB4AAxcMrpqlftMz5QrgIhyAAj0kl05si07Cn08Fir/JrCzhOMBB4w/rxB/hBMygDh4hhx4tXhDh+KL0cA/r6ADhjjcVusygHgOAwL/CBAYTeFav6B4QnB8AfAYpqMErUBgJ2h8UBh4TcVBvwVUrrh/lghBwwBgscixvtsdB/JvABCOBHhgCwtmsNB8LwMcpvL3VyzIijHUrkCBwRHYqiJQikWLny
HRPWnGpV2PiCK3HNq0pSWPCK2MMI1hnKC1pMWViKWW0xHfwDWR2ZiJ2anlrfWcpfHvpe2gLEWhpE2ino2hWj2K2lJJCn2RJAWUmNJCOriCLMCCV72WrUCE2fES2TV9psCEKQqxwXKnqNDdzbqVJ/h4A/qfNcCBjMiFtNLqiEw2iCvziEW1wRiR2qCB2rCB2sK8Wt2qiCFeP5qY2wkzp5rNCBQXXCjmkD2zixuWCC0JNK23CBW4CFFfW5iEAAL6W/iFW8iuWpqIWuiEQN3AiEWuLMU0Wv1h3RJnCCJrssqGXHn1CBW0XJiCSXXLCCXMiBrYVBgD3OCCoC27XQykoQ29iC2/CBXUDaFlWuDigj3XCEh5ONp53Zh/2cSOXICECUqk12lX3LtY3TjsM5XiJu3YSuiE311iCSWs3mjLrN3Upvi33qXDWwCC3r3KXZCBrBXS2tGPmI3wgfFXiC3fnWvdBB30iBqfKD2727XkoWnZPHLgiNhn2nprDhh4jGjhvjCOFPkKB+pwE5EOh+Dhv6xBEx3CxvJsQmh8KPNzix4AB/iOLiA/rYEbkIDfiJD02+jo12gAvziIDMC4C8v/t3LYFS4HkKCN4iLlJog4LXuOCQB4AYsNAPruhnwcMTwexAlgsIAAQLgOBgQmL8AYAjuJC2hngMC2h+CzgBgPlaMAwcB+LkP6sVhmAfVaC1wascJwYAMVh+MmgGVth+N5BIP2rBQcTbrsr3B5pygDLjAfAIh4UHWFgAhDsCBDu7g/lq1sB/hgi11Dz/jM4HlaMCTegYBgW1laBGAIA+APsIU7krgIwrgHhIBHhns2tHljADs8MQKJxItpBAAgLsAPgQAjuCFeBngIFePir15Z5pldscqbMSgDhkQ6J0sLAeADvEX6BxtJxcLtKKB+JlhAT5LBFdzbsjNBh5L92EUfgI0yB4MJgABDhwADuCUJVZkBFghgsu4xgfDzh/qfRk1MtJB4A4BAAwZkBEAIA8Q6YJo6iQBwhAvlhDhBhjuy0L5RNusIgYUXtWgONygcADP2hHgBO5hnpwBngBjMh8Ms43AGMEtsgHh2APCOOyh4QrhcAYA8AMOcgPgfhYA836BhtuOKh8AwA+N8lghAujCIBghjhPURAcR4h4AcA8OE0fgY0MwXM7gDhgAB5+VH1HwDszgeaCDMoCxk0/FXhwQXJzlfBAAINjtrX3iQOkPlu5vDuzO0Bnz7vHg8PAgOOaT0v2hn0LOyDh7F5AuZA/LchH0LBnhzhuBj2BuA1p1AuMuhgfg5j1wXADsDKwObugO0u0O6BPDbAMO7uYTkUfgYawaJLthAuB5WsvixlfZWg8AEAOAeAHleICh/ADADhABAh4ZWgcAHbhbjhYAwUEWXocCMvSx8rAuyqHN5BHuk7CbDPWT/hPgfhngPhjAPvqbF4ObI7J0LPthuB5rNBPuAh8vSh+NU7Pg5hnMIlfQJ6ZB8NCAOCOO6NaPhbWA/gcVqbYCNh/AObZ0HTwAP0U0yBgSN2XFiaxtfURAOAD7hsugPgBhB65bkgeAPbhaNB4Ag2ba9CO7rbt4O0BRXVmLNvycAhGP/OciJbyBiAfwDb0v0JqMwv7WCB9748CPSyGOiRv79QXP9OWOZcAWEtDDhlh8DB8VFOZQ0h/AMA8Bg0HB+AeOi5Nhw8KSOUqABhg7/tl8N8Oz/hBhB0HlXuPFeUUxWMV4q8WOr2FtHhnx3DVNH4OB/AYB/tkhGMWprBPsTQsWFkSv6JwsrA/0dzhBxhAAAheBxyz8jRCOQFjgfB5hHLNjcRdssOucAlhVmRXDJDbMV6ZLR8FgEA8Ah8vAcAfhDtJBwhCW3Ykcz0Wg/gHAMAjC2qfcwBBlXg4NJc5Z9ZNsWlg5aiQTFlj8959SiP58ZMsxzgMB/hGMm9Ds2s2MBY2dGwz8odJXPBeADrNQ2pwNaPyY5gPgfB4DLTCoCMrvjRFj19TN4FhDJJjx3O8PJ9XdYdZdaQZMGW3PchA
80YpgH9f9g0f8eNJA4FXqdlf86RO7pkEdnOrsCBP4OhgATx8sTp4h/dr9s139DgAAnzhwgdGDhqJdxqvi2huBnym90raL2pjgPAA6iA5gfcZzlB4AEQKJtivurjGiJBPd+Ax9/RZVHg8AB+BOjUHhGAQi0VgAft3b/z4gEBheG0dlZaF+JaCZ8ZNklwecV7q2qNHYOsCh50K9q9A9Bg/hGOaJrQf7yThXy7yDhqdYyP9P7FeBuB3ymqv91Bw8aKnbQBfX6Oi3t9WUXP1h4yecqdVK/scw0sLZCcvPPtZUHgGNRQpgPhw80PxAHgAL8aDUfgPhB5Ng4AwuYURDz5Nxzlt+ziMbrXt89y+uS+3pj8avJe67xvR3t+WLaPA8gzmB7huB7/B1mV6xCR6P57QBvX6N/h9vicnh/t9dTS/ESh58DBMfKthier9+nuYPP8JB4N+FjQAfQ/Rg/fSi8sWgPfVc3/W/X7m/OAYB/B+WTozCLiAMdwuNHuNDvNnh9hvEPv9HvNzv94vwcP8eH9OHw+P4fpw/o8PsdnwV/wmIo9/Dw/B4fwlhwV2Ps/vw/vFxvljvF/Hh/hh/h8frN2xJ/odjvBjvh4vg8Sl/w5wvFPv9juNPRcMPh8PN+HmaP8APwDuF4PweB8DoNwPgHAN/uCnvwHoFg0wfA9AOAfv8Dvy7gO1HB4WUfA60Id4Hh4H6G0/HY/IZHJZO4ZPLU9hwNnuNBuOEoN8Q1DuOI2U8H4eD8OHweRxPD8HyFj1WSh+Tv8OD4PB9Hg9D6OnzM/vBwzjTajGXt+0Rhwtx4Omain6PQv9hudPD4fjimVw8Pg/v8A2KyX4GAdAoF8B4Dv+Iv9/AAB3U8BgPoFweEAAAfAb0EAwbCAcBwDsQPA8NSy8FQWxzKwYyBwkCcZjkOQJDmeB4BnwvZgkO0o4D4HA/A4lTWh+TwPNiqphkO2qIoaDz7MM/0WPgf58H+wcJnwGB8BwHgcLQf59qIcZwHCcJgHwMA+NWp5BtAvZwmO7IHBwfg8K4PgAPCQJ/LGwgAAOYAAtC9rHvkustvueD9S5MUyHgfA4L8B0xMQ1j+L3B8+MhB0+xyAMjLwB5nkAEZ+L2cAHoieQAPisA+RGjhPgAJ6Q0HFyfocAAPkAH4QAecD2uXHCmAOY5+TKQA/AAT72nuoiyAGcAEH4AFWj4p5IL6vZ4Ae14ADwiiuVbLsvvKH4AEOcABn5ITHkAB661aH5gzasAAn/aVmzlOi7vlPAfVxPdAT7P8+nwCLnh+RiWmCcdExuH6Inmty3ECmgGUoQZjpCeJ4L2k1NkeQIPmAf4YB+eAPy/G8bj4kK+HwYK+EepKGnjHJ8AeeFJAIP7xqeTAP3lDbXrqfwMK4P4BPCQZ/AfZJBkPjlEgeyBgWs8BAOseCngBnC9Y4soYL8B8oMQxgAo5c1AXRpyHMer+o6rq0F6oy5/PDq+u69PmoacR+p6/suun5BaJ65s22bZsNzbGx0cbbuk+bmy586zuu96jt9Abip5A75wbJ8FBR8WxwnFQfv0+8Af5B8XyWKwVVXJcuy/Gz5x8W8xwZjwXh3PdHBvSdN0/UbbzXU9Z1vXdL1/Y9l1/V9n23b7r2vcd33mrd13vgeDBff+F4vjKf4mprc+Fy+Yy2NMjM7I+guOueky0sMf6jIHnq4fbXB/QbH0DHHGyLG9n5O5W15zHD95rIegfLHdEyB9+3qgD/gyPssd7Zjh5vdas99QBInxwAfMZB9DeHRvqKePAMBT2mwSfe88p71FnmSHm/g8I/iWOVJ4/4yQ44BNVgIn0Y4j4DlPHHAkx8C4LOegcP9eMHVytbMu9CDD4IAQcPhBUy7en/wshK1F/aDIVGQha+dBkQ3FQzHG02CZ8IePxguY5vUPWpnhH5EB7D4InQkavEdBcSTHxLgVE2BrURzxShvFV7UV3qwah9F2MkWIwQjiK06O5l4zPlhcY6GBk4nOEhmMMp44SOPmHmWwHhcCCI5
IaP4GI/ABjAD+i0eADAjk0EYB8howA4yIdATQo4CGcAwkkM5nBjRGFNAOOMD5EwBqpYitsOcpCnkRXiMcGJPy5DmIaI8fLFRhg+DwOGGhDZVK/H4HFTZOR+DGAGb0Y4nCGiADHIhzssmND5I4I8GD3yODHTHHBwcM5lLbfelwYAOAHA8EAH8QDYxgM4HgBEfAARgB+W1JsIIwAPiIPOP+UTTAAD/i6AAeEqB/ggB/PcZABx8HtH4AQ74AxxADKWoIfgcGcCADk0x9iuCwiDBAHiigBxyUUAGPmhIAZkM9ACQ0EAH1Rz5HwW4580wBiIVQJCkIYqSFPpqACiJLBHggAwx0B4xJLzob5DM/I/gGA+B8AxdoHjUVaAYzgQZ7UIjwGAUuVQ/A+AJGGXgQEjB4AQH4DAmYfAYKSL2AYD9YRgFHkQksPA8RgjgDCwAYC8SDQ0HgCCuJ8AMA8AwHwCYAg4DDHGMMcAwx8BhHwBgPwGJkAcs6Q0AYDxhgDrGxVIw+IUiAX6IF0CRgQI8sYB8BgHxDAOB+I8AQARDjhEPYWQs6WosMI4B575QKuA8qwbAhzoBzkDHgUsmlaLcjHEGMN7o8zEkbB/WglMFTeMXsoOOb5TB4jxHCUo596YlnPK0TQfwfgcEpNheQcZA7zlaI0H6ZF/DGD/AfCkYZApGw0HATgR5nUKPmtSPi+BK7cifEdbk3xs7K3njW05VRNBHEXwmI41APzUs4UYVQgZSidXdD4R8gl5B/wbeyyUnloC9khEeOeyg50cETDw+a9JNilWqhYROcF3Q/gYH8BwP5th8S8KUlgPxp7+kqIbgEgohybE0HjgeFJBYU4MXjikmhLF/DGtyB8owzx4kLuDVNqpYQ/4dziD++JF8lNxIiTlG95yOVoyXi1jWMA8zAxneC5wxyIY7Sw91eM0HED5PfeQfmRSaI4yVkzBg+F4lNIrMgP5FyGgfwYI8eI58tDEHjl1CZJIWj8zEH8DgP1/ThzQOfNQwx85tb3DOgyW54gAtxnXWBHynugz1dEeFSM/AfHnjfQOPFOpYABjQn+edEDAoSToxWopnsby2XvSWycjaWyXtgvVhh/Xz06TzUBP3zEvy2lweIxA56qy/DReO4iaax1nmfG4gAAjH1zhnDQONgcGAYH/OpuNiDwPbqRW+8cVZL2aOfZ9H9ohw2nBVenDhnjz2xDQndFdGq3KUr+GhXNxY7NwT/bCv9TRRBzukf8yB/kV1Cc+ao8dsB/HwM7esKtV5gH4OCpG5MzThtyOfgHAtdO5aqyrg4AOE8LA41IYAJyHMAARxLPwP+K8X40yXjW1F6dZ4/yGKJTQB6NoaPgOZYuUj85WUTq7BywgD1NG3mYOeak85wP8H44xgAjE/zxLg+BHBj3tqxePRgf9IzpbnpfTeBwyai2Mn3NFE9W2IIMiOai1k2LjivZmzsXkTmhQqaEdvBDn9Bx8Z5Tx5k7H+QOZ5ax+BjOt3OAXLO7+zH+MHmI/uZ7rItkvd1lNSPk8UGfxvRIE77B+MZhvlMblT6dwSAvDCU8KB/iAHnDGxm2KpqnFHpc/+oxhCF7PriQm24++THiORwzPKSPzrUyoSaueqJ87ufIHgHGK2H4+O7++S5yE+II9IRuAcBO+i3w/8oUD+zIA+zMEe6WfG8ucwhmdAyUzkw+Ok1ilalI/Qz44mIK/Y/qxkNw0M1E4+x0IkSwx0/u/Q/0zU7m0oyO5ayYKTAIIm+OD+5s3aUZAWt6hqH4PZAi1a1fAtAw6WzU+28wacGGySB+A8IuJYOy/GN3BKxMKSYAumxXBU4sIkx4z6KaJUxqX8GG4/AJBmSyHwx+KSS/DMgCH4Hwz6D+vm/G1E01EAImDy3Sv4J4qwyqX8sqxOoUA+ysy88c7oumwgJAwnAyxuJzCnA6b6HgN0uMB8A+A8uNC6q+IckQHGGCLIKWDwrmD+us9QvOrQD4xUNYgqAeAeEfDcg2mUcQMUHgHAvOLIZiSQ
IkK4Sw4UB4B4NYJ+vQHCSOvSHhEHGQ5sB8PsYsEOGGGCECYBEZEc6HAkKZGOlBFuAeAcAe6Wsq6cbuckhmHwoST0qQEAA8niJ+aCW2LcHgAC6MY8oSz8UW9QHgDioWJmVyD8ZwU6EAAG4+n2oUWEn2HAAC3iwAHhHu7iPiP0XGbQA+n3HwWaVUnAXG5sWUIbIOVoTKW0H9EbFdG8HADGH4VuaAU8oiAeaC6XIm4EMTCoXMH8LcEAI4Z6kxHmPuns94XUHgA4DwB8Lcz8YC9QHxICZDAoPGD+PaBAD+GADm4+7kH+AEWGo0AOYAyUAeoqRu7iPEKeZ6WcmApWHIAeS+yKEDJCYMH+AhKsDiHgkqPaI4JCGDEefMHhJbJ2cCYOLeB+EAGe6WY44ER9Jybqcebaj6fKjSacPeMu1CjieOeGb2OXMcbZMghZMkXNMoMkSJMsKeF+6fMw14dC+Ub3M8hoMhNcMlNEf48EU2KeM3MxMybqySkGbNNckCXlMmiCQSfQGGcjNycyb4S4b5N+MePyjYa1OWfQUXOROSb2cNNaQYkCcTNCQUfYfQYDOqMtNVPFPKdNPJPNPScvPRPVPauFPdPgdlPZPjPoa/PnPrPwarPvPzP4XPP7P/HXQBQFPfQHQKbdQNQQbLP3QTQHQXQZQBQdQfP7QjQlPzQpQrPrQvQxPjQ1Q3PdQ7Q9PVRBRDPNRHRJPFRNRPORRTRVNTRaQZNmcGH8ddA8H+KmemMiimJLMijOMifmUBR0hGfIMmbQQWE/LkMefhRvNvMufojgPeHwBHSHRyL3SCdUaikRRu+ESGeohwO0MfS2kDNeMhR+QWSIglMgQmfsfpSKQVSOgWF/SUMcHfS2KIMehwMiPeHhSkMsaai9TrQUaiYrRucAg2jwH9S+MdTCiVR8QehLSsiVSmxeixTaMuE+lBSTTlNvTqf/TwMg0jTUMmI4viXLM4a8hmYRUIi0erUQjJTFTEH/TKQVUfTSMihLJcQehgj6GfU4MhU8Me0iKpT6zoi9VMa7VRRsKfUKh3VaMnVfUaQZVoQVVhVvUqMufhV3V7TvSfM/UlTvWJVKbohmGID+E+bQJQPCg2HePcvgB8/HTaa2H4fMXKHGG/R3Xs0gPdUnSKOWvge7XsGcL2e6fmuySHXkipSKe69nJcSIgESIbQbQhxYFUmL2Y1TbSLV4F+H/Y0Hi9mwYhwK5ViH+H7WCGPRmMcKm9nYCviB6oUalSuaczME+HAA8TKZwlEcioZI0AcYUL0JKPsHABHMSRwHGAOH8PGLcHyW1J+D8YUoMpwoGVwAGH8AML2kwkwZ6VYDDJYGOEwY6BmoMn8RyA4YQkwW2B+YUYYYYEwAOU8W2D4f0VZacE9acACGYAe7kHiLdHcD81gGAYCHAA+EwDOY0m0KOMaksHmaZaqLYrlZoeXQPZiB+E+EAAcAC4coMDioQDhZ2puoEJKAgAeGAAG4cxco2VuctHaLAnoB8puoNdEAeEQAAD4PGAEL3HgWUVaB8oDSkV4HAAmoMDwoTKMnmU+W2A+BEpwVEAfbYA+TeB4f0P5dcE9dcAComp2UCPjd0NQEADBMKAeEgDOfNcNOoRuACUcH5dsLYogGALbXEaiX8E+EMAOEet8xeHgToLM4UMKaQZwGeAHGuVoOcLeOKKYVMs4AwD+quAOZwEeEOrCECBmR4ri8mzQA+AdGoA8rUlqEDGwcikaDAJmLMB+q0J/FstIWmGOE+B/EbEOB+qyPYZwE9gYJKOaHgYqHgaMs8AwA8I0A5baN4EEHOteQmGGTOu0HgBwrpWIN2E+EPWMavA8AfUuhSHOrcKY+/UQvCdAQwwEvIyFFSSwbQHwv8B/CwN4uaE+RYHmbyWG8nUuwmyUN3FwwWOAxhhFE/hYJ+JAQuwXhXC2yXheB+MMbGE9jQGeMywKLKI3C0W+A/UuGeQ8ZwhaJEgTAINM86MYRVigd9fgEfUuHOYu4vGjf2Ja1EJKUYy8K0
COhpAK9XjJC6ziA+9m1CJfjY3S87cmA/YCI8qUwGNGIO9SumzGtzNqxsIMHnj+RPkCNSEdlmH/kM9mGeJyhqHg9syUE8DCi6N41kEemE3whSegx0NNVGffAyhTfeacNnUuIIz1jvf1VaISfMQxlCHm/zkmVNFWYe6uA9lk9mRa1EIeJ0vmT2lBXiznL6Ie9Sz7mI9mPaNtBVRtCxSqKAw9mfmiJKJy1MOG5pBACGjsbHieHI3AQm8OMcuiSxnLmLifnSXMGOGHkMIITJmFKsIvniPkRcIe2xJRlaW2DwZw0sAAA82AJaW23cIe2ToIkkYa59oQ6HoUHmkUu6zHHqW0NtIQbHmULfoqD/YC9mNfmk9ES5msIrgSEcL7nMorZmPDko4AMcABDRXAEeork5P1fgHCNeIJrdjuABpsO0GdpwJM2YEAUe3BD1dmW0RwAw2nqI9mGAJYIIu0AABiabse3GH+Ecz+y+2YxeijqmNxHqU+XoPlqyNUPCEAKABxq7qLrAJKzU56J0l+J8EcWdnMVHZnpIGOAAfY4imfpUEeVHrqb7fgHjrzk+9oImD4AFr8B/YDNuNs9PEZlaHwZAoSfmAzZcI49mHgtxsifMxSLANfjHsxs0ILs4Hns9Vw6ueQKAHOL2KnhWyVaPtSH/q9mhqLV41SgukoJ7swkrnMEACPAWIbkpPA67t9rlwDuEadA8HhpijM/biyO0ITufnqlLnw/uOGYeJ8vhqKL5oDqiRuz6RO3GzixYy/qhqlvUa4ZjugalviyNq5vttbvykQIkH8lVtmWGffmdhVlDnAZwMwyeKbXBx7wWXNA8NDoCGeVi0FwllNXWJMy8H2HwCflahqPA0sH/b6F9qKG/EToEH9D0yNhWGMD+EZxNoSuyzEbRn7Y0F+JBhVTpmTomEYJ+xEznq/vwzUHGOXoG5aEdx2/C1kE/x/koEeViKpyHpUwnnRZhpdyVlDnc9VyeQwkRyksqMGte00me0qyTAr0G9mwDogxgSuyMNjl4D+E9xOILNwg3XjzatzJQISJGQmHHmUERzuNTtbxpz4x25pC0v30GEP0LiI3v0Ux5kz0H0dcjpcWewDkQY1FgWHuZkj0uNtGvFS0zFU0qxCN2wm9mYv3FDFiV1OAeGcA+E5CwkywGy8kQu1mGH/jmJ+QxgcsqHPj/3SOTE+JZz1rCvQRwOONSuTx4B/2JitnAOAsQx5FlVJmdpZ0egLJQEOWmHCkRIASwDgLOH8twGeZoRctLGc0YcQHAQORuBwB81iAd36KKwTFuMTiUYULMN9gv5Se+tKy+WaRyDwDgoU7uMMwAAerwRYSnhXguA4SCAOqzwnvu9mGCzUOIRyRAs4NyOQ/CPZ0KzBpfGu9urKrQYV44/CQJOP2b4kYOA+WvHukqDwAELuH/aqGenuRcW6AGIXlaHgAJdpViTfML5YoEnuEeHgB4UeqQPiEeZ0nmVwD+Way/ImRz70H47vtRbOpupzinhYEAXGLQWV6Ztb6eo6Il8ILBb6xoEeABwFDNkoVGkQVqImWr7D9MCPOwbNRqpwJqY4oUkqYjjQzMGfPCJMHwAmLksxumqwPaHycFaZqKPzKYL8LddsYsLgYqS859SkIL+CoUD8TO7vbMYRtUYYQ36MYrfoGOoju16aIkzVbyciXwa2EZnM9B0LaIQmYZ4uD5Kf+hEqM6nX7LQQj2IA/4FA4GP4JB4RCYVC4ZA3nDYhEYY4IlFYtF4xFkfD4jBozH4hHJBI3/FJJJ5RJGO44lHpTJ3vL4zJplNZtCWGg5bN5Az55EppP6FKXAD53Q6RL6DSaZF3hLobUKbU4tS6pV6xWaTVq1Xa9X4/XLBY7JZYHYrNabVV7Ra7db55bbhc7pI7ldbxeYhd5HLHO/5E/3xB0CHqlFpZDH8fIbPoQ/oNkIJUnzBMlAw/B3jCQMOLzfJA43vf5E/cHBMLh4av4HiYXi4RLmfjoPksvAsplpcv8zBM2/37BM7n6E
43PpIG+dPAwPhovLtdCthuoGj9p1Nu/9zA9uP97A9/lYE/g9nrxoJBx4PyoOH+zHdbDX8f9rLutFu3Ce9moF4n+yDlro9CPvUgj2II9zVIY6D5Po6iBPuir8ti76BPCgZ+B/AK5wGjMCuTDYfveiEGMVBzuPs68SPWhb9t8/sMD/Da4Q6gZmB+fgBn+Rg/mGf5An+eJ8H8EZAHOQ55nOA6ShwcJ/pYfABsMxJgB+GJ/N6T5zB/HxAD4A7Bn4MbBSWT7tMgYg/kMPh4x8Y8cTAgQAmeozekefJPB+cYPB4ikqoMwYIn8DJ/o290+n+fgDg5HD6H8GMcnAH4AH+eAHM8Q66xqgRkTAAJ+AIP4AH9Sh8HwP0inIAZ51VJhAH+AEZSlSaBEAD4QDxJccnID9KAAHgDmAD5wBHSoBxyfADqKYgPAFNgAn+AM4EAgwA07ZLBAGfIDB+AM+EBUQPoNagIDwBMcnkA5/B5cFFAhYajHgCJ8ACcAfVjSwcH9aEBI+YBhngYB8DAPgMD4PEZHwPgRnOcAwnmcCWOUBA/Awgwfg8HwMPoAwHgEMEfHwMJwC4DA/AZcRDAeYcdHAQORHgYZwkOYBOBgPh4X0GERB+QzMkff+YsEMJ8BNjU+AcH2UIMAwPgEOOXjCeAT3WB2LgHlclnCQJ4CAfJ+Z2fg/BwfAYU0j5hnicZ4OUPg/B4PkNYUMZznCeB5nGzblB4Hw/szjAeD+gwPkeB+8n+fJ4HDsQfO8H5PmOZ8fbyee1HGlZhnwfmcc2yDIcJQuZHmyshVOHw/T5jHHMhwpznjIR4P+Hj9h+B5DmevxwyE5UM7Fsh+ROt9Nn+YZ8nGfB4n4PB/g4gR+HgHm6ePvG9SHwMzcAf5P9YD5jsHIXjn+D2/O8c5Hx913j7SY54mOeHN/ePHPO8Y9C/Yc5+SD5V19R2bDCPZ4Id7o8R5j8Di7Jvx2nukbH++x45yjIO+HiiMtTw3vGCeS8t5qiWcvSHw3gysEAeKFO0nx7SEHuvfSeYMzK4gPjjEeYltUDVCvtgmzgfzy0RONfqI9+7+Rxv7hGnyBMADIDHe6PmAsB11wJfoJ8zb7DBGVgi2OCaCizQWMGPCDIAAOK6DgDg4w54PjnhC9YABRmMA4AAb0B4HxgKUM28eNqWFJgDhiQIcJLIej4fYYuCcOh/ONJZD4eMcUnj+X1ENwJmQHs8iQMcfI4AARMkbAoY4gFoSSUqZV3rYx4AAiwWWLQ/AARBDgAADy0B/BzAxGOMsZ5FRphLGwzIwI3iAUoSxtcbVbL3AHC8wQAX6x9j+mwf0B4dp6UKM+Q8ux/A5kU+KEccJHvAiQneSklonPdAAtBO8pjKyid9JSUZZHhjPHwPABEqJfB/H8GMCEsEkyyjZGo8sbVYCPA/LpJ6T5QyrA/MCYQ4AIv1J8J+Pw85ATJTO32QszpEDnmjNGag/1ewAmw4SSclYEN/m9OBxICGvgCRw2Ocy/SMveePEGA5gkRBjAjPSMziEhnmlqZifgjyBS8M259xr2qfTFULH4fNDZBSEma+2ntFTPQmf+o2SI+S/0/ku/Q3skndpDpPGI85H3vPegK8s/6IgRiRppLJwJuIiVln4/V/QY0wvkZ+SyZzx5JDPHhJKpCZwfpcqWYmAs0qnwjA4H+IwP6pksqtN0Y9PIGjxjIPmK8EqvkZeK96DI/wMGSBOIGtNNomvYiI9sH9b3vpiMHYeFz5q7NqHw3l9z3qkPzA+j46zan8vJsI/15liJIW3iVAaj8mBHxRskPiykVQcRXpURh9c6x8B4beH4gwBxB2hhFE6IgH3gT8YAkG2KpzPQusfbhtTxobPec3De60mB/mztg/oPK6wfQmsPI8yAz7buvuJE2kFj4+PtuVZWUCGbnkXGCOMcLAmCA8whde7JxpYmCSGD6oLGAfOzH8ByN44UnTriDhAHAH3CQ+J0MMQI8XdDjHGMEY7ypkB8Bw4MR6S3IuvxCPC+oPAMVQw8
4PD44Z12Nha90Y7kxg4NVMHCrt0w+PBLW8MYCygASmD8ADLJ9BDjDHGOCMk9TBL6Umxcw0olYRvHgtAfgARxqkyyB9YIDxwMtACvRKI8ADjHVJMjLN11hD/GDM5eiiQA1HXvVCNK4zvZrlNkYf4B3ujgSXmuUI+I2VdlNlFDhHxiAfHgBwPyoVoKUfs2vMNNR+ZkVfCUH+rRARIWwP4A5LFoABEeIdSQ8FijwAwoofgDx8PdH+s6CYfwBkG10fQcMzlFH/APUdbsJgPpzIMMCjA/AEj+AHpCAT7jMj8XmAwPiwLvNjX28DTtKyCbqLxOchCFS9E/gsQMd+7i5j7goQofdGN5lI3qQIeaMy3bSIqPzfe/ylVgIGPB2JdXlbwQxjXhRQuAzDLwqQiypMpcVJlxfZ5eEdEWX5x4m7w+TcpLByjlXLSs8s5dzEpvMOZc14tzbnEWec875XzznxWuac/6ERfoPQ+jF76P0nenSumE16L03pvT+odK6l1Po/VerdD6x1nn/W+uc8691/nPYexc27J2XmXZ+0cu7V2vlXbe3cm7h3Hivc+6b/7t3cvXee9WX772Xvnf91+C6/4HwhbvDeHgr4rrnifGSk8f1bx3kee+U6j5by/mOqea835zq/nvP+g616L0fpOu+m9P6jsHqvV+s7H671/sOzey9n7TtPtvb+47Z7r3fvO3++9/8DuXwvh/E7r8b4/yO8fK+X8zvZWOJeaHf0sjMuNKD4VjpRFBtT6LYIOYHv+dSE/TIETr8BRF/ADzr9gP/4jxlSPmpVH33/FDg5KQQ2iOjo8LIyM8cJ1w8R1z7Yyw+hw7+jw58IhCyA7Sf4mydK5MAI/w97+IcY/whzxSILKQfwow3B4jk4j4Z6qgfgPIgSmoX4fwyQT4X4yodw+iTieAfwf4d6AsBA+ZHAwBRIo0GIgwScD7/qqgf0Egf6M0DYfDDAGQBzOgPwITKIv7YITAPAHgQZiD6LrjN5L4eAB7bAfgnwfBYAf4CcHwjEEKikISMyPxPYHgOAIgQ4QQHwIR5YlgeENoGADgCIeYYbeTu7N4OAIYcYB4Qg+gnwcZcQf4GcMQi7/yigPBSjOpvAZ4RwHJtSx5jIOZSoH5JIQ4BwDgPgeYZ8Bbvp44OIcYd6fggwn0T5EUS0Bz9MRcRoA4ecTwZwGZvAc4T4TwPwM5RMWCGAR7UUTxFTu6SiAg2YH41kVAZ5HD84lDKgA8MsEoYZyoZ4YwE8Wp8QPwJ78p4gR4T7UQOYb8YLuj+xJ8YpM0VBN4fkZYk6dMIcIMEqyQeIT4YxVY4wcADwPxJa8QAYT8bqVoQEDjv4lhHIT4ZyfShJN6JURAiwnx/EMwfB10eUegc4QEXEfKdYEcfgDkf0gEUIwQCcgkgx7RN6UMhQishkEcEofhvIR4YwGMWoD8XBMYZ8lQccjIPwOZMzwglgcMWRBwnyHwH8kkVj/sIclEoslYYwHEl8XCA5/Ac8msbsm4R4b8nRJ8YEYy+J+wg0dQkkdiMkM0mgR4R0pQ4wH8XEEivEqETgeZCMgMIcTyfgzMn6AEHEocMYkoAA8UYZy4RwHgOY4wT8e55YcD/4YcjMpQY8nMtxvCx4R0U6Bsukrgu0EAkpbQgT+xZMe0vw4wQEe4xgeD9UfcboAJiEPTukOT9ULUx4cEQsyQkEdk1kvQB5LB0wGY4wYYZwPwPwP7YIeEnEboY4eYeDjs04wQPIfAZ4k0LpwUurpwr5/LzEcLm4r06DyktopjyZRLzAY8jgrc57zE1gqk7M6ryM4c8T5z589Dv09Twc9hGk9zBM+DKc+U98+jxE+0+8/Dxc/QtM7M/ghE/0/4glANAUy9AqdFA4sdAlAtBdAVBs/9B8/lCM/VCc/FCs+1C8+lDM+VDc+FDs91D89lEM9VEc9FEr51E75lFL5VFb5FFr41F74lGL4VGb4FGr31G73lHL3VHb3FHr21H72lIL2VIb2FIr11I71lJL1VJb1F
Jr01J70lKL0VKb0FKrz1K7zlLLzVLc8FBLytL4rtLry1MbylMryNM7x9NLxlNb+tMIr1Nrw9OLwlObwVOr8NN9MVPLoFPbl9PorFO7vtQLvVQcYVP4tlQ889RLmdRdRlRs71R7gFSNSVSc6dSouNS76lTMu1Tbj9TtTlT8ZlUNT1Ub9FUtUVU8ddVNVVVcydVs19V9WFWImdWdWlWrolW9XFXIitQscVXdXlX4oFYIiNXruNYrt1Y7tdZLtFZbwFYbpFZ4idaNaVaYhVZrsVa7wtata1bYhNbLxtbtAFcIg9b7rNcryVcdAddIs4gIAAAC2tVR3ZWx2ZU1vbmtleXMgSW1hZ2VJTyBUSUZGIHdyaXRlciAzLjUAAAsBAAADAAAAAQajAAABAQADAAAAAQiYAAABAgADAAAAAQABAAABAwADAAAAAQAFAAABBgADAAAAAQABAAABEQAEAAAAAQAAAAwBEgADAAAAAQABAAABFQADAAAAAQABAAABFgADAAAAAQiYAAABFwAEAAAAAQAAtn8BMQACAAAAJgAAto8AARJhAAAAAIA/4FA4JBYNB4RCYVC4ZDYdD4hEYlE4pAnBFYxGY1G45HY9EIvH5FI5JJZNGJDJ5VK5ZLY3KZdMZlM5nMJpN5xOY5Np1PZ9P4TPKBQ6JOaFRaRSZXR6VTadHaZT6lU4jUapV6xB6tWa5WK3XbBT6/YbJSLHZbRP7PabZOLXbbhMbfcbpKrndbxI7veb5O77f7lgMFS8HhZLe8NiaDisZL8bj4riMhjMlk8TlcthcxmcFm85f89n75odFeNJpbpp9RcNVq7ZrddaNhsbJs9pYNtt65ud1Xt7md5v6nweFYuLkOJx6VyeVZublOfiuZ0aB0+pPut1512e1bu7ne/gO54Zl4/JLvN55Z6fVdvbpvfdfZ8ZJ8/pIvt949+f1fv6tL+P+jMAwEyMCrLAkDolBMFJBBrcQersGQihkJwohULQuhEMw0g0OQ6gkPxAi0RqlEURxPEEUw7FcNRbC8XwpGMIxnB8awbG8FRzA8dwLHsBR+/8gv7Ib9SK+8jvpJL4tCAEnSfJ4fxKjUmyhKEpSmlCOytK8sy0jkuSjL0DTBMIASxMcFy3M00TTB0yyeP5/nhM83TVOAAA+gZATbOyGyqAAAxDPs/IXQE9IGflCUKxc4IKf1F0YrU1gegxf0lP810jTFDS3QVOTujYAUrUCqy3TdSw3LdU1MjgDoGYFUVYf7Q0QYEnTlWdVI5OR8SfUldQ8j5ASeAdg0mjtfShY9hI7W8oVzZkSI4f0uVlUrQ2VKFEWlWlnS5blpUBYx/1Hbtp1FXCBHhYFxTXV6BH5eFuyrNt53dMFPoGQ9z29MF2n+Y9+ybaKBHPgaO30gZ74RVyDYZc7QyxOt+3QjliYViKPW1NFL3xi8n25a9MNCfkoXnkdJNDOlioFSF6I7YmWn/RWYTLQR+U+fGU0ZeqBEDdeeULJtyH+cKLaFP0m24eCBVjm1RIIfGnaTOzQ6LmiBT5qCNXCgeKWY0OC3jsFjtJX2i19qs3NJON17LYNszFWm4V00Nnzzp261nQFzH/Ym1zTKuaVJJ3AzHpaBVfk291ZJqB4FZXDy80N54PlnJyzWuFn/Z/Mym2eZc/Euz0RavDa4jNq3JxnG1Tlcnbf1GPo1Z85HBuewzXOuZddbEtj/ndy9zsyO2MfwP9b31Qc2f5P5Z2fdV4gRv9x4m4pEb/e+XTjSUF0/r7sjs6eDK3RxRb4P+h6PizKB+8fZ7GKsP+f6fqvX7/x/KPyW9/+ntf+eqAJ54BnkgKeGA534EndgWdqBp14HnUgidGCZz4KnNgucqDJx4NnFg6cKD5v4Qm9hGbqEpt4Tm0hSbGFZroWmrheaiGJpYZmiMQPxXI8FyCAa8QQeC93fkUeE8N8oA1fABH4noeCUnYt8IqPhPTjA/u4iMoGJKcwfuMfOjYio8E9D4AeP4P4gA/jzeFFeJcYIwRO
IoOBSo4EsM6SlGgH8Sx/RAe6RUcSlRgMuXhGeJQPyQtYeYRUYSrxgrxVJICLEiR/tAccRUY6r5IRXH/IyJckJHOvIqI9Yy5IoECkwD8QbTo2ETAe0CUCWJRrkhqU0xAH5Ex/lZHOQMrpTkReRH2WkolRx0XgOBsbJCKKQIuvCJcvgDzAItMNlRFFFTCmRLVmkgZgzOZ7NAP48A/y9kvLaLEwZckQclN6VrFpCkTZYB+UCiJRylc7OMh8654Shm/NWLE8JNxBIk0cfkqZFSinBJln88iHDjH+P598fqBT4iXH0f4gxwh3k4RMYdCQDzCIFKuh0gg/x3HuDeipEmBD/AHF4f4vxAD/H7KOL0/x7gnpGRFflJlfDAA+MAAgAVlMmpOD8fAAxwAfHvHibMqGfunjqoGnqgY7K4HuEemZEFENAGA99UdTaeJyGAuSbDSn9v8rCfusZUKyn+rOgOtKVK11qramSt5E5XmcrmcCuMba71yryqGvZDq6mWr+ZOwJyK+pvsKpmw9iLEqdsXYyxqjbHq7sisiyazbKkFsGY+zJjbNnQsvZiz9oLQkDs6dK0dpLTzos/aUy9qV/WntYYa2JmrXWzMHbY8FqbcHitrb23VvrYXAtHbs0FwrQ3EL7cg0dxrV3MsvcovN0D4W/upcG6tw7nWVukfK7Nk7tmpu7ZG75cbxmsvDY+8pbTEDxraYiHhCZ4UJb8QZJw8A+RbSVMW+ZBr4rVXmPMgl9b7tDmgACoxAr+gAaLQhr4AL7X4SZNABkhCCX9A4vq+LwxwM1rARNRWFCBrziTSvBBA6FSiq+TnBldIhSkITiID5KV54nkviknDrl5tqILJYg2HFiA8B8wApJe2dsCiyDgf8gqLzLjnIIf4z2c5JoSpUcI+A/0XKGyaHuISBq+YBjzHaWMf5BRNNCJQAx+AgB5RkAA/6MqVmiyZnMOsptGHwH7NpQ8vYvy7gbHeW2aZiABkDIRzsPKvHhEgAAEKhAAD8A8QCr2djgV8HwAI4GcJ6HCPBX2NiZD4AxgfN2fZCYcx7oIPGZDjYeUEOAIA+MLiDHADDIIAVjaTV8DjS7QKfDBHgPACGECNZ3wPjROYf5IZdUjENeGYDl36aNpQDwBxBjAHxkEA9F9JjwDhkEYciafDD1/gMoE3NirAm5Pue5B8rOKXjoAol7kpaUyDtUfgHgHAPkTlYi+18qM0wVuLDe8CaNNEfGIeAcYxDBEPQrdu6WAs0eCDwBwHg/jPyeP4DwOA+KV0QHwD97CnGIqG53f29t8b6xqRceGj5H8A2yPDgeet5gHHgEAPFOQA0KpxscYa1VaAPHwHgDwEAPZoAAP4DAOOc6jvsB8AGnjtxPAeyYfHIBDjBH4BwDygdjx90cB8fCgnhDDHC7jghM411dToCB94AIkrmIuIPTukduA+AAAynmjgIBwDgD/E48NUvc6nNAB7uOn9Z62Bzr03NrdhqvPeiSt+06fWMMAQI8AcACAOMAMAfAPAGVe7jmwMAf62HgGBeQAAgq+B4AAOAYL5YOHwDwBnlfCkSH7HdYg8IkiHED4vBScx/jBdwH7qHZEpeT8IS7KIg/jA8AeAfzPoAP+jTztIA+vggz/ADr4OAPAAjwCD7PTmhORkUlmIDXyitqawA4ohpogdZqKABtpKQhxw7qJ/1H4ocDkD6jq7+JfjtAcLe4B7TYQaJIAbfbNYeCUp07SgHAH7QooYvZo4A7rLTkCoQIfAHAHhRDubcRRQB6i6bgf4Q7b4pBp7+kAMDwPkAhujfTrcBKH8BiRK+wB6H6+TS8D4P8Cw6qvAA7fTSkDsD7IIb5bzbLDYHwD6UsFEJgfooqLzsbegB7ugPD16RBeQYbrYD7s7NAH77ZcrkAcBYyK4cDbi/bQxBbNb6kK4AAcDVLajOzakK5Y0KIYcObYTYbnUK8LIHDPyRIAcLz+MMJnb7ZXznMM6fDTYOENgoovZ3DSCoTfxJzwb7Dt4fAA7P0PR3D
qQlwfaf5QLKrIKIwPEQSRCiMBYDAH74yI0Mb4wAAPCQUNDTUNcIItQihOgD4QAAbKrR5JxnbPztB3D1bRBOTmL/woAQahQAL6DO76bwQHBXKRIQcVsV4cAMJncEzWbSYMKfD8DMoiZtQYAYLXzkCnabgQCPjqIeAGBeQQCUoi6hQQ5Xwojn4B8LEdKdkakayiIYLWCWQcEU0LDcRnYcKfxKUcbVYiToDkQgb0RpBqTKbFYggfzEAnIYIfwBydjcTkCoTB5OSVUgUEL6ENTR4QcPabkPae8PbO8hwjaO4toYAPDfMQq+wDwAIYEnRV8BbzsHUBslID4Q7WUlrfpKUmEEL9IjrYwtDyjA0kDrrBwPhcxYjzoAEq0BrX75AQ5W8lpprsrSj5o70p0XQrJ3AH8qUnSpzPBSpYjw7pLAz8ErzxDK6bksZk0PqsUs4tp8gQIQ4Q7egBCoRcqJhM7RJcsLD1MEseAi4Q6bkFDrJasUMsxaktArDKMi4fwBhUiiBRJV5p4gpqYhZjMNojbZwuif6zg4s1S281z3C6Y38163M2k2S7g4SIa0y664686xq9I1836xc4JAE4axM4o2U46w85JBE5aws5o2s56vo0Jg5/YvaMQgqMk66aBPpjwg8JQgQd58QiYfoT4gs74g08If88agwhofbjAgir89c9qSIiYeZg4falIf5OTjAfwgSMof8+k8QgZS884opSE/4g9A4grLC6Iigc4e8hQfwJ7sYfwCYfgHAOIBgH4BhapiYD4YYQ4R4Q4YwJ4fgEopB5FBQg1BjCq5IjAfobgfLAAP4fgPIf4WYXgMYT6MVAYfYHYd4e4eIe4fAfNFShIhFFwgc/S5YiofoH4edI4P4fofgf4U4fgec84P9AYfQP9IQeYfYfcKdBAiVJtB4itKwe8KYP4fKMQT4foe9LdAdI9IQe4fYedMgolFgh9M82c8of4c9K1Ns/gX9OIT4cNLggc8c/VO1PApNPk79Bk8Zg8/NGFJ5uk/k/QP9Q1OQB1RVAlRoe9O5iFMoi9Ez4wPgE6K4WYP4BIf4TAP9P03IikKc9tANTlOITwP9UE9gf9UVO861Ms/4eNCIfIfDAAb4fwPwPwHgf4P1WVS9Wof5iFNqMVTtXdXtRk9lUdR9JE/4edO4fIfdI4b9KwP1ZdXlWa8FTDAE/lN1XIe4Tk/lH9blO9d1Ydale4edK1cwf4PrJJKVda8lGIf4fNNgflK1bCHE+phk/Qe9CNclK1fNUYfoeYec/4b8KcKYX5V9ga8wyKZYONCwGJkweAOYR7nIc5UgeYY4YAB4eYQIYMA4ONb9fQftCNjNI8c1X9aQirYymogcKBXKIc3czImk/8/9UYfgc4c9jM/VM9aNJw1FpNfQfwZ9flAVjdAtj69Q11qtUdq9rNjRrM/4X9rs4Q1dsBhgL4fNcqMQHwP4X1TltE41tRzr7Zo55IMcfDJADgPwTwH9us5Vu4g5YBfkelntqY0tPgh1wc51woiFx86VtU3DEtxc303q5tzS586YhZo7Lhhsh6YdI5zggk+r7YcEDQcCoaKBOkqwPCJKkpFU7og10ogVUtAggVs8/gcdMYgQR4eYb9L9a6kR9E8tJdgwgl3NX13dTYcN34f4V94V4gH4X95hGAik+F215Ygs+tnoP4ct6IM96gd4fwX9650gik/DJIeIfAcwOIfNSlagf7AFI5rahNTYc18d8ofwb99Iok9IgiHAiFoIq4vdCIfAYdjAeIGAfISYf4SN+gRgH4dlL4gYbwPc/l/dMgO9/oZ+AAmll4g+AU+U7RXNWYfwGY3YjFPFNweYRwPIPgfwPl+gX96wX8+obuDQP4c4PcKYf1ZM9gfwd+EImdxohpsYd9A+FN2+A4jFcVOIbwPFZc/hhlTlAWHSimHuH6hOIV8wc+IwmWJAhmJU+NWYftPWJ4irAAfmKVHFgF+hKVLk+ob+LYc4XeIFcuIYeeMQmOMgheJU62NGNQqgyTKw
fwfYe4b1HAHqlJhhfhUc+obYW8/gc4TdjePd82Pt7Al0/4ZlEJo4X5mrjF+YgVHEos8VjCKZSpjwfNiY3wjAY+RORdHAHeR5mgP13Qf+SmSwbb3bKMJV8wfOP2Twf4dAR4d4fdCilIfwCAf+FYDBXIMVTk9uZQcIT8/QfmM+WGNeNgf4O+RYPAf4HOXAf9uN5tX+K4c4db3ZrOYQfwfuYols/4cdIVcjAGUYPmNNZ5XIJ4H4b+a0/QZ9h4e53APebuQwjGZWcGcQf+RwH4e9sxOVhudeduRNjOIYfmeYlk/9CNUYe9I9Q2eIf2kpXOgAb88OhmhlO4gUUeFgiuhgV+cWGmK1sxKVhoa+S1POjFRV8xSGTuelblcNNalM/WIBaOnGlVRtRtJmhI4YjAc8/WmeRgPgPwPgP+iVTgT4Z4e89ur2ned1H2IeoAnE/+e9QNjk/TTpYAH4d+Is8UUYB8/TTGl2p8cgigdBSob4YIZwEJoDRQHylanBdk8Qd91QYIfZ3EnoHzRTnMtcvghus9O+t9jjAFJs0s/k8Zj2ZWe2gZugPuu8mQis9YuGycKYc9jlI90s7LJNIRXNIQZ+hlYVKumAjO0otu04b+ooX+1alk/ggQH7jG2Ae+2U/W2mzObwjGEotOydNWkUKdHFNm4Ib4fVRZ52lhROJ2hQjW5gtGyc/9R+UYfmGuGpXIL4D+608QZ4b4R+NNPJeMiO5SNtywq9l8lZSBeROgfznYAAPpnQMFKhPaOocYfiIxfTUSWA8+QtkA8PBlrw8lyZCQ8/CW2w780/CC63DS7FzgiDjBLB5DiQh9fDAgic7LNpLCK+Aig6YjD2ObKR6lAF3YhlJuQB0FWtLYgs8JXO7wgnGvEtMwZ+JHHfGYhfH7DoiU/FPk88JQZ/Hl3dI8/+WAf2DTwAgU+IqfFc2wiNCKHRnYfimUJQCZORkwP8jwQwP4DNKKRYTYfAY7RNC4q7dog++XDPE13YfC9lZQPPJ4dmzYY/JogQeYdd+vKVHAn+oMjHJ4ggQpWEy20aXXKBqdjXFZS4dIgWrmt89oc+y8/+2vRAhm1uAVPVKFtKYt51fr3fJ86wT+gNYWqVX+8O1onvRLE3RYgfUnHstIjFBQX9jFrW4E/c8YfHVuw9Rc/QBe8PYPWl3E7MJVBgb2Hs/dfFPQffXWWPU6lJOlAVTfGc8YQ4D+gKe2glZE/58gn8doYYYYeIR4cwF4fkcIgQCQPwDPMoQnXFJl5JCFn93ZiAb/bvYSlPYt5vf9O9pQodhwXNQIeFNd0ofgPFG0/YDzPAA/au3AsIyXXtJtjXUVXwH/gZiGZWqXZQoBhlL1alMN6OV6j8/YP5LFPQe/i/ffbN3lrXjs8YP/Yu3ZgwfeZU/+7YnRhl0tbt29eFs0/lA/mF79ynmgfljNhJXOYSlPcId+rogQcdcIcaMVQfUE/QfwC9O7DleAYWZtUFPQc/pfjAjCHXOAOBXwfwGJRAd7qAH4TgA8DQac+IeIZAealYHAfzJHrof9YulpcljFZuZoADrviq0nBPbAjRea+JpogSTaSyJKUJfnxwmNh1e0/V0tjGGeZoglPQd9YVyA1HzmrultMlN30IX/WcKbK3q1u31Fnu3dUn1gfwPNs32CS9AXEnpg0uy89gY9e9MlhIPvSbJM8P2PbnUw0ofNlwB4c774cAejkQfwGQDyJYZwH4RxRCHi9jzv540TDiZLdwgXzJoy14gXOtwg9v4H94mSJovP032g9X+3+VzPDf/Y4TyggD/gUDgkFg0HhEJhULhkNf7gh0RiUTikVi0XjEZjUDiEVfx/f4/f6/f7fgbvjcplUrlktiMdij5T78b7+Z7/M8DWb/e8DfcuoFBoVDhMwib3Z75d79c7+ecDPL+C8DBdEq1XrEao0Sc7nfbnf9Pn8Cfk8n1ZtFptUIrcRd73fbzfb7fr9gT+f1mgVjtd9v1WtsRuFyul2f94vT/vl/xmNlWBh1wub9fOGv
J/nt7x0eD8IfmdzdoyENe9we79ukCu2Ys+hiV5zMGk2urGjhlvfen1L/1exxe0hewhGz4FE20Lb+m1D9fAAPr/zD+AAf3/FhHCg/E61B48KX/KunN5/R6fV7cF7Gy89C7sJzFxymGsuszXr67/YAPYLwYwHf5PpK+ygPag7YLmfLKruj7fQFAqzKWegxnwMcAwalkCIMn7JH3BLDvS80BOE055nzEkKwsx6LJ63J8n2fh8ruPzExA+zhLkeZ+tPE8UI3DCCpRFh5n4sZ/D5GceIG4S4yEe68u1JCMR8gjknwZ5+Dufw5oGHMjyg4R8tKfxwABHcoIvKSBmAA5+B+fwPn8B6BkO6SQH+6cvLMpkmpRMszIrNE/I8sx+HfJiBSfQKJ0BRLXrMm1DT7RiHUXSSGuEfwvnqeJ9ydSqKUpTyFLzFs3HsA5+rLRFQoVUFVwcfgBpCnp/VjVVXIPVtboIvKBh+2NI10gtc2Cw6CV9KdiVZZKMzcu76MO0FloNYdpQXaSGWpZZgB+QFroXbNvXCtlxXIl1wXLctz3RcVWpukVoH+fk6pC695oFAF1wtRaPztel4s7eS73eg1+IJfF8wFRc2Ohf0K3mkkCzqw2D4Q9dFn7AF7YcgeIIUw2KwbRZ9mfXlkYYgWOoTj+QPtRZ5qbg0AmfOpzl+n8YLKf8+JHlab1+4GUoJgKN5y4tFnOe54AGfAfn4E8AgnOpEl+ch/nMD54huYZDngP5DCOcABs+fwJnwZLz6Cgeho0emNNDUFeF+fB4pKfw8zqHxfk+H5vk+c5jn2HboHLHFDn4qIeMc/KD7SgeNPzn6BHgAG3IFmbrbhlB8nwktUJAj4fh+T5fm+b5zn0kA/nHDpv1Qf0ZMbkqI409KCHub/KpLxrHcykcXc7fiPl+P/SpKe8YOgc5+MMb589f2S/eghvaMT23cOHijXd6X55yc1dil8P5nmDAKxj/5XmRgP3pLXXl54Gm6brAgQ8vRRyQLH286p7+aS+y28jDcXJklJ+SAsovgAAPHgD8kj5n0KHJ++sxxeRGCfECMAfgT2mD+Bk2Qf4GE6hiD8A8Q5dx/iAAePgT4iBwkCduHgPgERkD/ESQMT6tjGvbMyN+ApvB/i+htA0gTqnlqHKfBJ2I/xxjvHehwp4vx+RRfqH5OoT4GJ8RsOccYHSButH+D0HwfwfB8gS6RzEAWUFjda8EkadRnkmfMOMuqh3nRIMYXlpBpkYC/H6P51zBXRDfiwWYc7LxOxdLyDyMQf13xmaNGh3yh4+ufD+L8kQvx3xwiG6t9Lz4Js6NKaUuwv3nR9ior10pszhRNHeL2RAfkjRAYhI44D2x+JOii590MN4bm4iG4RXg34omIiSbiURI0YIwD+nEgQPxnjvXwcIpMrYuj/D8WUXh/2UQ4MYqBpQ8AAj+Dgc0fwMTQAcEeMcD4pwPjzEeflsI5gHzinIDEfAPGBvRlAPuJkoyfnND+MBx0TGIHCGAAGaihx/g8LsLwAAHIyzcL+tk/xAhBkDfIRAfAf6NkEfnRUf8JTNl5LeUopZIyxLOiGW9d5wiTUJQCL0ssrmOUSL8uoib/Ti0kNOcmUcyXHTNOSnWVbOqaIBEzAcgktDaU4K4eeng/afTIH+jB75ISa1EkJUaao215SnUO7uHJf6dHAp4XkuZI5bpGSMnUD4nx9JJMUWGflRyTDLH7GGBlYYzl+fIds/Igxhj+aaOOcA/gcWID7IpeIRmmppAePkcY+BxiHE5AZWI5x+iBH+I5d453c1jL8t1CycE5D/ovX8f48IfNCA+Z8g9Jx/2qeRI9lhK2Vm3tyxa25K0aHZt2eepzCLfmyuCdu4a+QAkRH5CmfNvLekpuWQ4fABx4LRZbdG7SuLt3dI5d671OCbkEKeRGHA97Q3gJbeIgt5SDWcPUQi9Fx71IXIsMC7BBbxkjLCgW59CnK3oGLfU4yKr9neIS+yoJBr54EKGose9ZSEVivoycguDcHHcRV
e7BBCMK4BD/hXDJGcIIwaa4IZw/A8QjHwMBOlF0YAYXeOA/wPx4QtX+c0f44R8DOXfhg+2Ik/YQH6PAY4+Ahj8IgHcH47x/CMkrXMYwHxrmgLtkYfwzJqh/H8BgfgcTZwoyEStyJBrQiFTTelHmRJColHyWUJ7OnQRCF+M/Oxqh/yFH8wck2XEihzNmPbEJWcykFtCx8ft/8hoqH7E1m5eSRZOkrnSTKfC7DnecvPPsiXbkC0FmMlWhSCaHIGP2sSSMiFLLK+bOQf3R1zybILPGmHZZ9IELzTo/9P6EIZqQvep816MoOP8B95SQZOd1XMP+lc8DPj6sgP4/AAA81yIAD+oCUwoHGMOjY5wH2wH+PgAI/AfAYoFazPBe3/6LIrT0gTSIh5y2TAXZkPtna1OgXYfmuRzWtKuisXI8R3j0eUGFxxeRfZQAYH4ACpyfU2X1oxPlaWGaS1eT+TGsofWa3xlxeO/N/FWJ66gng80cW1dSL6BkjN0k84gyFFRSieGo3jxbSnGtLqc2gXnfZs9+7YI2T15BpeKbxD+H4kWruWj3Z2ozCBuUcxzD/scf3KohKH0sTznWWzoa459yEoiKyBB3Lg0VhgvoqO4Nmx/pinsIFeRKPEfx885UCjeWOXueCv74dGL/anX+gEa7EPwA5cB/0gJAL4j43gAAe4ax8cEzFJKLvx4Qeh/ml7jAA7C6507WAHouQ9NYH2wkCoAtsYAeA4jHIFv1xe/65jx6JEqoI3C8jeIIx8d+Et2EtwURkdxaexFv8Nygf43fFrz91qLVBQPfoo+H0RHW8XnA/G98rcLxPmbBJd89C3YnTGDY+SAdeewfvEzw5yHnbvnKSTAT8c/hS4t0Xu6of1Am9fpgJ+wl111GPCD8h4AYhghwPLs0kmiBFtiBIUG6ADqBFKrklvQECDsOQIMRiEwJsGFXQIlrtkCDveFEwOFpQPCDOmwLQLiDFYB8PJFkwRQULoQUBgE7lyQXQXrkQbF8l2l/Fmm1wcQbiPCQEyF3rYQewfLbCKGFiQGBjZk6tgLiwjMNCKmMMLKFGTwnQoC1GRGSL4wmiFwnwsL1iLGXnZEACTHLr+KqOeDWnLB/sDo7tFCDQWFlmjmkmlmmmniTGol4nKAHAPhDA/gMgfh5pmE1BwB4AhgJoPKRr8iEQ5QWpIG5G6Cam7mUB/h2CUBfhjwzDNHuh9A8nDjQvvOKlwntnNnOmhiSB0l7pnomDNHnM3lURQtemdRSRIHfo1wun5pen+ifi6nnA9mCokiFtjxaiLm4nunOwrRaIVJBCYCfkXifg9wqC/i8iPkcmGiURiFvHtoBv1wuiUBDgPpBB8DQCfh/K0g4HKKRsdhPgIBhoMhxhwh4hJsdQTRHxjGUIdoer+RMO8uJjDg5kYA7jXFeBnhjAMkSi6RQRtFrnto1JJxKiUJAiUDMixg7nnQvisleB3hngenjyFHvx7FiSHJJGCiSRspejkj6taLapPmdBvg+jdC6yQxiiLG4pbiSpcqFRMRxSODMjDBXh9M3xpp9B+AXyYSZOpRaRtiMJvpwpxgAJyjQB3gPgAAfhOADgDhDhprxh+G6Bnh9gDrERZBxo3g+iljCyaSmCUqQPQrVqMG1DQLTP1vDxZJnyYS0SZxRy1jGS6SCQ2pMyYjlylSRFgwaiFmbKzD/zAyky1SGjHTEjaEnBHyYDJjKi7NIyai/LrhwM1C1KSABhDgOB+BxslB4A8A/rLzCldTDiivSTPC0i8mui7p5vJBhylzHrvRRCBTVlbzWiCAAEyKoCHTewNrwFmiGRHSRwwTNTmR7znQ5zoFpTfzpQszqzniUuXzrjar7gBhhxDAHkxh+AvztoACKhvg9ivB+lNlOzyneCLT0B5jTkhz2z3Juz4A9kwI+udTtFAtgCLwQCHUAwwzzz8ilxzz6liHdqO0ACMBJzuUCh+N7p+FgEzPXiCqxGNBAKQCFCQPeQHkHEt0ICKsuAvz9iUT+kaiKG
NNcHpiEwiq5CBkjCrlKMuAflUQV0Kk8CJ0WPtt4sPHcnZKwMCiLQlB+AA0KUUj1zdtDCC0WxZiEB9w4UhTYEeiLnUi7UklQoKAPoLiQmmB/hnB/zbqQA4tiKRNcNttutvk3jOhnoyS6nugYE60pAMhjiPhjhhhxrrLpjoUqitUrt8q50dEkC8h1hHomh8TbooAYB+AYh/gPE6rHJMNbi4BZuBOCHDh+RgA4OpizBGAfhXF3h9hfhHAZh4hDi4B9BxzZAAAfSiEB1AtBz+Uts8mkEWJkHliyqwSJiyB/uSB7uTEEDeJb1PCetXBPl8B7hPhHT0z5EOHnK5U/sSVZUs0ElGKeJQqfo/HXljJU1fOhjJDVD5iQVjhvpHHbhPCvjcjJmSjL0aVqjFVrlEqeT5LNKqDVmBpnJoVfSLOyiBBujDVyiP1zjZikV1B3v3i6V3VYFzV41aFPKeCfqTnuNwnKLVNlh3mINVl4v5S6gAnkWBndDZvI11NnDJNnnJ1X1pkoiLB4IUrrTwgANpVajcJ+qUVBnOKVh7whvYvZzSg+tWQEWCiT11HW2UWGWWEzzjpQC7EdJSKqqVCQi3k62OPiGbjCt41zJaB72EEc120Y14TdWmqp2oKrGTm+GC2rPpTB2tDoWiHjWEV2WFkknYDAWmC32OGeB/jnq2qhK41+iS1bkcHji8nhizBf3EjZhvh511DCC5NnkY0Rrtqzl4onu5n1g+Ghgvgf2dF4q5v4jBkSHyIqHRiehnoGDiXGkONMBhjUvziBAM3JrtLArBk2gPoBrlgAB+rph8A4N/BwAPwAwBwCk1gGA/rOBgAfSrBwk4E4h/ADhAgHAcDPh+AMRDITrT2xToPAvuXuTlzpSMkzTqFl3xEoXyFkzhTWT7TjX2FQ30X3VY34wTiLnyQhmAWlX51AQgB/gImGwmRKiLz/39LviKwkiC4AQ0CLYB4CCHiLQpmNYE4GMJ4GiGwtHpYJCMYJ39GXBzmlUwCTQzHPnKAHhfoWsZGimmBmEKBvh4h4VFNpX8zFP+CKGkG4h/Ccw8wuh3Q/A/hop2pmB4BnhxAQh/gTo+h/Ia4NigUfCFGdiYXgwQxIVhxJwuh0GURNQ2iBiugGEX4kHE4liXYmiEmdt+i90BmExbFUyIL+McBPxWGdumBfEXYkEZYwiW4xiEGdmSi44pR8HfHvRlRdJBBxnbB3451uDEY7iWY8rY2wkOY/SbGURuqryTtwyeiYXoZDkm4kXDDGNtB701ptCBAHh4hOCwh+A+AfU5wZ4+OzXzxIR9RlRwO8l8PZZN5E5PC/kVhlhwhHh6ATh+ODF4gZh8BkA/oZpKhRMmoTyxCfUmQf5JWcRkxviRx/N345Hu5O4FC+kViRYPDTranNhxgfowpKhH1KAAAB4+ZoK+5pTIxcSIiQyUjMmkZDlOZt5FsyPsuSjJtSilHh6Au8oTC952wjlBKqHgZqtXJBSVQ245Y6C8ZdC/OhIXC4XPDeClIgDoXFY95n5I6EIoEnB/UGHSyeJBBnjMh3hj5DjKaSDEVz5dq53oOymBhg6NCStJuNFi6C6QFPgBrDgAKAABjQBwPHAfmtvQBoDQB8A4hBhHAfZ13pp8QFaKWfPDZCl4geilJXHcaOk7Z16PlA34D7PovijVDKB3ps6vHSaPC93uqmrwazOoa0acBP6daPE2DdvertvwW2a36NJK68K5a9OnLwV2GdBjvxDVEEWNAfAfurDkjMueA/4+6xrwLq3hwCQDDDgOAYBxhHlu6ngHoFJmMaHOPI6fTy6MbLi1hAX1FpSW7Wi1Tglwv6bDX3QK6+YKvm7eXxwbExw4TsMRwZRGTp7fbfzs4tbkW70isLCcgb7mbmiKBwg/h4qiHuxpbpMH1ZZn7Zbt0CUSGTi6u57wD2bujeI+5X7zCV0amTjKpib2bw0VmTjC6Db5FJ70b7b8b50eb67yb+L7bnU6cAcAkU8BiBTL778DCi70cFcGCU73E68
H8IX97xCQIoJS8K8Lb6CQLX58cN1qcBk4Esh/AZ8Q2Wk/rrFYzaticUWl8XrRcYqJ8Zz78aqb8b8ccci16yca8e8Z8f8Y8g8X8h8Uci8Q8j8N8k8Krh5G8iSbM1UOcdlpiLGFsE7o8plxwpN1nbcs8tCKGRiF8ncjQxBzhDooCRLLr98x8kCLGkBzBjssBi8TCB82clCMBxifi5TL868vLuCLo5DFDUmS87cmc8NVi6DF9C8IFQHlB/gBjTqAIXc/cqUAB+B8OoFf9F8GFQTSvxdFdKFhc8WvkcdQdQ4CiLBwilVoGV9N8DFQByBxh7zShyu6Hq9QlQJ5B8AMB/gGjpC7x1dT4HUGtS9hcviKqda4cfCMdk9jc/83CCbv9nFQLSDDlY9ndRdsX5dtb+9ucI9vdu9wcRdxdv9ydy9zcOd0cYCM7hdjF9iQX7CyX8CI6s8yCPCRDOwl0fz/kic28txp4MwvaCdDCKh9iawuYAiFd+87iKh5wSJqxleBaeeCCKDS4PiRQyvD4Rg/w+4TsbE+B8Adk2Jwbl9XiLC3m4h8ODCagYwuh2BnoSsqyyiBB4hFh8QEVHdGYpv1F4wuxVCRs7RWuS15Kq7175SHY1yTM8l7pBGdiveiOt+TY/mbHvelE+Je96omh/hdq0WG7wRuEyYMiUXgxxl3ywFaVW4ZX4odIIZZqQ5aoIdMHa8AySZqZ5G9NZE+Imi4eu+depyH+rZ56G6VV2e+9ORINnphfA6GJMjMhx+HRkKr+6RIbyiagco3NZBOSes7h/h4h1h5qLgfC8e1X3FQGw+SAAqABBjQB4ajhghD6lDQXrA8deFu0+/J91dx/ciLcl+/fd/eff91/glFC+7a8gfhk//kFP/lfifmCX/nfn/oYLfpfpiKqOjOk4qRUxjE968vFFh4hHh8B3h+Az86XYy6iBBifSej83NMLNEhC+A+B/csD/928hc3DTZ/EFDE3E9KFKCAPd9vN9vt+v1/v9/P5/veEwlfw+JROKRWLReMRmNRuOR2PR+QRNwSGEwKCv18wh/vx/n+HQ+IySZTOaTWbTeMyOSPeBPd+waEwiXRKYzijUekUmbzqQu+ez+VUKXwk/0qrVesVmH0yQN+nwZ8AA+y17v4AB+GVq1Wu2SGuR9P19+2Gxy6zB+WW29Xu+VuZD+5UGyQ9833DYes2+Ov6XSeUwmGUOEvPEZXLTjFRx8Q2CvnHwrGVNj5fSaWQZmN5R7wTPYXQZJ/o/TbPaRfURpzv99vd8vN+a5/HyGw8P7XjcfbxlnwJjv15v5xw88cOFD+i8fsZfkxhgAd+WgHv4Dw9B2aq2Yf9n1Zbt+v3e/bfD5fOc/T7feE+38fvZ/r+P+9kAQE5EBwK/sDQQ7UEwWw7/QZB6jwdCEJprCUKQut0MQ0pMLQ3DyMQ6iZnn+9KFOKfiqoTEqKMYiZPw+98QoUqoARIhLvpXFJ/RWicWolF8YPXGR+PSqsVm+qiYItHyVSBILsxkfsXxSh8kJbJSMpVJ8hJIfZnrSiUrSo66LS1LcoJIeZzzAf8XyQZ8UnOX59n+wq8nemEzRGqcztLGRznueABnwH5+BOf8kAnFJEl+ch/nMD54huYZDngP5DCOcABu+fwJnwZM+wOmS0l+fB40Qfw8xSHxfk+H5vk+c5jn2HaWnKeaVG+fg8n8HlQtNGTIIgfLNm+fsURmH4fk+X5vm+c59KqP5xs/Yx+H8P1fz8mdSH2lljRaxhfj/Z1EN4qhzn5XJ82xNltMRYKFIg59EKFeRfD+Z5g0Q3V0XUhJvsKP13Xew141IeEam/OiqpYXwAAeeDrX6lt01zOmB4LANRogl+FyuhBfIfZmKWnf9EMpjONXhbiITpetwn/caEmfJGX2mg+AXZlWV4NluZZfcDzj+X70l+d+bKpal13bnsG5+X9r0QflkR3ZZnk+b6nZvW6011a+Cactl40EeAAn8OCwn8GLin+DhHmOD5Tg+eZHmAB9NHM8O
0gBtZ8B5HmxbGnDxoSQaH32kZ8D/xaJNyf/Cn+Q/BZ8w3H8pja+8vzGWctznMr5ffP86vpAdH0nTw3ePUvx1fWPt13Xvn2PZPh2navd2/cPV3Xdux3uaImyiOysi0qd806kxGiXhopw6KeKih7j+eHkQykJgcCiXl5kf/mokfwQorJHjof6Zx+16z6pCe/uIxMnwIx8qS3zJ31I3GR782i/4IevLxiKvTG++5+76yQGrI2/0wRF35kNKqniAr+CdmFPSrUZw/GBgPHwc8AAf3DkIAwiUcB4wfjwZed8sJDR8DORS9Mf8EIIwGI+T4fAzx+BDH8CAf6qh3j+GY91Og8g/jRbaQgfATx/DhSqlcTI/AyExEAA9fkMYZEeJ8Tw5zVB/hnheYxoi/WsDHSsQgeKazZMAJaQsfgcUrD2Kq9GKhFX8kIIKQYhkUoetEIinQX4zxnwQIQQIfzbV+B/IYHwe8bSqwKjifknY/U8GbZeemPLJI+NIkBC9GcS0Uj8kSQmNyV5GkWjmOAAKRGbxdURHslsmDBDvH6mx8g/AAA8k+P8ey8pRykkelZQCSYeyrX6H+Vw/yEDPlimElpCJPJWLOmaXZEo5wQIHMAxklkSTFIQOeZMnCWTNISOZ700Y5SPTwT6VMeRfysaO1owQ512TKkMSuW84n9zkH+/k3ZDSUEtge1adjNZMyCnkYycCjx/wEnw/kc6uB5jxaoH+f4jFyMfVhO6Yz3hxyyZk0UXk9U2wwnxPkkgwAPj4AGPQA4/lBynAAH6JQ4APlnHAD8A7z4Rj4bwAMhJYQ/iAB+IAHAcTR0IijSMvxSWwk1HdUgkRSqlk0qbU6pJSKo1UKXVCrBV3gU9kJVtCNYEY1idzWRLlZk0Vod/WqtdbDjVdrcZiuJtXVojRLINHNcyaIyRajVEqOFkV6JkkNIqNo0SikYQll9gppEkSlKKJdiCM2LsZI4kKXmCJiIhZOytjSQpqTYm6hKKSYi/MKQx/9lER0KrQn9QKg1CqHUSw2DoDgPiGD+BkH484pD/O6OAeAQwJqdr0wciCplUKqs2OxPAv4xUJsU94fw+g8q7uK1BYi9bAkRHSQkT8fx3zUTqP4fK6n/1uuM0Bb8sbSD/ce1kd7l06EHXYHtH16GoL0WNZKSN8CmJ0N+nQPdkK2XpF+wlRDDLNp4EOB9rQ+G2p0H8QUf4cIO1zwMx7BT3bm3wk0v0fwczCh3sFgZoN7MFokw8S9l4d12WUvwxxoDAMUYcJbfAr10b3LsNdhhqDUmvopSRc3Bw7zlwLFePq8uBK1NkAG2ZtDam2EJHfTMH4nADgHEONN5Y/FTjPH3SsHF1ybuReeP96pCXRErbaP6KTH3IZkMRnDEplk52VrgSDO9jM8keHgB8cEDcmmVploHPlnUFF8AAjXRFltGtP0fpDSJfc+6TI1pXSyINMl80xpucpRre6e009gAYw7gN4b6DPUUVXiB7HOQYgg/jf6r1GV0PZlCDj7H8sfWh8SQjfD2Pkn2EyGVX1WjLYA+ZYbEl1r0imyA9j82WPtPF59nVVJBIZcG1KM7XqeSSQwf1j06Ypt7bBH0jD8ADtxPm5lgrSH4AfblIt3EyWlHSw+5tHbZmWbohj6dnbv37rq+++uBbi39tbeu4OB3k2NqJGQ8INAH4kOAAEKd9b74yUbTvAeNoc4+UjjuveR605LsfkNYeU1y5XVnlpNuT8Q5fzDmaFSaV2MgifQXMiQ19sNYDnenrCJXSOkl7pMrE1OSilMidmujkk6TUhGVmHodG6i+/AqaU1o/X4nCzbMrTkrIkD/CabXzdPrNa5QShFDL8UUSu2tt7c27t6PAZ4iAwj+BOPEeCn5adB6k1C5A31U3tuYzK573BzjjAYb+WKve0VkwNdla17buptvBDB/QnlvePWz1ehbUFvMwvbe9rTmx7jvF9rrx5DPQTkwNfpe2Nh8X+IeP4C/qvWEL9d1nGWB2FYbIjgzIuECExl9UWX
1vkaxYZYB8KTSrZ3QQUB8nXfvPmVgxNjRH3w8VTuJf9X1f1yF/Zq39v0mKQ/44Jecv3X5Pe6D9/kCiMaMiNayPC8Y/qiD6y/Kjg+aJmbKbOb4bWbayqAAyuyyy2eWHuECGGEcD8DibQDwcAGc4AqoduzMIezSH+zWRwH+UIIePGciiU/k5qsHBQJm5i6FBVBTBcevBgI/BYIS0W0jBo0zBw0tB00nB5BvBkeTCAI8duTe4+SGD+sCi2H+BvCMJIHCD+HiR0OewG424OIe10x64M3slE/64U2vCsmMljC8484YRS/64fBbDKsU/7CbDUN1DZCrC2RSIMm64zDBDpDQ03DAJTDrC1DdD5DzBzDkITEBDaJCWkZkN+13EM34KqLw9ZEY3QjSPCDmH81VDiLc4op4PCik1DD9CEI5B80fFE0bFI0RFMs7FQzxFAI7FU0PFYglFg0vFlFnFo1rFs0/FxFzF02+JA4KIqqLC+JISIIw9w4WJAseIw3a5MS6tYIlGW5Qs+NyIiaMugJLGOI+UAUwHwGOH8CYD8Am7O28XiVue8HyHqutGvHGJmWoN0H2JSf/Gg54JIXTHcJ+R9HlDTHoJYAWVww3HzD0JmXSg2J8YShbGwJAXSIEVwT5IBEEJkHGH61fH9GfIQI+HC2UM7CzIdB2JmHIHmHvIioglFI5B6Jmb0HmdMAKH8zGIUwvGEJkNy+MIQcjE+JCc2mhJtISIlJzIsI8c3CzDsJmzWpY5SXidMMhE9KFF42fKZKbKcs9Kg41KdFczrKlKnKYdjAxHW56KqX2r+50I6OjFOJIR2H+OK6Ksks42azkI+sefK6c6iZfECqw6m8I6rLUIxLnFWs+mC6a6sI1L3FegOtfBEtEAOPO7iiUhCHgggHwB2SIbNGsriRkKcVIHwDCVQBi8MGeIiGuOKeWHiEWN2tQBjLbF8WGWKrye68uj6vAJKMo10TrDG8k9EvW+6vcu8a0hg1e39N8x89/NkaExSTaa1LGheToF2IYw2xjLKIgwRLik0pkweRKzApYIZINOBOcZkw0v4ckw8SAYWHxNlJLLq9E+4vaw6+myoN3Nkxg99O2z3OGxsWWa0xyhfJBPdKtPimSyC/U/YISHGHmHeXo9nOa56Ig1lM0RSoEH+E4yKj8+OGWHmcOB8IW8BPMJlAGH+AChSEGbaHgA9ASGCEOy0GgbaH4AwHwAwH+dMADNPKpKvKxF5KrRhKzRkpI0U0ZFLRxRrO1KlR9MpR7SHRkr4KqReNlGDOPEI5IJIHyEeH8S+COH+DGIeEiH/GCGNQxFTHongm4OesWD5GMu9K3R5JuJ7DefiT49fP2I+a2IJDoMgIZTXGYJ2IGJOJU/LTpGifZTgJRTyMGIfTLLJHoMCoyNgcbT4JAOWN2KgMENhA7HmJALjUaLALELILvF/IeJCMBUqKjUCRVTrF8MaIMM+MiT4ftICJCJYJMM9TyNCmVUUI8TpVbVNVgsjUkisN0N4W8OAWzPLFGJIa2N4N9LmOEKmYnVkI6O62kH8DuxCIeFyPMISEdS3L4L+7McmISGGInBNWVRvSLSJKvSDQPKhXJPhSBXFXTXDXZXGJtUGIrF/VTXKMXK85/LCOqSWfLXnXQI9LNLQsiTHX0gXX5BPLc6ZVjYESzUIJAH3LvL+sg6jJ7SFL6sy6M/NJ5YYhnMKPTMORSEGF+MoHCV0HuUoUsEMAOU0RwGeHwzXYMI9MsuPMy8JM2ISAwF+EYVeE+HG3k1UD+HIGGVyH4BiH8FxYpPi8pNWD+WUE+ayWeWiJaaWZ0WuOFXpX8ZdNuaGXGG+IiK8NcD+m4aYZ5ZeI+W6IZOG9cXyRGReZuYsZ0abasMXOeYUSSYcYgUslYYqZOG+ZTLo9gahO6KqZCMgYmZuHHb3b7aPQQxm/TbSYAaTajb2Z3b8mi/RbQRIkWaQZLakURcncVNQZlP6/qoAZkSQIEKoHCHjaEbBc/bLQSW+bWRSA4WbaaOKE
OZuHMpWWzZo/LbiI5Q3Q6RrQ+zUEepqFAcYA+iiU0ce4uRqEOb/XgpHB4nvTaL3epRsLZevR/etWCMNKRY1XNXVfDXbXXXdfJfGKOgIe+I1AAIlWshiq7fUIszPVigDCRQyJAeyIue4IjfWMgB6fGyYfoHPeigKfzGcImkYqugagFfal2oYgSIvNpgEgcw+8DT7giTKflftgtekgmRsgsgwzcg2PMg+H+hCPyhIhMRupORqHuhWhagfAykehqhuhyh2mIh8iAH+iFM+MEiOiSk4H+iaDKigilYKlGjmiwN8JYi4jyi+ToawZqMEjKH8jOkKjUDmkVGI6UkfHcjsH+jwXEoCj+MEkEkIfIkOlujdLNi6fYkhBCYokomunYm0k0vufIRvjWjfhnjelMlQmsmEYZjs2Wnklolsjaypj6gOH6l8Jen/kElaowmPD6fImYluLPg69DjemoeGn+/WlYB/jsm4o5fuoOnFk1b/k4n4nTjoX6nakynhlKtQpBlTcrTsIcJ8MKoki6aKoDkmM5lnCRlq3plUgOoaOcogRQn+GYoqTooukyHmo2mUnWB+o+SsnFabkWI+pMpQpUpYHwpcpgnypm0ApspwAOp0U0p6g6qAqEqInCt9gKfurhcoI2qnfwKPnsI1nxm2KNn3MHn1J0Jw+NK5fNoPfLoTfRoVRjfPoaJpnnH1bLXtLBNXoNbKPTYAsOvbL1e7GRYRYC6+IvPfeqI7YdYtLyItpJewI1QHpRo5pHo9Y27XY6X5MRJcD+ttMWhKgghGHAHwDGp5gRACKaLLZlQWuXM6H/h8eWToOeH0BjaHpYI2VJaSu2H/Naj8vCuix4N/gm+1Ns/Se69Nq3q5HvNle3dcaBbOxq+9OKHfOOwAHywFgpNq9/OhMATxOmHfBExAwowtffg89/cCxSEPPAuixCxHpLavcZPm+8+kTwvEH+xcH9CprTsZPlrak1PqaQxYISDOxfsXbldDbO/qxs/XPts8H+ySHxQNX7rUF/QVZpQYndQe/wy6IQEeH2AHQvsuI7eBQ9RBREB/RJRMzbRezhRft8L0zpqmLZubuWLaz3tEL1unucLUpkGBsDjdoQLbBtploXvDofoZXBu7vGJtqHW+I2GA1K1O4tWhEw1s1eOc9ZYnSa1+1vDDNloBS5vw2Eljv3Ei1a2VwA2LwEI42A2lwATxKDou3QH822TxW5vi2yMY3GilSXJ8I43S3WTxvtVFEajoTxv5WvxCYpxJoDEk4RwDwpxUjpwNxaI83vN9xRuoI1xm9ZwbJgJA4kHw4o1RKLxjvLvJRpfFvPyPyHvFyTyRyLodyXyfybvNyhF1XPbJynFxyqtbyNyvFtyy7Ty3yjyJypzBzHydzDyVzOI6EAABJaI4cvwzCSIkqKHgCOSdivLOIfGCIfe0GPiuq+Ivz+Q+RlzXzaI3zeInzjzyPzzqIfzukJz0Ibz5z8KIfH0CQ90HzZJ+IfzggaqKGCGPzsIl0eImGP0kRUkJjaImA/0sdUJkpP01QD0R06ISNHYL1Hzlz5UEfAfT1Z1aJJ1eI70OIl0T1p2L1t0V1wIvGCR51T1ET6WD2Bzd031lGB2MIt1vzz1zVC9v152f1cHwiVLGHgR2GOeyhCB+GYB+TWPSHGcmNHcOD8B4GAMKeyBiGOOiAB1AM2NiBwUKH+A8B+EfG4RQ+MAAHOEOHmB+OiVPBCCPeKD4AOA8DgD+WIPG0CeqaoAOUiH/36YlzuQp2gHwdMwueyA+EeEAA+AgA4AeGQAeHO4kH+HEU2bOAAH4D4D4EADAqCA+BAGONH3yE/f0EeBAA+4l4AbsUybwikAAHIAGHkAfnaPSO75OB54iCAB+GABwPGqAeyHAA541AT6IpN4+Qn5CAQD9hQAN5MEeAMAeACAT3L1KHCcmHADCHwBhRSD4DwH4AQCMA/7b3zST1AEMOKEeAGAf7n6PZRRIGCPGGOHACCHgAchCAYOLZQEeAIA+B+A98yEN4
AH/8z7UGGAf1X8r8OEEAf7IQh5CB4B8D+OL5Mwb5My0HOGGTV3uH+HCVMH4DwV6DiH4B51X5MAebjSx1B5MNiAeEOGeGP6OE/+WHH59SwHCHh3H9f4CTaEOEf1X831WE+Adoz9IEeE/+36P+UEf9V9X2+V6/WkGGOGMB94CA/3uGGjKGOVP919+BwH8A4DmgwIAHw8P0eD2OP3+x2Onw+x3+j4aj3HA4gh0e8Ue54S43w8H4f08P2e/4ajh+nx+Hg/DJM/h+nobMYGh5jI3/N5xOZ1O55PZ9P6BQaE/3BQ54H3w/h4f0fLmO4j/D4a42O+HOx3i/3C/Di/hw/w4Z34fg+H6kxqjCoZDogx0e/4pEXjNmPHHjH5NDojCJTBB/Ln/fx+x7MHw5g4an6Ni8ZjcdPqLj6RSofTnBUQeH2fVKtF61XK9YCfYx/ZsyxJvCpDGrbb7jC3iwLTdn4/0cf7050eAcDFME/wDTsLh8Ix0Bj+RyeVPcjjsmOAAD34AAO8H+wMzm6q50BvHG/DG/hjYAO/JcH+x6LSx091KlhLgP7yAAC4LMx3O+Hi+AADOolzdEAP6Uj+lqEEAPgAAMhDDNwhpgIQ5cJQmxjmsa57oumA6sgAiDNmeqzun+77wvGDgBvM0sOvQhDVPctqzIG+YAnwB6Evyeb+P8A8AEecAfwJAyiB5BUGA5Bxjngs0KSZJrmOQ56bo+oh/gPDzOHOxURvA8SwBDFIPysD5hPu9iovfGK/Pgf5DxufEcn/KbAt0fwPyC35/SOwEGuKf8bSdQFAQsxjJqWf7AHAPyCM1LC3y2rqvg4OMUtfMqQpu1p3tfRxxzceaPsAki3IfAiIG9CKTJId6BMQh020DWEKUGxdCqYpxjB9RbNmGfJzmerLv0gsA8UpRdRrUwb3olTaMU6/E3rxZKSr8DymA+YzBLymVWobWNvOXWajVqD86keQ6lyuYZ4qurJwv0fg8KUPA+JdI62zatVutaidFosqlOnPHCPopUSIIhaq2wYRyYsJTaHW/iDHXCobJh8HzTAeB6QuycZgnGc5xmGoh4HwfA+D4HmTpcgbMoUhKFoK95jpoBwHkeA5jmHnURxwf2Lg/G2boKzOEILjGM3MsugIaQ6LYjp6jYmoTJh+AEDrKR7o0YYADnOeABn+cQAumfwALGHjz60uuXk+7D3nAA4Pujrm4bgokcH/qxALNrhH72A+EGBMJ/72QAHuxuJANKY7sUdqHHshKCkh+47rrI4zCM2YgPvzGxxx5sEUD+DjzkA4qsrVH73yUD5AR8I8lHhGxwbw47ZH+eATkfCHAKYy4DuuH8IR+Ab0VE+3Hch5Sc6lJmH+X6Ho4h5sKef6Xr+xWWIet7Pu+9Cvt+/8XxqD6kJ+58n0/J80JfR9X3+79n4fn+jF/l+v8fynf7/1/v8P8f9AF98AIBQFfFASA0CXsQIgVA15cDIHQRafBCCUFVYwUgtBlJsGINQdXBB6EDyoOQhhI1GEsJ1vQjhRCt/cLIXPahfDE5MKoZQohpDWEsN4cQhh1DuD0PYfQaiBEGC0Q4iQSiNEeB0SYlQKiZE2A0T4oQCilFN/0Kh8E7NqYtTpQV4E7H8mcnBNorHIixFoxkXSgRfJ1GEnRNh7xlYkhOLJOotlAQiiMoUbCcxuJyXSORjYzmOjzGon6co+xiJvICQL9o6GPkLHuRRN4/RjNTI2RyEo6mNkjF6Sah5JyMkwUKLCvSsg/HObUkZTSzJ1H+M8fI7ADxZH5GoH4/C+J1HGSMTgeJdm2KiP4PJHyHDzGebU4KS5Rk/lKIGLIP2xj+bGihsA/ABzSHyIQA7ghwAjJuPiWc4R4AHHGigAkvprgCmCEBXJDhgBPPsjR4Ey5mR0HyDApIfAID8H4BAfAIR8BhH/QEfAIB8gEAOIIB4wWwFEGGPCh4QxwBDHEGGgoeKKj8BBMEARAyRiEAeIMB59Qhz0nrJpkq8Q8LzHwHgeA8B4nWpiyQPCAw
HiHGOIMm44x4jjHCPEeI4ahMkHgHgcLJB8TBAeA4ghJGbEKqFSZyJjh8k6ZKPwPgfliA8I8HEfFXh/n7LvTVxYj2RD/V4Oeno+F1LuP3UZd8wQPsGJcI8jLOSs1SJ5Ax1ROGSj+ZOUmrlYSuE3KyPM0JpUs1nH+PlgBCR8VupaVseMbk6jPJXXUf5Vyq16r2Y8+xOa/h+D4VkHiwF4FRqSOM0KuRxifjUPEopGhD2RWhUYfllZgmDIYS4kZCra2ehaY5wVoh8j+q1aYcJRAAB4LMPgD9rAcj/B8AB4sXZpDwWSIAABHDvy+lrZa3gHyPogZxdy4ROoGXFr9ccPwOR5j/B4p0cAAA/G8mldK6l1rok3HgAmaxDruXeXgd+1kwTcCPluH8Z4gATnGABel5hj293GjCDm01j3CE3EBfpXJJIujgqqAEtiIx8XfO/Yi3YxxnYLGeIMq7ycJXrmVQO44fwZ2VtOP6LtSaBhhH9dNi5D4uq8dwMMfabTaXgt0ocH4h8W2+AeVdV+Eib18xqPjG4E8dF3DGP0fZH8wldumaUd5EjUjwHvicfAxx95LO/k2upaLfOcIUPvK2VzHjwkVlqMIk7BF3UmGEj5W1Jh5MCRGLoz6IYnHgknOA/B8pTrqVBcpgyFHWzzKWMNtQ8VcI7XEu4cB8aIB+A8Z4hxBkOGOOAjlQl1EcpfeBkofA/j+A8A8y4/mMgfpwuqvOM57RhGOdMPh/NkYR2TVUA7hhgADIcOMYA8AA7UHAAMjh/LwbG1uEAD+mjos3GDtfYN6dOG4bLPxsZwb8br2ZNwEZDh4AYmsPgAc4yOD8nOd8fwBtbD+DAD/TQgAnjADHtNDem1A0NR4jaeaPJ56tD/nyOKoDpXkSXryRJUTraaKKVkj2NdzPejiYsj8Yty55uG9kjRRi7h+jFfHlST3uvuJ9pPWxOJDczwm90d/Jh4D+JwP6efPL1QBmkTrovRicRVUD0sknTOj9SiL1TqvVokdY6z1qJfXOu9eid2DsPYoo9k7L2aKnaO09qiv2ztvbn9dO7hDDub9e5d1g/3h+nd+9Rm76/Pvnf5BeCgH4R9XgfDFD8R4l8vjHx+L8dVPyL8ZHx2jTHsPEYIxKh6ZIMnMdyfi/515jzXQwfSf5/sKTUaCgydjX5mNvm1DRvyt54xnrpDyfkqnGT8ZLhe2MX7gn0iOhyK+JIv2sj+Wyv+QYpLXzh/jvB+P2xstjA/XKiYoThUSRpamF58P98TzfckoVGT8jZmlZB9NGa81E4zUmyAdH44AxpSnCjVGo5kUAMnQPwAyYIMCYgfwGBw4PwGC5w4AD4eITAB5GqqSUoACLJIjdQfhGg3gfECyg6bY9Cbx3De7e46oA4chGic5sQfCdRQ4IBRQY8AYB4QAHwEEAg4Ai4SAQ6ccB6OgfAHBeIPgGAfkHSgKmKsJkgMAfAF4H4QwD4YahpjqnweCnwQYcDUgMAPEKQfAMCYIBhXIpopcLQBwAJNoZ4R4eYQYYYcKnSkyBiqq0QfBeAfxYi0ofgPI/SLKsS1Ik8MSs6nkPZdaoMOiuA/a3YigpRAYmBnCV4Q4ccRKtUHBiSPKgcNpk5eK063Kwof6+KsbRI3Ss4c5XpEYfIqqyUQDShRYB4pRRTFoR4hwZ4usRKzsNIx69ib6fK0opSw4fgODjgP6yor5IA3SNQfIopTpEDJZdzFTJwiBcgHkVAgkVYYY3QeMV6ejGjCy5K+TkAAAOAsxJQcLIIwI6jNBQ4+oH5ToYC7pd6XxN68QZ4mEU4ACpgT8VYqgi8c0RoxrCq9q5AHDDK5gPBsDezDw6i/qgbasQ7AbE7ArE6yzBhhYpRBMeEeQtweK9EWAx0fCb69y+C+Sx7pK/MbzD8gYcDADaAh7EzFEhTBAZ4lplLJ0eKV4cIZ4T4jCvUakfIP4GK0xYDPQojDwsy2Am7IwYJVzEysY2i3YZ4Zgv5lLW4k8VYjAi7Ksaa0DLLLa47HYOYf4f
apMrQOcbw0of8n4hIfIe4eA/LaMdEo0ZAYjW8ZcpklzRgT4i7myQKBi7TCwP4CbQIrgfjQgP7Q0OUTSsxTAeIYbE8UDNit4jjSa3YR8tcU4+UpqV4eAT4Q4ecaSZaBiTcjCMIQZkwHkNqroj7UcwDU5mas4Y4cLV8witamjbLPhQ4srXYHwsgk0lwqoD4QZkLlKTDc4Q4fgGDY4HAfDUjjpkgODZgQ4B8JYhwYYYKoYcYcE1JksKjbIADf4BgD4opnypYgclwYIfAH4QIQ86M3cujYbdBs0Co6cCsgkCrd7XUkkkScacBr7fK5s+zf4IDgRQ4H0F07qdyXBwxr88qOTc4hxFBKofya54AfjiCqrVrgTeJ3APC6AfgpAB4jgfzfaLIAjf4MB3EZAYBRUlwjwH4RgH5GtAiMryAnbkoowsblCNYpj37kgxZT7mAnFFRKVGaz1FgnT5YoQu7oT0Yn6LLGUqR7r3woKl76iSihonwYEktHqAMDCPrqAnYYNKUmiANBqPrkQnZkVI8zDyZ71H1MjptM57NM1NNNdM9NtMlN7ydOLyNObx1OrxlO7xNPLw1PbwlPrwVP7v9QLvtQbvVQrvFQ7utRLudRbuFRrt1R7tlSLtVSbtFSrs1S7slTLsVTbsD4BOLy4m8NYnwygncR4oUzU3jyrz9UKxooFUonUR71NUlVIoNJqGtT70Anz11UYntWAnMR9MUNgxlHSE9T4odXlV9U4m9YIoFWooNYqHNVb29IlVwn7zlYAnNYSvwxtaKElY4oVZNa9Zb64nFbab9bqHaUoYKU7Yr5gpp4Dogh4fIRiWZOL+pQ4DIyp4AxQB4eZkQYy09LIg4m4Hi11BQRFBQeNLJsA3gc4cADCyC5ZywGApYpxdabdciFcCECSaKaaaz98DEDTeCb8kaa1k4AYeQ4IAS07CIAJCIEAsq1gAYRAHAAIeJsxsbCIAFiECofByoAEGACDXQgweIcgAZCCHCLAfCfCwEH036gIeCgVqUK6cEJE5ShoeKZwMNqlrlrkH1ioeAGAfwDBCIAYzI1gQIGAMFsVplpgPwDFCltwDDW4BgDwAUMAYKnC2QMNbyHgx9W8WalsN6rBkqsSmSmEO8eIZ8TioA/QeAfJN7UIPgfjUBehCIgxfQQ4/amilqrAPlCisYPgwTXJnAiQrCo9vyEEurPsSIPwOApQjwOYriqoeYfIu8XpzkwQhIeJT4Od248EvarYjxeNzAtwtgi4fgjyYYpIPAP04V3AlwlwmCnAR4cN1A/VdS0ER6v5k7LsS8wCsJEaxMckcIY4fJT4ON8SVJScSboLzJCN3SVYt95T75Q7zNwwrxUN6kiQrAu17S4kqi5C0sa8fsba6KxK6180YMbMBS5l9sW0N9cq6MMQh4t4/UN5GyL9/IHBUIlUVUuStd1SH8WOAUawHi5a+oPh4CcEgK66ReBgPCcg64ABSYrrHd9V+MeeCw6ayoPia65sSC1mDpA4igwkaJdd7NpTCmE0jS+a5i+8cWF0gaWC+q+8cuGo0GHFcscgheCweAAi3UCIATRBkuId6QAAkMVUBQrA/OEaDsmszYP7DEjazcnbV0j4hEsN87lrVwhKwgPOCN+Am66Ic4syuweEWriYPAH1/EhWIjXmNWI+NuJVXGEsu7HOCNfDHyr8r2Q6Lt87HpSb+qseQWCYcYcYhCuzOKMI0DzN6F/TBeSWEON2ACQV7jLcvV2YMYj5YOT0X8oF9F4OUd4WQRUJzmVOC2X2GywF5+DGIhaOEGJGWuJYx018mzQEW0SpScXcvbUwiKs7YEvarzJdyuHxRWQhJOVQjOTpYl2arMz+UqW4f6pmEAqmJNXqGM3peEz5kqoigapDRCucVTVYrRdyl6oimIu90DHYPgDDIam816uymIrqldCgPxlJktwYv4H2eohUMwYI/Jkuao5bP09Cfhs2lJONnM97a7eVnQ/g6cDDl4ADHYP1oIswEEBoqIQYR4eGGqwEC
IAAP86zehGmNGjwcDXQAOkQr+SyTTG9A48z9sjo4IA6qs8dCIhwfgBNBUCurxDYj9li3QAbIYGFFAqIYIR4fABgrq+7/08NsjohyY4Ak0VS7QeADI/IfmpyGVN9Fz25LSgbq2v4xrPAk9bjqlN9IAoZFOwWfLnlN8ub4YHDD4m9ibqVSpsqRVJ7zqAzCInMNGzNNKBe0h69Trr21Drm1TrW1jrG12wm0x6W2GxW2R6O2m0e2yB+3W3e3hyG3Gz23xx+4Do24myO4W4e5BqG4zme5jlW5zhW5RiO6D5O6R6e626+7CFO7W7e7hWG6j1W7xQW8RQO8Dke8hJ281Gm9CDe9m9u9zum+DvO+W+e+iGe+2+u/COe/Tv2/i0G/2//ADwfAR8HAiTPAzxXBCE3BSUnBnBvBwoG9VKfCCk/CjmnCyz/DHDPDTqfDjnvD1NHEHEPERKnEnEvEnCVLfE3FMe3EXFkivFHE3E/F3GXF9JHGPFfGvHXHPHnHHH3GnHvIHH/EHG1MfIPInHfIfD3IqUfJlVXI/JfJPIXKfJHKHDnJz9HKXKvJXK/LXKPK3DXLE83MHDHMVAvL3LvMnC3M1FfNHMPN3MvOHNfOXCnNiK3OyKfPCKHPSJvPiJXPyI/QCInQSIPQiH3Q2W3KnL/LnN/NXOvOnCHRGknRXNPRnOPR3SPSHB3SWp/S3OfTHTfTXBnTmv3UXBXUmfXU3BHVCF/ViF3VyFnWFjfVXA3WSG3WnAnW1Y3XHAXXVaXUHUfXnAHX1b/YW/3Ylv/YHU/Y2/nZF1fZm/XZ2EnZXVfaG/HaWOHa2+3bCIXbW+nbiDPcDq/T3R/anWvb2+XcSCvdTrfcnTPc3XPdG+HdiCPejr/d3UPeHXveW93eyBvfzsffHYPfXYffm9ngCBPhDs/gXZfgnY/g29HhSAviTtfhnavh3ZviG8niiAPjjt/i3c/jHaPjW8Xjx/vkzuPkm73lB/Plh//lW7nlzu3mG7XmTvfmm7HmzwHnG63nR+HnzwvkXa/nm6XoDw/om5Xox9PpR9fpG5Hpjx/p24XqCA/qW33qh7/rFMvq23nrTynoXbfrm3Xr1NXsW23sm0vsHb/s22XtG0/tm03t22fuG0nuW2/ulNnvFN3vVOHvlOXv1OnwFO3wVPHwlPXw1PnxFP3xVQHxlQXx1QnyFQ3yVRHylRXy1RnzFR3zVSHzlSXz1Sn0FS30VTH0lTX01Tn1FT31W1P1m1f121v2G1/2W2PtXdP2m2v23ef3G3PkHeP3Xfv3m4P33ff4Hg/4W4v5G4/43iP5W5v525/6G6P4ngv5njf6W6v6nh/63kv7G8PSnRv7XjP7nlf728/8Xkf8nmP829f9Hof9Xmv9nCf93sP+HnP+XFX+ntf+3nv/HFogD/gUDgkFg0HhEJhULhkNh0PiERiUTiTgikXjEZjUbjkdjUWj0hkUjkklj0gk0plUrlkilEtmExmUzgkvmk3nE5jk2nU9n0/hM8oFDok5oVFpFJldHpVNp0dplPqVTiNRqlXrEHq1ZrlYrddsFPr9hslIsdltE/s9ptk4tdtuExt9xukqud1vEju95vk7vt/uWAwVLweFkt7w2JoOKxkbxGNyGPyGMyWTxOVy2FzGZwWbzl/z2fvmh0V40mlumn1Fw1Wrtmt11o2Gxsmz2lg223rm53Ve3uf3m/qfB4Vi4uW4nHpXJ5Vm5uN5nPofR6Vq6uX6+G6nZo3cwfb7008HhmXj8kw83nlnp9V29t59nvw/yuvx+ku+9x+35qH8tr9v8j8ArTAEBovAsDInBEEqrBiwwXByHQhCKGQnCiFQtC6EQzDSDQ5DqaxAqkPxEf8SRFE8QRTDsVw1FsLxfCkYwjGcHRrBkbwTHMDR3AcewDH7/SC/khvzIr7yO+kkvlJb3ya9snvVKLzym8kqvDK7vSy7ktuzLrry+6swulMbnzK5szuVNLjzW4s2uFN7fzi3s5t1Orbzu2k8tjPbXT61
c/tRQLS0G0VCuBEqpUOzlFszRrkUS41IqbR7J0qyNJ0pTLl02pNLuhTrnVColPspUdSVO6dUqBUrFVa7FVp9V7tVjWVap7WbNVunVcu/Xbu1+m9es7YNhWK8VjpnYbAWW0FkvLZ7A2iltmr7arR2naiZh+/J3vmiZgAecADnwAA/3GgZ/W4gh/D+f58AOgx5zcAaDk+gZBn/eb8XAAZwAHctz3qgV1ILdp/ngYd5TcAKDH4MaB3qca9IoZ5wnOeJ8oFjF03XdN3HGeOFuEcZ8IMfuRIFbmJ34iRnnic58Y0f5z5mf+C3ZkGbIHfbfnGfl3IIfObW5hWWoiZ58nOfg843jRfn9gpPl/jR3XcY+NB/qJ/neeZ+ZJdof6/eZ85Mfx/24SeKInpJzn9puaHyfwHnwHwfBkB1xD8IQ+D+c5/n4B5MDwHhBnmcGPNucZ/D4A54AeYCBZMfAeXiCe15dpW36cfBjnGDweDgIhDkEHwhDwf+Jnh0gYA4CJ5mGD+fH8OAhnGB5CXdkxxg/bgZ8xpGLn8PGG3+eZxmeRwcnicZjkeDwfDnhAfnmc5DgcDg+HmZ5H58fA4nGd5H96f+Re5rXpaOiBgAHt3iRMA55+2ZwZ+Oc5Pk8Pwz8B+JxkeI8DgfntjPTkAAeMAwfi/H+OEf4zxntiZ6ScigwADtubgOcYY8x4jPGMCd+w/wPB+CeQJfIwxHifgCHMb8BE6MNeRA9e7ChnjHgg8AiEBILMbZgPET4xgBv2HBCFeL5R8ADE/CgfwcxAAPZ84CIwzgAOyEOP8T8NB8wRP62xmjTGNj4YxD2H44xziAfzEMfA8ARxHA5EmJcTR8ATE/FB2Tsoqg/HgACGxD4cRci2yER4xgYv2A+/liAzx+DjHHGoPwc17xNHC/NdzBRHw0jvHkh0e4LyHHjH8HEgn8hxi2OeRMKJFiPG/E0ccCGoLukmtyLBfotMxkzIgR4jpOxiB+/lprJXPRqe0I+FhumJjne2+MD7Xx/v/ldJYhsBBwAAZnM95gxxHA8DnGIT8IXUDgYuMONUnRjyNmEvp5stQfrlH+86Zb6o9ImAHNEAK8IgzWjEICEIfGEL+iNCgALh3ZG9dUv5yEdo8DgfJK8xzFUTAfmiA8fwHx8B+B8DOMQwxnB+D8H9wI8JGQoGOPMeDQZxj4DyPgZ5FpKj4D/OskJ6ZkHSmCesnVLzmzAWkTimhynnU3JvTlNk/1tE5p8cWkNPFskmWufCo5hKlkpqSaaptTqo1Iqmt+qs7Kr0JqzS2rdXKuxZq/VqsKAqx1krKRip59azkZrSamtdaK3oHriRStp+q5oKruRWvKDa9kPrqayvtfrAoSsGQ2v5/7CoVsSQuw5r7FoYseYuyJWrJ2UsqQWxqBLL2Ys2iGzpArMmys/aC0aJrS2hLLag2tp7WWjtUg+1tn7Xm4tjZ22ZXbbm7trZu3JWbem+tdbuy9vyr3ERHcKytxjh3IsncpRVzLI3OUlbK6Fj7pFOuupq4N27qXcttdWxd2VOXet5eCxN4lPXmsLehUV3b3XfvJcO9Vg72FFvqqi+Nyb52Bvuqq/Nzb919v6qzANe8BnWv/dHAtecDq2tcQEAAARJhVHdlbHZlTW9ua2V5cyBJbWFnZUlPIFRJRkYgd3JpdGVyIDMuNQAACwEAAAMAAAABBqMAAAEBAAMAAAABCJgAAAECAAMAAAABAAEAAAEDAAMAAAABAAUAAAEGAAMAAAABAAEAAAERAAQAAAABAAC3QwESAAMAAAABAAEAAAEVAAMAAAABAAEAAAEWAAMAAAABCJgAAAEXAAQAAAABAABa9AExAAIAAAAmAAESOwABmLsAAAAAgD/gUDgkFg0HhEJhULhkNh0PiERiUTikCcEVjEZjUbjkdj0Qi8fkUjkklk0YkMnlUrlktjcpl0xmUzmcwmk3nE5jk2nU9n0/hM8oFDok5oVFpFJldHpVNp0dplPqVTiNRqlXrEHq1ZrlYrdds
FPr9hslIsdltE/s9ptk4tdtuExt9xukqud1vEju95vk7vt/uWAwVLweFkt7w2JoOKxkvxuPiuIyGMyWTxOVy2FzGZwWbzl/z2fvmh0V40mlumn1Fw1Wrtmt11o2Gxsmz2lg223rm53Ve3uZ3m/qfB4Vi4uQ4nHpXJ5Vm5uU5+K5nRoHT6k+63XnXZ7Vu7ud7+A7nhmXj8ku83nlnp9V29um9919nxknz+ki+33j35/V+/q0v4/6MwDATIwKssCQOiUEwUkEGtxB6uwZCKGQnCiFQtC6EQzDSDQ5DqCQ/ECLRGqURRHE8QRTDsVw1FsLxfCkYwjGcHxrBsbwVHMDx3AsewFH7/yC/shv1Ir7yO+kkvi0IASdJ8nh/EqNSbKEoSlKaUI7K0ryzLSOS5KMvQNMEwgBLExwXLczTRNMHTLJ4/n+eEzzdNU4AAD6BkBNs7IbKoAADEM+z8hdAT0gZ+UJQrFzggp/UXRitTWB6DF/SU/zXSNMUNLdBU5O6NgBStQKrLdN1LDct1TUyOAOgZgVRVh/tDRBgSdOVZ1Ujk5HxJ9SV1DyPkBJ4B2DSaO19KFj2EjtbyhXNmRIjh/S5WVStDZUoURaVaWdLluWlQFjH/Udu2nUVcIEeFgXFNdXoEfl4W7Ks23nd0wU+gZD3Pb0wXaf5j37JtooEc+Bo7fSBnvhFXINhlztDLE637dCOWJhWIo9bU0UvfGLyfblr0w0J+SheeR0k0M6WKgVIXojtiZaf9FZhMtBH5T58ZTRl6oEQN155Qsm3If5wotoU/Sbbh4IFWObVEgh8adpM7NDouaIFPmoI1cKB4pZjQ4LeOwWO0lfaLX2qzc0k43Xstg2zMVabhXTQ2fPOnbrWdAXMf9ibXNMq5pUkncDMeloFV+Tb3VkmoHgVlcPLzQ3ng+WcnLNa4Wf9n8zKbZ5lz8S7PRFq8NriM2rcnGcbVOVydt/UY+jVnzkcG57DNc65l11sS2P+d3L3OzI7Yx/A/1vfVBzZ/k/lnZ91XiBG/3HibikRv975dONJQXT+vuyOzp4MrdHFFvg/6Ho+LMoH7x9nsYqw/5/p+q9fv/H8o/Jb3/6e1/56oAnngGeSAp4YDnfgSd2BZ2oGnXgedSCJ0YJnPgqc2C5yoMnHg2cWDpwoPm/hCb2EZuoSm3hObSFJsYVmuhaauF5qIYmlhmaIxA/FcjwXIIBrxBB4L3d+RR4Tw3ygDV8AEfieh4JSdi3wio+E9OMD+7iIygYkpzB+4x86NiKjwT0PgB4/g/iAD+PN4UV4lxgjBE4ig4FKjgSwzpKUaAfxLH9EB7pFRxKVGAy5eEZ4lA/JC1h5hFRhKvGCvFUkgIsSJH+0BxxFRjqvkhFcf8jIlyQkc68ioj1jLkigQKTAPxBtOjYRMB7QJQJYlGuSGpTTEAfkTH+Vkc5AyulORF5EfZaSiVHHReA4GxskIopAi68Ily+APMAi0w2VEUUVMKZEtWaSBmDM5ns0A/jwD/L2S8tosTBlyRByU3pWsWkKRNlgH5QKIlHKVzs4yHzrnhKGb81YsTwk3EEiTRx+SpkVKKcEmWfzyIcOMf4/n3x+oFPiJcfR/iDHCHeThExh0JAPMIgUq6HSCD/Hce4N6KkSYEP8AcXh/i/EAP8fso4vT/HuCekZEV+UmV8MAD4wACABWUyak4Px8ADHAB8e8eJsyoZ+6eOqgaeqBjsrge4R6ZkQUQ0AYD31R1Np4nIYC5JsNKf2/ysJ+6xlQrKf6s6A60pUrXWqtqZK3kTleZyuZwK4xtrvXKvKoa9kOrqZav5k7AnIr6m+wqmbD2IsSp2xdjLGqNseruyKyLJrNsqQWwZj7MmNs2dCy9mLP2gtCQOzp0rR2ktPOiz9pTL2pX9ae1hhrYmatdbMwdtjwWptweK2tvbdW+thcC0duzQXCtDcQvtyDR3GtXcyy9yi83QPhb+6lwbq3DudZW6R8rs2Tu2am7tkbvlxvGay8Nj7yltvSa+89jb1
oAvbYu95sr42Jvmgi+th77m1vzYW/ZYb/oQuvce/tfcAoSwLXvA5u8E15wWVnB5vsB3Nwnc/Btd8IlXwyVTDZw8L1xw6ibD9b8QnGwrdrEdbcSlOxXLDFNa8WnLxfWnGJScanOxPd7GdZ8blFx6UTH5Q8gnVx3WXIZasi1jyOdjJNYclk9yedvJr+8olGIosQHDNKvsuU3D0hcOEaZXABlnMBCmXkIy8Qcfy8ctpIzFmTNuZyD5pIM1PLWYSKAfzsQzORBs6EFz2jIiueiHZ9ILn9qSLNB6BzNl0h2jIuZ5HwOGHCr47j+AGPEc48x+BjUhQgf4ME5gfjEB8Q9Qx8DnzOJ95w4xBxwAAPwPMiRB5lSFosQA/A+LGqCPgAI8RAjAD4EAPwPwAJyBAD8YAD12AffeA8cAJ4fs0AGPwAY4gAjgB8AAPgeQAD+iRm2AmiwEB8DgGFOYYR4BgHgBARAPACA+B8BhKQAgPiDAOOEQ4HgPiGAOMPfMpR8bqCCOAMI+B+AQB4HgDAfAIa2SJosHgPA4tNHiPAeI+B4B44mB8DwPg/J6AeI8R4xxnjHA4D/UwjxDjnaOPkePFxwjw4QHzhfNg+bi3HpIf3FGpjz5gPweA/AfA8B+B4Hgf09AfGOA8c4jxxgc6V00Y4xx4j/4yOPgY+OEc9DwP4HEYkeaL56HkgXV2adD6KH8R3SRHj/6YP/p4/wMECGOB8c4x+f9yH4GEfjU+vc9Rx2ThcSov9DD90nto/+394YD28CJAhD94Ef3sc+ne/s04Xz2o2buecLVePiZYAB4j88T2wHC5gfjnHAAdPQAFaAG7wsQf9CBj+YamADzYPmM8R8+H574AR4AE9L6cR3qVK+rGACf18Ywjer9owf24Y+/j8AF5sH9K+x+fimRYeAfPi+KyyQL1Ygxzp6AZN3u/c/asB9x0T7A+OdQG8I8FXo4x+fhB/21XPTPKB7uPgfv1qShxh+v8PqB8B8vAosotoRPCA5ocP8P8A4vTOjBPO3O4BngPhhuoQAu7uqmpvSvqQEQFPNgctDEjPCA8ocPShzh8QKPEujukulhngfwOBxuPQBOSurOsB8Axh8P8B8B4vAgeQUPPCJs9OvJuB4B4B5h4QYAeOPt5ORAPhPhBuqgPhHtTN8BxmjuNOLwguEPNuJukkfPCA+PYFfE6QoAAAfNjImAPhAKhPXBHgAADhgADodGaKeABwggePdOvttucwzPfmcIjAGQoAgFIPtAQA/hgAxovOmBABxhwAzo1o7p/wggPABgOOvtjOIQUiTRQFUGDm2CTxRiEg/xSnBRTmxsuqSxWRRRXCEuSmrRWiClNtlxbRZRcCgtEEXCgP5RdrrRiLsMcrxMpn8sqjvRjr0Rkn7xlibxoiaRpiaxnn6xqjyxrn5xsjAxmr3RtmKxuj0RwmGxisCRvr5RymNR0iDHlEzR4R4x5R5x6R6x7R7x8R8x5xfq8CJR3x9SASAyBSByCSCx+K9R/CkQGi0NWH9CIh+SFDPyGj8CJyICiyFiyyJqxSEyLyJD6yKyIjOSNKySOCiSMCySRqzSSihyTiwyUq0CISLSTSPSHSYyQjMyXjHSVigSWiwScq2Sdifyeiuyfq3SHybjLSikvyjyOyRSPygifShiuSlK4SbSmycSnymSZynSaiHyZSWSaSKSoCeypCsyqR+ytSwSuSxS0yeSwyNy2yhS3ySS4yoy5yVS6yyS7yYSvGtAAQmP5iTkpHcCElgIhi8SzyESmFiTAS9HcFIlSO/yyidTEq+S+m/y/puTHG8iDlSOaTJiczKlWymJSzGidEpBwxdTOiLTDi6zRLDSrJ4TNTTmjJ/rHC8zXiHi7yLTZA/hnmss1nnB3h/zfiVzURMKEk5NWThh/pBCBB5iBTcipTpLFTYk5puIkAYI6t+gHpSveiSTUB/w8n1Hkw8hhgHhGH3t6IyN/C6TqEKyQKIzrg/NYgQKcgHhCADAAlqiWTwqMtoADqgh4
BBgHhEQ7t6EzhCJCC0z3zbzLzSvvtuFRhAADmZGTT+mjB/hDw8BgBBh4PhABgHw5BgAMNtw4zvyMysyrBAPhPvuFAHAHhDhBtghwB8AITAiMTwhDhggBhhhgh4A4B4AL0RBA0ageAGTup9i2UGkMT40WQmA8ObNnUZNg0oTJ0dBDhhl+OMg4A/zuhwUjAfgDqJT3UVTL0nuNg+OUlKgBqJQmA+Ur0MhDhDhwpEmjmm0Yh4Uw05mji40mLITrUoAAU1odU3E6U4mjhD0WFY07h/080906UyyuiHTeTrg4VBgfn3gB06U9HuCN0dFAo3KEU8BD09B8AcA/0+VJS2VA0gVMURVNuZzHzjUMgDqnABpE1SVTVUVVU/UzVKCBUIA8GaVM0x1OGm0MGjgDlSAA1c1HVS1IU+i4U/rJVWg8A/lFAH1jOZzZiVT/Kahghwh8A4VHUPVo1Vy4VrObVMzu1OB8U4VaBwqFUs06Ua1yUvuaAeAPgD1I1fVJiG1K0oAcOjVtVYuaAcU4ncN7hBhghghwUg1HBA0fh8Uj0Y1pC21qLKVWw0U4AHgAWDGTUcCKzBkz0OPhS/1HUn0SlR1+1p1f2AVgz5tuVMw5VOFq2RCKWSFaKMoq08Q5B4UENTWL0l2XiGWAzsVshgUtOZh/0UCRTBteH1AHtrU8BCAH1TtlVU2h0GWisvykCEqai/2MrLSrStiF2wi+2xrRS8zaCGW0C+W1EQz42zCFW3zcWuiFSvy3W3DBW4rUSx222z2+28CE29S5S110zL26DJ2/LVWYSryk3CCEXDS7XES6Wyy1SsV/2jWvjIXGrX3MW93NVWXFXM3I3N2vXIXGXJCD3KS9XLS8XQ3D3R3E1KSC3b3cXc3dSCSDzLXbXd3gXg3hXg3ezR3ZXK3aXL3S3RXT3SVgXVXPXWR3XOjH3Pzd3qDG3rW53TXV3UW83sDGXtXACczQCqXxW2XyS9ydX0CcXyip3z3j3X3k3Y3l3Z3m3a3H3F3o3vXC3wDFX4X63kX73lXn39Xq3pCC3XXA4B36YC3uX93nX84H4D3+XJ3/DE4AYHXmXu4I3OXoYKYO3U4DXs4ECCXXToCCOxCDRSxKPdVPCFkpFiAOCGu0M/CBwjX34SlEiEYUYbpnYWHcQ/mACJ4Y1B4aCBB+tD4b33CRYM4JCCYemXYfiBYWw/vOiIYi4ZiCSXu0YaiBFuYcTp4dF44eFH4pvaqEIcJ4UciBBH29YuOz4lSGYxmsiD4ozkiD4WY0g/qLiMksY3CC44OsY5UU4K3W4y4U4zhx492tiJY/43iD4u5CSUY6SLF2BmEsB5qLmDh5oxAOBhyII4BMhzur5FstYvWc42uhZBtMIcBGAfhPhzAPvSgBlYwCZZlaNPgPUuisYnYPPWhkFSB5FBPYBAB/PEhAIloeBIAzqEZTQWCNZHoyIyNMKhhERNhyNM0PhgPeCBAAw4oxtigAgPAgYmCN5fXUhh5aqSt2uEg/AEA/A+A+AGAzAPgDAH5mGBZnng5o5VAGAf5/nnTzhABGOCgwh4hwBgvStRB/AMAP5/gGN5AMAPPZZe5K42hhhh4UPv55OJgfA8OiAP6ROWO54WuctIZHZVAfAPOVHnOS2GBw1xB4hxuYuuKPg+aWAPwZA/N+YhinZ0XvvGBxhhh8l1h/OvgOaeQYAPhPAPhnuWk9aTZ+Y/ZVA/6WO3hPuoBHhwh46ZOtOLlFPTaWBHAfhPAcaeAfu3ir6gX+4qO9ajVhupAPPqOQtWBnhHhzEpapaUCI5H6rAfzf6sh5uoaZh+A4pLuYlIIcaWQauwB/6r6LZDXpiBBh63usB/Ach/4ZhHAjvTZYPGBHhyE5Z96+YsaqupbAgP7B6aQJk57Eosg/7UAB7HbIa16Lk5gB7LQhgYu6B/hHNqtihPo3bha9Y97SiH5H7NFzBP2PhH5SQJmTaEEzmpgOUJ7aa07I4Q6ghgARhns7ZOgZh/gM7fIkbghAAn7hao7j
Z+vGSIbqlK7mAB7nNNbDB4AEbpKgblH3gfbM7a3zbbhBhx7dB/beO6hHAYbPBPgDhnhPhxlK7Sb2ZAOpTok9b5wJ0ah44Ub3E5B+APMs7/Yc7JYE4v8BM7PS7Mu6gHaQNihPbABP6S491kY2b27NEsBPlK8LbDO9OYs2aWCBAHcP7sbbcRYTcSaiOzh+a4g/gHZ5cWAfhD8Xl+Z95UYiZVbH7AaXbB5SNKB48dwh7YaWBnPUa0a1b/8iYdu4ah4u8kue6rObcncoBzqL595G6+8rbIashx756ZBjh4B8h8bFcyP9geaV8hczbta2hH0fNQU3g+OigPPE7g0xcX5F5n2P8IyIOPE9BPhjhD75hwOYhx8/ckqP19N+AHAHAeaJ603iie62YLG/5al4B5NvvYAAA/bY7gw7cXhw9KmaQ7dMO4IeKIvW9PtMncB8Ntk5E8oeBAOPgAar9WzKbbhgA4hiFEB5pPpHoxBGbzBj8Xh4dfGTdpCDbkqck5UPQs5SI9uLgOGdlPhA9z5uAfgB9o8h9EdYO7X7CFtQCH9ydXbb99YBd+CHh9u4Sp+AmA99iFd+2jYwim9X5DnIeFiE+Gsvti+Ecz4yeJn4iZGeeLMzNj+M98eJd9eOiY+PiIPYeR38YPYR3w+E4N4IeW4RYJ4SeNY6+X3/+Y+Keb+SbJ+dYMeeeB+Z4CYn+e+YecYFX03YS+YNeked+lYLjDeIx3Xh+r+sesx8d/z4R/etev+wewzOef8R4P+feaag+g+qeh35YGenej+iYQe0a2+1DC+q+y+6jB+78i+zek+ye+e83B+pe++o+/80fAjAe9/D+be/e598/GfC/HeS/IehfB/EWxe2YF4OfJegfKe1/LfPe7fM+mX5+3+XfQ+9fR3231SgX2Cb5zaf/VfX/WSjX4/Nei4G+4B/474VCCxVlEg/afbTYZYjiJGXl7hwT9qsm/TQ+2feYz4Tfg6qfiCCBv5I4kCCM4kpfkdbw7FfGAGIGXCWfFeNiDfn48x3V4Y2fqCBsvZJM2Y7YlnkEQw22P/viCxYJL/x/nYzf0YEiAHx/wOCQWDQeCD+Bp9+QYPwh4wOIwN+H+EPOCP4fxqCuN/B4fsF8QhjwWRwiUSmVQRPyuXSlwS+Dw2LwZ/RaDueZniZSqFP+GQ6IRKCRWawONPyfwNxv8HD9wyeDSWCVKe1eCy2sTKY1uaQeMQWbwidQZ+Tytwaf0GCw+DxOJv+jWCMj+50x/o4fvCrQSqQO+2mZVrBSqu1iaPAHsyfvNhv+dPObhxhw1wD9MueIsYPs9/owwW7C2t+PB/xF+AOKowfp9zB946lgD9Dv/FIA/CN+HN/B9gH9Hv9jXsHvl/DN/029PipRVHuMAa9xvzL4Wr4TrQjD1eaOADsgHwN5AF/gB/oB/H4eIB4D9AB9IGemsQHgF+ATQ9nSIA//zUnAD5EA8AZyAGeJ4ACYAfPIxQAB8EBwAmfADmAPgAn8ATiQUEDkryuypQA54AHIA7puq7KXuxFCCu2nqaGGAZgKoeAIH4CA/AQPw+D4BgzA+AwHvikpgCMEB8DAtDrNIBgfyYf4PmGB5AEYIJwDCeJwGCeJ8BguQ/AYDwQRgcBhnnI5+Bguw/gJMMOpAs6CGCB7nmPMjpsDFaERVPJ/xamSaEeYcyoGeA+DwPgeB4Hw8H4HwP0eR5DnO4JgnAeB8nhJLRoXRqQIeD5HmOQZgnCqJ4nGeMtnwP5+D8H4PM6YZxnCeZ8nwPh8TUD4HPCj03jwACHkOQ86VRO8+JXPc8z8l9AHGYZ8okfw8H8Dg/A8ONWg/WBn0kh5jr5LdNME0g/pA4NQHGR5wnjUxxnxA6lVbc10mGeB5tjRi7VcQ1enxX9gyeYdinxY9kJTZUV2YlyvnGY6TnjaZ/g5cwx20lpnkecyFGPLd8xRcqQM9UB5ufU5+Dif94YjNR/3Phx8XwfwcKTVzOT7By7WBT91Ydd+DYPPWgoHhaVq+YeH2kH
OJryI9Wtaf5HkeciLY7eE4P1Tg/4pkYP5LY2UNtVKkj+j4fkeeeHnmyWaX2P+bnAACkySD+emO6egaGrO9aKlTEgHpJ/skGJ/gwvLVVcT5gAeT7qmPMzp3GtLSYmADwg+AABkezTp2yAEsABNRAU8eZAAAee4hipI+beh6m7mpAf7sc5+bzvSF74tKaGAEZnpOyTkAyvL7cSQAn8bb/IaxJVOcry/M82ePaDieAEdBfYA9J03UAB1S7dY4cO9gf6Ndn2p+TxoeExRvqUpoQeYWlwnDEcGGnk+A5nk+cbw8fmLy1NlAIaxQgaj2oucZQOBeBTUvAPXOyVwRTTjPfbeQp17OikN1WK+d9LQX1nZfaSgmjdXAmxaW4YByjFXCeB+/pSY/3/MfazAJiZPwPnhei51h6qSKOsZeSVfDKmag/fDBcfDdG7M/fQ7cg0HzrQhIRCNZ60TTpwYpCkPkKzaCfHObWGMAFyPNXOk8570Rwj8HjDtlg/IfNnZgzJtrT4ivkB+cwgkGmfD5TNB1g8TjCxQJnAVZ5cCzj+B4uZREWhDxcMfDGI7IIxNnKBGUzSplwIHdgSAY6gh4r4LOvJVxenxF7KkB8Y5zxzjjj1HaJhLHcleIGoEYMDB4KFD4D4HgHj1OJB+AeLg42HR7UzJCGi2yHifGOsQzQ4FUjnHguJVkWSQIwVKqpQyuinviAAAEqSkRhiDHOOEcY8IFF3dvH4wUgCzEDEAjEA54m5HmAAH4Py1jWgAl9OGYI+B+AAckVtygHz3j/EGd56I4EDNxHgDwACrAHAAJBQgeAA1CzxdED6Cz5HMlScWMAAY4KJAAHxIeVpQJXmIIGMAOIxC3DzAGP+l4gSbgMeKMeLg8J9j+AJP8rFATf0EHgqAzQ4gHoHA4PhBb5D6l6HgDMfADx4B+pehkiowCOOvnwiAH44AZjgHIA8fABB8QElbOgtM6iClfhhSUnpSyXQMJcWFPJZa2SuaHWgopBS/11J8T2uBKhf1/RRXSvlZit14IpXqvhLq3ErsESh/ayLCV1sMVixBcrFWLr6TKx5CDn2Ss1Sau7urFAAdDaEtVfiXjAnenyydbLKlXsvWoktprG2htuSmzpBxwHhtdaG2JPbZ2oJfbm4lxyD3BK5aS5BKbjXNubcol9w7oEIufdW4l0iXXUuwQW693bNXaJXdy8A/7v3lspSc7l6CB3nvZWW9SLr33uvfOe+Kf7531uPeIw1zL0X0v1B6+6zb84BuBgNhmBcDWLv4TB3VpsIYRwlhPCmFcLYXwxhnDWG8OYdw9hQ0TB7Z4fxJiXE2J8UYpwziFZF5LwYAwWnnBpKMXXdxhjFFGMztX+vLjfHB1sdW8x5i/H96bRywvZj7IpacgkGxrdjJWSysZNRZkPG2Ur4ZHpRknLETMqEEyfdXKOXUU4IaNgrMkfczN+zRmlPmX2iZWyhm7AWWr15czosjOCfc5ZizznrNb7s25/yBoGEWg9CGCz3mG6GY9Et7ztfLPGj9C6RvxpPSmitDRR0Rpknui8+6N08YXUGSL/6j01pbAmmDs2vsZezUuWy6FiJwQaukVE8k/OCSqvaeBBA8KKTgfzASPWmVZiy7Gsc76zIzrUgutyBj9RWT82uvCTWJIIALYBFNhT4H+OHYdDR+bIIHq6wum5AlH2aWQgkVNcPMH/a0lOvTAEZlxsEpAPqXjBfIRbd5U7kbK0lswpGziCbQH/v+MI/wRkr3oyoutai6jhH+MDfvCd58B3ROvdXBd2ED3dtMgfDdrFV3q7HiTsTTDM4uXEg9e8D6qwTxcxo7x/meHOWMeZDVoj9LKP4PJQOFOTfIHMgZGFokNIsD8Y5Yx/g45uSMnRSeb8HjoSMRnLeqjvH4Tcm/MLw8brSoQxdvqXD+PI3JrYwB4B/MuJgM5ER8AZH9R/R17XyMp4qH82YMAeAfEYA9cBDx8AY7QZc8w+ADbAPIPwPIgOrj/E42UiyCpuCDBgDgD6DeNcy
zOn079rTxz92HPQ9YYD3HwPkbYBI+AAoneY2E8/qQQAYAeIgA6dTwvVn6OAH1DfFA82GXIPNDWyeTLGMChg+BBggAgA87x5r99irzzdGJjzbP2BgH4DAPkdg8AOtsB4ghzklSyljodAC5E8H8o4HgDwHABEOIEQYxxgm1HgMAfI4FGBwI2lwTQ/WA4I2TUJALGNiBgHwAm/iEOVE7AwY+o2wUCGGImHgkKB4WuDgUaA+E+VAUkPDAoS2/Sp6ZUJGI0JAAeAfAaEOYcbuOSmeiO66/+B4H4IqLOVcbIoxAOn4DwD+Ae9yGeYc86aCuoOeGGJOHgYkiuDCYuhaY0IfAo53BGKuIUjsI0I0EeA+UkEObSGeIinGXzBk/aB4HwbnBwTVB0IsS2ZmjIHPCDAe3O88zYKYhLDYA4LyCGfu5uamY46TCmraZUKMt9CzC2jSEfC+JGThCulwJGZ1CuX2B+A/AOYi6gqEOTDgyNCIv8aQYgH8S6foAPD0EecaaqWiHxD+JkIUNiIspeH+AeqEEOHyGOEAPIXeZUJ4KVDGHwdCkLAIXnElDUIicNEsaRCGxEuYonDqfmcOfut6cbD62HFQuKNstOP4cWqEEfFlFohhDKn8PLBmHhF4WpF8D4ctEmcKvMc3GTGMxauYd4d86OH8cIeEEceINaeMeQ5uVsAJGk1ebiIUNmABCyc3GyGOLcYerCUYQzDGLDF6KSn6AfAOIwcMbOHOd5EwthAisxFcHHHgNPHlHQEdA0hWM6i4f6HyjPH6JWgsI4H5AHIGi7Fkt8HOHwjODwHxBwlwInIcLsIaEfHPIoc3BZIwpK4E0uhIYgH4aXDuEchUB+hYhcOCGOHyOnJUr6KaLcAYOBFhEM3KYKLOgnDGipJ4js6bDUWjKCUnCE+nDk0ESekGIkH4ZokOA8R2kUi4NrAoXe6CkgMfGAEZK3KEbSEPC+pwLOZZF0bnEfLLB3DZIqhJHYT4uoAeGGS0UIiOPUB9LrAxKei2HOMeS0Xep5BIJKOCB+NZFec2fgVE4oHCHgKjJvFzEYVdJ4HiWiI9DUVu6hIqUC3NEzGOyQAO+uUIDAD4Bw+4B8l3KeA8nymASyHC/47uvMH+NqOCA8AYA5NSHOEDAaGCMe/ypADwBw/9DHLkVZHG2GQMOTAOTQJ5IqEOo+t3IzLa0OPOncng+Gnma2cSnwi4nEQRHCDw3JBINqNqdGcsqE9wTqnfF3QCA4dDDG2HPPCucsIi3BDU2HEac2o8HBPlKLI0d2DiZucEpepiJuEYpqpuOm9appQHCoairW4sADIMc2EclMqCLkAQHwAdM0AHEk3u8bPQoaIwYjDUphQ0HOHABiYLLZE01MrWzmJW121Q0hSc1lSgz9SlSoibRAszSyJVSnS2tFSs2XSw1FS1TEdxPo04L8PKtOuQvPTDS3KM1WIGtrTeuPTjTTTVTI4Gx7T2MHS61ZUAshUE1PUIWTUNT/UQJVTo5nUPUZULTW3TUhUiuTUUyJUsaFUm45UrU0rtT60vU9U/THOBSvUzVJVBVNTLVRVTVLHaK8xVVlVnVpVqxTRchAwfVtV3V5V7V5Vwie1CubOk0fUc8/VHU/WNDnWRU1WVLdWZUtWdPrWhUjWlTZWpUZWtUpUXVdVfMlWFThW7T5VXT9VbVTW1U7W5VdXQ7HUHWTUwyvXFXY+rWxURXm2xXrUJXvI3XzUBX25TSjW7X+07WjXhYDXXYNS/YRU5XbX7T3YHXdWbYTTPYFYnWGpLKsurYg4I4uJy3KILYyusliJWrosFYBRgT6CPWAvBY2IMrk49Y8MhZAmI2qJTZLXSIK12HBZUxxZaILZfY61tY+3amI3kJRZvYaIM12GPINZ7YTaA6daFZlaIhm5JZsIJZMJRaWSfadYZXpZcJs4NaGsJZCIOJ/ataPaxZwII12ZHa7VC1WLG5q6q5yIs524w583KYwHPbKtSH86MH+H24wIaIUB/bqOSG+H/cSGeJoHP
cEIwH3baSfb65jbg5mMUGYta7O7SMm7Y7cA+7gIiHMRKAOECHynM/UH870HA74B+78A+GYAemcdcAO7q7Q66ByECJiGAD8BAUC2HIMp+wNYgO8GRFa9Gni9MEA9QEASC9WQKHEAGAAVsazdUnY9o9sGQAPSSPDei9cn7Bq+KN+Qdd8EPeAoFWJQ/a82wGeRi34+yTROQR2A4/CA8/G/KT6UqHAAwDy5c6IaxEi/ePq/snA/uT6DCTOBgVwHwDgAYlwA8AIEePCGfIMANZXY1UFAnArAuXNA0UdA6UiHOV7NeUKDy+wU2iWvMhZBSUkGOHC/KKaVNAuKiluowVhRs5vIMVAwXYhCNCRCUD+AdCYD8VA6ZCeOSYKn4DzZqjCiXEfEI/KelC82+auZoMon4B+owhYNZhyA+6Zh5UMfjI+6gYoEdDyVdKlD4OSdriUt8U2n4ItEHC0/KHyUnC+ZRDsEPiwIUhYEQItgok1jBfXX5E4WlE+D+EdFDjROpFJjWlULPgur7dPFYIHO1KmHPG2OmDDE8YmrAIqIUJA8hhyA829eHUFGTE7GWEccQbOO9GeMgdrdODxjcjDHCIVGtO0GfjrkyH4DHJBfpjgPdlDj+GPlJaMvrYhHed/JAfpHsEeEACPHydpkeDxmO/Vls4qB/IEqFl0HOLcdpl8cIA4AHjg9+hYEAIVgorJlNkGJoAfjEYjlVJGB/RspthFkddPhMayMkIUH4XOqFku13k1k4A4KiIqB5M6lFkBfTfVcs8/KQaUaZKaaeL1nsOCZ/nzQ9GnKwIGAdMDiihfjWDjDsS3oOD+hZCzlHoYyzodDmbrApLjLmkQiznpM8JLow8df8/VKxRPo+Y6i7ERpGJ4B4L5oOKcbOIfkAlNkFpbLdMpMsNtBvoRM2lDM8MeXZkeDyNMeYJ1NML1O0GHhfpgKjAsDwPVBiD/AwKeali68FqZXI0vOEGA+wHgDhOMnrOTM4EdOYl+HGSzqyj5GnNKKcAdOyqEGDgKMepBgZLkD4k/rSB5rWJLgoA+AOTnnZqbPqnatZPwnkD9P2bPP6c4QLqzdRNJOoPPlJO1e1SSpeHIPsQuABBqBwIq9/sli7ebTk1hUEpVRGpcpgH+pkD/RQbOEAHOi4YLdHqyI4U2M8SGH/RmqEGcA/JoPDdGH8neNUH8Axk+ACL0s/soMvt0vRZbKIsWvOM9YfUVvMr5vRX9vXXiJVvTTTvLviJTvnTFvqttYuJXvxTnvhv3XDv7vfnbYJWrYtwFYrwLYjYLwXYdvpwRTzXFW8WXXBwlXlwiuJpWzzZ9vtXPwytRw2zpw7YPw/wdXVxNsyijV9xZxbxcw9kisPV1xfxpxrxrxistwtw1wnxJYVxTrhTrxRVJx7YpYXxVW3XNyHxAtxx5yWs1xEzdyJv5wVyPbXxLyVxPyTXfyzw9yxyraTy1Yly5yvy3y/a/zDwbzNXxyFzLyBUfzZzFzVX5zhzTzdWPzpwPzHx9y9ztWXzxWzydvPybz1yLx/VhVOTyF+mJv9zr0PVYIJahbFamKKH9lpia+lZiZwPKAen7mtxHafbC4+KKH50s6JF30krpH+nx07bBzPbfz7Wf1a3X0yIpsDFS+ytzZKHwUeNVbEr/1svfx70j1F1q5ENi3JZKN6NhtN1+yx2F1D1oLl2A1eYj2RbVFfjgIP2aylYhbk6O5s5w5054H/bydxDI5FKw6TOncMLGKbCyjYJwYbcU5OyXYhcw7MAG7QPLc67aQBdCH/dGH4AYQAkgLC767/djdmQ73edYaIM6HiEADGQMH4Pz3rUFeK9EPteSPVeWPdecKaQKrD4IayImPe9qAftYt74XEkpI4qAfC8EADEACNj4qyLYhfbrmUI+1fkURfrfu/MGCHgDB2nJWNsItgFRnsQHPgN3d5YJwGGfyVOHADAhl4twXg0UJg4A9g9A5A9nunGKj6Ir6qevNhY/LhfBcI8UekMYCOfC8VmavNGwDh6Wfh+
J5CXCaYyNfiR7C5E+HqRhbikIibj7WoYIeXX7fhl7kv1Y3nhDtDxD0YyHIgsg45EPOItihoD8GdD5YYCE+Hj8SZQjB1f0c4HkLI/E+LzkUahFHdZkdhShmOrktl2PIbiSekMJwNh9CWz8XmRlOcBlTJDlYcacYOrmn9g3gN7Guc3m7kygL9wgL9AVOc79GxjmSd7mXHmeGfvHx4Idodp7EucH+HuSfm3+ZjqLcgYA/3gvb+kmB9F972DUFnfGVJDnmE9JL6+H453/CJQIUIAw3+nh+jw+j3Ox3y50e/3+44cH34fD/EXiz3i43C/Di/DxDpBIZFI5JJZNJ5RIE/KZZJHBLZE/Ii42O+Ic8X8OX+HH+jjw/D8P4Iz0/DIe+Hm/JtMJMP4fAz/BoQx3jRqe/4lFIcn4vGY3HY/TLFY6ZK7JKZfY5lWHGw3jN34OH8PD+Hj4fKDBEPRUPR3i+KXZ5BTognKjB4TVUPb4hWInFX+j3Gz3DGn48Y9gs1m63nJLabFawew2Db3+8I8fh4P7sPLyP7254E4Xg8MBnqc4X+Ph+D8Qw3DssZEXw/Mgh5plXA+NsfM9z7FZuhINBTLWBwGwIFpzgfBwfg4PtVeQ8B6K43G4GC+DBgc1TngAB8HN9CGCg3OwYFjQAAeMhxhkOcJgnAMAwnw7oOMg6cGJI6UGOqmC1kA7IDoceQAH8AB/gAPw/g4oJPgA8xzsqcgBn4AD3MEpx8Q6AD6nOZADnOcABqvEb/n+cAHnACJ4DEAJ8AEPkFQbI6RQe6cIpatZgDiYwPocecbxuQJ/D+RkQkAc6ingcZzgefwCRWs6nH8AJ/gCY7EGcD5znwB8cB/HR4B+eAYn4MZDn4A0PwXJEjyU6EmJYtaHGOwRzwYpySmesjG0DSSVSRQqU0Of9ErPRbp0aklHrHSNJ0lQbn0slFMU0slOOhTyR1AsVRVHJFSs9U6T1TRVGJNWCmVlWcG1qzlbpNXIAABVyWVY59kpDXqYV/YDp2EzdiJLY1kLFZbcV5SFpUDajNWsklMW+lNm3NdKWXCwVxpHct1JLdF43okd2LPdyY3qk9533fd7rJfKQ3hfyHX7gt44AseBJBgmC4PhF04UsWGIdh1/YhiNv4mpmKn/i994zjVgY4mGPZBeuRZHUeSpbk+V4NmF/0qsmUXplWZVpmi1WPnufZ/oGg6FoeiaLo2j6RpOlaXoMpSPk+majqWp6pquraNp0G5fmGcZzYOdtDmWu69aewOtsWyXVlqWa3lex7Sz21rRmu0bhjezQluu7ZJvEm71veWb7Q2/8BUnBUvwnC51p+6a5xXA8YtXE8fBm5JRtuR7fyiy8PVHJ825/LJPzGNc10F187XHP9OzfRJN0mI9N1iT9cz/G7d2fK9TYvV9ysnapd2/M986HgJH2GEdl4kk93a/e+WmHjJF5GH+gznpJD6mMet1vm3IkrTJAf10WXMqQA/aKmochqHWelNSoMyKR1UlGs3N7DqeEkPwod8aSPlJMB8QZTFGl9fadEkj8X2EhfoSd+zd3IthJI/wf7/iRwAJKB8BcBCHIWgOSkfylCRwKJEQklsD1pP4Ic9SCkFoSkhfMRFrqjUbwfc8Z0kkKFMwNgCwl7y73wEihcSGDBJAPQzIcCMkD7nvw4JHDoY8PIMw+gi2eCcQnyQwJNEeDg/olQ2dVE4kUUIpQ5io1pxqWBnqNIuP9RY83xgcKSP8fJAxzvhBwP9R6jyFxuAxB4lqZw4pTGelhLBko1qPEeQseCUkuk2IgPkfwexHk8EeDwf4+CIAAGOSt9iUhnj4HvJIPJWIKN8iq3k04DxmQeHimlDYgA/A+B4MBOwwAPiYDOY0fACE0IcjoIBNUf4uyDH+MAJ8jR4APMkMwB6KABj5EDI0f4gBPjAB+IAH4wAfBhEeBAg4MAPjgiUAAYgBx+I3H4AFNA+ECA+DAD8D4gFASojQ5If4wwDjICGQ4eAEB+AQD8AiWgfBCBiA+AY
B4kAzqaHgEAfAIB/AID+PkBFA5iEwKcZkf4ggHjBR6IcyQyBjAhHwCEfIEJqAEA+IID4hqWgeCCI8AMzARUfRuAAYAQ6TsfBAPgGA+BEDEA8EID4HwCPKLPCpHbjRHmkN0acPAeA+A8B8B4HA/APG+IOIcqw8Tlj4qqH8fFVgeRco0x8sIDwHiHOQMcyQwxwF/NsDg5w/gPEHN9MyvIjwD0iAPW2AYxzSm1kyPgn4eAeAOq3VyHSk6mNbriaY1AeC5l1ByPyeQjgPjPq804cY8DMFyB+Pg8daJAsfJsP4g9TxjjjriOEfJf6qHOs1UcQ5BQP2cEeA+uEzDRqZGHbMfFFR+B5NSB+vsR4Fwph+vohxNCljwH9KUngjgZnGIOpkR45lG2hjgXKsiH68RIOKH+1gj5EDzMkMe2RmA8B+ttPID6bCsDOIKA8hgDxngPIhFEfI4zjGmI8+OvIx4jxMshc9gZIBhk1JuP4GI/wME9BOcYgo/6RDkMgnCOBOpMk7H/agliLTjPjGeB+Nt7bZEbstbYP4nr6oWB++Mg1sLOvoh3gE41xZM2WNYD/BAf8FOGlS35j4A5Q4RwnhURyKA/kFHweYcCjSajjwkQ4m2FcSLnNOsjFGKiG3tHGPkcAAMXsfxiiNK+NbdY4xTf8hWZw/j8TkICyxdchAcRgxLBjDSHDABHksf8cMmk9P9lER4gAjifHA07K+WY6YUxHEg+ONciA/HjmMmmZczh5H9bYgaIxwAevQVEH9sAfxrzkPkYAACKpCAJnnUwx8+Jyfvn9ixDhBmTKWTgGGlBHUA0UAeTo49caRwniHLmlswarzEZHTsfZJYwE8RUfADtMY3EftDVqqhgVl1pnues9kIONv9hAf+wMKB/EdVnKNnJOlWHGPgcI/tg4hQVl0lBhLWD/2gOEvuZMAx01DmogkFdtSHfRt3OOO1IjwDhgXPWtqlVL10x8me6icEfQUI4HhFAfiONgUVTW9ScWkJtvuJBEEpWdAeOEqnBB5j92rmoT4Pxvi/26MYw5DNV3+4gP0f4/SOcUiPgjnWfsjuDLYW6f2BS6AeB4XTkfJTZXRHxfB8dpup8tH+nK1pNNN7TMxqC+dR0o4pGcVG33Dqn47I2H81AY+kEHuZ0ye8EqnjBqiPCqdVary00UB82JjRwjgNtVQPFZeq78X4pl+VbBDiDGGPMQ+D8ymo2rnWvN/OYdtr9SLVflY3EKNRJkjliEsWNryJ+x64OMlrEOdmGqGEUh+PkD9BRBQHIksoioACKfGov8g+pRJfRAAPGAAccABxgADtePlF21QPoanmA/5YgNTCPBBMzVf0Ud+oQ2ikfA4NaAAqP+r2Li+9xWSf81Kc7ENodB+IzRSXCilLl7OtDQwBDYAL45eTyRRKZBHiZacj6adbapOQACa4D6bCbgqKcThwcAMb8b6hNJNBPq8QDxZCeaeTXLppxDdYf5KIkAvpKwkZX4fj65CxG4fAgSAYsRZIl5OwfC0pBYfzXArAf4YKpoeCprLRQAiApcHghwvprLfAh0IDvTc6fBRBBiGLEpaR9IkaIcJ5JZ4SMowUKjLxWZN4lsLMEb96VUKQ6cL7fpYAR8LqLBeqyMLhDhbImBY7csNTyJWYQ8HolEMiCEMzJEKRY5rsOwkcPB9RWaXEMbi4zUOMKJ4ZWYcDcpmURyCR3B7gwUSqK0S8TB372Z58Tok0TUM8SEUKBEEiG8TkUx6MT5x0VZzkVCMMVUV598VsWcWh2kW0UsXAlEUcQMXcXkUT2Zq8YkYsY0Y5qz9pcRmsZEZsZ0Z8Z8ZRdsYMakWMasa5ikbEbRasbcbpgMb0cBk0cMcZy8ckcx4Mc8dKFcdUdRj0dkV8d0d8U0eMeUTsekep7ke8fB6EfUfZ4kfsfx3MgEgJ1kgcgh0Eg0g5ykhMhRxUhkhpwEh8iBu0iUiZtMisixr0jEjJmUjcjhlcj0j5jUkMkRhEkkkpf0k8lBeslUlZeMlslx
dMmEmJb8mcmhYEm0m5UcnMnRSUnknrjMoEjUoUf8oh30n8oxQkpJ2cpEpZYcpx08psqEacqchcqsq0q8h0rMrUrciMrsr0r8iksMsUsci8sss0s8octJnMqUtcdEt0kEuBmEtsuR/Muskcu8vEvMk0vcvkvslMv8wEwMlkwcwkwsl8w8xExMmUxcxkxsmsx8yEyMnEycykysncy8zEzMn0zczkzsoMz5uc0MQE0cp80sLc08pU1JU01c1k1s0018Zc2M2U2ZfE2sqk25hc3Mb83cbM3pjs384E4Jl04c4k4s0U450c5Mcs5Z185s5058t86J7M6Z486p6c686k7Mdc7cIc7cuku88Eus8UuU8kuEdwQEOgiIkK5p9byQlBSIeYsapkck9E9UHwkE9od4hsNy6IkE+UU4lAb8ic+xVx+09q9U94k8+M+YlkaUd8eKB9A4kVBIlkK46FB8dlCKMc9lCk9ok1C459DMdowVCVDs9lD4ktEIz1EcdNDZTgcID8ITCoR4fh8bLAAa3oY5Ow04P7R4TAD4H4YzVLf4eYc5CwY44wT4pIPIhomwTio4eITwGbgFI5KhRbAQ7oA9FMfdDZDZDkCIH6cAR7R59AfwAYRDY0RUCShYBgA6c7epCweZE4h4iYAdJhCyXASCZlKSn4A5E4eQAKYT4YIAHlLciFCIfgGC9ADChID4AaZgQw3xRLRYQYY6jyjqloA4QIBAQY7Q9IgQeYcA/Y5gYNPAf9SQQKkQcYTwEAcIQYcAMIeAEKgQDAPgBieVLkfFRJ8YfwPyo6tiuAT4Y63wyLXquDMa9StwYdZgjIjTQtZ4cIpApAjwioT9ZYyQTy/QeI2jxYu6qyo9AlEomwiQurwy/QgwYcNoyK9qzorDFKuAYYtojIYcIQeYcYt4v7eoOdaorAY6t9Vi3weFaQv4u48C+lcQs4D6OtcrpLhrVKkVdgmgsws1eIeYYaTTCFe6Oq0Qc4flfgsLhomgY7GSULlC0w5wDgH72FhIslhYxzGLIVh4mlZImiYSasJMNti9jIm1IyOrrdj1kCCrVIyQhNkqTTrYfAfzjwP9llRFEot4D9J5EZ9DklmliQY6bCY4H759YYebK89LQoc9n4eNJNoRDQAYT5MAQ7GQR4fDD1pQPAABBVp0htCNqNqYA9qrVIYZKS9QmiaiRrRlr1sBDdnyTIMdsy46CoAFtI9AR4glt1uFpdudptFsc9CMIVcowzsIcdqwcdvy9rsIhytqToY9r6TR9lw5PVxSUp/1tS9lyNt9aa6rnFy8c1CNcg4yLi/zG4cIp1v75MJLIVr1jAmhKdsZj4E7AQjqUsH12AD4ggmtyYj9JV28+tcdmDpNdB9F39rDITgAY94l064a6VsSOofl5bo95xN9tQeN6LIV2YedpVlNy1losdqR8d3avSuC+tHl4NYYf4T9Id01i7ALqFw4fwA9k99jY4fN+AqlyYilul68cdRIPjGoHgD6tlLdZQeAit4NiKkTw1rwYI9AcI3WBIB4eI2wOF5y3ttUHIggjVyY1Quj2FXUetCJDQhz9QQAD5GAR6XDHwQa9oYBOWI4QA81076K0JG9w7sIfAAZF152I9tSzQggeBGl2gP4AGG9Q9u1Eof6GoYCa4P4CAgsSTHwQK9rR5HcCIMdrwYi/xON890jOwfgAl5zKuK4oS0pN+LgQFJVHNp5Bh+0Q4kEN2CslchmQ4pmRUruRokKOomEN0RkjmSQkCU4k8LsP0p2TIhyGolsLsSWSJBp+0GmUYk2D+U07s80t2V8teWMtOWcs+WssuW8seXMsOXcr+XuVs787s707OX8reYsrOY8q+ZMquZcqeZsqGZ+T+YWaMpeakpOa0o2bEombUoWbkoGb0nucEnUeNAeSaBxS8SZ+jIp/qB7f4kiDwYbV4PCGtAAm4lDWAkJLAlmTwkQqokOepekeJJWH6HpVAyBQGdUPudqx+eGeRbOgGgiCokQX5DiQCCsScLB
kWf0/8wVl2fIAUPgkMEQkBbZiwipHUNCMAkmdx8WhhAGhxOWepMbXA9x/w/ufOjEN4lOjbQugExQselhj7wxeR+2krNWo+RIpmoKCulyfOh2i2oSD2mop1GQsmfiF5KenxdUd0FxgYH47YkekYh2ow/+lFBWfehYkueEIC4uUWk0GmqYrDHwsWq6Ik/+rUxwsaagkC4qqOsOopcmk+hGpWtOd5AGtgP4QIkQm0J2uIiRt+uukmu+josQcJB8IQeIeZR4YYfk+QfgA4ng/6YQD+zROAe44wc6ZY0wfNJMHSUNkgyDTVAC8sHzLFR4B7pYT4eAe9TafOxEIFr4lcIQyofgcAHG04P6Ymx43QY5LAYwHiUsIQHpg2zQgZOm5BLA4zbttQrgc5/wYAOZR8/szwsZLokMIQpD+gfgYDOoA+NBHiauKRKgQAfAYA4wACZaehHYI8BK/gYAI4TBOQeAAYeIYB/oCAB+KRDgPw8oBgB4A4fAAIeAYI7G3yTIP+4AYxCwt5L+4oHG+25JCz64H7WBDoPwAQHgMAP6bVRe+SdgAROhAlGRHoD4RgBwAYcXAm1OJAOQAaX+n9BuTSTKf4fgBAPgRhOgA4AID/CiaoeAIIeQELNHIwPgCAOAB6pIf4YT6BAQQ4Z4QQA4SIvoeAAIeOfBDQA/MhDQ+YB4BAAmKe/AQIA5Cw0nC+FIYggQt6TXD3I3EMHwDypIBgH3KoHgJgH4AQD4EFHwYIeQGCgJOgQKkAcAQ6kARAQgIIcAEIqvSaY4OBPICGgMTJ/fIaqjqoHxKQH9LYY4gSO42oeAHCxTkKsdcKv1ZnVYZ70YgQcAICf5M4AYAfXfIoDwDmIAAAIIeAEAOAAahWpxFQP+FI4EE3PYwDqrPXP4DwDyq4PixXU6thOVfA2wjxOjITzNf7yoYDxNjtZjQvcIrWrfUXIQ5jqUEQ3quAiAc7eo5q+IHiyw1zDLwxAQZ4yfgAiHdIHBKUHlTz9CxYDAH4AYQNboOAGalnZuvrQrqEIzAQwA1XaxKVlTUwDwn41avpOS4Y1A4vcap45AD4YYQYcNbloC93djAuS83TjAkEIy6iPI1fVC+oR8+XfAy6sVpguYoJR/n1sqPS+o5CFYeAPnhA0YYLwAPwBwCAP4A/lw7gH/LOePZ3aA4fablfjonZLK9HfrILFLLTs9/VpocYQ4eZ9A5AjIjghgt4ea+DgHIFAIp7rYnInfei+oriN1jwOI4tpgfg1zIgrHusICNdf4xqW1X6Cqj9jAPAP4BxDYA/aIcAOGLoivrvi1i/sHjXsSLnjzdz67UHkdmSf3tZOgP7horDzFfF5nurQrs/vPd/m+e3voGFyvnoY+m4c7V4Ofw3sgYDkJGB8Yc4/sSS/Hexj+/HyS1g+5BHy7WHzTe4AHzpbPOv0Iqnvn0nsn0zChGHHwOKy7PlvWHozH9H19xoR58a9n2nugQBNIeYpIOE+8yUT3eKOAgAwAAcP4ff4/D7HAABf7nYAAOb4fh4f4cPgCHh8AAPfw/c8LfAHZw/Y6Pcb/eAAeD+g0sYKHfBwf4OAA/B7DcL4ABwP4AgzDYL/fB/oLzY7xf9IfDjfj4fEVP4eH7/D7+DEbYA+OL+Hh/DgAA8GeAIeb8rY/fk9Aafjrzkzxphxj8MedlOE1f95vV7vl9v1/v7gwGDvKfvb5oTxfwxf9pg0IY8GhrHf7ziUUiw+jJ/f8dc8tAcjksncBgeB+loPYKDmJ/mk2nDwGE8nz/oFComVo9JoVMp1QqVUfw4vLwDxwrlezkGcD4sp4jtpqdsD9ucbxcNmz95utmwnf8GDwXhvuGvVPeOK4mOg8JR95c8ny0Tr3QjNTqjn97/QbP9yTmCcDTtSYJhnwPDXAYD6bnDAR4D+AYHtsoKhqK3akLgpqhoI4IPn4rrGg8+yoqmn7nIm6KCn+94PnGt7srk/bupk8kavC8cbMKvahvS4Z/j5FTIEee6GnGfr5wQDg8q4Pwfm+
qiSnud5xv80Z/wCeA+NSQZgngPA/AcD0FpxBw/gO97bqGYbdKQeCkrKpzlQ7D4/l+qMRqkb6DGOeDnxST53keecWxe7Uhsq55/s5HNGL7HEcvMvJ+B+cZ4RQgjHvccMinwiKJj8Hith5JsWGOQ54kOeb/EOkzbQEHCpn8A5BmA5oeA8DwHwYcJ4J4A5DkPNI/AHNjcHGyynVBEiqHwroGB/EU6KkQ89z7FC0D+kxjnGD79OunJ4nO3buug/FG3Of9HxsT6kLylk+DwPllIMD6E22f7rucy4eB4PB+B81DDAeA5hnGY9VEfM6TnCYCU1iAABoenUxAPBhwHBXxh2DCiwWLSdTqdZoeB8D8SmBJqpA8Pg8K7epHoNSrnjxbBDmOYZjgfVjrwFcOC0RT9F3RRt1Rrdi9o2lJ+ABICDLCY5gAPdJ4ADJGljwOAfAe2pAAOYABnAAZn6hVpwJUADOH8sBwADpWSYq2KdzNjU0o3Yu0gelJ8BwAA/J9qIAABtIfgBgAAZanyDYYsqNWxqBwAOQAHrftdw7AlA5n4jWg6FHOiPJoy9EAD+p7S9gBoScEJHAPAByQQA+D4A9ckAqZgBPx54AeZ5wSseAIDgQF3AhvAB7S1BBwYfAEV8cONqEvCjXb2h8AIfAMEAnpP7/YmzkAPw/gNlvaLEDiyj8AVsdT3Jgck65xAHcJ4aieHMb7zlz89Rs3VQ/i9zWUIqY+A/ISLyPhTY/0JCPKCSgP4+C0H4MopIP48IDmNLQhIQJfSOo2U2P4jgGFNprgiXkoMB4NrpL0WkvTUTiwNB+PBcyLoClThUP+Fj93Ow4L0u1GxT0awjOLDo8EPjvwnXQSeIUOn8rnh4jWIkRQfjPL4m6JJg4nmAMU5tRsSIquciWo2Jp5IrmEA/FIvcVIul+jHGof0WlGRcjS/iJMYTwxrMAA8yRek3EphvHGOxfEDv3jhHFRkX1GR0iGjV0UUyUMdkIUI8DSpBSPjlJSJIwI3SWRsP5YkmpPF7kNJ+UUo5SSPlDKWVEqZVQ5lXK2V0rzwSnlhLOWkn5ZS1lxLmKst5dS9l80OUY/JfzDlTLxRkeY9ObjajaIExJnSxhwYgvxklIjgaCP578zD4HkHeX6YSii/TSmfJ6Y0WC/mSjzEAfk2Yfl5maYOM0gDOyZkROOOM5S/z1nQXqdU7EczvMBPFHc8y/T1ntF2fFBZzl5nSXudcmTwUAL/QI89BC+0GoPEmhNFx/xSHmYgfpVDEIencQcf4v51h9H/NKZZ0x/jfHmP+kNIZhGUPxFId5iB90rnnFIH5hjDDnKfG2mI/ZlugozKaHAwAftsVqH4GBCBgA8AGPAgwACQjibwA4MAfxwFTdyMwA45nWgPEAP6qIME+NaH4hIeIAR8iHkxV+TAGAPgBGYA4A6sh4iBqsPgB9TAYDwAiPxrq5qkxpo2XwrIAB+CACAD4EC9RAAca+hIlQA6tDwAcEAH4wDmAHGQAMcgAR5NaD9ZMEAx3H1YaiPEAA+XIA/dE7R4YARkAEAGPx+BKgP2AJ8CBtY+GI2IsTLuHA8R+AwH4DgAwHgDR4B5HgYaEk+DDHAF+dYHAPiGIMM9iKATTAeAYD66IBBj3guuSgGAeATgGXqA++AAxDjPGGIAMI+AwjwBwRSbAPAPgEGDX4MNGLjxeuSPxzOAExCPKlHgZ5lFtnYMaagB4nz3iPGGMM7A+CVlRA+I8B5JWbjzTafTEOKcRXpKAenDxEzOpNQWIccJlsDYHXRYsvY8R8JMWhiEDy2cQknW2UvCoP2XmUE+OPDi4C0gOxSaMY9QiUYwB+MYqZJMR4RHGPkpgcMYTYyQB/GhZQ444sVcnHqo8fmRRWB/JY/xjjHyNOvJBJC8sGKWWYtIjiEkJXxlMp5KwcEdZxobLa28vD8ORf4zjLxHoZzPmi5D98eOGZRn93IHxALEzmPwk9DxHsdt5nQpjmA/5+MiMfUpDyUHDbSAcAOsSE5cHy2Ure
jtN6R1xpTSrnB4kpB4R0qRkX2AfYhnLVmoXviOY6PAAeph+ERH+I4khJNoNPAAUkfwOdYiAB8WDWoz8ukP1zPN9lPx46919ELHUO6Yj82ISTZDL4IDP1BhXVKixgAjGePgc+0yn7WGOSTft6S8mKBzCocBBEoDP3IPkyhitHAAEfkgePE927uuSUjeS0CSFSZeZLT+zNUn4W1nsiPJ+Cs4YNBHhSkzOgORLeniJJ+KGdD5nfSKa8b8bPJu/hJiOP7FyCy882guTcXMLnrU+fdrmRYMOfhO3YNi/2tw/m+3L/JA4uM8uBvOgP36EUnNexIykE0hkQcedXv5JKpkzDpZQ/iejLvRm/bdXpLyuB/VW48uowH910P4DySaS7H2SaMAthrQclg8T4h8iK8JPNhbOI4EYtOaWXx3F2cDDHOTklAfEl5kxDiNe3EVLFbD5zrwoPygE50n4nHMOFmg4D5sS+AHgOAPEGxrOQ8Bg+VActkQ6EgDsRuwGEeY/ry3wJJ8lASmzZGIYqrpipCRg+qDgYpLXzrpg/bWr2/ntPav3HxuH3K0LJge4sINx+crifEWy+wf4gARuOdaP9vlkySP8DmiGK3jELatOI8GnuIidPvCqiaOLmph8AAm9PzJKnOCOm2NiAcCCgDltqrM5K2okBHFsqvkrh4hjEWgHqYhAjhg/iSBgseLDCkhjjEJMBgF0jIB4OIpOIsiDGEhngfh8LCq9jiQJpCo0ojFGDKI4Qji+OfjwwmwiDAOyoNLjJtQlQqIdocNIwoPzoqwlwqi9wvQsH7l7wtpgIuwwjyQkwwQruEocBhhBwywzQuw2Dww1C9Q0OxIvI+w4ugwjQ6KIl8Q1pzH7wOw+EbQpRDRExEREw+RFxGQyxHRHwoRIxJQJxKRKvaRLxMOxxNRNuNxOxPNfRQRQs0RRxSMDxTRTrExUxVKMxWRWp7RXxYJnxZRZpiRaxbJfxcRcpexdxeJcxfRfpaxgxhJZxiRipXxjxkJWxlRlpVRmxnJURoRopSRpxqJRRrRrpyJHqKIupkJPqdxFFGplpkihi9CpIbw8KHJlLERvgHxzIEEcopD9qSwwI3BwGXjAR4DzwruZC8n/xemho+oYR9x0C9AfQ/l3EgRBDCpkIPR4ICEax5i9FxC+RyGghgR8i/x9oZpvFzH/qQxyRjFGhww4I9QfmggPSEF3SVjCHMx2C9F6wwADyIR5EVp+JmplyRBHyNI1ItIHSPC9H/uPKIRnlGhjoKiUIXmgiqj8B/SWjBoapzpkLDSakbKKM5yLG0GgkWR9Sfx+yPi8lNypJYKEjdodnRymIaynyEjGyiiqSqJOi8yIjySsScyti9SuyNyvygx/yxS3Rhyjomh4y0pujPy1jNDGi/KgoVJuqYjuKGMgh/ICghptvDC8ssh/hPopBnqYuqDEFVF3DOM5hfzNC8hPA/hzo2ppEWTHpvknihPWpvhPygN5A/plhuD8B9hho2yxg/zHyyomMdgPhyA/iGABgPAYAEEVh8AHKowRl0ttreB8q/CCKnTbgeJMLDK7QOh8BgihttgGEJLALABzPkh5mIv7g/AwLPx5gYCDAAhjADwYB/ADCerAKmB/AAmXpMB/rQBjoWB8T2BwEPABlmgGL2gPnUhhgCGsl0hhncnaKrHgxvyjIwMdhHhyCNT9TkgGFszmrJrQC8myihK4CVCLLHHaCuHaLDHhnUiUCcm+g/gEH5qqgDhyABLTAAqznCAgNOAnkVgQEJAAhiUDFiB+ABG+g8KzCqz9tOHagH0ioC0fH2QIG9AOAQAYAHnHhhACMAT/AhnH0JNkEVRkzhId0Mg+AYA/AMAPDNiqgfAHPfICUSL9B8AYHYU1A+MoCuMoCqADmCLXjmnGNkh4N1icBgLBg4CLA8AfCwhDIEr6M5ECh4AwihU8VGg/rpkVmXlnv7muiGHnqzADm8gYAeHAGumIgGFYP7mIgB0WA/Fn0Kpilzo4B4hHhmD7A/U3G
mSnsQhHoRwaseCJCMj7CECuCECqMtmFjmg8K7AAliO2hzjsB4MPIBF5GSBPsQkVvMGbMatBnMynwWIEt7DDBDlaAgICzxVoDSg8U8K9GCBhhgAmKoj+ABvYt9VZ1aFGyAQY1ch/lR03MZS2GXouIMrlD7GVjkiqgeVkl6iSikFeA8EvmBigsuk+FKh+LlFlMG1tsVtlEMoJ1wgfQWMMsHIoh/1zkujOP01ABgicgcAQB+APFfsNhgB+AQDOPfCcSxuvSRlGhDsdhj1/kOFRyEWCEWi9Q4CmWEjMCC2Fy4WIDzkEGBlNsiCllLWNipuRWUWPs52Qos1xSbyeCpIzV7xzIHVABwjsgcG2Gs2gkuJJPMkr2els2flGS6OMhOCZWig/2jyENRliHsGohxvB2AA+W+WnmW2ohAVQihCeWqxAtTLlLlCOhwWuR5tAWvmyg/osisoJ1yP3EJNoEDWWHwUthgrmrd23riXU1QgPhhh+AA26x6JXKEiWMdhhhOMzgMB/03ESS2NnXBA/XCB/A83DszlMWoWHiFjziZFdFNjKBHuAXKOPhAMgtrXMsJCjiH3Oo2nCA/v7SeCvkJGvXSjcLohwXY22l/mcmwXUttiWLsXZjGiKXaxmFGx/OEhx3dh/3e3fiCXgj8HXkJTVXjiM3k3FEVWHpkCYoEAH3ozmVp2NN5AfAA3sA/3tWQIIvmiL2SOLVOOHVWzv2WAfiwhwCmG932q5AB34TbibhwAQyx37W7oenNi4AeXE3fhOTbyEM/C8hAA8GYXDCM3E2GYFiEy6EDvC4IDbWU3qYKQVuj2u3NCjn/h8vBg/GAMkYQj8FZ4SIAGKhwW2LHW3mNmlYXsN213627TglGI0OEhwl+jgCuhGDO3AI8oYDKkUViioYjl6D3Ed2qYmu2s6YJ3KjiYp4NWvokYsDoGAW7WyYvEuW0YTLA2YAMA+YzMNh+AMCp2d41yX37pVqEykikh4452A4647skAHokJrCkkUHYCKGW3lj3IeEDmsgD2LB8QZZDt5NCuj2PYq2QiJCtgfWfZJIpPfWVihWaZdjmgcCM2314kPm3BBsaW6lI3bV+QmEskvmR3fwa2CGCC83u1ql45Zl55bFTIuEDq7ABw4Bwh8sOZfjh1dvLsM5iPRB8GVl/yEXQ1tvklaoC05ABgA12l+W3hBvfkPkxAB17hAjBHM1k43EchgAAoeNgnYAcU2Xfigh/OaBH5sZzGz0BU7h+CLPcYFGmvtSAZ+ifAIFiIKX1Z7AcAcOj1IM3jKBgijlejGr2msAHPe1OAP0/GINXVRNOiU1GgOW3tO3Y04tOGI6EEPLyaK5uFGXG6NZ+n7AAXfoOgDMkP7T/Gz3ZB8rHAGG+jHWoBBnUIboBCagMGoriH5Z7AeYLv6oEtABgCjwIDOmlgArzYQSeNOCDBAAhhhVQh8UqSaKsAfanGct+6osoBgAh6MKrK2mz19JSqE6todiJEIB/hA6wC8hDMkToXulZZ6B/0PLd6WF8CExCjcHgkxDGrflu3qyngHzUBHhwUgO4B4CjwYB/0ko8BDwN6iz+kCvgXHoJiq0DAfry33Bw3Ygfr4bqkHZPoM7ORqpKJBjAJxEcyOJU1+vExQbwC/7xEbEHpXbzROJKKJC+71kawa73QiRO7iDv76Dyb7JWk1iH7urjxOoPDw7+Dw7/JV8ADa74JCR1DDlzyTb/6y8BRVxtRDRs8LpKcM8NJCcOcOs08QRJ8RcR8SRLcTcT8URM8VcV8WcG8XRP8YOgcP8ZQKcaxS8bxRcctKcacdka8e8fEb8gxUch8B8i8LcjxXck8lclxY8m8ncnxaco8pcpxb8q8rcrxdQ/TTC9s6C9xul0IgZR8QRKIN5th7rWcvi9qQi/EiQwC/8xcWcyqXScBBC9OIMd84J5C9oIC98x8O858uKSx6c8QxC+pmor8+8a9A5t3pc7qBJ69EC+dFcZdGcuybi89Cw2i/dJc+R69Kw/BzzaXfT
jBjAAjqh/gAwfCqT/Ioh8B3uMi01QhPiygwAcB7i0gOGouJDHVbjEQnxhc5hABPqmVn9TFBdUhkR3moiswI15AjnUiGLedagcWbivdmdoHJVbqmHacSQpTSw7gfgCAPhBAPgJgHz4z9uqAAhgBDh4TKhgAcAYB8BBhBZdtZCUAg9qAEEgddB/97BgmFBH9y9x9vIccCw7gfExFdGcs5yeOqL7Bx41wXmZmBlgjKFeDnmRCCipmElgiTWyEF+DP0I+1kF6vjs5iSspqOsOFKikiV1GsyFtF8ETg8PGeOCqGdFtmXtrc/xqOyriQwWG2HgHlxeVTOCju9CkYsj3CjCGtpiJinlMMslWCjCSgfiO8wRruytoehO6stejBHhz+kOMj0X6kmjIuMwPHMebY/MsmcFw+rlJ75Ro+uS5DOiuhPNxewj9KOuwNXCkYg+5iFsJeoe2+ptDCFhzhDtrGlQ98LuynLQ7geB/+9Cw+whxjDOIB4/AUkF/A/iFAA/C+2epeOfEgAkXfGpHcyIceu/Ju6jl+wh5/NOwIzMeAfFys3fR+o+3NDCDC3fGy3+6/W+7jkhPES/ZLvuwOYeYNEM5DbM+ebDlGSkoEJFBs/Tc8TegeS+8knFu+HB4ip/Nhjqdj0rml3khwk/okN+cD3B7zCGXssTYfWH7q2+hCEM/+wygfN+lCyjiCAB9Do9wsd/uN8PF+Hh8H4eH8Pj9/h9jwR8h9Hh9jD9Hv+PR+QSGRSOSSWTSeUSmVSuROCWSx/DwPB8HxlzseKviJM9nvFwvF/vl4vgcP8HgdDuODOF4PB8QweDwfRGjUiERwHg4Pp+Oy+vV+wWGxSOXWOSP4fAAPoAHh9zsGKvw/v9gz14AF/0R4UVAAdgAdgv+7vgAHgcAA/WoDv++39+RxAB+1V2zZXLZew2XMR5/D8AJ9gB+3MC43O60MB3keAai6RwA9gXkBvwCHgMIA/5/FsAj6/Ho9gD9ARzN8XjceP5rjYHNPCPXJ/0B/vwPv/O9WPMd4D/nPwH0Z/Bhw3R/waXdvk+Tkev2Wble2PvyVQaSPj4ff8Zv3/f5SfOmekr7PzAcCK+/b4HylKMQDAsGwck8DvaQKUkc7CRwFB8Mw1CL2EGlLgwZDURQdDj1vok7nRHFUVxLFcXRfAsWxhGcaORGUaxxHKxxvHUex8lMeR/IUhvTIkjSOskkSVJUgyXJ0WSfKMfSbKUqxjK0sRhKksy49cty7MD9TDMcrzJMz7y/M81JZNM1zdCE3ziy82zlOqPTpO05TxPM3z3Pk1z9P8z0DQUyUJQsw0PREu0VRcs0bR0rUhSMpUnSkn0mzp/k+kJjwwf8AI+6SP00kRxs4EcTpAc6RQSkZhpC76XwtM1MolTiPnuY7lVCj1Rs4iVTI8fFUpHViQH7VyRVgkFZJZWlDOPTVcOyY5BI+niQV+61gpDU5/nAUdVI/Y9RWUkNmI/ZyV2hMdbU2kCDMpbNRJDUtvI9U9xpIeNzpBdKPXWlV20TaVb3if95166N7W6kFv30lN+pJgCjK9gkwXfajy4TbGF23e+H3zjiUYnV9Y4vQGDHOT77E8P4AmMAIPqAAJnok6p4wBU54s6Dg9vk+RPHGoBADGY7oGOhQ/nOR4fn8HJ/mAHNWW+6w/mG7R5vkB8PH8P65acT+WHfUB8Hmf5z4xRmDEA0AfkwD+YgCR+amQB58MWeK8AAxg/B8HgwO2cAPkwMdT6NXbv6UeADnORgHngBOpBzvjkgOYQAngYB4D+o7BAfwhGAcAZzAOQJ/ACfDY5nlTLF+kDOgID5BA+SYH7mR9TgC0h4CGwQIH8BA/gJwA+CIMwPkMB5IaOxmjmCxfGGGc5EkOcEJmAHHg+HO4BmAAB4EAMAfgc75wEOYIHkQQggnIIYQHwGB8EwPwEbXLlAn9da0JmB5NRDk4Ee7pUAwxxk/MEHgPhUgPAeBwPwmZohDlJI6OMpQh1YNKgOOeDA4zAlDD5AtYIwXvh
gDgAB2b5h/tZgmIMYI4SmFOIWHxwD+FHmWby7EHgP4JCPJwRUY6pxnjDHiQgwQ/AcD+A4H8BwOTHkYIoUlTkFiKr6HiOeK5SRjnjKGH4PgfjsDDGGMEeAcAcPmAYdV3QDxhiDJ+UwfA+B/B4IdGB1xZh8GpVJDs0RFSbxAIMM+DR8h4D+KLEsRwMy5ILiqRKKgj20QabTFooBQx/B8IgR+MY+A4B8JoAgiRHQPjjEOT8cY8D5RzjrDZLCgR4R6M4Q8TwP4/QCJuqBpTSDoyGH+Bwf4jgTlyOIQMcRc4qAPKA0ol0HClIJksHgiDqR/wuflJ4B4AC5yiHmUmIpCjpx0B4ROO5Y5Xw6D+J4AAB2xy2I6TweL4JdgxH+BiX5sw/kcbyI8cEjilAALw4wH7TRxiPmdHIPBanUx5GHNUmgAGcGdm20Roh8hAB4l5KxSRlhwSwOtOGdE6o/u6naT2eDPZ5T0EcAGYQjxABHE+4Qg8/Z/kKAHQKgh0R8xzoQAGjdCwYB8B/Nc6oH6IO6iuQofAAqLFFowlWV1HCYznLnOsio81OTuVDSaecvwMUrAOMcT44TvxUOw0of4AabTOpyHkzgARgybp/UEtU4gfjzqNFgfg+AfVLnEmqp85pZqbpCPM6s7j6M9kOH8RwPJhIVrBQOmIx1ZVmiu7qylOQ8EggwMGatQQDHfEePeUddyFSFosQ+pqlYcGLj3OcH436qN1sKT0Y4+5d2JsXJgH4jpaCfKVZG0J5R4j3HzQKy8cx/lzH/ZuMtQH/2gHHaNoleGe2nkzX4yx3odQ8IpbEnSoCexGlRdYDzgJ728serCR542lDDuLZaStOZMHYjY5yTtQXPxroFUcfjW7rWpSipkmRNBH3eJ2T2BA8A8QKgYD8h5HCBW+VPFQpMKxwjjvfBYwI+b5A/WCIMQb4L7v/Q9AGCdpB+FDtODw0V2DjFoLUWxsZcBHnQNOXcwQAB+GIAAD4H8SyOAOnU0SyJf1wOarQ7rHA+Acj+MSsEYD33w3Oa61IvxNbp1IwVjDFqg2DGfNC2M0mNTTE9jydEAI/psY9B+Iye9LBx2+PtFQ15ggEt5oEPgCJecmm5uUOAELmR4ZUOY6HLF/B+GsM6zPACTlLHkOac8uZ0jqEeQ89lYSFyJHUH8rQoCpUPLDI8IfTR6CRIW1Di5AZ/T7rbUuSvR5JdWHtGe2jV5XtYkkX8ibW2t02IFQmfe3+vtfoE1Se1xuxNioDX2cjSuyiVa52glPaafNpbVRztfbCNdtbbRnt3byL9wbhShuRN249zIi3RulDO692Ik3fl7eK0d5ru3rvbe7Gd87632/nfu/t/yt4DwLgdTuC8G4PgHhPCuF6O4bw7h6TOI8S4mkfd3FT3MGXgvFXhJ2QWsJCzg6dyiwNW0kSNhaoDYkHVW7HkiwCRMbPjo1G3GmNq65WR7lJIR+8fJHyI6BYeTcj5QSEZ/K+TLlauw0kNsCR7PQ3zZeIx9Ur0X4tvqBIOgXK5kSZXvQ+g9G6NqHpPLumEgK30/mhx2NMIYUSbV3YetHx64Svr5Ie5LYJEV3ofZiT9pJF1lB/bSPryY93DwPLyPrd6D10kvdyQd5513tkZZ/FEj8B3jtZxvCHZY7zrna2vAkk8Z3UlXkO6dFJCrjvqpPL6n8d4LeGLwfjhZbL1uTMgPtoZsd86o/jFztGAHEiY80PGvHGP4MY/yODnj4PkhYfx/T2Os1FupgRxj8V23JTUpGOfZD8BgYJQDggzGfKdYIzDUj8BHp07w5xEMdEMXNr8+iYkuziPwAYnqiLdHAH4iWEOyq8cPgVsbcOCAgdwZkkgrOGQAOhyjy/0dS+GleHkLwHAAcMQCCU2EOHOBANEGAB4CAaee+O+ck/0Hib6x0ECGAdwHgO+HENmzSx0D4BwEAOCMiBAkENCI8/UMEAGbyca/gB4AGH8AE/oD+pYIcNwM+/0EgciVkzUD4AgA+EIAMdULwTKMqYcM6jSEMA
+AGAegEEeVY6OEGHgVgHgDCHwDCH4BgHiHiHgjIBgOmPCB8cmE/A6AELaEMA4dmU2GGMWHCEDDYHgBgPCD4ASEGAOGeHC1IHBDWBgH4AwD4wWAQK0AYAeAIGeEeEGVkGeimAGHAGHFGHOECBxDaBhCQEeAYcAAQB8AwqGEClK1IOmAAB4AAAeEAAAAAMIQaUChyVIx8NEEeAegCIrDIgKwukqHgKGjjDgIQKcbALkJnA4GeJqIiA4xaK4VOHOJ+HwKci+h2h8GeHObQHCISIWJiDiHwh2A8vwGeIxFrFAgKokHCg4qQOg/qB+A8KkB8jsIEay1sH4B4A4AEdOGAHAji9ePar+j2h4lpDEgEJwlwiuPsHmHya2DgH4DiiKk4LkH4LkAclDEYA+GfHeEYB+U4K4vYHjI0IWl7CSHOJ4QTHQDiiQiUDmjlH4wgGdJUVDHossHxHMaaHmH8Dga+aut4E8h2tQImd0GeVdIIA4a7BYKaU+PzIcliD+E/IipCGOQAkFIsYYa2+JDgaRJubAPst4VABPHjJOB+MfJWZouFI0uQiWbTJoH+HDI4l4A4DPJ2jUA+GZCSWwU5KFG6kgH8DjKTH2qiiWqHKgWVAAjYjcKbIZIao0qglknSLcnYvAneb6HgA+HAABLMHjLRJBLWJ2BGE/JNHfLkU3LoGOHzNLMYoPLxLAEALxL5MYkOCejkkyLaGQMZMMYSHmfAn2abNtMbCTCKIeiWU4K2HHN2XAA+HxKojajg5AQGUCo2sBM6pCsgncngbzNsOjNQH4DnNUl+LmGODnK6GeiWLlLmKAoJOYhOiWHBEZOq+yDGH8nkA5Aea+A/BaB+GAD+RAVBOkHnF3BfOXNNOapYJiq3Okt9OqcIEBG0GGlKKdCyQJK0o6H+o/M8iqqupIb6dTNKDnPQaRPXLVPaH+GNPgB/PkbBK5NlPtNrQjPyD+EBLdP6H5P/QDCBQIAAacODBe51QZF2/1QgDnQkGAcBQqInQvAuA8ABQ3Q6zVMwS8MsnKtaZeImsGtmZ0OSVYkrPTRerzRiHMDmw9RtPpRyPKuKl2rYLkEHJmK7L5SGl6BjOCwIB+GMOqtZHoE+rsI8GCabTRQkggnDOjSqziTuAwAZS0KYakYcPxGBO4qisDRKbqwPNQI+HwZ5MWiLPVJALkEcLmHEDzThHfMJJWbquFVKsykXJmPpN7L8DzOCA9CTRqOqPpKC2GsrVLFWH+AdUfRxOmMpHaxZQ5Uu1NO6uzU4h3K6GPU+HjVCHOzmv6IXI5VPTXVVVYHzVezbRwK4mTJdL6szMhVxJrL6kOIYa/V8adGuLdSWYSKUHxNpKKH5KRCQAcsVWUqGt8J6WGgWa7Q7HRUyTQMuVoJiu5WwpCu+nciMIUIUDxXAIQa3GmD/V8XAHwnvLgE5XQgIgOIVMYkwA4D8lpLFL2ISuQB5Eqa+iXXsIGWPWGoWwwaaIVI2/oqCt2nDHeOqE+t8rMLyD5QEKQhiMfC0M2f6IwLbYpPcJ6J8MEKcvHDhDjY6H4f6LnGlWAA/Ugh8VhEHHAZ6IcIfGMiCVgHAKcHwhCB4D8otY+JpRqGOxPQWPKjchhHuEfHAKekxWQAegbaHG0YSwkWYx2A4nTQ6MJYbAKy/QLamxoxszMLwMIx2zUHhHYAHa6H+MRbAmFHjQ1RwEGzqLux2Z6NyIeL6jINSHEpUABCkD8iTY/FzRqGGL+naU4o2LuLuaax2x3cHFyEBH6ImrmEGt9TBcYBBGMKYx3S65q9oGApeh4HOzHcuIUMXAkNSAGHgBiO9dAdQ0mPlWAGAkwU5DOIkz0+ndYEALkGADGs4rGAO+mrOLkAxQIACGPRqHCcJd6LyAmbyzwEffwAFcGEAOBSmA+EDfWzlCiAMA4BgB/S3Q/WmQI1mPw1cMubKJZMw83RAQLg2PgG/g6Mtg+JW9eH6r62zhIQI1qOQ9CJLKS9HhFgyQG12PajEOQMoJS8kOmxYR63Q2DKzO
4RgzVciS0QK2OPu2SRwmxiI4wSM4vioMziuSHitiy1xi4R/i3i81hjDimQzhLjGJaQ1jNjOJBi3jVjWTvjTjeJLjbjkJJjpjrjQOOaqH4aAH+nC+kD+rBj9jxjyOMzWH4cEB+BgB5CgGMAONpkIJCUzFeAQeOeSBEAyMAEOGIoWBBkjjYWkcAKigjGMgCHGGGhjk+SKOLf0iUh5GKxMHmiJHRlVjgOMPkPtMgIzA6H/lkHzlplqUmPkQSyAIzGTlOHy+zlqXAOOPsKBmKIwHO/1NpmVmDmaYZmhDIGAAnX7jdjeUnmdJiwhDIGCHnmpm9jXnAD+DhnFmMg5nNmTnRjPmED+npmyaaILnjmXmE/CqjnGZZnzmBlVmEB4BwgWzfmiEeLhNqKdmsxfFuAAt1ncGOEDNoHhF7odkMMSpXmiHABjNoHwAJKxjwUsQ9ic1EM4YFkJjBn3mW3Vpc6jpg8HplpnppF/ptpvpxhHp1hzp5Ydp9U1qBqDqFclqJMzqMPZpZozqQ7ZqZS9qdqbqg85qlqnqoTFqsMxqVoHqxqzq4Tnq8o1rAMrq1k+lcrmJViDkjrMtTrTpXS/rPiBenjkUDhQ1lrlm+MtrqJJrbpJryJfr5jrroV8H8BmI8A+oWH4AOHGAGU8aRkDphsFdCD9k8byJ+lfdlF7fiMTshr8H/FaAIMEiIHiHAGCHABAAwNshDFVpdrMx8A8rGKGgttG0GnAD4ZDrLreh2QqbSjijFl/binAszhw4nrMIft3W4H4COKDHREql6rZuG4jsELkGcOqfAf2OjHRJgA4oPug4fukD/J8akb6bwACIRu0hPu64bukH/vCngA+clvNXbvRtZr9umJ2OsdDmSHxu0+Js4MqKBVUIkIM+Bmpv2mhu3plsEM7LZuQayHhviD+B5uFvpv+W5LYIRVIGfwfwMIcotiXsDs6A9JHuxGgHBvjZmhCx9pbwqMZxUHwACHmHhNLvjoiMTZbxWLMOkGAU1sSIQDgAxviECLkzTw/rwQHKxyLnSQJyRxwPxKxrvjC3GU/sBnnyXpxylyvrE4zy0LFrJrVy5y7zBixzEQNzJzLzMJfy9rdzQ2XzYSBzdjFzhzfzkJRzVr7zoThzwJNztxBz1jnz9z/0AST0FkL0JlX0Nlt0R0T0Vz5rn0V0P0R0byN0f0lyV0fmZ0v0ryr0yMtF3F50/1B1D1F1H1J1L1N1P1R1T1V1X1Z1ZvT0KLFAIPwtsUWG/qiLN1kPv1oUR1tqr1wQz12UL16OKUD1yPh2CUF2Hqv1+Qf2QT/2Vq6Mr2MPb2cT52hq/2l2AUd2vrD2z2b231uLH2mPZ2qTz25rGMt3GPX3KTt3Py32YQd3YTr3cR33T211r3D1j3v153yLD3UOR3kTl3pzD293j3B193F332F36LB3+OP4CTj4HzH3gQb4gTf4kLB2L4V2T4YK/4cON4sTd4xzP4oQL5CTX5Hi74L4r4P2J3t2/3x4R31NKWcYqhWJIPGMFygJCtsf2c6JEMCLA0607pURz5TzT3t5ok0WX5wI95+K956ci8UHf6CK/6GJoK+HuQf6Pzb5nFz6WXR6b50K/6j6eI/oULD6v6KJQ2aPx65zj1/6UI95t5sI95z7MJZ7K8UAf6qK97UK/FqQb7e2j5f8D5v7CJH7v52JB70Vj76Jf6uLB8MQJ8Hzn2Z8N7p7F7wJX8aWb8eJgqI838mQH8rzr8KLz7mgr9RFJlONiHAPCDCc7c6J1pOJN70Z2OmrcgyV85HQ5EIDgJ0Oo6ol/9Cf+MWOrDl50lBDOKaB+jkb7vf9qPZ9Lzz8uMYOqGFCKmwNCjHdlCYD8AgDCPOCAk9+kJJ7KD5BUbArdBmI8OCNf+zF6CADBcoGIpUAF+KARCANSfENyB+IACBwAXggDAH2AHgA+AG8EG/4hEYlE4pFYtFG/F41G4g4I5H3+n4gh3+AAe/2AAX4AB+gAew0G4AAAD8AB8ADCwX+8AAPA/JJ
A/32/38D3gfJpLH4AWAAJWf5LCAPTQAeJmD0BU4WAA+/g+DwBBADO5tXAAMABPK4gAdaYIA6DcY3GblII9dYrIn+h38BgPKCA+AwHwEB5g4HAMB4PA8AnDOngCA4H7/QaHRXgeDwPgYH3+D2BRwgP3+BgegwGwWAcDwHAcD0Oh2AYHwEK7XwAwXgQJ2GA8HgeATg8DgOAYD8Kh3gYHgF7xz4jdOhF7v04hekO/A/cN0cD5r0GwwO4HgOB4PuA4Ye8B9r8rIMvRjw/D4Ps9oMyfB/XgOD2CYJwM0DgHtgQ7dKO24HtScJ4J2PgPK+A54OIHjXgeA5DwYeCoOsurpQ6ibqus7DtIeYJ8DgPzXmGYQHwY/R/PSv54B/Cy4vi4gfn4H6Tv8fA8D9BMXGGcEfvsk5DmHCcgq9C5gnCfA/nhILPxm7yvtPDMUQ5ECgw/LqOy7EgPxNFEVA+1QPxfHUIHAk8aQuk7LKI+QBx3HsTxSD7tM+cMiSMr5/sNJcEgOx8oymrsCH+cErgdMhDnHFEwLjL8wRE6bsHwH6HmGfAwH+zrVRceEdoSD5wRmP4HgMz05swpB8JMz88q5HavT9IrNq+eFBnAr0m0MpspK8YECH8ADvRqAABtlZNKS9aCI0w6FNU4f9PVABgf1HBh+B8AU21VVlXPgiCjJqq08RRWz9zVP8gK+YFfWBCJw2GeCvADAlZWUttmqaOFpI/S0u2o58x07LdHtVQ1SwrNq4Q2B5BXKj6hs/DZwB5DkfYEf8+SHXUgwVJJ4V/Qs/IhKYOP6B7E38z1Ip3gaOYLEGDrxhNsTNhhgwzh70VQYad1WQDSTnjJ/nwDmkY8qCvF/kUf5I2ElXzlJw5WPzJwI8l/SRrUG5qjWbw7nK6zGv7uxU05hmHbwcD9CBw7EHysaRc1BQmfmmqgA88j6oifv9eGq5NBFg7qe8HA5u9SbAX8MnvseyItszrbQuUSQulEisYA4B7fuOhbqnbJmAqBDnBHaN4wQZgHxudA8BH4eP3C4DvDkYPvBq4cZS8idh4Djgcg/ULtjCePy1vPLcw6fNLi7FkJOpqVg+ADDbrh4AbpBqeIR1XWeciuMEAraFM83MfgAqCTEB0XeLB0R4Vl4Kxp4m8CJ5ZSwFYDgWkkaUHys1egdB6RQTsFRH+MQAKyCWkve4PwHj3k1PgaOOB8brSNMYJSSsH5LB/iAROUgqAgCpOGK+/FJT2HFDwfyWV/iyXkEuGAHggkA1NuWOizWBJIIFupH+MYv4gFGA/SUt5cCj0oJSGAowkb5HXEdLgACEZEISJRLGShKQH4VGgbevkATKR+IziMAE/pggYPIGAD8YAfEJQ6gKwOA5z4fkfL0pcuTGIeEUcrH06EdS8R3I5HlLrWkbyAj9Io6cgi6yEI3IZEBOpEyMIjH+SylYfJiUoUuPcmWVygQ9JtEEkjrD+LhJWTMmJRM2lIh2Ux0x/Hvb1KuVsmmByQI1LGS0fJby/bLK9EcwJfTAmMRORxcpdEXl5IyYsx5oTJLjMsi0zZFTPmhMaaRQZqF5mJNmcA/5tl2k5L+bE4ZWzjI/N0ik1pATnnRKCdRHJ2ETndH2eE8ZLTzI3PUiU94eT5n1IqfhGp/ERoA5agVA4+0FOpOWW9C6GPPmEdN9BM6MUZo1RujlHaPUfpBSGkVI6SUlpKxZSlB4sUmpZS2l1L6YUxpDSiPUpZv0TnTRVatN6cTyp0winlPZ90/Z1UGoVBKiNpqNUehtSXN1LqZRSXNEJW0Sqil2hxFqVEhqhVeOlTnp1dq8tCrJFat0JbJVascjawQKrFWurFbYgVvrgh2spFKz10rrWyqdNpzV7gNXKPFerAF4ruiGqkoq1WFWjX2WFhLGS4WlXmv9kUwWHIlZSiNlrL2CkLZCzkrrHTDsraE61mFp2JlBYu0zl7PSRqMV4iJDyKDjImP6OYxyIiPIiPgiqUSJj9ItHOr9o1MtLIoPya5E0+EQtoROZ4vyKuMI
gO8iI/iKobuDcOpFxqd2+uZcsiVsiINEIpdG6ZEbrEQuwRS4BErhEVuItK1CYabXgIlcqd87SI22vORS6RFLqD/vWUQio/EuTRtfLu5FVb+EQv9dDAF6bq3XwPgmY99YoX3s2REnQmA/ARUiXA0heh9jPwIP8c4/7pDvF+Pm6uAx3n7H3e0iQBx+BhHwEYD4+FypcbyRnFGKLi2Tk5b7FZELdD/xQSKtEnyItHEAQoAY4yCDPGMCciA+wJj/EmP8RI/xeAYEMB8QwfzYj3toOwJ4iAwj+RESsla6ljE6EYJ92A+Anw7DMP4GY/wYWBu9UAfA+RAjxH+D4lUqJUD8lStKXyrRAltACOMuA5x6MYH0HwfwfNEh/F8H4GGKAfj2DuPfIg6R5jjAYP6Xw/AMB8AIDh7QggH3CHGM8Z48x44rF+Pwfg/R8j+v1fTBczGlj5IWP+CuwCVABHxFvSBECuD/J+IcBgMRxEnHuPG+I+NOj+D8P8H4vB/B5usJ9Bo98AjjHyOcTw/rqFPPoB4DgMBjgfEfcIc47xzjzHuUPF4/h+7AxtWTY81Wlj4BwHjTsa+GD4DCPGViXWMFcx8I8YYHA4jgM8Pced7cYOCKgL8Wert0jzH+PYiI5x+jvF8P4uhTxwH6AYHEY4gR/3CHfv7fpEOB41D9hizug6ij4HwPzhx8weDxH4HkfA8b8LQ4uQjfQ4+ODgP2AfkJEMYB5H+L3Fgp+UY+cYPXlvL+Y4BKeAA/QjOcNH54O8ee/bhC/KGUMPyctjdGqV0jTofOHdNH504OdaSJQoE+PPrIf9ej35EP8PfYcWCD5QIcD7jHKjv7VeS9nbg/jM5wMAD/cx5667vyonnjtBZGvvuAPgfB4j+6aP8eY/A4dE4tjcT4j/GcdB+OfkF7bfdgF5iwXPKBfifcYPZDnLuYD+yIRAfgHj9eiGOUXuY+RnjH7u4y6nB1KYarzoXcQeNEe1f7TT3ZEQP+96wHEcQH/hdduQ4IH2LNzD7xd8wf4e5pD6DmLJL6gPzt7nAH7fbAgd4fIT4c7GDvAoQiL3SuzhKbz8wPwHD9K2wmQPDR7qi64H7a7jgcYB4Z5yjb7Tof7cYH7UL/gP7/we4kQP7dz6LlS3rerjjfLW8BYfsBzgQobGC4UCi08Cy/kDAHLlQHi2zFYfzaUEAiIAAA0KQBgGAQYQId4Y4ejw4f4fQPzcQP8MIX4Hz/gX7zL/4Z63AdL/jmLGAiIlYDAAADAGAYYkzuYfgX4eblTX8FTT0Iivj1yx78wP4GL9LRC/yra/8KImYBwAgCAQYQAe4QIexOQcYHAf8S4TgPwzgHAc4TgA6FAY4YJHYRgcYYYRwPC95kCjAHACAIYtZty3AfAY6GAfwOIfwHAfoHj1paD8rYYP4CbYbpofwOYfofcVRSiXwh65oiBTbAYf5Tof6SjDy253Aiwopoq+y25cok5tcXilLI8X4AbcDwgOLHLBBgaq0Z61oiT8kcIfwP4QYfAPgHjpJ+0c0P46EdUdgisdy+8X47LiAHBFAfDipEEfcfiZEIyezZMeD7IpwpbOUJ8ZKckhMdshaf8hoP63ROxQTRiWkikiyUbvyp60skSxsQK0jDokApq+ayMfyx8k0k5gkjChC0EmacUmo68m8mcmElTB0nAkEny48mUoIi8oancoso0fsnSrkpUpchUkisMp8qCHsqSt0qkqsnMq6ucrMqspCoEr0qEsCossUpcsipUs0o0tEkslcrQjEpqi6mUucukusuylz9hnBLsuUu8vsv0v8usvJs61STK1kk8tkqct0t8i8riwctUoMxErExUxYiEyMrsycyky0x0zExczSz8x8nEzy2E0EnsprJ8dMykuExsz8zkt80TBk0kw800nk2U1c0c1srU17ZE2MkU3ThU3ki03yb04EhM4TB83Er82c4kfk40hk5cdk5sjM561s6Mm06a006snc660M7Mp0xTzy54iTCK9i3K3a3q38fKn02
zBgmTviXq5hcs8IiK9DAS9TCyP09KTM7okU9qxUbYiK8zCQibAIibGU+4icZDFKX8/bJUoE6Qf88c+bCc+rCq9jC4ibAqnM9c3bJa1c49CLLdCdAs+1Cy5LDFDKUVBgf9DswoiTD7ELEbckpzE7FLXzAjF7GK9TGj8RQTHLHbHoD9DI0klzvslMolDrlUww56XzKTKjKweDLDLQoTLrL7MLMbMrM7NLNbNrN7OIpwqokxYwrgQYYIBjPb6aQFFQY63wRh1KISfAiTSTSjSzFTTIiDTcFQHzUDUTUjUzVAiDVTVjVy/LWLWbWoB66wb4fYdL4UdaHlNTBARgQgltIj9rara7bLbb/7b0ZrcMFjczdAkLdbdrd7eLeZ90HLfDfS6wd4fbf7gKodDc34Y7BB4oD4zq/YqLjLjbjrj7+zkYf7krk7/gkLlTlgiEATmT6h9zmoH7m7nNGoe8B8BlWNI0pNWgH5poD8BNXIrj0gR7+LrSWdX4f7sDsQX7sj/jswf7tFZDzrth9ztwH7uAY5o661aTzjzlasXqcr7NbIH4DwH9FjxDKL9736DTx7yLydc7yz/jzDzS9TzpixZAPj0L0dIL2znofolNfccCvz7IDxjgR0jdXIiAA73tg74L4cZtcrMT5D5T/z51dz6NND6r67nAoq6zE8B7DUo85S3VkIP4Rx7Vkraz+Djj+b+r4gf7/D/VmD8EALzsAhkEA1ecBAR66wfIb4c9YljqmsmNoBjgRlolOE8kEYOMEsE4eEFLT0FkFwd8GD8EGcGrmMG8ZsHLnAr660Hwcbus/Vn5ULTwRk/Mg4iUKUKkK0LELULkL0MEMUMjF0M7VENUNgf0N1ZcOMOcOoB4e4e8PAdAY4ea+K7ta0sK3QRlioRlSr9rakRkR0SESUSjCES8TMTYBkTsT8UMUdecU0VEVQlYmcVsV4D4edfwdjHtECqV0ssq3QQjBARwH9noiUZRkBi0ZwiUaMaYiF7QokawisbC7SKBTceC60ZlNNwF5wH4TwH4oCgIkFRyQF8lFNwAYA7RCC3lsojl+CPq5VFF0lflj8aQfhbY/toojV/aHl/t+dWU4a3QcIp6FGAyh6UCJ7nU9V5ctOBzHKLmCUzM5TCEdCZ01MqODEtrFWEK8UliEU4uD9FeFGDszuD4fgB+F9/OEcreEsxIouGt92G8yuD63GHihWH2H+Bk49B2G9FU5EseFs/2ImJWJGEeKGJ2H2KdD2J+JuK+KuLNFuLGI052Jcs+Lk9+L2HMyWKM1M7svkwGNmNuNyj8wRzMwmMmLeL9B+NGD2O062MMteMeEWMuAEmOPkyGP2FOJOQuGE12RGG2KWReHuOuM0y+PGGOPU7WQc0OR2IeQFj2QWSeRWSs72T03OTNgmSGQMn+KmQ+UE07aeTdr+VGLWVWSMzeUU5OVc2k3uUhmtJU4OXU1GVxg2OeP668+KRVBKbVwGVN6q2YitCK3AitDt/Bmk/CcNFVALA+Uq9hcua9CVAbClBTAy91wmcNR85U+VBGbOS15M+lEdCuci/OcdHjIuU8ojMIi7qcKFB+ddEQiVA1Eq5meN8+W7MAjWfEkIiDRATAP4SNcF9uboiVAmftEmd7JTHIfwIwH4fAL4jWeVIuekpIDOgubNJp7TSxTaIjLa3jFDFbMYYxNQvYQ7NQiAd1edLq/NL4qzqxX4GIvkjYYYcZCUieeeTi0gDAf4DAHw/YH5SQeYmOOI6aXwtZf7Kwe4d4egMdO4GAfzQIDFPbUYX4P4e4OdP4f4dGn7VrV7twADWgrAB4b4X4RgGYeOmIfYfT+2cugeowCADA8YA5SQeQALj2XbagzxMgA7bMEtTdtjcTclUC6wX4ej/4vTdzeDeVZYP4+gBje4Y4R4k4b4TwPbujgIfLGugWWaz72gP4AIAJuAQZSQeAGGgxMDqokLjTrNX1pbTz/IX4WLlFYrlZDgcbzrIgp7TtZznAA7FAb4Xz/gfbgIfe0qpuW
+1JDAcYY4eO15H9gjjDq7rI0gC7+y3xwUXYX4UblAfj/2yNmbmIvTmkA9aAfgL+5gZ+526G6OvG062EMAD4Y+6weBSTpwPGkbfVg+zAA7yDr1ltc4vj/jj1iG9bzwoleT7CNoeAD+5lRW5++95Wj6oAfgP0EW/oQ4fHADpWbJVrxe7weNlYf7GFhbFgTby7/weRDlfLmIJ9Qlir7AeAB4Y+5YffDO+2jrhGW7BEEQZ4cfEfEvAWwb91o7+T+nFlYG3YXtp7/9qL6NqfD+98BIT/H9VvDXIb8c2Z+4D/JAR/EjqPE3Jt7on4QcEkEzbrb9pkFbFjmMF8GNubztuxpbetZ/M16O5nMHIW03Dro4rnMwcfNHJebNw4A0KsK8LIekN1xrocMQP0MtyUNIH8Nb6Ny8VdzMOgk0M25lzofYee6HMTou/LBjGAD/RPRfNXJmX91oAERsR8SMSYyoed2gf4TgP9ZwPMTwo1gQYZHYZm6wT0VJLl4GtcVwrgbwA4DwPIfIY4fJJ7RD/O/HQypTRAR/RNNfRnNiEYQw0cKoYZE4OLAYcYh5Ewf0Tgc5n93S3ARwl93xLgfwAQDAIAfkOgEAH4cIY4BwHJHYfgHJ4SI3beoi4/b3cHNPAOdIi+BE7E2blTfPJG6/ErT3ceA+TGW623i4cYcZBgcDp1ivjniXj3VjZC2wcfRJXonnk2p8fV9/lXbip7Ffl3JDHwfABDpxKnWgjdN+XOgeS80vouWoicluXvpGZWNOX2VuU3hcpPo02vlc3/qvonq84frPpnreI/p2PPr+MHpOMXpuWORvs+LvqWV8onruFntWOmWXm8xPsuPvuOYfufqcsPt85nqGfPvXto6GNeN/wvw0wHmaO2InxaxHxnx0RPxy0PyHyKy3yfyixny3y6wHzPzSuvznzqtfz/0Cr30X0aqP0v0yo/1H1Knv1f1iif1316gf2P2SeP2n2qcP2/3CbP3X3aY/3v3yYH4H4KW/4f4iUX4346TP5P5SRn5n5qQH5/6CHn6X6Zsn6v62UH7P237a037H7vwX8Cvf7/8Uwf8vzH8/9H9Pzf9f9n9vz39/+H+P0P+f+n+v0n+//H/P0//f/n/ogD/gUDgkFg0HhEJhUIcELh0PiERiUTikVi0XjEQhsZjkdj0fkEhkMbkUlk0nlEpiMklUtl0vmEVlkxmk1m0umc3nU7nkXnM9oFBoUEn9Do1HmtFpFLpknpVNqFRjNPqVVq0PqlXrVbgdZrlfqtesFjplislnodmtFrnlqtlvpNwuVHt1zu0out3vUjvd9uN+wE4wODlN5wmHh2GxGLg+KxmPgWOyGMyWTxGVy2EzGZwObzl+z2fveh0V30mluen1Fw1Wrtmt11o2Gxsmz2lg223rm53Va3m9q2/4FS4XDqHF41l5OT5HLunOx/N6FC6XToHV61t7OH7Hbm/d71/8Og8eA8Hll/n9Et9XrwvuvXt+Em+XzkX1+0g/H5j37/iOP8/6MQDASLQJAqKQPBCJQVBaNQc2sILHBsJIVCkKoZDDdw0rcLw4okPqvD0Qn/EcQxND8UQ5FUNRZDEXQrGEJRlCEaQdG0FxxBEdQLHkBR8/8gP5IT8yI+0jPnJD4SU90mPXJz0Sg8spPHKjwys70sO3LTsy460vOnMDoTE50yOXMzkzQ401OHNjgTc3s4N1OTbzo2k7NjPDXT01c+NRPzS0A0VBM/QjOUMzNEMtRTmRIsNHOJSCo0YyFKOjSTj0wptLMpTTlU8pFOMXUTL1BUNTOfVC01VVdWKDUjuVdV9ZOvWie1gzVbO1XSdVwwdfM7Xle2E79iJtYDzWM8VlPTZiY2Q8lnWbaTBWo9lrWvbD320vFuW7bz6XBcNxPvcly3M/V0XTdT+3Zdt3QBeF43lAd6Xre0DXxfN9QTfl+39BmAYDgUH4IrGDYPhCF2gvuGNHhWF4hC2JIThz44pDOMINizTY1jePILji7ZE1OQRBkzI5RlOVZIu
WWtZlUS5jl635o1+Z5xlmc5Rmy1562Wd5Nn6z6HCOdaPnmg5BosJ6Vj2mNxp2Nagr+qQ3pCYB+/h3qcixgAecADnwAA/7CgZ/a0gh/D+f58AOgx5zWAaDkegZBn/uNzooYABnAAex7LuaBbQgu1n+eBh7hNYAoMfgToHuZxpKwxnnCc54nygXL7PtOz7YcZ48U4Zxnwgx8byf+tclvSJmeeJznxzJ/nP2R/8JtXP9qgfUN6cZ+bYgnYoHrXE9YiRnnyc5+DzzXMl+f3CE+X/Mndthj8yP/oH+d55n50e1h/724nzzJ/dSf5J8mi3kHOf3mdmfJ/AefAfB8GQHbAPwhD4P5zn+PwB4mA8A8EGPMcDnTbjjH8HwA48AHjAIE5kfAPG3gTfURV9j7nmj4GOOMDwPA4BEEOIIHwQg8D/ckPCEYMAOARHmMMD5wIFBwCGOMB4hHPwoA+1oGcFyKOVfaHgACJQBjzHGM8RwOR4jjGOI8DwPg5uHB+PMc4hwHAcD4PMZ7dXej4DiOMd4j4dj/bvFsH4/oovGIi3yIMQ2wjzi0M4GcRhzifE8H4M7/wDxGEeI8DgfotDPTeAAeMgQfi/jIP8Z4z3wu8I+YYYAB32vvHOMMeY8RnjGBPHQf4Hg/BPIE3cYYjxPx/DmN+QScXGRHkYJ8f44ZFDHkbD51rs4NOzdePET4xoijjHOOCTzbx/jxHwAMT8pY0CAAfDJ/8xhnAAhi6UT8sh8yOXfD92by3ND4cvLuXo5xAR3mEPgeAI5jgcmTMt3rbQJifmfDF700wfjwiHGoiEgnlSUH46AR4xgYx0A/HcMcip9jjnOH4OcrpmDhji8B7wj5ZT0lo8ebM+p+DGBxQCO4cZsjnoNKWhAjxvzMHHIYX483S0Qa1NYjrlHZj4osOMR4jqMy+B/Hd5jpIOzniyI+VJunJDni1GID75B/x9pXRMiMghwAAdlU2JYxxHA8DnL4T8noTjgcsMOc9GRj0KqA3iJlM3w0cibUmexD6mADqeAFt0wKqS+EBJ4Pjh2+zGlKAGA0MZ1jwb7A+M7cxwRjpYvObFg6ngPH8B8fAfgfAzl8MMZwfg/B/gAPChMpRjjzHg8CsNMB8DPI23UfAf60F8J29461P1qk6tUdCnyzyeWvOcMedS07XJfr5bgm9tDl2dtk1hpbUmMNWQ7cRilxjfXIYlcpEVzGIXOODdBhV0lH3CafdRhF1lI3Yandpg13FJ3gYJeJTN3ri3kYFeZTd6mAXsU+0m9Fyb3L+vgUu+6p753Nvqvy/Kqb5YBaFf1fV/yjYGVbgK4d+7o4EXxgg6mDl7YQVngy6uEl6YUVrha7eGF5YaVvh5eGIFd4Kuzhy8OIl3YkJ3ixYeKLy4qXZi5YuML14yXVjRY+OF0Y6WXgvE138bXvx4ubHxNMj3ByDenId9sirkySTDKNvMgYDybf7J64sp2tytku+jLCAgAABmLtUd2VsdmVNb25rZXlzIEltYWdlSU8gVElGRiB3cml0ZXIgMy41AAALAQAAAwAAAAEGowAAAQEAAwAAAAEImAAAAQIAAwAAAAEAAQAAAQMAAwAAAAEABQAAAQYAAwAAAAEAAQAAAREABAAAAAEAARLvARIAAwAAAAEAAQAAARUAAwAAAAEAAQAAARYAAwAAAAEImAAAARcABAAAAAEAAIWiATEAAgAAACYAAZiVAAHV0AAAAACAP+BQOCQWDQeEQmFQuGQ2HQ+IRGJROKQJwRWMRmNRuOR2PRCLx+RSOSSWTRiQyeVSuWS2NymXTGZTOZzCaTecTmOTadT2fT+EzygUOiTmhUWkUmV0elU2nR2mU+pVOI1GqVesQerVmuVit12wU+v2GyUix2W0T+z2m2Ti1224TG33G6Sq53W8SO73m+Tu+3+5YDBUvB4WS3vDYmg4rGS/G4+K4jIYzJZPE5XLYXMZnBZvOX/PZ++aHRXjSaW6afUXDVau2a3XWjYbGybPaWDbbeubndV7e
5neb+p8HhWLi5DicelcnlWbm5Tn4rmdGgdPqT7rdeddntW7u53v4DueGZePyS7zeeWen1Xb26b33X2fGSfP6SL7fePfn9X7+rS/j/ozAMBMjAqywJA6JQTBSQQa3EHq7BkIoZCcKIVC0LoRDMNINDkOoJD8QItEapRFEcTxBFMOxXDUWwvF8KRjCMZwfGsGxvBUcwPHcCx7AUfv/IL+yG/UivvI76SS+LQgBJ0nyeH8So1JsoShKUppQjsrSvLMtI5Lkoy9A0wTCAEsTHBctzNNE0wdMsnj+f54TPN01TgAAPoGQE2zshsqgAAMQz7PyF0BPSBn5QlCsXOCCn9RdGK1NYHoMX9JT/NdI0xQ0t0FTk7o2AFK1Aqst03UsNy3VNTI4A6BmBVFWH+0NEGBJ05VnVSOTkfEn1JXUPI+QEngHYNJo7X0oWPYSO1vKFc2ZEiOH9LlZVK0NlShRFpVpZ0uW5aVAWMf9R27adRVwgR4WBcU11egR+Xhbsqzbed3TBT6BkPc9vTBdp/mPfsm2igRz4Gjt9IGe+EVcg2GXO0MsTrft0I5YmFYij1tTRS98YvJ9uWvTDQn5KF55HSTQzpYqBUheiO2Jlp/0VmEy0EflPnxlNGXqgRA3XnlCybch/nCi2hT9JtuHggVY5tUSCHxp2kzs0Oi5ogU+agjVwoHilmNDgt47BY7SV9otfarNzSTjdey2DbMxVpuFdNDZ886dutZ0Bcx/2Jtc0yrmlSSdwMx6WgVX5NvdWSageBWVw8vNDeeD5Zycs1rhZ/2fzMptnmXPxLs9EWrw2uIzatycZxtU5XJ239Rj6NWfORwbnsM1zrmXXWxLY/53cvc7MjtjH8D/W99UHNn+T+Wdn3VeIEb/ceJuKRG/3vl040lBdP6+7I7Ongyt0cUW+D/oej4sygfvH2exirD/n+n6r1+/8fyj8lvf/p7X/nqgCeeAZ5ICnhgOd+BJ3YFnagadeB51IInRgmc+CpzYLnKgyceDZxYOnCg+b+EJvYRm6hKbeE5tIUmxhWa6Fpq4XmohiaWGZojED8VyPBcggGvEEHgvd35FHhPDfKANXwAR+J6HglJ2LfCKj4T04wP7uIjKBiSnMH7jHzo2IqPBPQ+AHj+D+IAP483hRXiXGCMETiKDgUqOBLDOkpRoB/Esf0QHukVHEpUYDLl4RniUD8kLWHmEVGEq8YK8VSSAixIkf7QHHEVGOq+SEVx/yMiXJCRzryKiPWMuSKBApMA/EG06NhEwHtAlAliUa5IalNMQB+RMf5WRzkDK6U5EXkR9lpKJUcdF4DgbGyQiikCLrwiXL4A8wCLTDZURRRUwpkS1ZpIGYMzmezQD+PAP8vZLy2ixMGXJEHJTelaxaQpE2WAflAoiUcpXOzjIfOueEoZvzVixPCTcQSJNHH5KmRUopwSZZ/PIhw4x/j+ffH6gU+Ilx9H+IMcId5OETGHQkA8wiBSrodIIP8dx7g3oqRJgQ/wBxeH+L8QA/x+yji9P8e4J6RkRX5SZXwwAPjAAIAFZTJqTg/HwAMcAHx7x4mzKhn7p46qBp6oGOyuB7hHpmRBRDQBgPfVHU2nichgLkmw0p/b/Kwn7rGVCsp/qzoDrSlStdaq2pkreROV5nK5nArjG2u9cq8qhr2Q6uplq/mTsCcivqb7CqZsPYixKnbF2Msao2x6u7IrIsms2ypBbBmPsyY2zZ0LL2Ys/aC0JA7OnStHaS086LP2lMvalf1p7WGGtiZq11szB22PBam3B4ra29t1b62FwLR27NBcK0NxC+3INHca1dzLL3KLzdA+Fv7qXBurcO51lTEMFfJD0gQz2u2YCOJ8gq4Rx0lIPDghkYiFj8kuQtg5BB4kOEec4ic/4fJ5u8O+8BGWvDgvHeUgl5yCj7UTV8gd7FLKJH81MhV8SB3zISN8gQ776lFL2PGmrbwH3eEff0jDXhgjHvIQS816CBUIXjghlych8kEZevEfGDiE4QIFhIhCiBH4X
KIXsc+Ksbj/w6QfD5ImBYlIRkAgeSiIYvwSm3GZEccEHx0UnHw875D/EPh7EBHcjkKyYP/MJDsnMuyhjQh2UyDZVvtSTLGEWA5cJFeDJBB8w5jIbmWhOZ8pEKzZhgiub8gjHpyD/F7TR/ABGepVPWiR/sHvAM8eI5x5j8DG50H4HAfg/HGB4ODwRjrVxI1OjQHgcJy0sMcTI/09CfjJpIDgf6LyQyxpUODnYxVCB+MMfg4B8TLD+D8Rg4AcEXamI8ecYhHgz0eI8H4nxxAfvmB8eYz2DjjH4HAPlGQPABEfDgRwH88E1Irkq+ehIeVDVjokZAB9fs0AAuXRI/gBjxECMAPgQIxgfAgB8T+nQgA/GAEfUQn1YtPA8CBKQ4ARjHEgHgAY/ABiAD9vUAgfwALVa1GMfm+gAB+D4AGOAAB+CADwAdSAH9hg4jJDgR/FA/iPBBo8RgDgBji3rUEefE1y8dB4AcYADtvVDEQA/cZ5SKzw0GB8AgDxhgPAMnoR4wBhjwouPgGAfgMB8BgPgEI8AICIB4AQH/TQBgPE+McD3URDKvGfiQQwH+oj/7WnoYYA+HBwGCPgMI4+vDxAIHzrYGE5AMB8AgPnhvDB4BgPkfnW/DA/5SIzGYCA/w4E8AgH3MeZjnEAIQII4AQjxHgMEeAIQIda8SD8D/bcdjBlSIfo5gSJ0KyyI8D4H+0e4UqI8YY4Rx3zHwPwPg/A8j4Hh8kPAPAee5A+I8B4hxH9q90J/t2JPde67p9oZ4gxjiHHmPAfI8Rx/JHh8QPgPA+eSD98wHgPgPfwDwPgPI/B+fvA95IHwHxOYzB4iyD+A4B4A85iUqHOEGGCHC18/I+SHwDwD4/S/29y+kEeEPAsouLUmKXaHioUA+B+9wGMA+X4+m/IamHiH4DwH8BiH4DCHwDgH4B8/+/y9wGO7eA8+fBEH+7eE/BE387pA+H+98++78HiHwHGHDBdBQD8A4jEhwAwH801AIB8/qHyHwH8B4D+/yH+B8AeA4xmUgUgA4D8AdAK0eEOHCHiHCH5BLCLAhCW9Y9yGOx2A+GGEe9m9oIkH8w2Tmv1CCA+UgYFBI2wXiDwH+vnDVBfBi5jD8GPDlBmA+YFB3EgURBnCCvPDsH4DmzFDUDjBQH+1iZoeDE+D+E88lELCrFHC0B8CfC9Cs8kB/CxDI9xDNDQ2wDi2xCK5BE+2eT1Dk9wHHDsOqIrAwbeD+T0w7EBCC0kGOvckvEKvmHGH9Be/bEWw6GOHOEfEfEiGPFKGOU/ErDtDlGZE0HHGjE7EKA4cXFE1iE8AGH8DwAeHyxe1jC0A4CeB5C+SlBiH+Ec5iT0HO9k/IH4DjGYDjCtHQD/B4y1DnGBDuPQIqymwkpyAciyAAVew+HiVuoSAADizEH+GGH9E7Go6nGtGxAIEcB/G2E9IsjJB+vq3FDkVu0uHHJBE6DgAA26vevm1iE+4yD4AGHyTpHoSkE4GPHxFc7oBxH45iSlGw+DFsVuDnIOABJ4A+HAAOEfJRIbGEIoz1IkABJQZNIu0lI0HwAJI6oQ1DJFCwEeABBoHGA/JPJTB1G5IsqHJc1YvO4MAADOzFLU2yTyjJBNFG4M82ABKDKpCzKIHNKOZeA8DzKW2c2bKfIHKjKnKqEACPKy05GDAyiEyy1YAfLCVzIwv6HwB5LQzFJCH5JHKxEgHnLiD/JRJUTk9tEq3+xIYEoRHLNXI6m/MHJ4H5AIHgDyvmhxC0EwHMD5HyH+E5ELH7MlKdDTIGYEjMH9MghxB4APDk3DK3M8IkEezQwlEeG8SxIwpK/PEKDiH9FtNY/+3DBoHiB/AI2dJUSwX5HBL0oQHmH7HLIHE8z2hxIQvJAI+ManOOSkEQHFOXKQ8oH/FlKbDtOnFuH/OtELOzBvF83FM6OwIoGOaazg7pH8GNLnIwHPBNBRBVBZP+/aB+E9H8GPPlPpBzB2B/RIYFPyxJPaDHE5E8AwH/ABR+1iE5FJPW+PFSSkG
TQXOYEYhxDIAfQi+DDVIFOsD5MI/zQ1O9Q6InRjNA1i9w7jEi0k78zFRTBXE7BPRbS/EgZ3Po+jLpB40JRxCBCE3+HxBODnBPBODxGpSCH/CxFKE9BS+PCvGMSkGCHAH5OYGBQRShMnDTTu79SrADITSwGfBpQ4J6L2GGGGxxPGB+Ae+gAeUqGe0lDTEMHxAg/m+RTTAGB9VBTZCy5i7bTg+gEeVfEqGO+8E+9I+QjMHhBO/S+Y8k+Y/TFIA8A8DxWU+JCxA8H+9NUVKQGDCuA8AdUdOkHAHjW0HDOs/bCxB5BFUvArDtIcPWjaAPU8IEA+VGGAAeEAUqGC0kHwUEV8D8AAD45IADVaTOVHEhOO5iGA+u4MfeUrEqHA7wE/XmZyHg5DTS4+TqAAB9XvITJxXwDxYkD5GMVeHC/OHwBwZeaOAA4xWvQlYPCKADOs4xW+APIsGC6m+fHLO+Iii9XS1YEAGOjgafXjBOcWAMB+AC3ojvVaCAb/TlX+EewBTgjhLvEqHg4aE+XkjuHgD9XQhwUE3ib+H+iRIS4oD8p4A/a2A+Ve+DUVCeSkaOaBLdSiHij3Z5ZSEBOy4mHHZcmFXJZkJ8vUIJGacgKU9nXKlgKJbyIGxoH8YDb6IXb+OXcCbGzRCAKSxszsQRcWIKz0+eKUxRciNrcmIIkga0u9bwYAINcSytc2IG6Uc7c+J69szBckKcxQm4KSxiuiuysmukPldosjdsNTdwsfd0Ljd8NZd4sbeALbeINfeEsXeMQBeQsTeUNleYsPeddauteouwuuuPegsKNgZNEgIddDdCIEprUQAABwU+ymzQ40xhfAxgVyxIvWiBcwILe5NAKde3ItfiIRe+IRfEdwBwbzfOxhItfUIWwVfcIIYgxgXuWqmdcKZNA2RMJIp9fwIPf0IPf5fIXMxxFQwSAAaxdWITgKzrcNfkXC6wSwxtQPgFRDfrgi1lgmINgqINgvf8iBg2ZcUDgGIVgKINfjcFEMeQYMalFDdOOHhaHDheILhiILf4m/iJhsoSoWIhh2ILh6YKoQVJhRFDGIKpe2D+HjiQIJiUIJiYZ2n2H/ieoUo0IfimIJh6IKYOpriyhwaON8JGhxi+XZCaSwAPWAIHIpCDI8AeEGEOAeWMECHwDGEOkTUQm+aPi+Ve2IaOoQoUm4YEHOoUHwHCHCouv6ECvdWSm3G4BmZeB+3i2gBi1YqG2S0uYFDkHmEAHM2oH4DAeTi9kBXQH+AwHiovGZjA3Jjti9Zw3dj4UqqCHgGAVyAQAfXmXKD+AeAOEIAMiQAAHwCCEOlLkYZ3kcGIWM2IpXZFkxXsWqD8feBCHDmmIE5I7o4C4GE8BAXY1+5IAOHEARlSA+4oCDcNGBleHIAO0rlohw+GiQ5yAQHjaA5JiJU0I6Y8xWHmGOGC3rAQX4HgACToYnj44yTPWsZkV8DwACEOkQ2MkFEM6pjOHwAw608lmWB5YfC4EA5E5JZE8fSsA8AE9aAPncHCEO6tQ+CHnpSAH4H881nyvBn2HO4Y0rQueCHwDA7/mOHgBgH4BgD5oSO2WoXahwvOGOGGx/q2aMGAHgAwUQACpODAD4AQA/o2WIHwHBo9pBWedwDxpHl2aPCqD4B8D9D/pWAw+YA9kE5EBhWUiy/S7pVC+sE8Afq3HKGPk1CQUhqC/gT1qK2RqOHzqTFCHxCq/mHiHmHAHhVS/ui2rMI23ewO+9HCw0aPAVYYUQAPATY9C4AYA+EGGA18HgDw9bXhrjrnU7pNULr1Y8/SD9C4lS18DwhxqDppA/BFsRsZU7EaHhDUD8izSc6kzFso2xsvoDBPTQ/LT3CvtErQIyh1b1fvL9CCwk+TrycVk0H4BxVAAQB/kUabAcA+EIT1fHrkIvl2xfCrUKEfkwBw61r0AfteDgjm8vB+GfJSE8A/GWYCGPCNNY8u3ASkvrqM2wDnE9u2H8BzENCJE9FAKGM9vIUSTyYEGHECwls9vWH+APvbvfZ
YD/vmTnrka3vzrmGPv7CtKVwBmXwEA5aBVHte4gZomXB+GPwY+fi/wgH5GjYy/tNlwtuuHmx/Ezw2eC2SBwAfDZIHFGx6J2awvVQ+AGvrhwWqHhiAyFk3qkm6TOEO6qXLxtvxpFv3x1t9f8jDmWBhJw4+AcllCRXwoSiRRFRvwbpfla2/GjCxUVKXcNGxyqHPyvqVrYAAB43rwjBPJvxFK4I1xKxWYCGABHzMUE7BzSUQAfzYBhzdvk6qH4ARxtt0m/ztx2H9zzkxz5XWB9z/tfXsTkEBpoD/0MB/pevBDlydCuZoanH70fsp0lw10odxZ/i/TvMB05bui7zETlsVJeIsDhantaHCGDqkB/ZZBE6qHgBx1jrhkbENzvv9KVihAcXXr9wLCQimSlNPyRwY2fcN2OoRKV0b2Zktux0nswxVWB0zy9h9S2I1tL1BR0GOHuwvY53BvZCRxi6jzhAaDwD+EHpD3bi/1rCx3l46ZpWt3sHweC0bKGy3G5CAGOH3EbCMH+AyZo3B23B1uwHx2hFDj6kuDjRPT3FDzAI2Xlb123NwHiEevn3QD5tbteBxuI+i6rCR47At3ZjL3d5HITkpYuH/DG+iEDCQD8iSanKHBpFLTHi/wioR5tyj2Zk5Dsx+HwjNqVW0H8BnIHxBIR6KI2XDzHJhp0oQ9MHg+ao2EHs8DwA5VH6oHhuNxmEGGHmzg763t9WadPOEAwB/WsEGADCQ21lrDFEeEPG6/I0nx0oQaByjAIaMed7nCJVTFDDQ+NIHY5u/5JxHhaYC99JpU4bpotnSpPXuTy+j42Tp6uGD8m2N8r5Ft8B9Vf8z0tJxXd8/AcTO/uA4A5WQ+fpw05U6HCx++AoiZoH8/gTl1OEGEGHPk0Hg203giNAcDw9IGBWDWMx54aI/zGEApOqFkgIAAHgAB+/3+AAO+AA/oID0Oh2G8IUeD+h2Cw2C4H/CgO/3ix3zGxw/h/BIYAYYfgAHgewCC4XwfJMAB9KyAP2AB08P3gA3wA3BPXG/2G/35Bz8f43BGBQHBPgAfA+/IRVKo8YU8X5Kj4f0fBrBYbFY7JZbNZo1Z7VZ35SmOwDi+Ae8A+/3gEDggLACA+/gG/70D2HEHg/AQOIq4IxGoVdY/IXxIx/eoZHQDgD8D3AwZhKoMAD+AQ8YD+4CPO7k/M1cqHRaOgX9SoY/3AE3A5Ae+AJUqoD78/gDWgRWtDba/a+RyeRaeVzbW8Od0eUwelY2PyHPyXx1e50uZ3er0PB43+f+p4+va+za2/2/J77L3/hyfF8+lgvIz+x2JD9v8+T/LO6CBr/ALkEBArwOOtT1rUcZ4wM+cAQiscBgBBMKLLBDxnwur1OQnsMvJCcRINC0MRKsKcvGfkPQY5CfxS7sSRlGqynwgrwLa+B/AfGzvR/IMhSHFMaSJI8kSS5clSZJsnLHI0nylKcSyjKkryw+ErSzLkuudLcvTDMSyTBMczTFMszzVLM0zXN0pTbN85SVOM5ztIc6zvPUazzPc/QzPs/0E/9B0LI9A0NRLuURRVGubRlHUitVIUlSsoUtTEtUzTbwUpTlJU9T9HVDUVFVJUtDVPVFB1VVc/1bV091hWM71nGx9rW/UnnnM1WqoD70rXHyxWGsZDrIzQ/qGsFeLCjq12Wfg8PIDyCxcsRAKUskdzhAyqAPYK1WKsFxrDY6xpbZVmWasFnwcg1pLHbTm2qf8XV0f6GRyg12W5J9fD+AdwrPcp/4Kg1z2JbNln+cd2Xzfaz2jaawti6N6xdBZ+AZa+GX9J2AGHgazYLg5/4SsKHLFh2K4is1o3mg2LOckl7LBjWDLDj2YybgBw5GsuSrLlFyaJliwY/l94Z5mbm5rjKwJDlGdyngCPtjrFtF+f92AepRPn+7Nhl+/Rvn+feEmee+w5UgyC4cd5/v0tteVwH+wW1bRx7MTilG/Xh8tjXh+rBiwf6xfJ/8Ifwe3tXB/kfi0IGOf23KGX5z6TJmrGOeDcg
+ul8gOeBgLBjkYgBgxBkMB6/gCfAwosogBuoQIGdScZmI6QA5r+v/PEAPwYGYBwDn4jrVGOeJx+MBg8HmB69GAPHoj+GAPwguS5LoukOroDwYD+D5gD+v3Q2aundEAcXo81OlvD+j5wdGA5wR8n54WygwELmlDQAPAOIQAwASqD4CCIcQY/xhABdSAB27DRkF/EAHIlBl3PE0BAMgAgA0QjwXAPEcRPgCB4HkA80AgAcQmB+CAR6EH5weftDEloDwPAgB+B8QDhygI+WW/aCL6wHFRZ45s+DES2s/GOAMcI4xgrHHgAEgaOSEEDKoD8Bz0QAAAImAEQ5HRgRZJVA8cYwCijjHgOAfgMCjB/AYD4AgwxABhHGOFho43lDgDCPgMAeB4BOAYD8A4Ho2AfAII9XgxxBjgjmMcYa4BjiPEOB4AwHwPscchE1hpBpGjAiWOALgeAeRDfeeQfC7o1siGeyIj5QxwlBAwh4AJQI9l8AYA8QYgBgD4HgHgBYhovDgDgD6WqxxxjDKHGYmJbStg/JYRh5aEGHPKJgViPhJFqiOD8D8R4DxHlDGPMWVYz47DnEeMcDwHxHgfm05CcUmW5TgjmVuULVT3oxaQhcY84hjjzhaP8cI8B4FSIMAczhkQfAPL4IMYA4B8D4DwB8QiPhgS7B5QeYkxmGlaDwSSZYjlgIPPWdeaQ+JqDxNiI4H4npsiHo+P+fYwx8z7haV9sE5xnrVOuI+fp6YWvKHGOMo881unkRDPdcDDacoQNoPAPyHgDjhHCPwHAP4VEWQGHgyZdRgkSB5VSYk+6MrSo4V2j03x4lpjpNIfgcVpHEpQP6bIx1gHrHnSJyA8SirHnPOYP9OadkGE+R95ay6hL/PfUVeAAFgFEOuIAy5DB4D+Q8A+qFUpAGgqsQerEOR/1bHxV1lUdleIPH4HAgg/CujOo/Wcgo4Af1qrYHgrQAAHOHD5a8D45z0x2pcYAAL5C9V7A8QhyE/Vl2Op8UMf1hWQHvKeWFHbnQBlfABBYgVkiDWUGHGkP4ByCGEKpZurREjEWhGfaMedpbT2pB/OIeMHi7QfqAGNaV6YHPlK6McH9vCe05IOAF+wwAP04A5cSdM0DPgBuSQe5jPbDoYuiMAEZX0PF3qZU4cN23w3eB+YQfACLNo+IiZGrzDbzsNvSHNeAfB/jOvbaQv4/GBPLH5fR552y2j+vzfs/QwAJn6woRt0cNLXgcW1geTI7y63JOBg2IlRMIFum6sdYc/8L0DqgZG7tijCESxCUS8mJRx4nYdWvFYf8XXus7Jqn1a76j4D+Py21fQfz8ZOPPIDOWTj/kDkVHM28EDjR9cnOGTpRoclNdET8dh4nHytU3LBMAcA+APJPLtDg/iDI7iO0ExMyXpDyvApNKM1VKZ/jS2N6Xyg/mxnQeLYJDDnHyM/RsmqXU4XrcVCEdBhlf0IP/I09DyPHugW4D+jBDoQIiPAHlTiMjw0mAcB5DiIkwIoQ/MFnyWaevRWIH8yyd3uJA3KwQ4x8FxeeW2lE2JtA/1eP/RaD3lQt0WsBaskbX3FV4dCR+5RxuB2DUM8i17omCmMIOOg4aJgcQ8AORMewMbT2pQAOG1xBjD05FfbjXL0h4myVvAeL3lkhHPXjLOXr0xWA/NidAP0OsGGGOGvA8RD5jHHOWc4j+cuQ5qRsr3NdCD84DYZAN0RDxKIwUUcBAiCGfg4VGxRDsugA2uRfEZNbzb7DnagxBbY28iqhP6qBGaGR7HmP4BwHwHEs7Ty4gsCB4Rz6SyIQ/Oorgf2kP8QcmB+bNIvHeoAeAOFd4Efa6KCCevzNp0xHICCEkLIaQ/EfVCK9WIkD8APWS7dbJVnEP4gAfcig6UABI4oCdUV4QQQBLIc7fIKU1/D9fZDAhoCCSggC6iBfsvkgj84730AB4OUSSGAUuLg9suo8AMF5LB2kvxgOYXgAYYgQ5isR4d80PzrZoY1jAJJe7l73gcvMH9CNXhehwFedL
642gMTVPedAD/9L4HxPkn8PBHIgS7LAKwGMvz4TwpGw+pM59xUxIcAZM0ApRJWxARNw9xS0BgsxE5MwWBTMCIssCZMY88CEA4uxC5M0DZSsC5WhNcEcEhM8E0E5McFMFRMMFkFpLsF8GBNkGZV0GUGpKkG8HEAMHcC0HpUUHUH7J8IUH0IkIsI0DkJEJMJRUEJkEUJ0JsKBUcKUKcKkA0K0BcLELMLRQsIMLhEUL0L5CkMMMRA0MkMpQkNBWUNUNboqxRoBZBdBobWxlIgxfAswgpsAso65hgoypUIBbwhEOEORYkOYokQpuQtcPIs0Pi6EP4sD4cLY/wqhgQ5JoQshc4osOsRItURcPadxeER6NZTBkMQcRETZY0Ohcg5UTwskRppAscBRVJ+Bn8S0QkVMQ8VERTeERkUAo0WMSMWbop+MU0XS7MQ0TUVY5MVo60Xwo4sUWULp+Aj5zw2LOSgYeEP62pyCTKWwQ51of4QIfAMZ2RkQfAOD/CWxuQDggqUoc5Z4y4YYH4cITwEYkhDpHBzwY4B4cZmof4BiEC/ZrguikwEZ7EKsScYh+aUpzwjaDj+wf5/gfAy5/6AKAaAqA6BKBYfAIAMAD6ABuQCD5IAYcjGJ1IAIyYTwEBzxzwcAD5+YYgA6Mx5AAiECEwwAOCHAPyFkUUaQ95rZpAP6uoYIAYeLvaJyKDpog50byCKyLCLTqiLiL0qAAAB6K5uQAZHwcL3QMIjYGAPwDAH4BiGocIiwiwB8ogYAIaOaBIfAYCEABwDCNiUIAgHyFkPsYQ8ZHrYqOyRgc6OworhT5SWCDgMAPgviK4QAQCLQcAPEqQf4YCPYBgAUqxHwZ5lQY6qA6AfC1APgHwHyvZkRkQY6RgYJ5Yc6OgecbKqMzwPwHgPwDznMvEnzRDYoQaR7ZDhIoKgTPigrSYB5jihShiXYD51izraAHjaaSwZ6xanw7YfAfzjyrqvZn6cgc6cQYZ5Z5RhqkiqIPzIz4TtM0khA8CxAo0QSpCu5EzK7PiyqqahDDqrYjah4QirQyIqU+BuSxYjzcxmRaaUInZ5Q7JXSYofKOw7aoDGgP7Iy1AP6lE8ZRpSk8xX465kU9SpbSE9qqKqa7zyqq4yYgoYM+8j40E/Q9KJY9xeNACbSs4P7+0czm9BAfh5Y7dBisgH4c5ok2hTrKKlwnq6ix666ybDK7jDgwizSrIolEaFVEwjYAM/qzQf9Fax0l0ly/ofAccxZsNGbcwAADAoxaalDm88g7tCYtzCVIAuwCCyNIbDTLbDoiIwzL4YdJaxVJocACM/ofgATUIHgf4naxyUouTCQY9LFLRzNGlL1MAf9MRBcSU8tHqRlMZDyYE9ip4YKNKy4D7LoHDL9EQHE/FO05g67mU/ofAH0/4P4ncPSBIiFA9MZhtLgyFRalAeZa8vI8CUrYqlzRYY4e7RygFDKp7SSQDSzZbTDTSztJYA0yyxbWk/qk1VInbRwolXtLC3dLjHAP4XlG4eNW9HY7rYkoNXjRifrZc3igjSSg7aYwjawipY6idUEj7adJtZ47akwxCrrcLRrV4cdayOwkKqLGjYFBdbpl1cA7rgjYyO0sooarbZjhqRKXYDjiQwihjiwYYYDis/EkFUau9aCqU1yZi16wScTY87LRifykioFBdgoP5apHFMg+C6IR6YzjDpTxjpwgYmUj7ySgDygi1jLqljiBNj0zM/ofKz6is2K/aYzq1mozin4oagB5a5cz8180Dlwr1CJ+ClzxAoAjrpaKIz7x5fTqTybqtoQmNedoqxYYFJ854mggglYniDlsApopqMwv4rIodubZoADj9rcK8hL4wuIub5IvAvR/YvqCT6NOIw4xIYYYQn1jg6ljwcAHFPQAwH4QInA1A3LIYcAGIYkfguRfMmooYvQQA1Q2Db9wVR8ARO8Z9rhIUBBN12dwZH92xNd3F2BGsDJNY/t2hIN4BNT/VmRGV4pM6BN5ENkEt5xWt6BO0M96
RTt6pOV6l646t7N7Q6N7l7pR98BNV798RJd8sFd899F9MF19d9l9sGN99+F+MGl+ZLF8l+osF+9/A2l/cHN/rYV/65uAOAWAcIeApJN/V/GBN+uBd+eBt+OB99+CN9uCd9eCt9OC98+DN8uDd8WDt8GD97uEN7WEd6+Et6uE96WFN6GFd52FsNmF8NWGMNGGcMuGsMWG8L+HMLmHcLWHsLGH8K2IMKmIcKWIsKGI8J2JMJmJcJWJsJGJ8I2KMImKcIWKsH+K8HuLMHeLcHGLsGuL8GeMMGGMcFuMsFWM8E+NMEmNZWmNpWON8G2A+A2OZImOJVeO5VGPJUuPcQGOr4mP+QGQOO2QeQmQpIWPpT+RJTmRZTeRsI+Q5G2R8UmSJIOScJeSpGWS8J+TJPmTuT2T5IuUOUWUcMGUuU2U8MeVOVWVcM2VuV2V8NOWJCWWY+2TcKOWq52XJTWXZEeXuX2X962YJGeYeYmYt7eY+ZGZN7y50qothfMYJtwsdRwsy7Bitb8Y5EwsRiObBsMO4sa5eaI5Kjhnlg4sE2eW2ZpkxfIo5psPGaY5GaxwubpYsBBfYb+bs64sJuJirBsoBml1wsM8xlZQGdQs0zmaEXYsWagsueRmWegsA8RwwsCSgsufRm4sWcJlo6WcgsIzebec+go98U2hEZeeAtehxfOiGbTnwsWiosmi4g2akfxwuc2lGgIsBzukAg2dGWmkYs+ko5Fg+hgsmlOlMZQuwgzOGl2fMO+mZl2mhp2nGmWpYsBfenuXg8mkl3os+oeeNW+o+bOpOlosOl4semMbmjetWgBj4B8a+q2kOVmrQf4/oCIkAeaBIcADiyJgKgY4AfIQ5wMrrdwsEuQR8lw2OpZHqJZmTYLJaySQIDwjqDky6BK0rvT3YHgOAowA7hmt4gtHAZ7HIPwv4H4YgpQYwHgPKwOuiUoo4TIR854voP5zwZwB8d8bgQzb4OAto/W2a7YwoA4GKUurGXWn4YAgoCAI4fIeQy+vR4IPgy7GQqgfLAAPwIAP4yZbUkQR72klr5Kp944f0kQnohgfwlgCABgnyKAQckAcACAfjABHwHm7B44CAuZ+4hIA869KwBwnwB4YgvoAW1QA6HIcADB+oH4SAI47clj2QZAA8krGQfwATb4ICbI/SKQfgQAPgA8lYAe4uYGuYAguoAG5YeErszgBFVCNQfAEIfAEAPABAZiYQH8sRHMrIR4QSp6BBHwp7pZmUqocD0874D6LKKCKAAcq6L4hWzKP5ezzKTAeAYYcAQc64YayjuocAQ/AKNPAiSYYy2oQQD/BZCCJocLvYAaMiPB2FTIfkupu4f4gofgDAPiP6FYcAQPEI8ZPJwgsQ68j5gwkC94jYfkz4PDOChqhqUEz6G8z6yaR6coZ6R5Y4Yb3R8JfIAR+qXICAHzhiBgAMwXJSiQQCM4ilKQHwurvAQ6VJriJYcM66SE0lmrjAoyz4iiqgT6lFdk7KlwYcv6b7mKf4rAfhHBFpapsHOYPiiqdUfmxmuQ5weBnhyjDvQQfIrHQoP4HwPIo4rSjTORaoR60C7KchYCbo/AxdmJHri6YAPgD0sIA4QIzgoKQYogoKXYpSUPVXcyOjeQeE64wTmqnTDPQoHjjybQD4YyG4h5B6ly3TRqcCaZaTOTOSdIvsds758Sqin2kV7xl3anQHQYo8znbRig/vidkfcVwUfjXzChZaVsa5HrjAeAOE2ACAP6WyOigAP4BAgoQczfU/fRe3cxCA7NQs67Y4r6nQj4ja5aja3Aknfgofo2nk5ykwPKjnhBw8dtgidQx3PeYQ6LAXP42PkIfK9PQof9VB+5+4PAtpaoZ6UI44B7OzupexZYjPmIwXn/mqIJ1SOnelEvn4u3oJ8XoYY+1lAYfHpFMfpY686Hg3svi6/aFoob9I9KVTc0fvrD1wR4nZmtGvjQuvZ5CJPPsYsIY4tvs3tGhAHwPjGIAYhnt1l4P4Z4HGZxeyfh+YvsD
++KqIgqyQwaPTtB/7hCyAP7poQ46DylKXwySgY66q3u2Ic4ZxC5HzA466k21QqlzYfz0Nfx5b/Yr4nrWgc4fDpYOOcgR6lHOUDzIydTl1J/jo5z3HP/1Re2u/kYo4H35Agx14gAEPD8P4eP7PHAAB7/f4feaPQBHD7+AADf7gELhfg/f7+D7DA74MD+BwAP4PQ7heAQeB/AAff4HeD4BB4P7/Hh/mAfD7HAABf7Hf6PfDnZ0VmCPD7xoTxfw8PL8AAGH7+Hx/cafeFCfAHR7/YAjZ7xc74cAAOL+H8ER6Om9VeAADk6H4/cARccMvV7vl9v1/cF/wV6YEwvbHjdCnr5eb8f74x1Xlr/mZ8gcFg48vcOR6DY8TjlgcLgjcdB7DQciP0lH8o0ZwlsvmLweA4m04nUNnuGoVEo0cpNLrlPqJ/j1WH9ZfFCf6DQ7/YLjsdlvL4tUbR7Mt7+GL/ggfurheN5wfl80X89+wt8Y9qxTHxmOyD/q8Ex5zfmXDw/R85vQHnGR4fmOR53sMYbRtKfzTkGeA4NWBjWpSlR4H+AyFkO2h8NunKdp6r5/nGfZjqKZ6knuR5HqYx7iLUhpvB8H5hk+fAnoYcJjnuT7pLIfB4xYtb+GIhi1hw7y6B+sbyPTJi9sDJq9HA0K9PbAaGnOeLGseyK0j+xr8P0/icxAB5zwFAhxg+vJjtGfCOQWQZhwcPgPA8D7XHG2h/gehZBnAeMNo5DrdQJH5xuXEs1EfAMVOGqD3A+Ywfo/GkbLGY54x26cfH8PDryEhgPn5Iy5vBJLxyhVMn1SybDrUr4PyxLR+MjTw/nyfgxvygj9kfGMyHOR0B0WnrmtG25/AOA5gnA2s6zulJgz0B4DkOQJgHBQLcQ9NEQuWshnwBRcUnGrh+Ue/gPkPSZjk+fiFyxNk1U2eNdyCD8nke/KnyRFUl1S9NV4A9iGoWD5wmDWdaptDdAP1OwfzsmCvKUQ5jkOR7nmAAKzpuigDrOfAAB9O7XLPCoHucYAALiPAD23QhjmGf54nCcZwnOZ5D4sYZH5koT8qhgpHge/gD3cmBxmHpRHmHTZ8Hg/K1zscKGTkPg+Q6us2HHf+BsHgWvoYoRAOAcAA1mDj6D4kypZE/QALqQCeH+hSIZAA5gIXjazoYfwQAeeAAqkP2SpTk89udk4AD5l9BgOnpwZefABngA+cmBvE/AOoSKaFue9byD5Po8ynKgGYAB02qSKLWIAfx+i4AIoPidAOuwA67sTzbDsShGBUJ4ATtL6D8oB/cGAwcV4IB/h+YDcgg/hgDGeF8I4cAQHCoCOhgteXgCjxDtclY4T2YMMggmgecc3JBp76zvAefFYmecAP+taVin+AWheglJkAf3SEwK6PgB44AYqbH8RYARawwM0IY5Qf7g3bA/HgBh3Tu2wQag4XofxNy9DfIaeVqsHYTHpRcqkecJ2vu9hYlCD5e4RGGMFCWF8Ny+QpShCuHCTYXQ9PNDEvQn4RmDhtECG8QodxIYDExJsSoRw0L+1Us7L4nQdPsqkc8Vzyw/i5DmEBm4SOyitF9sUXjBrJjMYCNcaYwl6ikX6KgAIyxtVUwBBcdi+Roj1H2P0f4fSAkFIOQkG5CyHkRICPkiZGSNhfIuR0kZJMDkhJOS0l5DSYk1Jt3knJPSfj3KCUUnpKyjlNIOUsp5VR9lTKuV0ZpWyvllEyWMs5bQ3lrLeXUHZcy7l9C2X8wYnS9mFMWTsxpkQnmJMmZiUZmzPmBNCaUgZpzVmPNabBfplzZl9NubkupvTfltOGcUspyTlldOedEqp1TrlNO2d0op4Txk/POeknJ7T3k1PmfUl5+T9knP+gEkaBUDkbQWg0iaEUJkPQuhkhKHUPkFRGiUf6KUVlZRiYNF6NRto5R2WFIJd0fpFFeklJZaUonHSqWdJ6WQ4pdS+R9Mp000lXTGm0vKcynpxTt3dPafTRqDPWocoKgVFShUepCTKlVLPPU2p0Xaoz7qnJiqF
VZtVYktVerSTquySq5V89FYqD1krLWahVaK01qobWyttbqIVwrjXKiddK612otXivNeqM18j1WGr9gKu2Cq1YSrFhqq2IqnYqqNjKnWOqXZCpFkqi2UqHZaoNmKfWap3ZynNnqbWgppaKmVpKX2mpZailVqqUWspLa6kVsKQWyo7bSjVtqMW4orbqiVvKH2+oZcChNwqDXEoHcagFyJ+3Kn1cye9zp6XQnjdKd11J13WnRdict2pxXcm/d6bl4Js3imxeSa15pq3omneqaV7JoXumffCZt8pmX0mTfaZF+JjX6mLfyYV/qN1+r/gKO2AJf4Gm7gSj2Co14IpHgykOEIuYOnBhLCeFqTYYmHhqlOHIgYUlviCleHoe4ipbiTEuKKYYqlxiymeLplYwxjjKnWNIOYmnNjbG+OoNY4lfj6muPIz5CyHkSSmRsj5IjvkqpOTMm5OqZlDKOUqn5UyrlaqWWJM5aqzlyNmXsu5glDmLMeZJnZmzPmiseasgU3zUQzNs7M35rzRnGnmc87TvzxnvN+eZR5+nlnzNmgs66EzNoCo2hsyaIqJn3RWYtGSk0fmDSM+NJ5e0rJvTNVNHad0Hp7QuoND6Xy5puq2pMtamn9qjLGqqt6sytq6gOsMqayrBrTKWtqCa4yhrqR2vqz6f2FqHYeo9RaL15k7YEjNl1r2LsjY+kNk5M2bIjatb9n7S2jpTaeStryF2/XPbO3Nt6Y27kjcMqNz5G3TXfce5ty6l3XkTdsit55C3rXvd+8t46p3vjzfMfuA1933v7furd/464HgPg+seE424XgXh+NOI4L4brXieMuK4N4zjDjeEeC8I4vrnjuLuPxf5PhfkeveS4s5ThnleyuW4q5fhvmO1OZ4o5rh3kPDub7e5ziTncSOh4f6Dh7ouKef7o6PhzpOK+l7s6bhrp+Leo706nhjquL+e8Y6vvjrOFutws7HjPrvJOv8A7DhLssJu2417PyztPCu14Q7fjvufEO64M7vj3veCu+0/7/gTwORe48y7zxTweAvC1C2J4/Y3h+ceJ414uv3jck+S6B5Tj3lq+eYYB6DJfmumec5N56vXosn+k6l6bl3qK8eqmp6zrHruaewrt7LKftOwe2517iunuom++6F8CuXwsr+87V8TpHxq4fImv5DaHyu6fM6d86t30Ms/U719bqn2K2fay35H6W2vueK+91r8FavxGC/bl/8/lf09i/XWj9+Yfy7k/j53+fbP6qzP7i+wAsyv8t4P9vTv+u7P/qyQBqvQEu+QFqxQGs0wCt+QDvXwHvAQIrAwNrBwOrCwPrDwQrEwRrFwSrGwTrHwUrIwVrJwWrKwXrLwYrMwZrNwarOwbrPwcrQwdrRwerSwfrTwgrUwhrVwirWwjrXwkrYwlrZwmrawnrbworcwprdwqrewrrfwsrgwtrhwuriwvrjwwrkwxrlwyrmwzrnw0row1rpw2rqw3rrw4rsw5rtw6ruw7rvw8rww9rxw+ryw/rzxAr0xBr1xCr2xDr3xEr4xFr5xGr6xHr7xIr8xJr9xKr+xLr/xMsAwMvCRNsDxPsExOvGRQsHxRvLxSsKxTvPxUsQxWsRwLvbxVvUxXsTxZvYxascxbvcxcsfxesgxYvfxdvgxfs3Rhvjxis5Rjvnxks7xlvsxms9Rnvwxos/xqtAxpv2RrtExsv7RttGxgvixuwARvtJRxwGRytLRzwJR0tNR2tORwvmx1wOR5wPR6wQR7wRR8wSR9wTR+wUR/wVSAwWSBwXSCwYSDwZSEwaSFwbSGwcSHwdSIweSJwfSKwgSLwhSMwiSNwjSOwkSPwlSQwmSRwnSSwoSTwpSUwqSVwrSWwsSXwtSYwuSZwvSawwSbwxScwySdwzSew0Sfw1Sgw2Shw3Siw4Sjw5Skw6Slw7Smw8Snw9Sow+Spw/SqxASrxBSsxCStxDSuxESvxFSwxGSxxHSyxISzxJS0xKS1xLS2xMS3xNS4xOR4vry5xQ
S7xRS6vvy8xTS9v1S+xVS/v6TAxXTCxYQKuDTBv/TDxbTFwFTGxdTHwITIxfTKxgTEuRTJwNTLxjTNxPTOxlTPxSTQxnTRxUTSxpTTxWTUxrTWxsTVxaTXxuTYxcTZxwTMufTaxeTbxzTdxiTex1TfxkTgx3Tix4TcuvThxmTjtTzmtVzntXzotZzptbzqtdzrtfx3znTlxoTstgzku0TuxqTvtmTtzoTxxtTytnTwu5T0xvT1trTzzpT3xyT4tsT2vET6x0T7twT5zqT9x2T+txT8vJ0Ax6UDx7UEx8UFx9UGx+UHx/UIyAUJyBUKyCULyDUMyEUNyFUOyGUPyHUQyIURyJUSyKUTyLUUyMUVyNUWyOUXyPUYyQUZyRUaySUbyTUcyUUdyVUeyWUfyXUgyYUhyZUiyaUjybUkycUlydUmyeUnyfUoygUpyhUqyiUryjUsykUtylUuymUvynUwyoUxypUyyqUzyrU0ysU1ytU2yuU3yvU4ywU5yxU6yyU7yzU8y0U9y1U+y2U/y3VAy4VBy5VCy6UCvN1Dy8VFy9VEvS1Gy/VHvW1IzBVJva1KzDVMzEPyVOvp1Lve1NzHVQPl1RTJVSPq1TTLVVTMVPPzVUPu1WTPVYP0VZTRVaP5VbTTVcP+VdTVVeQEVfTXVhTYVgQMViTaVjRZVkTcVXP9VlRhVmTfVoRxVpThVqR5VrTjVtTkVnQDVsS7VuTuVwS+VxT0VyTAVzT6V0TCV1UAV2TGV3TrV5TsV6TtUBt1V8N3V4TIV7TwVvQLV+TKV/TzV9N7WDN9WBTOWCT2WATFWFTQWGT5WEOBT/152ITSWJT8WHTNWMTUWNT/WKOCWPTWWQUCWOTdWSTZWTV82WV92UTlWVTbWXWD2aWE2YTxWZTeWbWK2ROGWeWR2cT3WdTgWgWf2iTiWjOJWfWl2lOLTNkpqVB3ulFUm9HJGRA/nJIPWoiOibiui+oeLoiLC+h7i9BBh/2wuzEoHUhwAB2sW22towDKGZi+W0rqnuC+ItiGCLGvO4EoBnmcB4h8iGEsW4i9oYhxnYi927J1lDi/DmHnEQurFUlN3Bh/hz3LCO2uXEXMi9XGJ0BxosjDi9COW6OyGBhnh8hzh+A83CXBhfh/EXBPhf3Bh3Cbj4B/jjB/B/h3ktJ6BxoPi120CGBTiO3IhJ3J2/3VB/XW3Lh8kFh8AfAfAZAHIDg/AhHaotl3hMA8AeBBh5kpXfh/HGh4AHngB/gTjHn2B/gJ3kkm3Uhz3mXXDlhxgPAeA4AiBDhBAfAhA8XJB4X9AYAOAIh5hho4py3gA4AhhxgHhCDjAzkQjwB/gZ33EmXAX4g8AACLgBh5jpBHAcjxkCAPAfA5jKAfh5hzhDgHAOA+B5hnkQJ3FDg4hxh3ilC14IYXiq4SuuW1gB4MYNHJB54XBnAZ4OhzhPhPA/YIB+AD4OkUgOA/YXBnrogAEshnhngfhfiih/4ri13Pu/GAHM343mhzhhh5h4hnhjAT4jB/gPA/EbDmh/mehP4oA5hv4prpigDpYsBP3VYuBj4vYKj04p4x3CCyFMhjYOBxhzhwY3IrFAABhP46B/A5hACFp4i8h+ZIijilothP5ADGZBDz5CXWXCCi5EZFBzhAYkorGoAR5JAOZKZLXfjHgJhP5OG+5PoLYNYeX33L5S5f3EhHhjAY4jAP4kgx4uB+Gu5YA/A5oiZaBw4hibm+mfZd5RDzZSYyZlh45hgcZjYkg45fhzis46ZnBHoRZMEQ4pYs3Lih5AXh5e4LXLh8Ztmui25v5Fgf4k3WlDhj5y4WCH48YY3L4XClCYItkUiOYvvBGAYpiz3LCz4QhHAeA55FhP43X/BwGcEZ46Zvl25aYOkCFhZ3ECaF5sDy6HgB6IgAiu5G6K5FhAY3A+HTW25YG0H75aB4ABhwXzCOItn76T55ZBiLgP6IgHiPB8A/AfAZ5FhhhnA/A/A/l3h4Zn46Bjh5lW6CZ6h8H7XCDH
g/6hXTo/DHKGaBuYI+6yqDBH6zuba0qGBj5LuVa3rh4DuiI/61KB6ta0WkTmWnOOWmWn6+zva/uQbBzybCuUWLV67Dz1bE657Gz4bH6+WhT9bIz7bJ63bLz+bMuebN0BbO677A7AbQujbR7DbK0DbP0EbV0FbW0GbX0HbY0IbZ0Jba0Kbb0Lbc0Mbd0Nbe0Obf0Pbg0Qbh0Rbi0Sbj0Tbk0Ubl0Vbm0Wbn0Xbo0Ybp0Zbq0abr0bbs0cbt0dbu0ebv0fbw0gbx0hby0ibz0jb00kb10lb20mb30nb40ob50pb60qb70rb80sM6iAgAAAdXQVHdlbHZlTW9ua2V5cyBJbWFnZUlPIFRJRkYgd3JpdGVyIDMuNQAACwEAAAMAAAABBqMAAAEBAAMAAAABCJgAAAECAAMAAAABAAEAAAEDAAMAAAABAAUAAAEGAAMAAAABAAEAAAERAAQAAAABAAGZSQESAAMAAAABAAEAAAEVAAMAAAABAAEAAAEWAAMAAAABCJgAAAEXAAQAAAABAAA8XQExAAIAAAAmAAHVqgAAAAA="
with open('decode_image.tif', 'wb') as file_to_save:
decode_image_data = base64.b64decode(payload)
file_to_save.write(decode_image_data)
| 20,089.75
| 160,560
| 0.964883
| 5,527
| 160,718
| 28.055907
| 0.924733
| 0.001819
| 0.002399
| 0.003005
| 0.027008
| 0.027008
| 0.027008
| 0.027008
| 0.027008
| 0.027008
| 0
| 0.143325
| 0.000124
| 160,718
| 7
| 160,561
| 22,959.714286
| 0.821622
| 0
| 0
| 0
| 0
| 0.2
| 0.999054
| 0.998942
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b013e26fe8db807ed6aae6d51f2a6b9a2c73236f
| 43
|
py
|
Python
|
tests/generic/generators.py
|
dubesar/model_bakery
|
a4ccef6d1f4ada60d488f509be4e797e8237d6d3
|
[
"Apache-2.0"
] | 448
|
2015-01-01T15:12:05.000Z
|
2018-12-11T12:16:58.000Z
|
tests/generic/generators.py
|
dubesar/model_bakery
|
a4ccef6d1f4ada60d488f509be4e797e8237d6d3
|
[
"Apache-2.0"
] | 161
|
2015-01-09T03:52:48.000Z
|
2018-12-11T22:06:52.000Z
|
tests/generic/generators.py
|
dubesar/model_bakery
|
a4ccef6d1f4ada60d488f509be4e797e8237d6d3
|
[
"Apache-2.0"
] | 109
|
2015-01-15T00:36:52.000Z
|
2018-07-26T11:22:50.000Z
|
def gen_value_string():
    """Return the constant placeholder string used for generated value fields."""
    generated = 'value'
    return generated
| 14.333333
| 23
| 0.697674
| 6
| 43
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 24
| 21.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c6b341c44d2784468abdcf84a74b3da072386278
| 172
|
py
|
Python
|
src/rl_agents/runners/__init__.py
|
mateuspontesm/RL-Agents
|
70274b2b4b95402f03857818733fc0d52aa3e385
|
[
"BSD-3-Clause"
] | 1
|
2020-10-30T15:40:01.000Z
|
2020-10-30T15:40:01.000Z
|
src/rl_agents/runners/__init__.py
|
mateuspontesm/RL-Agents
|
70274b2b4b95402f03857818733fc0d52aa3e385
|
[
"BSD-3-Clause"
] | null | null | null |
src/rl_agents/runners/__init__.py
|
mateuspontesm/RL-Agents
|
70274b2b4b95402f03857818733fc0d52aa3e385
|
[
"BSD-3-Clause"
] | null | null | null |
# Re-export the simple runner entry points so callers can import them
# directly from `rl_agents.runners`.
from rl_agents.runners.mab_runner import simple_mab_runner
from rl_agents.runners.tab_runner import simple_tab_runner
# Explicit public API of this subpackage.
__all__ = ["simple_mab_runner", "simple_tab_runner"]
| 34.4
| 58
| 0.854651
| 27
| 172
| 4.851852
| 0.37037
| 0.206107
| 0.183206
| 0.290076
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075581
| 172
| 4
| 59
| 43
| 0.823899
| 0
| 0
| 0
| 0
| 0
| 0.197674
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c6c0de5e28abbfbdf5fe8a41f3ddfa1bcaa1110d
| 16,473
|
py
|
Python
|
src/ue4nlp/ue_estimator_msd.py
|
AIRI-Institute/uncertainty_transformers
|
982b5ae8b39cb484ce3559a72f95d18f30487e38
|
[
"MIT"
] | null | null | null |
src/ue4nlp/ue_estimator_msd.py
|
AIRI-Institute/uncertainty_transformers
|
982b5ae8b39cb484ce3559a72f95d18f30487e38
|
[
"MIT"
] | null | null | null |
src/ue4nlp/ue_estimator_msd.py
|
AIRI-Institute/uncertainty_transformers
|
982b5ae8b39cb484ce3559a72f95d18f30487e38
|
[
"MIT"
] | null | null | null |
import torch
from ue4nlp.dropconnect_mc import (
LinearDropConnectMC,
activate_mc_dropconnect,
convert_to_mc_dropconnect,
hide_dropout,
)
from ue4nlp.dropout_mc import DropoutMC, activate_mc_dropout, convert_to_mc_dropout
from utils.utils_dropout import set_last_dropout, get_last_dropout, set_last_dropconnect
from utils.utils_heads import (
ElectraClassificationHeadIdentityPooler,
BertClassificationHeadIdentityPooler,
ElectraNERHeadIdentityPooler,
XLNetClassificationHeadIdentityPooler,
)
from utils.utils_inference import (
is_custom_head,
unpad_features,
pad_scores
)
from ue4nlp.mahalanobis_distance import (
mahalanobis_distance,
mahalanobis_distance_relative,
mahalanobis_distance_marginal,
compute_centroids,
compute_covariance
)
import numpy as np
import copy
from tqdm import tqdm
import time
import logging
log = logging.getLogger()
def convert_dropouts(model, ue_args):
    """Replace the model's dropout layers with MC dropout / dropconnect
    variants, according to `ue_args.dropout_type` ("MC" or "DC_MC") and
    `ue_args.dropout_subs` ("all" or "last").

    Raises ValueError on any other dropout type / substitution setting.
    """
    dropout_type = ue_args.dropout_type
    subs = ue_args.dropout_subs

    # Build the constructor for replacement layers.  Note: for the "MC"
    # flavor the arguments are intentionally ignored and the layer is
    # created deactivated, exactly as in the original implementation.
    if dropout_type == "MC":
        def dropout_ctor(p, activate):
            return DropoutMC(p=ue_args.inference_prob, activate=False)
    elif dropout_type == "DC_MC":
        def dropout_ctor(linear, activate):
            return LinearDropConnectMC(
                linear=linear, p_dropconnect=ue_args.inference_prob, activate=activate
            )
    else:
        raise ValueError(f"Wrong dropout type: {ue_args.dropout_type}")

    is_dropconnect = dropout_type == "DC_MC"
    if subs == "all" and is_dropconnect:
        # TODO: check encoder or all dropouts ?
        convert_to_mc_dropconnect(model.electra.encoder, {"Linear": dropout_ctor})
        hide_dropout(model.electra.encoder)
    elif subs == "last" and is_dropconnect:
        set_last_dropconnect(model, dropout_ctor)
        hide_dropout(model.classifier)
    elif subs == "last":
        set_last_dropout(model, dropout_ctor(p=ue_args.inference_prob, activate=False))
    elif subs == "all":
        convert_to_mc_dropout(model, {"Dropout": dropout_ctor})
    else:
        raise ValueError(f"Wrong ue args {ue_args.dropout_subs}")
class UeEstimatorMSD:
    """MSD uncertainty estimator: combines MC-dropout sampled probabilities
    with Mahalanobis-distance scores computed from class-conditional
    centroids and covariance fitted on training features.

    Usage: call `fit_ue(X, y)` once, then call the instance on eval data
    to obtain an eval_results dict with per-example uncertainty scores.
    """

    def __init__(
        self, cls, config, ue_args, eval_metric, calibration_dataset, train_dataset
    ):
        # `cls` is the model wrapper that owns the underlying network
        # (`_auto_model`) and exposes `predict`.
        self.cls = cls
        self.ue_args = ue_args
        self.calibration_dataset = calibration_dataset
        self.eval_metric = eval_metric
        self.train_dataset = train_dataset
        self.config = config

    def fit_ue(self, X, y=None, X_test=None):
        """Fit class-conditional centroids and covariance on features of X."""
        cls = self.cls
        model = self.cls._auto_model
        log.info("****************Start fitting covariance and centroids **************")
        if y is None:
            y = self._exctract_labels(X)
        self._replace_model_head()
        X_features = self._exctract_features(X)
        self.class_cond_centroids = self._fit_centroids(X_features, y)
        # NOTE: attribute name keeps the original (misspelled) spelling
        # `covarince` for backward compatibility with external readers.
        self.class_cond_covarince = self._fit_covariance(X_features, y)
        self._restore_model_head()
        log.info("**************Done.**********************")

    def _fit_covariance(self, X, y, class_cond=True):
        """Compute the (class-conditional) covariance of features X."""
        if class_cond:
            return compute_covariance(self.class_cond_centroids, X, y, class_cond)
        return compute_covariance(self.train_centroid, X, y, class_cond)

    def _fit_centroids(self, X, y, class_cond=True):
        """Compute per-class centroids of features X."""
        return compute_centroids(X, y, class_cond)

    def _replace_model_head(self):
        """Swap the classification head for an identity pooler so `predict`
        returns hidden features instead of logits; the original head is
        saved for `_restore_model_head`."""
        cls = self.cls
        model = self.cls._auto_model
        self.old_classifier = copy.deepcopy(model.classifier)
        use_paper_version = self.ue_args.get("use_paper_version", False)
        use_activation = not use_paper_version
        if is_custom_head(model):
            model.classifier = ElectraClassificationHeadIdentityPooler(model.classifier, use_activation)
        elif "xlnet" in self.config.model.model_name_or_path:
            # XLNet has no `classifier`; replace its logits projection instead.
            self.cls.model.logits_proj = XLNetClassificationHeadIdentityPooler()
        else:
            model.classifier = BertClassificationHeadIdentityPooler(model.classifier)

    def _restore_model_head(self):
        """Restore the classifier saved by `_replace_model_head`."""
        model = self.cls._auto_model
        model.classifier = self.old_classifier

    def _exctract_labels(self, X):
        """Return the `label` column of dataset X as a numpy array."""
        return np.asarray([example["label"] for example in X])

    def _exctract_features(self, X):
        """Drop the label column and return raw model features for X."""
        cls = self.cls
        model = self.cls._auto_model
        try:
            X = X.remove_columns("label")
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.  Fall back to the wrapped `.dataset` attribute.
            X.dataset = X.dataset.remove_columns("label")
        X_features = cls.predict(X, apply_softmax=False, return_preds=False)[0]
        return X_features

    def _calc_distinctivness_score(self, full_mahalanobis_distance, eval_labels, eval_results):
        """Combine max sampled probability and Mahalanobis distance into a
        single uncertainty score.

        Returns (eval_results with "uncertainty_score", elapsed_seconds)."""
        start_unc = time.time()
        min_mahalanobis_distance = np.min(full_mahalanobis_distance, axis=-1)
        # Penalize examples whose nearest centroid disagrees with the label.
        penalty = self.config.mixup.margin * np.where(
            eval_labels == np.argmin(full_mahalanobis_distance, axis=-1), 0, 1
        )
        dist_score = np.log10(
            self.config.mixup.beta1 * penalty
            + self.config.mixup.beta2 * min_mahalanobis_distance
        )
        # Uncertainty combines inverse confidence with the distance score.
        max_probs = np.max(
            np.mean(np.asarray(eval_results["sampled_probabilities"]), axis=0), axis=-1
        )
        uncertainty_score = (
            self.config.mixup.gamma1 / max_probs + self.config.mixup.gamma2 * dist_score
        )
        end_unc = time.time()
        eval_results["uncertainty_score"] = uncertainty_score.tolist()
        return eval_results, end_unc - start_unc

    def _predict_with_fitted_cov(self, X, y, eval_results):
        """Compute Mahalanobis distances for X with the fitted statistics.

        Returns (eval_results, distances, total_inference_seconds)."""
        cls = self.cls
        model = self.cls._auto_model
        self._replace_model_head()
        log.info("****************Compute MD with fitted covariance and centroids **************")
        start = time.time()
        if y is None:
            y = self._exctract_labels(X)
        X_features = self._exctract_features(X)
        end = time.time()
        md, inf_time = mahalanobis_distance(None, None, X_features,
                                            self.class_cond_centroids, self.class_cond_covarince, True)
        sum_inf_time = inf_time + (end - start)
        eval_results["mahalanobis_distance"] = md.tolist()
        self._restore_model_head()
        log.info("**************Done.**********************")
        return eval_results, md, sum_inf_time

    def _activate_dropouts(self, model):
        """Enable MC dropout/dropconnect on `model` for stochastic inference."""
        ue_args = self.ue_args
        log.info("******Perform stochastic inference...*******")
        if ue_args.dropout_type == "DC_MC":
            activate_mc_dropconnect(model, activate=True, random=ue_args.inference_prob)
        else:
            convert_dropouts(model, ue_args)
            activate_mc_dropout(model, activate=True, random=ue_args.inference_prob)
        if ue_args.use_cache:
            log.info("Caching enabled.")
            model.enable_cache()
        return model

    def _deactivate_dropouts(self, model):
        """Disable MC dropout and dropconnect on `model`."""
        activate_mc_dropout(model, activate=False)
        activate_mc_dropconnect(model, activate=False)
        return model

    def _predict_mc(self, X, y):
        """Run `committee_size` stochastic forward passes over X.

        Returns (eval_results with sampled probabilities/answers, seconds)."""
        ue_args = self.ue_args
        eval_metric = self.eval_metric
        model = self.cls._auto_model
        start = time.time()
        model = self._activate_dropouts(model)
        eval_results = {}
        eval_results["sampled_probabilities"] = []
        eval_results["sampled_answers"] = []
        log.info("****************Start runs**************")
        for _ in tqdm(range(ue_args.committee_size)):
            preds, probs = self.cls.predict(X)[:2]
            eval_results["sampled_probabilities"].append(probs.tolist())
            eval_results["sampled_answers"].append(preds.tolist())
            if ue_args.eval_passes:
                # FIX: `references` previously used the undefined name
                # `true_labels`, raising NameError whenever eval_passes was on.
                eval_score = eval_metric.compute(
                    predictions=preds, references=y
                )
                log.info(f"Eval score: {eval_score}")
        end = time.time()
        log.info("**************Done.********************")
        model = self._deactivate_dropouts(model)
        return eval_results, end - start

    def _predict_msd(self, X, y):
        """Full MSD prediction: MC sampling + Mahalanobis distances +
        distinctiveness/uncertainty scores.  Returns the eval_results dict."""
        ue_args = self.ue_args
        model = self.cls._auto_model
        if y is None:
            y = self._exctract_labels(X)
        eval_results, mc_time = self._predict_mc(X, y)
        eval_results, full_mahalanobis_distance, md_time = self._predict_with_fitted_cov(X, y, eval_results)
        eval_results["eval_labels"] = y
        # With sampled probs and distances available, derive the final scores.
        eval_results, unc_time = self._calc_distinctivness_score(full_mahalanobis_distance, y, eval_results)
        sum_inf_time = mc_time + md_time + unc_time
        eval_results["ue_time"] = sum_inf_time
        log.info(f"UE time: {sum_inf_time}")
        return eval_results

    def __call__(self, X, y):
        """Run the full MSD pipeline on (X, y)."""
        return self._predict_msd(X, y)
class UeEstimatorMSDNer:
    """Token-level (NER) variant of the MSD uncertainty estimator: labels
    are per-token, so label arrays are flattened for fitting/scoring and the
    original shape is kept to reshape distance scores back.

    Usage: call `fit_ue(X, y)` once, then call the instance on eval data.
    """

    def __init__(
        self, cls, config, ue_args, eval_metric, calibration_dataset, train_dataset
    ):
        # `cls` is the model wrapper that owns the underlying network
        # (`_auto_model`) and exposes `predict`.
        self.cls = cls
        self.ue_args = ue_args
        self.calibration_dataset = calibration_dataset
        self.eval_metric = eval_metric
        self.train_dataset = train_dataset
        self.config = config

    def fit_ue(self, X, y=None, X_test=None):
        """Fit class-conditional centroids and covariance on features of X."""
        cls = self.cls
        model = self.cls._auto_model
        log.info("****************Start fitting covariance and centroids **************")
        if y is None:
            y, y_shape = self._exctract_labels(X)
        self._replace_model_head()
        X_features = self._exctract_features(X)
        self.class_cond_centroids = self._fit_centroids(X_features, y)
        # NOTE: attribute name keeps the original (misspelled) spelling
        # `covarince` for backward compatibility with external readers.
        self.class_cond_covarince = self._fit_covariance(X_features, y)
        self._restore_model_head()
        log.info("**************Done.**********************")

    def _fit_covariance(self, X, y, class_cond=True):
        """Compute the (class-conditional) covariance of features X."""
        if class_cond:
            return compute_covariance(self.class_cond_centroids, X, y, class_cond)
        return compute_covariance(self.train_centroid, X, y, class_cond)

    def _fit_centroids(self, X, y, class_cond=True):
        """Compute per-class centroids of features X."""
        return compute_centroids(X, y, class_cond)

    def _replace_model_head(self):
        """Swap the classification head for an identity pooler so `predict`
        returns hidden features instead of logits; the original head is
        saved for `_restore_model_head`."""
        cls = self.cls
        model = self.cls._auto_model
        self.old_classifier = copy.deepcopy(model.classifier)
        use_paper_version = self.ue_args.get("use_paper_version", False)
        use_activation = not use_paper_version
        if is_custom_head(model):
            model.classifier = ElectraNERHeadIdentityPooler(model.classifier, use_activation)
        else:
            model.classifier = BertClassificationHeadIdentityPooler(model.classifier)

    def _restore_model_head(self):
        """Restore the classifier saved by `_replace_model_head`."""
        model = self.cls._auto_model
        model.classifier = self.old_classifier

    def _exctract_labels(self, X):
        """Return (flattened labels, original label-array shape) for X."""
        y = np.asarray([example["labels"] for example in X])
        y_shape = y.shape
        return y.reshape(-1), y_shape

    def _exctract_features(self, X):
        """Drop the labels column and return per-token model features,
        flattened to (num_tokens, feature_dim)."""
        cls = self.cls
        model = self.cls._auto_model
        try:
            X = X.remove_columns("labels")
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.  Fall back to the wrapped `.dataset` attribute.
            X.dataset = X.dataset.remove_columns("labels")
        X_features = cls.predict(X, apply_softmax=False, return_preds=False)[0]
        X_features = X_features.reshape(-1, X_features.shape[-1])
        return X_features

    def _calc_distinctivness_score(self, full_mahalanobis_distance, eval_labels, eval_shape, eval_results):
        """Combine max sampled probability and Mahalanobis distance into a
        single uncertainty score, reshaping token scores to `eval_shape`.

        Returns (eval_results with "uncertainty_score", elapsed_seconds)."""
        start_unc = time.time()
        min_mahalanobis_distance = np.min(full_mahalanobis_distance, axis=-1).reshape(
            eval_shape
        )
        # Penalize tokens whose nearest centroid disagrees with the label.
        penalty = self.config.mixup.margin * np.where(
            eval_labels
            == np.argmin(full_mahalanobis_distance, axis=-1).reshape(eval_shape),
            0,
            1,
        )
        dist_score = np.log10(
            self.config.mixup.beta1 * penalty
            + self.config.mixup.beta2 * min_mahalanobis_distance
        )
        # Uncertainty combines inverse confidence with the distance score.
        max_probs = np.max(
            np.mean(np.asarray(eval_results["sampled_probabilities"]), axis=0), axis=-1
        )
        uncertainty_score = (
            self.config.mixup.gamma1 / max_probs + self.config.mixup.gamma2 * dist_score
        )
        end_unc = time.time()
        eval_results["uncertainty_score"] = uncertainty_score.tolist()
        return eval_results, end_unc - start_unc

    def _predict_with_fitted_cov(self, X, y, eval_results):
        """Compute Mahalanobis distances for X with the fitted statistics.

        Returns (eval_results, distances, total_inference_seconds)."""
        cls = self.cls
        model = self.cls._auto_model
        self._replace_model_head()
        log.info("****************Compute MD with fitted covariance and centroids **************")
        start = time.time()
        y_pad, y_shape = self._exctract_labels(X)
        X_features = self._exctract_features(X)
        end = time.time()
        md, inf_time = mahalanobis_distance(None, None, X_features,
                                            self.class_cond_centroids, self.class_cond_covarince, True)
        sum_inf_time = inf_time + (end - start)
        eval_results["mahalanobis_distance"] = md.tolist()
        self._restore_model_head()
        log.info("**************Done.**********************")
        return eval_results, md, sum_inf_time

    def _activate_dropouts(self, model):
        """Enable MC dropout/dropconnect on `model` for stochastic inference."""
        ue_args = self.ue_args
        log.info("******Perform stochastic inference...*******")
        if ue_args.dropout_type == "DC_MC":
            activate_mc_dropconnect(model, activate=True, random=ue_args.inference_prob)
        else:
            convert_dropouts(model, ue_args)
            activate_mc_dropout(model, activate=True, random=ue_args.inference_prob)
        if ue_args.use_cache:
            log.info("Caching enabled.")
            model.enable_cache()
        return model

    def _deactivate_dropouts(self, model):
        """Disable MC dropout and dropconnect on `model`."""
        activate_mc_dropout(model, activate=False)
        activate_mc_dropconnect(model, activate=False)
        return model

    def _predict_mc(self, X, y):
        """Run `committee_size` stochastic forward passes over X.

        Returns (eval_results with sampled probabilities/answers, seconds)."""
        ue_args = self.ue_args
        eval_metric = self.eval_metric
        model = self.cls._auto_model
        start = time.time()
        model = self._activate_dropouts(model)
        eval_results = {}
        eval_results["sampled_probabilities"] = []
        eval_results["sampled_answers"] = []
        log.info("****************Start runs**************")
        for _ in tqdm(range(ue_args.committee_size)):
            preds, probs = self.cls.predict(X)[:2]
            eval_results["sampled_probabilities"].append(probs.tolist())
            eval_results["sampled_answers"].append(preds.tolist())
            if ue_args.eval_passes:
                # FIX: `references` previously used the undefined name
                # `true_labels`, raising NameError whenever eval_passes was on.
                eval_score = eval_metric.compute(
                    predictions=preds, references=y
                )
                log.info(f"Eval score: {eval_score}")
        end = time.time()
        log.info("**************Done.********************")
        model = self._deactivate_dropouts(model)
        return eval_results, end - start

    def _predict_msd(self, X, y):
        """Full MSD prediction for NER: MC sampling + Mahalanobis distances
        + distinctiveness/uncertainty scores.  Returns the eval_results dict."""
        ue_args = self.ue_args
        model = self.cls._auto_model
        y_pad, y_shape = self._exctract_labels(X)
        eval_results, mc_time = self._predict_mc(X, y)
        eval_results, full_mahalanobis_distance, md_time = self._predict_with_fitted_cov(X, y, eval_results)
        eval_results["eval_labels"] = y
        # With sampled probs and distances available, derive the final scores.
        eval_results, unc_time = self._calc_distinctivness_score(full_mahalanobis_distance, y_pad, y_shape, eval_results)
        sum_inf_time = mc_time + md_time + unc_time
        eval_results["ue_time"] = sum_inf_time
        log.info(f"UE time: {sum_inf_time}")
        return eval_results

    def __call__(self, X, y):
        """Run the full MSD pipeline on (X, y)."""
        return self._predict_msd(X, y)
| 35.967249
| 121
| 0.624962
| 1,958
| 16,473
| 4.93667
| 0.101634
| 0.030416
| 0.009311
| 0.023174
| 0.83023
| 0.814815
| 0.808194
| 0.795986
| 0.790606
| 0.788537
| 0
| 0.002802
| 0.26334
| 16,473
| 457
| 122
| 36.045952
| 0.793737
| 0.027378
| 0
| 0.728571
| 0
| 0
| 0.083068
| 0.033852
| 0
| 0
| 0
| 0.002188
| 0
| 1
| 0.088571
| false
| 0.005714
| 0.034286
| 0.014286
| 0.197143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c6c5a02a46913bc7c4d40aaecf577b10f782b3bb
| 21,561
|
py
|
Python
|
fhir/resources/tests/test_allergyintolerance.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/tests/test_allergyintolerance.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/tests/test_allergyintolerance.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/AllergyIntolerance
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import allergyintolerance
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class AllergyIntoleranceTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("AllergyIntolerance", js["resourceType"])
return allergyintolerance.AllergyIntolerance(js)
def testAllergyIntolerance1(self):
inst = self.instantiate_from("allergyintolerance-nkla.json")
self.assertIsNotNone(
inst, "Must have instantiated a AllergyIntolerance instance"
)
self.implAllergyIntolerance1(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance1(inst2)
def implAllergyIntolerance1(self, inst):
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].code), force_bytes("active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].display), force_bytes("Active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].code), force_bytes("716184000")
)
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes("No Known Latex Allergy (situation)"),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(
force_bytes(inst.code.text), force_bytes("No Known Latex Allergy")
)
self.assertEqual(force_bytes(inst.id), force_bytes("nkla"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(
inst.recordedDate.date, FHIRDate("2015-08-06T15:37:31-06:00").date
)
self.assertEqual(inst.recordedDate.as_json(), "2015-08-06T15:37:31-06:00")
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].code),
force_bytes("confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].display),
force_bytes("Confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-verification"
),
)
def testAllergyIntolerance2(self):
inst = self.instantiate_from("allergyintolerance-example.json")
self.assertIsNotNone(
inst, "Must have instantiated a AllergyIntolerance instance"
)
self.implAllergyIntolerance2(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance2(inst2)
def implAllergyIntolerance2(self, inst):
self.assertEqual(force_bytes(inst.category[0]), force_bytes("food"))
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].code), force_bytes("active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].display), force_bytes("Active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].code), force_bytes("227493005")
)
self.assertEqual(
force_bytes(inst.code.coding[0].display), force_bytes("Cashew nuts")
)
self.assertEqual(
force_bytes(inst.code.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(force_bytes(inst.criticality), force_bytes("high"))
self.assertEqual(force_bytes(inst.id), force_bytes("example"))
self.assertEqual(
force_bytes(inst.identifier[0].system),
force_bytes("http://acme.com/ids/patients/risks"),
)
self.assertEqual(force_bytes(inst.identifier[0].value), force_bytes("49476534"))
self.assertEqual(inst.lastOccurrence.date, FHIRDate("2012-06").date)
self.assertEqual(inst.lastOccurrence.as_json(), "2012-06")
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(
force_bytes(inst.note[0].text),
force_bytes(
"The criticality is high becasue of the observed anaphylactic reaction when challenged with cashew extract."
),
)
self.assertEqual(inst.onsetDateTime.date, FHIRDate("2004").date)
self.assertEqual(inst.onsetDateTime.as_json(), "2004")
self.assertEqual(
force_bytes(inst.reaction[0].description),
force_bytes(
"Challenge Protocol. Severe reaction to subcutaneous cashew extract. Epinephrine administered"
),
)
self.assertEqual(
force_bytes(inst.reaction[0].exposureRoute.coding[0].code),
force_bytes("34206005"),
)
self.assertEqual(
force_bytes(inst.reaction[0].exposureRoute.coding[0].display),
force_bytes("Subcutaneous route"),
)
self.assertEqual(
force_bytes(inst.reaction[0].exposureRoute.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(
force_bytes(inst.reaction[0].manifestation[0].coding[0].code),
force_bytes("39579001"),
)
self.assertEqual(
force_bytes(inst.reaction[0].manifestation[0].coding[0].display),
force_bytes("Anaphylactic reaction"),
)
self.assertEqual(
force_bytes(inst.reaction[0].manifestation[0].coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(inst.reaction[0].onset.date, FHIRDate("2012-06-12").date)
self.assertEqual(inst.reaction[0].onset.as_json(), "2012-06-12")
self.assertEqual(force_bytes(inst.reaction[0].severity), force_bytes("severe"))
self.assertEqual(
force_bytes(inst.reaction[0].substance.coding[0].code),
force_bytes("1160593"),
)
self.assertEqual(
force_bytes(inst.reaction[0].substance.coding[0].display),
force_bytes("cashew nut allergenic extract Injectable Product"),
)
self.assertEqual(
force_bytes(inst.reaction[0].substance.coding[0].system),
force_bytes("http://www.nlm.nih.gov/research/umls/rxnorm"),
)
self.assertEqual(
force_bytes(inst.reaction[1].manifestation[0].coding[0].code),
force_bytes("64305001"),
)
self.assertEqual(
force_bytes(inst.reaction[1].manifestation[0].coding[0].display),
force_bytes("Urticaria"),
)
self.assertEqual(
force_bytes(inst.reaction[1].manifestation[0].coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(
force_bytes(inst.reaction[1].note[0].text),
force_bytes(
"The patient reports that the onset of urticaria was within 15 minutes of eating cashews."
),
)
self.assertEqual(inst.reaction[1].onset.date, FHIRDate("2004").date)
self.assertEqual(inst.reaction[1].onset.as_json(), "2004")
self.assertEqual(
force_bytes(inst.reaction[1].severity), force_bytes("moderate")
)
self.assertEqual(
inst.recordedDate.date, FHIRDate("2014-10-09T14:58:00+11:00").date
)
self.assertEqual(inst.recordedDate.as_json(), "2014-10-09T14:58:00+11:00")
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.type), force_bytes("allergy"))
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].code),
force_bytes("confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].display),
force_bytes("Confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-verification"
),
)
def testAllergyIntolerance3(self):
inst = self.instantiate_from("allergyintolerance-nka.json")
self.assertIsNotNone(
inst, "Must have instantiated a AllergyIntolerance instance"
)
self.implAllergyIntolerance3(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance3(inst2)
def implAllergyIntolerance3(self, inst):
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].code), force_bytes("active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].display), force_bytes("Active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].code), force_bytes("716186003")
)
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes("No Known Allergy (situation)"),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(force_bytes(inst.code.text), force_bytes("NKA"))
self.assertEqual(force_bytes(inst.id), force_bytes("nka"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(
inst.recordedDate.date, FHIRDate("2015-08-06T15:37:31-06:00").date
)
self.assertEqual(inst.recordedDate.as_json(), "2015-08-06T15:37:31-06:00")
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].code),
force_bytes("confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].display),
force_bytes("Confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-verification"
),
)
def testAllergyIntolerance4(self):
inst = self.instantiate_from("allergyintolerance-medication.json")
self.assertIsNotNone(
inst, "Must have instantiated a AllergyIntolerance instance"
)
self.implAllergyIntolerance4(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance4(inst2)
def implAllergyIntolerance4(self, inst):
self.assertEqual(force_bytes(inst.category[0]), force_bytes("medication"))
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].code), force_bytes("active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].display), force_bytes("Active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"
),
)
self.assertEqual(force_bytes(inst.code.coding[0].code), force_bytes("7980"))
self.assertEqual(
force_bytes(inst.code.coding[0].display), force_bytes("Penicillin G")
)
self.assertEqual(
force_bytes(inst.code.coding[0].system),
force_bytes("http://www.nlm.nih.gov/research/umls/rxnorm"),
)
self.assertEqual(force_bytes(inst.criticality), force_bytes("high"))
self.assertEqual(force_bytes(inst.id), force_bytes("medication"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(
force_bytes(inst.reaction[0].manifestation[0].coding[0].code),
force_bytes("247472004"),
)
self.assertEqual(
force_bytes(inst.reaction[0].manifestation[0].coding[0].display),
force_bytes("Hives"),
)
self.assertEqual(
force_bytes(inst.reaction[0].manifestation[0].coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(inst.recordedDate.date, FHIRDate("2010-03-01").date)
self.assertEqual(inst.recordedDate.as_json(), "2010-03-01")
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].code),
force_bytes("unconfirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].display),
force_bytes("Unconfirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-verification"
),
)
def testAllergyIntolerance5(self):
inst = self.instantiate_from("allergyintolerance-fishallergy.json")
self.assertIsNotNone(
inst, "Must have instantiated a AllergyIntolerance instance"
)
self.implAllergyIntolerance5(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance5(inst2)
def implAllergyIntolerance5(self, inst):
self.assertEqual(force_bytes(inst.category[0]), force_bytes("food"))
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].code), force_bytes("active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].display), force_bytes("Active")
)
self.assertEqual(
force_bytes(inst.clinicalStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"
),
)
self.assertEqual(
force_bytes(inst.code.coding[0].code), force_bytes("227037002")
)
self.assertEqual(
force_bytes(inst.code.coding[0].display),
force_bytes("Fish - dietary (substance)"),
)
self.assertEqual(
force_bytes(inst.code.coding[0].system),
force_bytes("http://snomed.info/sct"),
)
self.assertEqual(
force_bytes(inst.code.text),
force_bytes("Allergic to fresh fish. Tolerates canned fish"),
)
self.assertEqual(force_bytes(inst.id), force_bytes("fishallergy"))
self.assertEqual(
force_bytes(inst.identifier[0].system),
force_bytes("http://acme.com/ids/patients/risks"),
)
self.assertEqual(force_bytes(inst.identifier[0].value), force_bytes("49476535"))
self.assertEqual(force_bytes(inst.meta.tag[0].code), force_bytes("HTEST"))
self.assertEqual(
force_bytes(inst.meta.tag[0].display), force_bytes("test health data")
)
self.assertEqual(
force_bytes(inst.meta.tag[0].system),
force_bytes("http://terminology.hl7.org/CodeSystem/v3-ActReason"),
)
self.assertEqual(
inst.recordedDate.date, FHIRDate("2015-08-06T15:37:31-06:00").date
)
self.assertEqual(inst.recordedDate.as_json(), "2015-08-06T15:37:31-06:00")
self.assertEqual(force_bytes(inst.text.status), force_bytes("additional"))
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].code),
force_bytes("confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].display),
force_bytes("Confirmed"),
)
self.assertEqual(
force_bytes(inst.verificationStatus.coding[0].system),
force_bytes(
"http://terminology.hl7.org/CodeSystem/allergyintolerance-verification"
),
)
def testAllergyIntolerance6(self):
inst = self.instantiate_from("allergyintolerance-nkda.json")
self.assertIsNotNone(
inst, "Must have instantiated a AllergyIntolerance instance"
)
self.implAllergyIntolerance6(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance6(inst2)
def implAllergyIntolerance6(self, inst):
    """Assert every expected field of the parsed nkda AllergyIntolerance example."""
    # (actual attribute, expected value) pairs, compared as bytes in the
    # same order the original hand-rolled assertions used.
    byte_pairs = [
        (inst.clinicalStatus.coding[0].code, "active"),
        (inst.clinicalStatus.coding[0].display, "Active"),
        (
            inst.clinicalStatus.coding[0].system,
            "http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical",
        ),
        (inst.code.coding[0].code, "409137002"),
        (inst.code.coding[0].display, "No Known Drug Allergy (situation)"),
        (inst.code.coding[0].system, "http://snomed.info/sct"),
        (inst.code.text, "NKDA"),
        (inst.id, "nkda"),
        (inst.meta.tag[0].code, "HTEST"),
        (inst.meta.tag[0].display, "test health data"),
        (inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason"),
    ]
    for actual, expected in byte_pairs:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
    # recordedDate must match both as a parsed date and in its JSON form.
    self.assertEqual(
        inst.recordedDate.date, FHIRDate("2015-08-06T15:37:31-06:00").date
    )
    self.assertEqual(inst.recordedDate.as_json(), "2015-08-06T15:37:31-06:00")
    for actual, expected in [
        (inst.text.status, "generated"),
        (inst.verificationStatus.coding[0].code, "confirmed"),
        (inst.verificationStatus.coding[0].display, "Confirmed"),
        (
            inst.verificationStatus.coding[0].system,
            "http://terminology.hl7.org/CodeSystem/allergyintolerance-verification",
        ),
    ]:
        self.assertEqual(force_bytes(actual), force_bytes(expected))
| 40.758034
| 124
| 0.619313
| 2,214
| 21,561
| 5.912376
| 0.103433
| 0.181054
| 0.18029
| 0.225363
| 0.847441
| 0.840107
| 0.799236
| 0.782047
| 0.763636
| 0.756455
| 0
| 0.035481
| 0.254905
| 21,561
| 528
| 125
| 40.835227
| 0.779334
| 0.008441
| 0
| 0.571429
| 0
| 0
| 0.17229
| 0.021244
| 0
| 0
| 0
| 0
| 0.303823
| 1
| 0.026157
| false
| 0
| 0.016097
| 0
| 0.046278
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c6eeb9023482e10f5b4fa845aad6b92ecc185fdc
| 7,747
|
py
|
Python
|
tests/controllers/test_query_param_controller.py
|
mnaumanali94/PYTHON-SDK
|
97eceab462d86b8666ff1f74830d30cae5202a35
|
[
"MIT"
] | null | null | null |
tests/controllers/test_query_param_controller.py
|
mnaumanali94/PYTHON-SDK
|
97eceab462d86b8666ff1f74830d30cae5202a35
|
[
"MIT"
] | null | null | null |
tests/controllers/test_query_param_controller.py
|
mnaumanali94/PYTHON-SDK
|
97eceab462d86b8666ff1f74830d30cae5202a35
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
tests.controllers.test_query_param_controller
This file was automatically generated for Stamplay by APIMATIC BETA v2.0 on 08/03/2016
"""
import jsonpickle
from tests.controllers.controller_test_base import *
class QueryParamControllerTests(ControllerTestBase):
    """Integration tests for the query-parameter endpoints of the SDK controller.

    Fixes relative to the generated original:
      * ``self.assertEquals`` (deprecated alias, removed in Python 3.12) is
        replaced by ``self.assertEqual`` / ``self.assertIn``;
      * the identical response-validation boilerplate repeated in every test
        is factored into :meth:`_assert_passed`.
    """

    @classmethod
    def setUpClass(cls):
        super(QueryParamControllerTests, cls).setUpClass()
        cls.controller = cls.api_client.query_param

    def _assert_passed(self, result, valid_statuses=(200,)):
        """Shared response validation used by every test.

        Checks that the captured HTTP status is one of *valid_statuses*,
        that the SDK returned a non-None result, and that the response body
        matches ``{"passed": true}``.
        """
        self.assertIn(self.response_catcher.response.status_code, valid_statuses)
        self.assertIsNotNone(result)
        expected_body = APIHelper.json_deserialize('{"passed":true}')
        received_body = APIHelper.json_deserialize(
            self.response_catcher.response.raw_body)
        self.assertTrue(
            TestHelper.match_body(expected_body, received_body, check_values=True))

    def test_simple_query(self):
        """Boolean, number and string sent together as simple query params."""
        boolean = True
        number = 4
        string = 'TestString'
        # dictionary for optional query parameters
        optional_query_parameters = {}
        result = self.controller.simple_query(
            boolean, number, string, optional_query_parameters)
        self._assert_passed(result)

    def test_no_params(self):
        """Endpoint called with no query parameters at all."""
        result = self.controller.no_params()
        self._assert_passed(result)

    def test_string_param(self):
        """A string full of reserved/escape characters survives URL encoding."""
        string = 'l;asd;asdwe[2304&&;\'.d??\\a\\\\\\;sd//'
        result = self.controller.string_param(string)
        self._assert_passed(result)

    def test_url_param(self):
        """A full URL (with its own query string) passed as a parameter value."""
        url = 'https://www.shahidisawesome.com/and/also/a/narcissist?thisis=aparameter&another=one'
        result = self.controller.url_param(url)
        self._assert_passed(result)

    def test_multiple_params(self):
        """Mixed numeric, string and URL parameters in one call."""
        number = 123412312
        precision = 1112.34
        string = '""test./;";12&&3asl"";"qw1&34"///..//.'
        url = 'http://www.abc.com/test?a=b&c="http://lolol.com?param=no&another=lol"'
        result = self.controller.multiple_params(number, precision, string, url)
        self._assert_passed(result)

    def test_number_array(self):
        """Integer array as a query parameter; any 2xx status (200-208) accepted."""
        integers = APIHelper.json_deserialize('[1,2,3,4,5]')
        result = self.controller.number_array(integers)
        self._assert_passed(result, valid_statuses=range(200, 209))

    def test_string_array(self):
        """String array as a query parameter."""
        strings = APIHelper.json_deserialize('["abc", "def"]')
        result = self.controller.string_array(strings)
        self._assert_passed(result)

    def test_string_enum_array(self):
        """Array of string-enum values as a query parameter."""
        days = APIHelper.json_deserialize(
            '["Tuesday", "Saturday", "Wednesday", "Monday", "Sunday"]')
        result = self.controller.string_enum_array(days)
        self._assert_passed(result)

    def test_integer_enum_array(self):
        """Array of integer-enum values as a query parameter."""
        suites = APIHelper.json_deserialize('[1, 3, 4, 2, 3]')
        result = self.controller.integer_enum_array(suites)
        self._assert_passed(result)
| 41.427807
| 101
| 0.700529
| 933
| 7,747
| 5.630225
| 0.150054
| 0.054445
| 0.100514
| 0.092519
| 0.762421
| 0.762421
| 0.739577
| 0.71521
| 0.701504
| 0.701504
| 0
| 0.012631
| 0.213115
| 7,747
| 186
| 102
| 41.650538
| 0.849081
| 0.249387
| 0
| 0.52381
| 1
| 0
| 0.078408
| 0.006606
| 0
| 0
| 0
| 0.005376
| 0.321429
| 1
| 0.119048
| false
| 0.107143
| 0.02381
| 0
| 0.154762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c6f2ff86711c334b3062d6097e2808b816b494be
| 1,907
|
py
|
Python
|
crawl_requests/req_api.py
|
shinalone/crawl_requests
|
cda58c3092a9986d7c507361fdb1a0dc97d37daa
|
[
"MIT"
] | null | null | null |
crawl_requests/req_api.py
|
shinalone/crawl_requests
|
cda58c3092a9986d7c507361fdb1a0dc97d37daa
|
[
"MIT"
] | null | null | null |
crawl_requests/req_api.py
|
shinalone/crawl_requests
|
cda58c3092a9986d7c507361fdb1a0dc97d37daa
|
[
"MIT"
] | null | null | null |
#coding:utf-8
import random
import requests
def req_get(url :str, headers :dict, UA_pool :list, proxy_pool :list, **kwargs):
    """GET *url*, escalating through two fallback strategies on failure.

    Strategy:
      1. plain request with the caller's ``headers``;
      2. on a requests-level failure, retry with a random ``User-Agent``
         drawn from *UA_pool*;
      3. on failure again, retry with that User-Agent plus a random proxy
         drawn from *proxy_pool* (this final attempt's exception propagates).

    Fixes relative to the original: only ``requests.RequestException`` (and
    subclasses) triggers a retry — the bare ``except:`` clauses also swallowed
    ``KeyboardInterrupt`` and programming errors such as ``TypeError``; and
    the caller's ``headers`` dict is copied instead of being mutated in place.
    The session is always closed via ``finally``.

    Returns the ``requests.Response`` of the first successful attempt.
    """
    session = requests.Session()
    try:
        try:
            return session.get(url, headers=headers, **kwargs)
        except requests.RequestException:
            # Copy so the caller's dict is not mutated by the retry.
            retry_headers = dict(headers) if headers else {}
            retry_headers['User-Agent'] = random.choice(UA_pool)
            try:
                return session.get(url, headers=retry_headers, **kwargs)
            except requests.RequestException:
                return session.get(url, headers=retry_headers,
                                   proxies=random.choice(proxy_pool), **kwargs)
    finally:
        session.close()
def req_post(url :str, headers :dict, UA_pool :list, proxy_pool :list, **kwargs):
    """POST to *url*, escalating through two fallback strategies on failure.

    Mirrors :func:`req_get`:
      1. plain request with the caller's ``headers``;
      2. on a requests-level failure, retry with a random ``User-Agent``
         drawn from *UA_pool*;
      3. on failure again, retry with that User-Agent plus a random proxy
         drawn from *proxy_pool* (this final attempt's exception propagates).

    Fixes relative to the original: only ``requests.RequestException`` (and
    subclasses) triggers a retry — the bare ``except:`` clauses also swallowed
    ``KeyboardInterrupt`` and programming errors such as ``TypeError``; and
    the caller's ``headers`` dict is copied instead of being mutated in place.
    The session is always closed via ``finally``.

    Returns the ``requests.Response`` of the first successful attempt.
    """
    session = requests.Session()
    try:
        try:
            return session.post(url, headers=headers, **kwargs)
        except requests.RequestException:
            # Copy so the caller's dict is not mutated by the retry.
            retry_headers = dict(headers) if headers else {}
            retry_headers['User-Agent'] = random.choice(UA_pool)
            try:
                return session.post(url, headers=retry_headers, **kwargs)
            except requests.RequestException:
                return session.post(url, headers=retry_headers,
                                    proxies=random.choice(proxy_pool), **kwargs)
    finally:
        session.close()
| 33.45614
| 94
| 0.548506
| 215
| 1,907
| 4.790698
| 0.176744
| 0.058252
| 0.116505
| 0.163107
| 0.946602
| 0.946602
| 0.946602
| 0.946602
| 0.939806
| 0.939806
| 0
| 0.00077
| 0.318825
| 1,907
| 56
| 95
| 34.053571
| 0.792148
| 0.080755
| 0
| 0.869565
| 0
| 0
| 0.045767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.043478
| 0
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05a21014f8db5d36c7bfd9ecf3d0d5fc3f691fda
| 142,454
|
py
|
Python
|
omegapoint/schema.py
|
RossFabricant/omegapoint
|
b0a8c419a64395d62976dc39847b094b4f85d47c
|
[
"MIT"
] | 6
|
2020-07-10T15:07:58.000Z
|
2021-08-12T23:13:48.000Z
|
omegapoint/schema.py
|
RossFabricant/omegapoint
|
b0a8c419a64395d62976dc39847b094b4f85d47c
|
[
"MIT"
] | null | null | null |
omegapoint/schema.py
|
RossFabricant/omegapoint
|
b0a8c419a64395d62976dc39847b094b4f85d47c
|
[
"MIT"
] | 1
|
2020-06-23T14:54:14.000Z
|
2020-06-23T14:54:14.000Z
|
import sgqlc.types
import sgqlc.types.datetime
schema = sgqlc.types.Schema()
########################################################################
# Scalars and Enumerations
########################################################################
# NOTE(review): auto-generated sgqlc schema bindings — regenerate rather than
# hand-edit.  Each ``sgqlc.types.Enum`` subclass lists its GraphQL enum values
# in ``__choices__``; each ``sgqlc.types.Scalar`` subclass declares a custom
# GraphQL scalar; the bare assignments (Boolean, Date, DateTime, Float, Int,
# String) re-export sgqlc's built-in scalars into this module's namespace so
# the generated field declarations below can reference them directly.
class ActiveContributorType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('WEIGHT', 'EXPOSURE')


class Aggregation(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('LAST',)


class BetaType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('PREDICTED', 'HISTORICAL')


Boolean = sgqlc.types.Boolean


class CompositionConstraintType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('CLASSIFICATION', 'SECTOR', 'INDUSTRY_GROUP', 'INDUSTRY', 'CURRENCY', 'COUNTRY')


class ContributorGroupType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('SECTOR', 'INDUSTRY_GROUP', 'INDUSTRY', 'CLASSIFICATION', 'CURRENCY', 'COUNTRY', 'LONG_SHORT')


class Cusip(sgqlc.types.Scalar):
    # Custom scalar for CUSIP security identifiers.
    __schema__ = schema


Date = sgqlc.types.datetime.Date
DateTime = sgqlc.types.datetime.DateTime


class Descriptor(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('ASSET_CLASS', 'COUNTRY', 'CURRENCY', 'AVERAGE_DAILY_VOLUME', 'SECTOR', 'MARKET_CAPITALIZATION')


class EquityIdFormat(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('SEDOL', 'MODEL_PROVIDER_ID', 'ISIN', 'CUSIP')


class ExperimentType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('SIMULATION', 'PORTFOLIO', 'OPTIMIZATION', 'SMART_TRADE')


Float = sgqlc.types.Float


class ImpliedReturnsType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('MARKET',)


class IndustryClassificationType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('SECTOR', 'INDUSTRY_GROUP', 'INDUSTRY')


Int = sgqlc.types.Int


class Interval(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('END_DATE', 'WEEKLY_START_DATES', 'MONTHLY_START_DATES', 'AUTO')


class Isin(sgqlc.types.Scalar):
    # Custom scalar for ISIN security identifiers.
    __schema__ = schema


class PositionSetInterval(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('END_DATE', 'POSITION_SET_DATES', 'WEEKLY_START_DATES', 'MONTHLY_START_DATES', 'AUTO')


class PositionSetType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('PORTFOLIO', 'BENCHMARK', 'SWAP', 'RESEARCH_TOPIC')


class ReferenceInstrumentType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('SECURITY',)


class ResourceType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('PORTFOLIO', 'RESEARCH_TOPIC')


class RiskType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('TOTAL', 'VAR_DECOMP_SPECIFIC', 'VAR_DECOMP_FACTORS')


class ScaleFormat(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('DEFAULT', 'PERCENT_GMV', 'PERCENT_MODELED_GMV', 'PERCENT_EQUITY_GMV', 'PERCENT_EQUITY_MODELED_GMV')


class SecurityListType(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('WATCHLIST',)


class Sedol(sgqlc.types.Scalar):
    # Custom scalar for SEDOL security identifiers.
    __schema__ = schema


class SegmentBookFilter(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('LONG', 'SHORT')


class SegmentNormalization(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('STANDALONE', 'CONTRIBUTION')


class ShortId(sgqlc.types.Scalar):
    # Custom scalar for short object identifiers.
    __schema__ = schema


class SortDirection(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('ASC', 'DESC')


String = sgqlc.types.String


class Universe(sgqlc.types.Enum):
    __schema__ = schema
    __choices__ = ('PORTFOLIO', 'WATCHLIST', 'ETF', 'SWAP')
########################################################################
# Input Objects
########################################################################
class BetaConstraintInput(sgqlc.types.Input):
__schema__ = schema
historical = sgqlc.types.Field('MinMaxInput', graphql_name='historical')
predicted = sgqlc.types.Field('MinMaxInput', graphql_name='predicted')
class ClassificationSecurityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('UniversalIdInput'), graphql_name='id')
as_of = sgqlc.types.Field(Date, graphql_name='asOf')
classification = sgqlc.types.Field(sgqlc.types.non_null('ClassificationSecurityValueInput'), graphql_name='classification')
class ClassificationSecurityValueInput(sgqlc.types.Input):
__schema__ = schema
value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='value')
class ClassificationSort(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
tier = sgqlc.types.Field(String, graphql_name='tier')
class ContentSetDateInput(sgqlc.types.Input):
__schema__ = schema
date = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name='date')
securities = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('ContentSetDateSecurityInput'))), graphql_name='securities')
class ContentSetDateSecurityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('UniversalIdInput'), graphql_name='id')
factors = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('ContentSetFactorValueInput'))), graphql_name='factors')
class ContentSetFactorValueInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
value = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='value')
class CustomExposureObjectiveTerm(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
weight = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='weight')
content_set_id = sgqlc.types.Field(String, graphql_name='contentSetId')
class CustomObjectiveTermWeight(sgqlc.types.Input):
__schema__ = schema
weight = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='weight')
class CustomOptimizationObjective(sgqlc.types.Input):
__schema__ = schema
minimize_risk = sgqlc.types.Field('CustomRiskObjectiveTerm', graphql_name='minimizeRisk')
maximize_forecast_return = sgqlc.types.Field(CustomObjectiveTermWeight, graphql_name='maximizeForecastReturn')
minimize_market_impact = sgqlc.types.Field(CustomObjectiveTermWeight, graphql_name='minimizeMarketImpact')
maximize_exposures = sgqlc.types.Field(sgqlc.types.list_of(CustomExposureObjectiveTerm), graphql_name='maximizeExposures')
minimize_exposures = sgqlc.types.Field(sgqlc.types.list_of(CustomExposureObjectiveTerm), graphql_name='minimizeExposures')
class CustomRiskObjectiveTerm(sgqlc.types.Input):
__schema__ = schema
factor_risk = sgqlc.types.Field(CustomObjectiveTermWeight, graphql_name='factorRisk')
specific_risk = sgqlc.types.Field(CustomObjectiveTermWeight, graphql_name='specificRisk')
base = sgqlc.types.Field(sgqlc.types.list_of('PositionSetInput'), graphql_name='base')
class DeleteClassificationSecurityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('UniversalIdInput'), graphql_name='id')
all = sgqlc.types.Field(Boolean, graphql_name='all')
from_ = sgqlc.types.Field(Date, graphql_name='from')
to = sgqlc.types.Field(Date, graphql_name='to')
class DeleteForecastSecurityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('UniversalIdInput'), graphql_name='id')
all = sgqlc.types.Field(Boolean, graphql_name='all')
from_ = sgqlc.types.Field(Date, graphql_name='from')
to = sgqlc.types.Field(Date, graphql_name='to')
class ExposureConstraint(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
content_set_id = sgqlc.types.Field(String, graphql_name='contentSetId')
max = sgqlc.types.Field(Float, graphql_name='max')
min = sgqlc.types.Field(Float, graphql_name='min')
base = sgqlc.types.Field('PositionSetInput', graphql_name='base')
min_relative = sgqlc.types.Field(Float, graphql_name='minRelative')
max_relative = sgqlc.types.Field(Float, graphql_name='maxRelative')
class FactorExposureTarget(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(String, graphql_name='id')
target = sgqlc.types.Field(Float, graphql_name='target')
class ForecastCreate(sgqlc.types.Input):
__schema__ = schema
name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
description = sgqlc.types.Field(String, graphql_name='description')
class ForecastEquityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field('PositionSetEquityIdInput', graphql_name='id')
annualized_return = sgqlc.types.Field(Float, graphql_name='annualizedReturn')
expected_percent_return = sgqlc.types.Field(Float, graphql_name='expectedPercentReturn')
class ForecastExpectedReturnInput(sgqlc.types.Input):
__schema__ = schema
return_ = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='return')
horizon = sgqlc.types.Field(Int, graphql_name='horizon')
class ForecastInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(ShortId, graphql_name='id')
implied_returns = sgqlc.types.Field(ImpliedReturnsType, graphql_name='impliedReturns')
horizon = sgqlc.types.Field(Int, graphql_name='horizon')
equities = sgqlc.types.Field(sgqlc.types.list_of(ForecastEquityInput), graphql_name='equities')
swaps = sgqlc.types.Field(sgqlc.types.list_of('ForecastSwapInput'), graphql_name='swaps')
class ForecastSecurityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('UniversalIdInput'), graphql_name='id')
label = sgqlc.types.Field(String, graphql_name='label')
as_of = sgqlc.types.Field(Date, graphql_name='asOf')
expected_return = sgqlc.types.Field(sgqlc.types.non_null(ForecastExpectedReturnInput), graphql_name='expectedReturn')
class ForecastSwapInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(String, graphql_name='id')
expected_percent_return = sgqlc.types.Field(Float, graphql_name='expectedPercentReturn')
class ForecastUpdate(sgqlc.types.Input):
__schema__ = schema
name = sgqlc.types.Field(String, graphql_name='name')
description = sgqlc.types.Field(String, graphql_name='description')
class MinMax(sgqlc.types.Input):
__schema__ = schema
min = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='min')
max = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='max')
class MinMaxConstraintInput(sgqlc.types.Input):
__schema__ = schema
max = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='max')
class MinMaxInput(sgqlc.types.Input):
__schema__ = schema
min = sgqlc.types.Field(Float, graphql_name='min')
max = sgqlc.types.Field(Float, graphql_name='max')
class NewExperiment(sgqlc.types.Input):
__schema__ = schema
name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
type = sgqlc.types.Field(sgqlc.types.non_null(ExperimentType), graphql_name='type')
description = sgqlc.types.Field(String, graphql_name='description')
class NewPortfolio(sgqlc.types.Input):
__schema__ = schema
alias = sgqlc.types.Field(String, graphql_name='alias')
name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
description = sgqlc.types.Field(String, graphql_name='description')
default_model_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='defaultModelId')
rollover_position_set_to_current_date = sgqlc.types.Field(Boolean, graphql_name='rolloverPositionSetToCurrentDate')
class NewResearchTopic(sgqlc.types.Input):
__schema__ = schema
name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
description = sgqlc.types.Field(String, graphql_name='description')
reference_instrument = sgqlc.types.Field(sgqlc.types.non_null('ReferenceInstrumentInput'), graphql_name='referenceInstrument')
class NewSwap(sgqlc.types.Input):
__schema__ = schema
ticker = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='ticker')
description = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='description')
termination_date = sgqlc.types.Field(Date, graphql_name='terminationDate')
class OptimizationCompositionConstraint(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
type = sgqlc.types.Field(sgqlc.types.non_null(CompositionConstraintType), graphql_name='type')
classification_id = sgqlc.types.Field(String, graphql_name='classificationId')
classification_tier = sgqlc.types.Field(String, graphql_name='classificationTier')
max_economic_exposure = sgqlc.types.Field(Float, graphql_name='maxEconomicExposure')
min_economic_exposure = sgqlc.types.Field(Float, graphql_name='minEconomicExposure')
max_percent_equity = sgqlc.types.Field(Float, graphql_name='maxPercentEquity')
min_percent_equity = sgqlc.types.Field(Float, graphql_name='minPercentEquity')
max_relative_percent_equity = sgqlc.types.Field(Float, graphql_name='maxRelativePercentEquity')
min_relative_percent_equity = sgqlc.types.Field(Float, graphql_name='minRelativePercentEquity')
base = sgqlc.types.Field('PositionSetInput', graphql_name='base')
class OptimizationConstantsInput(sgqlc.types.Input):
__schema__ = schema
equity = sgqlc.types.Field(Float, graphql_name='equity')
class OptimizationConstraints(sgqlc.types.Input):
__schema__ = schema
max_turnover = sgqlc.types.Field(Float, graphql_name='maxTurnover')
risk = sgqlc.types.Field('RiskConstraintInput', graphql_name='risk')
max_liquidation_days = sgqlc.types.Field(Float, graphql_name='maxLiquidationDays')
max_concentration = sgqlc.types.Field(Float, graphql_name='maxConcentration')
min_concentration = sgqlc.types.Field(Float, graphql_name='minConcentration')
exposure = sgqlc.types.Field(sgqlc.types.list_of(ExposureConstraint), graphql_name='exposure')
securities = sgqlc.types.Field('SecurityConstraintInput', graphql_name='securities')
composition = sgqlc.types.Field(sgqlc.types.list_of(OptimizationCompositionConstraint), graphql_name='composition')
min_trade = sgqlc.types.Field(Float, graphql_name='minTrade')
max_trade = sgqlc.types.Field('OptimizationMaxTradeConstraint', graphql_name='maxTrade')
long_market_value = sgqlc.types.Field(Float, graphql_name='longMarketValue')
short_market_value = sgqlc.types.Field(Float, graphql_name='shortMarketValue')
min_short_market_value = sgqlc.types.Field(Float, graphql_name='minShortMarketValue')
max_short_market_value = sgqlc.types.Field(Float, graphql_name='maxShortMarketValue')
min_long_market_value = sgqlc.types.Field(Float, graphql_name='minLongMarketValue')
max_long_market_value = sgqlc.types.Field(Float, graphql_name='maxLongMarketValue')
gmv = sgqlc.types.Field(Float, graphql_name='GMV')
min_gmv = sgqlc.types.Field(Float, graphql_name='minGMV')
max_gmv = sgqlc.types.Field(Float, graphql_name='maxGMV')
net_exposure = sgqlc.types.Field(Float, graphql_name='netExposure')
min_net_exposure = sgqlc.types.Field(Float, graphql_name='minNetExposure')
max_net_exposure = sgqlc.types.Field(Float, graphql_name='maxNetExposure')
max_positions = sgqlc.types.Field(Int, graphql_name='maxPositions')
security_min_trade = sgqlc.types.Field('SecurityMinTradeInput', graphql_name='securityMinTrade')
fix_position_set_securities = sgqlc.types.Field(Boolean, graphql_name='fixPositionSetSecurities')
trade_swaps = sgqlc.types.Field(Boolean, graphql_name='tradeSwaps')
max_market_impact_cost = sgqlc.types.Field(Float, graphql_name='maxMarketImpactCost')
beta = sgqlc.types.Field(BetaConstraintInput, graphql_name='beta')
class OptimizationConstraintsOptionsInput(sgqlc.types.Input):
__schema__ = schema
ignore_adv = sgqlc.types.Field(Boolean, graphql_name='ignoreADV')
class OptimizationMaxTradeConstraint(sgqlc.types.Input):
__schema__ = schema
percent_equity = sgqlc.types.Field(Float, graphql_name='percentEquity')
percent_adv = sgqlc.types.Field(Float, graphql_name='percentADV')
percent_original_economic_exposure = sgqlc.types.Field(Float, graphql_name='percentOriginalEconomicExposure')
class OptimizationObjective(sgqlc.types.Input):
__schema__ = schema
minimize_factor_risk = sgqlc.types.Field(Boolean, graphql_name='minimizeFactorRisk')
minimize_total_risk = sgqlc.types.Field(Boolean, graphql_name='minimizeTotalRisk')
target_exposures = sgqlc.types.Field(sgqlc.types.list_of('TargetExposure'), graphql_name='targetExposures')
target_total_risk = sgqlc.types.Field(Float, graphql_name='targetTotalRisk')
target_factor_risk = sgqlc.types.Field(Float, graphql_name='targetFactorRisk')
target_positions = sgqlc.types.Field(Float, graphql_name='targetPositions')
custom = sgqlc.types.Field(CustomOptimizationObjective, graphql_name='custom')
factor_exposure = sgqlc.types.Field(FactorExposureTarget, graphql_name='factorExposure')
weight = sgqlc.types.Field(Float, graphql_name='weight')
class OptimizationObjectiveOptionsInput(sgqlc.types.Input):
__schema__ = schema
include_market_impact = sgqlc.types.Field(Boolean, graphql_name='includeMarketImpact')
class OptimizationOptionsInput(sgqlc.types.Input):
__schema__ = schema
objectives = sgqlc.types.Field(OptimizationObjectiveOptionsInput, graphql_name='objectives')
constraints = sgqlc.types.Field(OptimizationConstraintsOptionsInput, graphql_name='constraints')
equity = sgqlc.types.Field(Float, graphql_name='equity')
max_time = sgqlc.types.Field(Float, graphql_name='maxTime')
class OptimizationSecuritiesInput(sgqlc.types.Input):
__schema__ = schema
long = sgqlc.types.Field('SecuritiesInput', graphql_name='long')
short = sgqlc.types.Field('SecuritiesInput', graphql_name='short')
long_or_short = sgqlc.types.Field('SecuritiesInput', graphql_name='longOrShort')
class OptimizationSecuritySearchInput(sgqlc.types.Input):
__schema__ = schema
filter = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('SecuritySearchFilter'))), graphql_name='filter')
sort = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchSort'), graphql_name='sort')
take = sgqlc.types.Field(Int, graphql_name='take')
class PnlDateInput(sgqlc.types.Input):
__schema__ = schema
date = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name='date')
equities = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlEquityInput')), graphql_name='equities')
currencies = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlOtherAssetInput')), graphql_name='currencies')
swaps = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlOtherAssetInput')), graphql_name='swaps')
fixed_income = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlFixedIncomeInput')), graphql_name='fixedIncome')
commodities = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlOtherAssetInput')), graphql_name='commodities')
indices = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlOtherAssetInput')), graphql_name='indices')
other_assets = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PnlOtherAssetInput')), graphql_name='otherAssets')
class PnlEquityInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('PositionSetEquityIdInput'), graphql_name='id')
amount = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='amount')
class PnlFixedIncomeInput(sgqlc.types.Input):
__schema__ = schema
id = sgqlc.types.Field(sgqlc.types.non_null('PositionSetFixedIncomeIdInput'), graphql_name='id')
amount = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='amount')
class PnlOtherAssetInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PnlOtherAssetInput``:
    a plain string asset identifier plus a P&L amount."""
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
    amount = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='amount')
class PortfolioUpdate(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PortfolioUpdate``.

    All fields are optional, consistent with a partial-update payload.
    """
    __schema__ = schema
    alias = sgqlc.types.Field(String, graphql_name='alias')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    default_model_id = sgqlc.types.Field(String, graphql_name='defaultModelId')
    rollover_position_set_to_current_date = sgqlc.types.Field(Boolean, graphql_name='rolloverPositionSetToCurrentDate')
class PositionSetDateInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PositionSetDateInput``.

    Positions for one date, broken out per asset class, plus an optional
    equity (capital base) figure.
    """
    __schema__ = schema
    date = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name='date')
    equity = sgqlc.types.Field(Float, graphql_name='equity')
    equities = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetEquityInput')), graphql_name='equities')
    currencies = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetOtherAssetInput')), graphql_name='currencies')
    swaps = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetOtherAssetInput')), graphql_name='swaps')
    fixed_income = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetFixedIncomeInput')), graphql_name='fixedIncome')
    commodities = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetOtherAssetInput')), graphql_name='commodities')
    indices = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetOtherAssetInput')), graphql_name='indices')
    other_assets = sgqlc.types.Field(sgqlc.types.list_of(sgqlc.types.non_null('PositionSetOtherAssetInput')), graphql_name='otherAssets')
class PositionSetEquityIdInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PositionSetEquityIdInput``.

    Equity identifier; all fields optional so a security may be addressed by
    any one of ticker+mic, SEDOL, ISIN, or CUSIP.
    """
    __schema__ = schema
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    mic = sgqlc.types.Field(String, graphql_name='mic')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    model_provider_id = sgqlc.types.Field(String, graphql_name='modelProviderId')
class PositionSetEquityInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PositionSetEquityInput``:
    an equity identifier plus its economic exposure."""
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(PositionSetEquityIdInput), graphql_name='id')
    economic_exposure = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='economicExposure')
class PositionSetFixedIncomeIdInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``PositionSetFixedIncomeIdInput``: fixed-income instruments are
    identified by ISIN only."""
    __schema__ = schema
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
class PositionSetFixedIncomeInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PositionSetFixedIncomeInput``:
    a fixed-income identifier plus its economic exposure."""
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(PositionSetFixedIncomeIdInput), graphql_name='id')
    economic_exposure = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='economicExposure')
class PositionSetInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PositionSetInput``.

    Refers either to a stored position set (``type``/``id``/``experiment_id``)
    or to inline ``dates`` data, optionally weighted and segmented.
    """
    __schema__ = schema
    type = sgqlc.types.Field(PositionSetType, graphql_name='type')
    id = sgqlc.types.Field(String, graphql_name='id')
    experiment_id = sgqlc.types.Field(String, graphql_name='experimentId')
    weight = sgqlc.types.Field(Float, graphql_name='weight')
    dates = sgqlc.types.Field(sgqlc.types.list_of(PositionSetDateInput), graphql_name='dates')
    segment = sgqlc.types.Field('Segment', graphql_name='segment')
class PositionSetOtherAssetInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``PositionSetOtherAssetInput``:
    a plain string asset identifier plus its economic exposure."""
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
    economic_exposure = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='economicExposure')
class ReferenceInstrumentInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``ReferenceInstrumentInput``:
    a required instrument type plus the security it refers to."""
    __schema__ = schema
    type = sgqlc.types.Field(sgqlc.types.non_null(ReferenceInstrumentType), graphql_name='type')
    security_id = sgqlc.types.Field(sgqlc.types.non_null('UniversalIdInput'), graphql_name='securityId')
class RiskConstraintInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``RiskConstraintInput``:
    optional min/max bounds on total and factor risk."""
    __schema__ = schema
    total = sgqlc.types.Field(MinMaxConstraintInput, graphql_name='total')
    factor = sgqlc.types.Field(MinMaxConstraintInput, graphql_name='factor')
class SecuritiesEquityInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritiesEquityInput``:
    wraps an equity identifier."""
    __schema__ = schema
    id = sgqlc.types.Field(PositionSetEquityIdInput, graphql_name='id')
class SecuritiesInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritiesInput``.

    A security universe given either by id/type of a stored list, by a
    security search, or by explicit equity/swap lists.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    type = sgqlc.types.Field(SecurityListType, graphql_name='type')
    security_search = sgqlc.types.Field(OptimizationSecuritySearchInput, graphql_name='securitySearch')
    equities = sgqlc.types.Field(sgqlc.types.list_of(SecuritiesEquityInput), graphql_name='equities')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('SecuritiesSwapInput'), graphql_name='swaps')
class SecuritiesSwapInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritiesSwapInput``:
    wraps a swap's string identifier."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
class SecurityConstraintInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecurityConstraintInput``:
    per-security constraints, split into equities and swaps."""
    __schema__ = schema
    equities = sgqlc.types.Field(sgqlc.types.list_of('SecurityConstraintInputEquity'), graphql_name='equities')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('SecurityConstraintInputSwap'), graphql_name='swaps')
class SecurityConstraintInputEquity(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecurityConstraintInputEquity``: exposure bounds for one equity,
    in absolute terms and/or as a percent of equity."""
    __schema__ = schema
    id = sgqlc.types.Field(PositionSetEquityIdInput, graphql_name='id')
    min_economic_exposure = sgqlc.types.Field(Float, graphql_name='minEconomicExposure')
    max_economic_exposure = sgqlc.types.Field(Float, graphql_name='maxEconomicExposure')
    min_percent_equity = sgqlc.types.Field(Float, graphql_name='minPercentEquity')
    max_percent_equity = sgqlc.types.Field(Float, graphql_name='maxPercentEquity')
class SecurityConstraintInputSwap(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecurityConstraintInputSwap``: exposure bounds for one swap,
    mirroring ``SecurityConstraintInputEquity`` but keyed by string id."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    min_economic_exposure = sgqlc.types.Field(Float, graphql_name='minEconomicExposure')
    max_economic_exposure = sgqlc.types.Field(Float, graphql_name='maxEconomicExposure')
    min_percent_equity = sgqlc.types.Field(Float, graphql_name='minPercentEquity')
    max_percent_equity = sgqlc.types.Field(Float, graphql_name='maxPercentEquity')
class SecurityMinTradeInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecurityMinTradeInput``:
    per-equity minimum trade sizes."""
    __schema__ = schema
    equities = sgqlc.types.Field(sgqlc.types.list_of('SecurityMinTradeInputEquity'), graphql_name='equities')
class SecurityMinTradeInputEquity(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecurityMinTradeInputEquity``: one equity plus its minimum trade."""
    __schema__ = schema
    id = sgqlc.types.Field(PositionSetEquityIdInput, graphql_name='id')
    min_trade = sgqlc.types.Field(Float, graphql_name='minTrade')
class SecuritySearchFilter(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritySearchFilter``.

    One security-search filter; every criterion is optional, each with its
    own typed sub-filter (string, float, beta, classification, ...).
    """
    __schema__ = schema
    factor_exposure = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchFilterFactorExposure'), graphql_name='factorExposure')
    country = sgqlc.types.Field('SecuritySearchFilterString', graphql_name='country')
    sector = sgqlc.types.Field('SecuritySearchFilterString', graphql_name='sector')
    classification = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchFilterClassification'), graphql_name='classification')
    currency = sgqlc.types.Field('SecuritySearchFilterString', graphql_name='currency')
    asset_class = sgqlc.types.Field('SecuritySearchFilterString', graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field('SecuritySearchFilterString', graphql_name='assetSubclass')
    average_daily_volume = sgqlc.types.Field('SecuritySearchFilterFloat', graphql_name='averageDailyVolume')
    market_capitalization = sgqlc.types.Field('SecuritySearchFilterFloat', graphql_name='marketCapitalization')
    beta = sgqlc.types.Field('SecuritySearchFilterBeta', graphql_name='beta')
    universe = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchFilterUniverse'), graphql_name='universe')
    securities = sgqlc.types.Field('SecuritySearchFilterSecurities', graphql_name='securities')
    risk = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchFilterRisk'), graphql_name='risk')
class SecuritySearchFilterBeta(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritySearchFilterBeta``:
    numeric filters on predicted and/or historical beta."""
    __schema__ = schema
    predicted = sgqlc.types.Field('SecuritySearchFilterFloat', graphql_name='predicted')
    historical = sgqlc.types.Field('SecuritySearchFilterFloat', graphql_name='historical')
class SecuritySearchFilterClassification(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecuritySearchFilterClassification``: membership/equality tests against
    a classification's tier values."""
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
    # Trailing underscore avoids the Python keyword; maps to GraphQL ``in``.
    in_ = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchFilterClassificationTier'), graphql_name='in')
    not_in = sgqlc.types.Field(sgqlc.types.list_of('SecuritySearchFilterClassificationTier'), graphql_name='notIn')
    eq = sgqlc.types.Field('SecuritySearchFilterClassificationTier', graphql_name='eq')
    neq = sgqlc.types.Field('SecuritySearchFilterClassificationTier', graphql_name='neq')
class SecuritySearchFilterClassificationTier(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecuritySearchFilterClassificationTier``: a required value within an
    optional classification tier."""
    __schema__ = schema
    tier = sgqlc.types.Field(String, graphql_name='tier')
    value = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='value')
class SecuritySearchFilterFactorExposure(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecuritySearchFilterFactorExposure``: comparison operators applied to a
    factor's exposure, identified by factor ``id`` (and optional content set).
    """
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
    content_set_id = sgqlc.types.Field(String, graphql_name='contentSetId')
    gt = sgqlc.types.Field(Float, graphql_name='gt')
    lt = sgqlc.types.Field(Float, graphql_name='lt')
    gte = sgqlc.types.Field(Float, graphql_name='gte')
    lte = sgqlc.types.Field(Float, graphql_name='lte')
    eq = sgqlc.types.Field(Float, graphql_name='eq')
    neq = sgqlc.types.Field(Float, graphql_name='neq')
    between = sgqlc.types.Field(MinMax, graphql_name='between')
    not_between = sgqlc.types.Field(MinMax, graphql_name='notBetween')
class SecuritySearchFilterFloat(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritySearchFilterFloat``:
    generic comparison operators over a float-valued attribute."""
    __schema__ = schema
    gt = sgqlc.types.Field(Float, graphql_name='gt')
    lt = sgqlc.types.Field(Float, graphql_name='lt')
    gte = sgqlc.types.Field(Float, graphql_name='gte')
    lte = sgqlc.types.Field(Float, graphql_name='lte')
    eq = sgqlc.types.Field(Float, graphql_name='eq')
    neq = sgqlc.types.Field(Float, graphql_name='neq')
    between = sgqlc.types.Field(MinMax, graphql_name='between')
    not_between = sgqlc.types.Field(MinMax, graphql_name='notBetween')
class SecuritySearchFilterRisk(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritySearchFilterRisk``:
    comparison operators over a risk measure selected by required ``type``."""
    __schema__ = schema
    type = sgqlc.types.Field(sgqlc.types.non_null(RiskType), graphql_name='type')
    gt = sgqlc.types.Field(Float, graphql_name='gt')
    lt = sgqlc.types.Field(Float, graphql_name='lt')
    gte = sgqlc.types.Field(Float, graphql_name='gte')
    lte = sgqlc.types.Field(Float, graphql_name='lte')
    eq = sgqlc.types.Field(Float, graphql_name='eq')
    neq = sgqlc.types.Field(Float, graphql_name='neq')
    between = sgqlc.types.Field(MinMax, graphql_name='between')
    not_between = sgqlc.types.Field(MinMax, graphql_name='notBetween')
class SecuritySearchFilterSecurities(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecuritySearchFilterSecurities``: include/exclude explicit security
    id lists."""
    __schema__ = schema
    # Trailing underscore avoids the Python keyword; maps to GraphQL ``in``.
    in_ = sgqlc.types.Field(sgqlc.types.list_of('UniversalIdInput'), graphql_name='in')
    not_in = sgqlc.types.Field(sgqlc.types.list_of('UniversalIdInput'), graphql_name='notIn')
class SecuritySearchFilterString(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritySearchFilterString``:
    membership/equality operators over a string-valued attribute."""
    __schema__ = schema
    # Trailing underscore avoids the Python keyword; maps to GraphQL ``in``.
    in_ = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='in')
    not_in = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='notIn')
    eq = sgqlc.types.Field(String, graphql_name='eq')
    neq = sgqlc.types.Field(String, graphql_name='neq')
class SecuritySearchFilterUniverse(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type
    ``SecuritySearchFilterUniverse``: include/exclude by universe type."""
    __schema__ = schema
    type = sgqlc.types.Field(sgqlc.types.non_null(Universe), graphql_name='type')
    # Trailing underscore avoids the Python keyword; maps to GraphQL ``in``.
    in_ = sgqlc.types.Field(String, graphql_name='in')
    not_in = sgqlc.types.Field(String, graphql_name='notIn')
class SecuritySearchSort(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SecuritySearchSort``.

    Selects one sort key (factor exposure, risk, beta, descriptor, or
    classification) plus a required sort direction.
    """
    __schema__ = schema
    content_set_id = sgqlc.types.Field(String, graphql_name='contentSetId')
    factor_exposure_id = sgqlc.types.Field(String, graphql_name='factorExposureId')
    risk = sgqlc.types.Field(RiskType, graphql_name='risk')
    beta = sgqlc.types.Field(BetaType, graphql_name='beta')
    descriptor = sgqlc.types.Field(Descriptor, graphql_name='descriptor')
    classification = sgqlc.types.Field(ClassificationSort, graphql_name='classification')
    direction = sgqlc.types.Field(sgqlc.types.non_null(SortDirection), graphql_name='direction')
class Segment(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``Segment``: a required set
    of filters plus a required normalization mode."""
    __schema__ = schema
    filters = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('SegmentFilter'))), graphql_name='filters')
    normalization = sgqlc.types.Field(sgqlc.types.non_null(SegmentNormalization), graphql_name='normalization')
class SegmentFilter(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SegmentFilter``.

    One segmentation criterion; reuses the ``SecuritySearchFilter*`` operator
    types, plus a long/short book filter.
    """
    __schema__ = schema
    long_short = sgqlc.types.Field(SegmentBookFilter, graphql_name='longShort')
    sector = sgqlc.types.Field(SecuritySearchFilterString, graphql_name='sector')
    classification = sgqlc.types.Field(sgqlc.types.list_of(SecuritySearchFilterClassification), graphql_name='classification')
    country = sgqlc.types.Field(SecuritySearchFilterString, graphql_name='country')
    currency = sgqlc.types.Field(SecuritySearchFilterString, graphql_name='currency')
    asset_class = sgqlc.types.Field(SecuritySearchFilterString, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(SecuritySearchFilterString, graphql_name='assetSubclass')
    average_daily_volume = sgqlc.types.Field(SecuritySearchFilterFloat, graphql_name='averageDailyVolume')
    market_capitalization = sgqlc.types.Field(SecuritySearchFilterFloat, graphql_name='marketCapitalization')
class SwapUpdate(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``SwapUpdate``: partial
    update of a swap's description and/or termination date."""
    __schema__ = schema
    description = sgqlc.types.Field(String, graphql_name='description')
    termination_date = sgqlc.types.Field(Date, graphql_name='terminationDate')
class TargetExposure(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``TargetExposure``: an id
    paired with a target exposure value."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    target = sgqlc.types.Field(Float, graphql_name='target')
class UniversalIdInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``UniversalIdInput``.

    All fields optional; note sedol/isin are plain ``String`` here, unlike
    the typed ``Sedol``/``Isin`` fields of ``PositionSetEquityIdInput``.
    """
    __schema__ = schema
    sedol = sgqlc.types.Field(String, graphql_name='sedol')
    isin = sgqlc.types.Field(String, graphql_name='isin')
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    mic = sgqlc.types.Field(String, graphql_name='mic')
    country = sgqlc.types.Field(String, graphql_name='country')
class UpdateExperiment(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``UpdateExperiment``:
    partial update of an experiment's name/description."""
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
class UpdateResearchTopic(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``UpdateResearchTopic``:
    partial update of a research topic's name/description."""
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
class WatchlistSecuritiesInput(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``WatchlistSecuritiesInput``:
    securities to place on a watchlist, broken out per asset class."""
    __schema__ = schema
    equities = sgqlc.types.Field(sgqlc.types.list_of(PositionSetEquityIdInput), graphql_name='equities')
    currencies = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='currencies')
    swaps = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='swaps')
    fixed_income = sgqlc.types.Field(sgqlc.types.list_of(PositionSetFixedIncomeIdInput), graphql_name='fixedIncome')
    commodities = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='commodities')
    indices = sgqlc.types.Field(sgqlc.types.list_of(String), graphql_name='indices')
class WatchlistUpdate(sgqlc.types.Input):
    """sgqlc binding for the GraphQL input type ``WatchlistUpdate``:
    partial update of a watchlist's name, description, or alias."""
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    alias = sgqlc.types.Field(String, graphql_name='alias')
########################################################################
# Output Objects and Interfaces
########################################################################
class BenchmarkMetadata(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``BenchmarkMetadata``:
    a benchmark's id, name, and date availability."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    current_date = sgqlc.types.Field(Date, graphql_name='currentDate')
class Beta(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Beta``: predicted and
    historical beta for one date (with roll-over provenance)."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    predicted = sgqlc.types.Field(Float, graphql_name='predicted')
    historical = sgqlc.types.Field(Float, graphql_name='historical')
class BetaContributor(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``BetaContributor``.

    One security's contribution to beta, with identifying attributes and a
    parameterized ``classification`` lookup.
    """
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    # Field with GraphQL arguments: classification(id!, tier).
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    description = sgqlc.types.Field(String, graphql_name='description')
    percent_equity = sgqlc.types.Field(Float, graphql_name='percentEquity')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    predicted = sgqlc.types.Field(Float, graphql_name='predicted')
    historical = sgqlc.types.Field(Float, graphql_name='historical')
class BetaContributorDate(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``BetaContributorDate``:
    beta contributors for a single (possibly rolled-over) date."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    contributors = sgqlc.types.Field(sgqlc.types.list_of(BetaContributor), graphql_name='contributors')
class BetaContributorGroup(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``BetaContributorGroup``:
    a named group of beta contributors with group-level totals."""
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    id = sgqlc.types.Field(String, graphql_name='id')
    total_percent_equity = sgqlc.types.Field(Float, graphql_name='totalPercentEquity')
    contributors = sgqlc.types.Field(sgqlc.types.list_of(BetaContributor), graphql_name='contributors')
    predicted = sgqlc.types.Field(Float, graphql_name='predicted')
    historical = sgqlc.types.Field(Float, graphql_name='historical')
class Category(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Category``: a factor
    category and its member factors."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    factors = sgqlc.types.Field(sgqlc.types.list_of('CategoryFactor'), graphql_name='factors')
class CategoryFactor(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``CategoryFactor``:
    id/name of a factor within a category."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
class Classification(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Classification``:
    a classification scheme and its versions; ``version`` looks up one
    version by required id."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    versions = sgqlc.types.Field(sgqlc.types.list_of('ClassificationVersion'), graphql_name='versions')
    version = sgqlc.types.Field('ClassificationVersion', graphql_name='version', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
class ClassificationDeleteResult(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``ClassificationDeleteResult``: number of records deleted."""
    __schema__ = schema
    count = sgqlc.types.Field(Int, graphql_name='count')
class ClassificationDetailsDate(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``ClassificationDetailsDate``: classified securities as of a date."""
    __schema__ = schema
    as_of = sgqlc.types.Field(Date, graphql_name='asOf')
    securities = sgqlc.types.Field(sgqlc.types.list_of('ClassificationSecurity'), graphql_name='securities')
class ClassificationMetadata(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ClassificationMetadata``:
    a classification's id, name, and versions."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    versions = sgqlc.types.Field(sgqlc.types.list_of('ClassificationVersion'), graphql_name='versions')
class ClassificationSecurity(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ClassificationSecurity``:
    one security's classification value as of a date."""
    __schema__ = schema
    id = sgqlc.types.Field('UniversalId', graphql_name='id')
    as_of = sgqlc.types.Field(Date, graphql_name='asOf')
    classification = sgqlc.types.Field('ClassificationSecurityValue', graphql_name='classification')
class ClassificationSecurityValue(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``ClassificationSecurityValue``: wraps a single classification value."""
    __schema__ = schema
    value = sgqlc.types.Field(String, graphql_name='value')
class ClassificationTiers(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ClassificationTiers``:
    one tier (level/id/name) of a classification hierarchy."""
    __schema__ = schema
    level = sgqlc.types.Field(Int, graphql_name='level')
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
class ClassificationUpdateResult(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``ClassificationUpdateResult``: success count plus any securities that
    could not be mapped."""
    __schema__ = schema
    success_count = sgqlc.types.Field(Int, graphql_name='successCount')
    unmapped_securities = sgqlc.types.Field(sgqlc.types.list_of(ClassificationSecurity), graphql_name='unmappedSecurities')
class ClassificationValue(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ClassificationValue``:
    a classification value; ``values`` is self-referential, forming a tree."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    values = sgqlc.types.Field(sgqlc.types.list_of('ClassificationValue'), graphql_name='values')
class ClassificationVersion(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ClassificationVersion``:
    one version of a classification, with its tiers and (optionally
    tier-filtered) values."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    as_of = sgqlc.types.Field(Date, graphql_name='asOf')
    tiers = sgqlc.types.Field(sgqlc.types.list_of(ClassificationTiers), graphql_name='tiers')
    values = sgqlc.types.Field(sgqlc.types.list_of(ClassificationValue), graphql_name='values', args=sgqlc.types.ArgDict((
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
class Composition(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Composition``.

    Portfolio composition for a date; ``composition_by`` groups positions by
    a required grouping type with optional classification parameters.
    """
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    gmv = sgqlc.types.Field(Float, graphql_name='gmv')
    modeled_gmv = sgqlc.types.Field(Float, graphql_name='modeledGmv')
    equity = sgqlc.types.Field(Float, graphql_name='equity')
    reference_equity = sgqlc.types.Field(Float, graphql_name='referenceEquity')
    positions_count = sgqlc.types.Field(Int, graphql_name='positionsCount')
    summary_stats = sgqlc.types.Field('CompositionSummaryStats', graphql_name='summaryStats')
    concentration = sgqlc.types.Field('CompositionConcentration', graphql_name='concentration')
    composition_by = sgqlc.types.Field(sgqlc.types.list_of('CompositionGroup'), graphql_name='compositionBy', args=sgqlc.types.ArgDict((
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    positions = sgqlc.types.Field(sgqlc.types.list_of('CompositionPositions'), graphql_name='positions')
class CompositionConcentration(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``CompositionConcentration``: min/max concentration values."""
    __schema__ = schema
    min = sgqlc.types.Field(Float, graphql_name='min')
    max = sgqlc.types.Field(Float, graphql_name='max')
class CompositionGroup(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``CompositionGroup``:
    one grouped composition row with exposure figures."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    description = sgqlc.types.Field(String, graphql_name='description')
    percent_equity = sgqlc.types.Field(Float, graphql_name='percentEquity')
    economic_exposure = sgqlc.types.Field(Float, graphql_name='economicExposure')
class CompositionPositions(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``CompositionPositions``.

    One position row, with parameterized ``industry_classification`` and
    ``classification`` lookups.
    """
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    # Field with GraphQL arguments: industryClassification(type!).
    industry_classification = sgqlc.types.Field('SecurityDescriptor', graphql_name='industryClassification', args=sgqlc.types.ArgDict((
        ('type', sgqlc.types.Arg(sgqlc.types.non_null(IndustryClassificationType), graphql_name='type', default=None)),
    ))
    )
    # Field with GraphQL arguments: classification(id!, tier).
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    description = sgqlc.types.Field(String, graphql_name='description')
    economic_exposure = sgqlc.types.Field(Float, graphql_name='economicExposure')
    average_daily_volume = sgqlc.types.Field(Float, graphql_name='averageDailyVolume')
    market_cap = sgqlc.types.Field(Float, graphql_name='marketCap')
class CompositionSummaryStats(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``CompositionSummaryStats``:
    average and max summary statistics."""
    __schema__ = schema
    average = sgqlc.types.Field('CompositionSummaryStatsAverage', graphql_name='average')
    max = sgqlc.types.Field('CompositionSummaryStatsMax', graphql_name='max')
class CompositionSummaryStatsAverage(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``CompositionSummaryStatsAverage``: average market capitalization."""
    __schema__ = schema
    market_capitalization = sgqlc.types.Field(Float, graphql_name='marketCapitalization')
class CompositionSummaryStatsMax(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``CompositionSummaryStatsMax``: maximum days-to-liquidate."""
    __schema__ = schema
    days_to_liquidate = sgqlc.types.Field(Float, graphql_name='daysToLiquidate')
class ContentSet(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSet``.

    A factor content set with availability, filterable factors/categories,
    and per-date values over a required [from, to] range.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    availability = sgqlc.types.Field('ContentSetAvailability', graphql_name='availability')
    factors = sgqlc.types.Field(sgqlc.types.list_of('ContentSetFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
    categories = sgqlc.types.Field(sgqlc.types.list_of(Category), graphql_name='categories', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
    # ``from_`` carries a trailing underscore to avoid the Python keyword
    # ``from``; it maps to the GraphQL argument named ``from``.
    dates = sgqlc.types.Field(sgqlc.types.list_of('ContentSetDate'), graphql_name='dates', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
class ContentSetAvailability(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSetAvailability``:
    a content set's start and current dates."""
    __schema__ = schema
    start_date = sgqlc.types.Field(Date, graphql_name='startDate')
    current_date = sgqlc.types.Field(Date, graphql_name='currentDate')
class ContentSetDate(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSetDate``:
    per-security content set values for one date."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    securities = sgqlc.types.Field(sgqlc.types.list_of('ContentSetDateSecurity'), graphql_name='securities')
class ContentSetDateSecurity(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSetDateSecurity``:
    one security's factor values within a content set date."""
    __schema__ = schema
    id = sgqlc.types.Field('UniversalId', graphql_name='id')
    factors = sgqlc.types.Field(sgqlc.types.list_of('ContentSetFactorValue'), graphql_name='factors')
class ContentSetFactor(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSetFactor``:
    a factor's id, name, and category."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
class ContentSetFactorValue(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSetFactorValue``:
    a factor id paired with its numeric value."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    value = sgqlc.types.Field(Float, graphql_name='value')
class ContentSetMetadata(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ContentSetMetadata``:
    a content set's id, name, factors, and categories."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    factors = sgqlc.types.Field(sgqlc.types.list_of(ContentSetFactor), graphql_name='factors')
    categories = sgqlc.types.Field(sgqlc.types.list_of(Category), graphql_name='categories')
class Coverage(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Coverage``.

    Model coverage for a date: a GMV summary plus missing securities broken
    out per asset class.
    """
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    summary = sgqlc.types.Field('CoverageSummary', graphql_name='summary')
    missing_equities = sgqlc.types.Field(sgqlc.types.list_of('MissingEquity'), graphql_name='missingEquities')
    missing_currencies = sgqlc.types.Field(sgqlc.types.list_of('MissingOtherAsset'), graphql_name='missingCurrencies')
    missing_swaps = sgqlc.types.Field(sgqlc.types.list_of('MissingOtherAsset'), graphql_name='missingSwaps')
    missing_commodities = sgqlc.types.Field(sgqlc.types.list_of('MissingOtherAsset'), graphql_name='missingCommodities')
    missing_indices = sgqlc.types.Field(sgqlc.types.list_of('MissingOtherAsset'), graphql_name='missingIndices')
    missing_fixed_income = sgqlc.types.Field(sgqlc.types.list_of('MissingFixedIncome'), graphql_name='missingFixedIncome')
    missing_other_assets = sgqlc.types.Field(sgqlc.types.list_of('MissingOtherAsset'), graphql_name='missingOtherAssets')
class CoverageSummary(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``CoverageSummary``:
    percent of GMV covered vs. not covered."""
    __schema__ = schema
    percent_gmv_available = sgqlc.types.Field(Float, graphql_name='percentGmvAvailable')
    percent_gmv_not_available = sgqlc.types.Field(Float, graphql_name='percentGmvNotAvailable')
class DailyPnlDate(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``DailyPnlDate``:
    a date paired with a P&L amount."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    amount = sgqlc.types.Field(Float, graphql_name='amount')
class DeleteResult(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``DeleteResult``:
    number of records deleted."""
    __schema__ = schema
    count = sgqlc.types.Field(Int, graphql_name='count')
class Experiment(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Experiment``.

    Experiment metadata plus per-date position data over a required
    [from, to] range with optional sampling interval.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    type = sgqlc.types.Field(ExperimentType, graphql_name='type')
    # ``from_`` avoids the Python keyword ``from``; maps to GraphQL ``from``.
    dates = sgqlc.types.Field(sgqlc.types.list_of('PositionSetDate'), graphql_name='dates', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    last_updated = sgqlc.types.Field(Date, graphql_name='lastUpdated')
class ExperimentMetadata(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ExperimentMetadata``:
    experiment descriptors without the per-date position data."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    type = sgqlc.types.Field(ExperimentType, graphql_name='type')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    last_updated = sgqlc.types.Field(Date, graphql_name='lastUpdated')
class Exposure(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``Exposure``:
    factor exposures for one (possibly rolled-over) date, filterable by
    factor category and/or id."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    factors = sgqlc.types.Field(sgqlc.types.list_of('ExposureFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
class ExposureContributor(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ExposureContributor``.

    One security's contribution to factor exposure, with identifying
    attributes, a parameterized ``classification`` lookup, and filterable
    per-factor contributions.
    """
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    # Field with GraphQL arguments: classification(id!, tier).
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    description = sgqlc.types.Field(String, graphql_name='description')
    percent_equity = sgqlc.types.Field(Float, graphql_name='percentEquity')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    factors = sgqlc.types.Field(sgqlc.types.list_of('ExposureContributorFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
    ))
    )
class ExposureContributorDate(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type ``ExposureContributorDate``:
    exposure contributors for a single (possibly rolled-over) date."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    contributors = sgqlc.types.Field(sgqlc.types.list_of(ExposureContributor), graphql_name='contributors')
class ExposureContributorFactor(sgqlc.types.Type):
    """sgqlc binding for the GraphQL output type
    ``ExposureContributorFactor``: one factor's exposure/contribution
    figures for a contributor."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    z_score = sgqlc.types.Field(Float, graphql_name='zScore')
    security_exposure = sgqlc.types.Field(Float, graphql_name='securityExposure')
    contribution = sgqlc.types.Field(Float, graphql_name='contribution')
    gross_contribution = sgqlc.types.Field(Float, graphql_name='grossContribution')
    net_contribution = sgqlc.types.Field(Float, graphql_name='netContribution')
class ExposureContributorGroup(sgqlc.types.Type):
    """Generated sgqlc binding: exposure contributors aggregated into a group."""
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    id = sgqlc.types.Field(String, graphql_name='id')
    total_percent_equity = sgqlc.types.Field(Float, graphql_name='totalPercentEquity')
    contributors = sgqlc.types.Field(sgqlc.types.list_of(ExposureContributor), graphql_name='contributors')
    # Group-level factor contributions, optionally filtered by id/category.
    factors = sgqlc.types.Field(sgqlc.types.list_of('ExposureContributorGroupFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
    ))
    )
class ExposureContributorGroupFactor(sgqlc.types.Type):
    """Generated sgqlc binding: a factor's exposure/contribution for a group."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    net_exposure = sgqlc.types.Field(Float, graphql_name='netExposure')
    gross_contribution = sgqlc.types.Field(Float, graphql_name='grossContribution')
    net_contribution = sgqlc.types.Field(Float, graphql_name='netContribution')
class ExposureFactor(sgqlc.types.Type):
    """Generated sgqlc binding: net/long/short exposure to a single factor."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    net = sgqlc.types.Field(Float, graphql_name='net')
    long = sgqlc.types.Field(Float, graphql_name='long')
    short = sgqlc.types.Field(Float, graphql_name='short')
class Factor(sgqlc.types.Type):
    """Generated sgqlc binding: a risk-model factor with performance/covariance."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    # `from_` uses a trailing underscore because `from` is a Python keyword;
    # the GraphQL argument is still named `from`.
    performance = sgqlc.types.Field(sgqlc.types.list_of('FactorPerformance'), graphql_name='performance', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('aggregation', sgqlc.types.Arg(Aggregation, graphql_name='aggregation', default=None)),
        ('interval', sgqlc.types.Arg(Interval, graphql_name='interval', default=None)),
    ))
    )
    # Covariance snapshot for a required single date (`on`).
    covariance = sgqlc.types.Field('FactorCovariance', graphql_name='covariance', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
    ))
    )
class FactorCovariance(sgqlc.types.Type):
    """Generated sgqlc binding: factor covariance values as of one date."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    # Covariance entries, optionally filtered by factor categories and/or ids.
    factors = sgqlc.types.Field(sgqlc.types.list_of('FactorCovarianceValue'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
class FactorCovarianceValue(sgqlc.types.Type):
    """Generated sgqlc binding: one factor's covariance value."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    # Units per the GraphQL field name: annualized percent-squared.
    annualized_percent_squared = sgqlc.types.Field(Float, graphql_name='annualizedPercentSquared')
class FactorPerformance(sgqlc.types.Type):
    """Generated sgqlc binding: one date's performance numbers for a factor."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    percent_price_change1_day = sgqlc.types.Field(Float, graphql_name='percentPriceChange1Day')
    percent_price_change_cumulative = sgqlc.types.Field(Float, graphql_name='percentPriceChangeCumulative')
    normalized_return = sgqlc.types.Field(Float, graphql_name='normalizedReturn')
class Forecast(sgqlc.types.Type):
    """Generated sgqlc binding: a return forecast over equities and swaps."""
    __schema__ = schema
    horizon = sgqlc.types.Field(Int, graphql_name='horizon')
    total = sgqlc.types.Field(Float, graphql_name='total')
    equities = sgqlc.types.Field(sgqlc.types.list_of('ForecastEquity'), graphql_name='equities')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('ForecastSwap'), graphql_name='swaps')
class ForecastDeleteResult(sgqlc.types.Type):
    """Generated sgqlc binding: count of deleted forecast entries."""
    __schema__ = schema
    count = sgqlc.types.Field(Int, graphql_name='count')
class ForecastDetails(sgqlc.types.Type):
    """Generated sgqlc binding: metadata plus dated contents of a forecast."""
    __schema__ = schema
    id = sgqlc.types.Field(ShortId, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
    # Dated forecast entries over a required [from, to] range; `from_` avoids
    # the Python keyword `from` while the GraphQL argument stays `from`.
    dates = sgqlc.types.Field(sgqlc.types.list_of('ForecastDetailsDate'), graphql_name='dates', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
    # Per-security history for one required security id.
    security_dates = sgqlc.types.Field(sgqlc.types.list_of('ForecastSecurity'), graphql_name='securityDates', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(UniversalIdInput), graphql_name='id', default=None)),
        ('id_date', sgqlc.types.Arg(Date, graphql_name='idDate', default=None)),
    ))
    )
class ForecastDetailsDate(sgqlc.types.Type):
    """Generated sgqlc binding: forecast securities as of one date."""
    __schema__ = schema
    as_of = sgqlc.types.Field(Date, graphql_name='asOf')
    securities = sgqlc.types.Field(sgqlc.types.list_of('ForecastSecurity'), graphql_name='securities')
class ForecastEquity(sgqlc.types.Type):
    """Generated sgqlc binding: expected return for one equity position."""
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetEquityId', graphql_name='id')
    expected_percent_return = sgqlc.types.Field(Float, graphql_name='expectedPercentReturn')
class ForecastExpectedReturn(sgqlc.types.Type):
    """Generated sgqlc binding: an expected return with its horizon."""
    __schema__ = schema
    # Trailing underscore because `return` is a Python keyword; the GraphQL
    # field is still named `return`.
    return_ = sgqlc.types.Field(Float, graphql_name='return')
    horizon = sgqlc.types.Field(Int, graphql_name='horizon')
class ForecastMeta(sgqlc.types.Type):
    """Generated sgqlc binding: identifying metadata for a forecast."""
    __schema__ = schema
    id = sgqlc.types.Field(ShortId, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
class ForecastSecurity(sgqlc.types.Type):
    """Generated sgqlc binding: one security's forecast entry."""
    __schema__ = schema
    id = sgqlc.types.Field('UniversalId', graphql_name='id')
    label = sgqlc.types.Field(String, graphql_name='label')
    as_of = sgqlc.types.Field(Date, graphql_name='asOf')
    expected_return = sgqlc.types.Field(ForecastExpectedReturn, graphql_name='expectedReturn')
class ForecastSwap(sgqlc.types.Type):
    """Generated sgqlc binding: expected return for one swap position."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    expected_percent_return = sgqlc.types.Field(Float, graphql_name='expectedPercentReturn')
class ForecastTypes(sgqlc.types.Type):
    """Generated sgqlc binding: available forecast variants (implied/custom)."""
    __schema__ = schema
    # Model-implied returns for a required risk-factor set.
    implied_returns = sgqlc.types.Field(Forecast, graphql_name='impliedReturns', args=sgqlc.types.ArgDict((
        ('risk_factors', sgqlc.types.Arg(sgqlc.types.non_null(ImpliedReturnsType), graphql_name='riskFactors', default=None)),
    ))
    )
    # A user-uploaded forecast looked up by its required short id.
    custom = sgqlc.types.Field(Forecast, graphql_name='custom', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(ShortId), graphql_name='id', default=None)),
    ))
    )
class ForecastUpdateResult(sgqlc.types.Type):
    """Generated sgqlc binding: outcome of a forecast upload/update."""
    __schema__ = schema
    success_count = sgqlc.types.Field(Int, graphql_name='successCount')
    # Securities from the upload that could not be mapped.
    unmapped_securities = sgqlc.types.Field(sgqlc.types.list_of(ForecastSecurity), graphql_name='unmappedSecurities')
class GroupedBetaContributorDate(sgqlc.types.Type):
    """Generated sgqlc binding: grouped beta contributors for one date."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    # Present when this date's data was carried forward from an earlier date.
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    grouped_contributors = sgqlc.types.Field(sgqlc.types.list_of(BetaContributorGroup), graphql_name='groupedContributors')
class GroupedExposureContributorDate(sgqlc.types.Type):
    """Generated sgqlc binding: grouped exposure contributors for one date."""
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    # Present when this date's data was carried forward from an earlier date.
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    grouped_contributors = sgqlc.types.Field(sgqlc.types.list_of(ExposureContributorGroup), graphql_name='groupedContributors')
class MarketImpact(sgqlc.types.Type):
    """Generated sgqlc binding: total market-impact cost and its contributors."""
    __schema__ = schema
    cost = sgqlc.types.Field(Float, graphql_name='cost')
    contributors = sgqlc.types.Field(sgqlc.types.list_of('MarketImpactContributor'), graphql_name='contributors')
class MarketImpactContributor(sgqlc.types.Type):
    """Generated sgqlc binding: one security's share of market-impact cost."""
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    # Descriptor for a required industry-classification scheme (`type`).
    industry_classification = sgqlc.types.Field('SecurityDescriptor', graphql_name='industryClassification', args=sgqlc.types.ArgDict((
        ('type', sgqlc.types.Arg(sgqlc.types.non_null(IndustryClassificationType), graphql_name='type', default=None)),
    ))
    )
    # Classification lookup by required `id` plus optional `tier`.
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    cost = sgqlc.types.Field(Float, graphql_name='cost')
class MaxDrawdown(sgqlc.types.Type):
    """Generated sgqlc binding: maximum drawdown and its date window."""
    __schema__ = schema
    drawdown = sgqlc.types.Field(Float, graphql_name='drawdown')
    # Trailing underscore because `from` is a Python keyword.
    from_ = sgqlc.types.Field(Date, graphql_name='from')
    to = sgqlc.types.Field(Date, graphql_name='to')
    days_between = sgqlc.types.Field(Int, graphql_name='daysBetween')
class Meta(sgqlc.types.Type):
    """Generated sgqlc binding: API metadata (points budget, optimization)."""
    __schema__ = schema
    points = sgqlc.types.Field('MetaPoints', graphql_name='points')
    optimization = sgqlc.types.Field('OptimizationMeta', graphql_name='optimization')
class MetaPoints(sgqlc.types.Type):
    """Generated sgqlc binding: query-points accounting for rate limiting."""
    __schema__ = schema
    cost = sgqlc.types.Field(Int, graphql_name='cost')
    points_left = sgqlc.types.Field(Int, graphql_name='pointsLeft')
    seconds_to_points_reset = sgqlc.types.Field(Int, graphql_name='secondsToPointsReset')
class MissingEquity(sgqlc.types.Type):
    """Generated sgqlc binding: an equity not covered, with its GMV share."""
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetEquityId', graphql_name='id')
    percent_gmv = sgqlc.types.Field(Float, graphql_name='percentGmv')
class MissingFixedIncome(sgqlc.types.Type):
    """Generated sgqlc binding: a fixed-income asset not covered, with GMV share."""
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetFixedIncomeId', graphql_name='id')
    percent_gmv = sgqlc.types.Field(Float, graphql_name='percentGmv')
class MissingOtherAsset(sgqlc.types.Type):
    """Generated sgqlc binding: any other uncovered asset, with GMV share."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    percent_gmv = sgqlc.types.Field(Float, graphql_name='percentGmv')
class Model(sgqlc.types.Type):
    """Generated sgqlc binding: a risk model — entry point to factors,
    securities, portfolios, simulations, optimizations, and search.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    availability = sgqlc.types.Field('ModelAvailability', graphql_name='availability')
    benchmarks = sgqlc.types.Field(sgqlc.types.list_of(BenchmarkMetadata), graphql_name='benchmarks')
    # Model factors, optionally filtered by category and/or id lists.
    factors = sgqlc.types.Field(sgqlc.types.list_of(Factor), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
    categories = sgqlc.types.Field(sgqlc.types.list_of(Category), graphql_name='categories', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
    # Security lookup: every identifier argument is optional, so callers pick
    # whichever one they have (ticker+exchange/mic, SEDOL, ISIN, CUSIP, ...).
    security = sgqlc.types.Field('Security', graphql_name='security', args=sgqlc.types.ArgDict((
        ('ticker', sgqlc.types.Arg(String, graphql_name='ticker', default=None)),
        ('exchange', sgqlc.types.Arg(String, graphql_name='exchange', default=None)),
        ('mic', sgqlc.types.Arg(String, graphql_name='mic', default=None)),
        ('sedol', sgqlc.types.Arg(Sedol, graphql_name='sedol', default=None)),
        ('isin', sgqlc.types.Arg(Isin, graphql_name='isin', default=None)),
        ('cusip', sgqlc.types.Arg(Cusip, graphql_name='cusip', default=None)),
        ('model_provider_id', sgqlc.types.Arg(String, graphql_name='modelProviderId', default=None)),
    ))
    )
    portfolio = sgqlc.types.Field('ModelPortfolio', graphql_name='portfolio', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    # Ad-hoc simulation of a supplied position set; `from_`/`to` bound the
    # date range (`from_` avoids the Python keyword `from`).
    simulation = sgqlc.types.Field('ModelSimulation', graphql_name='simulation', args=sgqlc.types.ArgDict((
        ('position_set', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetInput), graphql_name='positionSet', default=None)),
        ('base', sgqlc.types.Arg(sgqlc.types.list_of(PositionSetInput), graphql_name='base', default=None)),
        ('from_', sgqlc.types.Arg(Date, graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    # Optimization run: `objective` and `constraints` are required; the rest
    # tune the run.
    optimization = sgqlc.types.Field('ModelOptimization', graphql_name='optimization', args=sgqlc.types.ArgDict((
        ('position_set', sgqlc.types.Arg(PositionSetInput, graphql_name='positionSet', default=None)),
        ('base', sgqlc.types.Arg(sgqlc.types.list_of(PositionSetInput), graphql_name='base', default=None)),
        ('on', sgqlc.types.Arg(sgqlc.types.list_of(Date), graphql_name='on', default=None)),
        ('objective', sgqlc.types.Arg(sgqlc.types.non_null(sgqlc.types.list_of(OptimizationObjective)), graphql_name='objective', default=None)),
        ('constraints', sgqlc.types.Arg(sgqlc.types.non_null(OptimizationConstraints), graphql_name='constraints', default=None)),
        ('securities', sgqlc.types.Arg(OptimizationSecuritiesInput, graphql_name='securities', default=None)),
        ('constants', sgqlc.types.Arg(OptimizationConstantsInput, graphql_name='constants', default=None)),
        ('options', sgqlc.types.Arg(OptimizationOptionsInput, graphql_name='options', default=None)),
        ('forecast', sgqlc.types.Arg(ForecastInput, graphql_name='forecast', default=None)),
    ))
    )
    # Filtered security search as of a required date, with sort/paging.
    security_search = sgqlc.types.Field('SecuritySearchResult', graphql_name='securitySearch', args=sgqlc.types.ArgDict((
        ('filter', sgqlc.types.Arg(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(SecuritySearchFilter))), graphql_name='filter', default=None)),
        ('sort', sgqlc.types.Arg(sgqlc.types.list_of(SecuritySearchSort), graphql_name='sort', default=None)),
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
        ('take', sgqlc.types.Arg(Int, graphql_name='take', default=None)),
        ('skip', sgqlc.types.Arg(Int, graphql_name='skip', default=None)),
    ))
    )
class ModelAvailability(sgqlc.types.Type):
    """Generated sgqlc binding: date coverage of a model's data."""
    __schema__ = schema
    current_date = sgqlc.types.Field(Date, graphql_name='currentDate')
    factors_start_date = sgqlc.types.Field(Date, graphql_name='factorsStartDate')
    securities_start_date = sgqlc.types.Field(Date, graphql_name='securitiesStartDate')
    # Available dates in a required [from, to] range; `from_` avoids the
    # Python keyword `from`.
    dates = sgqlc.types.Field(sgqlc.types.list_of(Date), graphql_name='dates', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
class ModelMetadata(sgqlc.types.Type):
    """Generated sgqlc binding: identifying metadata for a model."""
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    short_name = sgqlc.types.Field(String, graphql_name='shortName')
    availability = sgqlc.types.Field(ModelAvailability, graphql_name='availability')
class ModelOptimization(sgqlc.types.Type):
    """Generated sgqlc binding: analytics available on an optimization result
    (deltas, risk, exposure, beta, performance, composition, costs).
    """
    __schema__ = schema
    positions_delta = sgqlc.types.Field(sgqlc.types.list_of('OptimizationPositionsDelta'), graphql_name='positionsDelta')
    risk = sgqlc.types.Field(sgqlc.types.list_of('Risk'), graphql_name='risk')
    risk_contributors = sgqlc.types.Field(sgqlc.types.list_of('RiskContributor'), graphql_name='riskContributors', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(Date, graphql_name='on', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    exposure = sgqlc.types.Field(sgqlc.types.list_of(Exposure), graphql_name='exposure', args=sgqlc.types.ArgDict((
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
    ))
    )
    exposure_contributors = sgqlc.types.Field(sgqlc.types.list_of(ExposureContributorDate), graphql_name='exposureContributors', args=sgqlc.types.ArgDict((
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('active', sgqlc.types.Arg(ActiveContributorType, graphql_name='active', default=None)),
    ))
    )
    beta = sgqlc.types.Field(sgqlc.types.list_of(Beta), graphql_name='beta')
    beta_contributors = sgqlc.types.Field(sgqlc.types.list_of(BetaContributorDate), graphql_name='betaContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('active', sgqlc.types.Arg(ActiveContributorType, graphql_name='active', default=None)),
    ))
    )
    performance = sgqlc.types.Field(sgqlc.types.list_of('Performance'), graphql_name='performance', args=sgqlc.types.ArgDict((
        ('aggregation', sgqlc.types.Arg(Aggregation, graphql_name='aggregation', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    positions = sgqlc.types.Field('OptimizedPositionSet', graphql_name='positions', args=sgqlc.types.ArgDict((
        ('as_of', sgqlc.types.Arg(Date, graphql_name='asOf', default=None)),
    ))
    )
    period_performance = sgqlc.types.Field('PeriodPerformance', graphql_name='periodPerformance', args=sgqlc.types.ArgDict((
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('risk_free_rate', sgqlc.types.Arg(Float, graphql_name='riskFreeRate', default=None)),
    ))
    )
    correlation = sgqlc.types.Field(Float, graphql_name='correlation', args=sgqlc.types.ArgDict((
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
    composition = sgqlc.types.Field(sgqlc.types.list_of(Composition), graphql_name='composition', args=sgqlc.types.ArgDict((
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    market_impact = sgqlc.types.Field(MarketImpact, graphql_name='marketImpact', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
    ))
    )
    turnover = sgqlc.types.Field('Turnover', graphql_name='turnover', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
    ))
    )
class ModelPortfolio(sgqlc.types.Type):
    """Generated sgqlc binding: analytics on a stored portfolio. Most fields
    take a required [from, to] date range (`from_` avoids the Python keyword
    `from`; the GraphQL argument is still `from`).
    """
    __schema__ = schema
    coverage = sgqlc.types.Field(sgqlc.types.list_of(Coverage), graphql_name='coverage', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    performance = sgqlc.types.Field(sgqlc.types.list_of('Performance'), graphql_name='performance', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('aggregation', sgqlc.types.Arg(Aggregation, graphql_name='aggregation', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    performance_contributors = sgqlc.types.Field(sgqlc.types.list_of('PerformanceContributor'), graphql_name='performanceContributors', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    # Grouped variants require `group_by`; `classification_id`/`classification_tier`
    # refine classification-based grouping.
    grouped_performance_contributors = sgqlc.types.Field(sgqlc.types.list_of('PerformanceContributorGroup'), graphql_name='groupedPerformanceContributors', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    risk = sgqlc.types.Field(sgqlc.types.list_of('Risk'), graphql_name='risk', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    # Risk contributors are a single-date snapshot (`on`), not a range.
    risk_contributors = sgqlc.types.Field(sgqlc.types.list_of('RiskContributor'), graphql_name='riskContributors', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    grouped_risk_contributors = sgqlc.types.Field(sgqlc.types.list_of('RiskContributorGroup'), graphql_name='groupedRiskContributors', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    exposure = sgqlc.types.Field(sgqlc.types.list_of(Exposure), graphql_name='exposure', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
    ))
    )
    exposure_contributors = sgqlc.types.Field(sgqlc.types.list_of(ExposureContributorDate), graphql_name='exposureContributors', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
    ))
    )
    grouped_exposure_contributors = sgqlc.types.Field(sgqlc.types.list_of(GroupedExposureContributorDate), graphql_name='groupedExposureContributors', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    beta = sgqlc.types.Field(sgqlc.types.list_of(Beta), graphql_name='beta', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    beta_contributors = sgqlc.types.Field(sgqlc.types.list_of(BetaContributorDate), graphql_name='betaContributors', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
    ))
    )
    grouped_beta_contributors = sgqlc.types.Field(sgqlc.types.list_of(GroupedBetaContributorDate), graphql_name='groupedBetaContributors', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    forecast = sgqlc.types.Field(ForecastTypes, graphql_name='forecast', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
        ('horizon', sgqlc.types.Arg(Int, graphql_name='horizon', default=None)),
    ))
    )
    period_performance = sgqlc.types.Field('PeriodPerformance', graphql_name='periodPerformance', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('risk_free_rate', sgqlc.types.Arg(Float, graphql_name='riskFreeRate', default=None)),
    ))
    )
    # Correlation against another required position set; `with_` avoids the
    # Python keyword `with`.
    correlation = sgqlc.types.Field(Float, graphql_name='correlation', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('with_', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetInput), graphql_name='with', default=None)),
    ))
    )
    composition = sgqlc.types.Field(sgqlc.types.list_of(Composition), graphql_name='composition', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    market_impact = sgqlc.types.Field(MarketImpact, graphql_name='marketImpact', args=sgqlc.types.ArgDict((
        ('position_set_delta', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetDateInput), graphql_name='positionSetDelta', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
    ))
    )
class ModelSimulation(sgqlc.types.Type):
    """Generated sgqlc binding: analytics on an ad-hoc simulation. Unlike
    ``ModelPortfolio``, fields here take no from/to range arguments — the
    range is fixed by the enclosing ``simulation`` query.
    """
    __schema__ = schema
    coverage = sgqlc.types.Field(sgqlc.types.list_of(Coverage), graphql_name='coverage')
    performance = sgqlc.types.Field(sgqlc.types.list_of('Performance'), graphql_name='performance', args=sgqlc.types.ArgDict((
        ('aggregation', sgqlc.types.Arg(Aggregation, graphql_name='aggregation', default=None)),
    ))
    )
    performance_contributors = sgqlc.types.Field(sgqlc.types.list_of('PerformanceContributor'), graphql_name='performanceContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    # Grouped variants require `group_by`; classification args refine
    # classification-based grouping.
    grouped_performance_contributors = sgqlc.types.Field(sgqlc.types.list_of('PerformanceContributorGroup'), graphql_name='groupedPerformanceContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    risk = sgqlc.types.Field(sgqlc.types.list_of('Risk'), graphql_name='risk')
    risk_contributors = sgqlc.types.Field(sgqlc.types.list_of('RiskContributor'), graphql_name='riskContributors', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(Date, graphql_name='on', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    grouped_risk_contributors = sgqlc.types.Field(sgqlc.types.list_of('RiskContributorGroup'), graphql_name='groupedRiskContributors', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(Date, graphql_name='on', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    exposure = sgqlc.types.Field(sgqlc.types.list_of(Exposure), graphql_name='exposure', args=sgqlc.types.ArgDict((
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
    ))
    )
    exposure_contributors = sgqlc.types.Field(sgqlc.types.list_of(ExposureContributorDate), graphql_name='exposureContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
        ('active', sgqlc.types.Arg(ActiveContributorType, graphql_name='active', default=None)),
    ))
    )
    grouped_exposure_contributors = sgqlc.types.Field(sgqlc.types.list_of(GroupedExposureContributorDate), graphql_name='groupedExposureContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
        ('active', sgqlc.types.Arg(ActiveContributorType, graphql_name='active', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    beta = sgqlc.types.Field(sgqlc.types.list_of(Beta), graphql_name='beta')
    beta_contributors = sgqlc.types.Field(sgqlc.types.list_of(BetaContributorDate), graphql_name='betaContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('active', sgqlc.types.Arg(ActiveContributorType, graphql_name='active', default=None)),
    ))
    )
    grouped_beta_contributors = sgqlc.types.Field(sgqlc.types.list_of(GroupedBetaContributorDate), graphql_name='groupedBetaContributors', args=sgqlc.types.ArgDict((
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('group_by', sgqlc.types.Arg(sgqlc.types.non_null(ContributorGroupType), graphql_name='groupBy', default=None)),
        ('active', sgqlc.types.Arg(ActiveContributorType, graphql_name='active', default=None)),
        ('classification_id', sgqlc.types.Arg(String, graphql_name='classificationId', default=None)),
        ('classification_tier', sgqlc.types.Arg(String, graphql_name='classificationTier', default=None)),
    ))
    )
    forecast = sgqlc.types.Field(ForecastTypes, graphql_name='forecast', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
        ('horizon', sgqlc.types.Arg(Int, graphql_name='horizon', default=None)),
    ))
    )
    period_performance = sgqlc.types.Field('PeriodPerformance', graphql_name='periodPerformance', args=sgqlc.types.ArgDict((
        ('risk_free_rate', sgqlc.types.Arg(Float, graphql_name='riskFreeRate', default=None)),
    ))
    )
    # `with_` avoids the Python keyword `with`; the GraphQL argument is `with`.
    correlation = sgqlc.types.Field(Float, graphql_name='correlation', args=sgqlc.types.ArgDict((
        ('with_', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetInput), graphql_name='with', default=None)),
    ))
    )
    composition = sgqlc.types.Field(sgqlc.types.list_of(Composition), graphql_name='composition', args=sgqlc.types.ArgDict((
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
    ))
    )
    market_impact = sgqlc.types.Field(MarketImpact, graphql_name='marketImpact', args=sgqlc.types.ArgDict((
        ('position_set_delta', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetDateInput), graphql_name='positionSetDelta', default=None)),
        ('equity_id_format', sgqlc.types.Arg(EquityIdFormat, graphql_name='equityIdFormat', default=None)),
        ('scale_format', sgqlc.types.Arg(ScaleFormat, graphql_name='scaleFormat', default=None)),
    ))
    )
class Mutation(sgqlc.types.Type):
    """Root GraphQL mutation type (sgqlc binding).

    Groups all write operations of the API: uploading/deleting content-set,
    PnL and position-set data, and create/update/delete for experiments,
    forecasts, portfolios, research topics, swaps and watchlists.

    NOTE: Python argument names with a trailing underscore (``from_``) map to
    reserved GraphQL argument names (``from``) via ``graphql_name``.
    """
    __schema__ = schema
    # --- content sets -------------------------------------------------
    upload_content_set_date = sgqlc.types.Field('UploadContentSetResult', graphql_name='uploadContentSetDate', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('data', sgqlc.types.Arg(sgqlc.types.non_null(ContentSetDateInput), graphql_name='data', default=None)),
    ))
    )
    delete_content_set_dates = sgqlc.types.Field(DeleteResult, graphql_name='deleteContentSetDates', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('from_', sgqlc.types.Arg(Date, graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('all_dates', sgqlc.types.Arg(Boolean, graphql_name='allDates', default=None)),
    ))
    )
    # --- daily / per-security PnL -------------------------------------
    upload_daily_pnl = sgqlc.types.Field('UploadDailyPnlResult', graphql_name='uploadDailyPnl', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('date', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='date', default=None)),
        ('amount', sgqlc.types.Arg(sgqlc.types.non_null(Float), graphql_name='amount', default=None)),
    ))
    )
    delete_daily_pnl = sgqlc.types.Field(DeleteResult, graphql_name='deleteDailyPnl', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('dates', sgqlc.types.Arg(sgqlc.types.list_of(Date), graphql_name='dates', default=None)),
        ('all_dates', sgqlc.types.Arg(Boolean, graphql_name='allDates', default=None)),
    ))
    )
    # --- experiments --------------------------------------------------
    create_experiment = sgqlc.types.Field(Experiment, graphql_name='createExperiment', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(String, graphql_name='portfolioId', default=None)),
        ('resource_id', sgqlc.types.Arg(String, graphql_name='resourceId', default=None)),
        ('resource_type', sgqlc.types.Arg(ResourceType, graphql_name='resourceType', default=None)),
        ('experiment', sgqlc.types.Arg(sgqlc.types.non_null(NewExperiment), graphql_name='experiment', default=None)),
    ))
    )
    update_experiment = sgqlc.types.Field(Experiment, graphql_name='updateExperiment', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(String, graphql_name='portfolioId', default=None)),
        ('experiment_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='experimentId', default=None)),
        ('experiment', sgqlc.types.Arg(sgqlc.types.non_null(UpdateExperiment), graphql_name='experiment', default=None)),
    ))
    )
    delete_experiment = sgqlc.types.Field('SingleDeleteResult', graphql_name='deleteExperiment', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(String, graphql_name='portfolioId', default=None)),
        ('experiment_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='experimentId', default=None)),
    ))
    )
    upload_experiment_date = sgqlc.types.Field('UploadPositionSetResult', graphql_name='uploadExperimentDate', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(String, graphql_name='portfolioId', default=None)),
        ('experiment_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='experimentId', default=None)),
        ('data', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetDateInput), graphql_name='data', default=None)),
    ))
    )
    delete_experiment_dates = sgqlc.types.Field(DeleteResult, graphql_name='deleteExperimentDates', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(String, graphql_name='portfolioId', default=None)),
        ('experiment_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='experimentId', default=None)),
        ('from_', sgqlc.types.Arg(Date, graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('all_dates', sgqlc.types.Arg(Boolean, graphql_name='allDates', default=None)),
    ))
    )
    # --- forecasts ----------------------------------------------------
    create_forecast = sgqlc.types.Field(ForecastMeta, graphql_name='createForecast', args=sgqlc.types.ArgDict((
        ('forecast', sgqlc.types.Arg(sgqlc.types.non_null(ForecastCreate), graphql_name='forecast', default=None)),
    ))
    )
    update_forecast = sgqlc.types.Field(ForecastMeta, graphql_name='updateForecast', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(ShortId), graphql_name='id', default=None)),
        ('update', sgqlc.types.Arg(sgqlc.types.non_null(ForecastUpdate), graphql_name='update', default=None)),
    ))
    )
    delete_forecast = sgqlc.types.Field('SingleDeleteResult', graphql_name='deleteForecast', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(ShortId), graphql_name='id', default=None)),
    ))
    )
    upload_forecast_securities = sgqlc.types.Field(ForecastUpdateResult, graphql_name='uploadForecastSecurities', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(ShortId), graphql_name='id', default=None)),
        ('values', sgqlc.types.Arg(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(ForecastSecurityInput))), graphql_name='values', default=None)),
    ))
    )
    delete_forecast_securities = sgqlc.types.Field(ForecastDeleteResult, graphql_name='deleteForecastSecurities', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(ShortId), graphql_name='id', default=None)),
        ('values', sgqlc.types.Arg(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(DeleteForecastSecurityInput))), graphql_name='values', default=None)),
    ))
    )
    # --- per-security PnL dates ---------------------------------------
    upload_pnl_date = sgqlc.types.Field('UploadPnlResult', graphql_name='uploadPnlDate', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('data', sgqlc.types.Arg(sgqlc.types.non_null(PnlDateInput), graphql_name='data', default=None)),
    ))
    )
    delete_pnl_dates = sgqlc.types.Field(DeleteResult, graphql_name='deletePnlDates', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('from_', sgqlc.types.Arg(Date, graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('all_dates', sgqlc.types.Arg(Boolean, graphql_name='allDates', default=None)),
    ))
    )
    # --- portfolios ---------------------------------------------------
    update_portfolio = sgqlc.types.Field('Portfolio', graphql_name='updatePortfolio', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('portfolio', sgqlc.types.Arg(sgqlc.types.non_null(PortfolioUpdate), graphql_name='portfolio', default=None)),
    ))
    )
    create_portfolio = sgqlc.types.Field('Portfolio', graphql_name='createPortfolio', args=sgqlc.types.ArgDict((
        ('portfolio', sgqlc.types.Arg(sgqlc.types.non_null(NewPortfolio), graphql_name='portfolio', default=None)),
    ))
    )
    delete_portfolio = sgqlc.types.Field('SingleDeleteResult', graphql_name='deletePortfolio', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    # --- position sets ------------------------------------------------
    upload_position_set_date = sgqlc.types.Field('UploadPositionSetResult', graphql_name='uploadPositionSetDate', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('data', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetDateInput), graphql_name='data', default=None)),
    ))
    )
    delete_position_set_dates = sgqlc.types.Field(DeleteResult, graphql_name='deletePositionSetDates', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('dates', sgqlc.types.Arg(sgqlc.types.list_of(Date), graphql_name='dates', default=None)),
        ('from_', sgqlc.types.Arg(Date, graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('all_dates', sgqlc.types.Arg(Boolean, graphql_name='allDates', default=None)),
        ('delete_related_pnl', sgqlc.types.Arg(Boolean, graphql_name='deleteRelatedPnl', default=None)),
    ))
    )
    # --- research topics ----------------------------------------------
    create_research_topic = sgqlc.types.Field('ResearchTopic', graphql_name='createResearchTopic', args=sgqlc.types.ArgDict((
        ('research_topic', sgqlc.types.Arg(sgqlc.types.non_null(NewResearchTopic), graphql_name='researchTopic', default=None)),
    ))
    )
    update_research_topic = sgqlc.types.Field('ResearchTopic', graphql_name='updateResearchTopic', args=sgqlc.types.ArgDict((
        ('research_topic_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='researchTopicId', default=None)),
        ('research_topic', sgqlc.types.Arg(sgqlc.types.non_null(UpdateResearchTopic), graphql_name='researchTopic', default=None)),
    ))
    )
    delete_research_topic = sgqlc.types.Field('SingleDeleteResult', graphql_name='deleteResearchTopic', args=sgqlc.types.ArgDict((
        ('research_topic_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='researchTopicId', default=None)),
    ))
    )
    # --- swaps --------------------------------------------------------
    create_swap = sgqlc.types.Field('Swap', graphql_name='createSwap', args=sgqlc.types.ArgDict((
        ('swap', sgqlc.types.Arg(sgqlc.types.non_null(NewSwap), graphql_name='swap', default=None)),
    ))
    )
    update_swap = sgqlc.types.Field('Swap', graphql_name='updateSwap', args=sgqlc.types.ArgDict((
        ('ticker', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='ticker', default=None)),
        ('swap', sgqlc.types.Arg(sgqlc.types.non_null(SwapUpdate), graphql_name='swap', default=None)),
    ))
    )
    delete_swap = sgqlc.types.Field('SingleDeleteResult', graphql_name='deleteSwap', args=sgqlc.types.ArgDict((
        ('ticker', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='ticker', default=None)),
    ))
    )
    upload_swap_date = sgqlc.types.Field('UploadPositionSetResult', graphql_name='uploadSwapDate', args=sgqlc.types.ArgDict((
        ('ticker', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='ticker', default=None)),
        ('data', sgqlc.types.Arg(sgqlc.types.non_null(PositionSetDateInput), graphql_name='data', default=None)),
    ))
    )
    delete_swap_dates = sgqlc.types.Field(DeleteResult, graphql_name='deleteSwapDates', args=sgqlc.types.ArgDict((
        ('ticker', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='ticker', default=None)),
        ('from_', sgqlc.types.Arg(Date, graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(Date, graphql_name='to', default=None)),
        ('all_dates', sgqlc.types.Arg(Boolean, graphql_name='allDates', default=None)),
    ))
    )
    # --- watchlists ---------------------------------------------------
    create_watchlist = sgqlc.types.Field('WatchlistMeta', graphql_name='createWatchlist', args=sgqlc.types.ArgDict((
        ('name', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='name', default=None)),
        ('description', sgqlc.types.Arg(String, graphql_name='description', default=None)),
        ('alias', sgqlc.types.Arg(String, graphql_name='alias', default=None)),
    ))
    )
    delete_watchlist = sgqlc.types.Field('SingleDeleteResult', graphql_name='deleteWatchlist', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    update_watchlist = sgqlc.types.Field('WatchlistMeta', graphql_name='updateWatchlist', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('update', sgqlc.types.Arg(sgqlc.types.non_null(WatchlistUpdate), graphql_name='update', default=None)),
    ))
    )
    add_watchlist_securities = sgqlc.types.Field('Watchlist', graphql_name='addWatchlistSecurities', args=sgqlc.types.ArgDict((
        ('watchlist_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='watchlistId', default=None)),
        ('securities', sgqlc.types.Arg(sgqlc.types.non_null(WatchlistSecuritiesInput), graphql_name='securities', default=None)),
        ('as_of', sgqlc.types.Arg(Date, graphql_name='asOf', default=None)),
    ))
    )
    remove_watchlist_securities = sgqlc.types.Field('Watchlist', graphql_name='removeWatchlistSecurities', args=sgqlc.types.ArgDict((
        ('watchlist_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='watchlistId', default=None)),
        ('securities', sgqlc.types.Arg(sgqlc.types.non_null(WatchlistSecuritiesInput), graphql_name='securities', default=None)),
    ))
    )
    clear_watchlist_securities = sgqlc.types.Field('Watchlist', graphql_name='clearWatchlistSecurities', args=sgqlc.types.ArgDict((
        ('watchlist_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='watchlistId', default=None)),
    ))
    )
class OptimizationMeta(sgqlc.types.Type):
    """sgqlc binding for the GraphQL `OptimizationMeta` type: a single
    `burstEnabled` flag."""
    __schema__ = schema
    burst_enabled = sgqlc.types.Field(Boolean, graphql_name='burstEnabled')
class OptimizationPositionsDelta(sgqlc.types.Type):
    """Position changes (equities and swaps) on a given date.

    sgqlc binding for the GraphQL `OptimizationPositionsDelta` type.
    """
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    equities = sgqlc.types.Field(sgqlc.types.list_of('PositionSetEquity'), graphql_name='equities')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('PositionSetOtherAsset'), graphql_name='swaps')
class OptimizedPositionSet(sgqlc.types.Type):
    """sgqlc binding for the GraphQL `OptimizedPositionSet` type: a list of
    `PositionSetDate` entries."""
    __schema__ = schema
    dates = sgqlc.types.Field(sgqlc.types.list_of('PositionSetDate'), graphql_name='dates')
class Performance(sgqlc.types.Type):
    """Dated performance record with cumulative percent return.

    sgqlc binding for the GraphQL `Performance` type; `rolledOverFrom`
    carries the source date when a value was rolled over.
    """
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    percent_return_cumulative = sgqlc.types.Field('PerformanceItem', graphql_name='percentReturnCumulative')
class PerformanceAttribution(sgqlc.types.Type):
    """Performance attribution: a summary plus a per-factor breakdown.

    sgqlc binding for the GraphQL `PerformanceAttribution` type; the
    `factors` field can be filtered by factor `category` and/or `id`.
    """
    __schema__ = schema
    summary = sgqlc.types.Field('PerformanceAttributionSummary', graphql_name='summary')
    factors = sgqlc.types.Field(sgqlc.types.list_of('PerformanceAttributionFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
class PerformanceAttributionFactor(sgqlc.types.Type):
    """A single factor's contribution: id, name, category and value.

    sgqlc binding for the GraphQL `PerformanceAttributionFactor` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    value = sgqlc.types.Field(Float, graphql_name='value')
class PerformanceAttributionSummary(sgqlc.types.Type):
    """Attribution totals split into trading, factors and specific parts.

    sgqlc binding for the GraphQL `PerformanceAttributionSummary` type.
    """
    __schema__ = schema
    trading = sgqlc.types.Field(Float, graphql_name='trading')
    factors = sgqlc.types.Field(Float, graphql_name='factors')
    specific = sgqlc.types.Field(Float, graphql_name='specific')
class PerformanceContributor(sgqlc.types.Type):
    """A single security's contribution to performance.

    sgqlc binding for the GraphQL `PerformanceContributor` type: security
    identifiers (id, sedol, isin, cusip), grouping attributes (country,
    currency, sector, classification), its average percent of equity, the
    contribution `total`, and a nested attribution breakdown.
    """
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    description = sgqlc.types.Field(String, graphql_name='description')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    average_percent_equity = sgqlc.types.Field(Float, graphql_name='averagePercentEquity')
    total = sgqlc.types.Field(Float, graphql_name='total')
    attribution = sgqlc.types.Field('PerformanceContributorAttribution', graphql_name='attribution')
class PerformanceContributorAttribution(sgqlc.types.Type):
    """Per-contributor attribution: summary plus per-factor values.

    sgqlc binding for the GraphQL `PerformanceContributorAttribution` type.
    """
    __schema__ = schema
    summary = sgqlc.types.Field('PerformanceContributorAttributionSummary', graphql_name='summary')
    factors = sgqlc.types.Field(sgqlc.types.list_of('PerformanceContributorAttributionFactor'), graphql_name='factors')
class PerformanceContributorAttributionFactor(sgqlc.types.Type):
    """A factor's contribution for one contributor: id, name, category, value.

    sgqlc binding for the GraphQL `PerformanceContributorAttributionFactor` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    value = sgqlc.types.Field(Float, graphql_name='value')
class PerformanceContributorAttributionSummary(sgqlc.types.Type):
    """Contributor attribution totals: factors, specific and trading.

    sgqlc binding for the GraphQL `PerformanceContributorAttributionSummary` type.
    """
    __schema__ = schema
    factors = sgqlc.types.Field(Float, graphql_name='factors')
    specific = sgqlc.types.Field(Float, graphql_name='specific')
    trading = sgqlc.types.Field(Float, graphql_name='trading')
class PerformanceContributorGroup(sgqlc.types.Type):
    """A named group of performance contributors with aggregate figures.

    sgqlc binding for the GraphQL `PerformanceContributorGroup` type:
    the group's name/id, its total contribution and average percent of
    equity, the member contributors, and a group-level attribution.
    """
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    id = sgqlc.types.Field(String, graphql_name='id')
    total = sgqlc.types.Field(Float, graphql_name='total')
    contributors = sgqlc.types.Field(sgqlc.types.list_of(PerformanceContributor), graphql_name='contributors')
    attribution = sgqlc.types.Field(PerformanceContributorAttribution, graphql_name='attribution')
    average_percent_equity = sgqlc.types.Field(Float, graphql_name='averagePercentEquity')
class PerformanceItem(sgqlc.types.Type):
    """A performance value (`total`) with its attribution breakdown.

    sgqlc binding for the GraphQL `PerformanceItem` type.
    """
    __schema__ = schema
    total = sgqlc.types.Field(Float, graphql_name='total')
    attribution = sgqlc.types.Field(PerformanceAttribution, graphql_name='attribution')
class PeriodPerformance(sgqlc.types.Type):
    """Summary statistics for a performance period.

    sgqlc binding for the GraphQL `PeriodPerformance` type: annualized
    returns/volatility, information and Sortino ratios, and max drawdown.
    """
    __schema__ = schema
    annualized_returns = sgqlc.types.Field(Float, graphql_name='annualizedReturns')
    annualized_volatility = sgqlc.types.Field(Float, graphql_name='annualizedVolatility')
    information_ratio = sgqlc.types.Field(Float, graphql_name='informationRatio')
    sortino_ratio = sgqlc.types.Field(Float, graphql_name='sortinoRatio')
    max_drawdown = sgqlc.types.Field(MaxDrawdown, graphql_name='maxDrawdown')
class PnlDate(sgqlc.types.Type):
    """PnL entries for one (required) date, broken out by asset class.

    sgqlc binding for the GraphQL `PnlDate` type: equities, currencies,
    swaps, fixed income, commodities, indices and other assets.
    """
    __schema__ = schema
    date = sgqlc.types.Field(sgqlc.types.non_null(Date), graphql_name='date')
    equities = sgqlc.types.Field(sgqlc.types.list_of('PnlEquity'), graphql_name='equities')
    currencies = sgqlc.types.Field(sgqlc.types.list_of('PnlOtherAsset'), graphql_name='currencies')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('PnlOtherAsset'), graphql_name='swaps')
    fixed_income = sgqlc.types.Field(sgqlc.types.list_of('PnlFixedIncome'), graphql_name='fixedIncome')
    commodities = sgqlc.types.Field(sgqlc.types.list_of('PnlOtherAsset'), graphql_name='commodities')
    indices = sgqlc.types.Field(sgqlc.types.list_of('PnlOtherAsset'), graphql_name='indices')
    other_assets = sgqlc.types.Field(sgqlc.types.list_of('PnlOtherAsset'), graphql_name='otherAssets')
class PnlEquity(sgqlc.types.Type):
    """PnL amount for one equity, identified by a `PositionSetEquityId`.

    sgqlc binding for the GraphQL `PnlEquity` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetEquityId', graphql_name='id')
    amount = sgqlc.types.Field(Float, graphql_name='amount')
class PnlFixedIncome(sgqlc.types.Type):
    """PnL amount for one fixed-income position, identified by a
    `PositionSetFixedIncomeId`.

    sgqlc binding for the GraphQL `PnlFixedIncome` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetFixedIncomeId', graphql_name='id')
    amount = sgqlc.types.Field(Float, graphql_name='amount')
class PnlOtherAsset(sgqlc.types.Type):
    """PnL amount for a non-equity/non-fixed-income asset (string id).

    sgqlc binding for the GraphQL `PnlOtherAsset` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    amount = sgqlc.types.Field(Float, graphql_name='amount')
class Portfolio(sgqlc.types.Type):
    """A portfolio with its metadata and dated position/PnL data.

    sgqlc binding for the GraphQL `Portfolio` type.  Besides identifying
    metadata (id, alias, name, description, default model, availability),
    it exposes date-ranged sub-queries: `dates` (position-set dates),
    `dailyPnl`, `perSecurityPnl`, and experiment lookups.  Trailing
    underscores in Python argument names (``from_``) map to reserved
    GraphQL names (``from``).
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    alias = sgqlc.types.Field(String, graphql_name='alias')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    default_model_id = sgqlc.types.Field(String, graphql_name='defaultModelId')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    dates = sgqlc.types.Field(sgqlc.types.list_of('PositionSetDate'), graphql_name='dates', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('model_id', sgqlc.types.Arg(String, graphql_name='modelId', default=None)),
    ))
    )
    daily_pnl = sgqlc.types.Field(sgqlc.types.list_of(DailyPnlDate), graphql_name='dailyPnl', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
    per_security_pnl = sgqlc.types.Field(sgqlc.types.list_of(PnlDate), graphql_name='perSecurityPnl', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
    experiments = sgqlc.types.Field(sgqlc.types.list_of(ExperimentMetadata), graphql_name='experiments', args=sgqlc.types.ArgDict((
        ('type', sgqlc.types.Arg(ExperimentType, graphql_name='type', default=None)),
    ))
    )
    experiment = sgqlc.types.Field(Experiment, graphql_name='experiment', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    rollover_position_set_to_current_date = sgqlc.types.Field(Boolean, graphql_name='rolloverPositionSetToCurrentDate')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
class PortfolioMetadata(sgqlc.types.Type):
    """Lightweight portfolio metadata (no dated sub-queries).

    sgqlc binding for the GraphQL `PortfolioMetadata` type; mirrors the
    scalar fields of `Portfolio` plus `modelId`.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    alias = sgqlc.types.Field(String, graphql_name='alias')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    rollover_position_set_to_current_date = sgqlc.types.Field(Boolean, graphql_name='rolloverPositionSetToCurrentDate')
    model_id = sgqlc.types.Field(String, graphql_name='modelId')
    default_model_id = sgqlc.types.Field(String, graphql_name='defaultModelId')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
class PositionSetDate(sgqlc.types.Type):
    """Positions held on one date, with market-value aggregates.

    sgqlc binding for the GraphQL `PositionSetDate` type: equity/gmv and
    long/short market values, a roll-over source date, and per-asset-class
    position lists (equities, currencies, swaps, fixed income, commodities,
    indices, other assets).
    """
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    equity = sgqlc.types.Field(Float, graphql_name='equity')
    gmv = sgqlc.types.Field(Float, graphql_name='gmv')
    long_market_value = sgqlc.types.Field(Float, graphql_name='longMarketValue')
    short_market_value = sgqlc.types.Field(Float, graphql_name='shortMarketValue')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    equities = sgqlc.types.Field(sgqlc.types.list_of('PositionSetEquity'), graphql_name='equities')
    currencies = sgqlc.types.Field(sgqlc.types.list_of('PositionSetOtherAsset'), graphql_name='currencies')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('PositionSetOtherAsset'), graphql_name='swaps')
    fixed_income = sgqlc.types.Field(sgqlc.types.list_of('PositionSetFixedIncome'), graphql_name='fixedIncome')
    commodities = sgqlc.types.Field(sgqlc.types.list_of('PositionSetOtherAsset'), graphql_name='commodities')
    indices = sgqlc.types.Field(sgqlc.types.list_of('PositionSetOtherAsset'), graphql_name='indices')
    other_assets = sgqlc.types.Field(sgqlc.types.list_of('PositionSetOtherAsset'), graphql_name='otherAssets')
class PositionSetEquity(sgqlc.types.Type):
    """One equity position: identifier, exposure and grouping attributes.

    sgqlc binding for the GraphQL `PositionSetEquity` type.  The
    `tradeAsPercentADV` field is exposed in Python as
    ``trade_as_percent_adv``.
    """
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetEquityId', graphql_name='id')
    economic_exposure = sgqlc.types.Field(Float, graphql_name='economicExposure')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    trade_as_percent_adv = sgqlc.types.Field(Float, graphql_name='tradeAsPercentADV')
class PositionSetEquityId(sgqlc.types.Type):
    """Equity identifiers: ticker+MIC, SEDOL, ISIN, CUSIP, or a
    model-provider id.

    sgqlc binding for the GraphQL `PositionSetEquityId` type.
    """
    __schema__ = schema
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    mic = sgqlc.types.Field(String, graphql_name='mic')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    model_provider_id = sgqlc.types.Field(String, graphql_name='modelProviderId')
class PositionSetFixedIncome(sgqlc.types.Type):
    """One fixed-income position: identifier plus economic exposure.

    sgqlc binding for the GraphQL `PositionSetFixedIncome` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field('PositionSetFixedIncomeId', graphql_name='id')
    economic_exposure = sgqlc.types.Field(Float, graphql_name='economicExposure')
class PositionSetFixedIncomeId(sgqlc.types.Type):
    """Fixed-income identifier: ISIN only.

    sgqlc binding for the GraphQL `PositionSetFixedIncomeId` type.
    """
    __schema__ = schema
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
class PositionSetOtherAsset(sgqlc.types.Type):
    """A non-equity/non-fixed-income position: string id plus exposure.

    sgqlc binding for the GraphQL `PositionSetOtherAsset` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    economic_exposure = sgqlc.types.Field(Float, graphql_name='economicExposure')
class Query(sgqlc.types.Type):
    """Root GraphQL query type (sgqlc binding).

    Read-only entry points of the API.  Each plural field lists metadata
    objects; the matching singular field fetches one object by required
    key (usually `id`, `ticker` for swaps).  Python argument names with a
    trailing underscore (``from_``) map to reserved GraphQL names.
    """
    __schema__ = schema
    benchmarks = sgqlc.types.Field(sgqlc.types.list_of(BenchmarkMetadata), graphql_name='benchmarks')
    classifications = sgqlc.types.Field(sgqlc.types.list_of(ClassificationMetadata), graphql_name='classifications')
    classification = sgqlc.types.Field(Classification, graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    content_sets = sgqlc.types.Field(sgqlc.types.list_of(ContentSetMetadata), graphql_name='contentSets')
    content_set = sgqlc.types.Field(ContentSet, graphql_name='contentSet', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    daily_pnl = sgqlc.types.Field(sgqlc.types.list_of(DailyPnlDate), graphql_name='dailyPnl', args=sgqlc.types.ArgDict((
        ('portfolio_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='portfolioId', default=None)),
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
    ))
    )
    forecasts = sgqlc.types.Field(sgqlc.types.list_of(ForecastMeta), graphql_name='forecasts')
    forecast = sgqlc.types.Field(ForecastDetails, graphql_name='forecast', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(ShortId), graphql_name='id', default=None)),
    ))
    )
    meta = sgqlc.types.Field(Meta, graphql_name='meta')
    model = sgqlc.types.Field(Model, graphql_name='model', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    models = sgqlc.types.Field(sgqlc.types.list_of(ModelMetadata), graphql_name='models')
    portfolios = sgqlc.types.Field(sgqlc.types.list_of(PortfolioMetadata), graphql_name='portfolios')
    portfolio = sgqlc.types.Field(Portfolio, graphql_name='portfolio', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    research_topics = sgqlc.types.Field(sgqlc.types.list_of('ResearchTopicMetadata'), graphql_name='researchTopics')
    research_topic = sgqlc.types.Field('ResearchTopic', graphql_name='researchTopic', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
    swaps = sgqlc.types.Field(sgqlc.types.list_of('SwapMetadata'), graphql_name='swaps')
    swap = sgqlc.types.Field('Swap', graphql_name='swap', args=sgqlc.types.ArgDict((
        ('ticker', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='ticker', default=None)),
    ))
    )
    watchlists = sgqlc.types.Field(sgqlc.types.list_of('WatchlistMeta'), graphql_name='watchlists')
    watchlist = sgqlc.types.Field('Watchlist', graphql_name='watchlist', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
class ReferenceInstrument(sgqlc.types.Type):
    """A typed reference to a security (type + universal security id).

    sgqlc binding for the GraphQL `ReferenceInstrument` type.
    """
    __schema__ = schema
    type = sgqlc.types.Field(ReferenceInstrumentType, graphql_name='type')
    security_id = sgqlc.types.Field('UniversalId', graphql_name='securityId')
class ResearchTopic(sgqlc.types.Type):
    """A research topic with its reference instrument and experiments.

    sgqlc binding for the GraphQL `ResearchTopic` type; `experiments`
    can be filtered by `ExperimentType`, and `experiment` fetches a
    single experiment by required id.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    reference_instrument = sgqlc.types.Field(ReferenceInstrument, graphql_name='referenceInstrument')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
    experiments = sgqlc.types.Field(sgqlc.types.list_of(ExperimentMetadata), graphql_name='experiments', args=sgqlc.types.ArgDict((
        ('type', sgqlc.types.Arg(ExperimentType, graphql_name='type', default=None)),
    ))
    )
    experiment = sgqlc.types.Field(Experiment, graphql_name='experiment', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
    ))
    )
class ResearchTopicMetadata(sgqlc.types.Type):
    """Research-topic metadata without the experiment sub-queries.

    sgqlc binding for the GraphQL `ResearchTopicMetadata` type; mirrors
    the scalar fields of `ResearchTopic`.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    reference_instrument = sgqlc.types.Field(ReferenceInstrument, graphql_name='referenceInstrument')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
class Risk(sgqlc.types.Type):
    """Dated risk value with its attribution breakdown.

    sgqlc binding for the GraphQL `Risk` type; `rolledOverFrom` carries
    the source date when a value was rolled over.
    """
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    rolled_over_from = sgqlc.types.Field(Date, graphql_name='rolledOverFrom')
    total = sgqlc.types.Field(Float, graphql_name='total')
    attribution = sgqlc.types.Field('RiskAttribution', graphql_name='attribution')
class RiskAttribution(sgqlc.types.Type):
    """Risk attribution: a summary plus a filterable per-factor breakdown.

    sgqlc binding for the GraphQL `RiskAttribution` type; `factors` can
    be filtered by factor `category` and/or `id`.
    """
    __schema__ = schema
    summary = sgqlc.types.Field('RiskAttributionSummary', graphql_name='summary')
    factors = sgqlc.types.Field(sgqlc.types.list_of('RiskAttributionFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
    ))
    )
class RiskAttributionFactor(sgqlc.types.Type):
    """A single factor's risk contribution: id, name, category and value.

    sgqlc binding for the GraphQL `RiskAttributionFactor` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    value = sgqlc.types.Field(Float, graphql_name='value')
class RiskAttributionSummary(sgqlc.types.Type):
    """Risk attribution totals split into factors and specific parts.

    sgqlc binding for the GraphQL `RiskAttributionSummary` type.
    """
    __schema__ = schema
    factors = sgqlc.types.Field(Float, graphql_name='factors')
    specific = sgqlc.types.Field(Float, graphql_name='specific')
class RiskContributor(sgqlc.types.Type):
    """A single security's contribution to risk.

    sgqlc binding for the GraphQL `RiskContributor` type: security
    identifiers (id, sedol, isin, cusip), grouping attributes (country,
    currency, sector, classification), its percent of equity, the
    contribution `total`, and a nested attribution breakdown.
    """
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
    ))
    )
    description = sgqlc.types.Field(String, graphql_name='description')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    percent_equity = sgqlc.types.Field(Float, graphql_name='percentEquity')
    total = sgqlc.types.Field(Float, graphql_name='total')
    attribution = sgqlc.types.Field('RiskContributorAttribution', graphql_name='attribution')
class RiskContributorAttribution(sgqlc.types.Type):
    """Per-contributor risk attribution: summary plus per-factor values.

    sgqlc binding for the GraphQL `RiskContributorAttribution` type.
    """
    __schema__ = schema
    summary = sgqlc.types.Field('RiskContributorAttributionSummary', graphql_name='summary')
    factors = sgqlc.types.Field(sgqlc.types.list_of('RiskContributorAttributionFactor'), graphql_name='factors')
class RiskContributorAttributionFactor(sgqlc.types.Type):
    """A factor's risk contribution for one contributor.

    sgqlc binding for the GraphQL `RiskContributorAttributionFactor` type.
    """
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    value = sgqlc.types.Field(Float, graphql_name='value')
class RiskContributorAttributionSummary(sgqlc.types.Type):
    """Contributor risk attribution totals: factors and specific.

    sgqlc binding for the GraphQL `RiskContributorAttributionSummary` type.
    """
    __schema__ = schema
    factors = sgqlc.types.Field(Float, graphql_name='factors')
    specific = sgqlc.types.Field(Float, graphql_name='specific')
class RiskContributorGroup(sgqlc.types.Type):
    """A named group of risk contributors with aggregate figures.

    sgqlc binding for the GraphQL `RiskContributorGroup` type: the group's
    name/id, its total percent of equity, the member contributors, and a
    group-level attribution.
    """
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    id = sgqlc.types.Field(String, graphql_name='id')
    total_percent_equity = sgqlc.types.Field(Float, graphql_name='totalPercentEquity')
    contributors = sgqlc.types.Field(sgqlc.types.list_of(RiskContributor), graphql_name='contributors')
    attribution = sgqlc.types.Field(RiskContributorAttribution, graphql_name='attribution')
class SearchResultFacet(sgqlc.types.Type):
__schema__ = schema
id = sgqlc.types.Field(String, graphql_name='id')
count = sgqlc.types.Field(Int, graphql_name='count')
# A security (instrument) whose analytics are resolved with per-field args.
# NOTE: `from_` on the Python side maps to the GraphQL argument `from`
# (sgqlc appends the underscore to avoid the Python keyword).
class Security(sgqlc.types.Type):
    __schema__ = schema
    # Descriptive data (name, ticker, identifiers) as of date `on`.
    descriptors = sgqlc.types.Field('SecurityDescriptors', graphql_name='descriptors', args=sgqlc.types.ArgDict((
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
))
    )
    # Classification entry for scheme `id` as of `on`, optionally at a `tier`.
    classification = sgqlc.types.Field('SecurityClassification', graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('on', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='on', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
))
    )
    availability = sgqlc.types.Field('SecurityAvailability', graphql_name='availability')
    # Performance series over [from, to], with optional aggregation/interval.
    performance = sgqlc.types.Field(sgqlc.types.list_of('SecurityPerformance'), graphql_name='performance', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('aggregation', sgqlc.types.Arg(Aggregation, graphql_name='aggregation', default=None)),
        ('interval', sgqlc.types.Arg(Interval, graphql_name='interval', default=None)),
))
    )
    # Factor exposure series over [from, to].
    exposure = sgqlc.types.Field(sgqlc.types.list_of('SecurityExposure'), graphql_name='exposure', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(Interval, graphql_name='interval', default=None)),
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
))
    )
    # Predicted-risk series over [from, to].
    risk = sgqlc.types.Field(sgqlc.types.list_of('SecurityPredictedRisk'), graphql_name='risk', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(Interval, graphql_name='interval', default=None)),
))
    )
    # Beta series (predicted and historical) over [from, to].
    beta = sgqlc.types.Field(sgqlc.types.list_of('SecurityBeta'), graphql_name='beta', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(Interval, graphql_name='interval', default=None)),
))
    )
# Date range for which data exists for a security.
class SecurityAvailability(sgqlc.types.Type):
    __schema__ = schema
    from_ = sgqlc.types.Field(Date, graphql_name='from')
    to = sgqlc.types.Field(Date, graphql_name='to')
# One dated beta observation.
class SecurityBeta(sgqlc.types.Type):
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    predicted = sgqlc.types.Field(Float, graphql_name='predicted')
    historical = sgqlc.types.Field(Float, graphql_name='historical')
# A classification-scheme entry (id + display name).
class SecurityClassification(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
# Code/description pair describing one security attribute.
class SecurityDescriptor(sgqlc.types.Type):
    __schema__ = schema
    code = sgqlc.types.Field(String, graphql_name='code')
    description = sgqlc.types.Field(String, graphql_name='description')
# Point-in-time descriptive data for a security.
class SecurityDescriptors(sgqlc.types.Type):
    __schema__ = schema
    name = sgqlc.types.Field(String, graphql_name='name')
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    mic = sgqlc.types.Field(String, graphql_name='mic')
    exchange = sgqlc.types.Field(String, graphql_name='exchange')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    model_provider_id = sgqlc.types.Field(String, graphql_name='modelProviderId')
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    sedol = sgqlc.types.Field(Sedol, graphql_name='sedol')
    isin = sgqlc.types.Field(Isin, graphql_name='isin')
    cusip = sgqlc.types.Field(Cusip, graphql_name='cusip')
    average_daily_volume = sgqlc.types.Field(Float, graphql_name='averageDailyVolume')
    market_cap = sgqlc.types.Field(Float, graphql_name='marketCap')
# Dated factor-exposure snapshot for a security.
class SecurityExposure(sgqlc.types.Type):
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    # Exposures, optionally filtered by factor `id`s and/or `category`s.
    factors = sgqlc.types.Field(sgqlc.types.list_of('SecurityExposureFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
))
    )
# A single factor exposure expressed as a z-score.
class SecurityExposureFactor(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    z_score = sgqlc.types.Field(Float, graphql_name='zScore')
# One dated performance observation for a security.
class SecurityPerformance(sgqlc.types.Type):
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    percent_price_change_cumulative = sgqlc.types.Field('SecurityPerformanceItem', graphql_name='percentPriceChangeCumulative')
# Attribution of a security's performance into factor and specific parts.
class SecurityPerformanceAttribution(sgqlc.types.Type):
    __schema__ = schema
    summary = sgqlc.types.Field('SecurityPerformanceAttributionSummary', graphql_name='summary')
    # Per-factor entries, optionally filtered by `category` and/or `id`.
    factors = sgqlc.types.Field(sgqlc.types.list_of('SecurityPerformanceAttributionFactor'), graphql_name='factors', args=sgqlc.types.ArgDict((
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
))
    )
# One factor's contribution to a security's performance attribution.
class SecurityPerformanceAttributionFactor(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    name = sgqlc.types.Field(String, graphql_name='name')
    category = sgqlc.types.Field(String, graphql_name='category')
    value = sgqlc.types.Field(Float, graphql_name='value')
# Factor vs. specific totals for a performance attribution.
class SecurityPerformanceAttributionSummary(sgqlc.types.Type):
    __schema__ = schema
    factors = sgqlc.types.Field(Float, graphql_name='factors')
    specific = sgqlc.types.Field(Float, graphql_name='specific')
# A performance figure plus its attribution breakdown.
class SecurityPerformanceItem(sgqlc.types.Type):
    __schema__ = schema
    total = sgqlc.types.Field(Float, graphql_name='total')
    attribution = sgqlc.types.Field(SecurityPerformanceAttribution, graphql_name='attribution')
# One dated predicted-risk observation with attribution.
class SecurityPredictedRisk(sgqlc.types.Type):
    __schema__ = schema
    date = sgqlc.types.Field(Date, graphql_name='date')
    total = sgqlc.types.Field(Float, graphql_name='total')
    attribution = sgqlc.types.Field(RiskAttribution, graphql_name='attribution')
# A security entry as returned by search, with identifiers and analytics.
class SecurityResult(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    sedol = sgqlc.types.Field(String, graphql_name='sedol')
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    mic = sgqlc.types.Field(String, graphql_name='mic')
    description = sgqlc.types.Field(String, graphql_name='description')
    average_daily_volume = sgqlc.types.Field(Float, graphql_name='averageDailyVolume')
    market_capitalization = sgqlc.types.Field(Float, graphql_name='marketCapitalization')
    country = sgqlc.types.Field(String, graphql_name='country')
    sector = sgqlc.types.Field(String, graphql_name='sector')
    # Classification entry for scheme `id`, optionally at a `tier`.
    classification = sgqlc.types.Field(SecurityClassification, graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
))
    )
    currency = sgqlc.types.Field(String, graphql_name='currency')
    model_provider_id = sgqlc.types.Field(String, graphql_name='modelProviderId')
    # Factor exposures, filterable by content set, factor ids, categories.
    factor_exposure = sgqlc.types.Field(sgqlc.types.list_of(SecurityExposureFactor), graphql_name='factorExposure', args=sgqlc.types.ArgDict((
        ('content_set_id', sgqlc.types.Arg(String, graphql_name='contentSetId', default=None)),
        ('id', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='id', default=None)),
        ('category', sgqlc.types.Arg(sgqlc.types.list_of(String), graphql_name='category', default=None)),
))
    )
    risk = sgqlc.types.Field('SecurityRisk', graphql_name='risk')
    beta = sgqlc.types.Field('SecurityResultBeta', graphql_name='beta')
# Beta pair (predicted/historical) attached to a search result.
class SecurityResultBeta(sgqlc.types.Type):
    __schema__ = schema
    predicted = sgqlc.types.Field(Float, graphql_name='predicted')
    historical = sgqlc.types.Field(Float, graphql_name='historical')
# Risk figures for a security search result.
class SecurityRisk(sgqlc.types.Type):
    __schema__ = schema
    standard_deviation = sgqlc.types.Field('SecurityRiskStandardDeviation', graphql_name='standardDeviation')
    variance_decomposition = sgqlc.types.Field('SecurityRiskVarianceDecomposition', graphql_name='varianceDecomposition')
# Total standard deviation of a security's risk.
class SecurityRiskStandardDeviation(sgqlc.types.Type):
    __schema__ = schema
    total = sgqlc.types.Field(Float, graphql_name='total')
# Variance decomposition wrapper.
class SecurityRiskVarianceDecomposition(sgqlc.types.Type):
    __schema__ = schema
    summary = sgqlc.types.Field('SecurityRiskVarianceDecompositionSummary', graphql_name='summary')
# Specific vs. factor variance shares.
class SecurityRiskVarianceDecompositionSummary(sgqlc.types.Type):
    __schema__ = schema
    specific = sgqlc.types.Field(Float, graphql_name='specific')
    factors = sgqlc.types.Field(Float, graphql_name='factors')
# Paged security search result with facet counts.
class SecuritySearchResult(sgqlc.types.Type):
    __schema__ = schema
    securities = sgqlc.types.Field(sgqlc.types.list_of(SecurityResult), graphql_name='securities')
    count = sgqlc.types.Field(Int, graphql_name='count')
    countries = sgqlc.types.Field(sgqlc.types.list_of(SearchResultFacet), graphql_name='countries')
    currencies = sgqlc.types.Field(sgqlc.types.list_of(SearchResultFacet), graphql_name='currencies')
    sectors = sgqlc.types.Field(sgqlc.types.list_of(SearchResultFacet), graphql_name='sectors')
    # Classification facets for the requested scheme `id`.
    classification = sgqlc.types.Field(sgqlc.types.list_of(SearchResultFacet), graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
))
    )
# Success flag for a single-item delete mutation.
class SingleDeleteResult(sgqlc.types.Type):
    __schema__ = schema
    ok = sgqlc.types.Field(Boolean, graphql_name='ok')
# A swap instrument with its availability window and position dates.
class Swap(sgqlc.types.Type):
    __schema__ = schema
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    description = sgqlc.types.Field(String, graphql_name='description')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    termination_date = sgqlc.types.Field(Date, graphql_name='terminationDate')
    # Position-set dates within [from, to] for the given model.
    dates = sgqlc.types.Field(sgqlc.types.list_of(PositionSetDate), graphql_name='dates', args=sgqlc.types.ArgDict((
        ('from_', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='from', default=None)),
        ('to', sgqlc.types.Arg(sgqlc.types.non_null(Date), graphql_name='to', default=None)),
        ('interval', sgqlc.types.Arg(PositionSetInterval, graphql_name='interval', default=None)),
        ('model_id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='modelId', default=None)),
))
    )
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
# Swap metadata variant with a plain (unparameterised) date list.
class SwapMetadata(sgqlc.types.Type):
    __schema__ = schema
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    description = sgqlc.types.Field(String, graphql_name='description')
    available_from = sgqlc.types.Field(Date, graphql_name='availableFrom')
    termination_date = sgqlc.types.Field(Date, graphql_name='terminationDate')
    dates = sgqlc.types.Field(sgqlc.types.list_of(Date), graphql_name='dates')
    last_updated = sgqlc.types.Field(DateTime, graphql_name='lastUpdated')
# Portfolio turnover with per-contributor breakdown.
class Turnover(sgqlc.types.Type):
    __schema__ = schema
    total = sgqlc.types.Field(Float, graphql_name='total')
    contributors = sgqlc.types.Field(sgqlc.types.list_of('TurnoverContributor'), graphql_name='contributors')
# A single contributor to turnover.
class TurnoverContributor(sgqlc.types.Type):
    __schema__ = schema
    asset_class = sgqlc.types.Field(String, graphql_name='assetClass')
    asset_subclass = sgqlc.types.Field(String, graphql_name='assetSubclass')
    id = sgqlc.types.Field(String, graphql_name='id')
    country = sgqlc.types.Field(String, graphql_name='country')
    currency = sgqlc.types.Field(String, graphql_name='currency')
    # Classification entry for scheme `id`, optionally at a `tier`.
    classification = sgqlc.types.Field(SecurityClassification, graphql_name='classification', args=sgqlc.types.ArgDict((
        ('id', sgqlc.types.Arg(sgqlc.types.non_null(String), graphql_name='id', default=None)),
        ('tier', sgqlc.types.Arg(String, graphql_name='tier', default=None)),
))
    )
    value = sgqlc.types.Field(Float, graphql_name='value')
# Cross-reference identifiers for a security.
class UniversalId(sgqlc.types.Type):
    __schema__ = schema
    sedol = sgqlc.types.Field(String, graphql_name='sedol')
    isin = sgqlc.types.Field(String, graphql_name='isin')
    ticker = sgqlc.types.Field(String, graphql_name='ticker')
    mic = sgqlc.types.Field(String, graphql_name='mic')
    country = sgqlc.types.Field(String, graphql_name='country')
# Success flag for the upload-content-set mutation.
class UploadContentSetResult(sgqlc.types.Type):
    __schema__ = schema
    ok = sgqlc.types.Field(Boolean, graphql_name='ok')
# Success flag for the upload-daily-PnL mutation.
class UploadDailyPnlResult(sgqlc.types.Type):
    __schema__ = schema
    ok = sgqlc.types.Field(Boolean, graphql_name='ok')
# Success flag for the upload-PnL mutation.
class UploadPnlResult(sgqlc.types.Type):
    __schema__ = schema
    ok = sgqlc.types.Field(Boolean, graphql_name='ok')
# Result of uploading a position set, with optional model coverage.
class UploadPositionSetResult(sgqlc.types.Type):
    __schema__ = schema
    ok = sgqlc.types.Field(Boolean, graphql_name='ok')
    # Coverage report for the given model (if requested).
    coverage = sgqlc.types.Field(Coverage, graphql_name='coverage', args=sgqlc.types.ArgDict((
        ('model_id', sgqlc.types.Arg(String, graphql_name='modelId', default=None)),
))
    )
# A watchlist with its member assets grouped by asset class.
class Watchlist(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
    name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    alias = sgqlc.types.Field(String, graphql_name='alias')
    count = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='count')
    last_updated = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name='lastUpdated')
    equities = sgqlc.types.Field(sgqlc.types.list_of('WatchlistEquity'), graphql_name='equities')
    currencies = sgqlc.types.Field(sgqlc.types.list_of('WatchlistOtherAsset'), graphql_name='currencies')
    swaps = sgqlc.types.Field(sgqlc.types.list_of('WatchlistOtherAsset'), graphql_name='swaps')
    fixed_income = sgqlc.types.Field(sgqlc.types.list_of('WatchlistFixedIncome'), graphql_name='fixedIncome')
    commodities = sgqlc.types.Field(sgqlc.types.list_of('WatchlistOtherAsset'), graphql_name='commodities')
    indices = sgqlc.types.Field(sgqlc.types.list_of('WatchlistOtherAsset'), graphql_name='indices')
# An equity watchlist entry.
class WatchlistEquity(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(PositionSetEquityId, graphql_name='id')
    added_on = sgqlc.types.Field(Date, graphql_name='addedOn')
# A fixed-income watchlist entry.
class WatchlistFixedIncome(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(PositionSetFixedIncomeId, graphql_name='id')
    added_on = sgqlc.types.Field(Date, graphql_name='addedOn')
# Watchlist header data without the member lists.
class WatchlistMeta(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='id')
    name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name')
    description = sgqlc.types.Field(String, graphql_name='description')
    alias = sgqlc.types.Field(String, graphql_name='alias')
    count = sgqlc.types.Field(sgqlc.types.non_null(Float), graphql_name='count')
    last_updated = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name='lastUpdated')
# A non-equity, non-fixed-income watchlist entry (string id).
class WatchlistOtherAsset(sgqlc.types.Type):
    __schema__ = schema
    id = sgqlc.types.Field(String, graphql_name='id')
    added_on = sgqlc.types.Field(Date, graphql_name='addedOn')
########################################################################
# Unions
########################################################################
########################################################################
# Schema Entry Points
########################################################################
# Wire the generated root types into the schema object.
schema.query_type = Query
schema.mutation_type = Mutation
# This API exposes no GraphQL subscriptions.
schema.subscription_type = None
| 51.520434
| 182
| 0.736518
| 16,849
| 142,454
| 6.006647
| 0.041545
| 0.223801
| 0.150732
| 0.05059
| 0.832233
| 0.810179
| 0.778709
| 0.746369
| 0.705146
| 0.67992
| 0
| 0.000016
| 0.116199
| 142,454
| 2,764
| 183
| 51.539074
| 0.803838
| 0.000667
| 0
| 0.606772
| 0
| 0
| 0.139333
| 0.026181
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000903
| 0
| 0.703837
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
05adb478c1ff818385bcb67844d5497a6a59d86c
| 4,108
|
py
|
Python
|
tests/rules/test_S3CrossAccountTrustRule.py
|
lpmi-13/cfripper
|
36bfdc45855112496977806e1a93d98d010399ed
|
[
"Apache-2.0"
] | null | null | null |
tests/rules/test_S3CrossAccountTrustRule.py
|
lpmi-13/cfripper
|
36bfdc45855112496977806e1a93d98d010399ed
|
[
"Apache-2.0"
] | null | null | null |
tests/rules/test_S3CrossAccountTrustRule.py
|
lpmi-13/cfripper
|
36bfdc45855112496977806e1a93d98d010399ed
|
[
"Apache-2.0"
] | null | null | null |
from pytest import fixture
from cfripper.config.config import Config
from cfripper.model.enums import RuleGranularity, RuleMode, RuleRisk
from cfripper.model.result import Failure
from cfripper.rules import S3CrossAccountTrustRule
from tests.utils import compare_lists_of_failures, get_cfmodel_from
@fixture()
def s3_bucket_cross_account():
    """CFModel fixture: bucket policy granting access to a foreign account."""
    template = get_cfmodel_from("rules/S3CrossAccountTrustRule/s3_bucket_cross_account.json")
    return template.resolve()
@fixture()
def s3_bucket_cross_account_from_aws_service():
    """CFModel fixture: cross-account access granted to an AWS service principal."""
    template = get_cfmodel_from("rules/S3CrossAccountTrustRule/s3_bucket_cross_account_from_aws_service.json")
    return template.resolve()
@fixture()
def s3_bucket_cross_account_and_normal():
    """CFModel fixture: one cross-account statement alongside a normal one."""
    template = get_cfmodel_from("rules/S3CrossAccountTrustRule/s3_bucket_cross_account_and_normal.json")
    return template.resolve()
def test_s3_bucket_cross_account(s3_bucket_cross_account):
    """A foreign-account principal in the bucket policy is a BLOCKING failure."""
    config = Config(aws_account_id="123456789")
    result = S3CrossAccountTrustRule(config).invoke(s3_bucket_cross_account)
    assert not result.valid
    expected = Failure(
        granularity=RuleGranularity.RESOURCE,
        reason="S3BucketPolicyAccountAccess has forbidden cross-account policy allow with arn:aws:iam::987654321:root for an S3 bucket.",
        risk_value=RuleRisk.MEDIUM,
        rule="S3CrossAccountTrustRule",
        rule_mode=RuleMode.BLOCKING,
        actions=None,
        resource_ids={"S3BucketPolicyAccountAccess"},
    )
    assert compare_lists_of_failures(result.failures, [expected])
def test_s3_bucket_cross_account_and_normal(s3_bucket_cross_account_and_normal):
    """Only the cross-account statement fails; the in-account one passes."""
    config = Config(aws_account_id="123456789")
    result = S3CrossAccountTrustRule(config).invoke(s3_bucket_cross_account_and_normal)
    assert not result.valid
    expected = Failure(
        granularity=RuleGranularity.RESOURCE,
        reason="S3BucketPolicyAccountAccess has forbidden cross-account policy allow with arn:aws:iam::666555444:root for an S3 bucket.",
        risk_value=RuleRisk.MEDIUM,
        rule="S3CrossAccountTrustRule",
        rule_mode=RuleMode.BLOCKING,
        actions=None,
        resource_ids={"S3BucketPolicyAccountAccess"},
    )
    assert compare_lists_of_failures(result.failures, [expected])
def test_s3_bucket_cross_account_and_normal_with_org_aws_account(s3_bucket_cross_account_and_normal):
    """Whitelisting the account in `aws_principals` still flags the statement."""
    config = Config(aws_account_id="123456789", aws_principals=["666555444"])
    result = S3CrossAccountTrustRule(config).invoke(s3_bucket_cross_account_and_normal)
    assert not result.valid
    expected = Failure(
        granularity=RuleGranularity.RESOURCE,
        reason="S3BucketPolicyAccountAccess has forbidden cross-account policy allow with arn:aws:iam::666555444:root for an S3 bucket.",
        risk_value=RuleRisk.MEDIUM,
        rule="S3CrossAccountTrustRule",
        rule_mode=RuleMode.BLOCKING,
        actions=None,
        resource_ids={"S3BucketPolicyAccountAccess"},
    )
    assert compare_lists_of_failures(result.failures, [expected])
def test_s3_bucket_cross_account_for_current_account(s3_bucket_cross_account):
    """No failure when the 'cross' account is actually our own account."""
    rule = S3CrossAccountTrustRule(Config(aws_account_id="987654321"))
    outcome = rule.invoke(s3_bucket_cross_account)
    assert outcome.valid
    assert compare_lists_of_failures(outcome.failures, [])
def test_s3_bucket_cross_account_from_aws_service(s3_bucket_cross_account_from_aws_service):
    """AWS service principals are exempt from the cross-account check."""
    rule = S3CrossAccountTrustRule(Config(aws_account_id="123456789"))
    outcome = rule.invoke(s3_bucket_cross_account_from_aws_service)
    assert outcome.valid
    assert compare_lists_of_failures(outcome.failures, [])
def test_rule_supports_filter_config(s3_bucket_cross_account_and_normal, default_allow_all_config):
    """An allow-all filter config suppresses every failure."""
    outcome = S3CrossAccountTrustRule(default_allow_all_config).invoke(s3_bucket_cross_account_and_normal)
    assert outcome.valid
    assert compare_lists_of_failures(outcome.failures, [])
| 37.345455
| 145
| 0.728092
| 460
| 4,108
| 6.121739
| 0.16087
| 0.073864
| 0.106179
| 0.163352
| 0.850852
| 0.848366
| 0.81392
| 0.774858
| 0.732955
| 0.732955
| 0
| 0.03831
| 0.199367
| 4,108
| 109
| 146
| 37.688073
| 0.817878
| 0
| 0
| 0.619048
| 0
| 0.035714
| 0.185735
| 0.125122
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.107143
| false
| 0
| 0.071429
| 0.035714
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
05b4c7ce7bc4de4317288c29ca3169c18f3de527
| 3,096
|
py
|
Python
|
twrap/weight_init.py
|
itsnarsi/twrap
|
cc3128428e37fe0a363e5b18fd7fa0039a963365
|
[
"MIT"
] | null | null | null |
twrap/weight_init.py
|
itsnarsi/twrap
|
cc3128428e37fe0a363e5b18fd7fa0039a963365
|
[
"MIT"
] | null | null | null |
twrap/weight_init.py
|
itsnarsi/twrap
|
cc3128428e37fe0a363e5b18fd7fa0039a963365
|
[
"MIT"
] | null | null | null |
# @Author: Narsi Reddy <narsi>
# @Date: 2018-11-12T14:06:36-06:00
# @Last modified by: narsi
# @Last modified time: 2019-01-27T20:55:47-06:00
import numpy as np
import torch
import torch.nn as nn
import torch.nn.init as init
'''
https://gist.github.com/jeasinema/ed9236ce743c8efaf30fa2ff732749f5
'''
def weight_init(m):
    '''
    Initialize the parameters of a single module *m* in place.

    Usage:
        model = Model()
        model.apply(weight_init)

    Conventions (unchanged from the original implementation):
      * 1-D conv / transposed-conv weights and biases: normal distribution.
      * 2-D/3-D conv, transposed conv, and linear weights: Xavier-uniform
        with gain sqrt(2); Conv2d and Linear biases are zeroed, the other
        conv biases are drawn from a normal distribution.
      * Batch-norm: weight = 1, bias = 0.
      * Recurrent layers (LSTM/GRU and their cells): orthogonal for weight
        matrices, normal for bias vectors.
    Modules of any other type are left untouched.
    '''
    def xavier(tensor):
        # Project-wide Xavier-uniform convention (gain sqrt(2)).
        init.xavier_uniform_(tensor, gain=np.sqrt(2))

    def normal_bias(module):
        # Bias may be None (e.g. bias=False); the original swallowed the
        # resulting AttributeError with a bare except.
        if module.bias is not None:
            init.normal_(module.bias.data)

    def zero_bias(module):
        if module.bias is not None:
            init.constant_(module.bias.data, 0)

    if isinstance(m, (nn.Conv1d, nn.ConvTranspose1d)):
        init.normal_(m.weight.data)
        normal_bias(m)
    elif isinstance(m, nn.Conv2d):
        xavier(m.weight.data)
        zero_bias(m)
    elif isinstance(m, (nn.Conv3d, nn.ConvTranspose2d, nn.ConvTranspose3d)):
        xavier(m.weight.data)
        normal_bias(m)
    elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)):
        # affine=False batch-norm has weight/bias set to None; the original
        # only guarded BatchNorm2d and crashed for 1d/3d in that case.
        if m.weight is not None:
            init.constant_(m.weight.data, 1)
        zero_bias(m)
    elif isinstance(m, nn.Linear):
        xavier(m.weight.data)
        zero_bias(m)
    elif isinstance(m, (nn.LSTM, nn.LSTMCell, nn.GRU, nn.GRUCell)):
        for param in m.parameters():
            if len(param.shape) >= 2:
                init.orthogonal_(param.data)
            else:
                init.normal_(param.data)
# No CLI/demo behaviour; this module is import-only.
if __name__ == '__main__':
    pass
| 28.145455
| 66
| 0.537145
| 376
| 3,096
| 4.348404
| 0.210106
| 0.09419
| 0.111315
| 0.135168
| 0.73211
| 0.73211
| 0.717431
| 0.717431
| 0.717431
| 0.717431
| 0
| 0.039467
| 0.345284
| 3,096
| 109
| 67
| 28.40367
| 0.767144
| 0.062662
| 0
| 0.787234
| 0
| 0
| 0.002856
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010638
| false
| 0.12766
| 0.042553
| 0
| 0.053191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
05d6ce801be5db3a01d3c29bb636678e2821bba7
| 98
|
py
|
Python
|
u24_lymphocyte/third_party/treeano/node_utils.py
|
ALSM-PhD/quip_classification
|
7347bfaa5cf11ae2d7a528fbcc43322a12c795d3
|
[
"BSD-3-Clause"
] | 45
|
2015-04-26T04:45:51.000Z
|
2022-01-24T15:03:55.000Z
|
u24_lymphocyte/third_party/treeano/node_utils.py
|
ALSM-PhD/quip_classification
|
7347bfaa5cf11ae2d7a528fbcc43322a12c795d3
|
[
"BSD-3-Clause"
] | 8
|
2018-07-20T20:54:51.000Z
|
2020-06-12T05:36:04.000Z
|
u24_lymphocyte/third_party/treeano/node_utils.py
|
ALSM-PhD/quip_classification
|
7347bfaa5cf11ae2d7a528fbcc43322a12c795d3
|
[
"BSD-3-Clause"
] | 22
|
2018-05-21T23:57:20.000Z
|
2022-02-21T00:48:32.000Z
|
from . import core
def copy_node(node):
    """Return an independent copy of *node* via a data (de)serialization round-trip."""
    serialized = core.node_to_data(node)
    return core.node_from_data(serialized)
| 16.333333
| 55
| 0.755102
| 17
| 98
| 4.058824
| 0.529412
| 0.231884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 98
| 5
| 56
| 19.6
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
05faf70d9ae3464ab3ce8fa6474b6773d08418b0
| 1,651
|
py
|
Python
|
tests/test_provider_circonus_labs_circonus.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_circonus_labs_circonus.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_circonus_labs_circonus.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_circonus-labs_circonus.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:13:59 UTC)
def test_provider_import():
    # Smoke test: the generated provider module must be importable.
    import terrascript.provider.circonus_labs.circonus
def test_resource_import():
    # Smoke test: every generated resource class must be importable.
    from terrascript.resource.circonus_labs.circonus import circonus_check
    from terrascript.resource.circonus_labs.circonus import circonus_contact_group
    from terrascript.resource.circonus_labs.circonus import circonus_dashboard
    from terrascript.resource.circonus_labs.circonus import circonus_graph
    from terrascript.resource.circonus_labs.circonus import circonus_maintenance
    from terrascript.resource.circonus_labs.circonus import circonus_metric
    from terrascript.resource.circonus_labs.circonus import circonus_overlay_set
    from terrascript.resource.circonus_labs.circonus import circonus_rule_set
    from terrascript.resource.circonus_labs.circonus import circonus_rule_set_group
    from terrascript.resource.circonus_labs.circonus import circonus_worksheet
def test_datasource_import():
    # Smoke test: every generated data-source class must be importable.
    from terrascript.data.circonus_labs.circonus import circonus_account
    from terrascript.data.circonus_labs.circonus import circonus_collector
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.circonus_labs.circonus
#
# t = terrascript.provider.circonus_labs.circonus.circonus()
# s = str(t)
#
# assert 'https://github.com/circonus-labs/terraform-provider-circonus' in s
# assert '0.12.2' in s
| 33.693878
| 83
| 0.807389
| 209
| 1,651
| 6.167464
| 0.339713
| 0.158262
| 0.248254
| 0.242048
| 0.645462
| 0.615206
| 0.545384
| 0.545384
| 0.197828
| 0.101629
| 0
| 0.011111
| 0.127801
| 1,651
| 48
| 84
| 34.395833
| 0.884028
| 0.317989
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0
| 1
| 0.1875
| true
| 0
| 1
| 0
| 1.1875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
af339e145a8017a065e66456a0736a7ad0a5cd35
| 5,254
|
py
|
Python
|
PackGrid/Pack_AmmBeen.py
|
whitegreen/PackingDone
|
b7f1d2966f1d62a052ca9c96ce70b314d694c9f1
|
[
"MIT"
] | null | null | null |
PackGrid/Pack_AmmBeen.py
|
whitegreen/PackingDone
|
b7f1d2966f1d62a052ca9c96ce70b314d694c9f1
|
[
"MIT"
] | null | null | null |
PackGrid/Pack_AmmBeen.py
|
whitegreen/PackingDone
|
b7f1d2966f1d62a052ca9c96ce70b314d694c9f1
|
[
"MIT"
] | null | null | null |
import IPGrid
# Lattice dimensionality of the packing problem.
DM=4
# P: the fixed set of 89 admissible 4-tuples handed to the IP solver below
# ("en=5" per the original note). NOTE(review): the exact semantics of these
# tuples are defined by IPGrid.optimize — confirm there before editing.
P = [(-2, -2, -2, 0), (-2, -2, -1, 0), (-2, -2, -1, 1), (-2, -2, 0, 1), (-2, -2, 0, 2), (-2, -1, -1, 1), (-2, -1, 0, 1),
     (-2, -1, 0, 2), (-2, -1, 1, 1), (-2, -1, 1, 2), (-2, 0, 1, 2), (-2, 0, 2, 2), (-1, -2, -2, -1), (-1, -2, -2, 0),
     (-1, -2, -1, -1), (-1, -2, -1, 0), (-1, -2, -1, 1), (-1, -1, -2, -1), (-1, -1, -1, -1), (-1, -1, -1, 0),
     (-1, -1, -1, 1), (-1, -1, 0, 0), (-1, -1, 0, 1), (-1, -1, 1, 1), (-1, -1, 1, 2), (-1, 0, 0, 0), (-1, 0, 0, 1),
     (-1, 0, 1, 1), (-1, 0, 1, 2), (-1, 0, 2, 2), (-1, 1, 1, 1), (-1, 1, 1, 2), (-1, 1, 2, 1), (-1, 1, 2, 2),
     (0, -2, -2, -2), (0, -2, -2, -1), (0, -1, -2, -2), (0, -1, -2, -1), (0, -1, -1, -1), (0, -1, -1, 0), (0, -1, 0, 0),
     (0, 0, -1, -1), (0, 0, -1, 0), (0, 0, 0, -1), (0, 0, 0, 0), (0, 0, 0, 1), (0, 0, 1, 0), (0, 0, 1, 1), (0, 1, 0, 0),
     (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 2, 1), (0, 1, 2, 2), (0, 2, 2, 1), (0, 2, 2, 2), (1, -1, -2, -2),
     (1, -1, -2, -1), (1, -1, -1, -2), (1, -1, -1, -1), (1, 0, -2, -2), (1, 0, -1, -2), (1, 0, -1, -1), (1, 0, 0, -1),
     (1, 0, 0, 0), (1, 1, -1, -2), (1, 1, -1, -1), (1, 1, 0, -1), (1, 1, 0, 0), (1, 1, 1, -1), (1, 1, 1, 0),
     (1, 1, 1, 1), (1, 1, 2, 1), (1, 2, 1, -1), (1, 2, 1, 0), (1, 2, 1, 1), (1, 2, 2, 0), (1, 2, 2, 1), (2, 0, -2, -2),
     (2, 0, -1, -2), (2, 1, -1, -2), (2, 1, -1, -1), (2, 1, 0, -2), (2, 1, 0, -1), (2, 1, 1, -1), (2, 2, 0, -2),
     (2, 2, 0, -1), (2, 2, 1, -1), (2, 2, 1, 0), (2, 2, 2, 0)]
# Sanity check against accidental edits of the literal above.
assert ( 89 == len(P)) # en=5
'''P = [(-3, -3, -2, 0), (-3, -3, -2, 1), (-3, -3, -1, 1), (-3, -3, -1, 2), (-3, -3, 0, 2), (-3, -2, -1, 1),
(-3, -2, -1, 2), (-3, -2, 0, 1), (-3, -2, 0, 2), (-3, -2, 0, 3), (-3, -2, 1, 2), (-3, -2, 1, 3), (-3, -1, 0, 2),
(-3, -1, 1, 2), (-3, -1, 1, 3), (-3, -1, 2, 3), (-3, 0, 2, 3), (-2, -3, -3, -1), (-2, -3, -3, 0),
(-2, -3, -2, -1), (-2, -3, -2, 0), (-2, -3, -2, 1), (-2, -3, -1, 0), (-2, -3, -1, 1), (-2, -2, -2, 0),
(-2, -2, -1, 0), (-2, -2, -1, 1), (-2, -2, 0, 1), (-2, -2, 0, 2), (-2, -1, -1, 1), (-2, -1, 0, 1), (-2, -1, 0, 2),
(-2, -1, 1, 1), (-2, -1, 1, 2), (-2, -1, 1, 3), (-2, -1, 2, 3), (-2, 0, 1, 2), (-2, 0, 1, 3), (-2, 0, 2, 2),
(-2, 0, 2, 3), (-2, 0, 3, 3), (-2, 1, 2, 3), (-2, 1, 3, 3), (-1, -3, -3, -2), (-1, -3, -3, -1), (-1, -3, -2, -1),
(-1, -3, -2, 0), (-1, -2, -3, -2), (-1, -2, -3, -1), (-1, -2, -2, -1), (-1, -2, -2, 0), (-1, -2, -1, -1),
(-1, -2, -1, 0), (-1, -2, -1, 1), (-1, -1, -2, -1), (-1, -1, -1, -1), (-1, -1, -1, 0), (-1, -1, -1, 1),
(-1, -1, 0, 0), (-1, -1, 0, 1), (-1, -1, 1, 1), (-1, -1, 1, 2), (-1, 0, 0, 0), (-1, 0, 0, 1), (-1, 0, 1, 1),
(-1, 0, 1, 2), (-1, 0, 2, 2), (-1, 0, 2, 3), (-1, 1, 1, 1), (-1, 1, 1, 2), (-1, 1, 2, 1), (-1, 1, 2, 2),
(-1, 1, 2, 3), (-1, 1, 3, 2), (-1, 1, 3, 3), (-1, 2, 3, 2), (-1, 2, 3, 3), (0, -3, -3, -2), (0, -2, -3, -3),
(0, -2, -3, -2), (0, -2, -3, -1), (0, -2, -2, -2), (0, -2, -2, -1), (0, -1, -3, -2), (0, -1, -2, -2),
(0, -1, -2, -1), (0, -1, -1, -1), (0, -1, -1, 0), (0, -1, 0, 0), (0, 0, -1, -1), (0, 0, -1, 0), (0, 0, 0, -1),
(0, 0, 0, 0), (0, 0, 0, 1), (0, 0, 1, 0), (0, 0, 1, 1), (0, 1, 0, 0), (0, 1, 1, 0), (0, 1, 1, 1), (0, 1, 2, 1),
(0, 1, 2, 2), (0, 1, 3, 2), (0, 2, 2, 1), (0, 2, 2, 2), (0, 2, 3, 1), (0, 2, 3, 2), (0, 2, 3, 3), (0, 3, 3, 2),
(1, -2, -3, -3), (1, -2, -3, -2), (1, -1, -3, -3), (1, -1, -3, -2), (1, -1, -2, -3), (1, -1, -2, -2),
(1, -1, -2, -1), (1, -1, -1, -2), (1, -1, -1, -1), (1, 0, -2, -3), (1, 0, -2, -2), (1, 0, -1, -2), (1, 0, -1, -1),
(1, 0, 0, -1), (1, 0, 0, 0), (1, 1, -1, -2), (1, 1, -1, -1), (1, 1, 0, -1), (1, 1, 0, 0), (1, 1, 1, -1),
(1, 1, 1, 0), (1, 1, 1, 1), (1, 1, 2, 1), (1, 2, 1, -1), (1, 2, 1, 0), (1, 2, 1, 1), (1, 2, 2, 0), (1, 2, 2, 1),
(1, 2, 3, 1), (1, 2, 3, 2), (1, 3, 2, 0), (1, 3, 2, 1), (1, 3, 3, 1), (1, 3, 3, 2), (2, -1, -3, -3),
(2, -1, -2, -3), (2, 0, -3, -3), (2, 0, -2, -3), (2, 0, -2, -2), (2, 0, -1, -3), (2, 0, -1, -2), (2, 1, -2, -3),
(2, 1, -1, -3), (2, 1, -1, -2), (2, 1, -1, -1), (2, 1, 0, -2), (2, 1, 0, -1), (2, 1, 1, -1), (2, 2, 0, -2),
(2, 2, 0, -1), (2, 2, 1, -1), (2, 2, 1, 0), (2, 2, 2, 0), (2, 3, 1, -1), (2, 3, 1, 0), (2, 3, 2, -1),
(2, 3, 2, 0), (2, 3, 2, 1), (2, 3, 3, 0), (2, 3, 3, 1), (3, 0, -2, -3), (3, 1, -2, -3), (3, 1, -1, -3),
(3, 1, -1, -2), (3, 1, 0, -2), (3, 2, -1, -3), (3, 2, -1, -2), (3, 2, 0, -3), (3, 2, 0, -2), (3, 2, 0, -1),
(3, 2, 1, -2), (3, 2, 1, -1), (3, 3, 0, -2), (3, 3, 1, -2), (3, 3, 1, -1), (3, 3, 2, -1), (3, 3, 2, 0)]
assert ( 185 == len(P)) # en=7 '''
# ************************************************************************************
# Templates: one 3-cell line per lattice axis (4 of them), plus every ordered
# pair of distinct axes glued at a shared centre cell (4 * 3 = 12), K = 16 total.
K = 4 + 12
axis_lines = [
    [(-1, 0, 0, 0), (0, 0, 0, 0), (1, 0, 0, 0)],
    [(0, -1, 0, 0), (0, 0, 0, 0), (0, 1, 0, 0)],
    [(0, 0, -1, 0), (0, 0, 0, 0), (0, 0, 1, 0)],
    [(0, 0, 0, -1), (0, 0, 0, 0), (0, 0, 0, 1)],
]
templates = list(axis_lines)
for i in range(4):
    for j in range(3):
        other = (i + j + 1) % 4
        # Bent template: negative arm of axis i, centre, positive arm of `other`.
        templates.append([axis_lines[i][0], (0, 0, 0, 0), axis_lines[other][2]])
assert (K == len(templates))
rangeType = [[1, 30] for k in range(K)]
IPGrid.optimize(P, None, templates, rangeType)
# the same results by Java: testPack/PackAmmBeen.java
| 87.566667
| 120
| 0.277122
| 1,225
| 5,254
| 1.188571
| 0.029388
| 0.29533
| 0.226648
| 0.17033
| 0.818681
| 0.784341
| 0.778846
| 0.739011
| 0.697115
| 0.659341
| 0
| 0.306723
| 0.275219
| 5,254
| 60
| 121
| 87.566667
| 0.07563
| 0.026837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.035714
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
af3ef5328db70165281714feca8853de4e3aedbe
| 287
|
py
|
Python
|
OOP-Inheritance/person.py
|
bekzodbuyukov/Python
|
bafd81f5074672db4ea989c447c7fd2a4578d964
|
[
"MIT"
] | null | null | null |
OOP-Inheritance/person.py
|
bekzodbuyukov/Python
|
bafd81f5074672db4ea989c447c7fd2a4578d964
|
[
"MIT"
] | null | null | null |
OOP-Inheritance/person.py
|
bekzodbuyukov/Python
|
bafd81f5074672db4ea989c447c7fd2a4578d964
|
[
"MIT"
] | null | null | null |
class Person:
    """A minimal person record that knows how to describe itself.

    Attributes:
        first_name: the person's given name.
        last_name: the person's family name.
        age: the person's age in years.
    """

    def __init__(self, first_name: str, last_name: str, age: int) -> None:
        self.first_name = first_name
        self.last_name = last_name
        self.age = age

    def __repr__(self) -> str:
        """Return a human-readable summary, e.g. 'Ada Lovelace, 36 y.o.'."""
        full_name = f"{self.first_name} {self.last_name}"
        return f"{full_name}, {self.age} y.o."
| 28.7
| 74
| 0.620209
| 43
| 287
| 3.767442
| 0.395349
| 0.222222
| 0.240741
| 0.209877
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.250871
| 287
| 9
| 75
| 31.888889
| 0.753488
| 0
| 0
| 0
| 0
| 0
| 0.178322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a5b138403103c4e87f2d548cbcae0ceb41d9a400
| 82,357
|
py
|
Python
|
SBMLDiagrams/editSBML.py
|
SunnyXu/SBMLDiagrams
|
a7a9dccf42f544d6f48bc097d4dc7ebf5f4e2d41
|
[
"MIT"
] | null | null | null |
SBMLDiagrams/editSBML.py
|
SunnyXu/SBMLDiagrams
|
a7a9dccf42f544d6f48bc097d4dc7ebf5f4e2d41
|
[
"MIT"
] | 50
|
2021-12-03T22:43:18.000Z
|
2022-03-30T22:15:09.000Z
|
SBMLDiagrams/editSBML.py
|
sys-bio/SBMLDiagrams
|
ff951ff987fadf61a25d239966134e7bbfa1ff1a
|
[
"MIT"
] | 2
|
2022-01-30T00:47:44.000Z
|
2022-03-03T01:13:24.000Z
|
# -*- coding: utf-8 -*-
# This script was written by Jin Xu and available on Github
# https://github.com/SunnyXu/SBMLDiagrams
"""
Created on Mon Aug 23 13:25:34 2021
@author: Jin Xu
"""
#from numpy.core.fromnumeric import shape
import pandas as pd
import os
from SBMLDiagrams import processSBML
from SBMLDiagrams import point
color_data = {"decimal_rgb": ['[240,248,255]', '[250,235,215]', '[0,255,255]', '[127,255,212]', '[240,255,255]', '[245,245,220]', '[255,228,196]', '[0,0,0]', '[255,235,205]', '[0,0,255]', '[138,43,226]', '[165,42,42]', '[222,184,135]', '[95,158,160]', '[127,255,0]', '[210,105,30]', '[255,127,80]', '[100,149,237]', '[255,248,220]', '[220,20,60]', '[0,255,255]', '[0,0,139]', '[0,139,139]', '[184,134,11]', '[169,169,169]', '[0,100,0]', '[189,183,107]', '[139,0,139]', '[85,107,47]', '[255,140,0]', '[153,50,204]', '[139,0,0]', '[233,150,122]', '[143,188,143]', '[72,61,139]', '[47,79,79]', '[0,206,209]', '[148,0,211]', '[255,20,147]', '[0,191,255]', '[105,105,105]', '[30,144,255]', '[178,34,34]', '[255,250,240]', '[34,139,34]', '[255,0,255]', '[220,220,220]', '[248,248,255]', '[255,215,0]', '[218,165,32]', '[128,128,128]', '[0,128,0]', '[173,255,47]', '[240,255,240]', '[255,105,180]', '[205,92,92]', '[75,0,130]', '[255,255,240]', '[240,230,140]', '[230,230,250]', '[255,240,245]', '[124,252,0]', '[255,250,205]', '[173,216,230]', '[240,128,128]', '[224,255,255]', '[250,250,210]', '[144,238,144]', '[211,211,211]', '[255,182,193]', '[255,160,122]', '[32,178,170]', '[135,206,250]', '[119,136,153]', '[176,196,222]', '[255,255,224]', '[0,255,0]', '[50,205,50]', '[250,240,230]', '[255,0,255]', '[128,0,0]', '[102,205,170]', '[0,0,205]', '[186,85,211]', '[147,112,219]', '[60,179,113]', '[123,104,238]', '[0,250,154]', '[72,209,204]', '[199,21,133]', '[25,25,112]', '[245,255,250]', '[255,228,225]', '[255,228,181]', '[255,222,173]', '[0,0,128]', '[253,245,230]', '[128,128,0]', '[107,142,35]', '[255,165,0]', '[255,69,0]', '[218,112,214]', '[238,232,170]', '[152,251,152]', '[175,238,238]', '[219,112,147]', '[255,239,213]', '[255,218,185]', '[205,133,63]', '[255,192,203]', '[221,160,221]', '[176,224,230]', '[128,0,128]', '[255,0,0]', '[188,143,143]', '[65,105,225]', '[139,69,19]', '[250,128,114]', '[244,164,96]', '[46,139,87]', '[255,245,238]', '[160,82,45]', '[192,192,192]', 
'[135,206,235]', '[106,90,205]', '[112,128,144]', '[255,250,250]', '[0,255,127]', '[70,130,180]', '[210,180,140]', '[0,128,128]', '[216,191,216]', '[255,99,71]', '[64,224,208]', '[238,130,238]', '[245,222,179]', '[255,255,255]', '[245,245,245]', '[255,255,0]', '[154,205,50]'],\
"html_name":['AliceBlue', 'AntiqueWhite', 'Aqua', 'Aquamarine', 'Azure', 'Beige', 'Bisque', 'Black', 'BlanchedAlmond', 'Blue', 'BlueViolet', 'Brown', 'BurlyWood', 'CadetBlue', 'Chartreuse', 'Chocolate', 'Coral', 'CornflowerBlue', 'Cornsilk', 'Crimson', 'Cyan', 'DarkBlue', 'DarkCyan', 'DarkGoldenrod', 'DarkGray', 'DarkGreen', 'DarkKhaki', 'DarkMagenta', 'DarkOliveGreen', 'DarkOrange', 'DarkOrchid', 'DarkRed', 'DarkSalmon', 'DarkSeaGreen', 'DarkSlateBlue', 'DarkSlateGray', 'DarkTurquoise', 'DarkViolet', 'DeepPink', 'DeepSkyBlue', 'DimGray', 'DodgerBlue', 'FireBrick', 'FloralWhite', 'ForestGreen', 'Fuchsia', 'Gainsboro', 'GhostWhite', 'Gold', 'Goldenrod', 'Gray', 'Green', 'GreenYellow', 'Honeydew', 'HotPink', 'IndianRed', 'Indigo', 'Ivory', 'Khaki', 'Lavender', 'LavenderBlush', 'LawnGreen', 'LemonChiffon', 'LightBlue', 'LightCoral', 'LightCyan', 'LightGoldenrodYellow', 'LightGreen', 'LightGrey', 'LightPink', 'LightSalmon', 'LightSeaGreen', 'LightSkyBlue', 'LightSlateGray', 'LightSteelBlue', 'LightYellow', 'Lime', 'LimeGreen', 'Linen', 'Magenta', 'Maroon', 'MediumAquamarine', 'MediumBlue', 'MediumOrchid', 'MediumPurple', 'MediumSeaGreen', 'MediumSlateBlue', 'MediumSpringGreen', 'MediumTurquoise', 'MediumVioletRed', 'MidnightBlue', 'MintCream', 'MistyRose', 'Moccasin', 'NavajoWhite', 'Navy', 'OldLace', 'Olive', 'OliveDrab', 'Orange', 'OrangeRed', 'Orchid', 'PaleGoldenrod', 'PaleGreen', 'PaleTurquoise', 'PaleVioletRed', 'PapayaWhip', 'PeachPuff', 'Peru', 'Pink', 'Plum', 'PowderBlue', 'Purple', 'Red', 'RosyBrown', 'RoyalBlue', 'SaddleBrown', 'Salmon', 'SandyBrown', 'SeaGreen', 'Seashell', 'Sienna', 'Silver', 'SkyBlue', 'SlateBlue', 'SlateGray', 'Snow', 'SpringGreen', 'SteelBlue', 'Tan', 'Teal', 'Thistle', 'Tomato', 'Turquoise', 'Violet', 'Wheat', 'White', 'WhiteSmoke', 'Yellow', 'YellowGreen'],\
"hex_string":['#F0F8FF', '#FAEBD7', '#00FFFF', '#7FFFD4', '#F0FFFF', '#F5F5DC', '#FFE4C4', '#000000', '#FFEBCD', '#0000FF', '#8A2BE2', '#A52A2A', '#DEB887', '#5F9EA0', '#7FFF00', '#D2691E', '#FF7F50', '#6495ED', '#FFF8DC', '#DC143C', '#00FFFF', '#00008B', '#008B8B', '#B8860B', '#A9A9A9', '#006400', '#BDB76B', '#8B008B', '#556B2F', '#FF8C00', '#9932CC', '#8B0000', '#E9967A', '#8FBC8F', '#483D8B', '#2F4F4F', '#00CED1', '#9400D3', '#FF1493', '#00BFFF', '#696969', '#1E90FF', '#B22222', '#FFFAF0', '#228B22', '#FF00FF', '#DCDCDC', '#F8F8FF', '#FFD700', '#DAA520', '#808080', '#008000', '#ADFF2F', '#F0FFF0', '#FF69B4', '#CD5C5C', '#4B0082', '#FFFFF0', '#F0E68C', '#E6E6FA', '#FFF0F5', '#7CFC00', '#FFFACD', '#ADD8E6', '#F08080', '#E0FFFF', '#FAFAD2', '#90EE90', '#D3D3D3', '#FFB6C1', '#FFA07A', '#20B2AA', '#87CEFA', '#778899', '#B0C4DE', '#FFFFE0', '#00FF00', '#32CD32', '#FAF0E6', '#FF00FF', '#800000', '#66CDAA', '#0000CD', '#BA55D3', '#9370DB', '#3CB371', '#7B68EE', '#00FA9A', '#48D1CC', '#C71585', '#191970', '#F5FFFA', '#FFE4E1', '#FFE4B5', '#FFDEAD', '#000080', '#FDF5E6', '#808000', '#6B8E23', '#FFA500', '#FF4500', '#DA70D6', '#EEE8AA', '#98FB98', '#AFEEEE', '#DB7093', '#FFEFD5', '#FFDAB9', '#CD853F', '#FFC0CB', '#DDA0DD', '#B0E0E6', '#800080', '#FF0000', '#BC8F8F', '#4169E1', '#8B4513', '#FA8072', '#F4A460', '#2E8B57', '#FFF5EE', '#A0522D', '#C0C0C0', '#87CEEB', '#6A5ACD', '#708090', '#FFFAFA', '#00FF7F', '#4682B4', '#D2B48C', '#008080', '#D8BFD8', '#FF6347', '#40E0D0', '#EE82EE', '#F5DEB3', '#FFFFFF', '#F5F5F5', '#FFFF00', '#9ACD32']}
# Module-level color lookup table with one row per named color and three
# parallel columns: "decimal_rgb" ('[r,g,b]' strings), "html_name", "hex_string".
df_color = pd.DataFrame(color_data)
#DIR = os.path.dirname(os.path.abspath(__file__))
#df_color = pd.read_csv(os.path.join(DIR, 'colors.txt'), sep="\t")
# Lower-case the HTML names once so later lookups can be case-insensitive.
df_color["html_name"] = df_color["html_name"].str.lower()
def _color_to_rgb(color, opacity):
def _hex_to_rgb(value):
value = value.lstrip('#')
return [int(value[i:i+2], 16) for i in (0, 2, 4)]
#rgba
rgb = []
if isinstance(color, list) and len(color) == 3:
rgb = color.copy()
elif isinstance(color, str):
if '#' in color and len(color) == 7: #hex_string
rgb = _hex_to_rgb(color)
else: #html_name
if color.lower() in df_color.values:
index = df_color.index[df_color["html_name"] == color.lower()].tolist()[0] #row index
rgb_pre = df_color.iloc[index]["decimal_rgb"]
rgb_pre = rgb_pre[1:-1].split(",")
rgb = [int(x) for x in rgb_pre]
if rgb == [] or opacity > 1. or opacity < 0.:
raise ValueError('Please enter a color or/and opacity in a valid format!')
else:
rgba = rgb.copy()
rgba.append(int(opacity*255/1.))
return rgba
def _setCompartmentPosition(df, id, position):
"""
Set the x,y coordinates of the compartment position.
Args:
df: DataFrame-initial information.
id: str-compartment id.
position: list or point.Point()
list-
[position_x, position_y], the coordinate represents the top-left hand corner of
the compartment.
point.Point()-
a Point object with attributes x and y representing
the x/y position of the top-left hand corner of the compartment.
Returns:
df_temp: DataFrame-information after updates.
"""
df_CompartmentData_temp = df[0].copy()
idx_list = df[0].index[df[0]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(position) != list and type(position) != type(point.Point()):
raise Exception("Please enter a valid position type.")
if type(position) == type(point.Point()):
position = [position.x, position.y]
for i in range(len(idx_list)):
df_CompartmentData_temp.at[idx_list[i],"position"] = position
df_temp = (df_CompartmentData_temp, df[1], df[2], df[3], df[4])
return df_temp
def _setCompartmentSize(df, id, size):
"""
Set the compartment size.
Args:
df: DataFrame-initial information.
id: str-compartment id.
size: list or point.Point()
list-
1*2 matrix-size of the compartment [width, height].
point.Point()-
a Point object with attributes x and y representing the width and height of
the compartment.
Returns:
df_temp: DataFrame-information after updates.
"""
df_CompartmentData_temp = df[0].copy()
idx_list = df[0].index[df[0]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(size) != list and type(size) != type(point.Point()):
raise Exception("Please enter a valid size type.")
if type(size) == type(point.Point()):
size = [size.x, size.y]
for i in range(len(idx_list)):
df_CompartmentData_temp.at[idx_list[i],"size"] = size
df_temp = (df_CompartmentData_temp, df[1], df[2], df[3], df[4])
return df_temp
def _setCompartmentFillColor(df, id, fill_color, opacity):
    """
    Set the compartment fill color.

    Args:
        df: DataFrame-initial information.
        id: str-compartment id.
        fill_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).

    Returns:
        df_temp: DataFrame-information after updates.
    """
    compartments = df[0].copy()
    matches = df[0].index[df[0]["id"] == id].tolist()
    if not matches:
        raise Exception("This is not a valid id.")
    # Normalize user input (name/hex/rgb list + opacity) into an RGBA list.
    rgba = _color_to_rgb(fill_color, opacity)
    for row in matches:
        compartments.at[row, "fill_color"] = rgba
    return (compartments, df[1], df[2], df[3], df[4])
def _setCompartmentBorderColor(df, id, border_color, opacity):
    """
    Set the compartment border color.

    Args:
        df: DataFrame-initial information.
        id: str-compartment id.
        border_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).

    Returns:
        df_temp: DataFrame-information after updates.
    """
    compartments = df[0].copy()
    matches = df[0].index[df[0]["id"] == id].tolist()
    if not matches:
        raise Exception("This is not a valid id.")
    # Normalize user input (name/hex/rgb list + opacity) into an RGBA list.
    rgba = _color_to_rgb(border_color, opacity)
    for row in matches:
        compartments.at[row, "border_color"] = rgba
    return (compartments, df[1], df[2], df[3], df[4])
def _setCompartmentBorderWidth(df, id, border_width):
"""
Set the compartment border width.
Args:
df: DataFrame-initial information.
id: str-compartment id.
border_width: float-compartment border line width.
Returns:
df_temp: DataFrame-information after updates.
"""
df_CompartmentData_temp = df[0].copy()
idx_list = df[0].index[df[0]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
for i in range(len(idx_list)):
df_CompartmentData_temp.at[idx_list[i],"border_width"] = border_width
df_temp = (df_CompartmentData_temp, df[1], df[2], df[3], df[4])
return df_temp
def _setFloatingBoundaryNode(df, id, floating_node, alias = 0):
"""
Set a node to be floating node (True) or boundary node (False).
Args:
df: DataFrame-initial information.
id: str-node id.
floating_node: bool-floating node (True) or not (False).
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"floating_node"] = floating_node
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"floating_node"] = floating_node
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodePosition(df, id, position, alias = 0):
"""
Set the x,y coordinates of the node position.
Args:
df: DataFrame-initial information.
id: str-node id.
position: list or point.Point()
list-
[position_x, position_y], the coordinate represents the top-left hand corner of the node.
point.Point()-
a Point object with attributes x and y representing
the x/y position of the top-left hand corner of the node.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(position) != list and type(position) != type(point.Point()):
raise Exception("Please enter a valid position type.")
if type(position) == type(point.Point()):
position = [position.x, position.y]
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"position"] = position
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"position"] = position
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeSize(df, id, size, alias = 0):
"""
Set the node size.
Args:
df: DataFrame-initial information.
id: str-node id.
size: list or point.Point()
list-
1*2 matrix-size of the node [width, height].
point.Point()-
a Point object with attributes x and y representing the width and height of
the node.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(size) != list and type(size) != type(point.Point()):
raise Exception("Please enter a valid size type.")
if type(size) == type(point.Point()):
size = [size.x, size.y]
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"size"] = size
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"size"] = size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeShape(df, id, shape, alias = 0):
"""
Set the node shape by shape index or name string.
Args:
df: DataFrame-initial information.
id: int-node id.
shape: int/str-
int-0:text_only, 1:rectangle, 2:ellipse, 3:hexagon, 4:line, or 5:triangle;
6:upTriangle, 7:downTriangle, 8:leftTriangle, 9: rightTriangle.
str-"text_only", "rectangle", "ellipse", "hexagon", "line", or "triangle";
"upTriangle", "downTriangle", "leftTriangle", "rightTriangle".
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
shape_idx = -1 #undefined shape
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
shape_info = []
shape_type = ''
if isinstance(shape, str):
shape_name = shape
if shape == 'text_only':
shape_idx = 0
elif shape == 'rectangle':
shape_idx = 1
shape_type = 'rectangle'
elif shape == 'ellipse':
shape_idx = 2
shape_type = 'ellipse'
shape_info = [[[50.0, 50.0], [50.0, 50.0]]]
elif shape == 'hexagon':
shape_idx = 3
shape_type = 'polygon'
shape_info = [[100.0, 50.0], [75.0, 7.0], [25.0, 7.0], [0.0, 50.0], [25.0, 86.0], [75.0, 86.0]]
elif shape == "line":
shape_idx = 4
shape_type = 'polygon'
shape_info = [[0.0, 50.0], [100.0, 50.0]]
elif shape == "triangle":
shape_idx = 5
shape_type = 'polygon'
shape_info = [[100.0, 50.0], [25.0, 7.0], [25.0, 86.0]]
elif shape == "upTriangle":
shape_idx = 6
shape_type = 'polygon'
shape_info = [[50.0, 0.0], [100.0, 80.6], [0.0, 80.6]]
elif shape == "downTriangle":
shape_idx = 7
shape_type = 'polygon'
shape_info = [[0.0, 19.4], [100.0, 19.4], [50.0, 100.]]
elif shape == "leftTriangle":
shape_idx = 8
shape_type = 'polygon'
shape_info = [[80.6, 0.0], [80.6, 100.0], [0.0, 50.0]]
elif shape == "rightTriangle":
shape_idx = 9
shape_type = 'polygon'
shape_info = [[19.4, 0.0], [100., 50.0], [19.4, 100.0]]
else:
raise Exception("This is not a valid node shape information.")
elif isinstance(shape, int):
if 0 <= shape <= 9:
shape_idx = shape
if shape == 0:
shape_name = 'text_only'
elif shape == 1:
shape_name = 'rectangle'
shape_type = 'rectangle'
elif shape == 2:
shape_name = 'ellipse'
shape_type = 'ellipse'
shape_info = [[[50.0, 50.0], [50.0, 50.0]]]
elif shape == 3:
shape_name = 'hexagon'
shape_type = 'polygon'
shape_info = [[100.0, 50.0], [75.0, 7.0], [25.0, 7.0], [0.0, 50.0], [25.0, 86.0], [75.0, 86.0]]
elif shape == 4:
shape_name = 'line'
shape_type = 'polygon'
shape_info = [[0.0, 50.0], [100.0, 50.0]]
elif shape == 5:
shape_name = 'triangle'
shape_type = 'polygon'
shape_info = [[100.0, 50.0], [25.0, 7.0], [25.0, 86.0]]
elif shape == 6:
shape_name = 'upTriangle'
shape_type = 'polygon'
shape_info = [[50.0, 0.0], [100.0, 80.6], [0.0, 80.6]]
elif shape == 7:
shape_name = 'downTirangle'
shape_type = 'polygon'
shape_info = [[0.0, 19.4], [100.0, 19.4], [50.0, 100.]]
elif shape == 8:
shape_name = 'leftTriangle'
shape_type = 'polygon'
shape_info = [[80.6, 0.0], [80.6, 100.0], [0.0, 50.0]]
elif shape == 9:
shape_name = 'rightTriangle'
shape_type = 'polygon'
shape_info = [[19.4, 0.0], [100., 50.0], [19.4, 100.0]]
else:
raise Exception("This is not a valid node shape information.")
else:
raise Exception("This is not a valid node shape information.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"shape_idx"] = shape_idx
# df_NodeData_temp.at[idx_list[i],"shape_name"] = shape_name
# df_NodeData_temp.at[idx_list[i],"shape_type"] = shape_type
# df_NodeData_temp.at[idx_list[i],"shape_info"] = shape_info
if alias < len(idx_list) and alias >= 0:
i = alias
df_NodeData_temp.at[idx_list[i],"shape_idx"] = shape_idx
df_NodeData_temp.at[idx_list[i],"shape_name"] = shape_name
df_NodeData_temp.at[idx_list[i],"shape_type"] = shape_type
df_NodeData_temp.at[idx_list[i],"shape_info"] = shape_info
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeArbitraryPolygonShape(df, id, shape_name, shape_info, alias = 0):
    """
    Set an arbitrary polygon shape to a node by shape name and shape info.
    Args:
        df: DataFrame-initial information.
        id: str-node id.
        shape_name: str-name of the arbitrary polygon shape.
        shape_info: list-[[x1,y1],[x2,y2],[x3,y3],etc], where x,y are floating numbers from 0 to 100.
        x represents the percentage of width, and y represents the percentage of height.
        alias: int-alias node index [0, num_alias).
    Returns:
        df_temp: DataFrame-information after updates.
    """
    shape_idx = -2 #arbitrary polygon
    df_NodeData_temp = df[1].copy()
    idx_list = df[1].index[df[1]["id"] == id].tolist()
    if len(idx_list) == 0:
        raise Exception("This is not a valid id.")
    # NOTE: shape_name is written to EVERY alias row, while shape_idx/shape_type
    # below are written only to the selected alias row.
    if isinstance(shape_name, str):
        for i in range(len(idx_list)):
            df_NodeData_temp.at[idx_list[i],"shape_name"] = shape_name
    else:
        raise Exception("This is not a valid node shape name.")
    #check the shape_info is in the correct format:
    shape_info_flag = True
    if isinstance(shape_info, list) and len(shape_info) >= 2:
        for ii in range(len(shape_info)):
            if isinstance(shape_info[ii], list) and len(shape_info[ii]) == 2:
                # float(item) itself raises ValueError/TypeError for non-numeric
                # items, so this branch's explicit raise is rarely reached; the
                # flag is effectively always True past this point because every
                # False assignment is immediately followed by a raise.
                if all(isinstance(float(item), float) for item in shape_info[ii]):
                    pass
                else:
                    shape_info_flag = False
                    raise Exception("This is not a valid node shape info.")
            else:
                shape_info_flag = False
                raise Exception("This is not a valid node shape info.")
    else:
        shape_info_flag = False
        raise Exception("This is not a valid node shape info.")
    if shape_info_flag == True:
        # Validated geometry is written to every alias row.
        for i in range(len(idx_list)):
            df_NodeData_temp.at[idx_list[i],"shape_info"] = shape_info
    # for i in range(len(idx_list)):
    #     df_NodeData_temp.at[idx_list[i],"shape_idx"] = shape_idx
    #     df_NodeData_temp.at[idx_list[i],"shape_type"] = 'polygon'
    if alias < len(idx_list) and alias >= 0:
        i = alias
        df_NodeData_temp.at[idx_list[i],"shape_idx"] = shape_idx
        df_NodeData_temp.at[idx_list[i],"shape_type"] = 'polygon'
    else:
        raise Exception("Alias index is beyond number of alias.")
    df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
    return df_temp
# def _setNodeArbitraryEllipseShape(df, id, shape_name, shape_info):
# """
# Set an arbitrary ellipse shape to a node by shape name and shape info.
# Args:
# id: str-node id.
# shape_name: str-name of the arbitrary ellipse shape.
# shape_info: list-[[[x1,y1],[r1,r2]]], where x,y,r are floating numbers from 0 to 100.
# """
# shape_idx = -3 #arbitrary ellipse
# df_NodeData_temp = df[1].copy()
# idx_list = df[1].index[df[1]["id"] == id].tolist()
# if len(idx_list) == 0:
# raise Exception("This is not a valid id.")
# if isinstance(shape_name, str):
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"shape_name"] = shape_name
# else:
# raise Exception("This is not a valid node shape name.")
# #check the shape_info is in the correct format:
# shape_info_flag = True
# if isinstance(shape_info, list) and len(shape_info) == 1:
# if isinstance(shape_info[0], list) and len(shape_info[0]) == 2:
# for ii in range(len(shape_info[0])):
# if all(isinstance(float(item), float) for item in shape_info[0][ii]):
# pass
# else:
# shape_info_flag = False
# raise Exception("This is not a valid node shape info.")
# else:
# shape_info_flag = False
# raise Exception("This is not a valid node shape info.")
# else:
# shape_info_flag = False
# raise Exception("This is not a valid node shape info.")
# if shape_info_flag == True:
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"shape_info"] = shape_info
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"shape_idx"] = shape_idx
# df_NodeData_temp.at[idx_list[i],"shape_type"] = 'ellipse'
# df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
# return df_temp
def _setNodeTextPosition(df, id, txt_position, alias = 0):
"""
Set the x,y coordinates of the node text position.
Args:
df: DataFrame-initial information.
id: str-node id.
txt_position: list or point.Point()
list-
[txt_position_x, txt_position_y], the coordinate represents the top-left hand
corner of the node text.
point.Point()-
a Point object with attributes x and y representing
the x/y position of the top-left hand corner of the node text.
alias: alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(txt_position) != list and type(txt_position) != type(point.Point()):
raise Exception("Please enter a valid txt_position type.")
if type(txt_position) == type(point.Point()):
txt_position = [txt_position.x, txt_position.y]
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"txt_position"] = txt_position
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionCenter(df, id, alias = 0):
"""
Set the node text position as the center of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# # txt_str = df_NodeData_temp.at[idx_list[i],"id"]
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# #shape_type = df_NodeData_temp.at[idx_list[i],"shape_type"]
# txt_position = node_position
# txt_size = node_size
# # if shape_type == "polygon":
# # vertex = []
# # shape_info = df_NodeData_temp.at[idx_list[i],"shape_info"]
# # for j in range(len(shape_info)):
# # vertex_x = node_position[0]+node_size[0]*shape_info[j][0]/100.
# # vertex_y = node_position[1]+node_size[1]*shape_info[j][1]/100.
# # vertex.append([vertex_x,vertex_y])
# # def centroid(vertexes):
# # _x_list = [vertex [0] for vertex in vertexes]
# # _y_list = [vertex [1] for vertex in vertexes]
# # _len = len(vertexes)
# # _x = sum(_x_list) / _len
# # _y = sum(_y_list) / _len
# # return[_x, _y]
# # centroid_pos = centroid(vertex)
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = txt_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = node_position
txt_size = node_size
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = txt_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionLeftCenter(df, id, alias = 0):
"""
Set the node text position as the left center of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0]-node_size[0], node_position[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0]-node_size[0], node_position[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionRightCenter(df, id, alias = 0):
"""
Set the node text position as the right center of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0 and alias >= 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0]+node_size[0], node_position[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0]+node_size[0], node_position[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionUpperCenter(df, id, alias = 0):
"""
Set the node text position as the upper center of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0 and alias >= 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0], node_position[1]-node_size[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0], node_position[1]-node_size[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionLowerCenter(df, id, alias = 0):
"""
Set the node text position as the lower center of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0], node_position[1]+node_size[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0], node_position[1]+node_size[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionUpperLeft(df, id, alias = 0):
"""
Set the node text position as the upper left of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0]-node_size[0], node_position[1]-node_size[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0]-node_size[0], node_position[1]-node_size[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionUpperRight(df, id, alias = 0):
"""
Set the node text position as the upper right of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0]+node_size[0], node_position[1]-node_size[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0]+node_size[0], node_position[1]-node_size[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionLowerLeft(df, id, alias = 0):
"""
Set the node text position as the lower left of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0]-node_size[0], node_position[1]+node_size[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0]-node_size[0], node_position[1]+node_size[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextPositionLowerRight(df, id, alias = 0):
"""
Set the node text position as the lower right of the node.
Args:
df: DataFrame-initial information.
id: str-node id.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0 and alias >= 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# node_position = df_NodeData_temp.at[idx_list[i],"position"]
# node_size = df_NodeData_temp.at[idx_list[i],"size"]
# txt_position = [node_position[0]+node_size[0], node_position[1]+node_size[1]]
# df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
# df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
if alias < len(idx_list) and alias >= 0:
i = alias
node_position = df_NodeData_temp.at[idx_list[i],"position"]
node_size = df_NodeData_temp.at[idx_list[i],"size"]
txt_position = [node_position[0]+node_size[0], node_position[1]+node_size[1]]
df_NodeData_temp.at[idx_list[i],"txt_position"] = txt_position
df_NodeData_temp.at[idx_list[i],"txt_size"] = node_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextSize(df, id, txt_size, alias = 0):
"""
Set the node text size.
Args:
df: DataFrame-initial information.
id: str-node id.
txt_size: list or point.Point()
list-
1*2 matrix-size of the node text [width, height].
point.Point()-
a Point object with attributes x and y representing the width and height of
the node text.
alias: alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(txt_size) != list and type(txt_size) != type(point.Point()):
raise Exception("Please enter a valid txt_size type.")
if type(txt_size) == type(point.Point()):
txt_size = [txt_size.x, txt_size.y]
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"txt_size"] = txt_size
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"txt_size"] = txt_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeFillColor(df, id, fill_color, opacity, alias = 0 ):
"""
Set the node fill color.
Args:
df: DataFrame-initial information.
id: str-node id.
fill_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
fill_color = _color_to_rgb(fill_color, opacity)
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"fill_color"] = fill_color
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"fill_color"] = fill_color
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeFillLinearGradient(df, id, gradient_info, stop_info, alias = 0):
"""
Set the node fill linear gradient.
Args:
id: str-node id.
gradient_info: list - [[x1,y1],[x2,y2]], where x,y are floating numbers from 0 to 100.
x represents the percentage of width, and y represents the percentage of height.
stop_info, list - [[x1,[r1,g1,b1,a1]],[x2,[r2,g2,b2,a2]],etc],
where x is floating number from 0 to 100.
alias: int-alias node index [0, num_alias).
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
#check the gradient_info is in the correct format:
gradient_info_flag = True
stop_info_flag = True
if isinstance(gradient_info, list) and len(gradient_info) == 2:
for ii in range(len(gradient_info)):
if isinstance(gradient_info[ii], list) and len(gradient_info[ii]) == 2:
if all(isinstance(float(item), float) for item in gradient_info[ii]):
pass
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
if isinstance(stop_info, list) and len(stop_info) >= 2:
for ii in range(len(stop_info)):
if isinstance(stop_info[ii], list):
if len(stop_info[ii]) == 2:
if type(stop_info[ii][0]) == float and type(stop_info[ii][1]) == list:
if len(stop_info[ii][1]) == 4 and all(isinstance(int(item), int) and int(item) <= 255 and int(item) >= 0 for item in stop_info[ii][1]):
pass
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
elif len(stop_info[ii]) == 3:
if type(stop_info[ii][0]) == float and type(stop_info[ii][1]) == str and type(stop_info[ii][2]) == float:
html_to_rgba = _color_to_rgb(stop_info[ii][1], stop_info[ii][2])
stop_info[ii] = [stop_info[ii][0], html_to_rgba]
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
else:
stop_info_flag = False
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
if gradient_info_flag == True and stop_info_flag == True:
fill_color = ['linearGradient', gradient_info, stop_info]
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"fill_color"] = fill_color
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"fill_color"] = fill_color
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeFillRadialGradient(df, id, gradient_info, stop_info, alias = 0):
"""
Set the node fill radial gradient.
Args:
id: str-node id.
gradient_info: list - [[x1,y1],[r]], where x,y,r are floating numbers from 0 to 100.
x represents the center with percentage of width and height; r represents the radius.
stop_info, list - [[x1,[r1,g1,b1,a1]],[x2,[r2,g2,b2,a2]],etc],
where x is floating number from 0 to 100.
alias: int-alias node index [0, num_alias).
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
#check the gradient_info is in the correct format:
gradient_info_flag = True
stop_info_flag = True
if isinstance(gradient_info, list) and len(gradient_info) == 2:
if isinstance(gradient_info[0], list) and len(gradient_info[0]) == 2:
if all(isinstance(float(item), float) for item in gradient_info[0]):
pass
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
if isinstance(gradient_info[1], list) and len(gradient_info[1]) == 1:
if type(gradient_info[1][0]) == float:
pass
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
else:
gradient_info_flag = False
raise Exception("This is not a valid gradient info.")
if isinstance(stop_info, list) and len(stop_info) >= 2:
for ii in range(len(stop_info)):
if isinstance(stop_info[ii], list):
if len(stop_info[ii]) == 2:
if type(stop_info[ii][0]) == float and type(stop_info[ii][1]) == list:
if len(stop_info[ii][1]) == 4 and all(isinstance(int(item), int) and int(item) <= 255 and int(item) >= 0 for item in stop_info[ii][1]):
pass
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
elif len(stop_info[ii]) == 3:
if type(stop_info[ii][0]) == float and type(stop_info[ii][1]) == str and type(stop_info[ii][2]) == float:
html_to_rgba = _color_to_rgb(stop_info[ii][1], stop_info[ii][2])
stop_info[ii] = [stop_info[ii][0], html_to_rgba]
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
else:
stop_info_flag = False
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
else:
stop_info_flag = False
raise Exception("This is not a valid stop info.")
if gradient_info_flag == True and stop_info_flag == True:
fill_color = ['radialGradient', gradient_info, stop_info]
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"fill_color"] = fill_color
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"fill_color"] = fill_color
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeBorderColor(df, id, border_color, opacity, alias = 0):
"""
Set the node border color.
Args:
df: DataFrame-initial information.
id: str-node id.
border_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
border_color = _color_to_rgb(border_color, opacity)
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"border_color"] = border_color
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"border_color"] = border_color
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeBorderWidth(df, id, border_width, alias = 0):
"""
Set the node border width.
Args:
df: DataFrame-initial information.
id: str-node id.
border_width: float-node border line width.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"border_width"] = border_width
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"border_width"] = border_width
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextFontColor(df, id, txt_font_color, opacity, alias = 0):
"""
Set the node text font color.
Args:
df: DataFrame-initial information.
id: str-node id.
txt_font_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
txt_font_color = _color_to_rgb(txt_font_color, opacity)
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"txt_font_color"] = txt_font_color
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"txt_font_color"] = txt_font_color
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextLineWidth(df, id, txt_line_width, alias = 0):
"""
Set the node text line width.
Args:
df: DataFrame-initial information.
id: str-node id.
txt_line_width: float-node text line width.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"txt_line_width"] = txt_line_width
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"txt_line_width"] = txt_line_width
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setNodeTextFontSize(df, id, txt_font_size, alias = 0):
"""
Set the node text font size.
Args:
df: DataFrame-initial information.
id: str-node id.
txt_font_size: float-node text font size.
alias: int-alias node index [0, num_alias).
Returns:
df_temp: DataFrame-information after updates.
"""
df_NodeData_temp = df[1].copy()
idx_list = df[1].index[df[1]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
# for i in range(len(idx_list)):
# df_NodeData_temp.at[idx_list[i],"txt_font_size"] = txt_font_size
if alias < len(idx_list) and alias >= 0:
df_NodeData_temp.at[idx_list[alias],"txt_font_size"] = txt_font_size
else:
raise Exception("Alias index is beyond number of alias.")
df_temp = (df[0], df_NodeData_temp, df[2], df[3], df[4])
return df_temp
def _setReactionCenterPosition(df, id, position):
"""
Set the reaction center position.
Args:
df: DataFrame-initial information.
id: str-reaction id.
position: list or point.Point()
list-
1*2 matrix-[position_x, position_y].
point.Point()-
a Point object with attributes x and y representing the x/y position.
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(position) != list and type(position) != type(point.Point()):
raise Exception("Please enter a valid position type.")
if type(position) == type(point.Point()):
position = [position.x, position.y]
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"center_pos"] = position
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
return df_temp
def _setReactionBezierHandles(df, id, position):
"""
Set the reaction bezier handle positions.
Args:
df: DataFrame-initial information.
id: str-reaction id.
position: list-position of the handles: [center handle, reactant handle1, ..., product handle1, ...].
center handle/reactant handle1/product handle1: list or point.Point()
list-
[position_x, position_y], the coordinate represents the top-left hand
corner of the node.
point.Point()-
a Point object with attributes x and y representing the x/y position.
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(position) != list:
raise Exception("Please enter a valid position type.")
else:
if all(isinstance(item, list) for item in position):
pass
elif all(type(item) == type(point.Point()) for item in position):
position_to_list = []
for item in position:
position_to_list.append([item.x,item.y])
position = position_to_list
else:
raise Exception("Please enter a valid position type.")
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"handles"] = position
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
return df_temp
def _setReactionFillColor(df, id, fill_color, opacity):
"""
Set the reaction fill color.
Args:
df: DataFrame-initial information.
id: str-reaction id.
fill_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
fill_color = _color_to_rgb(fill_color, opacity)
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"fill_color"] = fill_color
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
return df_temp
def _setReactionLineThickness(df, id, line_thickness):
"""
Set the reaction line thickness.
Args:
df: DataFrame-initial information.
id: str-reaction id.
line_thickness: float-reaction border line width.
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"line_thickness"] = line_thickness
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
return df_temp
def _setBezierReactionType(df, id, bezier):
"""
Set the reaction type to bezier curve or not with a certain reaction id.
Args:
df: DataFrame-initial information.
id: str-reaction id.
bezier: bool-bezier reaction (True as default) or not (False as straightline).
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"bezier"] = bezier
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
return df_temp
def _setReactionArrowHeadSize(df, id, size):
#def _setReactionArrowHeadSize(df, size):
"""
Set the reaction arrow head size with a certain reaction id.
Args:
df: DataFrame-initial information.
size: list or point.Point()
list-
1*2 matrix-size of the arrow head [width, height].
point.Point()-
a Point object with attributes x and y representing the width and height of
the arrow head.
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(size) != list and type(size) != type(point.Point()):
raise Exception("Please enter a valid size type.")
if type(size) == type(point.Point()):
size = [size.x, size.y]
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"arrow_head_size"] = size
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
# df_ReactionData_temp = df[2].copy()
# for i in range(len(df_ReactionData_temp)):
# df_ReactionData_temp.at[i,"arrow_head_size"] = size
# df_temp = (df[0], df[1], df_ReactionData_temp, df[3])
return df_temp
def _setReactionDashStyle(df, id, dash):
"""
Set the reaction dash information with a certain reaction id.
Args:
df: DataFrame-initial information.
id: str-reaction id.
dash: list - [] means solid;
[a,b] means drawing a a-point line and following a b-point gap and etc;
[a,b,c,d] means drawing a a-point line and following a b-point gap, and then
drawing a c-point line followed by a d-point gap.
Returns:
df_temp: DataFrame-information after updates.
"""
df_ReactionData_temp = df[2].copy()
idx_list = df[2].index[df[2]["id"] == id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(dash) != list:
raise Exception("Please enter a valid dash type.")
for i in range(len(idx_list)):
df_ReactionData_temp.at[idx_list[i],"rxn_dash"] = dash
df_temp = (df[0], df[1], df_ReactionData_temp, df[3], df[4])
return df_temp
# def _addText(df_text, txt_str, txt_position, txt_font_color = [0, 0, 0], opacity = 1.,
# txt_line_width = 1., txt_font_size = 12.):
# """
#     Set arbitrary text onto canvas.
# Args:
# txt_str: str-the text content.
# txt_position: list-[position_x, position_y], the coordinate represents the top-left hand
# corner of the node text.
# txt_font_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
# opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
# txt_line_width: float-node text line width.
# txt_font_size: float-node text font size.
# """
# txt_font_color_rgba = _color_to_rgb(txt_font_color, opacity)
# df_text_temp = df_text.copy()
# text_row_dct = {k:[] for k in processSBML.COLUMN_NAME_df_text}
# text_row_dct[processSBML.ID].append(txt_str)
# text_row_dct[processSBML.TXTPOSITION].append(txt_position)
# text_row_dct[processSBML.TXTFONTCOLOR].append(txt_font_color_rgba)
# text_row_dct[processSBML.TXTLINEWIDTH].append(txt_line_width)
# text_row_dct[processSBML.TXTFONTSIZE].append(txt_font_size)
# if len(df_text_temp) == 0:
# df_text_temp = pd.DataFrame(text_row_dct)
# else:
# df_text_temp = pd.concat([df_text_temp,\
# pd.DataFrame(text_row_dct)], ignore_index=True)
# return df_text_temp
# def _removeText(df_text, txt_str):
# """
#     Set arbitrary text onto canvas.
# Args:
# txt_str: str-the text content.
# """
# df_text_temp = df_text.copy()
# idx_list = df_text_temp.index[df_text_temp[processSBML.ID] == txt_str].tolist()
# if len(idx_list) == 0:
# raise Exception("This is not a valid text content.")
# df_text_temp = df_text_temp.drop(idx_list)
# return df_text_temp
def _setTextContent(df, txt_id, txt_content):
"""
Set the arbitrary text content.
Args:
df: DataFrame-initial information.
txt_id: str-the text id.
txt_content: str-the text content.
Returns:
df_temp: DataFrame-information after updates.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
for i in range(len(idx_list)):
df_TextData_temp.at[idx_list[i],"txt_content"] = txt_content
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _setTextPosition(df, txt_id, txt_position):
"""
Set the x,y coordinates of the node text position.
Args:
df: DataFrame-initial information.
txt_id: str-the text id.
txt_position: list or point.Point()
list-
[txt_position_x, txt_position_y], the coordinate represents the top-left hand corner of
the node text.
point.Point()-
a Point object with attributes x and y representing
the x/y position of the top-left hand corner of the text.
Returns:
df_temp: DataFrame-information after updates.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(txt_position) != list and type(txt_position) != type(point.Point()):
raise Exception("Please enter a valid txt_position type.")
if type(txt_position) == type(point.Point()):
txt_position = [txt_position.x, txt_position.y]
for i in range(len(idx_list)):
df_TextData_temp.at[idx_list[i],"txt_position"] = txt_position
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _setTextSize(df, txt_id, txt_size):
"""
Set the arbitrary text size.
Args:
df: DataFrame-initial information.
txt_id: str-the text id.
txt_size: list or point.Point()
list-
1*2 matrix-size of the text [width, height].
point.Point()-
a Point object with attributes x and y representing the width and height of
the text.
Returns:
df_temp: DataFrame-information after updates.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
if type(txt_size) != list and type(txt_size) != type(point.Point()):
raise Exception("Please enter a valid txt_size type.")
if type(txt_size) == type(point.Point()):
txt_size = [txt_size.x, txt_size.y]
for i in range(len(idx_list)):
df_TextData_temp.at[idx_list[i],"txt_size"] = txt_size
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _setTextFontColor(df, txt_id, txt_font_color, opacity):
"""
Set the arbitrary text font color.
Args:
df: DataFrame-initial information.
txt_id: str-the text id.
txt_font_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
Returns:
df_temp: DataFrame-information after updates.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
txt_font_color = _color_to_rgb(txt_font_color, opacity)
for i in range(len(idx_list)):
df_TextData_temp.at[idx_list[i],"txt_font_color"] = txt_font_color
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _setTextLineWidth(df, txt_id, txt_line_width):
"""
Set the arbitrary text line width.
Args:
df: DataFrame-initial information.
txt_id: str-the text id.
txt_line_width: float-node text line width.
Returns:
df_temp: DataFrame-information after updates.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
for i in range(len(idx_list)):
df_TextData_temp.at[idx_list[i],"txt_line_width"] = txt_line_width
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _setTextFontSize(df, txt_id, txt_font_size):
"""
Set the arbitrary text font size.
Args:
df: DataFrame-initial information.
txt_id: str-the text id.
txt_font_size: float-text font size.
Returns:
df_temp: DataFrame-information after updates.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
for i in range(len(idx_list)):
df_TextData_temp.at[idx_list[i],"txt_font_size"] = txt_font_size
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _addText(df, txt_id, txt_content, txt_position, txt_size,
    txt_font_color = [0, 0, 0], opacity = 1., txt_line_width = 1., txt_font_size = 12.):
    """
    Add arbitrary text onto canvas.

    Args:
        df: tuple of DataFrames-initial information.
        txt_id: str-the text id.
        txt_content: str-the text content.
        txt_position: list or point.Point()
            list-[position_x, position_y], the top-left hand corner of the text.
            point.Point()-object with attributes x and y for that corner.
        txt_size: list or point.Point()
            list-1*2 matrix-size of the text [width, height].
            point.Point()-object with attributes x and y giving width/height.
        txt_font_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        txt_line_width: float-text line width.
        txt_font_size: float-text font size.

    Returns:
        df_temp: tuple of DataFrames-information after updates.
    """
    if type(txt_position) != list and type(txt_position) != type(point.Point()):
        raise Exception("Please enter a valid txt_position type.")
    if type(txt_position) == type(point.Point()):
        txt_position = [txt_position.x, txt_position.y]
    if type(txt_size) != list and type(txt_size) != type(point.Point()):
        raise Exception("Please enter a valid txt_size type.")
    if type(txt_size) == type(point.Point()):
        txt_size = [txt_size.x, txt_size.y]
    rgba = _color_to_rgb(txt_font_color, opacity)
    text_data = df[3].copy()
    # Build a single-row column dict keyed by the canonical text columns.
    new_row = {k: [] for k in processSBML.COLUMN_NAME_df_TextData}
    new_row[processSBML.ID].append(txt_id)
    new_row[processSBML.TXTCONTENT].append(txt_content)
    new_row[processSBML.TXTPOSITION].append(txt_position)
    new_row[processSBML.TXTSIZE].append(txt_size)
    new_row[processSBML.TXTFONTCOLOR].append(rgba)
    new_row[processSBML.TXTLINEWIDTH].append(txt_line_width)
    new_row[processSBML.TXTFONTSIZE].append(txt_font_size)
    if len(text_data) == 0:
        text_data = pd.DataFrame(new_row)
    else:
        text_data = pd.concat([text_data, pd.DataFrame(new_row)],
                              ignore_index=True)
    return (df[0], df[1], df[2], text_data, df[4])
def _removeText(df, txt_id):
"""
Remove the arbitray text.
Args:
txt_id: str-the text id.
"""
df_TextData_temp = df[3].copy()
idx_list = df[3].index[df[3]["id"] == txt_id].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
df_TextData_temp = df_TextData_temp.drop(idx_list)
df_temp = (df[0], df[1], df[2], df_TextData_temp, df[4])
return df_temp
def _addRectangle(df, shape_name, position, size, fill_color=[255,255,255], fill_opacity = 1., border_color = [0,0,0],
    border_opacity = 1., border_width = 2.):
    """
    Add a rectangle onto canvas.
    Args:
        df: tuple of dataframes; df[4] is the ShapeData frame the row is appended to.
        shape_name: str-the name of the rectangle.
        position: list or point.Point()
            list-
            [position_x, position_y], the coordinate represents the top-left hand corner of
            the rectangle.
            point.Point()-
            a Point object with attributes x and y representing
            the x/y position of the top-left hand corner of the rectangle.
        size: list or point.Point()
            list-
            1*2 matrix-size of the rectangle [width, height].
            point.Point()-
            a Point object with attributes x and y representing the width and height of
            the rectangle.
        fill_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        fill_opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        border_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        border_opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        border_width: float-node text line width.
    Returns:
        A new df tuple whose ShapeData frame contains the appended rectangle row.
    Raises:
        Exception: if position or size is neither a list nor a point.Point.
    """
    # NOTE(review): the mutable list defaults are never mutated here, so they are
    # safe, but tuples would be the more defensive choice.
    # isinstance replaces type()-equality checks: no throwaway Point() per call,
    # and list/Point subclasses are accepted as well.
    if not isinstance(position, (list, point.Point)):
        raise Exception("Please enter a valid position type.")
    if isinstance(position, point.Point):
        position = [position.x, position.y]
    if not isinstance(size, (list, point.Point)):
        raise Exception("Please enter a valid size type.")
    if isinstance(size, point.Point):
        size = [size.x, size.y]
    fill_color_rgba = _color_to_rgb(fill_color, fill_opacity)
    border_color_rgba = _color_to_rgb(border_color, border_opacity)
    df_ShapeData_temp = df[4].copy()
    # Build a one-row dict keyed by the canonical ShapeData column names.
    shape_row_dct = {k: [] for k in processSBML.COLUMN_NAME_df_ShapeData}
    shape_row_dct[processSBML.SHAPENAME].append(shape_name)
    shape_row_dct[processSBML.POSITION].append(position)
    shape_row_dct[processSBML.SIZE].append(size)
    shape_row_dct[processSBML.FILLCOLOR].append(fill_color_rgba)
    shape_row_dct[processSBML.BORDERCOLOR].append(border_color_rgba)
    shape_row_dct[processSBML.BORDERWIDTH].append(border_width)
    shape_row_dct[processSBML.SHAPETYPE].append('rectangle')
    # Rectangles carry no extra vertex info (unlike polygons).
    shape_row_dct[processSBML.SHAPEINFO].append([])
    if len(df_ShapeData_temp) == 0:
        df_ShapeData_temp = pd.DataFrame(shape_row_dct)
    else:
        df_ShapeData_temp = pd.concat([df_ShapeData_temp,
            pd.DataFrame(shape_row_dct)], ignore_index=True)
    return (df[0], df[1], df[2], df[3], df_ShapeData_temp)
def _addEllipse(df, shape_name, position, size, fill_color = [255,255,255], fill_opacity = 1., border_color = [0,0,0],
    border_opacity = 1., border_width = 2.):
    """
    Add an ellipse onto canvas.
    Args:
        df: tuple of dataframes; df[4] is the ShapeData frame the row is appended to.
        shape_name: str-the name of the ellipse.
        position: list or point.Point()
            list-
            [position_x, position_y], the coordinate represents the top-left hand corner of
            the ellipse.
            point.Point()-
            a Point object with attributes x and y representing
            the x/y position of the top-left hand corner of the ellipse.
        size: list or point.Point()
            list-
            1*2 matrix-size of the ellipse [width, height].
            point.Point()-
            a Point object with attributes x and y representing the width and height of
            the ellipse.
        fill_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        fill_opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        border_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        border_opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        border_width: float-node text line width.
    Returns:
        A new df tuple whose ShapeData frame contains the appended ellipse row.
    Raises:
        Exception: if position or size is neither a list nor a point.Point.
    """
    # isinstance replaces type()-equality checks: no throwaway Point() per call,
    # and list/Point subclasses are accepted as well.
    if not isinstance(position, (list, point.Point)):
        raise Exception("Please enter a valid position type.")
    if isinstance(position, point.Point):
        position = [position.x, position.y]
    if not isinstance(size, (list, point.Point)):
        raise Exception("Please enter a valid size type.")
    if isinstance(size, point.Point):
        size = [size.x, size.y]
    fill_color_rgba = _color_to_rgb(fill_color, fill_opacity)
    border_color_rgba = _color_to_rgb(border_color, border_opacity)
    df_ShapeData_temp = df[4].copy()
    # Build a one-row dict keyed by the canonical ShapeData column names.
    shape_row_dct = {k: [] for k in processSBML.COLUMN_NAME_df_ShapeData}
    shape_row_dct[processSBML.SHAPENAME].append(shape_name)
    shape_row_dct[processSBML.POSITION].append(position)
    shape_row_dct[processSBML.SIZE].append(size)
    shape_row_dct[processSBML.FILLCOLOR].append(fill_color_rgba)
    shape_row_dct[processSBML.BORDERCOLOR].append(border_color_rgba)
    shape_row_dct[processSBML.BORDERWIDTH].append(border_width)
    shape_row_dct[processSBML.SHAPETYPE].append('ellipse')
    # Ellipses carry no extra vertex info (unlike polygons).
    shape_row_dct[processSBML.SHAPEINFO].append([])
    if len(df_ShapeData_temp) == 0:
        df_ShapeData_temp = pd.DataFrame(shape_row_dct)
    else:
        df_ShapeData_temp = pd.concat([df_ShapeData_temp,
            pd.DataFrame(shape_row_dct)], ignore_index=True)
    return (df[0], df[1], df[2], df[3], df_ShapeData_temp)
def _addPolygon(df, shape_name, shape_info, position, size, fill_color=[255,255,255], fill_opacity = 1.,
    border_color = [0,0,0], border_opacity = 1., border_width = 2.):
    """
    Add a polygon onto canvas.
    Args:
        df: tuple of dataframes; df[4] is the ShapeData frame the row is appended to.
        shape_name: str-the name of the polygon.
        shape_info: list-[[x1,y1],[x2,y2],[x3,y3],etc], where x,y are floating numbers from 0 to 100.
            x represents the percentage of width, and y represents the percentage of height.
        position: list or point.Point()
            list-
            [position_x, position_y], the coordinate represents the top-left hand corner of
            the Polygon.
            point.Point()-
            a Point object with attributes x and y representing
            the x/y position of the top-left hand corner of the polygon.
        size: list or point.Point()
            list-
            1*2 matrix-size of the polygon [width, height].
            point.Point()-
            a Point object with attributes x and y representing the width and height of
            the Polygon.
        fill_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        fill_opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        border_color: list-decimal_rgb 1*3 matrix/str-html_name/str-hex_string (6-digit).
        border_opacity: float-value is between [0,1], default is fully opaque (opacity = 1.).
        border_width: float-node text line width.
    Returns:
        A new df tuple whose ShapeData frame contains the appended polygon row.
    Raises:
        Exception: if position or size is neither a list nor a point.Point.
    """
    # isinstance replaces type()-equality checks: no throwaway Point() per call,
    # and list/Point subclasses are accepted as well.
    if not isinstance(position, (list, point.Point)):
        raise Exception("Please enter a valid position type.")
    if isinstance(position, point.Point):
        position = [position.x, position.y]
    if not isinstance(size, (list, point.Point)):
        raise Exception("Please enter a valid size type.")
    if isinstance(size, point.Point):
        size = [size.x, size.y]
    fill_color_rgba = _color_to_rgb(fill_color, fill_opacity)
    border_color_rgba = _color_to_rgb(border_color, border_opacity)
    df_ShapeData_temp = df[4].copy()
    # Build a one-row dict keyed by the canonical ShapeData column names.
    shape_row_dct = {k: [] for k in processSBML.COLUMN_NAME_df_ShapeData}
    shape_row_dct[processSBML.SHAPENAME].append(shape_name)
    shape_row_dct[processSBML.POSITION].append(position)
    shape_row_dct[processSBML.SIZE].append(size)
    shape_row_dct[processSBML.FILLCOLOR].append(fill_color_rgba)
    shape_row_dct[processSBML.BORDERCOLOR].append(border_color_rgba)
    shape_row_dct[processSBML.BORDERWIDTH].append(border_width)
    shape_row_dct[processSBML.SHAPETYPE].append('polygon')
    # Polygons store their normalized vertex list as shape info.
    shape_row_dct[processSBML.SHAPEINFO].append(shape_info)
    if len(df_ShapeData_temp) == 0:
        df_ShapeData_temp = pd.DataFrame(shape_row_dct)
    else:
        df_ShapeData_temp = pd.concat([df_ShapeData_temp,
            pd.DataFrame(shape_row_dct)], ignore_index=True)
    return (df[0], df[1], df[2], df[3], df_ShapeData_temp)
def _removeShape(df, shape_name_str):
"""
Remove the arbitray shape.
Args:
shape_name_str: str-the shape name.
"""
df_ShapeData_temp = df[4].copy()
idx_list = df[4].index[df[4]["shape_name"] == shape_name_str].tolist()
if len(idx_list) == 0:
raise Exception("This is not a valid id.")
df_ShapeData_temp = df_ShapeData_temp.drop(idx_list)
df_temp = (df[0], df[1], df[2], df[3], df_ShapeData_temp)
return df_temp
| 37.417992
| 2,267
| 0.609019
| 11,803
| 82,357
| 4.06134
| 0.062442
| 0.044247
| 0.049357
| 0.037154
| 0.828959
| 0.818487
| 0.80693
| 0.795498
| 0.781271
| 0.771132
| 0
| 0.044314
| 0.251964
| 82,357
| 2,201
| 2,268
| 37.417992
| 0.733792
| 0.349381
| 0
| 0.764254
| 0
| 0
| 0.171372
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054825
| false
| 0.007675
| 0.004386
| 0
| 0.114035
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c53991ac44ee6ae56e3cc82b381262ef9e97ab9
| 5,628
|
py
|
Python
|
easelenium/mouse.py
|
kirillstrelkov/easyselenium
|
99af15df42d3b4fe2c83a4a8d8d73b0f468539f7
|
[
"MIT"
] | 1
|
2021-06-13T10:49:01.000Z
|
2021-06-13T10:49:01.000Z
|
easelenium/mouse.py
|
kirillstrelkov/easelenium
|
99af15df42d3b4fe2c83a4a8d8d73b0f468539f7
|
[
"MIT"
] | null | null | null |
easelenium/mouse.py
|
kirillstrelkov/easelenium
|
99af15df42d3b4fe2c83a4a8d8d73b0f468539f7
|
[
"MIT"
] | 1
|
2019-02-24T03:06:56.000Z
|
2019-02-24T03:06:56.000Z
|
class Mouse(object):
    """High-level mouse actions (left/right click, hover) driven through a
    browser wrapper's selenium action chains.

    Every public method accepts either an already-located element or exactly
    one of the ``by_*`` selector keyword arguments, which are forwarded to the
    browser's element lookup.  Public signatures are unchanged; the repeated
    locate/wait/resolve sequence is factored into ``_resolve_visible_element``.
    """

    def __init__(self, browser):
        # Browser wrapper supplying element lookup, visibility waits,
        # safe logging, and action chains.
        self.browser = browser

    def _resolve_visible_element(self, element, force_find=False, **selectors):
        """Locate the target, wait until visible, and return the element.

        ``selectors`` are the ``by_*`` keyword arguments forwarded to the
        browser lookup.  Locator tuples are resolved to real elements; when
        ``force_find`` is True the element is always re-fetched (hover's
        historical behavior).
        """
        element = self.browser._get_element(element=element, **selectors)
        self.browser.wait_for_visible(element=element)
        if force_find or isinstance(element, tuple):
            element = self.browser.find_element(element=element)
        return element

    def left_click(
        self,
        element=None,
        by_id=None,
        by_xpath=None,
        by_link=None,
        by_partial_link=None,
        by_name=None,
        by_tag=None,
        by_css=None,
        by_class=None,
    ):
        """Left-click the element (zero offset)."""
        self.left_click_by_offset(
            element,
            0,
            0,
            by_id=by_id,
            by_xpath=by_xpath,
            by_link=by_link,
            by_partial_link=by_partial_link,
            by_name=by_name,
            by_tag=by_tag,
            by_css=by_css,
            by_class=by_class,
        )

    def left_click_by_offset(
        self,
        element=None,
        xoffset=0,
        yoffset=0,
        by_id=None,
        by_xpath=None,
        by_link=None,
        by_partial_link=None,
        by_name=None,
        by_tag=None,
        by_css=None,
        by_class=None,
    ):
        """Left-click at ``(xoffset, yoffset)`` relative to the element."""
        actions = self.browser.get_action_chains()
        element = self._resolve_visible_element(
            element,
            by_id=by_id,
            by_xpath=by_xpath,
            by_link=by_link,
            by_partial_link=by_partial_link,
            by_name=by_name,
            by_tag=by_tag,
            by_css=by_css,
            by_class=by_class,
        )
        self.browser._safe_log(
            "Click at '%s' by offset(%s,%s)", element, xoffset, yoffset
        )
        actions.move_to_element(element).move_by_offset(
            xoffset, yoffset
        ).click().perform()

    def hover(
        self,
        element=None,
        by_id=None,
        by_xpath=None,
        by_link=None,
        by_partial_link=None,
        by_name=None,
        by_tag=None,
        by_css=None,
        by_class=None,
    ):
        """Move the mouse over the element (zero offset)."""
        self.browser._safe_log("Hover at '%s'", element)
        self.hover_by_offset(
            element,
            0,
            0,
            by_id=by_id,
            by_xpath=by_xpath,
            by_link=by_link,
            by_partial_link=by_partial_link,
            by_name=by_name,
            by_tag=by_tag,
            by_css=by_css,
            by_class=by_class,
        )

    def hover_by_offset(
        self,
        element=None,
        xoffset=0,
        yoffset=0,
        by_id=None,
        by_xpath=None,
        by_link=None,
        by_partial_link=None,
        by_name=None,
        by_tag=None,
        by_css=None,
        by_class=None,
    ):
        """Move the mouse to ``(xoffset, yoffset)`` relative to the element."""
        actions = self.browser.get_action_chains()
        # force_find=True: hover always re-fetches the element, not only
        # when it is a locator tuple.
        element = self._resolve_visible_element(
            element,
            force_find=True,
            by_id=by_id,
            by_xpath=by_xpath,
            by_link=by_link,
            by_partial_link=by_partial_link,
            by_name=by_name,
            by_tag=by_tag,
            by_css=by_css,
            by_class=by_class,
        )
        self.browser._safe_log(
            "Mouse over '%s' by offset(%s,%s)", element, xoffset, yoffset
        )
        actions.move_to_element(element).move_by_offset(xoffset, yoffset).perform()

    def right_click(
        self,
        element=None,
        by_id=None,
        by_xpath=None,
        by_link=None,
        by_partial_link=None,
        by_name=None,
        by_tag=None,
        by_css=None,
        by_class=None,
    ):
        """Right-click (context click) the element."""
        actions = self.browser.get_action_chains()
        element = self._resolve_visible_element(
            element,
            by_id=by_id,
            by_xpath=by_xpath,
            by_link=by_link,
            by_partial_link=by_partial_link,
            by_name=by_name,
            by_tag=by_tag,
            by_css=by_css,
            by_class=by_class,
        )
        self.browser._safe_log(
            "Right click at '%s'",
            self.browser._to_string(
                element=element,
            ),
        )
        actions.context_click(element).perform()

    def right_click_by_offset(
        self,
        element=None,
        xoffset=0,
        yoffset=0,
        by_id=None,
        by_xpath=None,
        by_link=None,
        by_partial_link=None,
        by_name=None,
        by_tag=None,
        by_css=None,
        by_class=None,
    ):
        """Right-click at ``(xoffset, yoffset)`` relative to the element."""
        actions = self.browser.get_action_chains()
        element = self._resolve_visible_element(
            element,
            by_id=by_id,
            by_xpath=by_xpath,
            by_link=by_link,
            by_partial_link=by_partial_link,
            by_name=by_name,
            by_tag=by_tag,
            by_css=by_css,
            by_class=by_class,
        )
        self.browser._safe_log(
            "Right click at '%s' by offset(%s,%s)", element, xoffset, yoffset
        )
        actions.move_to_element(element).move_by_offset(
            xoffset, yoffset
        ).context_click().perform()
| 24.902655
| 83
| 0.515991
| 657
| 5,628
| 4.062405
| 0.070015
| 0.101161
| 0.087673
| 0.076433
| 0.905583
| 0.905583
| 0.905583
| 0.905583
| 0.905583
| 0.905583
| 0
| 0.002941
| 0.395878
| 5,628
| 225
| 84
| 25.013333
| 0.782059
| 0
| 0
| 0.805825
| 0
| 0
| 0.023099
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033981
| false
| 0
| 0
| 0
| 0.038835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c9ad72ce752f6b5bae237d5f88c958104f5d8ad
| 201
|
py
|
Python
|
python/src/converters/__init__.py
|
vvucetic/keyvi
|
e6f02373350fea000aa3d20be9cc1d0ec09441f7
|
[
"Apache-2.0"
] | 199
|
2017-12-29T13:36:53.000Z
|
2022-03-31T19:38:01.000Z
|
python/src/converters/__init__.py
|
vvucetic/keyvi
|
e6f02373350fea000aa3d20be9cc1d0ec09441f7
|
[
"Apache-2.0"
] | 188
|
2017-11-03T18:22:46.000Z
|
2022-03-03T17:43:55.000Z
|
python/src/converters/__init__.py
|
vvucetic/keyvi
|
e6f02373350fea000aa3d20be9cc1d0ec09441f7
|
[
"Apache-2.0"
] | 35
|
2017-11-02T19:18:34.000Z
|
2021-10-05T09:37:22.000Z
|
from .pykeyvi_autowrap_conversion_providers import *
from autowrap.ConversionProvider import special_converters
def register_converters():
    """Register the pykeyvi-specific conversion provider with autowrap."""
    converter = MatchIteratorPairConverter()
    special_converters.append(converter)
| 28.714286
| 59
| 0.860697
| 19
| 201
| 8.789474
| 0.684211
| 0.203593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084577
| 201
| 6
| 60
| 33.5
| 0.907609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b1caffcf4ef72abfec8b504afb8c6bf3d8e5acf8
| 370
|
py
|
Python
|
PythonExercicios/ex108 - Formatando moedas em Python/programa principal.py
|
laaisfmaia/Exercicios-do-Curso-de-Python
|
bdaaf682db9f058d51d4dd084dab0e095f7e74c3
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex108 - Formatando moedas em Python/programa principal.py
|
laaisfmaia/Exercicios-do-Curso-de-Python
|
bdaaf682db9f058d51d4dd084dab0e095f7e74c3
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex108 - Formatando moedas em Python/programa principal.py
|
laaisfmaia/Exercicios-do-Curso-de-Python
|
bdaaf682db9f058d51d4dd084dab0e095f7e74c3
|
[
"MIT"
] | null | null | null |
import moeda

# Read a price and show half, double, +10% and -20%, all currency-formatted.
preco = float(input('Digite um preço: R$'))
print(f'A metade de {moeda.moeda(preco)} é {moeda.moeda(moeda.metade(preco))}')
print(f'O dobro de {moeda.moeda(preco)} é {moeda.moeda(moeda.dobro(preco))}')
print(f'Aumentando 10% de {moeda.moeda(preco)} temos {moeda.moeda(moeda.aumentar(preco,10))}')
print(f'Diminuindo 20% de {moeda.moeda(preco)} temos {moeda.moeda(moeda.diminuir(preco,20))}')
| 46.25
| 86
| 0.691892
| 67
| 370
| 3.820896
| 0.358209
| 0.46875
| 0.1875
| 0.203125
| 0.484375
| 0.484375
| 0.484375
| 0.484375
| 0
| 0
| 0
| 0.023739
| 0.089189
| 370
| 7
| 87
| 52.857143
| 0.735905
| 0
| 0
| 0
| 0
| 0.666667
| 0.786486
| 0.348649
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b1d3ad08f7e74a8dd7683b822ec07fdd04f68f8c
| 504
|
py
|
Python
|
HS08TEST.py
|
ankitpipalia/codechef-solutions
|
d10e7f15b74a11655b0e53953a8e2bc7efbf7377
|
[
"MIT"
] | 1
|
2022-01-23T08:13:17.000Z
|
2022-01-23T08:13:17.000Z
|
HS08TEST.py
|
ankitpipalia/codechef-solutions
|
d10e7f15b74a11655b0e53953a8e2bc7efbf7377
|
[
"MIT"
] | null | null | null |
HS08TEST.py
|
ankitpipalia/codechef-solutions
|
d10e7f15b74a11655b0e53953a8e2bc7efbf7377
|
[
"MIT"
] | null | null | null |
# CodeChef HS08TEST: ATM withdrawal.
# Fix: the file contained an unresolved git merge conflict
# (<<<<<<< / ======= / >>>>>>> markers) with two identical copies of the
# logic; resolved to a single copy.
amount, balance = input().split(" ")
amount = float(amount)
balance = float(balance)
# Bank charge is a flat 0.50 per successful withdrawal.
charges = amount + 0.5
# Withdrawal succeeds only for multiples of 5 with sufficient funds
# (including the charge); otherwise the balance is printed unchanged.
if amount % 5 == 0 and charges <= balance:
    print("%.2f" % (balance - charges))
else:
    print("%.2f" % balance)
| 25.2
| 48
| 0.634921
| 59
| 504
| 5.423729
| 0.254237
| 0.1625
| 0.13125
| 0.14375
| 0.81875
| 0.81875
| 0.81875
| 0.81875
| 0.81875
| 0.81875
| 0
| 0.082938
| 0.162698
| 504
| 20
| 49
| 25.2
| 0.675355
| 0
| 0
| 0.777778
| 0
| 0
| 0.027723
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b1d80ae236da43a2332116c3e32bfb46607392d5
| 3,374
|
py
|
Python
|
actions/postpone.py
|
dmakeienko/remind_me_bot
|
54c49abe4dc7674c02cf01390fb6f1551e951b07
|
[
"Apache-2.0"
] | 13
|
2020-01-19T18:27:29.000Z
|
2022-02-07T21:15:24.000Z
|
actions/postpone.py
|
dmakeienko/remind_me_bot
|
54c49abe4dc7674c02cf01390fb6f1551e951b07
|
[
"Apache-2.0"
] | 14
|
2020-06-16T07:18:11.000Z
|
2020-07-10T09:04:24.000Z
|
actions/postpone.py
|
dmakeienko/remind_me_bot
|
54c49abe4dc7674c02cf01390fb6f1551e951b07
|
[
"Apache-2.0"
] | 6
|
2019-12-18T05:23:36.000Z
|
2021-09-20T20:56:08.000Z
|
from db.database import _update_time, _get_last_remind
from datetime import datetime, timedelta
from utils.constants import DATETIME_FORMAT, HOUR
def postpone(bot, update, args):
    """Postpone the user's most recent remind by a user-supplied amount.

    Args:
        bot: Telegram bot used to send feedback messages.
        update: incoming Telegram update; the chat id is read from it.
        args: command arguments, e.g. ["30"] or ["2", "h"] —
              args[0] is the amount, optional args[1] is the unit
              ('m' = minutes, the default; 'h' = hours via the HOUR constant).
    """
    user_chat_id = update.message.chat_id
    user_message = ' '.join(args).split(" ")
    postpone_timedelta = user_message[0]
    postpone_time = 0
    try:
        # Unit defaults to minutes when not supplied.
        if len(user_message) > 1:
            postpone_format = user_message[1]
        else:
            postpone_format = 'm'
        if postpone_format.lower() == 'h':
            postpone_time = int(postpone_timedelta) * HOUR
        elif postpone_format.lower() == 'm':
            postpone_time = int(postpone_timedelta)
        remind_id = _get_last_remind(user_chat_id)['id']
        old_time = _get_last_remind(user_chat_id)['remind_time']
        new_time = (datetime.strptime(old_time, DATETIME_FORMAT) + timedelta(minutes=postpone_time)).strftime(DATETIME_FORMAT)
        _update_time(user_chat_id, remind_id, new_time)
        bot.send_message(chat_id=update.message.chat_id, text=f"📌 Remind postponed for {str(timedelta(minutes=postpone_time))}")
    except ValueError:
        # int() on an empty/non-numeric amount (or a malformed stored time).
        # Fix: dropped useless f-prefix from placeholder-free strings.
        bot.send_message(chat_id=update.message.chat_id, text="Oops 😯, you forgot to specify postpone time!")
    except TypeError:
        # _get_last_remind found nothing to subscript — no active remind.
        bot.send_message(chat_id=update.message.chat_id, text="Sorry, there is no active remind to postpone😔")
def postpone_30(bot, user_chat_id):
    """Postpone the user's most recent remind by a fixed 30 minutes.

    Args:
        bot: Telegram bot used to send feedback messages.
        user_chat_id: chat id of the user whose last remind is postponed.
    """
    # Fix: the format was hardcoded to 'm', making the copied unit
    # if/elif dead code — collapsed to the constant it always produced.
    postpone_time = 30
    try:
        remind_id = _get_last_remind(user_chat_id)['id']
        old_time = _get_last_remind(user_chat_id)['remind_time']
        new_time = (datetime.strptime(old_time, DATETIME_FORMAT) + timedelta(minutes=postpone_time)).strftime(DATETIME_FORMAT)
        _update_time(user_chat_id, remind_id, new_time)
        bot.send_message(chat_id=user_chat_id, text=f"📌 Remind postponed for {str(timedelta(minutes=postpone_time))}")
    except ValueError:
        # strptime can raise if the stored remind_time is malformed.
        bot.send_message(chat_id=user_chat_id, text="Oops 😯, you forgot to specify postpone time!")
    except TypeError:
        # _get_last_remind found nothing to subscript — no active remind.
        bot.send_message(chat_id=user_chat_id, text="Sorry, there is no active remind to postpone😔")
def postpone_1h(bot, user_chat_id):
    """Postpone the user's most recent remind by a fixed 1 hour.

    Args:
        bot: Telegram bot used to send feedback messages.
        user_chat_id: chat id of the user whose last remind is postponed.
    """
    # Fix: the format was hardcoded to 'h', making the copied unit
    # if/elif dead code — collapsed to the value it always produced
    # (1 * HOUR; the result is fed to timedelta(minutes=...)).
    postpone_time = 1 * HOUR
    try:
        remind_id = _get_last_remind(user_chat_id)['id']
        old_time = _get_last_remind(user_chat_id)['remind_time']
        new_time = (datetime.strptime(old_time, DATETIME_FORMAT) + timedelta(minutes=postpone_time)).strftime(DATETIME_FORMAT)
        _update_time(user_chat_id, remind_id, new_time)
        bot.send_message(chat_id=user_chat_id, text=f"📌 Remind postponed for {str(timedelta(minutes=postpone_time))}")
    except ValueError:
        # strptime can raise if the stored remind_time is malformed.
        bot.send_message(chat_id=user_chat_id, text="Oops 😯, you forgot to specify postpone time!")
    except TypeError:
        # _get_last_remind found nothing to subscript — no active remind.
        bot.send_message(chat_id=user_chat_id, text="Sorry, there is no active remind to postpone😔")
| 46.861111
| 128
| 0.696503
| 470
| 3,374
| 4.689362
| 0.138298
| 0.084392
| 0.08167
| 0.073503
| 0.876588
| 0.876588
| 0.817151
| 0.817151
| 0.817151
| 0.817151
| 0
| 0.004441
| 0.19917
| 3,374
| 71
| 129
| 47.521127
| 0.80792
| 0
| 0
| 0.709677
| 0
| 0
| 0.14997
| 0.034677
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048387
| false
| 0
| 0.048387
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1ead9b6e35c20b386d4a1f486314f8b3f445b53
| 45
|
py
|
Python
|
Ultra Easy/second_element.py
|
dabockster/PythonExcercises
|
3fdf6f406e588d0e854abe57a684d2f639071586
|
[
"MIT"
] | 1
|
2018-10-11T01:49:26.000Z
|
2018-10-11T01:49:26.000Z
|
Ultra Easy/second_element.py
|
dabockster/PythonExcercises
|
3fdf6f406e588d0e854abe57a684d2f639071586
|
[
"MIT"
] | null | null | null |
Ultra Easy/second_element.py
|
dabockster/PythonExcercises
|
3fdf6f406e588d0e854abe57a684d2f639071586
|
[
"MIT"
] | null | null | null |
# Print the second element (index 1) of a fixed 0..10 list.
arr = list(range(11))
print(arr[1])
| 15
| 30
| 0.533333
| 15
| 45
| 1.6
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.317073
| 0.088889
| 45
| 3
| 31
| 15
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
591a874f66c91bfa443b56554370f73c4a680124
| 178
|
py
|
Python
|
nbmetalog/get_dataframe_short_digest.py
|
mmore500/nbmetalog
|
670f8ad76a587d8848c81e4f790c31c96402f8b0
|
[
"MIT"
] | null | null | null |
nbmetalog/get_dataframe_short_digest.py
|
mmore500/nbmetalog
|
670f8ad76a587d8848c81e4f790c31c96402f8b0
|
[
"MIT"
] | 1
|
2021-09-02T16:08:58.000Z
|
2021-09-02T16:08:58.000Z
|
nbmetalog/get_dataframe_short_digest.py
|
mmore500/nbmetalog
|
670f8ad76a587d8848c81e4f790c31c96402f8b0
|
[
"MIT"
] | null | null | null |
from . import _except_return_none
from . import get_dataframe_full_digest
@_except_return_none
def get_dataframe_short_digest(df):
    """Return the first 16 characters of the dataframe's full digest.

    The decorator turns any exception into a None return.
    """
    full_digest = get_dataframe_full_digest(df)
    return full_digest[:16]
| 25.428571
| 45
| 0.837079
| 27
| 178
| 4.962963
| 0.481481
| 0.268657
| 0.238806
| 0.328358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0125
| 0.101124
| 178
| 6
| 46
| 29.666667
| 0.825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
a709e401158df0557ac6e8a69ec4c4d484bdee19
| 32,901
|
py
|
Python
|
great_international/migrations/0102_add_new_investment_sector_model.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2018-03-20T11:19:07.000Z
|
2021-10-05T07:53:11.000Z
|
great_international/migrations/0102_add_new_investment_sector_model.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 802
|
2018-02-05T14:16:13.000Z
|
2022-02-10T10:59:21.000Z
|
great_international/migrations/0102_add_new_investment_sector_model.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2019-01-22T13:19:37.000Z
|
2019-07-01T10:35:26.000Z
|
# Generated by Django 2.2.24 on 2021-09-07 10:07
import core.model_fields
from django.db import migrations, models
import django.db.models.deletion
import great_international.blocks.great_international
import great_international.panels.great_international
import modelcluster.fields
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
    # Auto-generated Django migration (Django 2.2.24): creates the
    # InternationalInvestmentSectorPage Wagtail page model.
    # NOTE(review): generated file — prefer regenerating via `makemigrations`
    # over hand-editing; the long StreamField literals must mirror the model.

    # Migrations this one must run after: the Wagtail core/image apps and the
    # two project apps whose tables are referenced by foreign keys below.
    dependencies = [
        ('wagtailimages', '0022_uploadedimage'),
        ('wagtailcore', '0059_apply_collection_ordering'),
        ('export_readiness', '0075_auto_20200518_1253'),
        ('great_international', '0101_remove_limit_on_atlas_landing_page_block_main_content'),
    ]

    operations = [
        migrations.CreateModel(
            name='InternationalInvestmentSectorPage',
            fields=[
                # Multi-table-inheritance parent link to the base Wagtail Page.
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('service_name', models.CharField(choices=[('FIND_A_SUPPLIER', 'Find a Supplier'), ('EXPORT_READINESS', 'Export Readiness'), ('INVEST', 'Invest'), ('COMPONENTS', 'Components'), ('GREAT_INTERNATIONAL', 'Great International')], db_index=True, max_length=100, null=True)),
                ('uses_tree_based_routing', models.BooleanField(default=False, help_text="Allow this page's URL to be determined by its slug, and the slugs of its ancestors in the page tree.", verbose_name='tree-based routing enabled')),
                # Each translatable field below has a base column plus per-locale
                # columns (_en_gb, _de, _ja, _zh_hans, _fr, _es, _pt, _ar) —
                # presumably emitted by a model-translation library; confirm
                # against the project's translation config.
                ('heading', models.CharField(max_length=255, verbose_name='Sector name')),
                ('heading_en_gb', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_de', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_ja', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_zh_hans', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_fr', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_es', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_pt', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('heading_ar', models.CharField(max_length=255, null=True, verbose_name='Sector name')),
                ('standfirst', models.TextField(blank=True, help_text='Displayed below the sector name')),
                ('standfirst_en_gb', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_de', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_ja', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_zh_hans', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_fr', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_es', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_pt', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('standfirst_ar', models.TextField(blank=True, help_text='Displayed below the sector name', null=True)),
                ('featured_description', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page')),
                ('featured_description_en_gb', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_de', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_ja', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_zh_hans', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_fr', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_es', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_pt', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                ('featured_description_ar', models.TextField(blank=True, help_text='This is the description shown when the sector is featured on another page', null=True)),
                # Project-defined Markdown field type (core.model_fields).
                ('intro_text', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_en_gb', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_de', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_ja', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_zh_hans', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_fr', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_es', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_pt', core.model_fields.MarkdownField(blank=True, null=True)),
                ('intro_text_ar', core.model_fields.MarkdownField(blank=True, null=True)),
                ('contact_name', models.CharField(blank=True, max_length=255)),
                ('contact_name_en_gb', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_de', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_ja', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_zh_hans', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_fr', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_es', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_pt', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_name_ar', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title', models.CharField(blank=True, max_length=255)),
                ('contact_job_title_en_gb', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_de', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_ja', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_zh_hans', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_fr', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_es', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_pt', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_job_title_ar', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_en_gb', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_de', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_ja', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_zh_hans', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_fr', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_es', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_pt', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_ar', models.URLField(blank=True, max_length=1500, null=True)),
                ('contact_link_button_preamble', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255)),
                ('contact_link_button_preamble_en_gb', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_de', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_ja', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_zh_hans', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_fr', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_es', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_pt', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_preamble_ar', models.CharField(blank=True, help_text='eg: "Contact the sector lead"', max_length=255, null=True)),
                ('contact_link_button_label', models.CharField(blank=True, max_length=255)),
                ('contact_link_button_label_en_gb', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_de', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_ja', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_zh_hans', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_fr', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_es', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_pt', models.CharField(blank=True, max_length=255, null=True)),
                ('contact_link_button_label_ar', models.CharField(blank=True, max_length=255, null=True)),
                ('related_opportunities_header', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255)),
                ('related_opportunities_header_en_gb', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_de', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_ja', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_zh_hans', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_fr', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_es', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_pt', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                ('related_opportunities_header_ar', models.CharField(blank=True, help_text='You may want to phrase this to suit the kind of opportunities being featured, if not automatic', max_length=255, null=True)),
                # StreamField body content: each 'content_section' holds a header,
                # nested text/image/columns blocks (1-2 of them), and an optional
                # styling slug. The nine variants below are identical except for
                # the locale suffix in the field name.
                ('downpage_content', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_en_gb', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_de', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_ja', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_zh_hans', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_fr', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_es', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_pt', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                ('downpage_content_ar', wagtail.core.fields.StreamField([('content_section', wagtail.core.blocks.StructBlock([('content', wagtail.core.blocks.StreamBlock([('header', wagtail.core.blocks.CharBlock(max_length=255, required=False)), ('nested_content', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock(help_text='Use H3 headers or lower, not H2 or H1')), ('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))])), ('columns', wagtail.core.blocks.StreamBlock([('text', great_international.blocks.great_international.MarkdownBlock())], help_text='Smaller snippets of content'))], help_text='Smaller snippets of content', max_num=2, min_num=1))], required=False)), ('block_slug', wagtail.core.blocks.CharBlock(help_text="Only needed if special styling is involved: check with a developer. If in doubt, it's not needed", max_length=255, required=False))]))], blank=True, null=True)),
                # StreamField of image+markdown pairs, again one per locale.
                ('early_opportunities', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_en_gb', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_de', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_ja', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_zh_hans', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_fr', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_es', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_pt', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                ('early_opportunities_ar', wagtail.core.fields.StreamField([('early_opportunity', wagtail.core.blocks.StructBlock([('image', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=True)), ('image_alt', wagtail.core.blocks.CharBlock(max_length=255, required=True)), ('caption', wagtail.core.blocks.CharBlock(max_length=255, required=False))], blank=False)), ('text', great_international.blocks.great_international.MarkdownBlock(blank=False))]))], blank=True, null=True)),
                # Per-locale image foreign keys (SET_NULL so deleting an image
                # does not delete the page). Note these groups use alphabetical
                # suffix ordering, unlike the field groups above.
                ('contact_avatar', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_ar', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_de', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_en_gb', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_es', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_fr', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_ja', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_pt', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('contact_avatar_zh_hans', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_ar', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_de', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_en_gb', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_es', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_fr', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_ja', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_pt', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('hero_image_zh_hans', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_ar', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_de', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_en_gb', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_es', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_fr', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_ja', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_pt', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                ('intro_image_zh_hans', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
                # Cluster-aware M2M relations (modelcluster) to other page/tag models.
                ('manually_selected_related_opportunities', modelcluster.fields.ParentalManyToManyField(blank=True, help_text='Max 3 will be shown. If none is selected, three will be automatically chosen based on priority and/or most recently created', to='great_international.InvestmentOpportunityPage')),
                ('tags', modelcluster.fields.ParentalManyToManyField(blank=True, to='export_readiness.Tag')),
            ],
            options={
                'abstract': False,
            },
            # Mixes a project panels class into the standard Wagtail page base.
            bases=(great_international.panels.great_international.InternationalInvestmentSectorPagePanels, 'wagtailcore.page'),
        ),
    ]
| 189.086207
| 1,157
| 0.726331
| 4,220
| 32,901
| 5.477251
| 0.051185
| 0.065199
| 0.086787
| 0.060742
| 0.934888
| 0.921087
| 0.918448
| 0.914121
| 0.903738
| 0.903522
| 0
| 0.015713
| 0.123765
| 32,901
| 173
| 1,158
| 190.179191
| 0.786049
| 0.001398
| 0
| 0
| 1
| 0.065868
| 0.275439
| 0.047728
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.053892
| 0
| 0.071856
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
596e3dc0802785542397f0d9d5b0b5879035c950
| 20,538
|
py
|
Python
|
exapi/request_creators/binance/spot/trading/creator.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
exapi/request_creators/binance/spot/trading/creator.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
exapi/request_creators/binance/spot/trading/creator.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
"""Has binance trading request creator interface."""
from typing import Optional
from yarl import URL
from exapi.auth.binance import IBinanceAuth
from exapi.request_creators.binance.spot.base import BinanceBaseSpotRequestCreator
from exapi.request_creators.request import Request
from exapi.requesters.typedefs import Params
from exapi.typedefs.binance import (OrderResponseType, OrderSide,
OrderType, TimeInForce)
from exapi.utils.time import get_timestamp
class BinanceSpotTradingRequestCreator(BinanceBaseSpotRequestCreator):
"""Has methods for creating requests to binance spot trading api."""
BASE_URL: str = BinanceBaseSpotRequestCreator.BASE_URL + "/api/v3"
def __init__(self, auth: IBinanceAuth) -> None:
    """Store the binance auth object for use by request-creation methods.

    Args:
        auth: binance auth implementation; kept on ``self._auth``.
            NOTE(review): presumably used to sign created requests —
            confirm in the request-creation methods (not fully visible here).
    """
    self._auth = auth
def create_order_request(self, test_order: bool,
symbol: str,
side: OrderSide,
type: OrderType,
time_in_force: Optional[TimeInForce] = None,
quantity: Optional[str] = None,
quote_order_qty: Optional[str] = None,
price: Optional[str] = None,
new_client_order_id: Optional[str] = None,
stop_price: Optional[str] = None,
iceberg_qty: Optional[str] = None,
new_order_resp_type: Optional[OrderResponseType] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates request for order or test order endpoint.
Args:
test_order (bool): if True request for test order will be created.
symbol (str)
side (OrderSide)
type (OrderType)
time_in_force (Optional[TimeInForce], optional)
quantity (Optional[str], optional)
quote_order_qty (Optional[str], optional)
price (Optional[str], optional)
new_client_order_id (Optional[str], optional): A unique id among open orders.
Automatically generated if not sent.
stop_price (Optional[str], optional): Used with STOP_LOSS, STOP_LOSS_LIMIT,
TAKE_PROFIT, and TAKE_PROFIT_LIMIT orders.
iceberg_qty (Optional[str], optional): Used with LIMIT, STOP_LOSS_LIMIT,
and TAKE_PROFIT_LIMIT to create an iceberg order.
new_order_resp_type ([type], optional): Set the response JSON. ACK, RESULT, or FULL;
MARKET and LIMIT order types default to FULL, all other orders default to ACK.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "POST"
path = "/order/test" if test_order else "/order"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"symbol": symbol,
"side": side,
"type": type,
"timestamp": str(timestamp)
}
if time_in_force is not None:
params["timeInForce"] = time_in_force
if quantity is not None:
params["quantity"] = quantity
if quote_order_qty is not None:
params["quoteOrderQty"] = quote_order_qty
if price is not None:
params["price"] = price
if new_client_order_id is not None:
params["newClientOrderId"] = new_client_order_id
if stop_price is not None:
params["stopPrice"] = stop_price
if iceberg_qty is not None:
params["icebergQty"] = iceberg_qty
if new_order_resp_type is not None:
params["newOrderRespType"] = new_order_resp_type
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_new_test_order_request(self, symbol: str,
side: OrderSide,
type: OrderType,
time_in_force: Optional[TimeInForce] = None,
quantity: Optional[str] = None,
quote_order_qty: Optional[str] = None,
price: Optional[str] = None,
new_client_order_id: Optional[str] = None,
stop_price: Optional[str] = None,
iceberg_qty: Optional[str] = None,
new_order_resp_type: Optional[OrderResponseType] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates new test order request.
Args:
symbol (str)
side (OrderSide)
type (OrderType)
time_in_force (Optional[TimeInForce], optional)
quantity (Optional[str], optional)
quote_order_qty (Optional[str], optional)
price (Optional[str], optional)
new_client_order_id (Optional[str], optional): A unique id among open orders.
Automatically generated if not sent.
stop_price (Optional[str], optional): Used with STOP_LOSS, STOP_LOSS_LIMIT,
TAKE_PROFIT, and TAKE_PROFIT_LIMIT orders.
iceberg_qty (Optional[str], optional): Used with LIMIT, STOP_LOSS_LIMIT,
and TAKE_PROFIT_LIMIT to create an iceberg order.
new_order_resp_type ([type], optional): Set the response JSON. ACK, RESULT, or FULL;
MARKET and LIMIT order types default to FULL, all other orders default to ACK.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
return self.create_order_request(
test_order=True,
symbol=symbol,
side=side,
type=type,
timestamp=timestamp,
time_in_force=time_in_force,
quantity=quantity,
quote_order_qty=quote_order_qty,
price=price,
new_client_order_id=new_client_order_id,
stop_price=stop_price,
iceberg_qty=iceberg_qty,
new_order_resp_type=new_order_resp_type,
recv_window=recv_window)
def create_new_order_request(self, symbol: str,
side: OrderSide,
type: OrderType,
time_in_force: Optional[TimeInForce] = None,
quantity: Optional[str] = None,
quote_order_qty: Optional[str] = None,
price: Optional[str] = None,
new_client_order_id: Optional[str] = None,
stop_price: Optional[str] = None,
iceberg_qty: Optional[str] = None,
new_order_resp_type: Optional[OrderResponseType] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates new test order request.
Args:
symbol (str)
side (OrderSide)
type (OrderType)
time_in_force (Optional[TimeInForce], optional)
quantity (Optional[str], optional)
quote_order_qty (Optional[str], optional)
price (Optional[str], optional)
new_client_order_id (Optional[str], optional): A unique id among open orders.
Automatically generated if not sent.
stop_price (Optional[str], optional): Used with STOP_LOSS, STOP_LOSS_LIMIT,
TAKE_PROFIT, and TAKE_PROFIT_LIMIT orders.
iceberg_qty (Optional[str], optional): Used with LIMIT, STOP_LOSS_LIMIT,
and TAKE_PROFIT_LIMIT to create an iceberg order.
new_order_resp_type ([type], optional): Set the response JSON. ACK, RESULT, or FULL;
MARKET and LIMIT order types default to FULL, all other orders default to ACK.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
return self.create_order_request(
test_order=False,
symbol=symbol,
side=side,
type=type,
timestamp=timestamp,
time_in_force=time_in_force,
quantity=quantity,
quote_order_qty=quote_order_qty,
price=price,
new_client_order_id=new_client_order_id,
stop_price=stop_price,
iceberg_qty=iceberg_qty,
new_order_resp_type=new_order_resp_type,
recv_window=recv_window)
def create_cancel_order_request(self, symbol: str,
order_id: Optional[int] = None,
orig_client_order_id: Optional[str] = None,
new_client_order_id: Optional[str] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates cancel order request.
Args:
symbol (str)
order_id (Optional[int], optional)
orig_client_order_id (Optional[str], optional)
new_client_order_id (Optional[str], optional): Used to uniquely identify this cancel.
Automatically generated by default.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "DELETE"
path = "/order"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"symbol": symbol,
"timestamp": str(timestamp)
}
if order_id is not None:
params["orderId"] = str(order_id)
if orig_client_order_id is not None:
params["origClientOrderId"] = orig_client_order_id
if new_client_order_id is not None:
params["newClientOrderId"] = new_client_order_id
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_cancel_orders_request(self, symbol: str,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates cancel all orders request.
Args:
symbol (str)
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "DELETE"
path = "/openOrders"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"symbol": symbol,
"timestamp": str(timestamp)
}
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_query_order_request(self, symbol: str,
order_id: Optional[int] = None,
orig_client_order_id: Optional[str] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates query order request.
Args:
symbol (str)
order_id (Optional[int], optional)
orig_client_order_id (Optional[str], optional)
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "GET"
path = "/order"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"symbol": symbol,
"timestamp": str(timestamp)
}
if order_id is not None:
params["orderId"] = str(order_id)
if orig_client_order_id is not None:
params["origClientOrderId"] = orig_client_order_id
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_get_current_open_orders_request(self, symbol: Optional[str] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates get current open orders request.
Args:
symbol (Optional[str], optional): If the symbol is not sent,
orders for all symbols will be returned in an array.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "GET"
path = "/openOrders"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"timestamp": str(timestamp)
}
if symbol is not None:
params["symbol"] = symbol
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_get_all_orders_request(self, symbol: str,
order_id: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
limit: Optional[int] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates get all orders request.
Args:
symbol (str)
order_id (Optional[int], optional)
start_time (Optional[int], optional)
end_time (Optional[int], optional)
limit (Optional[int], optional): Default 500; max 1000.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "GET"
path = "/allOrders"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"symbol": symbol,
"timestamp": str(timestamp)
}
if order_id is not None:
params["orderId"] = str(order_id)
if start_time is not None:
params["startTime"] = str(start_time)
if end_time is not None:
params["endTime"] = str(end_time)
if limit is not None:
params["limit"] = str(limit)
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_get_account_info_request(self, recv_window: Optional[int] = None,
timestamp: Optional[int] = None,
) -> Request:
"""Creates get account info request.
Args:
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "GET"
path = "/account"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"timestamp": str(timestamp)
}
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
def create_get_trades_request(self, symbol: str,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
from_id: Optional[int] = None,
limit: Optional[int] = None,
recv_window: Optional[int] = None,
timestamp: Optional[int] = None
) -> Request:
"""Creates get account trades request.
Args:
symbol (str)
start_time (Optional[int], optional)
end_time (Optional[int], optional)
from_id (Optional[int], optional): trade id to fetch from.
Default gets most recent trades.
limit (Optional[int], optional): Default 500; max 1000.
recv_window (Optional[int], optional): The value cannot be greater than 60000.
timestamp (Optional[int]): if None current timestamp in milliseconds will be used.
Returns:
Request
"""
method = "GET"
path = "/myTrades"
url = URL(self._create_url(path))
timestamp = timestamp if timestamp is not None else get_timestamp()
params: Params = {
"symbol": symbol,
"timestamp": str(timestamp)
}
if start_time is not None:
params["startTime"] = str(start_time)
if end_time is not None:
params["endTime"] = str(end_time)
if from_id is not None:
params["fromId"] = str(from_id)
if limit is not None:
params["limit"] = str(limit)
if recv_window is not None:
params["recvWindow"] = str(recv_window)
auth_res = self._auth.sign(params)
url = url.with_query(auth_res.params)
req = Request(method=method, url=url, headers=auth_res.headers)
return req
| 40.270588
| 97
| 0.546207
| 2,156
| 20,538
| 5.018089
| 0.074675
| 0.061004
| 0.031611
| 0.041593
| 0.865145
| 0.849986
| 0.843978
| 0.841667
| 0.841667
| 0.833534
| 0
| 0.00507
| 0.375791
| 20,538
| 509
| 98
| 40.349705
| 0.838846
| 0.285909
| 0
| 0.783688
| 0
| 0
| 0.038685
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039007
| false
| 0
| 0.028369
| 0
| 0.109929
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
59a0cca5392068f294385a1c88efc9c1ec4bd151
| 47
|
py
|
Python
|
vkwave/bots/core/dispatching/extensions/callback/__init__.py
|
krasnovmv/vkwave
|
e0db86cc16f97797765aadfb811ec87ff7945b1f
|
[
"MIT"
] | 222
|
2020-03-30T18:09:20.000Z
|
2022-03-27T18:25:04.000Z
|
vkwave/bots/core/dispatching/extensions/callback/__init__.py
|
krasnovmv/vkwave
|
e0db86cc16f97797765aadfb811ec87ff7945b1f
|
[
"MIT"
] | 62
|
2020-03-30T18:31:25.000Z
|
2021-12-21T17:00:44.000Z
|
vkwave/bots/core/dispatching/extensions/callback/__init__.py
|
krasnovmv/vkwave
|
e0db86cc16f97797765aadfb811ec87ff7945b1f
|
[
"MIT"
] | 91
|
2020-03-30T18:34:49.000Z
|
2022-03-23T12:58:49.000Z
|
from ._aiohttp import AIOHTTPCallbackExtension
| 23.5
| 46
| 0.893617
| 4
| 47
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
59a85f1a58f8c6f217bb247a9651a39dba0eb4ba
| 6,080
|
py
|
Python
|
source/lib/probe.py
|
WinterWinds-Robotics/pymmw
|
3e2841f24a6cd98ccbee10a0ee7479f394417708
|
[
"MIT"
] | 144
|
2019-06-10T03:03:31.000Z
|
2022-03-25T09:33:41.000Z
|
source/lib/probe.py
|
WinterWinds-Robotics/pymmw
|
3e2841f24a6cd98ccbee10a0ee7479f394417708
|
[
"MIT"
] | 15
|
2019-12-11T00:03:04.000Z
|
2022-03-21T15:12:28.000Z
|
source/lib/probe.py
|
WinterWinds-Robotics/pymmw
|
3e2841f24a6cd98ccbee10a0ee7479f394417708
|
[
"MIT"
] | 51
|
2019-06-11T14:12:29.000Z
|
2022-03-28T08:38:06.000Z
|
#
# Copyright (c) 2019, Manfred Constapel
# This file is licensed under the terms of the MIT license.
#
#
# xds110 support
#
import sys
import time
import array
from lib.ports import *
from lib.utility import *
from lib.shell import *
# ------------------------------------------
XDS_USB = (0x0451, 0xbef3)
# ------------------------------------------
def xds_reset(dev, delay=100):
    """Reset the target via the XDS110 probe's vendor-specific endpoint.

    Args:
        dev: USB device handle for the XDS110 (see XDS_USB vid/pid).
        delay: pause between consecutive writes, in milliseconds.

    Returns:
        True when the reset sequence was written, False when the
        vendor-specific OUT endpoint could not be found.
    """
    # Interface map of the XDS110 composite device (kept for reference):
    #_ = {0:'CDC Communication',
    #     1:'CDC Data', 2:'Vendor Specific', 3:'CDC Communication',
    #     4:'CDC Data', 5:'Human Interface Device', 6:'Vendor Specific'}
    ep = usb_point(dev, 2, 2)
    if ep is None: return False
    # Write command 0x2a with payload '0e 00'/'0e 01' twice — presumably
    # toggling the target reset line low/high; TODO confirm against the
    # XDS110 command protocol.
    for v in ('00', '01') * 2:
        ep.write(hex2dec('{} {} {} {}'.format('2a', '02', '00', '0e {}'.format(v))))
        time.sleep(delay / 1000)
    return True
# ------------------------------------------
# Fixed sequence of hex command strings sent to the probe during the
# integrity scan-test (see xds_test). The '2a 13 ...' entries append a
# 64-repetition test pattern ('ff ff ff ff', '00 00 00 00', etc.) that is
# shifted through the JTAG DR/IR. NOTE(review): byte meanings are inferred
# only from the repeated structure — confirm against the XDS110 protocol.
__scan_test__ = (
    '2a 01 00 01',
    '2a 01 00 03', '2a 05 00 04 00 00 00 00',
    '2a 01 00 06', '2a 02 00 05 00', '2a 05 00 07 88 13 00 00',
    '2a 02 00 05 01', '2a 05 00 07 a0 86 01 00',
    '2a 05 00 2b 01 00 00 00',
    '2a 01 00 06', '2a 02 00 05 00', '2a 05 00 07 88 13 00 00',
    '2a 02 00 05 01', '2a 05 00 07 a0 86 01 00', '2a 09 00 09 01 00 00 00 01 00 00 00',
    '2a 01 00 1a',
    '2a 01 00 2f',
    '2a 01 00 02',
    '2a 01 00 01',
    '2a 01 00 03', '2a 05 00 04 00 00 00 00',
    '2a 01 00 06', '2a 02 00 05 00', '2a 05 00 07 88 13 00 00',
    '2a 02 00 05 01', '2a 05 00 07 a0 86 01 00',
    '2a 05 00 2b 01 00 00 00',
    '2a 10 00 0a 00 08 04 01 06 01 00 00 00 00 00 00 01 00 01',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('ff ff ff ff',)*4*16)),
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('00 00 00 00',)*4*16)),
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('00 00 00 00',)*4*16)),
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('ff ff ff ff',)*4*16)),
    '2a 10 00 0a 00 08 03 01 05 01 00 00 00 00 00 00 01 00 01',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('ff ff ff ff',)*4*16)),
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('00 00 00 00',)*4*16)),
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('00 00 00 00',)*4*16)),
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('ff ff ff ff',)*4*16)),
    '2a 15 00 0b 20 00 04 01 06 01 00 00 00 00 00 00 01 00 04 00 ff ff ff ff',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('ff ff ff ff',)*4*16)),
    '2a 15 00 0b 20 00 04 01 06 01 00 00 00 00 00 00 01 00 04 00 00 00 00 00',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('00 00 00 00',)*4*16)),
    '2a 15 00 0b 20 00 04 01 06 01 00 00 00 00 00 00 01 00 04 00 e2 e0 03 fe',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('e2 e0 03 fe',)*4*16)),
    '2a 15 00 0b 20 00 04 01 06 01 00 00 00 00 00 00 01 00 04 00 1d 1f fc 01',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('1d 1f fc 01',)*4*16)),
    '2a 15 00 0b 20 00 04 01 06 01 00 00 00 00 00 00 01 00 04 00 aa cc 33 55',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('aa cc 33 55',)*4*16)),
    '2a 15 00 0b 20 00 04 01 06 01 00 00 00 00 00 00 01 00 04 00 55 33 cc aa',
    '2a 13 01 0c 00 08 04 01 06 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('55 33 cc aa',)*4*16)),
    '2a 10 00 0a 00 08 04 01 06 01 00 00 00 00 00 00 01 00 01',
    '2a 01 00 08',
    '2a 09 00 09 05 00 00 00 02 00 00 00',
    '2a 15 00 0b 20 00 03 01 05 01 00 00 00 00 00 00 01 00 04 00 ff ff ff ff',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('ff ff ff ff',)*4*16)),
    '2a 15 00 0b 20 00 03 01 05 01 00 00 00 00 00 00 01 00 04 00 00 00 00 00',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('00 00 00 00',)*4*16)),
    '2a 15 00 0b 20 00 03 01 05 01 00 00 00 00 00 00 01 00 04 00 e2 e0 03 fe',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('e2 e0 03 fe',)*4*16)),
    '2a 15 00 0b 20 00 03 01 05 01 00 00 00 00 00 00 01 00 04 00 1d 1f fc 01',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('1d 1f fc 01',)*4*16)),
    '2a 15 00 0b 20 00 03 01 05 01 00 00 00 00 00 00 01 00 04 00 aa cc 33 55',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('aa cc 33 55',)*4*16)),
    '2a 15 00 0b 20 00 03 01 05 01 00 00 00 00 00 00 01 00 04 00 55 33 cc aa',
    '2a 13 01 0c 00 08 03 01 05 01 00 00 00 00 00 00 01 00 00 01 00 01 {}'.format(' '.join(('55 33 cc aa',)*4*16)),
    '2a 01 00 1a',
    '2a 01 00 2f',
    '2a 01 00 02'
)
# ------------------------------------------
def xds_test(dev, reset=True):
    """Run the __scan_test__ command sequence against the probe.

    Args:
        dev: USB device handle for the XDS110.
        reset: when True, reset the probe first via xds_reset().

    Raises:
        Exception: when any response fails the (deliberately loose)
            integrity check on the JTAG DR/IR scan-test.
    """
    if reset:
        xds_reset(dev)
    ep2o = usb_point(dev, 2, 2)  # vendor-specific OUT endpoint
    ep2i = usb_point(dev, 2, 3)  # vendor-specific IN endpoint
    # Drain any stale data so later reads line up with their commands.
    _ = dev.read(ep2i.bEndpointAddress, 1024)
    def send(epo, msg, epi=None):
        # Write one hex command; read a reply only when an IN endpoint is given.
        _ = epo.write(hex2dec(msg))
        if epi is not None:
            buf = dev.read(epi.bEndpointAddress, 1024)
            return buf
        return None
    def collect(v):
        # Send one scan-test entry and sanity-check long replies.
        res = send(ep2o, v, ep2i)
        if res is not None:
            if len(res) > 21:
                # Only payload bytes past the 8-byte header are inspected;
                # distinct-byte-count modulo 3 is the author's own heuristic.
                res = set(res[8:])
                if len(res) % 3 != 1: # super-lazy check
                    return False
        return True
    for entry in __scan_test__:
        if not collect(entry):
            raise Exception('integrity scan-test on the JTAG DR/IR has failed')
| 44.705882
| 115
| 0.530757
| 1,346
| 6,080
| 2.382615
| 0.100297
| 0.30184
| 0.314312
| 0.291862
| 0.736514
| 0.728407
| 0.725288
| 0.722794
| 0.720923
| 0.720923
| 0
| 0.482825
| 0.320066
| 6,080
| 135
| 116
| 45.037037
| 0.292937
| 0.074836
| 0
| 0.329787
| 0
| 0.340426
| 0.576217
| 0
| 0
| 0
| 0.002139
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.06383
| 0
| 0.159574
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
ab7a797149c69e509799ce529a57a9f5aadedcf2
| 4,821
|
py
|
Python
|
config.py
|
guoxianru/proxy_pool_lite
|
f9d48cb03ed3bf923610203ba8c026794eadc307
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
guoxianru/proxy_pool_lite
|
f9d48cb03ed3bf923610203ba8c026794eadc307
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
guoxianru/proxy_pool_lite
|
f9d48cb03ed3bf923610203ba8c026794eadc307
|
[
"Apache-2.0"
] | 1
|
2021-08-25T05:48:06.000Z
|
2021-08-25T05:48:06.000Z
|
# -*- coding: utf-8 -*-
# @Author: GXR
# @CreateTime: 2021-04-01
# @UpdateTime: 2021-12-10
# Redis connection settings for the proxy pool.
REDIS_HOST = "redis"
REDIS_PORT = "6379"
REDIS_DB = "0"
# Redis keys under which the proxy pool stores proxies.
REDIS_KEY_PROXY_FREE = "proxy_free"
REDIS_KEY_PROXY_USEFUL = "proxy_useful"
# Number of worker threads that validate (check) proxies.
THREAD_COUNT_PROXY_CHECK = 10
# Number of worker threads that refresh proxies.
THREAD_COUNT_PROXY_REFRESH = 5
# Period for fetching new proxies (units defined by the scheduler — TODO confirm).
TIME_GET = 1
# Period for checking newly fetched proxies.
TIME_CHECK = 2
# Period for re-validating already-usable proxies.
TIME_REFRESH = 3
# Host/port the proxy pool's HTTP API listens on.
API_HOST = "0.0.0.0"
API_PORT = "31605"
# URL requested through a proxy to test whether it works.
PROXY_CHECK_URL = "https://www.baidu.com/"
# Default request headers.
HEADERS = {
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36",
    "Cache-Control": "no-cache",
    "Pragma": "no-cache",
}
# USER_AGENT pool (rotated when making requests).
USER_AGENT_LIST = [
    # Windows:Chrome
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.35 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.43 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.19 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.24 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.23 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.22 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.87 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.30 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.39 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.69 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.16 Safari/537.36",
    # Mac:Chrome
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.35 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.43 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.19 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.24 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.23 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.22 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.87 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.30 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.39 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.69 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.16 Safari/537.36",
]
| 59.518519
| 135
| 0.704418
| 890
| 4,821
| 3.749438
| 0.137079
| 0.098891
| 0.089002
| 0.207672
| 0.860653
| 0.860653
| 0.860653
| 0.860653
| 0.860653
| 0.860653
| 0
| 0.223425
| 0.137523
| 4,821
| 80
| 136
| 60.2625
| 0.579125
| 0.044597
| 0
| 0
| 0
| 0.634615
| 0.865083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
abacc73a6cd855145705390674cb931a47256f49
| 135
|
py
|
Python
|
hello-world-veronika-cj.py
|
egriswol/astr-119-section-assignments
|
6cbeb0397b6a1a9102e8efa771e8d9ed1d3fab18
|
[
"MIT"
] | 2
|
2018-09-28T18:46:05.000Z
|
2018-09-28T19:00:27.000Z
|
hello-world-veronika-cj.py
|
egriswol/astr-119-section-assignments
|
6cbeb0397b6a1a9102e8efa771e8d9ed1d3fab18
|
[
"MIT"
] | 67
|
2018-09-26T06:39:43.000Z
|
2018-10-03T15:32:12.000Z
|
hello-world-veronika-cj.py
|
egriswol/astr-119-section-assignments
|
6cbeb0397b6a1a9102e8efa771e8d9ed1d3fab18
|
[
"MIT"
] | 62
|
2018-09-27T20:12:32.000Z
|
2018-10-03T23:53:47.000Z
|
#!/usr/bin/env python3
# Emit a one-line greeting identifying the author.
greeting = "Hello from Veronika Cabalova Joseph."
print(greeting)
| 22.5
| 45
| 0.762963
| 19
| 135
| 5.421053
| 0.736842
| 0.174757
| 0.330097
| 0.485437
| 0.601942
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008547
| 0.133333
| 135
| 6
| 45
| 22.5
| 0.871795
| 0.607407
| 0
| 0
| 0
| 0
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
abdd8acfdc5c5e885c6182c8ee575267b1c7f555
| 118
|
py
|
Python
|
optimtool/constrain/__init__.py
|
linjing-lab/optimtool
|
9ca298b91ba755b4dab4028879af2c5a06c2e6d6
|
[
"MIT"
] | 3
|
2022-01-24T14:16:07.000Z
|
2022-02-18T20:02:50.000Z
|
optimtool/constrain/__init__.py
|
linjing-lab/optimtool
|
9ca298b91ba755b4dab4028879af2c5a06c2e6d6
|
[
"MIT"
] | null | null | null |
optimtool/constrain/__init__.py
|
linjing-lab/optimtool
|
9ca298b91ba755b4dab4028879af2c5a06c2e6d6
|
[
"MIT"
] | null | null | null |
from optimtool.constrain import equal
from optimtool.constrain import unequal
from optimtool.constrain import mixequal
| 39.333333
| 40
| 0.881356
| 15
| 118
| 6.933333
| 0.466667
| 0.375
| 0.634615
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09322
| 118
| 3
| 40
| 39.333333
| 0.971963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
abf60e3a73f1b0c49155a95da368acff6204ed11
| 3,455
|
py
|
Python
|
day2.py
|
fredericmorin/adventofcode2017
|
460e804e2ef9c9e5e60b6dadb686f0896b3b16e5
|
[
"MIT"
] | null | null | null |
day2.py
|
fredericmorin/adventofcode2017
|
460e804e2ef9c9e5e60b6dadb686f0896b3b16e5
|
[
"MIT"
] | null | null | null |
day2.py
|
fredericmorin/adventofcode2017
|
460e804e2ef9c9e5e60b6dadb686f0896b3b16e5
|
[
"MIT"
] | null | null | null |
def solve(input):
    """Day 2 part 1: spreadsheet checksum.

    For each row, take the difference between its largest and smallest
    value; the checksum is the sum of those differences.

    Args:
        input: newline-separated rows of tab-separated integers.

    Returns:
        int: the checksum.
    """
    # list(...) fixes the Python 3 breakage: map() is lazy there, so the
    # original `line[0]` indexing raised TypeError. Works on Python 2 too.
    rows = [list(map(int, line.split('\t'))) for line in input.split('\n')]
    # max()/min() over the row replaces the manual running-extremes loop.
    return sum(max(row) - min(row) for row in rows)
# Part 1 sanity check from the puzzle statement (tab-separated, written
# with explicit \t escapes instead of literal tab characters).
sample_input = (
    '5\t1\t9\t5\n'
    '7\t5\t3\n'
    '2\t4\t6\t8'
)
assert 18 == solve(sample_input)
# Personal puzzle input: one spreadsheet row per line.
puzzle_input = (
    '104\t240\t147\t246\t123\t175\t372\t71\t116\t230\t260\t118\t202\t270\t277\t292\n'
    '740\t755\t135\t205\t429\t822\t844\t90\t828\t115\t440\t805\t526\t91\t519\t373\n'
    '1630\t991\t1471\t1294\t52\t1566\t50\t1508\t1367\t1489\t55\t547\t342\t512\t323\t51\n'
    '1356\t178\t1705\t119\t1609\t1409\t245\t292\t1434\t694\t405\t1692\t247\t193\t1482\t1407\n'
    '2235\t3321\t3647\t212\t1402\t3711\t3641\t1287\t2725\t692\t1235\t3100\t123\t144\t104\t101\n'
    '1306\t1224\t1238\t186\t751\t734\t1204\t1275\t366\t149\t1114\t166\t1118\t239\t153\t943\n'
    '132\t1547\t1564\t512\t2643\t2376\t2324\t2159\t1658\t107\t1604\t145\t2407\t131\t2073\t1878\n'
    '1845\t91\t1662\t108\t92\t1706\t1815\t1797\t1728\t1150\t1576\t83\t97\t547\t1267\t261\n'
    '78\t558\t419\t435\t565\t107\t638\t173\t93\t580\t338\t52\t633\t256\t377\t73\n'
    '1143\t3516\t4205\t3523\t148\t401\t3996\t3588\t300\t1117\t2915\t1649\t135\t134\t182\t267\n'
    '156\t2760\t1816\t2442\t2985\t990\t2598\t1273\t167\t821\t138\t141\t2761\t2399\t1330\t1276\n'
    '3746\t3979\t2989\t161\t4554\t156\t3359\t173\t3319\t192\t3707\t264\t762\t2672\t4423\t2924\n'
    '3098\t4309\t4971\t5439\t131\t171\t5544\t595\t154\t571\t4399\t4294\t160\t6201\t4329\t5244\n'
    '728\t249\t1728\t305\t2407\t239\t691\t2241\t2545\t1543\t55\t2303\t1020\t753\t193\t1638\n'
    '260\t352\t190\t877\t118\t77\t1065\t1105\t1085\t1032\t71\t87\t851\t56\t1161\t667\n'
    '1763\t464\t182\t1932\t1209\t640\t545\t931\t1979\t197\t1774\t174\t2074\t1800\t939\t161'
)
# print(...) with parentheses is identical output on Python 2 and valid
# syntax on Python 3 — the bare `print x` statement was py2-only.
print(solve(puzzle_input))
def solve2(input):
    """Day 2 part 2: sum of evenly-divisible quotients.

    For each row, find the (ordered) pair of values where one evenly
    divides the other and add the quotient to the checksum.

    Args:
        input: newline-separated rows of tab-separated integers.

    Returns:
        int: the sum of the per-row quotients.
    """
    from itertools import permutations
    checksum = 0
    for line in input.split('\n'):
        row = list(map(int, line.split('\t')))
        for a, b in permutations(row, 2):
            # Exact integer arithmetic replaces the original
            # `a / float(b) == int(...)` check, which relied on float
            # equality; `%` / `//` give the same answers without rounding risk.
            if a % b == 0:
                checksum += a // b
    return checksum
# Part 2: verify against the worked example, then solve the puzzle input.
sample_input = (
    '5 9 2 8\n'
    '9 4 7 3\n'
    '3 8 6 5'
)
assert 9 == solve2(sample_input)

puzzle_input = (
    '104 240 147 246 123 175 372 71 116 230 260 118 202 270 277 292\n'
    '740 755 135 205 429 822 844 90 828 115 440 805 526 91 519 373\n'
    '1630 991 1471 1294 52 1566 50 1508 1367 1489 55 547 342 512 323 51\n'
    '1356 178 1705 119 1609 1409 245 292 1434 694 405 1692 247 193 1482 1407\n'
    '2235 3321 3647 212 1402 3711 3641 1287 2725 692 1235 3100 123 144 104 101\n'
    '1306 1224 1238 186 751 734 1204 1275 366 149 1114 166 1118 239 153 943\n'
    '132 1547 1564 512 2643 2376 2324 2159 1658 107 1604 145 2407 131 2073 1878\n'
    '1845 91 1662 108 92 1706 1815 1797 1728 1150 1576 83 97 547 1267 261\n'
    '78 558 419 435 565 107 638 173 93 580 338 52 633 256 377 73\n'
    '1143 3516 4205 3523 148 401 3996 3588 300 1117 2915 1649 135 134 182 267\n'
    '156 2760 1816 2442 2985 990 2598 1273 167 821 138 141 2761 2399 1330 1276\n'
    '3746 3979 2989 161 4554 156 3359 173 3319 192 3707 264 762 2672 4423 2924\n'
    '3098 4309 4971 5439 131 171 5544 595 154 571 4399 4294 160 6201 4329 5244\n'
    '728 249 1728 305 2407 239 691 2241 2545 1543 55 2303 1020 753 193 1638\n'
    '260 352 190 877 118 77 1065 1105 1085 1032 71 87 851 56 1161 667\n'
    '1763 464 182 1932 1209 640 545 931 1979 197 1774 174 2074 1800 939 161'
)
# print() call works on both Python 2 and 3 (the bare statement is a
# SyntaxError on Python 3).
print(solve2(puzzle_input))
| 37.967033
| 80
| 0.683647
| 702
| 3,455
| 3.353276
| 0.401709
| 0.011895
| 0.015293
| 0.014444
| 0.872557
| 0.850467
| 0.830926
| 0.830926
| 0.830926
| 0.830926
| 0
| 0.67449
| 0.247757
| 3,455
| 90
| 81
| 38.388889
| 0.231243
| 0.017945
| 0
| 0.657895
| 0
| 0
| 0.701637
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0
| null | null | 0
| 0.013158
| null | null | 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e64e907824de8ffeada773c15b39da729d294891
| 118
|
py
|
Python
|
apps/users/models/__init__.py
|
Sult/daf
|
a4da9e8c96f70577e2490c05e82bdf7d0de1a563
|
[
"MIT"
] | null | null | null |
apps/users/models/__init__.py
|
Sult/daf
|
a4da9e8c96f70577e2490c05e82bdf7d0de1a563
|
[
"MIT"
] | null | null | null |
apps/users/models/__init__.py
|
Sult/daf
|
a4da9e8c96f70577e2490c05e82bdf7d0de1a563
|
[
"MIT"
] | null | null | null |
from apps.users.models.contact import *
from apps.users.models.users import *
import apps.users.models.user_functions
| 29.5
| 39
| 0.822034
| 18
| 118
| 5.333333
| 0.444444
| 0.28125
| 0.46875
| 0.395833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 118
| 3
| 40
| 39.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
053745c38729917a7a4c7d432ad75a5f646ebb67
| 19,239
|
py
|
Python
|
tests/test_listing.py
|
countermeasure/entomb
|
4eba79a66ca8d01ccd634f9d74928cb061b5efdb
|
[
"MIT"
] | null | null | null |
tests/test_listing.py
|
countermeasure/entomb
|
4eba79a66ca8d01ccd634f9d74928cb061b5efdb
|
[
"MIT"
] | null | null | null |
tests/test_listing.py
|
countermeasure/entomb
|
4eba79a66ca8d01ccd634f9d74928cb061b5efdb
|
[
"MIT"
] | null | null | null |
import unittest
import unittest.mock as mock
import entomb.listing as listing
from tests import (
constants,
helpers,
)
class TestListing(unittest.TestCase):
    """Tests for the listing module.

    Each test patches ``builtins.print`` and compares the complete,
    ordered sequence of print calls — including ANSI escape codes and
    every progress-bar frame — against an expected list.  ANSI escapes
    used throughout: ``\\033[?25l`` hides the terminal cursor,
    ``\\033[?25h`` shows it again, and ``\\033[K`` erases to end of line
    (used to clear the progress bar before printing a path).
    """

    def setUp(self):
        """Create temporary directories and files.
        """
        # Fixture tree lives under /tmp/entomb_testing (see the path
        # literals below); exact layout is defined in tests.helpers.
        helpers.set_up()

    def test_list_files(self):
        """Test the list_files function.
        """
        # Test immutable files excluding git.
        # 6 non-git files -> progress advances in 12.5% steps.
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.DIRECTORY_PATH,
                immutable=True,
                include_git=False,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List immutable files"),
            mock.call(),
            mock.call("Immutable files"),
            mock.call("---------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/immutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "█████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 12.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "██████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 25.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "███████████████░░░░░░░░░░░░░░░░░░░░░░░░░ 37.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████░░░░░░░░░░░░░░░░░░░░ 50.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "█████████████████████████░░░░░░░░░░░░░░░ 62.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/subdirectory/immutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "██████████████████████████████░░░░░░░░░░ 75.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "███████████████████████████████████░░░░░ 87.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("6 files were examined"),
            mock.call("2 files are immutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

        # Test immutable files including git.
        # 8 files (git included) -> progress advances in 10% steps.
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.DIRECTORY_PATH,
                immutable=True,
                include_git=True,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List immutable files"),
            mock.call(),
            mock.call("Immutable files"),
            mock.call("---------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/immutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 10.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 20.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 30.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████░░░░░░░░░░░░░░░░░░░░░░░░ 40.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████░░░░░░░░░░░░░░░░░░░░ 50.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████░░░░░░░░░░░░░░░░ 60.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████░░░░░░░░░░░░ 70.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/subdirectory/immutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████░░░░░░░░ 80.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████░░░░ 90.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("8 files were examined"),
            mock.call("2 files are immutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

        # Test mutable files excluding git.
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.DIRECTORY_PATH,
                immutable=False,
                include_git=False,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List mutable files"),
            mock.call(),
            mock.call("Mutable files"),
            mock.call("-------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "█████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 12.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "██████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 25.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/mutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "███████████████░░░░░░░░░░░░░░░░░░░░░░░░░ 37.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████░░░░░░░░░░░░░░░░░░░░ 50.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "█████████████████████████░░░░░░░░░░░░░░░ 62.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "██████████████████████████████░░░░░░░░░░ 75.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "███████████████████████████████████░░░░░ 87.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/subdirectory/mutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("6 files were examined"),
            mock.call("2 files are mutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

        # Test mutable files including git.
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.DIRECTORY_PATH,
                immutable=False,
                include_git=True,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List mutable files"),
            mock.call(),
            mock.call("Mutable files"),
            mock.call("-------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 10.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 20.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/mutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 30.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████░░░░░░░░░░░░░░░░░░░░░░░░ 40.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████░░░░░░░░░░░░░░░░░░░░ 50.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/.git/mutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████░░░░░░░░░░░░░░░░ 60.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/.git/subdirectory/mutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████░░░░░░░░░░░░ 70.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████░░░░░░░░ 80.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████░░░░ 90.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("/tmp/entomb_testing/subdirectory/mutable.txt"),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("8 files were examined"),
            mock.call("4 files are mutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

        # Test mutable files excluding git after making all files immutable.
        helpers.set_file_immutable_attribute(
            constants.GIT_SUBDIRECTORY_MUTABLE_FILE_PATH,
            immutable=True,
        )
        helpers.set_file_immutable_attribute(
            constants.MUTABLE_FILE_PATH,
            immutable=True,
        )
        helpers.set_file_immutable_attribute(
            constants.SUBDIRECTORY_MUTABLE_FILE_PATH,
            immutable=True,
        )
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.DIRECTORY_PATH,
                immutable=False,
                include_git=False,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List mutable files"),
            mock.call(),
            mock.call("Mutable files"),
            mock.call("-------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "█████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 12.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "██████████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 25.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "███████████████░░░░░░░░░░░░░░░░░░░░░░░░░ 37.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████░░░░░░░░░░░░░░░░░░░░ 50.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "█████████████████████████░░░░░░░░░░░░░░░ 62.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "██████████████████████████████░░░░░░░░░░ 75.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "███████████████████████████████████░░░░░ 87.5%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            # NOTE(review): "-" appears to be the placeholder printed when
            # no matching files were found — confirm in entomb.listing.
            mock.call("-"),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("6 files were examined"),
            mock.call("0 files are mutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

        # Test for a non-existent path.
        with self.assertRaises(AssertionError):
            listing.list_files(
                constants.NON_EXISTENT_PATH,
                immutable=False,
                include_git=False,
            )

        # Test a named pipe.
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.NAMED_PIPE_PATH,
                immutable=False,
                include_git=True,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List mutable files"),
            mock.call(),
            mock.call("Mutable files"),
            mock.call("-------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("-"),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("1 files were examined"),
            mock.call("0 files are mutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

        # Test a file which is readable only by root.
        with mock.patch("builtins.print") as mocked_print:
            listing.list_files(
                constants.READABLE_BY_ROOT_FILE_PATH,
                immutable=True,
                include_git=False,
            )
        expected = [
            mock.call("\033[?25l", end=""),
            mock.call("List immutable files"),
            mock.call(),
            mock.call("Immutable files"),
            mock.call("---------------"),
            mock.call("Counting file paths: 0", end="\r"),
            mock.call("\033[K", end=""),
            mock.call("\033[K", end=""),
            mock.call(
                "░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ 0.0%",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call(
                "████████████████████████████████████████",
                end="\r",
            ),
            mock.call("\033[K", end=""),
            mock.call("-"),
            mock.call(),
            mock.call("Summary"),
            mock.call("-------"),
            mock.call("1 files were examined"),
            mock.call("0 files are immutable"),
            mock.call(),
            mock.call("\033[?25h", end=""),
        ]
        self.assertEqual(mocked_print.mock_calls, expected)

    def test__print_the_path(self):
        """Test the _print_the_path function.

        _print_the_path returns True only when the file's immutability
        matches the requested ``immutable`` flag; it asserts on paths
        that do not exist, are directories, or are links.
        """
        self.assertTrue(
            listing._print_the_path(
                constants.IMMUTABLE_FILE_PATH,
                immutable=True,
            ),
        )
        self.assertFalse(
            listing._print_the_path(
                constants.IMMUTABLE_FILE_PATH,
                immutable=False,
            ),
        )
        self.assertFalse(
            listing._print_the_path(
                constants.MUTABLE_FILE_PATH,
                immutable=True,
            ),
        )
        self.assertTrue(
            listing._print_the_path(
                constants.MUTABLE_FILE_PATH,
                immutable=False,
            ),
        )
        with self.assertRaises(AssertionError):
            listing._print_the_path(
                constants.NON_EXISTENT_PATH,
                immutable=True,
            )
        with self.assertRaises(AssertionError):
            listing._print_the_path(constants.DIRECTORY_PATH, immutable=False)
        with self.assertRaises(AssertionError):
            listing._print_the_path(constants.LINK_PATH, immutable=True)

    def tearDown(self):
        """Delete temporary directories and files.
        """
        helpers.tear_down()
| 34.416816
| 78
| 0.342689
| 1,641
| 19,239
| 5.234613
| 0.078001
| 0.217928
| 0.116531
| 0.107567
| 0.943888
| 0.924331
| 0.909197
| 0.902445
| 0.902445
| 0.870896
| 0
| 0.038911
| 0.394875
| 19,239
| 558
| 79
| 34.478495
| 0.516836
| 0.02708
| 0
| 0.907869
| 0
| 0
| 0.240888
| 0.134493
| 0
| 0
| 0
| 0
| 0.028791
| 1
| 0.007678
| false
| 0
| 0.007678
| 0
| 0.017274
| 0.042226
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
054c023ad1c078a248aabd3e51d5f14ba152a83d
| 76
|
py
|
Python
|
plugins/python/container/fn/__init__.py
|
proglang/servercodetest
|
f0acd5940a22be9a72a95494afb308bb3017ea64
|
[
"MIT"
] | null | null | null |
plugins/python/container/fn/__init__.py
|
proglang/servercodetest
|
f0acd5940a22be9a72a95494afb308bb3017ea64
|
[
"MIT"
] | 2
|
2020-06-05T16:24:37.000Z
|
2020-11-15T09:02:48.000Z
|
plugins/python/container/fn/__init__.py
|
proglang/servercodetest
|
f0acd5940a22be9a72a95494afb308bb3017ea64
|
[
"MIT"
] | 1
|
2020-10-22T05:46:52.000Z
|
2020-10-22T05:46:52.000Z
|
from .execute import run as run_execute
from .radon import run as run_radon
| 25.333333
| 39
| 0.815789
| 14
| 76
| 4.285714
| 0.428571
| 0.3
| 0.366667
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 76
| 2
| 40
| 38
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
55aabec6f816c0859d7d0e94a44ea2be3a8f40a0
| 136
|
py
|
Python
|
plugins/carbon_black_cloud/icon_carbon_black_cloud/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/carbon_black_cloud/icon_carbon_black_cloud/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/carbon_black_cloud/icon_carbon_black_cloud/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .get_agent_details.action import GetAgentDetails
from .quarantine.action import Quarantine
| 34
| 53
| 0.830882
| 19
| 136
| 5.842105
| 0.789474
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 136
| 3
| 54
| 45.333333
| 0.932773
| 0.272059
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
55abfa40e77238bdced81ca1c74f32570570d0fa
| 5,554
|
py
|
Python
|
torpido/wavelet/wavelets/db20.py
|
AP-Atul/Torpido
|
a646b4d6de7f2e2c96de4c64ce3113f53e3931c2
|
[
"Unlicense"
] | 21
|
2020-12-23T07:13:10.000Z
|
2022-01-12T10:32:22.000Z
|
wavelet/wavelets/db20.py
|
AP-Atul/wavelets-ext
|
00ced22462c369584ebd32f9b5f357f092de0142
|
[
"MIT"
] | 2
|
2020-12-30T10:45:42.000Z
|
2021-09-25T09:52:00.000Z
|
wavelet/wavelets/db20.py
|
AP-Atul/wavelets-ext
|
00ced22462c369584ebd32f9b5f357f092de0142
|
[
"MIT"
] | 1
|
2021-02-06T21:39:41.000Z
|
2021-02-06T21:39:41.000Z
|
""" Daubechies 20 wavelet """
class Daubechies20:
"""
Properties
----------
asymmetric, orthogonal, bi-orthogonal
All values are from http://wavelets.pybytes.com/wavelet/db20/
"""
__name__ = "Daubechies Wavelet 20"
__motherWaveletLength__ = 40 # length of the mother wavelet
__transformWaveletLength__ = 2 # minimum wavelength of input signal
# decomposition filter
# low-pass
decompositionLowFilter = [
-2.998836489615753e-10,
4.05612705554717e-09,
-1.814843248297622e-08,
2.0143220235374613e-10,
2.633924226266962e-07,
-6.847079596993149e-07,
-1.0119940100181473e-06,
7.241248287663791e-06,
-4.376143862182197e-06,
-3.710586183390615e-05,
6.774280828373048e-05,
0.00010153288973669777,
-0.0003851047486990061,
-5.349759844340453e-05,
0.0013925596193045254,
-0.0008315621728772474,
-0.003581494259744107,
0.00442054238676635,
0.0067216273018096935,
-0.013810526137727442,
-0.008789324924555765,
0.03229429953011916,
0.0058746818113949465,
-0.061722899624668884,
0.005632246857685454,
0.10229171917513397,
-0.024716827337521424,
-0.1554587507060453,
0.039850246458519104,
0.22829105082013823,
-0.016727088308801888,
-0.3267868004335376,
-0.13921208801128787,
0.36150229873889705,
0.6104932389378558,
0.4726961853103315,
0.21994211355113222,
0.06342378045900529,
0.010549394624937735,
0.0007799536136659112
]
# high-pass
decompositionHighFilter = [
-0.0007799536136659112,
0.010549394624937735,
-0.06342378045900529,
0.21994211355113222,
-0.4726961853103315,
0.6104932389378558,
-0.36150229873889705,
-0.13921208801128787,
0.3267868004335376,
-0.016727088308801888,
-0.22829105082013823,
0.039850246458519104,
0.1554587507060453,
-0.024716827337521424,
-0.10229171917513397,
0.005632246857685454,
0.061722899624668884,
0.0058746818113949465,
-0.03229429953011916,
-0.008789324924555765,
0.013810526137727442,
0.0067216273018096935,
-0.00442054238676635,
-0.003581494259744107,
0.0008315621728772474,
0.0013925596193045254,
5.349759844340453e-05,
-0.0003851047486990061,
-0.00010153288973669777,
6.774280828373048e-05,
3.710586183390615e-05,
-4.376143862182197e-06,
-7.241248287663791e-06,
-1.0119940100181473e-06,
6.847079596993149e-07,
2.633924226266962e-07,
-2.0143220235374613e-10,
-1.814843248297622e-08,
-4.05612705554717e-09,
-2.998836489615753e-10
]
# reconstruction filters
# low pass
reconstructionLowFilter = [
0.0007799536136659112,
0.010549394624937735,
0.06342378045900529,
0.21994211355113222,
0.4726961853103315,
0.6104932389378558,
0.36150229873889705,
-0.13921208801128787,
-0.3267868004335376,
-0.016727088308801888,
0.22829105082013823,
0.039850246458519104,
-0.1554587507060453,
-0.024716827337521424,
0.10229171917513397,
0.005632246857685454,
-0.061722899624668884,
0.0058746818113949465,
0.03229429953011916,
-0.008789324924555765,
-0.013810526137727442,
0.0067216273018096935,
0.00442054238676635,
-0.003581494259744107,
-0.0008315621728772474,
0.0013925596193045254,
-5.349759844340453e-05,
-0.0003851047486990061,
0.00010153288973669777,
6.774280828373048e-05,
-3.710586183390615e-05,
-4.376143862182197e-06,
7.241248287663791e-06,
-1.0119940100181473e-06,
-6.847079596993149e-07,
2.633924226266962e-07,
2.0143220235374613e-10,
-1.814843248297622e-08,
4.05612705554717e-09,
-2.998836489615753e-10
]
# high-pass
reconstructionHighFilter = [
-2.998836489615753e-10,
-4.05612705554717e-09,
-1.814843248297622e-08,
-2.0143220235374613e-10,
2.633924226266962e-07,
6.847079596993149e-07,
-1.0119940100181473e-06,
-7.241248287663791e-06,
-4.376143862182197e-06,
3.710586183390615e-05,
6.774280828373048e-05,
-0.00010153288973669777,
-0.0003851047486990061,
5.349759844340453e-05,
0.0013925596193045254,
0.0008315621728772474,
-0.003581494259744107,
-0.00442054238676635,
0.0067216273018096935,
0.013810526137727442,
-0.008789324924555765,
-0.03229429953011916,
0.0058746818113949465,
0.061722899624668884,
0.005632246857685454,
-0.10229171917513397,
-0.024716827337521424,
0.1554587507060453,
0.039850246458519104,
-0.22829105082013823,
-0.016727088308801888,
0.3267868004335376,
-0.13921208801128787,
-0.36150229873889705,
0.6104932389378558,
-0.4726961853103315,
0.21994211355113222,
-0.06342378045900529,
0.010549394624937735,
-0.0007799536136659112
]
| 28.777202
| 72
| 0.618293
| 422
| 5,554
| 8.109005
| 0.225118
| 0.00526
| 0.022209
| 0.024547
| 0.880187
| 0.880187
| 0.880187
| 0.880187
| 0.880187
| 0.880187
| 0
| 0.755076
| 0.290601
| 5,554
| 192
| 73
| 28.927083
| 0.113452
| 0.052575
| 0
| 0.906977
| 0
| 0
| 0.004022
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.046512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e99fd9aa9025c70ad7ae1c9caa8491347234b688
| 1,451
|
py
|
Python
|
ProjectApplication/grant_management/forms/utils.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 5
|
2020-07-29T10:00:11.000Z
|
2022-02-19T11:00:34.000Z
|
ProjectApplication/grant_management/forms/utils.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 471
|
2019-09-20T14:37:28.000Z
|
2022-03-25T14:16:34.000Z
|
ProjectApplication/grant_management/forms/utils.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 5
|
2020-03-15T12:42:47.000Z
|
2022-02-15T18:06:52.000Z
|
from django.urls import reverse
from django.utils.safestring import mark_safe
from project_core.utils.utils import format_date
def _project_date_error(project, subject, comparison, which, the_date):
    """Build a validation error message with an "edit project" link.

    Shared by the three public ``error_*`` helpers below; the rendered
    messages are byte-identical to the previous hand-written versions.

    :param project: object with ``id`` plus the relevant date attribute.
    :param subject: what is being validated, e.g. ``'Due date'``.
    :param comparison: phrase such as ``'after project start'`` or
        ``'before the project end'``.
    :param which: ``'start'`` or ``'end'`` — used in the link text.
    :param the_date: the boundary date shown in the message.
    :return: a ``mark_safe`` HTML string (contains an anchor tag).
    """
    project_basic_information_edit = reverse(
        'logged-grant_management-project-basic-information-update',
        kwargs={'project': project.id},
    )
    return mark_safe(
        f'{subject} should be {comparison} date ({format_date(the_date)}). '
        f'If needed <a href="{project_basic_information_edit}">'
        f'edit the project {which} date</a>'
    )


def error_due_date_too_early(project):
    """Error message for a due date before the project start date."""
    return _project_date_error(
        project, 'Due date', 'after project start', 'start',
        project.start_date,
    )


def error_due_date_too_late(project):
    """Error message for a due date after the project end date."""
    return _project_date_error(
        project, 'Due date', 'before the project end', 'end',
        project.end_date,
    )


def error_received_date_too_early(project):
    """Error message for a received date before the project start date."""
    return _project_date_error(
        project, 'Date received', 'after project start', 'start',
        project.start_date,
    )
| 50.034483
| 110
| 0.684356
| 186
| 1,451
| 5.096774
| 0.225806
| 0.113924
| 0.218354
| 0.170886
| 0.799578
| 0.768987
| 0.768987
| 0.768987
| 0.768987
| 0.768987
| 0
| 0
| 0.219159
| 1,451
| 28
| 111
| 51.821429
| 0.836717
| 0
| 0
| 0.421053
| 0
| 0
| 0.474156
| 0.279807
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.157895
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9c2cb50020f44cd612aa908d4129a7fd8aed005
| 749
|
py
|
Python
|
tests/distancetest.py
|
sasilva1998/uPyArlo
|
f40567c2e92f17485991851a423c868419a8665c
|
[
"MIT"
] | null | null | null |
tests/distancetest.py
|
sasilva1998/uPyArlo
|
f40567c2e92f17485991851a423c868419a8665c
|
[
"MIT"
] | null | null | null |
tests/distancetest.py
|
sasilva1998/uPyArlo
|
f40567c2e92f17485991851a423c868419a8665c
|
[
"MIT"
] | 2
|
2019-12-05T14:10:34.000Z
|
2020-09-16T17:12:04.000Z
|
from arlorobot import *
import utime


def _print_right_counts(robot):
    """Print the right-motor encoder count (message kept in Spanish)."""
    print("Posiciones efectuadas por motor derecho: ", robot.read_right_counts())


bot = ArloRobot()

# Reset the encoders, then report the (now zeroed) right-motor count.
print("Reseteando encoder...")
bot.clear_counts()
_print_right_counts(bot)

# Drive the right motor through a series of distances (the original script
# repeated this move/sleep/print block five times verbatim).  Call order
# and arguments are unchanged: move(0, <distance>, 200), wait 4 s, report.
for _distance in (100, 50, -150, -200, -200):
    bot.move(0, _distance, 200)
    utime.sleep(4)
    _print_right_counts(bot)
| 32.565217
| 75
| 0.751669
| 113
| 749
| 4.867257
| 0.221239
| 0.163636
| 0.272727
| 0.305455
| 0.843636
| 0.843636
| 0.843636
| 0.843636
| 0.843636
| 0.843636
| 0
| 0.057864
| 0.100134
| 749
| 22
| 76
| 34.045455
| 0.75816
| 0
| 0
| 0.619048
| 0
| 0
| 0.356475
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.095238
| 0
| 0.095238
| 0.333333
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e9c31aac34313904ed5dfbf3f439c8ec4b3d3666
| 238
|
py
|
Python
|
nezzle/utils/__init__.py
|
dwgoon/nezzle
|
c69d111ae5e57ee2a7db85e14299c23d3b98a6d5
|
[
"MIT"
] | 2
|
2021-10-06T08:54:02.000Z
|
2021-10-06T16:17:18.000Z
|
nezzle/utils/__init__.py
|
dwgoon/nezzle
|
c69d111ae5e57ee2a7db85e14299c23d3b98a6d5
|
[
"MIT"
] | null | null | null |
nezzle/utils/__init__.py
|
dwgoon/nezzle
|
c69d111ae5e57ee2a7db85e14299c23d3b98a6d5
|
[
"MIT"
] | null | null | null |
from nezzle.utils.etc import *
from nezzle.utils.math import *
from nezzle.utils.meta import *
from nezzle.utils.networkx import *
from nezzle.utils.triggerdict import *
from nezzle.utils.search import *
from nezzle.utils.module import *
| 29.75
| 38
| 0.794118
| 35
| 238
| 5.4
| 0.314286
| 0.37037
| 0.555556
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 238
| 7
| 39
| 34
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e9d9dbbd606d73861ab3f902454068bdcecad8fa
| 11,651
|
py
|
Python
|
tests/autogen/input/ifort/9-1_linux_intel/l-input-test-1.py
|
michaelackermannaiub/py-fortranformat
|
edc530d5edde41f41939c716da8e1ef01fa8a6fe
|
[
"MIT"
] | null | null | null |
tests/autogen/input/ifort/9-1_linux_intel/l-input-test-1.py
|
michaelackermannaiub/py-fortranformat
|
edc530d5edde41f41939c716da8e1ef01fa8a6fe
|
[
"MIT"
] | null | null | null |
tests/autogen/input/ifort/9-1_linux_intel/l-input-test-1.py
|
michaelackermannaiub/py-fortranformat
|
edc530d5edde41f41939c716da8e1ef01fa8a6fe
|
[
"MIT"
] | null | null | null |
import sys
import os
import unittest
from nose.plugins.attrib import attr
# To change this, re-run 'build-unittests.py'
from fortranformat._input import input as _input
from fortranformat._lexer import lexer as _lexer
from fortranformat._parser import parser as _parser
import unittest
class LEditDescriptorBatch1TestCase(unittest.TestCase):
    """Auto-generated input tests for the Fortran 'L' (logical) edit descriptor.

    Each case lexes/parses a format string such as ``(L2)`` and reads ``inp``
    through ``_input``; expected values were recorded from the Intel 9.1
    Linux compiler (see the ``platform`` attr).  A width too small to hold
    the token (e.g. reading '.TRUE.' with ``L1``) must raise ``ValueError``.
    NOTE: generated by build-unittests.py -- edit the generator, not this file.
    """
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_1(self):
        # Width 1 cannot hold '.TRUE.' -> the read must fail.
        inp = '''.TRUE.'''
        fmt = '''(L1)'''
        result = ['''ERR''']
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertRaises(ValueError, _input, eds, rev_eds, inp)
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_2(self):
        inp = '''.FALSE.'''
        fmt = '''(L1)'''
        result = ['''ERR''']
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertRaises(ValueError, _input, eds, rev_eds, inp)
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_3(self):
        # Single-character tokens 'T'/'F' fit any width >= 1.
        inp = '''T'''
        fmt = '''(L1)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_4(self):
        inp = '''F'''
        fmt = '''(L1)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_5(self):
        inp = '''.TRUE.'''
        fmt = '''(L2)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_6(self):
        inp = '''.FALSE.'''
        fmt = '''(L2)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_7(self):
        inp = '''T'''
        fmt = '''(L2)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_8(self):
        inp = '''F'''
        fmt = '''(L2)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_9(self):
        inp = '''.TRUE.'''
        fmt = '''(L3)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_10(self):
        inp = '''.FALSE.'''
        fmt = '''(L3)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_11(self):
        inp = '''T'''
        fmt = '''(L3)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_12(self):
        inp = '''F'''
        fmt = '''(L3)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_13(self):
        inp = '''.TRUE.'''
        fmt = '''(L4)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_14(self):
        inp = '''.FALSE.'''
        fmt = '''(L4)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_15(self):
        inp = '''T'''
        fmt = '''(L4)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_16(self):
        inp = '''F'''
        fmt = '''(L4)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_17(self):
        inp = '''.TRUE.'''
        fmt = '''(L5)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_18(self):
        inp = '''.FALSE.'''
        fmt = '''(L5)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_19(self):
        inp = '''T'''
        fmt = '''(L5)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_20(self):
        inp = '''F'''
        fmt = '''(L5)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_21(self):
        inp = '''.TRUE.'''
        fmt = '''(L6)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_22(self):
        inp = '''.FALSE.'''
        fmt = '''(L6)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_23(self):
        inp = '''T'''
        fmt = '''(L6)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_24(self):
        inp = '''F'''
        fmt = '''(L6)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_25(self):
        inp = '''.TRUE.'''
        fmt = '''(L7)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_26(self):
        inp = '''.FALSE.'''
        fmt = '''(L7)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_27(self):
        inp = '''T'''
        fmt = '''(L7)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_28(self):
        inp = '''F'''
        fmt = '''(L7)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_29(self):
        inp = '''.TRUE.'''
        fmt = '''(L8)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_30(self):
        inp = '''.FALSE.'''
        fmt = '''(L8)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_31(self):
        inp = '''T'''
        fmt = '''(L8)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_32(self):
        inp = '''F'''
        fmt = '''(L8)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_33(self):
        inp = '''.TRUE.'''
        fmt = '''(L9)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_34(self):
        inp = '''.FALSE.'''
        fmt = '''(L9)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_35(self):
        inp = '''T'''
        fmt = '''(L9)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_36(self):
        inp = '''F'''
        fmt = '''(L9)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_37(self):
        inp = '''.TRUE.'''
        fmt = '''(L10)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_38(self):
        inp = '''.FALSE.'''
        fmt = '''(L10)'''
        result = [False]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
    @attr(platform='9-1_linux_intel')
    @attr('input')
    @attr(ed='L')
    def test_l_ed_input_39(self):
        inp = '''T'''
        fmt = '''(L10)'''
        result = [True]
        eds, rev_eds = _parser(_lexer(fmt))
        self.assertEqual(result, _input(eds, rev_eds, inp))
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| 28.556373
| 64
| 0.543816
| 1,534
| 11,651
| 3.841591
| 0.063885
| 0.079416
| 0.119124
| 0.092652
| 0.869167
| 0.869167
| 0.869167
| 0.869167
| 0.869167
| 0.869167
| 0
| 0.022311
| 0.269076
| 11,651
| 408
| 65
| 28.556373
| 0.669681
| 0.003691
| 0
| 0.867403
| 0
| 0
| 0.098311
| 0
| 0
| 0
| 0
| 0
| 0.107735
| 1
| 0.107735
| false
| 0
| 0.022099
| 0
| 0.132597
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
75af4046dd5c59cf9d02310f8cfdd3815d2d22b1
| 15,368
|
py
|
Python
|
pclinfmri/preproc/pilots.py
|
neurospin/preclinicfmri
|
637ba9f5f594ec837515e7364482f66c37ecf03f
|
[
"CECILL-B"
] | null | null | null |
pclinfmri/preproc/pilots.py
|
neurospin/preclinicfmri
|
637ba9f5f594ec837515e7364482f66c37ecf03f
|
[
"CECILL-B"
] | null | null | null |
pclinfmri/preproc/pilots.py
|
neurospin/preclinicfmri
|
637ba9f5f594ec837515e7364482f66c37ecf03f
|
[
"CECILL-B"
] | null | null | null |
#! /usr/bin/env python
##########################################################################
# NSAp - Copyright (C) CEA, 2013
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
# CAPSUL import
# Use the real pilot decorator when capsul is installed; otherwise fall back
# to a no-op decorator so this module stays importable without capsul.
try:
    from capsul.utils.pilot import pilotfunction
except ImportError:
    # Narrowed from a bare 'except:': only a missing/broken capsul install
    # should trigger the fallback, not SystemExit/KeyboardInterrupt or
    # unrelated bugs raised during the import.
    def pilotfunction(func):
        """Fallback no-op decorator used when capsul is not available."""
        return func
@pilotfunction
def pilot_slice_timing():
    """
    Slice Timing Correction
    =======================
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from pclinfmri.preproc.pipeline import SliceTiming
    """
    Study configuration
    -------------------
    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/spmslicetiming"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig", "FSLConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)
    """
    Load the toy dataset
    --------------------
    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:
    * **??**: ??.
    Processing definition
    ---------------------
    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    define the different step of the processings:
    """
    pipeline = SliceTiming()
    # print() call form: consistent with the other print calls in this module
    # and valid under both Python 2 and 3 (was a py2-only print statement).
    print(pipeline.get_input_spec())
    """
    Now we need now to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.select_slicer = "spm"
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]
    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
    """
    Results
    -------
    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
@pilotfunction
def pilot_realignement():
    """
    Realignement
    ============
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from pclinfmri.preproc.pipeline import SpmRealignement
    """
    Study configuration
    -------------------
    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/spmrealignement"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)
    """
    Load the toy dataset
    --------------------
    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:
    * **??**: ??.
    Processing definition
    ---------------------
    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    define the different step of the processings:
    """
    pipeline = SpmRealignement()
    # print() call form: consistent with the other print calls in this module
    # and valid under both Python 2 and 3 (was a py2-only print statement).
    print(pipeline.get_input_spec())
    """
    Now we need now to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.register_to_mean = True
    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
    """
    Results
    -------
    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
@pilotfunction
def pilot_coregistration():
    """
    Coregistration
    ==============
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from pclinfmri.preproc.pipeline import SpmCoregistration
    """
    Study configuration
    -------------------
    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/spmcoregistration"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)
    """
    Load the toy dataset
    --------------------
    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:
    * **??**: ??.
    Processing definition
    ---------------------
    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    define the different step of the processings:
    """
    pipeline = SpmCoregistration()
    # print() call form: consistent with the other print calls in this module
    # and valid under both Python 2 and 3 (was a py2-only print statement).
    print(pipeline.get_input_spec())
    """
    Now we need now to parametrize this pipeline:
    """
    pipeline.reference_image = toy_dataset.mean
    pipeline.moving_image = toy_dataset.anat
    pipeline.fwhm = [7, 7]
    pipeline.jobtype = "estwrite"
    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
    """
    Results
    -------
    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
@pilotfunction
def pilot_normalization():
    """
    Normalization
    =============
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from pclinfmri.preproc.pipeline import SpmNormalization
    """
    Study configuration
    -------------------
    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/spmnormalization"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)
    """
    Load the toy dataset
    --------------------
    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:
    * **??**: ??.
    Processing definition
    ---------------------
    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    define the different step of the processings:
    """
    pipeline = SpmNormalization()
    # print() call form: consistent with the other print calls in this module
    # and valid under both Python 2 and 3 (was a py2-only print statement).
    print(pipeline.get_input_spec())
    """
    Now we need now to parametrize this pipeline:
    """
    pipeline.reference_image = toy_dataset.mean
    pipeline.coregistered_struct_file = toy_dataset.mean
    pipeline.fmri_file = toy_dataset.fmri
    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
    """
    Results
    -------
    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
@pilotfunction
def pilot_template_registration():
    """
    Template Registration
    =====================
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance
    """
    Study configuration
    -------------------
    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/spmtemplateregistration"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)
    """
    Load the toy dataset
    --------------------
    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")
    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:
    * **??**: ??.
    Processing definition
    ---------------------
    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    define the different step of the processings:
    """
    pipeline = get_process_instance(
        "pclinfmri.preproc.pipeline.spm_template_registration.xml")
    # print() call form: consistent with the other print calls in this module
    # and valid under both Python 2 and 3 (was a py2-only print statement).
    print(pipeline.get_input_spec())
    """
    Now we need now to parametrize this pipeline:
    """
    pipeline.template_file = template_dataset.brain
    pipeline.coregistered_struct_file = toy_dataset.mean
    pipeline.fmri_file = toy_dataset.fmri
    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
    """
    Results
    -------
    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
@pilotfunction
def pilot_preproc():
    """
    FMRI preprocessings
    ===================
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance
    """
    Study configuration
    -------------------
    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/fmripreproc"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig", "FSLConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)
    """
    Load the toy dataset
    --------------------
    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")
    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:
    * **??**: ??.
    Processing definition
    ---------------------
    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    define the different step of the processings:
    """
    pipeline = get_process_instance("pclinfmri.preproc.fmri_preproc.xml")
    # print() call form: consistent with the other print calls in this module
    # and valid under both Python 2 and 3 (was a py2-only print statement).
    print(pipeline.get_input_spec())
    """
    Now we need now to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "none"
    pipeline.select_registration = "template"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]
    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
    """
    Results
    -------
    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
if __name__ == "__main__":
    # Uncomment exactly one pilot to run it in isolation.
    #pilot_slice_timing()
    #pilot_realignement()
    #pilot_coregistration()
    #pilot_normalization()
    #pilot_template_registration()
    pilot_preproc()
| 27.740072
| 80
| 0.637949
| 1,779
| 15,368
| 5.349635
| 0.120292
| 0.038878
| 0.02732
| 0.023957
| 0.88841
| 0.88841
| 0.88841
| 0.880845
| 0.880845
| 0.880845
| 0
| 0.004154
| 0.232496
| 15,368
| 553
| 81
| 27.790235
| 0.802713
| 0.029672
| 0
| 0.792553
| 0
| 0
| 0.143824
| 0.06127
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.132979
| null | null | 0.095745
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
75d139e0f4d9738b2a686687ea1ed2045d32d53a
| 1,156
|
py
|
Python
|
File Compiler/Automate.py
|
Elysian01/Mini-Projects
|
1b952304582053fc3ff559477494c2a99c921c4b
|
[
"MIT"
] | 1
|
2020-12-10T06:24:41.000Z
|
2020-12-10T06:24:41.000Z
|
File Compiler/Automate.py
|
Elysian01/Mini-Projects
|
1b952304582053fc3ff559477494c2a99c921c4b
|
[
"MIT"
] | null | null | null |
File Compiler/Automate.py
|
Elysian01/Mini-Projects
|
1b952304582053fc3ff559477494c2a99c921c4b
|
[
"MIT"
] | null | null | null |
# NOTE(review): this file contained an unresolved git merge conflict
# (<<<<<<< / ======= / >>>>>>> markers) whose two sides were byte-identical;
# the markers have been removed and a single copy kept so the file parses.
import os
import subprocess

# Best effort: create a FileComipler folder on the desktop and xcopy the
# compiled program and its lib folder into it.  Only meaningful on Windows;
# any failure (folders already exist, xcopy unavailable, ...) is ignored.
try:
    if os.name == 'nt':  # for Windows
        FileName = os.path.expanduser("~/Desktop") + "\\FileComipler"
        FileName2 = FileName + "\\lib"
        os.mkdir(FileName)
        os.mkdir(FileName2)
        # NOTE(review): destination is hard-coded to user 'abhis' while the
        # source folder above uses expanduser -- presumably these should
        # agree; confirm before shipping.
        command1 = "xcopy lib C:\\Users\\abhis\\Desktop\\FileComipler\\lib /s"
        command2 = "xcopy FileCompiler.exe C:\\Users\\abhis\\Desktop\\FileComipler /s"
        subprocess.check_output(command1, shell=True)
        subprocess.check_output(command2, shell=True)
except Exception:
    # Narrowed from a bare 'except:': keep the best-effort behaviour but do
    # not swallow SystemExit/KeyboardInterrupt.
    pass
| 30.421053
| 86
| 0.622837
| 122
| 1,156
| 5.868852
| 0.278689
| 0.159218
| 0.061453
| 0.100559
| 0.938547
| 0.938547
| 0.938547
| 0.938547
| 0.938547
| 0.938547
| 0
| 0.04387
| 0.230969
| 1,156
| 37
| 87
| 31.243243
| 0.76153
| 0.019896
| 0
| 0.903226
| 0
| 0
| 0.269027
| 0.146903
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.064516
| 0.129032
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
f95e972f37d013ba167b6015341ecf7bc9e60e44
| 124
|
py
|
Python
|
synth/__init__.py
|
billbrod/synthesis-examples
|
891c25c2396f556a93e2bd0dc12ca53731a434b9
|
[
"MIT"
] | null | null | null |
synth/__init__.py
|
billbrod/synthesis-examples
|
891c25c2396f556a93e2bd0dc12ca53731a434b9
|
[
"MIT"
] | null | null | null |
synth/__init__.py
|
billbrod/synthesis-examples
|
891c25c2396f556a93e2bd0dc12ca53731a434b9
|
[
"MIT"
] | null | null | null |
from . import create_metamers
from . import create_mad_images
from . import utils
from . import style
from . import figures
| 20.666667
| 31
| 0.798387
| 18
| 124
| 5.333333
| 0.5
| 0.520833
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 124
| 5
| 32
| 24.8
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f9b55dc1d657760843951802ed28d01f053583c0
| 281
|
py
|
Python
|
Semana7/matriz8x8.py
|
BrayanTorres2/Algoritmosyprogramaci-n-Grupo2Ciclo4-
|
ad64b5a3d3d129efaa297617748a74872522d7a1
|
[
"MIT"
] | 4
|
2021-09-27T17:20:56.000Z
|
2021-09-28T23:12:49.000Z
|
Semana7/matriz8x8.py
|
BrayanTorres2/Algoritmosyprogramaci-n-Grupo2Ciclo4-
|
ad64b5a3d3d129efaa297617748a74872522d7a1
|
[
"MIT"
] | null | null | null |
Semana7/matriz8x8.py
|
BrayanTorres2/Algoritmosyprogramaci-n-Grupo2Ciclo4-
|
ad64b5a3d3d129efaa297617748a74872522d7a1
|
[
"MIT"
] | 13
|
2021-07-26T16:38:57.000Z
|
2021-11-19T02:28:12.000Z
|
# 8x8 chess-board pattern: squares alternate 0/1, starting with 0 at (0, 0).
ajedrez = [[(fila + columna) % 2 for columna in range(8)] for fila in range(8)]
print(ajedrez[0][7])
# Total number of squares marked with a 1 (exactly half the board).
c = sum(fila.count(1) for fila in ajedrez)
print(c)
| 35.125
| 154
| 0.47331
| 92
| 281
| 1.445652
| 0.141304
| 0.421053
| 0.541353
| 0.601504
| 0.481203
| 0.481203
| 0.481203
| 0.481203
| 0.481203
| 0.481203
| 0
| 0.300412
| 0.135231
| 281
| 8
| 155
| 35.125
| 0.246914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9e408994519f3ad3ec653f48f0705a34e618a02
| 894
|
py
|
Python
|
wrappers/arlexecute/visibility/operations.py
|
ska-telescope/algorithm-reference-library
|
1b2c8d6079249202864abf8c60cdea40f0f123cb
|
[
"Apache-2.0"
] | 22
|
2016-12-14T11:20:07.000Z
|
2021-08-13T15:23:41.000Z
|
wrappers/arlexecute/visibility/operations.py
|
ska-telescope/algorithm-reference-library
|
1b2c8d6079249202864abf8c60cdea40f0f123cb
|
[
"Apache-2.0"
] | 30
|
2017-06-27T09:15:38.000Z
|
2020-09-11T18:16:37.000Z
|
wrappers/arlexecute/visibility/operations.py
|
SKA-ScienceDataProcessor/algorithm-reference-library
|
1b2c8d6079249202864abf8c60cdea40f0f123cb
|
[
"Apache-2.0"
] | 20
|
2017-07-02T03:45:49.000Z
|
2019-12-11T17:19:01.000Z
|
""" Visibility operations
"""
from processing_components.visibility.operations import append_visibility
from processing_components.visibility.operations import sort_visibility
from processing_components.visibility.operations import concatenate_visibility
from processing_components.visibility.operations import sum_visibility
from processing_components.visibility.operations import subtract_visibility
from processing_components.visibility.operations import qa_visibility
from processing_components.visibility.operations import remove_continuum_blockvisibility
from processing_components.visibility.operations import divide_visibility
from processing_components.visibility.operations import integrate_visibility_by_channel
from processing_components.visibility.operations import convert_visibility_to_stokes
from processing_components.visibility.operations import convert_visibility_to_stokesI
| 63.857143
| 88
| 0.914989
| 97
| 894
| 8.134021
| 0.237113
| 0.304183
| 0.334601
| 0.474018
| 0.833967
| 0.833967
| 0.707224
| 0.174905
| 0.174905
| 0
| 0
| 0
| 0.053691
| 894
| 14
| 89
| 63.857143
| 0.932624
| 0.02349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
fb1ca6d817d02674ccfb0da1016d4d96161faab3
| 7,125
|
py
|
Python
|
2015/Day1/Part1.py
|
Grohiik/Advent_of_code
|
c9fc0f1b276a8833b8af6d1ad707e2fd545563e1
|
[
"MIT"
] | null | null | null |
2015/Day1/Part1.py
|
Grohiik/Advent_of_code
|
c9fc0f1b276a8833b8af6d1ad707e2fd545563e1
|
[
"MIT"
] | null | null | null |
2015/Day1/Part1.py
|
Grohiik/Advent_of_code
|
c9fc0f1b276a8833b8af6d1ad707e2fd545563e1
|
[
"MIT"
] | null | null | null |
input = "((((()(()(((((((()))(((()((((()())(())()(((()((((((()((()(()(((()(()((())))()((()()())))))))))()((((((())((()))(((((()(((((((((()()))((()(())()((())((()(()))((()))()))()(((((()(((()()))()())((()((((())()())()((((())()(()(()(((()(())(()(())(((((((())()()(((())(()(()(()(())))(()((((())((()))(((()(()()(((((()()(()(((()(((((())()))()((()(()))()((()((((())((((())(()(((())()()(()()()()()(())((((())((())(()()))()((((())))((((()())()((((())((()())((())(())(((((()((((()(((()((((())(()(((()()))()))((((((()((())()())))(((()(()))(()()(()(((()(()))((()()()())((()()()(((())())()())())())((()))(()(()))(((((()(()(())((()(())(())()((((()())()))((((())(())((())())((((()(((())(())((()()((((()((((((()(())()()(()(()()((((()))(())()())()))(())))(())))())()()(())(()))()((()(()(())()()))(()())))))(()))(()()))(())(((((()(()(()()((())()())))))((())())((())(()(())((()))(())(((()((((((((()()()(()))()()(((()))()((()()(())(())())()(()(())))(((((()(())(())(()))))())()))(()))()(()(((((((()((((())))())())())())()((((((((((((((()()((((((()()()())())()())())())(())(())))())((()())((()(()))))))()))))))))))))))))())((())((())()()))))))(((()((()(()()))((())(()()))()()())))(())))))))(()(((())))())()())))()()(())()))()(()))())((()()))))(()))))()))(()()(())))))))()(((()))))()(()))(())())))))()))((()))((()))())(())))))))))((((())()))()))()))())(())()()(())))())))(()())()))((()()(())))(())((((((()(())((()(((()(()()(())))()))))))()))()(()((()))()(()))(()(((())((((())())(())(()))))))))())))))))())())))))())))))()()(((())()(()))))))))())))))(())()()()))()))()))(()(())()()())())))))))())()(()(()))))()()()))))())(()))))()()))))()())))))(((())()()))(()))))))))))()()))))()()()))))(()())())()()())()(()))))()(()))(())))))))(((((())(())())()()))()()))(())))))()(()))))(())(()()))()())()))()))()))()))))())()()))())())))(()))(()))))))())()(((())()))))))))()))()())))())))())))()))))))))))()()))(()()))))))(())()(()))))())(()))))(()))))(()())))))())())()()))))())()))))))))(()))))()))))))()(()())))))))()))())))())))
())))())))))))())(()()))))))(()())())))()())()))))))))))))))())))()(())))()))())()()(())(()()))(())))())()())(()(()(()))))())))))))))))())(()))()))()))))(())()())()())))))))))))()()))))))))))))())())))))(()())))))))))))())(())))()))))))))())())(()))()))(())))()))()()(())()))))))()((((())()))())())))))()))()))))((()())()))))())))(())))))))))))))))))()))))()()())()))()()))))())()))((()())))())))(()))(()())))))))()))()))))(())))))))(())))))())()()(()))())()))()()))))())()()))))())()))())))))))(()))))()())()))))))))(()))())))(()))()))))(())()))())())(())())())))))))((((())))))()))()))()())()(())))()))()))()())(()())()()(()())()))))())())))))(()))()))))())(()()(())))))(())()()((())())))))(())(())))))))())))))))))()(())))))))()())())())()(()))))))))(()))))))))())()()))()(()))))))()))))))())))))))(())))()()(())()())))))(((())))()((())()))())))(()()))())(())())))()(((()())))))()(()()())))()()(()()(()()))())()(()()()))())()()))()())(()))))())))))())))(())()()))))(()))))(())(()))(())))))()()))()))))())()))()()(())())))((()))())()))))))()()))))((()(()))))()()))))))())))))())(()((()())))))))))))()())())))()))(()))))))(()))(())()())))(()))))))))())()()()()))))(()())))))))((())))()))(()))(())(())()())()))))))))(())))())))(()))()()))(()()))(()))())))()(())))())((()((()(())))((())))()))))((((())())()())))(())))()))))))())(()()((())))())()(()())))))(()())()))())))))))((())())))))))(()(()))())()()(()()(((()(((()())))))()))))))()(())(()()((()()(())()()))())()())()))()())())())))))))(((())))))))()()))))))(((())()))(()()))(()()))))(()(()()((((())()())((()()))))(()(())))))()((()()()())()()((()((()()))(()))(((()()()))(((())))()(((())()))))))((()(())())))(()())(((((()(()))(()((()))(()())()))))(()(()))()(()))(())(((())(()()))))()()))(((()))))(()()()()))())))((()()()(())()))()))))()()))()))))))((((((()()()))))())((()()(((()))))(()(())(()()())())())))()(((()()))(())((())))(()))(()()()())((())())())(()))))()))()((()(())()(()()(())(()))(())()))(())(()))))(())(())())(()()(()((()()((())))((()))()
((())))(((()()()()((((()))(()()))()()()(((())((())())(()()(()()()))()((())(())()))())(((()()(())))()((()()())()())(()(())())(((())(())())((())(())()(((()()))(())))((())(()())())(())((()()()((((((())))((()(((((())()))()))(())(()()))()))(())()()))(())((()()())()()(()))())()((())))()((()()())((((()())((())())())((()((()))()))((())((()()(()((()()(((())(()()))))((()((())()(((())(()((())())((())(()((((((())())()(()())()(())(((())((((((()(())(()((()()()((()()(()()()())))()()(((((()()))()((((((()))()(()(()(()(((()())((()))())()((()))(())))()))()()))())()()))())((((())(()(()))(((((((())(((()(((((()(((()()((((())(((())())))(()()()(()(()))()))((((((()))((()(((()(())((()((((()((((((())(((((())))(((()(()))))(((()(((())()((())(()((()))(((()()(((())((((()(()(((((()))(((()(((((((()(()()()(()(()(()()())(())(((((()(())())()())(()(()(()))()(()()()())(()()(()((()))()((())())()(()))((())(()))()(()))()(((()(()(()((((((()()()()())()(((((()()(((()()()((()(((((()))((((((((()()()(((((()))))))(()()()(())(()))(()()))))(())()))(((((()(((((()()(()(()())(((()))((((()((()(()(()((()(()((())))()(((()((()))((()))(((((((((()((()((()(())))()((((()((()()))((())(((()(((((()()(()(()()((()(()()()(((((((())())()())))))((((()()(()))()))(()((())()(()(((((((((()()(((()(()())(()((()())((())())((((()(((()(((()((((()((()((((()(()((((((())((((((((((((()()(()()((((((((((((((()((()()))()((((((((((((())((((()(()())((()(()(()))()(((((()()(((()()))()())(())((()(((((()((())(((((()((()(((((()))()()((((())()((((())(((((((((()(())(()(())))())(()((())(((())(())(())())(()(()(())()()((()((())()(((()(((((()(())))()(((()((())))((()()()(((()(((()((()(()(())(()((()())(()(()(((()(((((((((())(()((((()()))(()((((()()()()(((()((((((((()(()()((((((()(()()(()((()((((((((((()()(((((((()())(())))(((()()))(((((()((()()())(()()((((())((()((((()))))(())((()(()()(((()(()(((()((((()(((((()))())())(()((())()))(((()())((())((())((((()((()((((((())(()((((()()))((((((())()(()))((()(((())((((((((((()()(((((()(((((()((()()()((((())))(()))()((()(())
()()((()((((((((((()((())(())(((((()(()(()()))((((()((((()()((()(((()(((((((((()(()((()((()))((((((()(((())()()((()(((((((()())))()()(()((()((()()(((()(()()()()((((()((())((((()(((((((((()(((()()(((()(()(((()(((()((())()(()((()(()(()(()))()(((()))(()((((()((())((((())((((((())(()))(()((((())((()(()((((((((()()((((((()(()(()()()(())((()((()()(((()(((((((()()((()(((((((()))(((((()(((()(()()()(()(((()((()()((())(()(((((((((()(()((()((((((()()((())()))(((((()((())()())()(((((((((((()))((((()()()()())(()()(()(()()))()))(()))(()(((()()))())(()(()))()()((())(()())()())()(()))()))(()()(()((((((())((()(((((((((((()(())()((()(()((()((()(()((()((((((((((()()())((())()(())))((())()())()(((((()(()())((((()((()(())(()))(((())()((()))(((((())(()))()()(()))(((())((((()((((()(())))(((((((()))))())()())(())((())()(()()((()(()))()(()()(()()((()())((())((()()))((((()))()()))(()()(())()()(((((()(())((()((((()))()))(()())())(((()()(()()))(())))))(()))((())(((((()((((()))()((((()))()((())(((())))(((()())))((()(()()(("
# Advent of Code 2015, day 1 part 1: "(" means up one floor, any other
# character (here always ")") means down one; report the final floor.
# NOTE(review): `input` is the puzzle string assigned above, shadowing the
# builtin of the same name.
floor = sum(1 if symbol == "(" else -1 for symbol in input)
print(floor)
| 593.75
| 7,010
| 0.008
| 16
| 7,125
| 3.5625
| 0.625
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000424
| 0.006737
| 7,125
| 12
| 7,011
| 593.75
| 0.00763
| 0
| 0
| 0
| 0
| 0
| 0.982459
| 0.982318
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34b481f4fb64a4a63a5736260047c18768167db8
| 5,762
|
py
|
Python
|
hyperion/model/tests/test_amr_checks.py
|
bluescarni/hyperion
|
4a0d33fcbd3943b5bcfbd318f11e199d2498956d
|
[
"BSD-2-Clause"
] | 2
|
2015-05-14T17:26:16.000Z
|
2019-03-13T17:33:18.000Z
|
hyperion/model/tests/test_amr_checks.py
|
bluescarni/hyperion
|
4a0d33fcbd3943b5bcfbd318f11e199d2498956d
|
[
"BSD-2-Clause"
] | null | null | null |
hyperion/model/tests/test_amr_checks.py
|
bluescarni/hyperion
|
4a0d33fcbd3943b5bcfbd318f11e199d2498956d
|
[
"BSD-2-Clause"
] | null | null | null |
from astropy.tests.helper import pytest
import numpy as np
from .. import Model
from .test_helpers import random_id, get_test_dust
from ...grid import AMRGrid
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_differing_widths(tmpdir, direction):
    """Two grids on the same level with differing cell widths abort the run.

    Builds two 4^3 grids covering [-10, 10]^3, then stretches grid2 along one
    axis so its cell width (5.025) no longer matches grid1's (5.0), and checks
    that the Fortran run exits with the expected error in the log.
    """
    # Widths of grids in same level are not the same
    dust = get_test_dust()

    amr = AMRGrid()
    level1 = amr.add_level()

    grid1 = level1.add_grid()
    grid1.nx = grid1.ny = grid1.nz = 4
    grid1.xmin = grid1.ymin = grid1.zmin = -10.
    grid1.xmax = grid1.ymax = grid1.zmax = +10.
    grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10

    grid2 = level1.add_grid()
    grid2.nx = grid2.ny = grid2.nz = 4
    grid2.xmin = grid2.ymin = grid2.zmin = -10.
    grid2.xmax = grid2.ymax = grid2.zmax = +10.
    grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10

    # Stretch grid2 along one axis only, changing its cell width.
    setattr(grid2, direction + 'min', -10.1)

    m = Model()
    m.set_grid(amr)
    m.add_density_grid(amr['density'], dust)
    m.set_n_photons(initial=1, imaging=0)
    m.write(tmpdir.join(random_id()).strpath)

    log_file = tmpdir.join(random_id()).strpath
    with pytest.raises(SystemExit) as exc:
        m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
    assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
                                'complete'
    assert ('Grids 1 and 2 in level 1 have differing cell widths in the %s \n direction ( 5.0000E+00 and 5.0250E+00 respectively)' % direction) in open(log_file).read()
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_misaligned_grids_same_level(tmpdir, direction):
    """Grids on the same level whose edges are offset by a non-integer number
    of cells abort the run.

    grid2 keeps the same cell width as grid1 (both span 20 units with 4 cells)
    but is shifted by 0.1 along one axis, so the grid edges are no longer
    separated by a whole number of cells.
    """
    # Grids in the same level are offset by a non-integer number of cells
    dust = get_test_dust()

    amr = AMRGrid()
    level1 = amr.add_level()

    grid1 = level1.add_grid()
    grid1.nx = grid1.ny = grid1.nz = 4
    grid1.xmin = grid1.ymin = grid1.zmin = -10.
    grid1.xmax = grid1.ymax = grid1.zmax = +10.
    grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10

    grid2 = level1.add_grid()
    grid2.nx = grid2.ny = grid2.nz = 4
    grid2.xmin = grid2.ymin = grid2.zmin = -10.
    grid2.xmax = grid2.ymax = grid2.zmax = +10.
    grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10

    # Shift grid2 by 0.1 along one axis, preserving its extent (and therefore
    # its cell width) so only the alignment check can fire.
    setattr(grid2, direction + 'min', -10.1)
    setattr(grid2, direction + 'max', 9.9)

    m = Model()
    m.set_grid(amr)
    m.add_density_grid(amr['density'], dust)
    m.set_n_photons(initial=1, imaging=0)
    m.write(tmpdir.join(random_id()).strpath)

    log_file = tmpdir.join(random_id()).strpath
    with pytest.raises(SystemExit) as exc:
        m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
    assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
                                'complete'
    assert ('Grids 1 and 2 in level 1 have edges that are not separated by \n an integer number of cells in the %s direction' % direction) in open(log_file).read()
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_non_integer_refinement(tmpdir, direction):
    """A non-integer refinement factor between levels aborts the run.

    Level 2's grid is stretched along one axis (extent 11 instead of 10), so
    the ratio between level-1 and level-2 cell widths becomes 5.0/2.75 = 1.818
    rather than an integer.
    """
    # Refinement factor between levels 1 and 2 is not an integer
    dust = get_test_dust()

    amr = AMRGrid()
    level1 = amr.add_level()

    grid1 = level1.add_grid()
    grid1.nx = grid1.ny = grid1.nz = 4
    grid1.xmin = grid1.ymin = grid1.zmin = -10.
    grid1.xmax = grid1.ymax = grid1.zmax = +10.
    grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10

    level2 = amr.add_level()
    grid2 = level2.add_grid()
    grid2.nx = grid2.ny = grid2.nz = 4
    grid2.xmin = grid2.ymin = grid2.zmin = -5.
    grid2.xmax = grid2.ymax = grid2.zmax = +5.
    grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10

    # Stretch the level-2 grid along one axis, breaking the 2x refinement.
    setattr(grid2, direction + 'min', -6.)

    m = Model()
    m.set_grid(amr)
    m.add_density_grid(amr['density'], dust)
    m.set_n_photons(initial=1, imaging=0)
    m.write(tmpdir.join(random_id()).strpath)

    log_file = tmpdir.join(random_id()).strpath
    with pytest.raises(SystemExit) as exc:
        m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
    assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
                                'complete'
    assert ('Refinement factor in the %s direction between level 1 and \n level 2 is not an integer (1.818)' % direction) in open(log_file).read()
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_not_aligned_across_levels(tmpdir, direction):
    """A level-2 grid whose edges do not line up with level-1 cells aborts
    the run.

    Level 2's grid keeps its extent (shifted from [-5, 5] to [-6, 4]) so the
    refinement factor stays an integer, but its edges no longer coincide with
    level-1 cell boundaries (which fall on multiples of 5).
    """
    # Level-2 grid is not aligned with the cells of level 1
    dust = get_test_dust()

    amr = AMRGrid()
    level1 = amr.add_level()

    grid1 = level1.add_grid()
    grid1.nx = grid1.ny = grid1.nz = 4
    grid1.xmin = grid1.ymin = grid1.zmin = -10.
    grid1.xmax = grid1.ymax = grid1.zmax = +10.
    grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10

    level2 = amr.add_level()
    grid2 = level2.add_grid()
    grid2.nx = grid2.ny = grid2.nz = 4
    grid2.xmin = grid2.ymin = grid2.zmin = -5.
    grid2.xmax = grid2.ymax = grid2.zmax = +5.
    grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10

    # Shift the level-2 grid by 1 along one axis, preserving its extent so
    # only the cross-level alignment check can fire.
    setattr(grid2, direction + 'min', -6.)
    setattr(grid2, direction + 'max', 4.)

    m = Model()
    m.set_grid(amr)
    m.add_density_grid(amr['density'], dust)
    m.set_n_photons(initial=1, imaging=0)
    m.write(tmpdir.join(random_id()).strpath)

    log_file = tmpdir.join(random_id()).strpath
    with pytest.raises(SystemExit) as exc:
        m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
    assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
                                'complete'
    assert ('Grid 1 in level 2 is not aligned with cells in level 1 in the \n %s direction' % direction) in open(log_file).read()
| 34.094675
| 179
| 0.633461
| 861
| 5,762
| 4.138211
| 0.138211
| 0.029189
| 0.053887
| 0.060623
| 0.860511
| 0.860511
| 0.860511
| 0.860511
| 0.8476
| 0.8476
| 0
| 0.050847
| 0.221798
| 5,762
| 168
| 180
| 34.297619
| 0.743756
| 0.032454
| 0
| 0.871795
| 0
| 0.025641
| 0.139497
| 0
| 0
| 0
| 0
| 0
| 0.068376
| 1
| 0.034188
| false
| 0
| 0.042735
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b62d5280c861399552f01c29019d6e2fc59db3d
| 135
|
py
|
Python
|
100.projects/links/www/router.py
|
wangpin34/learn-python
|
e8a17501dbda3ba6e7ca1eccbac89536409b3d79
|
[
"MIT"
] | null | null | null |
100.projects/links/www/router.py
|
wangpin34/learn-python
|
e8a17501dbda3ba6e7ca1eccbac89536409b3d79
|
[
"MIT"
] | null | null | null |
100.projects/links/www/router.py
|
wangpin34/learn-python
|
e8a17501dbda3ba6e7ca1eccbac89536409b3d79
|
[
"MIT"
] | null | null | null |
from flask import request,render_template
@app.route('/', methods=['GET', 'POST'])
def home():
    """Render the home page template for both GET and POST requests."""
    # NOTE(review): `app` is neither defined nor imported in this module, so
    # importing it as-is raises NameError. Presumably `app` is expected to be
    # in scope when this file is loaded (e.g. exec'd by the entry point) --
    # confirm against the application bootstrap. Also, `request` is imported
    # at the top of the file but unused here.
    return render_template('home.html')
| 27
| 41
| 0.703704
| 18
| 135
| 5.166667
| 0.833333
| 0.301075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 135
| 5
| 42
| 27
| 0.775
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9b7572140a6af887fdd47ad35682b2a813b94293
| 564
|
py
|
Python
|
omoide/migration_engine/operations/unite/__init__.py
|
TaXeH/Omoide
|
8ccc9d47e802433bb2de21ff930e6630658cd5e3
|
[
"MIT"
] | null | null | null |
omoide/migration_engine/operations/unite/__init__.py
|
TaXeH/Omoide
|
8ccc9d47e802433bb2de21ff930e6630658cd5e3
|
[
"MIT"
] | null | null | null |
omoide/migration_engine/operations/unite/__init__.py
|
TaXeH/Omoide
|
8ccc9d47e802433bb2de21ff930e6630658cd5e3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from omoide.migration_engine.operations.unite import identity
from omoide.migration_engine.operations.unite import persistent
from omoide.migration_engine.operations.unite import raw_entities
from omoide.migration_engine.operations.unite import preprocessing
from omoide.migration_engine.operations.unite.class_identity_master import *
from omoide.migration_engine.operations.unite.class_router import *
from omoide.migration_engine.operations.unite.class_uuid_master import *
from omoide.migration_engine.operations.unite.unite import act
| 56.4
| 76
| 0.863475
| 74
| 564
| 6.391892
| 0.27027
| 0.169133
| 0.321353
| 0.422833
| 0.82241
| 0.82241
| 0.82241
| 0.338266
| 0
| 0
| 0
| 0.001898
| 0.065603
| 564
| 9
| 77
| 62.666667
| 0.895636
| 0.037234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
9b99c62aa2fb3bf0ae8a2b025c558bd5145a8c0d
| 176
|
py
|
Python
|
laia/decoders/__init__.py
|
eivtho/PyLaia
|
2a7a6e2eeb9b5af68c0faed0c564b02063e72be0
|
[
"MIT"
] | 89
|
2018-12-12T23:06:26.000Z
|
2022-02-03T09:04:21.000Z
|
laia/decoders/__init__.py
|
eivtho/PyLaia
|
2a7a6e2eeb9b5af68c0faed0c564b02063e72be0
|
[
"MIT"
] | 30
|
2019-03-06T14:29:48.000Z
|
2022-03-16T14:53:43.000Z
|
laia/decoders/__init__.py
|
jpuigcerver/PyLaia
|
1b2e864247f1bfb8d95ac1910de9c52df71c017a
|
[
"MIT"
] | 26
|
2018-12-13T17:48:19.000Z
|
2022-02-28T12:52:29.000Z
|
from laia.decoders.ctc_alignment import ctc_alignment
from laia.decoders.ctc_greedy_decoder import CTCGreedyDecoder
from laia.decoders.ctc_nbest_decoder import CTCNBestDecoder
| 44
| 61
| 0.897727
| 24
| 176
| 6.333333
| 0.458333
| 0.157895
| 0.315789
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 176
| 3
| 62
| 58.666667
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
32cc9564c7bb05da5b7c7aedc80965bffca20e71
| 31,749
|
py
|
Python
|
theano_models/dA/AutoEncoder.py
|
lzamparo/SdA_reduce
|
1dc11f23b395f316df5f41448542d2d5c2e619ff
|
[
"BSD-3-Clause"
] | null | null | null |
theano_models/dA/AutoEncoder.py
|
lzamparo/SdA_reduce
|
1dc11f23b395f316df5f41448542d2d5c2e619ff
|
[
"BSD-3-Clause"
] | null | null | null |
theano_models/dA/AutoEncoder.py
|
lzamparo/SdA_reduce
|
1dc11f23b395f316df5f41448542d2d5c2e619ff
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import theano.tensor as T
from theano import shared, config
from theano.tensor.shared_randomstreams import RandomStreams
class AutoEncoder(object):
    """Base class for a de-noising AutoEncoder (dA), after the Theano tutorials.

    Holds the shared weight matrix ``W`` (decoder weights tied as ``W.T``),
    the hidden and visible bias vectors, and the symbolic input. Subclasses
    implement the encoder/decoder and cost (``get_hidden_values``,
    ``get_reconstructed_input``, ``get_cost_updates``).
    """

    def __init__(self, numpy_rng=None, theano_rng=None, input=None, n_visible=784, n_hidden=500,
                 W=None, bhid=None, bvis=None, W_name=None, bvis_name=None, bhid_name=None, sparse_init=-1):
        """A de-noising AutoEncoder class from theano tutorials.

        :type numpy_rng: numpy.random.RandomState
        :param numpy_rng: number random generator used to generate weights

        :type theano_rng: theano.tensor.shared_randomstreams.RandomStreams
        :param theano_rng: Theano random generator; if None is given one is
                           generated based on a seed drawn from `rng`

        :type input: theano.tensor.TensorType
        :param input: a symbolic description of the input or None for
                      standalone dA

        :type n_visible: int
        :param n_visible: number of visible units

        :type n_hidden: int
        :param n_hidden: number of hidden units

        :type W: theano.tensor.TensorType
        :param W: Theano variable pointing to a set of weights that should be
                  shared Theano variables connecting the visible and hidden
                  layers.

        :type bhid: theano.tensor.TensorType
        :param bhid: Theano variable pointing to a set of biases values (for
                     hidden units).

        :type bvis: theano.tensor.TensorType
        :param bvis: Theano variable pointing to a set of biases values (for
                     visible units).

        :type W_name: string
        :param W_name: name to be assigned to the W matrix.

        :type bvis_name: string
        :param bvis_name: name to be assigned to the b vector for the visible
                          units.

        :type bhid_name: string
        :param bhid_name: name to be assigned to the b vector for the hidden
                          units.

        :type sparse_init: int
        :param sparse_init: Initialize the weight matrices using Martens
                            sparse initialization (Martens ICML 2010); >0
                            specifies the number of units in the layer that
                            have initial weights drawn from a N(0,1). Use -1
                            for Glorot & Bengio (i.e. dense) init.
        """
        self.n_visible = n_visible
        self.n_hidden = n_hidden

        if numpy_rng is None:
            raise AssertionError("numpy_rng cannot be unspecified in AutoEncoder.__init__")

        # create a Theano random generator that gives symbolic random values
        if theano_rng is None:
            theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
        self.theano_rng = theano_rng

        if W_name is None:
            W_name = 'W'
        if bvis_name is None:
            bvis_name = 'bvis'
        if bhid_name is None:
            bhid_name = 'bhid'

        if not W:
            if sparse_init > 0:
                initial_W = self.sparse_w(n_visible, n_hidden, sparse_init)
            else:
                initial_W = self.dense_w(n_visible, n_hidden, numpy_rng)
            W = shared(value=initial_W, name=W_name)
        self.W = W

        # Tie the weights of the decoder to the encoder
        self.W_prime = self.W.T

        # Bias of the visible units
        if not bvis:
            bvis = shared(value=np.zeros(n_visible,
                                         dtype=config.floatX), name=bvis_name)
        self.b_prime = bvis

        # Bias of the hidden units
        if not bhid:
            bhid = shared(value=np.zeros(n_hidden,
                                         dtype=config.floatX), name=bhid_name)
        self.b = bhid

        if input is None:
            self.x = T.dmatrix(name='input')
        else:
            self.x = input

        self.params = [self.W, self.b, self.b_prime]

    def get_corrupted_input(self, input, corruption_level):
        """This function keeps ``1-corruption_level`` entries of the inputs the
        same and zero-out randomly selected subset of size ``coruption_level``

        Note : first argument of theano.rng.binomial is the shape(size) of
               random numbers that it should produce
               second argument is the number of trials
               third argument is the probability of success of any trial

               this will produce an array of 0s and 1s where 1 has a
               probability of 1 - ``corruption_level`` and 0 with
               ``corruption_level``
        """
        return T.cast(self.theano_rng.binomial(size=input.shape, n=1, p=1 - corruption_level), config.floatX) * input

    def get_hidden_values(self, input):
        """ Compute the values of the hidden layer """
        raise NotImplementedError(str(type(self)) + " does not implement get_hidden_values.")

    def get_reconstructed_input(self, hidden):
        """ Compute the reconstructed input given the hidden rep'n """
        raise NotImplementedError(str(type(self)) + " does not implement get_reconstructed_input.")

    def get_cost_updates(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input
        taking into account a certain level of corruption of the input """
        raise NotImplementedError(str(type(self)) + " does not implement get_cost_updates.")

    def __getstate__(self):
        """ Return a tuple of all the important parameters that define this dA """
        return (self.W.get_value(), self.b.get_value(), self.b_prime.get_value(), self.n_visible, self.n_hidden)

    def __setstate__(self, state):
        """ Set the state of this dA from values returned from a deserialization process like unpickle. """
        W, b, b_prime, n_visible, n_hidden = state
        self.W = shared(value=W, name='W')
        self.b = shared(value=b, name='bvis')
        self.b_prime = shared(value=b_prime, name='bhid')
        self.n_visible = n_visible
        self.n_hidden = n_hidden
        # Rebuild the symbolic RNG from a fixed seed; the pre-pickle RNG
        # state is not preserved.
        numpy_rng = np.random.RandomState(123)
        self.theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
        self.W_prime = self.W.T
        self.params = [self.W, self.b, self.b_prime]

    def get_params(self):
        """ Return the params of this dA. This is for pickling testing purposes """
        return self.params

    def set_input(self, input):
        """ Set the input for an unpickled dA """
        self.x = input

    def dropout_from_layer(self, layer, prob):
        """ Apply masking noise to the hidden (i.e output) layer for this dA.

        :type layer: theano.shared
        :param layer: number random generator used to generate weights

        :type prob: float
        :param prob: retain each unit in this layer with probability prob """
        return T.cast(self.theano_rng.binomial(size=layer.shape, n=1, p=prob), config.floatX) * layer

    def sparse_w(self, n_visible, n_hidden, sparsity):
        ''' Return a numpy array for a sparse W matrix, the method of Martens (ICML 2010) '''
        initial_W = np.zeros((n_visible, n_hidden), dtype=config.floatX)
        # Make only sparse_init connections from each hidden unit back to each visible unit
        idx = np.arange(n_hidden)
        # Don't make more connections than there are hidden units
        n_connections = min(sparsity, n_hidden)
        for j in range(n_visible):
            np.random.shuffle(idx)
            initial_W[j, idx[:n_connections]] = np.random.randn(n_connections)
        print("... returned sparse init matrix ")
        return initial_W

    def dense_w(self, n_visible, n_hidden, numpy_rng):
        ''' Return a numpy array for a dense W matrix, the method of Glorot and Bengio (AISTATS2010) '''
        initial_W = np.asarray(numpy_rng.uniform(
            low=-4 * np.sqrt(6. / (n_hidden + n_visible)),
            high=4 * np.sqrt(6. / (n_hidden + n_visible)),
            size=(n_visible, n_hidden)), dtype=config.floatX)
        print("... returned dense init matrix ")
        return initial_W
class BernoulliAutoEncoder(AutoEncoder):
    """A de-noising AutoEncoder with [0,1] inputs and hidden values.

    Sigmoid encoder and decoder; trained with either a cross-entropy cost
    (``get_cost_updates``) or a squared-error cost (``get_cost_gparams``).
    """

    def __init__(self, numpy_rng, theano_rng=None, input=None, n_visible=784, n_hidden=500,
                 W=None, bhid=None, bvis=None, W_name=None, bvis_name=None, bhid_name=None, sparse_init=-1):
        """A de-noising AutoEncoder with [0,1] inputs and hidden values.

        Parameters are identical to :meth:`AutoEncoder.__init__`; see that
        docstring for the full description of each argument.
        """
        super(BernoulliAutoEncoder, self).__init__(numpy_rng, theano_rng, input, n_visible, n_hidden,
                                                   W, bhid, bvis, W_name, bvis_name, bhid_name, sparse_init)
        # NOTE(review): uses the `input` argument directly; if input is None
        # the base class falls back to a fresh dmatrix but this line does not
        # -- confirm callers always pass a symbolic input.
        self.output = T.nnet.sigmoid(T.dot(input, self.W) + self.b)

    @classmethod
    def class_from_values(cls, *args, **kwargs):
        """ This constructor is intended for dynamically constructing a dA layer subclass

        Args that always get specified in this constructor: numpy_rng, theano_rng, input, n_visible, n_hidden, W_name, bvis_name, bhid_name
        Args that *sometimes* get specified in this constructor: W, bvis, bhid, sparse_init
        """
        # Fill in the optional arguments that the caller may have omitted.
        kwargs.setdefault('W', None)
        kwargs.setdefault('bhid', None)
        kwargs.setdefault('bvis', None)
        kwargs.setdefault('sparse_init', -1)
        return cls(numpy_rng=kwargs['numpy_rng'], theano_rng=kwargs['theano_rng'], input=kwargs['input'],
                   n_visible=kwargs['n_visible'], n_hidden=kwargs['n_hidden'], W=kwargs['W'],
                   bhid=kwargs['bhid'], bvis=kwargs['bvis'], W_name=kwargs['W_name'],
                   bvis_name=kwargs['bvis_name'], bhid_name=kwargs['bhid_name'], sparse_init=kwargs['sparse_init'])

    def get_hidden_values(self, input):
        """ Compute the values of the hidden layer """
        return T.nnet.sigmoid(T.dot(input, self.W) + self.b)

    def get_reconstructed_input(self, hidden):
        """ Compute the reconstructed input given the hidden rep'n """
        return T.nnet.sigmoid(T.dot(hidden, self.W_prime) + self.b_prime)

    def get_cost_updates(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input
        taking into account a certain level of corruption of the input """
        x_corrupted = super(BernoulliAutoEncoder, self).get_corrupted_input(self.x, corruption_level)
        y = self.get_hidden_values(x_corrupted)
        z = self.get_reconstructed_input(y)
        # Use the cross entropy loss
        L = -T.sum(self.x * T.log(z) + (1 - self.x) * T.log(1 - z), axis=1)
        cost = T.mean(L)
        # compute the gradients of the cost of the dA w.r.t the params
        gparams = T.grad(cost, self.params)
        # populate the list of updates to each param
        updates = []
        for param, gparam in zip(self.params, gparams):
            updates.append((param, param - learning_rate * gparam))
        return (cost, updates)

    def get_cost_gparams(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input (with corruption).
        Instead of returning a list of tuples (updates) were the rval has the form of an update to
        a theano.tensor variable (param, update_value), return instead (param, gparam)."""
        x_corrupted = super(BernoulliAutoEncoder, self).get_corrupted_input(self.x, corruption_level)
        y = self.get_hidden_values(x_corrupted)
        z = self.get_reconstructed_input(y)
        # Take the sum over columns
        # Use the squared error loss function
        L = T.sum((self.x - z) ** 2, axis=1)
        cost = T.mean(L)
        # compute the gradients of the cost of the dA w.r.t the params
        gparams = T.grad(cost, self.params)
        # populate the list of parameter, gradient tuples
        updates = []
        for param, gparam in zip(self.params, gparams):
            updates.append((param, gparam))
        return (cost, updates)
class GaussianAutoEncoder(AutoEncoder):
    """A de-noising autoencoder with Gaussian (linear) visible units.

    The encoder is a sigmoid layer; the decoder is linear, so the
    reconstruction loss is the squared error rather than cross-entropy.
    """
    def __init__(self, numpy_rng, theano_rng=None, input=None, n_visible=784, n_hidden=500,
            W=None, bhid=None, bvis=None, W_name=None, bvis_name=None, bhid_name=None, sparse_init=-1):
        """ A de-noising AutoEncoder with Gaussian visible units
        :type numpy_rng: numpy.random.RandomState
        :param numpy_rng: numpy random generator used to generate weights
        :type theano_rng: theano.tensor.shared_randomstreams.RandomStreams
        :param theano_rng: Theano random generator; if None is given one is generated
        based on a seed drawn from `rng`
        :type input: theano.tensor.TensorType
        :param input: a symbolic description of the input or None for standalone
        dA
        :type n_visible: int
        :param n_visible: number of visible units
        :type n_hidden: int
        :param n_hidden: number of hidden units
        :type W: theano.tensor.TensorType
        :param W: Theano variable pointing to a set of weights that should be
        shared Theano variables connecting the visible and hidden layers.
        :type bhid: theano.tensor.TensorType
        :param bhid: Theano variable pointing to a set of biases values (for
        hidden units).
        :type bvis: theano.tensor.TensorType
        :param bvis: Theano variable pointing to a set of biases values (for
        visible units).
        :type W_name: string
        :param W_name: name to be assigned to the W matrix.
        :type bvis_name: string
        :param bvis_name: name to be assigned to the bvis vector.
        :type bhid_name: string
        :param bhid_name: name to be assigned to the bhid vector.
        :type sparse_init: int
        :param sparse_init: Initialize the weight matrices using Martens sparse
        initialization (Martens ICML 2010) >0 specifies the number of units
        in the layer that have initial weights drawn from a N(0,1).
        Use -1 for Glorot & Bengio (i.e dense) init.
        """
        super(GaussianAutoEncoder, self).__init__(
            numpy_rng, theano_rng, input, n_visible, n_hidden, W, bhid, bvis,
            W_name, bvis_name, bhid_name, sparse_init)
        # Symbolic feed-forward output of the sigmoid encoder.
        self.output = T.nnet.sigmoid(T.dot(input, self.W) + self.b)
    @classmethod
    def class_from_values(cls, *args, **kwargs):
        """ This constructor is intended for dynamically constructing a dA layer subclass
        Args that always get specified through this constructor: numpy_rng, theano_rng, input, n_visible, n_hidden, W_name, bvis_name, bhid_name.
        Args that *might* be specified: W, bhid, bvis, sparse_init.
        """
        # setdefault replaces the hand-rolled `if key not in kwargs` checks.
        kwargs.setdefault('W', None)
        kwargs.setdefault('bhid', None)
        kwargs.setdefault('bvis', None)
        kwargs.setdefault('sparse_init', -1)
        return cls(numpy_rng=kwargs['numpy_rng'], theano_rng=kwargs['theano_rng'],
                   input=kwargs['input'], n_visible=kwargs['n_visible'],
                   n_hidden=kwargs['n_hidden'], W=kwargs['W'],
                   bhid=kwargs['bhid'], bvis=kwargs['bvis'], W_name=kwargs['W_name'],
                   bvis_name=kwargs['bvis_name'], bhid_name=kwargs['bhid_name'],
                   sparse_init=kwargs['sparse_init'])
    def get_hidden_values(self, input):
        """ Compute the sigmoid activations of the hidden layer """
        return T.nnet.sigmoid(T.dot(input, self.W) + self.b)
    def get_reconstructed_input(self, hidden):
        """ Use a linear decoder to compute the reconstructed input given the hidden rep'n """
        return T.dot(hidden, self.W_prime) + self.b_prime
    def _cost_and_grads(self, corruption_level):
        """ Shared forward pass: corrupt the input, encode, decode and return
        (cost, hidden, reconstruction, gradients) for the squared-error loss.
        Factored out of the three public get_cost_* methods, which previously
        duplicated this code verbatim. """
        x_corrupted = super(GaussianAutoEncoder, self).get_corrupted_input(
            self.x, corruption_level)
        y = self.get_hidden_values(x_corrupted)
        z = self.get_reconstructed_input(y)
        # Squared-error loss summed over features, averaged over the mini-batch.
        cost = T.mean(T.sum((self.x - z) ** 2, axis=1))
        gparams = T.grad(cost, self.params)
        return (cost, y, z, gparams)
    def get_cost_updates(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input
        taking into account a certain level of corruption of the input.
        Returns (cost, updates) where updates are plain SGD steps. """
        cost, _, _, gparams = self._cost_and_grads(corruption_level)
        updates = [(param, param - learning_rate * gparam)
                   for param, gparam in zip(self.params, gparams)]
        return (cost, updates)
    def get_cost_updates_debug(self, corruption_level, learning_rate):
        """ Same as get_cost_updates but also returns the hidden activations
        and the reconstruction for inspection: (cost, y, z, updates). """
        cost, y, z, gparams = self._cost_and_grads(corruption_level)
        updates = [(param, param - learning_rate * gparam)
                   for param, gparam in zip(self.params, gparams)]
        return (cost, y, z, updates)
    def get_cost_gparams(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input (with corruption).
        Instead of returning (param, update_value) pairs, return raw
        (param, gparam) pairs.  `learning_rate` is unused but kept for a
        signature consistent with get_cost_updates. """
        cost, _, _, gparams = self._cost_and_grads(corruption_level)
        return (cost, list(zip(self.params, gparams)))
class ReluAutoEncoder(AutoEncoder):
    """A de-noising autoencoder with ReLU hidden units and a linear decoder."""
    def __init__(self, numpy_rng, theano_rng=None, input=None, n_visible=784, n_hidden=500,
            W=None, bhid=None, bvis=None, W_name=None, bvis_name=None, bhid_name=None, sparse_init=-1):
        """ A de-noising AutoEncoder with ReLu visible units
        :type numpy_rng: numpy.random.RandomState
        :param numpy_rng: numpy random generator used to generate weights
        :type theano_rng: theano.tensor.shared_randomstreams.RandomStreams
        :param theano_rng: Theano random generator; if None is given one is generated
        based on a seed drawn from `rng`
        :type input: theano.tensor.TensorType
        :param input: a symbolic description of the input or None for standalone
        dA
        :type n_visible: int
        :param n_visible: number of visible units
        :type n_hidden: int
        :param n_hidden: number of hidden units
        :type W: theano.tensor.TensorType
        :param W: Theano variable pointing to a set of weights that should be
        shared Theano variables connecting the visible and hidden layers.
        :type bhid: theano.tensor.TensorType
        :param bhid: Theano variable pointing to a set of biases values (for
        hidden units).
        :type bvis: theano.tensor.TensorType
        :param bvis: Theano variable pointing to a set of biases values (for
        visible units).
        :type W_name: string
        :param W_name: name to be assigned to the W matrix.
        :type bvis_name: string
        :param bvis_name: name to be assigned to the bvis vector.
        :type bhid_name: string
        :param bhid_name: name to be assigned to the bhid vector.
        :type sparse_init: int
        :param sparse_init: Initialize the weight matrices using Martens sparse
        initialization (Martens ICML 2010) >0 specifies the number of units
        in the layer that have initial weights drawn from a N(0,1).
        Use -1 for Glorot & Bengio (i.e dense) init.
        """
        # NOTE(review): a ReLU-specific weight-init path was stubbed out here
        # (commented-out code removed); the base-class initialization is used.
        super(ReluAutoEncoder, self).__init__(
            numpy_rng, theano_rng, input, n_visible, n_hidden, W, bhid, bvis,
            W_name, bvis_name, bhid_name, sparse_init)
        self.pre_activation = T.dot(input, self.W) + self.b
        # ReLU via the identity (a + |a|) / 2 == max(a, 0).
        self.output = (self.pre_activation + abs(self.pre_activation)) / 2.
    @classmethod
    def class_from_values(cls, *args, **kwargs):
        """ This constructor is intended for dynamically constructing a dA layer subclass
        Args that always get specified through this version of the constructor:
        numpy_rng, theano_rng, input, n_visible, n_hidden, W_name, bvis_name, bhid_name.
        Args that *might* be specified: W, bhid, bvis, sparse_init.
        """
        # setdefault replaces the hand-rolled `if key not in kwargs` checks.
        kwargs.setdefault('W', None)
        kwargs.setdefault('bhid', None)
        kwargs.setdefault('bvis', None)
        kwargs.setdefault('sparse_init', -1)
        return cls(numpy_rng=kwargs['numpy_rng'], theano_rng=kwargs['theano_rng'],
                   input=kwargs['input'], n_visible=kwargs['n_visible'],
                   n_hidden=kwargs['n_hidden'], W=kwargs['W'],
                   bhid=kwargs['bhid'], bvis=kwargs['bvis'], W_name=kwargs['W_name'],
                   bvis_name=kwargs['bvis_name'], bhid_name=kwargs['bhid_name'],
                   sparse_init=kwargs['sparse_init'])
    def get_reconstructed_input(self, hidden):
        """ Use a linear decoder to compute the reconstructed input given the hidden rep'n """
        return T.dot(hidden, self.W_prime) + self.b_prime
    def get_hidden_values(self, input):
        """ Apply ReLU elementwise to the affine-transformed input """
        # Hoist the affine transform, which the original computed twice.
        pre = T.dot(input, self.W) + self.b
        return (pre + abs(pre)) / 2.0
    def _cost_and_grads(self, corruption_level):
        """ Shared forward pass: corrupt the input, encode, decode and return
        (cost, hidden, reconstruction, gradients) for the squared-error loss.
        Factored out of the public get_cost_* methods, which previously
        duplicated this code verbatim. """
        x_corrupted = super(ReluAutoEncoder, self).get_corrupted_input(
            self.x, corruption_level)
        y = self.get_hidden_values(x_corrupted)
        z = self.get_reconstructed_input(y)
        # Squared-error loss summed over features, averaged over the mini-batch.
        cost = T.mean(T.sum((self.x - z) ** 2, axis=1))
        gparams = T.grad(cost, self.params)
        return (cost, y, z, gparams)
    def get_cost_updates(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input
        taking into account a certain level of corruption of the input.
        Returns (cost, updates) where updates are plain SGD steps. """
        cost, _, _, gparams = self._cost_and_grads(corruption_level)
        updates = [(param, param - learning_rate * gparam)
                   for param, gparam in zip(self.params, gparams)]
        return (cost, updates)
    def get_cost_updates_safe(self, corruption_level, learning_rate, mb_size, max_grad=15.0):
        """ Compute the reconstruction error over the mini-batched input
        taking into account a certain level of corruption of the input. This SGD is called safe
        because the per-column norms of the updated weights are clipped to
        max_grad to try and prevent exploding gradients. """
        cost, _, _, gparams = self._cost_and_grads(corruption_level)
        updates = []
        for param, gparam in zip(self.params, gparams):
            UD = param - gparam * learning_rate / mb_size
            col_norms = UD.norm(2, axis=0)
            desired_norms = T.clip(col_norms, 0, max_grad)
            # Rescale columns whose norm exceeds max_grad; epsilon avoids /0.
            updates.append((param, UD * (desired_norms / (1e-6 + col_norms))))
        return (cost, updates)
    def get_cost_updates_debug(self, corruption_level, learning_rate):
        """ Same as get_cost_updates but also returns the hidden activations
        and the reconstruction for inspection: (cost, y, z, updates). """
        cost, y, z, gparams = self._cost_and_grads(corruption_level)
        updates = [(param, param - learning_rate * gparam)
                   for param, gparam in zip(self.params, gparams)]
        return (cost, y, z, updates)
    def get_cost_gparams(self, corruption_level, learning_rate):
        """ Compute the reconstruction error over the mini-batched input (with corruption).
        Instead of returning (param, update_value) pairs, return raw
        (param, gparam) pairs.  `learning_rate` is unused but kept for a
        signature consistent with get_cost_updates. """
        cost, _, _, gparams = self._cost_and_grads(corruption_level)
        return (cost, list(zip(self.params, gparams)))
| 44.779972
| 157
| 0.587168
| 4,031
| 31,749
| 4.489457
| 0.082362
| 0.017019
| 0.009946
| 0.01492
| 0.84141
| 0.828259
| 0.817649
| 0.810963
| 0.801901
| 0.78571
| 0
| 0.006
| 0.333333
| 31,749
| 709
| 158
| 44.779972
| 0.849003
| 0.075152
| 0
| 0.676923
| 0
| 0
| 0.040149
| 0.001517
| 0
| 0
| 0
| 0
| 0.003846
| 0
| null | null | 0
| 0.015385
| null | null | 0.007692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32de13d07920192eeaaa9e6883688f1e214ed9b5
| 12,714
|
py
|
Python
|
mayan/apps/documents/tests/test_document_views.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 343
|
2015-01-05T14:19:35.000Z
|
2018-12-10T19:07:48.000Z
|
mayan/apps/documents/tests/test_document_views.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 191
|
2015-01-03T00:48:19.000Z
|
2018-11-30T09:10:25.000Z
|
mayan/apps/documents/tests/test_document_views.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 257
|
2019-05-14T10:26:37.000Z
|
2022-03-30T03:37:36.000Z
|
from django.test import override_settings
from ..events import event_document_type_changed, event_document_viewed
from ..permissions import (
permission_document_properties_edit, permission_document_view
)
from .base import GenericDocumentViewTestCase
from .mixins.document_mixins import DocumentViewTestMixin
class DocumentViewTestCase(
    DocumentViewTestMixin, GenericDocumentViewTestCase
):
    """View-level permission tests for document properties, listing,
    document type changes and previews.

    Each scenario follows the same pattern: without permission the view
    returns 404; with access it succeeds (200 or 302); for a trashed
    document it returns 404 again.  Every test also asserts exactly
    which events were (or were not) emitted.
    """
    # Tests operate on a lightweight document stub created in setUp()
    # instead of auto-uploading a full test document.
    auto_upload_test_document = False
    def setUp(self):
        super().setUp()
        self._create_test_document_stub()
    def test_document_properties_view_no_permission(self):
        self._clear_events()
        response = self._request_test_document_properties_view()
        self.assertEqual(response.status_code, 404)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_properties_view_with_access(self):
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self._clear_events()
        response = self._request_test_document_properties_view()
        self.assertContains(
            response=response, status_code=200, text=self.test_document.label
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_trashed_document_properties_view_with_access(self):
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self.test_document.delete()
        self._clear_events()
        response = self._request_test_document_properties_view()
        self.assertEqual(response.status_code, 404)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_properties_edit_get_view_no_permission(self):
        self._clear_events()
        response = self._request_test_document_properties_edit_get_view()
        self.assertEqual(response.status_code, 404)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_properties_edit_get_view_with_access(self):
        # Edit permission is granted on the document *type* here, not the
        # document instance.
        self.grant_access(
            permission=permission_document_properties_edit,
            obj=self.test_document_type
        )
        self._clear_events()
        response = self._request_test_document_properties_edit_get_view()
        self.assertEqual(response.status_code, 200)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_trashed_document_properties_edit_get_view_with_access(self):
        self.grant_access(
            permission=permission_document_properties_edit,
            obj=self.test_document_type
        )
        self.test_document.delete()
        self._clear_events()
        response = self._request_test_document_properties_edit_get_view()
        self.assertEqual(response.status_code, 404)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    @override_settings(DOCUMENTS_LANGUAGE='fra')
    def test_document_properties_view_setting_non_us_language_with_access(self):
        # Even with DOCUMENTS_LANGUAGE overridden to 'fra', the existing
        # stub document still renders its language as English.
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self._clear_events()
        response = self._request_test_document_properties_view()
        self.assertContains(
            response=response, status_code=200, text=self.test_document.label
        )
        self.assertContains(
            response=response, status_code=200,
            text='Language:</label>\n \n \n English'
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    @override_settings(DOCUMENTS_LANGUAGE='fra')
    def test_document_properties_edit_get_view_setting_non_us_language_with_access(self):
        self.grant_access(
            permission=permission_document_properties_edit,
            obj=self.test_document_type
        )
        self._clear_events()
        response = self._request_test_document_properties_edit_get_view()
        self.assertContains(
            response=response, status_code=200,
            text='<option value="eng" selected>English</option>',
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_list_view_no_permission(self):
        # The list view itself is reachable, but shows no documents.
        self._clear_events()
        response = self._request_test_document_list_view()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['object_list'].count(), 0)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_list_view_with_access(self):
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self._clear_events()
        response = self._request_test_document_list_view()
        self.assertContains(
            response=response, status_code=200, text=self.test_document.label
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_trashed_document_list_view_with_access(self):
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self.test_document.delete()
        self._clear_events()
        response = self._request_test_document_list_view()
        self.assertNotContains(
            response=response, status_code=200, text=self.test_document.label
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_type_change_post_view_no_permission(self):
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self._clear_events()
        response = self._request_test_document_type_change_post_view()
        self.assertEqual(response.status_code, 404)
        self.test_document.refresh_from_db()
        # Document type must be unchanged.
        self.assertEqual(
            self.test_document.document_type, document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_type_change_post_view_with_access(self):
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )
        self._clear_events()
        response = self._request_test_document_type_change_post_view()
        self.assertEqual(response.status_code, 302)
        self.test_document.refresh_from_db()
        self.assertNotEqual(
            self.test_document.document_type, document_type
        )
        # A single document_type_changed event must have been emitted.
        events = self._get_test_events()
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].action_object, self.test_document_types[1])
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_document)
        self.assertEqual(events[0].verb, event_document_type_changed.id)
    def test_trashed_document_document_type_change_post_view_with_access(self):
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )
        self.test_document.delete()
        self._clear_events()
        response = self._request_test_document_type_change_post_view()
        self.assertEqual(response.status_code, 404)
        self.test_document.refresh_from_db()
        self.assertEqual(
            self.test_document.document_type, document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_type_change_view_get_no_permission(self):
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self._clear_events()
        response = self._request_test_document_type_change_get_view()
        self.assertEqual(response.status_code, 404)
        self.test_document.refresh_from_db()
        self.assertEqual(
            self.test_document.document_type, document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_type_change_view_get_with_access(self):
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )
        self._clear_events()
        response = self._request_test_document_type_change_get_view()
        self.assertEqual(response.status_code, 200)
        self.test_document.refresh_from_db()
        # GET must not modify the document type.
        self.assertEqual(
            self.test_document.document_type, self.test_document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_trashed_document_type_change_view_get_with_access(self):
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self.test_document.delete()
        self._clear_events()
        response = self._request_test_document_type_change_get_view()
        self.assertEqual(response.status_code, 404)
        self.test_document.refresh_from_db()
        self.assertEqual(
            self.test_document.document_type, document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_multiple_document_type_change_view_no_permission(self):
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self._clear_events()
        response = self._request_test_document_multiple_type_change()
        self.assertEqual(response.status_code, 404)
        self.test_document.refresh_from_db()
        self.assertEqual(
            self.test_document.document_type, document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_multiple_document_type_change_view_with_permission(self):
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )
        self._create_test_document_type()
        document_type = self.test_document.document_type
        self._clear_events()
        response = self._request_test_document_multiple_type_change()
        self.assertEqual(response.status_code, 302)
        self.test_document.refresh_from_db()
        self.assertNotEqual(
            self.test_document.document_type, document_type
        )
        events = self._get_test_events()
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].action_object, self.test_document_types[1])
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_document)
        self.assertEqual(events[0].verb, event_document_type_changed.id)
    def test_document_preview_view_no_permission(self):
        self._clear_events()
        response = self._request_test_document_preview_view()
        self.assertEqual(response.status_code, 404)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
    def test_document_preview_view_with_access(self):
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self._clear_events()
        response = self._request_test_document_preview_view()
        self.assertContains(
            response=response, status_code=200, text=self.test_document.label
        )
        # Viewing the preview emits a document_viewed event.
        events = self._get_test_events()
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_document)
        self.assertEqual(events[0].verb, event_document_viewed.id)
    def test_trashed_document_preview_view_with_access(self):
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )
        self.test_document.delete()
        self._clear_events()
        response = self._request_test_document_preview_view()
        self.assertEqual(response.status_code, 404)
        events = self._get_test_events()
        self.assertEqual(events.count(), 0)
| 31.626866
| 101
| 0.68869
| 1,451
| 12,714
| 5.58856
| 0.059959
| 0.150943
| 0.108521
| 0.0624
| 0.932051
| 0.923295
| 0.923295
| 0.922432
| 0.909977
| 0.904181
| 0
| 0.010831
| 0.230219
| 12,714
| 401
| 102
| 31.705736
| 0.817717
| 0
| 0
| 0.742049
| 0
| 0
| 0.011326
| 0.001966
| 0
| 0
| 0
| 0
| 0.233216
| 1
| 0.081272
| false
| 0
| 0.017668
| 0
| 0.106007
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5dbb250062e60fe2fec3123cfe97d971434e7c7
| 594
|
py
|
Python
|
Utilities/Time.py
|
Lucas-Uchiha/FuinhaBot
|
f9e17fbe9d728916fa165c127a1e895e7873c407
|
[
"MIT"
] | null | null | null |
Utilities/Time.py
|
Lucas-Uchiha/FuinhaBot
|
f9e17fbe9d728916fa165c127a1e895e7873c407
|
[
"MIT"
] | null | null | null |
Utilities/Time.py
|
Lucas-Uchiha/FuinhaBot
|
f9e17fbe9d728916fa165c127a1e895e7873c407
|
[
"MIT"
] | null | null | null |
# Manages date and time information
from datetime import datetime
class Time:
    """Static helpers for reading the current local date and time."""
    @staticmethod
    def _now_as(fmt):
        """Format the current local time with *fmt* (strftime codes)."""
        return datetime.now().strftime(fmt)
    @staticmethod
    def agora():
        """Return the current moment as a POSIX timestamp (float)."""
        return datetime.timestamp(datetime.now())
    @staticmethod
    def dia():
        """Return the current day of the month as a zero-padded string."""
        return Time._now_as("%d")
    @staticmethod
    def mes():
        """Return the current month as a zero-padded string."""
        return Time._now_as("%m")
    @staticmethod
    def ano():
        """Return the current four-digit year as a string."""
        return Time._now_as("%Y")
    @staticmethod
    def dataFormatada():
        """Return the current date formatted as day-month-year."""
        return Time._now_as("%d-%m-%Y")
    @staticmethod
    def horaFormatada():
        """Return the current time formatted as hours:minutes:seconds."""
        return Time._now_as("%H:%M:%S")
| 20.482759
| 50
| 0.611111
| 64
| 594
| 5.671875
| 0.4375
| 0.247934
| 0.23416
| 0.344353
| 0.143251
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245791
| 594
| 28
| 51
| 21.214286
| 0.810268
| 0.058923
| 0
| 0.3
| 0
| 0
| 0.039497
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| true
| 0
| 0.05
| 0.3
| 0.7
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
bd29aa1762e8d90998171623db9ebd5d58930736
| 15,643
|
py
|
Python
|
aws-dev/awsdev8/venv/Lib/site-packages/amazondax/generated/Stubs.py
|
PacktPublishing/-AWS-Certified-Developer---Associate-Certification
|
3f76e3d3df6797705b5b30ae574fe678250d5e92
|
[
"MIT"
] | 13
|
2020-02-02T13:53:50.000Z
|
2022-03-20T19:50:02.000Z
|
aws-dev/awsdev8/venv/Lib/site-packages/amazondax/generated/Stubs(1).py
|
PacktPublishing/-AWS-Certified-Developer---Associate-Certification
|
3f76e3d3df6797705b5b30ae574fe678250d5e92
|
[
"MIT"
] | 2
|
2020-03-29T19:08:04.000Z
|
2021-06-02T00:57:44.000Z
|
aws-dev/awsdev8/venv/Lib/site-packages/amazondax/generated/Stubs(1).py
|
PacktPublishing/-AWS-Certified-Developer---Associate-Certification
|
3f76e3d3df6797705b5b30ae574fe678250d5e92
|
[
"MIT"
] | 10
|
2019-12-25T20:42:37.000Z
|
2021-11-17T15:19:00.000Z
|
#
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License
# is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
from .. import Constants, AttributeValueEncoder
def write_authorizeConnection_1489122155_1(accessKeyId, signature, stringToSign, sessionToken, userAgent, tube):
    """Serialize an authorizeConnection request onto the tube.

    Writes the protocol header, the mandatory credential fields, then
    the two optional string fields (encoded as null when absent) and
    flushes the tube.
    """
    # Protocol header: version 1, then the method's hash id.
    tube.write_int(1)
    tube.write_int(1489122155)
    # Mandatory credential fields.
    tube.write_string(accessKeyId)
    tube.write_string(signature)
    tube.write_binary(stringToSign)
    # Optional fields, in wire order: sessionToken then userAgent.
    for optional in (sessionToken, userAgent):
        if optional is None:
            tube.write_null()
        else:
            tube.write_string(optional)
    tube.flush()
def write_batchGetItem_N697851100_1(request, tube):
    """Serialize a BatchGetItem request: header, encoded item keys, then
    an optional kwargs map (only ReturnConsumedCapacity is supported).
    Statement order defines the wire format; do not reorder."""
    tube.write_int(1)
    tube.write_int(-697851100)
    AttributeValueEncoder.encode_batchGetItem_N697851100_1_getItemKeys_direct(tube.enc, request)
    # A kwargs map is written only when an optional field is present;
    # otherwise a single null takes its place.
    has_kwargs = (("ReturnConsumedCapacity" in request))
    if has_kwargs:
        tube.enc.append_map_stream_header()
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues, request.ReturnConsumedCapacity.upper()))
        tube.enc.append_break()
    else:
        tube.write_null()
    tube.flush()
def write_batchWriteItem_116217951_1(request, tube):
    """Serialize a BatchWriteItem request: header, encoded per-table key
    values, then an optional kwargs map.  Statement order defines the
    wire format; do not reorder."""
    tube.write_int(1)
    tube.write_int(116217951)
    AttributeValueEncoder.encode_batchWriteItem_116217951_1_keyValuesByTable_direct(tube.enc, request)
    # A kwargs map is written only when at least one optional field is
    # present; otherwise a single null takes its place.
    has_kwargs = (("ReturnConsumedCapacity" in request) or
                  ("ReturnItemCollectionMetrics" in request))
    if has_kwargs:
        tube.enc.append_map_stream_header()
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues, request.ReturnConsumedCapacity.upper()))
        if 'ReturnItemCollectionMetrics' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnItemCollectionMetrics)
            tube.write_int(getattr(Constants.ReturnItemCollectionMetricsValues, request.ReturnItemCollectionMetrics.upper()))
        tube.enc.append_break()
    else:
        tube.write_null()
    tube.flush()
def write_defineAttributeList_670678385_1(attributeListId, tube):
    """Serialize a defineAttributeList request: header plus the list id."""
    tube.write_int(1)
    tube.write_int(670678385)
    tube.write_int(attributeListId)
    tube.flush()
def write_defineAttributeListId_N1230579644_1(attributeNames, tube):
    """Serialize a defineAttributeListId request: header plus the array
    of attribute names."""
    tube.write_int(1)
    tube.write_int(-1230579644)
    tube.write_array(attributeNames)
    tube.flush()
def write_defineKeySchema_N742646399_1(tableName, tube):
    """Serialize a defineKeySchema request: header plus the table name
    (written as binary)."""
    tube.write_int(1)
    tube.write_int(-742646399)
    tube.write_binary(tableName)
    tube.flush()
def write_deleteItem_1013539361_1(request, tube):
    """Serialize a DeleteItem request: header, table name, encoded key,
    then an optional kwargs map of the supported optional parameters.
    Statement order defines the wire format; do not reorder."""
    tube.write_int(1)
    tube.write_int(1013539361)
    tube.write_binary(request.TableName)
    tube.write_binary(AttributeValueEncoder.encode_key(request.Key, request._key_schema))
    # A kwargs map is written only when at least one optional field is
    # present; otherwise a single null takes its place.
    has_kwargs = (("ReturnValues" in request) or
                  ("ReturnConsumedCapacity" in request) or
                  ("ReturnItemCollectionMetrics" in request) or
                  ("ConditionExpression" in request) or
                  ("ExpressionAttributeNames" in request) or
                  ("ExpressionAttributeValues" in request))
    if has_kwargs:
        tube.enc.append_map_stream_header()
        if 'ReturnValues' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnValues)
            tube.write_int(getattr(Constants.ReturnValuesValues, request.ReturnValues.upper()))
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues, request.ReturnConsumedCapacity.upper()))
        if 'ReturnItemCollectionMetrics' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnItemCollectionMetrics)
            tube.write_int(getattr(Constants.ReturnItemCollectionMetricsValues, request.ReturnItemCollectionMetrics.upper()))
        if 'ConditionExpression' in request:
            tube.write_int(Constants.DaxDataRequestParam.ConditionExpression)
            # kwargs strings are always written as UTF-8 encoded binary types
            tube.write_binary(request.ConditionExpression.encode('utf8'))
        # This operation has expressions, so deal with those together
        AttributeValueEncoder.encode_expressions_direct(tube.enc, request)
        tube.enc.append_break()
    else:
        tube.write_null()
    tube.flush()
def write_endpoints_455855874_1(tube):
    """Serialize an endpoints discovery request (header only, no body)."""
    tube.write_int(1)
    tube.write_int(455855874)
    tube.flush()
def write_getItem_263244906_1(request, tube):
    """Serialize a GetItem request: header, table name, encoded key,
    then an optional kwargs map of the supported optional parameters.
    Statement order defines the wire format; do not reorder."""
    tube.write_int(1)
    tube.write_int(263244906)
    tube.write_binary(request.TableName)
    tube.write_binary(AttributeValueEncoder.encode_key(request.Key, request._key_schema))
    # A kwargs map is written only when at least one optional field is
    # present; otherwise a single null takes its place.
    has_kwargs = (("ConsistentRead" in request) or
                  ("ReturnConsumedCapacity" in request) or
                  ("ProjectionExpression" in request) or
                  ("ExpressionAttributeNames" in request))
    if has_kwargs:
        tube.enc.append_map_stream_header()
        if 'ConsistentRead' in request:
            tube.write_int(Constants.DaxDataRequestParam.ConsistentRead)
            tube.write_boolean(request.ConsistentRead)
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues, request.ReturnConsumedCapacity.upper()))
        # This operation has expressions, so deal with those together
        AttributeValueEncoder.encode_expressions_direct(tube.enc, request)
        tube.enc.append_break()
    else:
        tube.write_null()
    tube.flush()
def write_putItem_N2106490455_1(request, tube):
    """Serialize a PutItem request onto the tube.

    Wire layout: method header, table name, encoded key, encoded
    non-key values, then either a kwargs map of the optional
    parameters or a single null when none are present.  Statement
    order defines the wire format; do not reorder.
    """
    tube.write_int(1)
    tube.write_int(-2106490455)
    tube.write_binary(request.TableName)
    tube.write_binary(AttributeValueEncoder.encode_key(request.Item, request._key_schema))
    # Fixed: removed a stray trailing semicolon left by the code generator.
    tube.write_binary(AttributeValueEncoder.encode_values(request.Item, request._key_schema, request._attr_names, request._attr_list_id))
    # A kwargs map is written only when at least one optional field is
    # present; otherwise a single null takes its place.
    has_kwargs = (("ReturnValues" in request) or
                  ("ReturnConsumedCapacity" in request) or
                  ("ReturnItemCollectionMetrics" in request) or
                  ("ConditionExpression" in request) or
                  ("ExpressionAttributeNames" in request) or
                  ("ExpressionAttributeValues" in request))
    if has_kwargs:
        tube.enc.append_map_stream_header()
        if 'ReturnValues' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnValues)
            tube.write_int(getattr(Constants.ReturnValuesValues, request.ReturnValues.upper()))
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues, request.ReturnConsumedCapacity.upper()))
        if 'ReturnItemCollectionMetrics' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnItemCollectionMetrics)
            tube.write_int(getattr(Constants.ReturnItemCollectionMetricsValues, request.ReturnItemCollectionMetrics.upper()))
        if 'ConditionExpression' in request:
            tube.write_int(Constants.DaxDataRequestParam.ConditionExpression)
            # kwargs strings are always written as UTF-8 encoded binary types
            tube.write_binary(request.ConditionExpression.encode('utf8'))
        # This operation has expressions, so deal with those together
        AttributeValueEncoder.encode_expressions_direct(tube.enc, request)
        tube.enc.append_break()
    else:
        tube.write_null()
    tube.flush()
def write_query_N931250863_1(request, tube):
    """Serialize a Query call (method id -931250863) onto *tube*.

    Wire layout: service int, method id, table name, key-condition
    expression, then either a map stream of optional parameters or a null
    when no optional parameter is present.
    """
    tube.write_int(1)
    tube.write_int(-931250863)
    tube.write_binary(request.TableName)
    # The key condition expression is evaluated up front; its result is
    # reused below by write_expressions_direct.
    expressions = AttributeValueEncoder.encode_key_condition_expression_direct(
        tube.enc, request)
    optional = ('IndexName', 'Select', 'Limit', 'ConsistentRead',
                'ScanIndexForward', 'ExclusiveStartKey',
                'ReturnConsumedCapacity', 'ProjectionExpression',
                'FilterExpression', 'ExpressionAttributeNames',
                'ExpressionAttributeValues')
    if not any(name in request for name in optional):
        tube.write_null()
    else:
        tube.enc.append_map_stream_header()
        if 'IndexName' in request:
            tube.write_int(Constants.DaxDataRequestParam.IndexName)
            # kwargs strings are always written as UTF-8 encoded binary types
            tube.write_binary(request.IndexName.encode('utf8'))
        if 'Select' in request:
            tube.write_int(Constants.DaxDataRequestParam.Select)
            tube.write_int(getattr(Constants.SelectValues,
                                   request.Select.upper()))
        if 'Limit' in request:
            tube.write_int(Constants.DaxDataRequestParam.Limit)
            tube.write_int(request.Limit)
        if 'ConsistentRead' in request:
            tube.write_int(Constants.DaxDataRequestParam.ConsistentRead)
            tube.write_int(int(request.ConsistentRead))
        if 'ScanIndexForward' in request:
            tube.write_int(Constants.DaxDataRequestParam.ScanIndexForward)
            tube.write_int(int(request.ScanIndexForward))
        if 'ExclusiveStartKey' in request:
            tube.write_int(Constants.DaxDataRequestParam.ExclusiveStartKey)
            # No generic map encoder, so a dedicated one is used.
            AttributeValueEncoder.encode_ExclusiveStartKey_direct(
                tube.enc, request)
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues,
                                   request.ReturnConsumedCapacity.upper()))
        # For Query the key-condition expression was already evaluated above,
        # so the remaining expressions are written from that result.
        AttributeValueEncoder.write_expressions_direct(
            tube.enc, expressions, request)
        tube.enc.append_break()
    tube.flush()
def write_scan_N1875390620_1(request, tube):
    """Serialize a Scan call (method id -1875390620) onto *tube*.

    Wire layout: service int, method id, table name, then either a map
    stream of optional parameters or a null when none is present.
    """
    tube.write_int(1)
    tube.write_int(-1875390620)
    tube.write_binary(request.TableName)
    # ProjectionExpression/FilterExpression/ExpressionAttribute* have no
    # dedicated slot below — presumably handled by encode_expressions_direct —
    # but their presence alone still forces the optional-parameter map.
    optional = ('IndexName', 'Limit', 'Select', 'ExclusiveStartKey',
                'ReturnConsumedCapacity', 'TotalSegments', 'Segment',
                'ProjectionExpression', 'FilterExpression',
                'ExpressionAttributeNames', 'ExpressionAttributeValues',
                'ConsistentRead')
    if not any(name in request for name in optional):
        tube.write_null()
    else:
        tube.enc.append_map_stream_header()
        if 'IndexName' in request:
            tube.write_int(Constants.DaxDataRequestParam.IndexName)
            # kwargs strings are always written as UTF-8 encoded binary types
            tube.write_binary(request.IndexName.encode('utf8'))
        if 'Limit' in request:
            tube.write_int(Constants.DaxDataRequestParam.Limit)
            tube.write_int(request.Limit)
        if 'Select' in request:
            tube.write_int(Constants.DaxDataRequestParam.Select)
            tube.write_int(getattr(Constants.SelectValues,
                                   request.Select.upper()))
        if 'ExclusiveStartKey' in request:
            tube.write_int(Constants.DaxDataRequestParam.ExclusiveStartKey)
            # No generic map encoder, so a dedicated one is used.
            AttributeValueEncoder.encode_ExclusiveStartKey_direct(
                tube.enc, request)
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues,
                                   request.ReturnConsumedCapacity.upper()))
        if 'TotalSegments' in request:
            tube.write_int(Constants.DaxDataRequestParam.TotalSegments)
            tube.write_int(request.TotalSegments)
        if 'Segment' in request:
            tube.write_int(Constants.DaxDataRequestParam.Segment)
            tube.write_int(request.Segment)
        if 'ConsistentRead' in request:
            tube.write_int(Constants.DaxDataRequestParam.ConsistentRead)
            tube.write_int(int(request.ConsistentRead))
        # Expressions for this operation are encoded together.
        AttributeValueEncoder.encode_expressions_direct(tube.enc, request)
        tube.enc.append_break()
    tube.flush()
def write_updateItem_1425579023_1(request, tube):
    """Serialize an UpdateItem call (method id 1425579023) onto *tube*.

    Wire layout: service int, method id, table name, CBOR-encoded key, then
    either a map stream of optional parameters or a null when none is
    present.
    """
    tube.write_int(1)
    tube.write_int(1425579023)
    tube.write_binary(request.TableName)
    tube.write_binary(
        AttributeValueEncoder.encode_key(request.Key, request._key_schema))
    # UpdateExpression and ExpressionAttribute* have no dedicated slot below —
    # presumably encode_expressions_direct covers them (verify) — but their
    # presence alone still forces the optional-parameter map to be emitted.
    optional = ('ReturnValues', 'ReturnConsumedCapacity',
                'ReturnItemCollectionMetrics', 'UpdateExpression',
                'ConditionExpression', 'ExpressionAttributeNames',
                'ExpressionAttributeValues')
    if not any(name in request for name in optional):
        tube.write_null()
    else:
        tube.enc.append_map_stream_header()
        if 'ReturnValues' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnValues)
            tube.write_int(getattr(Constants.ReturnValuesValues,
                                   request.ReturnValues.upper()))
        if 'ReturnConsumedCapacity' in request:
            tube.write_int(Constants.DaxDataRequestParam.ReturnConsumedCapacity)
            tube.write_int(getattr(Constants.ReturnConsumedCapacityValues,
                                   request.ReturnConsumedCapacity.upper()))
        if 'ReturnItemCollectionMetrics' in request:
            tube.write_int(
                Constants.DaxDataRequestParam.ReturnItemCollectionMetrics)
            tube.write_int(getattr(Constants.ReturnItemCollectionMetricsValues,
                                   request.ReturnItemCollectionMetrics.upper()))
        if 'ConditionExpression' in request:
            tube.write_int(Constants.DaxDataRequestParam.ConditionExpression)
            # kwargs strings are always written as UTF-8 encoded binary types
            tube.write_binary(request.ConditionExpression.encode('utf8'))
        # Expressions for this operation are encoded together.
        AttributeValueEncoder.encode_expressions_direct(tube.enc, request)
        tube.enc.append_break()
    tube.flush()
| 40.110256
| 138
| 0.694943
| 1,570
| 15,643
| 6.76242
| 0.119108
| 0.099181
| 0.093812
| 0.054253
| 0.825186
| 0.808891
| 0.797683
| 0.773853
| 0.760196
| 0.742583
| 0
| 0.02554
| 0.224062
| 15,643
| 389
| 139
| 40.213368
| 0.849151
| 0.088091
| 0
| 0.761029
| 0
| 0
| 0.102227
| 0.058807
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047794
| false
| 0
| 0.003676
| 0
| 0.051471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1f820ba0b5d23e9c582b771d069724fec5bfa6e4
| 13,136
|
py
|
Python
|
graylog/apis/systemcluster_api.py
|
yumimobi/graylog.py
|
3118f4a49c91c2cbbd660523b0ab99e56fbfd861
|
[
"Apache-2.0"
] | 10
|
2016-09-27T08:13:22.000Z
|
2018-09-04T13:15:42.000Z
|
graylog/apis/systemcluster_api.py
|
yumimobi/graylog.py
|
3118f4a49c91c2cbbd660523b0ab99e56fbfd861
|
[
"Apache-2.0"
] | 1
|
2019-08-28T16:16:09.000Z
|
2019-08-28T16:16:09.000Z
|
graylog/apis/systemcluster_api.py
|
yumimobi/graylog.py
|
3118f4a49c91c2cbbd660523b0ab99e56fbfd861
|
[
"Apache-2.0"
] | 5
|
2016-11-03T07:45:18.000Z
|
2021-08-19T14:21:49.000Z
|
# coding: utf-8
"""
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.1.1+01d50e5
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class SystemclusterApi(object):
    """
    Client for the Graylog ``/system/cluster`` REST endpoints.

    NOTE: originally produced by the swagger code generator
    (https://github.com/swagger-api/swagger-codegen); the public method
    signatures and error messages are kept in sync with that surface.
    Do not edit the class manually.
    """

    # Keyword arguments accepted by every operation.
    _COMMON_KWARGS = ('callback', '_return_http_data_only')

    def __init__(self, api_client=None):
        """Use *api_client* when given, else the shared/default client."""
        config = Configuration()
        if api_client:
            self.api_client = api_client
            return
        if not config.api_client:
            config.api_client = ApiClient()
        self.api_client = config.api_client

    def _reject_unknown_kwargs(self, kwargs, allowed, method_name):
        """Raise ``TypeError`` for any kwarg outside *allowed*.

        Mirrors the generated guard, including its exact message text.
        """
        for key in kwargs:
            if key not in allowed:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )

    def _get(self, resource_path, response_type, path_params, kwargs):
        """Issue a GET for *resource_path* and return ``call_api``'s result.

        *kwargs* may carry ``callback`` (asynchronous invocation) and
        ``_return_http_data_only``; both are forwarded unchanged.
        """
        header_params = {}
        accept = self.api_client.select_header_accept(['application/json'])
        # The generated code deletes a falsy Accept header, so only set it
        # when the negotiation produced something.
        if accept:
            header_params['Accept'] = accept
        header_params['Content-Type'] = \
            self.api_client.select_header_content_type([])
        return self.api_client.call_api(
            resource_path.replace('{format}', 'json'), 'GET',
            path_params,
            {},    # query params: none of these operations take any
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=[],  # these endpoints use no auth settings
            callback=kwargs.get('callback'),
            _return_http_data_only=kwargs.get('_return_http_data_only'))

    def node(self, **kwargs):
        """
        Information about this node.

        Returns this node's state in the cluster (use the node's own system
        API for system information). Synchronous by default; pass a
        ``callback`` function to run asynchronously, in which case the
        request thread is returned instead of the ``NodeSummary`` result.
        """
        kwargs['_return_http_data_only'] = True
        return self.node_with_http_info(**kwargs)

    def node_with_http_info(self, **kwargs):
        """
        Information about this node (with HTTP info).

        Same operation as :meth:`node`; accepts ``callback`` and
        ``_return_http_data_only`` keyword arguments only.
        :return: NodeSummary (or the request thread when asynchronous)
        """
        self._reject_unknown_kwargs(kwargs, self._COMMON_KWARGS, 'node')
        return self._get('/system/cluster/node', 'NodeSummary', {}, kwargs)

    def node_0(self, node_id, **kwargs):
        """
        Information about a node.

        Returns the given node's state in the cluster (use the node's own
        system API for system information). Synchronous by default; pass a
        ``callback`` function to run asynchronously.

        :param Object node_id: (required)
        :return: NodeSummary (or the request thread when asynchronous)
        """
        kwargs['_return_http_data_only'] = True
        return self.node_0_with_http_info(node_id, **kwargs)

    def node_0_with_http_info(self, node_id, **kwargs):
        """
        Information about a node (with HTTP info).

        :param Object node_id: (required)
        :return: NodeSummary (or the request thread when asynchronous)
        :raises ValueError: when *node_id* is None
        """
        self._reject_unknown_kwargs(
            kwargs, ('node_id',) + self._COMMON_KWARGS, 'node_0')
        if node_id is None:
            raise ValueError(
                "Missing the required parameter `node_id` when calling `node_0`")
        return self._get('/system/cluster/nodes/{nodeId}', 'NodeSummary',
                         {'nodeId': node_id}, kwargs)

    def nodes(self, **kwargs):
        """
        List all active nodes in this cluster.

        Synchronous by default; pass a ``callback`` function to run
        asynchronously, in which case the request thread is returned.
        :return: NodeSummaryList
        """
        kwargs['_return_http_data_only'] = True
        return self.nodes_with_http_info(**kwargs)

    def nodes_with_http_info(self, **kwargs):
        """
        List all active nodes in this cluster (with HTTP info).

        :return: NodeSummaryList (or the request thread when asynchronous)
        """
        self._reject_unknown_kwargs(kwargs, self._COMMON_KWARGS, 'nodes')
        return self._get('/system/cluster/nodes', 'NodeSummaryList', {}, kwargs)
| 37.531429
| 155
| 0.564099
| 1,385
| 13,136
| 5.168231
| 0.145848
| 0.067058
| 0.021794
| 0.030176
| 0.846186
| 0.814334
| 0.812378
| 0.77354
| 0.77354
| 0.765437
| 0
| 0.002732
| 0.359166
| 13,136
| 349
| 156
| 37.638968
| 0.847589
| 0.384896
| 0
| 0.729032
| 1
| 0
| 0.120022
| 0.03455
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045161
| false
| 0
| 0.045161
| 0
| 0.154839
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1f9284a03ce8a2d26c4826d0023374c94f930301
| 22,461
|
py
|
Python
|
tests/camel_test_values.py
|
katapal/python-bandwidth
|
4477c4129ce856ccd41adbc94e951336b56d7898
|
[
"MIT"
] | 18
|
2017-03-07T18:49:19.000Z
|
2020-11-04T14:27:37.000Z
|
tests/camel_test_values.py
|
katapal/python-bandwidth
|
4477c4129ce856ccd41adbc94e951336b56d7898
|
[
"MIT"
] | 12
|
2016-11-14T21:43:46.000Z
|
2019-10-11T20:18:51.000Z
|
tests/camel_test_values.py
|
katapal/python-bandwidth
|
4477c4129ce856ccd41adbc94e951336b56d7898
|
[
"MIT"
] | 13
|
2017-10-10T17:50:00.000Z
|
2020-08-13T13:14:22.000Z
|
# Fixture data for camelCase -> snake_case key-conversion tests.
# `before_array_dict` mirrors raw API error payloads (camelCase keys);
# `after_array_dict` is the same data after recursive key conversion.
# The odd "smallEzzznabled" key in the very first error-message group is
# deliberate (it exercises unusual camelCase runs) and must stay as-is.


def _detail(name, value):
    # One camelCase "details" record exactly as the API emits it.
    return {"nextId": "ued-abc123", "name": name, "superValue": value}


def _snake_detail(name, value):
    # snake_case counterpart of `_detail`; only keys are converted — the
    # camelCase `name` *values* stay untouched.
    return {"name": name, "next_id": "ued-abc123", "super_value": value}


def _flag_entry(key="smallEnabled"):
    # camelCase enable/disable blob; `key` varies once to cover odd casing.
    return {key: True, "bigDisabled": ["abc", "123", "xyz"]}


def _snake_flag_entry(key="small_enabled"):
    # snake_case counterpart of `_flag_entry`.
    return {"big_disabled": ["abc", "123", "xyz"], key: True}


before_array_dict = [
    {"theRealCat": "unavailable",
     "code": "callback-http-failure-status",
     "nestedDeeply": {"stillNesting": {"yetStillNesting": {"wowSuchNest": True}}},
     "details": [_detail("callnextId", "c-abc123"),
                 _detail("callbackStatusCode", "404"),
                 _detail("totalDuration", "55"),
                 _detail("responseDuration", "0"),
                 _detail("requestDuration", "0"),
                 _detail("callbackUrl", "http://google.com"),
                 _detail("callbackMethod", "POST"),
                 _detail("callbackEvent", "hangup")],
     "nextId": "ue-abc123",
     "message": "The callback returned an HTTP failure status",
     "time": "2017-02-28T15:40:35Z"},
    {"theRealCat": "unavailable",
     "code": "callback-http-failure-status",
     "newErrorMessage": [["a", "b", "c"], ["a", "b", "c"]],
     "errorMessagesABC123": [
         {"valueLesser": [_flag_entry(), _flag_entry(), _flag_entry(),
                          _flag_entry("smallEzzznabled")]},
         {"valueLesser": [_flag_entry() for _ in range(4)]},
         {"valueLesser": [_flag_entry() for _ in range(4)]},
         {"valueLesser": [_flag_entry() for _ in range(4)]},
     ],
     "details": [_detail("callnextId", "c-abc123"),
                 _detail("callbackStatusCode", "404"),
                 _detail("totalDuration", "7195"),
                 _detail("responseDuration", "0"),
                 _detail("requestDuration", "0"),
                 _detail("callbackUrl", "http://google.com"),
                 _detail("callbackMethod", "POST"),
                 _detail("callbackEvent", "incomingcall")],
     "nextId": "ue-abc123",
     "message": "The callback returned an HTTP failure status",
     "time": "2017-02-28T15:40:03Z"},
    {"theRealCat": "unavailable",
     "code": "callback-http-failure-status",
     "details": [_detail("messagenextId", "m-abc123"),
                 _detail("totalDuration", "37"),
                 _detail("responseDuration", "0"),
                 _detail("callbackStatusCode", "405"),
                 _detail("requestDuration", "0"),
                 _detail("callbackUrl", "http://google.com"),
                 _detail("callbackMethod", "POST"),
                 _detail("callbackEvent", "sms")],
     "nextId": "ue-abc123",
     "message": "The callback returned an HTTP failure status",
     "time": "2017-02-27T16:45:54Z"},
    {"theRealCat": "unavailable",
     "code": "callback-http-failure-status",
     "details": [_detail("totalDuration", "20"),
                 _detail("responseDuration", "0"),
                 _detail("requestDuration", "0"),
                 _detail("callbackUrl", "http://google.com"),
                 _detail("messagenextId", "m-abc123"),
                 _detail("callbackStatusCode", "405"),
                 _detail("callbackMethod", "POST"),
                 _detail("callbackEvent", "sms")],
     "nextId": "ue-abc123",
     "message": "The callback returned an HTTP failure status",
     "time": "2017-02-27T16:45:50Z"},
]

# Converted form: keys are snake_case and sorted (pprint-style) within each
# entry; values are byte-for-byte identical to the camelCase payloads above.
after_array_dict = [
    {'code': 'callback-http-failure-status',
     'details': [_snake_detail('callnextId', 'c-abc123'),
                 _snake_detail('callbackStatusCode', '404'),
                 _snake_detail('totalDuration', '55'),
                 _snake_detail('responseDuration', '0'),
                 _snake_detail('requestDuration', '0'),
                 _snake_detail('callbackUrl', 'http://google.com'),
                 _snake_detail('callbackMethod', 'POST'),
                 _snake_detail('callbackEvent', 'hangup')],
     'message': 'The callback returned an HTTP failure status',
     'nested_deeply': {'still_nesting': {'yet_still_nesting': {'wow_such_nest': True}}},
     'next_id': 'ue-abc123',
     'the_real_cat': 'unavailable',
     'time': '2017-02-28T15:40:35Z'},
    {'code': 'callback-http-failure-status',
     'details': [_snake_detail('callnextId', 'c-abc123'),
                 _snake_detail('callbackStatusCode', '404'),
                 _snake_detail('totalDuration', '7195'),
                 _snake_detail('responseDuration', '0'),
                 _snake_detail('requestDuration', '0'),
                 _snake_detail('callbackUrl', 'http://google.com'),
                 _snake_detail('callbackMethod', 'POST'),
                 _snake_detail('callbackEvent', 'incomingcall')],
     'error_messages_abc123': [
         {'value_lesser': [_snake_flag_entry(), _snake_flag_entry(),
                           _snake_flag_entry(),
                           _snake_flag_entry('small_ezzznabled')]},
         {'value_lesser': [_snake_flag_entry() for _ in range(4)]},
         {'value_lesser': [_snake_flag_entry() for _ in range(4)]},
         {'value_lesser': [_snake_flag_entry() for _ in range(4)]},
     ],
     'message': 'The callback returned an HTTP failure status',
     'new_error_message': [['a', 'b', 'c'], ['a', 'b', 'c']],
     'next_id': 'ue-abc123',
     'the_real_cat': 'unavailable',
     'time': '2017-02-28T15:40:03Z'},
    {'code': 'callback-http-failure-status',
     'details': [_snake_detail('messagenextId', 'm-abc123'),
                 _snake_detail('totalDuration', '37'),
                 _snake_detail('responseDuration', '0'),
                 _snake_detail('callbackStatusCode', '405'),
                 _snake_detail('requestDuration', '0'),
                 _snake_detail('callbackUrl', 'http://google.com'),
                 _snake_detail('callbackMethod', 'POST'),
                 _snake_detail('callbackEvent', 'sms')],
     'message': 'The callback returned an HTTP failure status',
     'next_id': 'ue-abc123',
     'the_real_cat': 'unavailable',
     'time': '2017-02-27T16:45:54Z'},
    {'code': 'callback-http-failure-status',
     'details': [_snake_detail('totalDuration', '20'),
                 _snake_detail('responseDuration', '0'),
                 _snake_detail('requestDuration', '0'),
                 _snake_detail('callbackUrl', 'http://google.com'),
                 _snake_detail('messagenextId', 'm-abc123'),
                 _snake_detail('callbackStatusCode', '405'),
                 _snake_detail('callbackMethod', 'POST'),
                 _snake_detail('callbackEvent', 'sms')],
     'message': 'The callback returned an HTTP failure status',
     'next_id': 'ue-abc123',
     'the_real_cat': 'unavailable',
     'time': '2017-02-27T16:45:50Z'},
]
# NOTE(review): bare dict literal — evaluated and immediately discarded at
# import time, so it has no runtime effect as written.  Presumably a phone
# number fixture whose variable assignment was lost; verify against the
# upstream tests/camel_test_values.py before relying on it.
{'created_time': '2014-04-14T19:16:07Z',
'id': 'n-dsd3wg3o4ng33icuooksngi',
'name': 'Jailbreak Phone',
'national_number': '(502) 286-6496',
'number': '+15022866496',
'number_state': 'released',
'price': '0.35'}
| 47.486258
| 104
| 0.261208
| 1,052
| 22,461
| 5.454373
| 0.110266
| 0.100383
| 0.083653
| 0.10596
| 0.925932
| 0.925932
| 0.911642
| 0.881666
| 0.852039
| 0.852039
| 0
| 0.065793
| 0.62308
| 22,461
| 472
| 105
| 47.586864
| 0.611977
| 0
| 0
| 0.764454
| 0
| 0
| 0.263835
| 0.012021
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1f92be7e17366c6452310a943e4ca8e5dfec4e38
| 40
|
py
|
Python
|
Avaliacion9.py
|
JoaozinhoProgramation/infosatc-lp-avaliativo-02
|
9049f69c3d66767e1b26bdd4f980eacbcede8713
|
[
"MIT"
] | null | null | null |
Avaliacion9.py
|
JoaozinhoProgramation/infosatc-lp-avaliativo-02
|
9049f69c3d66767e1b26bdd4f980eacbcede8713
|
[
"MIT"
] | null | null | null |
Avaliacion9.py
|
JoaozinhoProgramation/infosatc-lp-avaliativo-02
|
9049f69c3d66767e1b26bdd4f980eacbcede8713
|
[
"MIT"
] | null | null | null |
# Exercise: print the sample numbers in ascending order.
n = [2, 4, 3, 1, 5, 9, 6, 7, 8]
# sorted() returns a new ascending list and leaves `n` untouched.
ordered = sorted(n)
print(ordered)
| 20
| 23
| 0.55
| 13
| 40
| 1.692308
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243243
| 0.075
| 40
| 2
| 24
| 20
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.