hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8ca4305bf27ee3867690505a7764b93195be5977
| 30
|
py
|
Python
|
stepik/3559/66578/step_5/script.py
|
tshemake/Software-Development
|
e6dff82dffa49e66dc5fa5650af775437ebb6a9b
|
[
"Unlicense"
] | null | null | null |
stepik/3559/66578/step_5/script.py
|
tshemake/Software-Development
|
e6dff82dffa49e66dc5fa5650af775437ebb6a9b
|
[
"Unlicense"
] | null | null | null |
stepik/3559/66578/step_5/script.py
|
tshemake/Software-Development
|
e6dff82dffa49e66dc5fa5650af775437ebb6a9b
|
[
"Unlicense"
] | null | null | null |
print(241.90263432641407 / 77)
| 30
| 30
| 0.8
| 4
| 30
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.678571
| 0.066667
| 30
| 1
| 30
| 30
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
8ca65b9706dc95450bd40aa7ed91ab64422e8e46
| 30,008
|
py
|
Python
|
test_gitstatus.py
|
ksomf/zsh-git-prompt
|
fba95921f9545d6ffc6bf7a13548331a77e72a1a
|
[
"MIT"
] | 64
|
2018-05-31T02:28:18.000Z
|
2022-02-10T14:27:20.000Z
|
test_gitstatus.py
|
ksomf/zsh-git-prompt
|
fba95921f9545d6ffc6bf7a13548331a77e72a1a
|
[
"MIT"
] | 36
|
2018-05-25T13:01:14.000Z
|
2020-01-06T18:07:35.000Z
|
test_gitstatus.py
|
ksomf/zsh-git-prompt
|
fba95921f9545d6ffc6bf7a13548331a77e72a1a
|
[
"MIT"
] | 19
|
2018-06-02T13:35:19.000Z
|
2022-01-18T23:56:35.000Z
|
"""
Test module for gitstatus
Fixtures used to to setup git repo scenarios on the fly.
Tests are short and at the end of this file.
"""
from __future__ import absolute_import, print_function
import os
import re
import shlex
import shutil
import subprocess as sub
import tempfile
import pytest
import gitstatus
GIT_STATUS = os.path.join(os.path.dirname(__file__), 'gitstatus.py')
def run_gitstatus():
"""
Helper to simply run gitstatus in the current directory.
Returns:
The output of gitstatus.py in the CWD.
"""
return sub.check_output(['python', GIT_STATUS]).decode('utf-8', errors='ignore')
@pytest.yield_fixture(scope="function")
def empty_working_directory():
"""
Run a test inside an empty temporary directory.
"""
cwd = os.getcwd()
try:
folder = tempfile.mkdtemp()
os.chdir(folder)
yield
finally:
os.chdir(cwd)
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
@pytest.yield_fixture(scope="function")
def git_repo_initial_commit():
"""
Create a fake git repo with the following properties:
- No commits beyond initialization.
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_find_git_root():
"""
Create a fake git repo with the following properties:
- 1 commit
- nested folders called, first/second/third
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
]
try:
subs = os.path.join(folder, 'd_one', 'd_two', 'd_three')
os.makedirs(subs)
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_with_worktree():
"""
Create a fake git repo with the following properties:
- main repo has 3 commits
- upstream repo has 3 commits
- main repo has upstream set and is has diverged by 1 commit each way
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_tree = folder + "_worktree"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:Second line",
"git add first",
"git commit -m 'second commit'",
"git branch tree",
"git checkout tree",
"first:third line",
"git add first",
"git commit -m 'third commit'",
"git checkout master",
"git worktree add --detach %s tree" % (folder_tree),
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
os.chdir(folder_tree)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_tree)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_parse_stats():
"""
Create a fake git repo with the following properties:
- upstream set to another local git repo
- 3 staged files (1 changed, 2 additions)
- 1 changed file unstaged
- 2 untracked files
- 1 stashed change set
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_up = folder + "_upstream"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"second:A single line",
"third:A single line",
"touch untracked1 untracked2",
"git add first",
"git commit -m 'first commit'",
"first:Changes to stash",
"git stash",
"first:Changes to stage",
"git add first second third",
"first:Changes but unstaged",
"cp -R %s %s" % (folder, folder_up),
"git remote add -f up %s" % folder_up,
"git branch --set-upstream-to=up/master",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_up)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_parse_stats_only_conflicts():
"""
Create a fake git repo with the following properties:
- upstream set to another local git repo
- edit the same file and create a merge conflict
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_up = folder + "_upstream"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line\nsecond line\third line",
"git add first",
"git commit -m 'first commit'",
"first:fourth line\nfifth line\n",
"git add first",
"git commit -m 'second commit'",
"cp -R %s %s" % (folder, folder_up),
"git reset --hard HEAD~1",
"first:ninth line\ntenth line\n",
"git add first",
"git commit -m 'new second commit'",
"git remote add -f up %s" % folder_up,
"git branch --set-upstream-to=up/master",
"git fetch up",
"git merge up/master",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
try:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
except sub.CalledProcessError:
pass
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_up)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_branch_on_hash():
"""
Create a fake git repo with the following properties:
- 3 commits made
- yield when on checkout hash
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:A second line",
"git add first",
"git commit -m 'second commit'",
"git checkout HEAD~1",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_branch_on_master():
"""
Create a fake git repo with the following properties:
- 3 commits made
- yield when on checkout hash
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:A second line",
"git add first",
"git commit -m 'second commit'",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_branch_local_only():
"""
Create a fake git repo with the following properties:
- 1 commit
- no upstream copy or set value
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_remote_ahead():
"""
Create a fake git repo with the following properties:
- main repo has 3 commits
- upstream repo has 2 commits
- main repo has upstream set and is AHEAD by 1 commit
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_up = folder + "_upstream"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:Second line",
"git add first",
"git commit -m 'second commit'",
"cp -R %s %s" % (folder, folder_up),
"first:third line",
"git add first",
"git commit -m 'third commit'",
"git remote add -f up %s" % folder_up,
"git branch --set-upstream-to=up/master",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_up)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_remote_behind():
"""
Create a fake git repo with the following properties:
- main repo has 2 commits
- upstream repo has 3 commits
- main repo has upstream set and is BEHIND by 1 commit
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_up = folder + "_upstream"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:Second line",
"git add first",
"git commit -m 'second commit'",
"first:third line",
"git add first",
"git commit -m 'third commit'",
"cp -R %s %s" % (folder, folder_up),
"git remote add -f up %s" % folder_up,
"git branch --set-upstream-to=up/master",
"git reset --hard HEAD~1",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_up)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_remote_diverged():
"""
Create a fake git repo with the following properties:
- main repo has 3 commits
- upstream repo has 4 commits
- main repo has upstream set and is has diverged 2 behind, 1 ahead
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_up = folder + "_upstream"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:Second line",
"git add first",
"git commit -m 'second commit'",
"first:third line",
"git add first",
"git commit -m 'third commit'",
"first:fourth line",
"git add first",
"git commit -m 'fourth commit'",
"cp -R %s %s" % (folder, folder_up),
"git remote add -f up %s" % folder_up,
"git branch --set-upstream-to=up/master",
"git reset --hard HEAD~2",
"first:different third line",
"git add first",
"git commit -m 'different third commit'",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_up)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_in_merge():
"""
Create a fake git repo with the following properties:
- master branch with 2 commits
- dev branch that has 2 commits, last one differs from master
- dev branch is merging master into it
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"git branch dev",
"first:the second master line here",
"git add first",
"git commit -m 'second master commit'",
"git checkout dev",
"first:Second line for dev",
"git add first",
"git commit -m 'second dev commit'",
"git merge master",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
proc = sub.Popen(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
proc.wait()
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_in_rebase():
"""
Create a fake git repo with the following properties:
- master branch with 3 commits
- dev branch that has 3 commits, last two differ from master
- dev is rebasing master, 2 commits need resolving
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"git branch dev",
"first:the second master line here",
"git add first",
"git commit -m 'second master commit'",
"first:there is also a third master",
"git add first",
"git commit -m 'third master commit'",
"git checkout dev",
"first:Second line",
"git add first",
"git commit -m 'second dev commit'",
"first:Third line\nForuth line",
"git add first",
"git commit -m 'third dev commit'",
"git rebase master",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
proc = sub.Popen(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
proc.wait()
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
os.chdir(cwd)
@pytest.yield_fixture(scope="function")
def git_repo_upstream_gone():
"""
Create a fake git repo with the following properties:
- create a repo with 2 commits and a 'dev' branch
- copy repo to create an upstream
- set 'dev' branch to track 'up/dev'
- delete upstream dev
"""
cwd = os.getcwd()
folder = tempfile.mkdtemp()
folder_up = folder + "_upstream"
cmds = [
"git init",
"git config user.email 'you@example.com'",
"git config user.name 'Your Name'",
"first:A single line",
"git add first",
"git commit -m 'first commit'",
"first:Second line",
"git add first",
"git commit -m 'second commit'",
"cp -R %s %s" % (folder, folder_up),
"git remote add -f up %s" % folder_up,
"git branch dev",
"git checkout dev",
"git push -u up dev",
"git fetch up",
"git push up :dev",
]
try:
os.chdir(folder)
for cmd in cmds:
if re.match(r'\S+:', cmd):
assert len(cmd.split(":")) == 2
fname, text = cmd.split(":")
with open(os.path.join(folder, fname), 'a') as fout:
fout.write(text + '\n')
else:
with open(os.devnull, 'w') as devnull:
sub.check_call(shlex.split(cmd),
stdout=devnull, stderr=sub.STDOUT)
yield
finally:
try:
shutil.rmtree(folder)
except (OSError, IOError):
pass
try:
shutil.rmtree(folder_up)
except (OSError, IOError):
pass
os.chdir(cwd)
# ----------
# Unit Tests
# ----------
def test_find_git_root(git_repo_find_git_root):
""" A unit test for gitstatus. """
expect = os.path.join(os.getcwd(), '.git')
sub_d = os.path.join(os.getcwd(), 'd_one', 'd_two', 'd_three')
assert os.path.isdir(sub_d)
os.chdir(sub_d)
assert gitstatus.find_git_root() == expect
def test_find_git_root_fail(empty_working_directory):
""" A unit test for gitstatus. """
with pytest.raises(IOError):
gitstatus.find_git_root()
def test_git_paths_in_normal_repo(git_repo_initial_commit):
""" A unit test for gitstatus. """
head_file, stash_file, merge_file, rebase_dir = gitstatus.git_paths(gitstatus.find_git_root())
assert head_file == os.path.join(os.getcwd(), '.git', 'HEAD')
assert stash_file == os.path.join(os.getcwd(), '.git', 'logs', 'refs', 'stash')
assert merge_file == os.path.join(os.getcwd(), '.git', 'MERGE_HEAD')
assert rebase_dir == os.path.join(os.getcwd(), '.git', 'rebase-apply')
def test_git_paths_in_working_tree(git_repo_with_worktree):
""" A unit test for gitstatus. """
repo_root = os.getcwd().replace('_worktree', '')
tree_root = os.path.join(repo_root, '.git', 'worktrees',
os.path.basename(repo_root) + '_worktree')
head_file, stash_file, merge_file, rebase_dir = gitstatus.git_paths(gitstatus.find_git_root())
assert head_file == os.path.join(tree_root, 'HEAD')
assert stash_file == os.path.join(repo_root, '.git', 'logs', 'refs', 'stash')
assert merge_file == os.path.join(tree_root, 'MERGE_HEAD')
assert rebase_dir == os.path.join(tree_root, 'rebase-apply')
def test_parse_stats():
""" A unit test for gitstatus. """
status_input = """?? untracked1
?? untracked2
?? untracked3
AA conflicts1
AU conflicts2
DD conflicts3
DU conflicts4
UA conflicts5
UD conflicts6
UD conflicts7
A_ staged1
C_ staged2
D_ staged3
M_ staged4
R_ staged5
_C changed1
_D changed2
_M changed3
_R changed4"""
assert gitstatus.parse_stats(status_input.splitlines()) == (5, 7, 4, 3)
def test_parse_ahead_behind_only_ahead():
""" A unit test for gitstatus. """
assert gitstatus.parse_ahead_behind("## master...up/master [ahead 2]") == (2, 0)
def test_parse_ahead_behind_only_behind():
""" A unit test for gitstatus. """
assert gitstatus.parse_ahead_behind("## master...up/master [behind 1]") == (0, 1)
def test_parse_ahead_behind_both():
""" A unit test for gitstatus. """
assert gitstatus.parse_ahead_behind("## master...up/master [ahead 2, behind 1]") == (2, 1)
def test_parse_branch_on_local_branch():
""" A unit test for gitstatus. """
branch_line = "## master"
assert gitstatus.parse_branch(branch_line, None) == ('master', '..', 1)
def test_parse_branch_has_upstream():
""" A unit test for gitstatus. """
branch_line = "## master...up/master [ahead 2, behind 1]"
assert gitstatus.parse_branch(branch_line, None) == ('master', 'up/master', 0)
def test_parse_branch_out_on_hash(git_repo_branch_on_hash):
""" A unit test for gitstatus. """
actual_hash = sub.check_output(shlex.split('git rev-parse --short HEAD'))
actual_hash = actual_hash.decode('utf-8', errors='ignore').strip()
head_file = os.path.join(os.getcwd(), '.git', 'HEAD')
branch_line = "## HEAD (no branch)"
assert gitstatus.parse_branch(branch_line, head_file) == (':' + actual_hash, '..', 0)
def test_stash_count_one_stash(git_repo_parse_stats):
""" A unit test for gitstatus. """
stash_file = os.path.join(os.getcwd(), '.git', 'logs', 'refs', 'stash')
assert gitstatus.stash_count(stash_file) == 1
def test_stash_count_no_stash(git_repo_initial_commit):
""" A unit test for gitstatus. """
stash_file = os.path.join(os.getcwd(), 'logs', 'refs', 'stash')
assert gitstatus.stash_count(stash_file) == 0
def test_rebase_progress_active_rebase(git_repo_in_rebase):
rebase_dir = os.path.join(os.getcwd(), '.git', 'rebase-apply')
assert gitstatus.rebase_progress(rebase_dir) == '1/2'
def test_rebase_progress_no_rebase(git_repo_initial_commit):
rebase_dir = os.path.join(os.getcwd(), '.git', 'rebase-apply')
assert gitstatus.rebase_progress(rebase_dir) == '0'
# ----------------
# Functional Tests
# ----------------
def test_gitstatus_no_repo(empty_working_directory):
""" A unit test for gitstatus. """
assert run_gitstatus() == ''
def test_gitstatus_initial_commit(git_repo_initial_commit):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 0 0 0 0 0 0 0 1 {} 0 0'.format(gitstatus.SYM_NOUPSTREAM)
def test_gitstatus_local_branch(git_repo_branch_on_master):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 0 0 0 0 0 0 0 1 {} 0 0'.format(gitstatus.SYM_NOUPSTREAM)
def test_gitstatus_on_hash(git_repo_branch_on_hash):
""" A unit test for gitstatus. """
actual_hash = sub.check_output(shlex.split('git rev-parse --short HEAD'))
actual_hash = actual_hash.decode('utf-8', errors='ignore').strip()
assert run_gitstatus() == ':{} 0 0 0 0 0 0 0 0 {} 0 0'.format(actual_hash,
gitstatus.SYM_NOUPSTREAM)
def test_gitstatus_parse_stats_no_conflicts(git_repo_parse_stats):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 0 0 3 0 1 2 1 0 up/master 0 0'
def test_gitstatus_parse_stats_only_conflicts(git_repo_parse_stats_only_conflicts):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 1 1 0 1 0 0 0 0 up/master 1 0'
def test_gitstatus_remote_ahead(git_repo_remote_ahead):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 1 0 0 0 0 0 0 0 up/master 0 0'
def test_gitstatus_remote_behind(git_repo_remote_behind):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 0 1 0 0 0 0 0 0 up/master 0 0'
def test_gitstatus_remote_diverged(git_repo_remote_diverged):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'master 1 2 0 0 0 0 0 0 up/master 0 0'
def test_gitstatus_stdin(git_repo_parse_stats):
""" A unit test for gitstatus. """
std_input = sub.check_output(['git', 'status', '--branch', '--porcelain'])
with tempfile.TemporaryFile() as finput:
finput.write(std_input)
finput.seek(0)
out = sub.check_output(['python', GIT_STATUS], stdin=finput).decode('utf-8')
assert out == 'master 0 0 3 0 1 2 1 0 up/master 0 0'
def test_gitstatus_merging(git_repo_in_merge):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'dev 0 0 0 1 0 0 0 1 .. 1 0'
def test_gitstatus_rebasing(git_repo_in_rebase):
""" A unit test for gitstatus. """
actual_hash = sub.check_output(shlex.split('git rev-parse --short HEAD'))
actual_hash = actual_hash.decode('utf-8', errors='ignore').strip()
assert run_gitstatus() == ':{} 0 0 0 1 0 0 0 0 .. 0 1/2'.format(actual_hash)
def test_gitstatus_upstream_gone(git_repo_upstream_gone):
""" A unit test for gitstatus. """
assert run_gitstatus() == 'dev 0 0 0 0 0 0 0 0 up/dev 0 0'
| 30.589195
| 98
| 0.550753
| 3,770
| 30,008
| 4.271883
| 0.077984
| 0.00832
| 0.008382
| 0.029556
| 0.821049
| 0.794908
| 0.77181
| 0.757715
| 0.720708
| 0.69258
| 0
| 0.010778
| 0.322881
| 30,008
| 980
| 99
| 30.620408
| 0.78183
| 0.112137
| 0
| 0.752434
| 0
| 0
| 0.222405
| 0.005178
| 0
| 0
| 0
| 0
| 0.066759
| 1
| 0.061196
| false
| 0.031989
| 0.012517
| 0
| 0.075104
| 0.001391
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50a958e3b16c7cd9237a78942c721421f767ff80
| 940
|
py
|
Python
|
evaluation/regression_metrics.py
|
rupakc/DriftGAN
|
acd2ebbed96d0b4d3ede38b01d7d6620f9b0f426
|
[
"Apache-2.0"
] | null | null | null |
evaluation/regression_metrics.py
|
rupakc/DriftGAN
|
acd2ebbed96d0b4d3ede38b01d7d6620f9b0f426
|
[
"Apache-2.0"
] | null | null | null |
evaluation/regression_metrics.py
|
rupakc/DriftGAN
|
acd2ebbed96d0b4d3ede38b01d7d6620f9b0f426
|
[
"Apache-2.0"
] | null | null | null |
from sklearn.metrics.regression import mean_squared_error, median_absolute_error, mean_absolute_error
from sklearn.metrics.regression import r2_score, mean_squared_log_error, explained_variance_score
def get_mean_squared_error(gold_values, predicted_values):
return mean_squared_error(gold_values, predicted_values)
def get_median_absolute_error(gold_values, predicted_values):
return median_absolute_error(gold_values, predicted_values)
def get_mean_absolute_error(gold_values, predicted_values):
return mean_absolute_error(gold_values, predicted_values)
def get_r2_score(gold_values, predicted_values):
return r2_score(gold_values, predicted_values)
def get_mean_squared_log_error(gold_values, predicted_values):
return mean_squared_log_error(gold_values, predicted_values)
def get_explained_variance_score(gold_values, predicted_values):
    """Return the explained variance score of *predicted_values* against *gold_values*."""
    score = explained_variance_score(gold_values, predicted_values)
    return score
| 34.814815
| 101
| 0.857447
| 130
| 940
| 5.692308
| 0.161538
| 0.162162
| 0.308108
| 0.405405
| 0.856757
| 0.764865
| 0.695946
| 0.309459
| 0
| 0
| 0
| 0.003505
| 0.089362
| 940
| 26
| 102
| 36.153846
| 0.860981
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
50b40fc09dbcb8024f993618e62657bbcf5d806b
| 157
|
py
|
Python
|
src/lk_db/ents/both/census/EntMaritalStatusOfPopulation.py
|
nuuuwan/lk_db
|
ac0abfa47ba31b0d4c2c8566b3101b83749bd45d
|
[
"MIT"
] | null | null | null |
src/lk_db/ents/both/census/EntMaritalStatusOfPopulation.py
|
nuuuwan/lk_db
|
ac0abfa47ba31b0d4c2c8566b3101b83749bd45d
|
[
"MIT"
] | null | null | null |
src/lk_db/ents/both/census/EntMaritalStatusOfPopulation.py
|
nuuuwan/lk_db
|
ac0abfa47ba31b0d4c2c8566b3101b83749bd45d
|
[
"MIT"
] | null | null | null |
# Auto Generated - DO NOT EDIT!
from lk_db.ents.both.EntCensusResult import EntCensusResult
class EntMaritalStatusOfPopulation(EntCensusResult):
    """Census-result entity for marital status of population.

    Inherits all behavior unchanged from EntCensusResult; this file is
    auto-generated (see header) and must not be edited by hand.
    """
    pass
| 19.625
| 59
| 0.808917
| 17
| 157
| 7.411765
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133758
| 157
| 7
| 60
| 22.428571
| 0.926471
| 0.184713
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
50b539b6ce9a951a4e07355f9fb756a1bd3278b7
| 24,910
|
py
|
Python
|
src/LLS_search_patterns.py
|
frankeverdij/logic-life-search
|
0e6bf5f6b7a8fbbf838f4a65dbcff18e01a5c8a8
|
[
"MIT"
] | null | null | null |
src/LLS_search_patterns.py
|
frankeverdij/logic-life-search
|
0e6bf5f6b7a8fbbf838f4a65dbcff18e01a5c8a8
|
[
"MIT"
] | null | null | null |
src/LLS_search_patterns.py
|
frankeverdij/logic-life-search
|
0e6bf5f6b7a8fbbf838f4a65dbcff18e01a5c8a8
|
[
"MIT"
] | null | null | null |
import copy
from . import LLS_files
from . import LLS_formatting
from . import LLS_rules
from . import LLS_defaults
from .SearchPattern import SearchPattern
from .LLS_messages import print_message
from .LLS_literal_manipulation import neighbours_from_coordinates, variable_from_literal, negate
def search_pattern_from_string(input_string, indent = 0, verbosity = 0):
    """Create the grid and ignore_transition of a search pattern from the given string."""
    grid, ignore_transition = LLS_formatting.parse_input_string(input_string, indent = indent, verbosity = verbosity)
    print_message(
        "Pattern parsed as:\n" + LLS_formatting.make_csv(grid, ignore_transition) + "\n",
        3,
        indent = indent, verbosity = verbosity
    )
    # Any cell that isn't a fixed state ("0"/"1") or the unknown marker ("*")
    # is a user-supplied literal: rename its variable with a "user_input_"
    # prefix, preserving any negation.
    fixed_states = ("0", "1", "*")
    for gen_index, generation in enumerate(grid):
        for row_index, row in enumerate(generation):
            for col_index, cell in enumerate(row):
                if cell in fixed_states:
                    continue
                variable, negated = variable_from_literal(cell)
                grid[gen_index][row_index][col_index] = negate("user_input_" + variable, negated)
    return grid, ignore_transition
def blank_search_pattern(width, height, duration, indent = 0, verbosity = 0):
    """Return a duration x height x width grid of unknown cells ("*")."""
    print_message('Creating spaceship search pattern...', 3, indent = indent, verbosity = verbosity)
    # Every generation and every row is a distinct list; "*" marks an unknown cell.
    grid = [
        [["*"] * width for _row in range(height)]
        for _generation in range(duration)
    ]
    print_message("Pattern created:\n" + LLS_formatting.make_csv(grid) + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return grid
def check_orphan(file_name, number_of_generations, indent = 0, verbosity = 0):
    """Build a search pattern that tests whether the (single-generation) pattern
    in `file_name` is an orphan within `number_of_generations` predecessors.

    Returns (grid, ignore_transition). The final generation is the target
    pattern padded with unknown border cells; the earlier generations start
    all-"0" and are opened up ("*") only where they can influence it.
    NOTE(review): whether SAT/UNSAT of the resulting pattern means
    "orphan" is decided by the caller — not visible from here.
    """
    print_message(
        'Creating search pattern to see if file "' + file_name + '" contains an orphan...',
        3,
        indent = indent, verbosity = verbosity
    )
    input_string = LLS_files.string_from_file(file_name, indent = indent + 1, verbosity = verbosity)
    grid, _ = LLS_formatting.parse_input_string(input_string, indent = indent + 1, verbosity = verbosity)
    # Orphan checking only makes sense for a single-generation input.
    assert len(grid) == 1, "More than one generation in input"
    print_message(
        "Pattern parsed as:\n" + LLS_formatting.make_csv(grid) + "\n",
        3,
        indent = indent+1, verbosity = verbosity
    )
    pattern = grid[0]
    width = len(pattern[0])
    height = len(pattern)
    # Pad the pattern on all four sides with `number_of_generations` rings of
    # unknown cells, since a predecessor may extend one cell further per step.
    padding = number_of_generations
    pattern = [["*" for columns in range(padding)] + row + ["*" for columns in range(padding)] for row in [["*" for cell in range(
        width)] for rows in range(padding)] + pattern + [["*" for cell in range(width)] for rows in range(padding)]]
    width = len(pattern[0])
    height = len(pattern)
    duration = number_of_generations + 1
    # Earlier generations start all-dead; the last generation is the target.
    grid = [[["0" for cell in range(width)] for row in range(
        height)] for generation in range(number_of_generations)] + [pattern]
    # Transitions into the earlier generations are ignored by default; only the
    # final generation's transition is always constrained.
    ignore_transition = [[[True for cell in range(width)] for row in range(
        height)] for generation in range(number_of_generations)] + [[[False for cell in range(width)] for row in range(
        height)]]
    # Walk backwards in time, opening up ("*") any cell whose successors could
    # be affected by it, and re-enabling its transition constraint.
    for t in range(number_of_generations - 1, -1, -1):
        for y in range(height):
            for x in range(width):
                if t == number_of_generations - 1:
                    # Last pre-target generation: relevant if it neighbours a
                    # determined ("0"/"1") cell of the target pattern.
                    neighbours = neighbours_from_coordinates(grid,x,y,t,t_offset=1,background_grid=[[["*"]]])
                    nonempties = ["0", "1"]
                else:
                    # Earlier generations: relevant if it neighbours an
                    # already-opened ("*") cell of the next generation.
                    neighbours = neighbours_from_coordinates(grid,x,y,t,t_offset=1)
                    nonempties = ["*"]
                if any(nonempty in neighbours for nonempty in nonempties):
                    grid[t][y][x] = "*"
                    ignore_transition[t][y][x] = False
    # Undetermined cells in the final generation are irrelevant to the check:
    # fix them to "0" and drop their transition constraints.
    for y in range(height):
        for x in range(width):
            if grid[-1][y][x] not in ["0", "1"]:
                grid[-1][y][x] = "0"
                ignore_transition[-1][y][x] = True
    print_message("Search pattern:\n" + LLS_formatting.make_csv(grid, ignore_transition) + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return grid, ignore_transition
def glider_eater_search_pattern(width,height,digestion_time,symmetry="C1",indent = 0, verbosity = 0):
    """Build a search pattern for a glider eater.

    A glider (hard-coded 6-generation approach, `glider_in`) enters the
    top-left corner of an unknown `width` x `height` region (padded by a
    one-cell border); after `digestion_time` further generations the region
    is again unknown, so the solver must find a pattern that absorbs the
    glider. Returns (grid, ignore_transition).
    """
    print_message('Creating eater search pattern...', 3, indent = indent, verbosity = verbosity)
    # Six generations of a glider moving through a 5x5 corner window.
    glider_in = [
        [["1","0","0","0","0"],
         ["0","1","1","0","0"],
         ["1","1","0","0","0"],
         ["0","0","0","0","0"],
         ["0","0","0","0","0"]],
        [["0","1","0","0","0"],
         ["0","0","1","0","0"],
         ["1","1","1","0","0"],
         ["0","0","0","0","0"],
         ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
         ["1","0","1","0","0"],
         ["0","1","1","0","0"],
         ["0","1","0","0","0"],
         ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
         ["0","0","1","0","0"],
         ["1","0","1","0","0"],
         ["0","1","1","0","0"],
         ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
         ["0","1","0","0","0"],
         ["0","0","1","1","0"],
         ["0","1","1","0","0"],
         ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
         ["0","0","1","0","0"],
         ["0","0","0","1","0"],
         ["0","1","1","1","0"],
         ["0","0","0","0","0"]]
    ]
    # One-cell dead border on each side; 6 lead-in generations for the glider.
    width = width + 2
    height = height + 2
    duration = digestion_time + 6
    grid = [[["0"
              for i in range(width)] for j in range(height)] for k in range(duration)]
    # Interior of the final generation is unknown — the eater to be found.
    for y in range(1,height-1):
        for x in range(1,width-1):
            grid[-1][y][x] = "*"
    search_pattern = SearchPattern(grid)
    # NOTE(review): "varaibles" typo is SearchPattern's public method name.
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    # Keep the 5x5 entry window of the final generation empty for the glider.
    for x in range(5):
        for y in range(5):
            search_pattern.grid[-1][y][x] = "0"
    search_pattern.force_symmetry(symmetry)
    # Copy the (symmetrized) final generation into each lead-in generation...
    for t, generation in enumerate(glider_in):
        search_pattern.grid[t] = copy.deepcopy(search_pattern.grid[-1])
    # ...then overlay the glider's cells on top of it.
    for t, generation in enumerate(glider_in):
        for y, row in enumerate(generation):
            for x, cell in enumerate(row):
                search_pattern.grid[t][y][x] = cell
    # Digestion generations: interior fully unknown.
    for t in range(6,duration-1):
        for y in range(1,height-1):
            for x in range(1,width-1):
                search_pattern.grid[t][y][x] = "*"
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
def lwss_eater_search_pattern(width,height,digestion_time,symmetry="C1",indent = 0, verbosity = 0):
    """Build a search pattern for an LWSS eater.

    An LWSS (hard-coded lead-in `lwss_in`) enters on the left edge, centred
    vertically, of an unknown `width` x `height` region (plus a one-cell dead
    border); after `digestion_time` further generations the region is again
    unknown, so the solver must find a pattern that absorbs the ship.
    Returns (grid, ignore_transition).
    """
    print_message('Creating lwss search pattern...', 3, indent = indent, verbosity = verbosity)
    # The entry window is 7 rows tall, centred on the middle row.
    assert height % 2 == 1, "Height must be odd"
    # Lead-in generations of the LWSS crossing a 7x7 window on the left edge.
    lwss_in = [
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0"],
         ["0","1","0","0","0","0","0"],
         ["1","1","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0"],
         ["0","1","1","0","0","0","0"],
         ["1","1","0","0","0","0","0"],
         ["1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["1","1","1","0","0","0","0"],
         ["0","0","1","0","0","0","0"],
         ["0","0","1","0","0","0","0"],
         ["0","1","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0"],
         ["1","1","1","0","0","0","0"],
         ["1","0","1","1","0","0","0"],
         ["0","1","1","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0"],
         ["0","0","0","1","0","0","0"],
         ["0","0","0","1","0","0","0"],
         ["1","1","1","1","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","1","1","0","0","0"],
         ["1","1","0","1","1","0","0"],
         ["1","1","1","1","0","0","0"],
         ["0","1","1","0","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","1","1","1","1","0","0"],
         ["1","0","0","0","1","0","0"],
         ["0","0","0","0","1","0","0"],
         ["1","0","0","1","0","0","0"],
         ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
         ["0","0","1","1","0","0","0"],
         ["0","1","1","1","1","0","0"],
         ["0","1","1","0","1","1","0"],
         ["0","0","0","1","1","0","0"],
         ["0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0"]]
    ]
    in_time = len(lwss_in)
    # One-cell dead border on each side; `in_time` lead-in generations.
    width = width + 2
    height = height + 2
    duration = digestion_time + in_time
    grid = [[["0"
              for i in range(width)] for j in range(height)] for k in range(duration)]
    # Interior of the final generation is unknown — the eater to be found.
    for y in range(1,height-1):
        for x in range(1,width-1):
            grid[-1][y][x] = "*"
    search_pattern = SearchPattern(grid)
    # NOTE(review): "varaibles" typo is SearchPattern's public method name.
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    # Keep the 7-row entry window (centred vertically) of the final generation
    # empty for the incoming ship. BUGFIX: use floor division — `height/2` is a
    # float in Python 3 and is not a valid list index.
    for x in range(7):
        for y in range(height//2 - 3, height//2 + 4):
            search_pattern.grid[-1][y][x] = "0"
    search_pattern.force_symmetry(symmetry)
    # Copy the (symmetrized) final generation into each lead-in generation...
    for t, generation in enumerate(lwss_in):
        search_pattern.grid[t] = copy.deepcopy(search_pattern.grid[-1])
    # ...then overlay the ship's cells, ignoring transitions on the left edge
    # column where the ship enters from outside the grid.
    for t, generation in enumerate(lwss_in):
        for y, row in enumerate(generation):
            if y not in [0, len(generation) - 1]:
                search_pattern.ignore_transition[t][height//2 - 3 + y][0] = True
            for x, cell in enumerate(row):
                search_pattern.grid[t][height//2 - 3 + y][x] = cell
    # Digestion generations: interior fully unknown.
    for t in range(in_time,duration-1):
        for y in range(1,height-1):
            for x in range(1,width-1):
                search_pattern.grid[t][y][x] = "*"
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
def hwss_eater_search_pattern(width,height,digestion_time,symmetry="C1",indent = 0, verbosity = 0):
    """Build a search pattern for an HWSS eater.

    An HWSS (hard-coded lead-in `hwss_in`) enters on the left edge, centred
    vertically, of an unknown `width` x `height` region (plus a one-cell dead
    border); after `digestion_time` further generations the region is again
    unknown, so the solver must find a pattern that absorbs the ship.
    Returns (grid, ignore_transition).
    """
    # BUGFIX: message said "lwss" (copy-paste from lwss_eater_search_pattern).
    print_message('Creating hwss search pattern...', 3, indent = indent, verbosity = verbosity)
    # The entry window is 9 rows tall, centred on the middle row.
    assert height % 2 == 1, "Height must be odd"
    # Lead-in generations of the HWSS crossing a 9x9 window on the left edge.
    hwss_in = [
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["0","1","1","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","1","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["1","1","1","0","0","0","0","0","0"],
         ["1","0","1","1","0","0","0","0","0"],
         ["0","1","1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0","0","0"],
         ["0","0","0","1","0","0","0","0","0"],
         ["0","0","0","1","0","0","0","0","0"],
         ["1","1","1","1","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","1","1","0","0","0","0","0"],
         ["1","1","0","1","1","0","0","0","0"],
         ["1","1","1","1","0","0","0","0","0"],
         ["1","1","1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","1","1","1","0","0","0","0"],
         ["0","0","0","0","1","0","0","0","0"],
         ["0","0","0","0","1","0","0","0","0"],
         ["0","0","0","1","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","1","1","0","0","0","0","0"],
         ["1","1","1","1","1","0","0","0","0"],
         ["1","1","1","0","1","1","0","0","0"],
         ["0","0","0","1","1","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","1","1","0","0","0","0","0","0"],
         ["0","0","0","0","1","0","0","0","0"],
         ["0","0","0","0","0","1","0","0","0"],
         ["0","0","0","0","0","1","0","0","0"],
         ["1","1","1","1","1","1","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","1","1","0","0","0"],
         ["1","1","1","1","0","1","1","0","0"],
         ["1","1","1","1","1","1","0","0","0"],
         ["0","1","1","1","1","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","1","1","1","1","1","1","0","0"],
         ["1","0","0","0","0","0","1","0","0"],
         ["0","0","0","0","0","0","1","0","0"],
         ["1","0","0","0","0","1","0","0","0"],
         ["0","0","1","1","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","1","1","1","1","0","0","0"],
         ["0","1","1","1","1","1","1","0","0"],
         ["0","1","1","1","1","0","1","1","0"],
         ["0","0","0","0","0","1","1","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]]
    ]
    in_time = len(hwss_in)
    # One-cell dead border on each side; `in_time` lead-in generations.
    width = width + 2
    height = height + 2
    duration = digestion_time + in_time
    grid = [[["0"
              for i in range(width)] for j in range(height)] for k in range(duration)]
    # Interior of the final generation is unknown — the eater to be found.
    for y in range(1,height-1):
        for x in range(1,width-1):
            grid[-1][y][x] = "*"
    search_pattern = SearchPattern(grid)
    # NOTE(review): "varaibles" typo is SearchPattern's public method name.
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    # Keep the 9-row entry window (centred vertically) of the final generation
    # empty for the incoming ship. BUGFIX: use floor division — `height/2` is a
    # float in Python 3 and is not a valid list index.
    for x in range(9):
        for y in range(height//2 - 4, height//2 + 5):
            search_pattern.grid[-1][y][x] = "0"
    search_pattern.force_symmetry(symmetry)
    # Copy the (symmetrized) final generation into each lead-in generation...
    for t, generation in enumerate(hwss_in):
        search_pattern.grid[t] = copy.deepcopy(search_pattern.grid[-1])
    # ...then overlay the ship's cells, ignoring transitions on the left edge
    # column where the ship enters from outside the grid.
    for t, generation in enumerate(hwss_in):
        for y, row in enumerate(generation):
            if y not in [0, len(generation) - 1]:
                search_pattern.ignore_transition[t][height//2 - 4 + y][0] = True
            for x, cell in enumerate(row):
                search_pattern.grid[t][height//2 - 4 + y][x] = cell
    # Digestion generations: interior fully unknown.
    for t in range(in_time,duration-1):
        for y in range(1,height-1):
            for x in range(1,width-1):
                search_pattern.grid[t][y][x] = "*"
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
def stator_search_pattern(pattern, rule=None, background_grid=None, indent=0, verbosity=0):
    """Build a search pattern for the stator of the oscillator seeded by `pattern`.

    Evolves `pattern` under `rule` until it returns to its initial state, then
    replaces every cell that is constant over the whole period with a fresh
    "stator_cell_<k>" variable, and pads the grid with a one-cell dead border.
    Returns the resulting grid (no ignore_transition).
    """
    # Idiom fix: compare against the None singleton with `is`, not `==`.
    if rule is None:
        rule = LLS_rules.rule_from_rulestring(LLS_defaults.rulestring)
    if background_grid is None:
        (
            background_grid,
            _
        ) = LLS_formatting.parse_input_string(
            LLS_files.string_from_file(
                "backgrounds/" + LLS_defaults.background,
            )
        )
    width = len(pattern[0])
    height = len(pattern)
    grid = [pattern]
    duration = 1
    print_message('Creating oscillator...', 3, indent = indent, verbosity = verbosity)
    # Evolve until the pattern repeats its initial generation (its period).
    while True:
        print_message('Generation: ' + str(duration), 3, indent = indent + 1, verbosity = verbosity)
        grid.append([["0" for x in range(width)] for y in range(height)])
        duration += 1
        for x in range(width):
            for y in range(height):
                # Survival ("S") rule applies to live cells, birth ("B") to dead ones.
                BS_letter = "S" if grid[-2][y][x] == "1" else "B"
                grid[-1][y][x] = rule[BS_letter + LLS_rules.transition_from_cells(neighbours_from_coordinates(grid,x,y,-1, background_grid = background_grid))]
        if grid[0] == grid[-1]:
            break
    # Cells constant across the whole period form the stator: give each its own
    # variable, shared across all generations.
    number_of_variables = 0
    for x in range(width):
        for y in range(height):
            if all(grid[t][y][x] == grid[0][y][x] for t in range(duration)):
                for t in range(duration):
                    grid[t][y][x] = "stator_cell_" + str(number_of_variables)
                number_of_variables += 1
    # Pad every generation with a one-cell dead border (rows, then columns).
    grid = [[["0" for x in range(width)]] + generation + [["0" for x in range(width)]] for generation in grid]
    grid = [[["0"] + row + ["0"] for row in generation] for generation in grid]
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return grid
| 47.538168
| 159
| 0.368286
| 3,570
| 24,910
| 2.505322
| 0.038375
| 0.369633
| 0.513529
| 0.63551
| 0.815407
| 0.77907
| 0.757267
| 0.713551
| 0.713551
| 0.696892
| 0
| 0.127737
| 0.307025
| 24,910
| 523
| 160
| 47.629063
| 0.390395
| 0.003131
| 0
| 0.710084
| 0
| 0
| 0.104568
| 0
| 0
| 0
| 0
| 0
| 0.006303
| 1
| 0.014706
| false
| 0
| 0.016807
| 0
| 0.046218
| 0.044118
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
50e8cf525998f3838566d57c8a177c902eacb06a
| 16,833
|
py
|
Python
|
core/variance_predictor.py
|
carankt/FastSpeech2-1
|
42c06e4fbdf741a0719154d1cb4617b7d3f15a5c
|
[
"Apache-2.0"
] | 1
|
2021-12-08T09:07:09.000Z
|
2021-12-08T09:07:09.000Z
|
core/variance_predictor.py
|
carankt/FastSpeech2-1
|
42c06e4fbdf741a0719154d1cb4617b7d3f15a5c
|
[
"Apache-2.0"
] | null | null | null |
core/variance_predictor.py
|
carankt/FastSpeech2-1
|
42c06e4fbdf741a0719154d1cb4617b7d3f15a5c
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn.functional as F
from typing import Optional
from core.modules import LayerNorm
import pycwt as wavelet
import numpy as np
from sklearn import preprocessing
class VariancePredictor(torch.nn.Module):
    """Variance predictor: a stack of Conv1d + ReLU + LayerNorm + Dropout blocks
    followed by a linear projection to `out` values per frame."""
    def __init__(
        self,
        idim: int,
        n_layers: int = 2,
        n_chans: int = 256,
        out: int = 1,
        kernel_size: int = 3,
        dropout_rate: float = 0.5,
        offset: float = 1.0,
    ):
        """Initialize the variance predictor.

        Args:
            idim (int): Input dimension.
            n_layers (int): Number of convolutional layers.
            n_chans (int): Number of channels of convolutional layers.
            out (int): Output dimension of the final linear layer.
            kernel_size (int): Kernel size of convolutional layers.
            dropout_rate (float): Dropout rate.
            offset (float): Offset value to avoid nan in log domain.
        """
        super(VariancePredictor, self).__init__()
        self.offset = offset
        self.conv = torch.nn.ModuleList()
        for idx in range(n_layers):
            # First layer maps idim -> n_chans; subsequent layers keep n_chans.
            in_chans = idim if idx == 0 else n_chans
            self.conv += [
                torch.nn.Sequential(
                    torch.nn.Conv1d(
                        in_chans,
                        n_chans,
                        kernel_size,
                        stride=1,
                        padding=(kernel_size - 1) // 2,
                    ),
                    torch.nn.ReLU(),
                    LayerNorm(n_chans),
                    torch.nn.Dropout(dropout_rate),
                )
            ]
        self.linear = torch.nn.Linear(n_chans, out)
    def _forward(
        self,
        xs: torch.Tensor,
        is_inference: bool = False,
        is_log_output: bool = False,
        alpha: float = 1.0,
    ) -> torch.Tensor:
        # Shared body of forward/inference; the exp/round/clamp branch converts
        # log-domain predictions back to non-negative integers at inference.
        xs = xs.transpose(1, -1)  # (B, idim, Tmax)
        for f in self.conv:
            xs = f(xs)  # (B, C, Tmax)
        # NOTE: calculate in log domain
        xs = self.linear(xs.transpose(1, -1)).squeeze(-1)  # (B, Tmax)
        if is_inference and is_log_output:
            # # NOTE: calculate in linear domain
            xs = torch.clamp(
                torch.round(xs.exp() - self.offset), min=0
            ).long()  # avoid negative value
            xs = xs * alpha
        return xs
    def forward(
        self, xs: torch.Tensor, x_masks: Optional[torch.Tensor] = None
    ) -> torch.Tensor:
        """Calculate forward propagation.
        Args:
            xs (Tensor): Batch of input sequences (B, Tmax, idim).
            x_masks (ByteTensor, optional): Batch of masks indicating padded part (B, Tmax);
                masked positions are zeroed in the output.
        Returns:
            Tensor: Batch of predicted durations in log domain (B, Tmax).
        """
        xs = self._forward(xs)
        if x_masks is not None:
            xs = xs.masked_fill(x_masks, 0.0)
        return xs
    def inference(
        self, xs: torch.Tensor, is_log_output: bool = False, alpha: float = 1.0
    ) -> torch.Tensor:
        """Inference duration.
        Args:
            xs (Tensor): Batch of input sequences (B, Tmax, idim).
            is_log_output (bool): If True, convert log-domain predictions to
                non-negative long values (exp, round, clamp) scaled by `alpha`.
            alpha (float): Scaling factor applied after the conversion.
        Returns:
            LongTensor: Batch of predicted durations in linear domain (B, Tmax).
        """
        return self._forward(
            xs, is_inference=True, is_log_output=is_log_output, alpha=alpha
        )
class EnergyPredictor(torch.nn.Module):
    """Energy predictor: conv stack producing a per-frame wavelet spectrogram
    plus utterance-level mean/std, reconstructed to an energy contour and
    quantized into one-hot bins at inference."""
    def __init__(
        self,
        idim,
        n_layers=2,
        n_chans=256,
        kernel_size=3,
        dropout_rate=0.1,
        offset=1.0,
        min=0,
        max=0,
        n_bins=256,
        out = 10
    ):
        """Initialize Energy predictor module.
        Args:
            idim (int): Input dimension.
            n_layers (int, optional): Number of convolutional layers.
            n_chans (int, optional): Number of channels of convolutional layers.
            kernel_size (int, optional): Kernel size of convolutional layers.
            dropout_rate (float, optional): Dropout rate.
            offset (float, optional): Offset value to avoid nan in log domain.
            min (float, optional): Lower bound of the energy quantization bins.
            max (float, optional): Upper bound of the energy quantization bins.
            n_bins (int, optional): Number of quantization bins.
            out (int, optional): Number of wavelet-spectrogram channels predicted.

        NOTE(review): `min`/`max` shadow the builtins; kept for caller
        compatibility (keyword arguments).
        """
        super(EnergyPredictor, self).__init__()
        # self.bins = torch.linspace(min, max, n_bins - 1).cuda()
        # Buffer (not a parameter) so the bin edges move with the module's device.
        self.register_buffer("energy_bins", torch.linspace(min, max, n_bins - 1))
        # NOTE(review): self.predictor is built but not used in any method
        # visible here — confirm against callers before removing.
        self.predictor = VariancePredictor(idim)
        self.conv = torch.nn.ModuleList()
        for idx in range(n_layers):
            in_chans = idim if idx == 0 else n_chans
            self.conv += [
                torch.nn.Sequential(
                    torch.nn.Conv1d(
                        in_chans,
                        n_chans,
                        kernel_size,
                        stride=1,
                        padding=(kernel_size - 1) // 2,
                    ),
                    torch.nn.ReLU(),
                    LayerNorm(n_chans),
                    torch.nn.Dropout(dropout_rate),
                )
            ]
        self.spectrogram_out = torch.nn.Linear(n_chans, out)
        self.mean = torch.nn.Linear(n_chans, 1)
        self.std = torch.nn.Linear(n_chans, 1)
        self.out = out
    def forward(self, xs: torch.Tensor, olens: torch.Tensor, x_masks: torch.Tensor):
        """Calculate forward propagation.
        Args:
            xs (Tensor): Batch of input sequences (B, Tmax, idim).
            olens (Tensor): Output lengths used to average over time (B,).
            x_masks (ByteTensor, optional): Batch of masks indicating padded part (B, Tmax).
        Returns:
            tuple: (f0_spec (B, Tmax, out), f0_mean (B,), f0_std (B,)).
        """
        xs = xs.transpose(1, -1)  # (B, idim, Tmax)
        for f in self.conv:
            xs = f(xs)  # (B, C, Tmax)
        # NOTE: calculate in log domain
        xs = xs.transpose(1, -1)
        f0_spec = self.spectrogram_out(xs)  # (B, Tmax, 10)
        if x_masks is not None:
            # print("olen:", olens)
            #f0_spec = f0_spec.transpose(1, -1)
            # print("F0 spec dimension:", f0_spec.shape)
            # print("x_masks dimension:", x_masks.shape)
            f0_spec = f0_spec.masked_fill(x_masks, 0.0)
            #f0_spec = f0_spec.transpose(1, -1)
            # print("F0 spec dimension:", f0_spec.shape)
            #xs = xs.transpose(1, -1)
            xs = xs.masked_fill(x_masks, 0.0)
            #xs = xs.transpose(1, -1)
            # print("xs dimension:", xs.shape)
        # Utterance-level average of the hidden states (sum over time, then
        # divide by the true lengths when available).
        x_avg = xs.sum(dim=1).squeeze(1)
        # print(x_avg)
        # print("xs dim :", x_avg.shape)
        # print("olens ;", olens.shape)
        if olens is not None:
            x_avg = x_avg / olens.unsqueeze(1)
            # print(x_avg)
        f0_mean = self.mean(x_avg).squeeze(-1)
        f0_std = self.std(x_avg).squeeze(-1)
        # if x_masks is not None:
        #     f0_spec = f0_spec.masked_fill(x_masks, 0.0)
        #print(f0_spec.shape, f0_mean.shape, f0_std.shape, "Energy Predictor Output")
        return f0_spec, f0_mean, f0_std
    def inference(self, xs: torch.Tensor, olens = None, alpha: float = 1.0):
        """Inference duration.
        Args:
            xs (Tensor): Batch of input sequences (B, Tmax, idim).
            olens (Tensor, optional): Output lengths for time averaging (B,).
            alpha (float, optional): Unused here; kept for interface symmetry.
        Returns:
            Tensor: One-hot quantized energy contour (1, Tmax, 256).
        """
        f0_spec, f0_mean, f0_std = self.forward(xs, olens, x_masks=None)  # (B, Tmax, 10)
        #print(f0_spec)
        f0_reconstructed = self.inverse(f0_spec, f0_mean, f0_std)
        #print(f0_reconstructed)
        return self.to_one_hot(f0_reconstructed)
    def to_one_hot(self, x):
        # Quantize values into self.energy_bins and one-hot encode (256 classes).
        # e = de_norm_mean_std(e, hp.e_mean, hp.e_std)
        # For pytorch > = 1.6.0
        quantize = torch.bucketize(x, self.energy_bins).to(device=x.device)  # .cuda()
        return F.one_hot(quantize.long(), 256).float()
    def inverse(self, Wavelet_lf0, f0_mean, f0_std):
        """Reconstruct a contour from wavelet coefficients plus mean/std,
        returning exp() of the de-normalized sum, shaped (1, Tmax).
        NOTE(review): assumes batch size 1 (squeeze(0)) and moves data to CPU
        then back to CUDA — confirm against callers.
        """
        mother = wavelet.MexicanHat()
        dt = 0.005
        dj = 2
        s0 = dt*2
        J = self.out - 1
        # Only the wavelet scales are needed; the CWT input is a dummy signal.
        _, scales, _, _, _, _ = wavelet.cwt(np.ones([10]), dt, dj, s0, J, mother)
        #scales = np.array([0.01, 0.02, 0.04, 0.08, 0.16]) #np.arange(1,11)
        #print(Wavelet_lf0.shape)
        Wavelet_lf0 = Wavelet_lf0.squeeze(0).cpu().numpy()
        lf0_rec = np.zeros([Wavelet_lf0.shape[0], len(scales)])
        for i in range(0,len(scales)):
            lf0_rec[:,i] = Wavelet_lf0[:,i]*((i+200+2.5)**(-2.5))
        lf0_rec_sum = np.sum(lf0_rec,axis = 1)
        lf0_rec_sum_norm = preprocessing.scale(lf0_rec_sum)
        f0_reconstructed = (torch.Tensor(lf0_rec_sum_norm).cuda()*f0_std) + f0_mean
        f0_reconstructed = torch.exp(f0_reconstructed)
        #print(f0_reconstructed.shape)
        #print(f0_reconstructed.shape)
        return f0_reconstructed.reshape(1,-1)
class PitchPredictor(torch.nn.Module):
def __init__(
self,
idim,
n_layers=2,
n_chans=384,
kernel_size=3,
dropout_rate=0.1,
offset=1.0,
min=0,
max=0,
n_bins=256,
out=10,
):
"""Initilize pitch predictor module.
Args:
idim (int): Input dimension.
n_layers (int, optional): Number of convolutional layers.
n_chans (int, optional): Number of channels of convolutional layers.
kernel_size (int, optional): Kernel size of convolutional layers.
dropout_rate (float, optional): Dropout rate.
offset (float, optional): Offset value to avoid nan in log domain.
"""
super(PitchPredictor, self).__init__()
# self.bins = torch.exp(torch.linspace(torch.log(torch.tensor(min)), torch.log(torch.tensor(max)), n_bins - 1)).cuda()
self.register_buffer(
"pitch_bins",
torch.exp(
torch.linspace(
torch.log(torch.tensor(min)),
torch.log(torch.tensor(max)),
n_bins - 1,
)
),
)
self.offset = offset
self.conv = torch.nn.ModuleList()
for idx in range(n_layers):
in_chans = idim if idx == 0 else n_chans
self.conv += [
torch.nn.Sequential(
torch.nn.Conv1d(
in_chans,
n_chans,
kernel_size,
stride=1,
padding=(kernel_size - 1) // 2,
),
torch.nn.ReLU(),
LayerNorm(n_chans),
torch.nn.Dropout(dropout_rate),
)
]
self.spectrogram_out = torch.nn.Linear(n_chans, out)
self.mean = torch.nn.Linear(n_chans, 1)
self.std = torch.nn.Linear(n_chans, 1)
self.out = out
def forward(self, xs: torch.Tensor, olens: torch.Tensor, x_masks: torch.Tensor):
"""Calculate forward propagation.
Args:
xs (Tensor): Batch of input sequences (B, Tmax, idim).
x_masks (ByteTensor, optional): Batch of masks indicating padded part (B, Tmax).
Returns:
Tensor: Batch of predicted durations in log domain (B, Tmax).
"""
xs = xs.transpose(1, -1) # (B, idim, Tmax)
for f in self.conv:
xs = f(xs) # (B, C, Tmax)
# NOTE: calculate in log domain
xs = xs.transpose(1, -1)
f0_spec = self.spectrogram_out(xs) # (B, Tmax, 10)
if x_masks is not None:
# print("olen:", olens)
#f0_spec = f0_spec.transpose(1, -1)
# print("F0 spec dimension:", f0_spec.shape)
# print("x_masks dimension:", x_masks.shape)
f0_spec = f0_spec.masked_fill(x_masks, 0.0)
#f0_spec = f0_spec.transpose(1, -1)
# print("F0 spec dimension:", f0_spec.shape)
#xs = xs.transpose(1, -1)
xs = xs.masked_fill(x_masks, 0.0)
#xs = xs.transpose(1, -1)
# print("xs dimension:", xs.shape)
x_avg = xs.sum(dim=1).squeeze(1)
# print(x_avg)
# print("xs dim :", x_avg.shape)
# print("olens ;", olens.shape)
if olens is not None:
x_avg = x_avg / olens.unsqueeze(1)
# print(x_avg)
f0_mean = self.mean(x_avg).squeeze(-1)
f0_std = self.std(x_avg).squeeze(-1)
# if x_masks is not None:
# f0_spec = f0_spec.masked_fill(x_masks, 0.0)
#print(f0_spec.shape, f0_mean.shape, f0_std.shape, "Pitch Predictor Output")
return f0_spec, f0_mean, f0_std
def inference(self, xs: torch.Tensor, olens = None, alpha: float = 1.0):
"""Inference duration.
Args:
xs (Tensor): Batch of input sequences (B, Tmax, idim).
x_masks (ByteTensor, optional): Batch of masks indicating padded part (B, Tmax).
Returns:
LongTensor: Batch of predicted durations in linear domain (B, Tmax).
"""
f0_spec, f0_mean, f0_std = self.forward(xs, olens, x_masks=None) # (B, Tmax, 10)
#print(f0_spec)
f0_reconstructed = self.inverse(f0_spec, f0_mean, f0_std)
#print(f0_reconstructed)
#f0_reconstructed = torch.from_numpy(np.load("/results/chkpts/LJ/Fastspeech2_V2/data/pitch/LJ001-0001.npy").reshape(1,-1)).cuda()
#print(f0_reconstructed, "Pitch coef output")
return self.to_one_hot(f0_reconstructed)
def to_one_hot(self, x: torch.Tensor):
# e = de_norm_mean_std(e, hp.e_mean, hp.e_std)
# For pytorch > = 1.6.0
quantize = torch.bucketize(x, self.pitch_bins).to(device=x.device) # .cuda()
return F.one_hot(quantize.long(), 256).float()
def inverse(self, Wavelet_lf0, f0_mean, f0_std):
    """Reconstruct a linear-domain F0 contour from its wavelet representation.

    Args:
        Wavelet_lf0 (Tensor): Wavelet-domain log-F0 spectrogram — assumed
            shape (1, T, num_scales); TODO confirm against the caller.
        f0_mean (Tensor): Predicted utterance-level log-F0 mean.
        f0_std (Tensor): Predicted utterance-level log-F0 std.

    Returns:
        Tensor: Reconstructed F0 contour of shape (1, T) on the GPU.
    """
    mother = wavelet.MexicanHat()
    # CWT parameters; only the scale vector is used below, obtained by
    # running the transform on a dummy length-10 ones signal.
    dt = 0.005
    dj = 2
    s0 = dt*2
    J = self.out - 1
    _, scales, _, _, _, _ = wavelet.cwt(np.ones([10]), dt, dj, s0, J, mother)
    Wavelet_lf0 = Wavelet_lf0.squeeze(0).cpu().numpy()
    lf0_rec = np.zeros([Wavelet_lf0.shape[0], len(scales)])
    # Weight each scale before summing. NOTE(review): CWT-based F0
    # reconstruction typically weights by (i + 2.5) ** (-2.5); the extra
    # "+200" here looks suspicious — confirm it is intentional.
    for i in range(0,len(scales)):
        lf0_rec[:,i] = Wavelet_lf0[:,i]*((i+200+2.5)**(-2.5))
    lf0_rec_sum = np.sum(lf0_rec,axis = 1)
    # Re-standardize the summed contour, restore the predicted utterance
    # statistics, then leave the log domain.
    lf0_rec_sum_norm = preprocessing.scale(lf0_rec_sum)
    # NOTE(review): hard-coded .cuda() breaks CPU-only inference.
    f0_reconstructed = (torch.Tensor(lf0_rec_sum_norm).cuda()*f0_std) + f0_mean
    f0_reconstructed = torch.exp(f0_reconstructed)
    return f0_reconstructed.reshape(1,-1)
class PitchPredictorLoss(torch.nn.Module):
    """Loss function module for the pitch predictor.

    Computes a plain mean-squared error between predictions and targets.
    The log-domain conversion inherited from the duration predictor has
    been disabled; ``offset`` is retained for interface compatibility.
    """

    def __init__(self, offset=1.0):
        """Initialize the pitch predictor loss module.

        Args:
            offset (float, optional): Offset value to avoid nan in log
                domain. Unused while the log conversion stays disabled.
        """
        super().__init__()
        self.criterion = torch.nn.MSELoss()
        self.offset = offset

    def forward(self, outputs, targets):
        """Calculate forward propagation.

        Args:
            outputs (Tensor): Batch of predicted pitch values (B, T).
            targets (Tensor): Batch of ground-truth pitch values (B, T).

        Returns:
            Tensor: Mean squared error loss value.
        """
        return self.criterion(outputs, targets)
class EnergyPredictorLoss(torch.nn.Module):
    """Loss function module for the energy predictor.

    Computes a plain mean-squared error between predictions and targets.
    The log-domain conversion inherited from the duration predictor has
    been disabled; ``offset`` is retained for interface compatibility.
    """

    def __init__(self, offset=1.0):
        """Initialize the energy predictor loss module.

        Args:
            offset (float, optional): Offset value to avoid nan in log
                domain. Unused while the log conversion stays disabled.
        """
        super().__init__()
        self.criterion = torch.nn.MSELoss()
        self.offset = offset

    def forward(self, outputs, targets):
        """Calculate forward propagation.

        Args:
            outputs (Tensor): Batch of predicted energy values (B, T).
            targets (Tensor): Batch of ground-truth energy values (B, T).

        Returns:
            Tensor: Mean squared error loss value.
        """
        return self.criterion(outputs, targets)
| 34.423313
| 137
| 0.554922
| 2,135
| 16,833
| 4.222482
| 0.101171
| 0.02396
| 0.021963
| 0.01442
| 0.878758
| 0.869884
| 0.85746
| 0.844481
| 0.820521
| 0.820521
| 0
| 0.031826
| 0.329888
| 16,833
| 488
| 138
| 34.493852
| 0.767376
| 0.359591
| 0
| 0.718367
| 0
| 0
| 0.002102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073469
| false
| 0
| 0.028571
| 0
| 0.17551
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50f75c338e0bc24cd380ba90a26460b67bdadd3f
| 4,824
|
py
|
Python
|
function/set_ops.py
|
facebookresearch/task_bench
|
1a75797d635d2b2e79336b5c02af654f1bec7013
|
[
"CC0-1.0"
] | 1
|
2022-03-20T22:09:25.000Z
|
2022-03-20T22:09:25.000Z
|
function/set_ops.py
|
facebookresearch/task_bench
|
1a75797d635d2b2e79336b5c02af654f1bec7013
|
[
"CC0-1.0"
] | null | null | null |
function/set_ops.py
|
facebookresearch/task_bench
|
1a75797d635d2b2e79336b5c02af654f1bec7013
|
[
"CC0-1.0"
] | null | null | null |
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
from function import Function, FUNCTION_REGISTRY, WordFunction
class SetIntersect(WordFunction):
    """Set intersection: maps set -> set."""

    def __init__(self, fn_tree, inner_fns, **kwargs):
        super().__init__(fn_tree=fn_tree, inner_fns=inner_fns)
        assert len(self.inner_fns) <= 2

    @classmethod
    def get_func_name(cls):
        return ['intersection']

    def to_nl(self):
        # Render both operands and join them with "and".
        parts = [fn.to_nl() for fn in self.inner_fns]
        assert len(parts) == 2
        return f"{parts[0]} and {parts[1]}"

    def __call__(self, inputs: list=None):
        computed = self.compute_inner_fns(inputs)
        assert len(computed) == 2
        left, right = set(computed[0]), set(computed[1])
        return {'out': left & right, 'inner': computed}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(fn_tree=fn_tree, inner_fns=inner_fns, **kwargs)
class SetUnion(WordFunction):
    """Set union: maps set -> set."""

    def __init__(self, fn_tree, inner_fns, **kwargs):
        super().__init__(fn_tree=fn_tree, inner_fns=inner_fns)
        assert len(self.inner_fns) <= 2

    @classmethod
    def get_func_name(cls):
        return ['union']

    def to_nl(self):
        # Render both operands and join them with "or".
        parts = [fn.to_nl() for fn in self.inner_fns]
        assert len(parts) == 2
        return f"{parts[0]} or {parts[1]}"

    def __call__(self, inputs: list=None):
        computed = self.compute_inner_fns(inputs)
        assert len(computed) == 2
        left, right = set(computed[0]), set(computed[1])
        return {'out': left | right, 'inner': computed}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(fn_tree=fn_tree, inner_fns=inner_fns, **kwargs)
class SetComplement(WordFunction):
    """Set complement relative to the universe: maps set -> set."""

    def __init__(self, fn_tree, inner_fns, **kwargs):
        # super().__init__ already stores inner_fns (other subclasses rely
        # on self.inner_fns after it), so no re-assignment is needed.
        super().__init__(fn_tree=fn_tree, inner_fns=inner_fns)

    @classmethod
    def get_func_name(cls):
        return ['complement']

    def __call__(self, inputs: list=None):
        # BUGFIX: removed a leftover breakpoint() debug call that halted
        # execution on every invocation.
        inputs = self.compute_inner_fns(inputs)
        assert len(inputs) == 1
        return {'out': (self.get_universe() - set(inputs[0])), 'inner': inputs}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(fn_tree=fn_tree, inner_fns=inner_fns, **kwargs)
class Subset(Function):
    """Subset test: maps [set, set] -> bool (wrapped in a singleton set)."""

    def __init__(self, inner_fns, **kwargs):
        self.inner_fns = inner_fns

    @classmethod
    def get_func_name(cls):
        return ['subset']

    def __call__(self, inputs: list = None):
        computed = self.compute_inner_fns(inputs)
        assert len(computed) == 2
        # set <= set is the operator form of issubset.
        is_subset = set(computed[0]) <= set(computed[1])
        return {'out': {is_subset}, 'inner': computed}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(inner_fns)
class SetEquals(Function):
    """Set equality test: maps [set, set] -> bool (wrapped in a singleton set)."""

    def __init__(self, inner_fns, **kwargs):
        self.inner_fns = inner_fns

    @classmethod
    def get_func_name(cls):
        return ['eq']

    def __call__(self, inputs: list = None):
        computed = self.compute_inner_fns(inputs)
        assert len(computed) == 2
        are_equal = set(computed[0]) == set(computed[1])
        return {'out': {are_equal}, 'inner': computed}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(inner_fns)
class IsIn(Function):
    """Membership test: maps [element, set] -> bool."""

    def __init__(self, fn_tree, inner_fns, **kwargs):
        super().__init__(fn_tree=fn_tree, inner_fns=inner_fns)

    @classmethod
    def get_func_name(cls):
        return ['in']

    def to_nl(self):
        inner_nls = [inner_fn.to_nl() for inner_fn in self.inner_fns]
        assert len(inner_nls) == 2
        return f"{inner_nls[1]} include {inner_nls[0]}"

    def __call__(self, inputs: list = None):
        inputs = self.compute_inner_fns(inputs)
        assert len(inputs) == 2
        assert len(inputs[0]) == 1
        # Idiomatic single-element extraction (replaces the old
        # for-loop-with-break, which also leaked its loop variable).
        el = next(iter(inputs[0]))
        return {'out': el in inputs[1], 'inner': inputs}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(fn_tree=fn_tree, inner_fns=inner_fns)
class IsEmpty(Function):
    """Emptiness test: maps set -> bool."""

    def __init__(self, inner_fns, **kwargs):
        self.inner_fns = inner_fns

    @classmethod
    def get_func_name(cls):
        return ['empty']

    def __call__(self, inputs: list=None):
        computed = self.compute_inner_fns(inputs)
        assert len(computed) == 1
        return {'out': len(computed[0]) == 0, 'inner': computed}

    @classmethod
    def build(cls, fn_tree, inner_fns, **kwargs):
        return cls(inner_fns)
| 27.884393
| 84
| 0.614635
| 644
| 4,824
| 4.295031
| 0.111801
| 0.15329
| 0.07556
| 0.096168
| 0.858279
| 0.855748
| 0.84671
| 0.84671
| 0.84671
| 0.84671
| 0
| 0.009098
| 0.248134
| 4,824
| 172
| 85
| 28.046512
| 0.753515
| 0.044154
| 0
| 0.736364
| 0
| 0
| 0.044405
| 0
| 0
| 0
| 0
| 0
| 0.118182
| 1
| 0.281818
| false
| 0
| 0.009091
| 0.127273
| 0.572727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
0faffdbb6ebebbe7b50afac21592165fe57f89f1
| 30,754
|
py
|
Python
|
google-datacatalog-qlik-connector/src/google/datacatalog_connectors/qlik/prepare/datacatalog_tag_template_factory.py
|
anugrah86/datacatalog-connectors-bi
|
58cc57e12632cbd1e237b3d6930e519333c51f4e
|
[
"Apache-2.0"
] | 27
|
2020-04-27T21:55:49.000Z
|
2022-02-18T22:09:13.000Z
|
google-datacatalog-qlik-connector/src/google/datacatalog_connectors/qlik/prepare/datacatalog_tag_template_factory.py
|
anugrah86/datacatalog-connectors-bi
|
58cc57e12632cbd1e237b3d6930e519333c51f4e
|
[
"Apache-2.0"
] | 36
|
2020-05-01T15:26:14.000Z
|
2022-03-26T00:09:19.000Z
|
google-datacatalog-qlik-connector/src/google/datacatalog_connectors/qlik/prepare/datacatalog_tag_template_factory.py
|
anugrah86/datacatalog-connectors-bi
|
58cc57e12632cbd1e237b3d6930e519333c51f4e
|
[
"Apache-2.0"
] | 18
|
2020-04-30T22:14:09.000Z
|
2022-01-13T10:28:03.000Z
|
#!/usr/bin/python
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.cloud import datacatalog
from google.datacatalog_connectors.commons import prepare
from google.datacatalog_connectors.qlik.prepare import \
constants, dynamic_properties_helper as dph
class DataCatalogTagTemplateFactory(prepare.BaseTagTemplateFactory):
    """Creates Data Catalog Tag Templates describing Qlik Sense assets.

    Each ``make_tag_template_for_*`` method returns a
    ``datacatalog.TagTemplate``. Field declarations are data-driven: every
    template lists its fields once, top to bottom, and ``order`` values are
    assigned descending so the first spec gets the highest order. This
    replaces ~450 lines of copy-pasted ``_add_primitive_type_field`` calls.
    """

    __ASCII_CHARACTER_ENCODING = 'ASCII'

    __BOOL_TYPE = datacatalog.FieldType.PrimitiveType.BOOL
    __DOUBLE_TYPE = datacatalog.FieldType.PrimitiveType.DOUBLE
    __STRING_TYPE = datacatalog.FieldType.PrimitiveType.STRING
    __TIMESTAMP_TYPE = datacatalog.FieldType.PrimitiveType.TIMESTAMP

    def __init__(self, project_id, location_id):
        self.__project_id = project_id
        self.__location_id = location_id

    def __make_tag_template(self, template_id, display_name, field_specs):
        """Assemble a TagTemplate from a list of field specs.

        Args:
            template_id: Tag Template id used to build the resource name.
            display_name: Human-readable template name.
            field_specs: sequence of
                ``(field_id, type_or_enum_values, display_name, is_required)``
                tuples; a list/tuple in the type slot declares an enum field.

        Returns:
            datacatalog.TagTemplate: the assembled template.
        """
        tag_template = datacatalog.TagTemplate()
        tag_template.name = datacatalog.DataCatalogClient.tag_template_path(
            project=self.__project_id,
            location=self.__location_id,
            tag_template=template_id)
        tag_template.display_name = display_name

        total = len(field_specs)
        for position, spec in enumerate(field_specs):
            field_id, field_type, field_display_name, is_required = spec
            # Orders in every template run contiguously from N down to 1.
            order = total - position
            if isinstance(field_type, (list, tuple)):
                # Enum fields carry their allowed values instead of a type;
                # none of the original enum fields were marked required.
                self._add_enum_type_field(tag_template=tag_template,
                                          field_id=field_id,
                                          values=list(field_type),
                                          display_name=field_display_name,
                                          order=order)
            else:
                # NOTE(review): assumes the base method's is_required
                # default is False, so passing it explicitly is equivalent
                # to omitting it — confirm in BaseTagTemplateFactory.
                self._add_primitive_type_field(
                    tag_template=tag_template,
                    field_id=field_id,
                    field_type=field_type,
                    display_name=field_display_name,
                    is_required=is_required,
                    order=order)
        return tag_template

    def make_tag_template_for_app(self):
        """Tag Template for Qlik App metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_APP, 'Qlik App Metadata', [
                ('id', self.__STRING_TYPE, 'Unique Id', True),
                ('owner_username', self.__STRING_TYPE, 'Owner username',
                 False),
                ('owner_name', self.__STRING_TYPE, 'Owner name', False),
                ('modified_by_username', self.__STRING_TYPE,
                 'Username who modified it', False),
                ('published', self.__BOOL_TYPE, 'Published', True),
                ('publish_time', self.__TIMESTAMP_TYPE, 'Publish time',
                 False),
                ('last_reload_time', self.__TIMESTAMP_TYPE,
                 'Last reload time', False),
                ('stream_id', self.__STRING_TYPE, 'Stream Id', True),
                ('stream_name', self.__STRING_TYPE, 'Stream name', True),
                ('stream_entry', self.__STRING_TYPE,
                 'Data Catalog Entry for the Stream', False),
                ('file_size', self.__STRING_TYPE, 'File size', False),
                ('thumbnail', self.__STRING_TYPE, 'Thumbnail', False),
                ('saved_in_product_version', self.__STRING_TYPE,
                 'Saved in product version', False),
                ('migration_hash', self.__STRING_TYPE, 'Migration hash',
                 False),
                ('availability_status', self.__DOUBLE_TYPE,
                 'Availability status', False),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])

    def make_tag_template_for_custom_property_definition(self):
        """Tag Template for Qlik Custom Property Definition metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_CUSTOM_PROPERTY_DEFINITION,
            'Qlik Custom Property Definition Metadata', [
                ('id', self.__STRING_TYPE, 'Unique Id', True),
                ('modified_by_username', self.__STRING_TYPE,
                 'Username who modified it', False),
                ('value_type', self.__STRING_TYPE, 'Value type', False),
                ('choice_values', self.__STRING_TYPE, 'Choice values',
                 False),
                ('object_types', self.__STRING_TYPE, 'Object types', False),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])

    def make_tag_template_for_custom_property_value(self, definition_metadata,
                                                    value):
        """Tag Template for one value of a Qlik Custom Property.

        Template id and display name are derived from the property
        definition metadata and the value itself.
        """
        template_id = dph.DynamicPropertiesHelper\
            .make_id_for_custom_property_value_tag_template(
                definition_metadata, value)
        display_name = dph.DynamicPropertiesHelper\
            .make_display_name_for_custom_property_value_tag_template(
                definition_metadata, value)
        return self.__make_tag_template(template_id, display_name, [
            ('id', self.__STRING_TYPE, 'Unique Id', True),
            ('created_date', self.__TIMESTAMP_TYPE, 'Created date', False),
            ('modified_date', self.__TIMESTAMP_TYPE, 'Modified date', False),
            ('modified_by_username', self.__STRING_TYPE,
             'Username who modified it', False),
            ('value', self.__STRING_TYPE, 'Value', True),
            ('property_definition_id', self.__STRING_TYPE,
             'Property Definition Id', True),
            # According to the Qlik Analytics Platform Architecture Team,
            # there was no way of searching assets by the Custom Property
            # values using Qlik when this feature was implemented (Dec,
            # 2020), so catalog search may address that use case. Hence the
            # 'definition_' part was suppressed from this Tag Field Id to
            # keep search queries intuitive,
            # e.g. tag:property_name:<PROPERTY-NAME>.
            ('property_name', self.__STRING_TYPE,
             'Property Definition name', True),
            ('property_definition_entry', self.__STRING_TYPE,
             'Data Catalog Entry for the Property Definition', True),
            ('site_url', self.__STRING_TYPE, 'Qlik Sense site url', True),
        ])

    def make_tag_template_for_dimension(self):
        """Tag Template for Qlik Dimension metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_DIMENSION, 'Qlik Dimension Metadata', [
                ('id', self.__STRING_TYPE, 'Unique Id', True),
                ('grouping',
                 [constants.DIMENSION_GROUPING_SINGLE_TAG_FIELD,
                  constants.DIMENSION_GROUPING_DRILL_DOWN_TAG_FIELD],
                 'Grouping', False),
                ('fields', self.__STRING_TYPE, 'Fields', False),
                ('field_labels', self.__STRING_TYPE, 'Field labels', False),
                ('tags', self.__STRING_TYPE, 'Qlik tags', False),
                ('app_id', self.__STRING_TYPE, 'App Id', True),
                ('app_name', self.__STRING_TYPE, 'App name', True),
                ('app_entry', self.__STRING_TYPE,
                 'Data Catalog Entry for the App', True),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])

    def make_tag_template_for_measure(self):
        """Tag Template for Qlik Measure metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_MEASURE, 'Qlik Measure Metadata', [
                # 'id' is intentionally not required here, matching the
                # original template definition.
                ('id', self.__STRING_TYPE, 'Unique Id', False),
                ('expression', self.__STRING_TYPE, 'Expression', False),
                ('label_expression', self.__STRING_TYPE, 'Label expression',
                 False),
                ('is_custom_formatted', self.__BOOL_TYPE,
                 'Is custom formatted', False),
                ('tags', self.__STRING_TYPE, 'Qlik tags', False),
                ('app_id', self.__STRING_TYPE, 'App Id', True),
                ('app_name', self.__STRING_TYPE, 'App name', True),
                ('app_entry', self.__STRING_TYPE,
                 'Data Catalog Entry for the App', True),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])

    def make_tag_template_for_sheet(self):
        """Tag Template for Qlik Sheet metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_SHEET, 'Qlik Sheet Metadata', [
                ('id', self.__STRING_TYPE, 'Unique Id', True),
                ('owner_username', self.__STRING_TYPE, 'Owner username',
                 False),
                ('owner_name', self.__STRING_TYPE, 'Owner name', False),
                ('published', self.__BOOL_TYPE, 'Published', False),
                ('publish_time', self.__TIMESTAMP_TYPE, 'Publish time',
                 False),
                ('approved', self.__BOOL_TYPE, 'Approved', False),
                ('app_id', self.__STRING_TYPE, 'App Id', True),
                ('app_name', self.__STRING_TYPE, 'App name', True),
                ('app_entry', self.__STRING_TYPE,
                 'Data Catalog Entry for the App', True),
                ('source_object', self.__STRING_TYPE, 'Source object',
                 False),
                ('draft_object', self.__STRING_TYPE, 'Draft object', False),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])

    def make_tag_template_for_stream(self):
        """Tag Template for Qlik Stream metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_STREAM, 'Qlik Stream Metadata', [
                ('id', self.__STRING_TYPE, 'Unique Id', True),
                ('owner_username', self.__STRING_TYPE, 'Owner username',
                 False),
                ('owner_name', self.__STRING_TYPE, 'Owner name', False),
                ('modified_by_username', self.__STRING_TYPE,
                 'Username who modified it', False),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])

    def make_tag_template_for_visualization(self):
        """Tag Template for Qlik Visualization metadata."""
        return self.__make_tag_template(
            constants.TAG_TEMPLATE_ID_VISUALIZATION,
            'Qlik Visualization Metadata', [
                ('id', self.__STRING_TYPE, 'Unique Id', True),
                ('title', self.__STRING_TYPE, 'Title', False),
                ('subtitle', self.__STRING_TYPE, 'Subtitle', False),
                ('footnote', self.__STRING_TYPE, 'Footnote', False),
                ('type', self.__STRING_TYPE, 'Type', False),
                ('tags', self.__STRING_TYPE, 'Qlik tags', False),
                ('app_id', self.__STRING_TYPE, 'App Id', True),
                ('app_name', self.__STRING_TYPE, 'App name', True),
                ('app_entry', self.__STRING_TYPE,
                 'Data Catalog Entry for the App', True),
                ('site_url', self.__STRING_TYPE, 'Qlik Sense site url',
                 True),
            ])
| 47.828927
| 79
| 0.462899
| 2,585
| 30,754
| 5.027079
| 0.086267
| 0.183686
| 0.070181
| 0.116968
| 0.82601
| 0.806387
| 0.804694
| 0.770989
| 0.729357
| 0.712736
| 0
| 0.006219
| 0.482409
| 30,754
| 642
| 80
| 47.903427
| 0.810152
| 0.031541
| 0
| 0.77712
| 0
| 0
| 0.066996
| 0.002386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017751
| false
| 0
| 0.005917
| 0
| 0.051282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0fcf1a0bafc2912fd8f2447451e119fd9ee771d9
| 201
|
py
|
Python
|
model/model_helper/__init__.py
|
IDNatte/mygamelist
|
6b40f56b84e2246793e26da9aecc800b6698e1ba
|
[
"CNRI-Python",
"RSA-MD"
] | null | null | null |
model/model_helper/__init__.py
|
IDNatte/mygamelist
|
6b40f56b84e2246793e26da9aecc800b6698e1ba
|
[
"CNRI-Python",
"RSA-MD"
] | null | null | null |
model/model_helper/__init__.py
|
IDNatte/mygamelist
|
6b40f56b84e2246793e26da9aecc800b6698e1ba
|
[
"CNRI-Python",
"RSA-MD"
] | null | null | null |
"""
Model Helper function
"""
import random
import secrets
import string
def random_id_generator(length: int = 50) -> str:
    """Return a random identifier string.

    Args:
        length: Number of characters to generate (default 50, matching the
            original hard-coded length).

    Returns:
        A string of ``length`` characters drawn from ASCII letters, digits
        and punctuation.

    Uses ``secrets.choice`` rather than ``random.choice`` so identifiers
    are generated from a cryptographically secure source — appropriate for
    anything that might be used as a token or key.
    """
    alphabet = string.ascii_letters + string.digits + string.punctuation
    return ''.join(secrets.choice(alphabet) for _ in range(length))
| 20.1
| 113
| 0.736318
| 27
| 201
| 5.37037
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011494
| 0.134328
| 201
| 9
| 114
| 22.333333
| 0.821839
| 0.104478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ba0b2e588c5e833be2813ee75f784b61bf58bfb0
| 6,198
|
py
|
Python
|
tests/image_window_dataset_test.py
|
Min-Sheng/NiftyNet
|
13ff54018d3ea282b94af94b6bce7bd67c0d7cc5
|
[
"Apache-2.0"
] | 2
|
2019-03-25T18:50:47.000Z
|
2019-10-10T01:45:02.000Z
|
tests/image_window_dataset_test.py
|
Min-Sheng/NiftyNet
|
13ff54018d3ea282b94af94b6bce7bd67c0d7cc5
|
[
"Apache-2.0"
] | null | null | null |
tests/image_window_dataset_test.py
|
Min-Sheng/NiftyNet
|
13ff54018d3ea282b94af94b6bce7bd67c0d7cc5
|
[
"Apache-2.0"
] | 1
|
2018-11-11T09:49:38.000Z
|
2018-11-11T09:49:38.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import os
import numpy as np
import tensorflow as tf
from niftynet.io.image_reader import ImageReader
from niftynet.contrib.dataset_sampler.image_window_dataset import \
ImageWindowDataset
# Fixture directories: 2-D GAN demo images and 3-D test volumes
# (paths are relative to the repository root the tests run from).
IMAGE_PATH_2D_1 = os.path.join('.', 'example_volumes', 'gan_test_data')
IMAGE_PATH_3D = os.path.join('.', 'testing_data')
def get_2d_reader():
    """Build an ImageReader over the 2-D test images under IMAGE_PATH_2D_1."""
    params = {'mr': {'path_to_search': IMAGE_PATH_2D_1}}
    return ImageReader().initialise(params)
def get_3d_reader():
    """Build an ImageReader over the FLAIR 3-D volumes under IMAGE_PATH_3D."""
    params = {
        'mr': {
            'path_to_search': IMAGE_PATH_3D,
            'filename_contains': 'FLAIR',
            'interp_order': 1,
        }
    }
    return ImageReader().initialise(params)
class ImageWindowDataset_2D_Test(tf.test.TestCase):
    """Exercises ImageWindowDataset over the 2-D test images."""

    def assert_window(self, window):
        """Check shape, location row and dtypes of one sampled 2-D window."""
        self.assertEqual(window['mr'].shape[1:3], (120, 160))
        self.assertEqual(window['mr_location'][0, 1:].tolist(),
                         [0, 0, 0, 120, 160, 1])
        self.assertEqual(window['mr'].dtype, np.float32)
        self.assertEqual(window['mr_location'].dtype, np.int32)

    def assert_tf_window(self, sampler):
        """Pop one batch through the TF session and validate it."""
        with self.test_session() as sess:
            sampler.run_threads(sess)
            window = sess.run(sampler.pop_batch_op())
            self.assert_window(window)

    def test_simple(self):
        sampler = ImageWindowDataset(reader=get_2d_reader())
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_batch_size(self):
        # batch size doesn't change the numpy interface
        sampler = ImageWindowDataset(reader=get_2d_reader(), batch_size=2)
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_window_size(self):
        sampler = ImageWindowDataset(reader=get_2d_reader(),
                                     window_sizes=(0, 0, 0), batch_size=2)
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_window_size_dict(self):
        sampler = ImageWindowDataset(reader=get_2d_reader(),
                                     window_sizes={'mr': (0, 0, 0)},
                                     batch_size=2)
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    # sampler layer_op()'s output shape is not checked

    def test_windows_per_image(self):
        # assertRaisesRegex replaces the long-deprecated assertRaisesRegexp.
        with self.assertRaisesRegex(AssertionError, ''):
            sampler = ImageWindowDataset(reader=get_2d_reader(), batch_size=2,
                                         windows_per_image=2)
            self.assert_window(sampler())

    def test_epoch(self):
        reader = get_2d_reader()
        batch_size = 3
        sampler = ImageWindowDataset(reader=reader, batch_size=batch_size,
                                     epoch=1)
        with self.test_session() as sess:
            sampler.run_threads(sess)
            iters = 0
            try:
                for _ in range(400):
                    sess.run(sampler.pop_batch_op())
                    iters = iters + 1
            except tf.errors.OutOfRangeError:
                pass
            # one epoch needs ceil(num_subjects / batch_size) batches.
            # np.float was removed in NumPy >= 1.20; plain float() works.
            self.assertEqual(
                np.ceil(reader.num_subjects / float(batch_size)), iters)
class ImageWindowDataset_3D_Test(tf.test.TestCase):
    """Exercises ImageWindowDataset over the 3-D FLAIR test volumes."""

    def assert_window(self, window):
        """Check shape, location row and dtypes of one sampled 3-D window."""
        self.assertEqual(window['mr'].shape[1:4], (256, 168, 256))
        self.assertEqual(window['mr_location'][0, 1:].tolist(),
                         [0, 0, 0, 256, 168, 256])
        self.assertEqual(window['mr'].dtype, np.float32)
        self.assertEqual(window['mr_location'].dtype, np.int32)

    def assert_tf_window(self, sampler):
        """Pop one batch through the TF session and validate it."""
        with self.test_session() as sess:
            sampler.run_threads(sess)
            window = sess.run(sampler.pop_batch_op())
            self.assert_window(window)

    def test_simple(self):
        sampler = ImageWindowDataset(reader=get_3d_reader())
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_batch_size(self):
        # batch size doesn't change the numpy interface
        sampler = ImageWindowDataset(reader=get_3d_reader(), batch_size=2)
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_window_size(self):
        sampler = ImageWindowDataset(reader=get_3d_reader(),
                                     window_sizes=(0, 0, 0), batch_size=2)
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_window_size_dict(self):
        sampler = ImageWindowDataset(reader=get_3d_reader(),
                                     window_sizes={'mr': (0, 0, 0)},
                                     batch_size=2)
        self.assert_tf_window(sampler)
        self.assert_window(sampler())

    def test_windows_per_image(self):
        # assertRaisesRegex replaces the long-deprecated assertRaisesRegexp.
        with self.assertRaisesRegex(AssertionError, ''):
            sampler = ImageWindowDataset(reader=get_3d_reader(), batch_size=2,
                                         windows_per_image=2)
            self.assert_window(sampler())

    def test_epoch(self):
        reader = get_3d_reader()
        batch_size = 3
        sampler = ImageWindowDataset(reader=reader, batch_size=batch_size,
                                     epoch=1)
        with self.test_session() as sess:
            sampler.run_threads(sess)
            iters = 0
            try:
                for _ in range(400):
                    sess.run(sampler.pop_batch_op())
                    iters = iters + 1
            except tf.errors.OutOfRangeError:
                pass
            # one epoch needs ceil(num_subjects / batch_size) batches.
            # np.float was removed in NumPy >= 1.20; plain float() works.
            self.assertEqual(
                np.ceil(reader.num_subjects / float(batch_size)), iters)
if __name__ == "__main__":
    # Discover and run all tf.test.TestCase classes defined in this module.
    tf.test.main()
| 37.792683
| 79
| 0.591642
| 714
| 6,198
| 4.868347
| 0.163866
| 0.059551
| 0.115938
| 0.107595
| 0.85817
| 0.854718
| 0.853567
| 0.847238
| 0.819045
| 0.790276
| 0
| 0.02875
| 0.30413
| 6,198
| 163
| 80
| 38.02454
| 0.777185
| 0.073572
| 0
| 0.752
| 0
| 0
| 0.030017
| 0
| 0
| 0
| 0
| 0
| 0.288
| 1
| 0.144
| false
| 0.016
| 0.048
| 0
| 0.224
| 0.008
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba1e592815ac539f506518a660e63c3c6f1a6077
| 68,578
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/SystemIPC/cmp_lu.cont/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/SystemIPC/cmp_lu.cont/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/SystemIPC/cmp_lu.cont/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.125201,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.301027,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.668056,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.826149,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.43059,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.820485,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 3.07723,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.714191,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 7.51418,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.12621,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0299486,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.263767,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.221488,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.389977,
'Execution Unit/Register Files/Runtime Dynamic': 0.251437,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.671668,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.94412,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.97919,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0044099,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0044099,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0038114,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00145926,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00318169,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0158129,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.04334,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.212922,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.709301,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.723179,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.70455,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0297012,
'L2/Runtime Dynamic': 0.0105028,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 7.68206,
'Load Store Unit/Data Cache/Runtime Dynamic': 3.10802,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.208509,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.208509,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 8.67069,
'Load Store Unit/Runtime Dynamic': 4.34482,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.514148,
'Load Store Unit/StoreQ/Runtime Dynamic': 1.0283,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.182473,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.182918,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.116281,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.873954,
'Memory Management Unit/Runtime Dynamic': 0.299199,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 30.619,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.440319,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0475432,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.426935,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.914797,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 13.2531,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0447887,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.237867,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.239703,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.257582,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.41547,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.209715,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.882768,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.257849,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.75113,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0452851,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0108041,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0949839,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0799033,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.140269,
'Execution Unit/Register Files/Runtime Dynamic': 0.0907074,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.211304,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.613163,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.22988,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0017311,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0017311,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00153784,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000611762,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00114782,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00614785,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0155237,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0768131,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.88597,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.2547,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.260892,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.34161,
'Instruction Fetch Unit/Runtime Dynamic': 0.614076,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0116036,
'L2/Runtime Dynamic': 0.00459448,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.55227,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.11782,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0749007,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0749006,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.90597,
'Load Store Unit/Runtime Dynamic': 1.5621,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.184692,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.369384,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0655479,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0657217,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.303792,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.041755,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.5725,
'Memory Management Unit/Runtime Dynamic': 0.107477,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.1723,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.119124,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0130711,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.130514,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.262709,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.78084,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0422097,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.235842,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.226056,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.241663,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.389793,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.196754,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.82821,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.241735,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.69537,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0427069,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0101364,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0891785,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.074965,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.131885,
'Execution Unit/Register Files/Runtime Dynamic': 0.0851014,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.198429,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.57589,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.13042,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00162292,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00162292,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00144198,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000573759,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00107688,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0057647,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.014545,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0720658,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.58401,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.238953,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.244768,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.02499,
'Instruction Fetch Unit/Runtime Dynamic': 0.576096,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0111863,
'L2/Runtime Dynamic': 0.00459466,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.41068,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.04991,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0703198,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0703199,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.74274,
'Load Store Unit/Runtime Dynamic': 1.46702,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.173397,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.346794,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.061539,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0617068,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.285017,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0391734,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.546839,
'Memory Management Unit/Runtime Dynamic': 0.10088,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.6106,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.112342,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0122703,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.122431,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.247044,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.52606,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0357518,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.23077,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.192371,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.205958,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.332202,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.167685,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.705845,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.206063,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.56548,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0363431,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0086388,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0758828,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0638892,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.112226,
'Execution Unit/Register Files/Runtime Dynamic': 0.072528,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.168804,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.490721,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.90524,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00136891,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00136891,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00121647,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000484127,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000917774,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00487206,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0122619,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0614183,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.90673,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.202575,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.208604,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.31485,
'Instruction Fetch Unit/Runtime Dynamic': 0.489732,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0097511,
'L2/Runtime Dynamic': 0.0039694,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.08159,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.89106,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0596731,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.059673,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.36338,
'Load Store Unit/Runtime Dynamic': 1.24502,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.147144,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.294287,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0522218,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0523678,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.242906,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0332097,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.488723,
'Memory Management Unit/Runtime Dynamic': 0.0855776,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.3317,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0956014,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0104557,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.104383,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.21044,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.93998,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.7706167804022201,
'Runtime Dynamic': 1.7706167804022201,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0935869,
'Runtime Dynamic': 0.0534378,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 88.8271,
'Peak Power': 121.939,
'Runtime Dynamic': 26.5534,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 88.7335,
'Total Cores/Runtime Dynamic': 26.4999,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0935869,
'Total L3s/Runtime Dynamic': 0.0534378,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.030635
| 124
| 0.681939
| 8,082
| 68,578
| 5.7805
| 0.067929
| 0.123635
| 0.113019
| 0.093497
| 0.939253
| 0.930648
| 0.917291
| 0.885761
| 0.861938
| 0.842844
| 0
| 0.131442
| 0.224431
| 68,578
| 914
| 125
| 75.030635
| 0.746931
| 0
| 0
| 0.642232
| 0
| 0
| 0.657709
| 0.04812
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba2564d20d7ae3305a076ea6bfb661e958289202
| 149
|
py
|
Python
|
models/read_vectors/read_vectors.py
|
zankner/DNC
|
2596fa3a1ee9bf0a9f160f3f953639e70bb25fe7
|
[
"MIT"
] | 3
|
2020-03-20T17:46:33.000Z
|
2020-03-23T16:48:59.000Z
|
models/read_vectors/read_vectors.py
|
zankner/DNC
|
2596fa3a1ee9bf0a9f160f3f953639e70bb25fe7
|
[
"MIT"
] | null | null | null |
models/read_vectors/read_vectors.py
|
zankner/DNC
|
2596fa3a1ee9bf0a9f160f3f953639e70bb25fe7
|
[
"MIT"
] | 1
|
2020-03-20T17:55:55.000Z
|
2020-03-20T17:55:55.000Z
|
import tensorflow as tf
def read_vectors(memory_matrix, read_weightings):
return tf.linalg.matmul(tf.transpose(memory_matrix), read_weightings)
| 29.8
| 73
| 0.818792
| 21
| 149
| 5.571429
| 0.666667
| 0.205128
| 0.273504
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100671
| 149
| 4
| 74
| 37.25
| 0.873134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
d75a5189b7e9ca381e0b5d39df53d5d8e0e58c64
| 36
|
py
|
Python
|
devo/common/loadenv/__init__.py
|
imposeren/python-sdk
|
c772da98664a31d86207f86bc005ecb2128e74d7
|
[
"MIT"
] | 18
|
2018-09-25T11:47:28.000Z
|
2021-12-14T20:28:39.000Z
|
devo/common/loadenv/__init__.py
|
imposeren/python-sdk
|
c772da98664a31d86207f86bc005ecb2128e74d7
|
[
"MIT"
] | 57
|
2018-11-08T12:40:30.000Z
|
2022-03-31T13:01:19.000Z
|
devo/common/loadenv/__init__.py
|
imposeren/python-sdk
|
c772da98664a31d86207f86bc005ecb2128e74d7
|
[
"MIT"
] | 34
|
2018-11-05T16:09:15.000Z
|
2022-03-08T10:51:34.000Z
|
from .load_env import load_env_file
| 18
| 35
| 0.861111
| 7
| 36
| 4
| 0.714286
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d7685a296b789b8c5ff51a1c315c62da012bf676
| 23,791
|
py
|
Python
|
OpenGLCffi/FFI/_glxffi.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/FFI/_glxffi.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/FFI/_glxffi.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
# auto-generated file
import _cffi_backend
from xcffib._ffi import ffi as _ffi0
ffi = _cffi_backend.FFI('FFI._glxffi',
_version = 0x2601,
_types = b'\x00\x00\x06\x0D\x00\x02\x42\x03\x00\x00\x00\x0F\x00\x00\x06\x0D\x00\x00\x00\x0F\x00\x00\x51\x0D\x00\x02\x31\x03\x00\x00\x07\x01\x00\x00\x07\x03\x00\x00\x00\x0F\x00\x02\x36\x0D\x00\x00\x06\x11\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x30\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x30\x0D\x00\x00\x06\x11\x00\x02\x4C\x03\x00\x00\x00\x0F\x00\x00\x01\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x01\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x01\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x01\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x14\x09\x00\x00\x2B\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x02\x41\x03\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x08\x11\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x2B\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x40\x11\x00\x00\x40\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x02\x35\x03\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x0D\x09\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x08\x11\x00\x00\x08\x11\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x91\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x02\x53\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\
x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x78\x11\x00\x00\x78\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x01\x00\x00\x8C\x03\x00\x00\x97\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x02\x4D\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\xA2\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x08\x01\x00\x00\x08\x01\x00\x00\x07\x03\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x08\x01\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x08\x01\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x02\x30\x03\x00\x00\x00\x09\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x0E\x09\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x40\x11\x00\x00\x40\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x2B\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x97\x11\x00\x00\x0C\x09\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x02\x45\x03\x00\x00\xEB\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\xF8\x03\x00\x00\xF1\x11\x00\x00\xF1\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x17\x01\x00\
x00\xF1\x11\x00\x00\xF1\x11\x00\x00\xF1\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x17\x01\x00\x00\x17\x01\x00\x00\x17\x01\x00\x00\xF1\x11\x00\x00\xF1\x11\x00\x00\xF1\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x0A\x01\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x07\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x08\x01\x00\x00\x07\x01\x00\x00\x58\x11\x00\x00\x08\x01\x00\x00\x78\x11\x00\x00\x00\x0F\x00\x00\xF8\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x17\x01\x00\x00\x17\x01\x00\x00\x17\x01\x00\x00\x00\x0F\x00\x02\x4B\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x02\x4B\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x02\x4B\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\xAE\x11\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x15\x0D\x00\x00\x06\x11\x00\x00\x30\x11\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x06\x11\x00\x00\x30\x11\x00\x00\xA2\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\x07\x01\x00\x00\xA2\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\xA2\x11\x00\x00\x07\x01\x00\x00\xAE\x11\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x08\x01\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x08\x01\x00\x00\xA2\x11\x00\x00\xAE\x11\x00\x00\x00\x0F\x00\x00\xA2\x0D\x00\x00\x00\x0F\x00\x00\x71\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x91\x0D\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x00\x91\x0D\x00\x00\x08\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\
x00\x91\x0D\x00\x02\x53\x03\x00\x00\x00\x0F\x00\x00\x97\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x30\x11\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x30\x11\x00\x00\x0A\x01\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x07\x01\x00\x00\x17\x09\x00\x00\x16\x09\x00\x00\x07\x01\x00\x00\x15\x09\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\xAE\x11\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\x08\x01\x00\x00\x08\x01\x00\x00\x08\x11\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x15\x11\x00\x00\x0A\x01\x00\x00\xAE\x11\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x0A\x01\x00\x02\x46\x03\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x00\x8C\x0D\x00\x00\x00\x0F\x00\x02\x2B\x0D\x00\x02\x3C\x03\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x00\x0B\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\xA2\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\xA2\x11\x00\x00\x2B\x11\x00\x00\x58\x11\x00\x00\x0F\x09\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x00\xA2\x11\x00\x00\x2B\x11\x00\x00\x58\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x00\x11\x09\x00\x01\xCE\x11\x00\x01\xCE\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\xA2\x11\x00\x00\xA2\x11\x00\x00\x58\x11\x00\x00\x58\x11\x00\x00\x10\x09\x00\x01\xD8\x11\x00\x00\x12\x09\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\xA2\x11\x00\x00\xA2\x11\x00\x00\x2B\x11\x00\x00\x2B\x11\x00\x01\xD8\x11\x00\x01\xD8\x11\x00\x01\xDA\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\xA2\x11\x00\x00\xA2\x11\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\
x00\x0D\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\xAE\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x71\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x97\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x06\x11\x00\x00\x0A\x01\x00\x00\x0A\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\xA2\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x01\xC3\x11\x00\x00\x0B\x09\x00\x00\x58\x11\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x0A\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x02\x53\x0D\x00\x00\x00\x0F\x00\x02\x55\x0D\x00\x00\x06\x11\x00\x00\x00\x0F\x00\x00\x01\x09\x00\x00\x02\x09\x00\x00\x03\x09\x00\x00\x04\x09\x00\x00\x21\x09\x00\x00\x05\x09\x00\x02\x37\x03\x00\x00\x06\x09\x00\x00\x07\x09\x00\x00\x08\x09\x00\x00\x09\x09\x00\x00\x0A\x09\x00\x00\x13\x09\x00\x02\x3E\x03\x00\x00\x18\x09\x00\x02\x40\x03\x00\x00\x19\x09\x00\x00\x1A\x09\x00\x00\x02\x01\x00\x02\x42\x05\x00\x00\x00\x50\x00\x00\x15\x01\x00\x00\x09\x01\x00\x02\x46\x05\x00\x00\x00\x18\x00\x00\x05\x01\x00\x00\x03\x01\x00\x00\x15\x03\x00\x00\x22\x09\x00\x00\x23\x09\x00\x00\x24\x09\x00\x00\x16\x01\x00\x00\x18\x01\x00\x00\x04\x01\x00\x00\x06\x01\x00\x00\x00\x01\x00\x00\x25\x09\x00\x02\x56\x03\x00\x00\x1B\x09\x00\x00\x26\x09\x00\x00\x1C\x09\x00\x00\x1D\x09\x00\x00\x1E\x09\x00\x00\x1F\x09\x00\x00\x27\x09\x00\x00\x28\x09\x00\x00\x29\x09\x00\x00\x2A\x09\x00\x00\x2B\x09\x00\x00\x2C\x09\x00\x00\x2D\x09\x00\x00\x20\x09',
_globals = (b'\xFF\xFF\xFF\x0BXCBOwnsEventQueue',1,b'\x00\x02\x2D\x23XGetXCBConnection',0,b'\x00\x00\x00\x23XOpenDisplay',0,b'\x00\x01\xB6\x23XSetEventQueueOwner',0,b'\xFF\xFF\xFF\x0BXlibOwnsEventQueue',0,b'\x00\x00\xBF\x23glXAssociateDMPbufferSGIX',0,b'\x00\x00\x88\x23glXBindChannelToWindowSGIX',0,b'\x00\x00\x39\x23glXBindHyperpipeSGIX',0,b'\x00\x00\x29\x23glXBindSwapBarrierNV',0,b'\x00\x01\xF5\x23glXBindSwapBarrierSGIX',0,b'\x00\x01\xFA\x23glXBindTexImageEXT',0,b'\x00\x00\xB0\x23glXBindVideoCaptureDeviceNV',0,b'\x00\x00\xAA\x23glXBindVideoDeviceNV',0,b'\x00\x00\xB5\x23glXBindVideoImageNV',0,b'\x00\x02\x18\x23glXBlitContextFramebufferAMD',0,b'\x00\x00\x63\x23glXChannelRectSGIX',0,b'\x00\x00\x54\x23glXChannelRectSyncSGIX',0,b'\x00\x01\x3D\x23glXChooseFBConfig',0,b'\x00\x01\x37\x23glXChooseFBConfigSGIX',0,b'\x00\x00\x0E\x23glXChooseVisual',0,b'\x00\x01\xD2\x23glXCopyBufferSubDataNV',0,b'\x00\x01\xE6\x23glXCopyContext',0,b'\x00\x01\xBE\x23glXCopyImageSubDataNV',0,b'\x00\x02\x00\x23glXCopySubBufferMESA',0,b'\x00\x01\x5F\x23glXCreateAssociatedContextAMD',0,b'\x00\x01\x63\x23glXCreateAssociatedContextAttribsAMD',0,b'\x00\x01\x47\x23glXCreateContext',0,b'\x00\x01\x54\x23glXCreateContextAttribsARB',0,b'\x00\x01\x4D\x23glXCreateContextWithConfigSGIX',0,b'\x00\x01\x96\x23glXCreateGLXPbufferSGIX',0,b'\x00\x01\x7E\x23glXCreateGLXPixmap',0,b'\x00\x01\x83\x23glXCreateGLXPixmapMESA',0,b'\x00\x01\x9D\x23glXCreateGLXPixmapWithConfigSGIX',0,b'\x00\x01\x89\x23glXCreateGLXVideoSourceSGIX',0,b'\x00\x01\x4D\x23glXCreateNewContext',0,b'\x00\x01\x91\x23glXCreatePbuffer',0,b'\x00\x01\xA2\x23glXCreatePixmap',0,b'\x00\x01\xA2\x23glXCreateWindow',0,b'\x00\x01\xF0\x23glXCushionSGI',0,b'\x00\x00\xC5\x23glXDelayBeforeSwapNV',0,b'\x00\x01\x1E\x23glXDeleteAssociatedContextAMD',0,b'\x00\x01\xBA\x23glXDestroyContext',0,b'\x00\x01\xEC\x23glXDestroyGLXPbufferSGIX',0,b'\x00\x01\xEC\x23glXDestroyGLXPixmap',0,b'\x00\x01\xEC\x23glXDestroyGLXVideoSourceSGIX',0,b'\x00\x00\x39\x23glXDestroyHyperpipeConfigSGI
X',0,b'\x00\x01\xEC\x23glXDestroyPbuffer',0,b'\x00\x01\xEC\x23glXDestroyPixmap',0,b'\x00\x01\xEC\x23glXDestroyWindow',0,b'\x00\x01\x79\x23glXEnumerateVideoCaptureDevicesNV',0,b'\x00\x01\x6A\x23glXEnumerateVideoDevicesNV',0,b'\x00\x01\xBA\x23glXFreeContextEXT',0,b'\x00\x01\x76\x23glXGetAGPOffsetMESA',0,b'\x00\x00\x17\x23glXGetClientString',0,b'\x00\x00\x2E\x23glXGetConfig',0,b'\x00\x01\x6F\x23glXGetContextGPUIDAMD',0,b'\x00\x01\xAE\x23glXGetContextIDEXT',0,b'\x00\x01\x68\x23glXGetCurrentAssociatedContextAMD',0,b'\x00\x01\x68\x23glXGetCurrentContext',0,b'\x00\x00\x03\x23glXGetCurrentDisplay',0,b'\x00\x00\x03\x23glXGetCurrentDisplayEXT',0,b'\x00\x01\xB1\x23glXGetCurrentDrawable',0,b'\x00\x01\xB1\x23glXGetCurrentReadDrawable',0,b'\x00\x01\xB1\x23glXGetCurrentReadDrawableSGI',0,b'\x00\x00\x9A\x23glXGetFBConfigAttrib',0,b'\x00\x00\x9A\x23glXGetFBConfigAttribSGIX',0,b'\x00\x01\x43\x23glXGetFBConfigFromVisualSGIX',0,b'\x00\x01\x32\x23glXGetFBConfigs',0,b'\x00\x01\x72\x23glXGetGPUIDsAMD',0,b'\x00\x01\x24\x23glXGetGPUInfoAMD',0,b'\x00\x00\xE8\x23glXGetMscRateOML',0,b'\x00\x01\xB3\x23glXGetProcAddress',0,b'\x00\x01\xB3\x23glXGetProcAddressARB',0,b'\x00\x02\x0E\x23glXGetSelectedEvent',0,b'\x00\x02\x0E\x23glXGetSelectedEventSGIX',0,b'\x00\x00\xEE\x23glXGetSyncValuesOML',0,b'\x00\x01\xA8\x23glXGetTransparentIndexSUN',0,b'\x00\x00\x82\x23glXGetVideoDeviceNV',0,b'\x00\x00\x93\x23glXGetVideoInfoNV',0,b'\x00\x01\x21\x23glXGetVideoSyncSGI',0,b'\x00\x00\x13\x23glXGetVisualFromFBConfig',0,b'\x00\x00\x13\x23glXGetVisualFromFBConfigSGIX',0,b'\x00\x00\x73\x23glXHyperpipeAttribSGIX',0,b'\x00\x00\x4D\x23glXHyperpipeConfigSGIX',0,b'\x00\x01\x5B\x23glXImportContextEXT',0,b'\x00\x00\xA0\x23glXIsDirect',0,b'\x00\x00\xD0\x23glXJoinSwapGroupNV',0,b'\x00\x02\x13\x23glXJoinSwapGroupSGIX',0,b'\x00\x01\xEC\x23glXLockVideoCaptureDeviceNV',0,b'\x00\x01\x1E\x23glXMakeAssociatedContextCurrentAMD',0,b'\x00\x01\x0C\x23glXMakeContextCurrent',0,b'\x00\x01\x07\x23glXMakeCurrent',0,b'\x00\x01\x0C\x23glXMakeCurr
entReadSGI',0,b'\x00\x01\xDC\x23glXNamedCopyBufferSubDataNV',0,b'\x00\x00\x5A\x23glXQueryChannelDeltasSGIX',0,b'\x00\x00\x5A\x23glXQueryChannelRectSGIX',0,b'\x00\x00\xA4\x23glXQueryContext',0,b'\x00\x00\xA4\x23glXQueryContextInfoEXT',0,b'\x00\x01\x1A\x23glXQueryCurrentRendererIntegerMESA',0,b'\x00\x00\x26\x23glXQueryCurrentRendererStringMESA',0,b'\x00\x02\x08\x23glXQueryDrawable',0,b'\x00\x00\x34\x23glXQueryExtension',0,b'\x00\x00\x17\x23glXQueryExtensionsString',0,b'\x00\x00\x3D\x23glXQueryFrameCountNV',0,b'\x00\x00\xDB\x23glXQueryGLXPbufferSGIX',0,b'\x00\x00\x73\x23glXQueryHyperpipeAttribSGIX',0,b'\x00\x00\x7A\x23glXQueryHyperpipeBestAttribSGIX',0,b'\x00\x00\x05\x23glXQueryHyperpipeConfigSGIX',0,b'\x00\x00\x0A\x23glXQueryHyperpipeNetworkSGIX',0,b'\x00\x00\x48\x23glXQueryMaxSwapBarriersSGIX',0,b'\x00\x00\x42\x23glXQueryMaxSwapGroupsNV',0,b'\x00\x00\x6C\x23glXQueryRendererIntegerMESA',0,b'\x00\x00\x20\x23glXQueryRendererStringMESA',0,b'\x00\x00\x1B\x23glXQueryServerString',0,b'\x00\x00\xCA\x23glXQuerySwapGroupNV',0,b'\x00\x00\x34\x23glXQueryVersion',0,b'\x00\x00\xD5\x23glXQueryVideoCaptureDeviceNV',0,b'\x00\x00\xBB\x23glXReleaseBuffersMESA',0,b'\x00\x01\xF5\x23glXReleaseTexImageEXT',0,b'\x00\x01\xEC\x23glXReleaseVideoCaptureDeviceNV',0,b'\x00\x00\x8E\x23glXReleaseVideoDeviceNV',0,b'\x00\x00\xBB\x23glXReleaseVideoImageNV',0,b'\x00\x00\x39\x23glXResetFrameCountNV',0,b'\x00\x02\x13\x23glXSelectEvent',0,b'\x00\x02\x13\x23glXSelectEventSGIX',0,b'\x00\x00\xE1\x23glXSendPbufferToVideoNV',0,b'\x00\x01\x12\x23glXSet3DfxModeMESA',0,b'\x00\x01\xEC\x23glXSwapBuffers',0,b'\x00\x01\x2B\x23glXSwapBuffersMscOML',0,b'\x00\x01\xF5\x23glXSwapIntervalEXT',0,b'\x00\x01\x12\x23glXSwapIntervalSGI',0,b'\x00\x02\x25\x23glXUseXFont',0,b'\x00\x00\xFD\x23glXWaitForMscOML',0,b'\x00\x00\xF5\x23glXWaitForSbcOML',0,b'\x00\x02\x2B\x23glXWaitGL',0,b'\x00\x01\x15\x23glXWaitVideoSyncSGI',0,b'\x00\x02\x2B\x23glXWaitX',0),
_struct_unions = ((b'\x00\x00\x00\xC3\x00\x00\x00\x10$DMbuffer',),(b'\x00\x00\x02\x30\x00\x00\x00\x10$DMparams',),(b'\x00\x00\x02\x31\x00\x00\x00\x10$Display',),(b'\x00\x00\x02\x32\x00\x00\x00\x02$GLXBufferClobberEventSGIX',b'\x00\x00\x07\x11type',b'\x00\x00\x8C\x11serial',b'\x00\x00\x07\x11send_event',b'\x00\x00\x06\x11display',b'\x00\x00\x8C\x11drawable',b'\x00\x00\x07\x11event_type',b'\x00\x00\x07\x11draw_type',b'\x00\x00\x91\x11mask',b'\x00\x00\x07\x11x',b'\x00\x00\x07\x11y',b'\x00\x00\x07\x11width',b'\x00\x00\x07\x11height',b'\x00\x00\x07\x11count'),(b'\x00\x00\x02\x33\x00\x00\x00\x02$GLXBufferSwapComplete',b'\x00\x00\x07\x11type',b'\x00\x00\x8C\x11serial',b'\x00\x00\x07\x11send_event',b'\x00\x00\x06\x11display',b'\x00\x00\x8C\x11drawable',b'\x00\x00\x07\x11event_type',b'\x00\x00\xF8\x11ust',b'\x00\x00\xF8\x11msc',b'\x00\x00\xF8\x11sbc'),(b'\x00\x00\x02\x35\x00\x00\x00\x02$GLXHyperpipeConfigSGIX',b'\x00\x02\x43\x11pipeName',b'\x00\x00\x07\x11channel',b'\x00\x00\x91\x11participationType',b'\x00\x00\x07\x11timeSlice'),(b'\x00\x00\x02\x37\x00\x00\x00\x02$GLXHyperpipeNetworkSGIX',b'\x00\x02\x43\x11pipeName',b'\x00\x00\x07\x11networkId'),(b'\x00\x00\x02\x38\x00\x00\x00\x02$GLXPbufferClobberEvent',b'\x00\x00\x07\x11event_type',b'\x00\x00\x07\x11draw_type',b'\x00\x00\x8C\x11serial',b'\x00\x00\x07\x11send_event',b'\x00\x00\x06\x11display',b'\x00\x00\x8C\x11drawable',b'\x00\x00\x91\x11buffer_mask',b'\x00\x00\x91\x11aux_buffer',b'\x00\x00\x07\x11x',b'\x00\x00\x07\x11y',b'\x00\x00\x07\x11width',b'\x00\x00\x07\x11height',b'\x00\x00\x07\x11count'),(b'\x00\x00\x02\x39\x00\x00\x00\x02$GLXPipeRect',b'\x00\x02\x43\x11pipeName',b'\x00\x00\x07\x11srcXOrigin',b'\x00\x00\x07\x11srcYOrigin',b'\x00\x00\x07\x11srcWidth',b'\x00\x00\x07\x11srcHeight',b'\x00\x00\x07\x11destXOrigin',b'\x00\x00\x07\x11destYOrigin',b'\x00\x00\x07\x11destWidth',b'\x00\x00\x07\x11destHeight'),(b'\x00\x00\x02\x3A\x00\x00\x00\x02$GLXPipeRectLimits',b'\x00\x02\x43\x11pipeName',b'\x00\x00\x07\x11XOrigin',b'\x00\x
00\x07\x11YOrigin',b'\x00\x00\x07\x11maxHeight',b'\x00\x00\x07\x11maxWidth'),(b'\x00\x00\x02\x3B\x00\x00\x00\x02$GLXStereoNotifyEventEXT',b'\x00\x00\x07\x11type',b'\x00\x00\x8C\x11serial',b'\x00\x00\x07\x11send_event',b'\x00\x00\x06\x11display',b'\x00\x00\x07\x11extension',b'\x00\x00\x07\x11evtype',b'\x00\x00\x8C\x11window',b'\x00\x00\x07\x11stereo_tree'),(b'\x00\x00\x02\x22\x00\x00\x00\x10$GLbitfield',),(b'\x00\x00\x00\xE6\x00\x00\x00\x10$GLboolean',),(b'\x00\x00\x00\x58\x00\x00\x00\x10$GLenum',),(b'\x00\x00\x00\xC8\x00\x00\x00\x10$GLfloat',),(b'\x00\x00\x01\xC3\x00\x00\x00\x10$GLint',),(b'\x00\x00\x01\xD8\x00\x00\x00\x10$GLintptr',),(b'\x00\x00\x01\xCE\x00\x00\x00\x10$GLsizei',),(b'\x00\x00\x01\xDA\x00\x00\x00\x10$GLsizeiptr',),(b'\x00\x00\x02\x3C\x00\x00\x00\x10$GLubyte',),(b'\x00\x00\x00\x2B\x00\x00\x00\x10$GLuint',),(b'\x00\x00\x01\x8F\x00\x00\x00\x10$VLNode',),(b'\x00\x00\x01\x8D\x00\x00\x00\x10$VLPath',),(b'\x00\x00\x01\x8C\x00\x00\x00\x10$VLServer',),(b'\x00\x00\x02\x3E\x00\x00\x00\x02$Visual',b'\x00\x02\x3F\x11ext_data',b'\x00\x00\x8C\x11visualid',b'\x00\x00\x07\x11class',b'\x00\x00\x8C\x11red_mask',b'\x00\x00\x8C\x11green_mask',b'\x00\x00\x8C\x11blue_mask',b'\x00\x00\x07\x11bits_per_rgb',b'\x00\x00\x07\x11map_entries'),(b'\x00\x00\x02\x40\x00\x00\x00\x10$XExtData',),(b'\x00\x00\x02\x41\x00\x00\x00\x02$XVisualInfo',b'\x00\x02\x3D\x11visual',b'\x00\x00\x8C\x11visualid',b'\x00\x00\x07\x11screen',b'\x00\x00\x07\x11depth',b'\x00\x00\x07\x11class',b'\x00\x00\x8C\x11red_mask',b'\x00\x00\x8C\x11green_mask',b'\x00\x00\x8C\x11blue_mask',b'\x00\x00\x07\x11colormap_size',b'\x00\x00\x07\x11bits_per_rgb'),(b'\x00\x00\x02\x56\x00\x00\x00\x18$xcb_connection_t',),(b'\x00\x00\x02\x58\x00\x00\x00\x08$xcb_generic_error_t',),(b'\x00\x00\x02\x59\x00\x00\x00\x08$xcb_generic_event_t',),(b'\x00\x00\x02\x5A\x00\x00\x00\x08$xcb_generic_reply_t',),(b'\x00\x00\x02\x5B\x00\x00\x00\x08$xcb_protocol_request_t',),(b'\x00\x00\x02\x63\x00\x00\x00\x08$xcb_void_cookie_t',),(b'\x00\x00\x02\x34
\x00\x00\x00\x03__GLXEvent',b'\x00\x02\x38\x11glxpbufferclobber',b'\x00\x02\x33\x11glxbufferswapcomplete',b'\x00\x02\x47\x11pad'),(b'\x00\x00\x02\x4C\x00\x00\x00\x10__GLXFBConfigRec',),(b'\x00\x00\x02\x4D\x00\x00\x00\x10__GLXcontextRec',),(b'\x00\x00\x02\x4E\x00\x00\x00\x08iovec',),(b'\x00\x00\x02\x54\x00\x00\x00\x08xcb_auth_info_t',),(b'\x00\x00\x02\x57\x00\x00\x00\x08xcb_extension_t',),(b'\x00\x00\x02\x5C\x00\x00\x00\x08xcb_query_extension_reply_t',),(b'\x00\x00\x02\x5D\x00\x00\x00\x08xcb_render_directformat_t',),(b'\x00\x00\x02\x5E\x00\x00\x00\x08xcb_render_pictforminfo_t',),(b'\x00\x00\x02\x5F\x00\x00\x00\x08xcb_screen_iterator_t',),(b'\x00\x00\x02\x60\x00\x00\x00\x08xcb_screen_t',),(b'\x00\x00\x02\x61\x00\x00\x00\x08xcb_setup_t',),(b'\x00\x00\x02\x62\x00\x00\x00\x08xcb_visualtype_t',)),
_enums = (b'\x00\x00\x01\xB8\x00\x00\x00\x16XEventQueueOwner\x00XlibOwnsEventQueue,XCBOwnsEventQueue',),
_typenames = (b'\x00\x00\x00\x07Bool',b'\x00\x00\x00\x8CColormap',b'\x00\x00\x00\xC3DMbuffer',b'\x00\x00\x02\x30DMparams',b'\x00\x00\x02\x31Display',b'\x00\x00\x00\x8CFont',b'\x00\x00\x02\x32GLXBufferClobberEventSGIX',b'\x00\x00\x02\x33GLXBufferSwapComplete',b'\x00\x00\x00\xA2GLXContext',b'\x00\x00\x00\x8CGLXContextID',b'\x00\x00\x00\x8CGLXDrawable',b'\x00\x00\x02\x34GLXEvent',b'\x00\x00\x00\x15GLXFBConfig',b'\x00\x00\x00\x8CGLXFBConfigID',b'\x00\x00\x00\x8CGLXFBConfigIDSGIX',b'\x00\x00\x00\x15GLXFBConfigSGIX',b'\x00\x00\x02\x35GLXHyperpipeConfigSGIX',b'\x00\x00\x02\x37GLXHyperpipeNetworkSGIX',b'\x00\x00\x00\x8CGLXPbuffer',b'\x00\x00\x02\x38GLXPbufferClobberEvent',b'\x00\x00\x00\x8CGLXPbufferSGIX',b'\x00\x00\x02\x39GLXPipeRect',b'\x00\x00\x02\x3AGLXPipeRectLimits',b'\x00\x00\x00\x8CGLXPixmap',b'\x00\x00\x02\x3BGLXStereoNotifyEventEXT',b'\x00\x00\x00\x8CGLXVideoCaptureDeviceNV',b'\x00\x00\x00\x91GLXVideoDeviceNV',b'\x00\x00\x00\x8CGLXVideoSourceSGIX',b'\x00\x00\x00\x8CGLXWindow',b'\x00\x00\x02\x22GLbitfield',b'\x00\x00\x00\xE6GLboolean',b'\x00\x00\x00\x58GLenum',b'\x00\x00\x00\xC8GLfloat',b'\x00\x00\x01\xC3GLint',b'\x00\x00\x01\xD8GLintptr',b'\x00\x00\x01\xCEGLsizei',b'\x00\x00\x01\xDAGLsizeiptr',b'\x00\x00\x02\x3CGLubyte',b'\x00\x00\x00\x2BGLuint',b'\x00\x00\x00\x8CPixmap',b'\x00\x00\x00\x8CScreen',b'\x00\x00\x00\x8CStatus',b'\x00\x00\x01\x8FVLNode',b'\x00\x00\x01\x8DVLPath',b'\x00\x00\x01\x8CVLServer',b'\x00\x00\x02\x3EVisual',b'\x00\x00\x00\x8CVisualID',b'\x00\x00\x00\x8CWindow',b'\x00\x00\x02\x40XExtData',b'\x00\x00\x00\x8CXID',b'\x00\x00\x02\x41XVisualInfo',b'\x00\x00\x02\x2B__GLXextFuncPtr',b'\x00\x00\x01\xF3khronos_float_t',b'\x00\x00\x02\x49khronos_int16_t',b'\x00\x00\x02\x45khronos_int32_t',b'\x00\x00\x00\xF8khronos_int64_t',b'\x00\x00\x02\x4Akhronos_int8_t',b'\x00\x00\x02\x46khronos_intptr_t',b'\x00\x00\x02\x46khronos_ssize_t',b'\x00\x00\x00\xF8khronos_stime_nanoseconds_t',b'\x00\x00\x02\x52khronos_uint16_t',b'\x00\x00\x02\x4Fkhronos_uint32_t',b'\x00\x00\x0
2\x50khronos_uint64_t',b'\x00\x00\x02\x51khronos_uint8_t',b'\x00\x00\x00\x8Ckhronos_uintptr_t',b'\x00\x00\x00\x8Ckhronos_usize_t',b'\x00\x00\x02\x50khronos_utime_nanoseconds_t',b'\x00\x00\x02\x54xcb_auth_info_t',b'\x00\x00\x02\x4Fxcb_colormap_t',b'\x00\x00\x02\x56xcb_connection_t',b'\x00\x00\x02\x4Fxcb_drawable_t',b'\x00\x00\x02\x57xcb_extension_t',b'\x00\x00\x02\x58xcb_generic_error_t',b'\x00\x00\x02\x59xcb_generic_event_t',b'\x00\x00\x02\x5Axcb_generic_reply_t',b'\x00\x00\x02\x4Fxcb_pixmap_t',b'\x00\x00\x02\x5Bxcb_protocol_request_t',b'\x00\x00\x02\x5Cxcb_query_extension_reply_t',b'\x00\x00\x02\x5Dxcb_render_directformat_t',b'\x00\x00\x02\x4Fxcb_render_pictformat_t',b'\x00\x00\x02\x5Excb_render_pictforminfo_t',b'\x00\x00\x02\x5Fxcb_screen_iterator_t',b'\x00\x00\x02\x60xcb_screen_t',b'\x00\x00\x02\x61xcb_setup_t',b'\x00\x00\x02\x4Fxcb_visualid_t',b'\x00\x00\x02\x62xcb_visualtype_t',b'\x00\x00\x02\x63xcb_void_cookie_t',b'\x00\x00\x02\x4Fxcb_window_t'),
_includes = (_ffi0,),
)
| 1,699.357143
| 9,809
| 0.767685
| 4,774
| 23,791
| 3.788856
| 0.113741
| 0.349624
| 0.103715
| 0.066342
| 0.522391
| 0.470201
| 0.443498
| 0.40596
| 0.40115
| 0.390756
| 0
| 0.324538
| 0.002732
| 23,791
| 13
| 9,810
| 1,830.076923
| 0.437832
| 0.000799
| 0
| 0
| 1
| 0.090909
| 0.913294
| 0.900926
| 0
| 1
| 0.000252
| 0
| 0
| 1
| 0
| false
| 0
| 0.272727
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d76b2b2dc7116687ad579b6f901b3ec73ffee13a
| 40,452
|
py
|
Python
|
swagger_client/apis/accommodation_api.py
|
scubawhere/scubawhere-api-python-client
|
9f8578e251492c7667f785df7b7c9d66e71f5c8e
|
[
"Apache-2.0"
] | null | null | null |
swagger_client/apis/accommodation_api.py
|
scubawhere/scubawhere-api-python-client
|
9f8578e251492c7667f785df7b7c9d66e71f5c8e
|
[
"Apache-2.0"
] | null | null | null |
swagger_client/apis/accommodation_api.py
|
scubawhere/scubawhere-api-python-client
|
9f8578e251492c7667f785df7b7c9d66e71f5c8e
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Scubawhere API Documentation
This is the documentation for scubawhere's RMS API. This API is only to be used by authorized parties with valid auth tokens. [Learn about scubawhere](http://www.scubawhere.com) to become an authorized consumer of our API
OpenAPI spec version: 1.0.0
Contact: bryan@scubawhere.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AccommodationApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def add_accommodation(self, name, capacity, base_prices, **kwargs):
"""
Create an accommodation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_accommodation(name, capacity, base_prices, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: Name of the type of accommodation (required)
:param int capacity: Number of beds in the accommodation (required)
:param int base_prices: Price of the accommodation and the dates of when the price is applicable (required)
:param str description: Description of the accommodation
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_accommodation_with_http_info(name, capacity, base_prices, **kwargs)
else:
(data) = self.add_accommodation_with_http_info(name, capacity, base_prices, **kwargs)
return data
def add_accommodation_with_http_info(self, name, capacity, base_prices, **kwargs):
"""
Create an accommodation
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_accommodation_with_http_info(name, capacity, base_prices, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: Name of the type of accommodation (required)
:param int capacity: Number of beds in the accommodation (required)
:param int base_prices: Price of the accommodation and the dates of when the price is applicable (required)
:param str description: Description of the accommodation
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'capacity', 'base_prices', 'description']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_accommodation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `add_accommodation`")
# verify the required parameter 'capacity' is set
if ('capacity' not in params) or (params['capacity'] is None):
raise ValueError("Missing the required parameter `capacity` when calling `add_accommodation`")
# verify the required parameter 'base_prices' is set
if ('base_prices' not in params) or (params['base_prices'] is None):
raise ValueError("Missing the required parameter `base_prices` when calling `add_accommodation`")
resource_path = '/accommodation/add'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'name' in params:
query_params['name'] = params['name']
if 'description' in params:
query_params['description'] = params['description']
if 'capacity' in params:
query_params['capacity'] = params['capacity']
if 'base_prices' in params:
query_params['base_prices'] = params['base_prices']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def delete_accommodation(self, body, **kwargs):
"""
Delete an accommodation by ID
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_accommodation(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int body: ID of the accommodation (required)
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_accommodation_with_http_info(body, **kwargs)
else:
(data) = self.delete_accommodation_with_http_info(body, **kwargs)
return data
def delete_accommodation_with_http_info(self, body, **kwargs):
    """
    Delete an accommodation by ID

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_accommodation_with_http_info(body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int body: ID of the accommodation (required)
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['body', 'callback', '_return_http_data_only']
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_accommodation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `delete_accommodation`")
    resource_path = '/accommodation/delete'.replace('{format}', 'json')
    # The accommodation ID is sent as a query parameter, not a body.
    query_params = {field: params[field]
                    for field in ('body',) if field in params}
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    {},  # no path parameters
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='InlineResponse200',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def edit_agent(self, id, name, branch_name, branch_address, branch_phone, branch_email, commission, terms, **kwargs):
    """
    Edit an existing agent

    (The generated docstring previously said "Create a new agent", but this
    method targets the `/agent/edit` endpoint and requires an agent `id`.)
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.edit_agent(id, name, branch_name, branch_address, branch_phone, branch_email, commission, terms, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: ID of the agent to edit (required)
    :param int name: (required)
    :param str branch_name: (required)
    :param str branch_address: (required)
    :param str branch_phone: (required)
    :param str branch_email: (required)
    :param float commission: (required)
    :param str terms: (required)
    :param str website:
    :param str billing_address:
    :param str billing_phone:
    :param str billing_email:
    :return: InlineResponse2004
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async: return the request thread.
        return self.edit_agent_with_http_info(id, name, branch_name, branch_address, branch_phone, branch_email, commission, terms, **kwargs)
    # Sync: block and return just the data.
    return self.edit_agent_with_http_info(id, name, branch_name, branch_address, branch_phone, branch_email, commission, terms, **kwargs)
def edit_agent_with_http_info(self, id, name, branch_name, branch_address, branch_phone, branch_email, commission, terms, **kwargs):
    """
    Edit an existing agent

    (The generated docstring previously said "Create a new agent", but this
    method targets the `/agent/edit` endpoint and requires an agent `id`.)
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.edit_agent_with_http_info(id, name, branch_name, branch_address, branch_phone, branch_email, commission, terms, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: ID of the agent to edit (required)
    :param int name: (required)
    :param str branch_name: (required)
    :param str branch_address: (required)
    :param str branch_phone: (required)
    :param str branch_email: (required)
    :param float commission: (required)
    :param str terms: (required)
    :param str website:
    :param str billing_address:
    :param str billing_phone:
    :param str billing_email:
    :return: InlineResponse2004
             If the method is called asynchronously,
             returns the request thread.
    """
    required = ('id', 'name', 'branch_name', 'branch_address',
                'branch_phone', 'branch_email', 'commission', 'terms')
    optional = ('website', 'billing_address', 'billing_phone', 'billing_email')
    all_params = list(required) + list(optional)
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method edit_agent" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify every required parameter is set (one loop instead of eight
    # copy-pasted checks; the error text is identical to the original).
    for param_i in required:
        if params.get(param_i) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `edit_agent`" % param_i)
    resource_path = '/agent/edit'.replace('{format}', 'json')
    # All fields travel as query parameters; only supplied ones are sent.
    query_params = {field: params[field]
                    for field in ('id', 'name', 'website', 'branch_name',
                                  'branch_address', 'branch_phone',
                                  'branch_email', 'billing_address',
                                  'billing_phone', 'billing_email',
                                  'commission', 'terms')
                    if field in params}
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'POST',
                                    {},  # no path parameters
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='InlineResponse2004',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def filter_accommodation(self, **kwargs):
    """
    Get all the accommodations matching a filter

    Get all the accommodations and their bookings between certain dates
    and / or an accommodation id. Synchronous by default; supply a
    `callback` callable to make the request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.filter_accommodation(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param date before: The date for the upper boundary of the dates
    :param date after: The date for the lower boundary of the dates
    :param int accommodation_id: ID Accommodation to filter by
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async: return the request thread.
        return self.filter_accommodation_with_http_info(**kwargs)
    # Sync: block and return just the data.
    return self.filter_accommodation_with_http_info(**kwargs)
def filter_accommodation_with_http_info(self, **kwargs):
    """
    Get all the accommodations matching a filter

    Get all the accommodations and their bookings between certain dates
    and / or an accommodation id. Synchronous by default; supply a
    `callback` callable to make the request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.filter_accommodation_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param date before: The date for the upper boundary of the dates
    :param date after: The date for the lower boundary of the dates
    :param int accommodation_id: ID Accommodation to filter by
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['before', 'after', 'accommodation_id',
                  'callback', '_return_http_data_only']
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method filter_accommodation" % key
            )
        params[key] = val
    del params['kwargs']
    resource_path = '/accommodation/filter'.replace('{format}', 'json')
    # Every filter field is optional; send only those actually supplied.
    query_params = {field: params[field]
                    for field in ('before', 'after', 'accommodation_id')
                    if field in params}
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'GET',
                                    {},  # no path parameters
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='InlineResponse200',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def find_accommodation(self, id, **kwargs):
    """
    Retrieve an accommodation by ID

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.find_accommodation(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param list[int] id: ID of the accommodation to be retrieved (required)
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async: return the request thread.
        return self.find_accommodation_with_http_info(id, **kwargs)
    # Sync: block and return just the data.
    return self.find_accommodation_with_http_info(id, **kwargs)
def find_accommodation_with_http_info(self, id, **kwargs):
    """
    Retrieve an accommodation by ID

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.find_accommodation_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param list[int] id: ID of the accommodation to be retrieved (required)
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'callback', '_return_http_data_only']
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_accommodation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `find_accommodation`")
    resource_path = '/accommodation'.replace('{format}', 'json')
    query_params = {field: params[field]
                    for field in ('id',) if field in params}
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'GET',
                                    {},  # no path parameters
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='InlineResponse200',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_all_accommodations(self, **kwargs):
    """
    Retrieve all accommodation

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_all_accommodations(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[Accommodation]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async: return the request thread.
        return self.get_all_accommodations_with_http_info(**kwargs)
    # Sync: block and return just the data.
    return self.get_all_accommodations_with_http_info(**kwargs)
def get_all_accommodations_with_http_info(self, **kwargs):
    """
    Retrieve all accommodation

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_all_accommodations_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[Accommodation]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no request fields of its own.
    all_params = ['callback', '_return_http_data_only']
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_accommodations" % key
            )
        params[key] = val
    del params['kwargs']
    resource_path = '/accommodations/all'.replace('{format}', 'json')
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'GET',
                                    {},  # no path parameters
                                    {},  # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='list[Accommodation]',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_all_with_trashed_accommodations(self, **kwargs):
    """
    Retrieve all accommodation including any deleted models

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_all_with_trashed_accommodations(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[Accommodation]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async: return the request thread.
        return self.get_all_with_trashed_accommodations_with_http_info(**kwargs)
    # Sync: block and return just the data.
    return self.get_all_with_trashed_accommodations_with_http_info(**kwargs)
def get_all_with_trashed_accommodations_with_http_info(self, **kwargs):
    """
    Retrieve all accommodation including any deleted models

    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_all_with_trashed_accommodations_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[Accommodation]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no request fields of its own.
    all_params = ['callback', '_return_http_data_only']
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_with_trashed_accommodations" % key
            )
        params[key] = val
    del params['kwargs']
    resource_path = '/accommodations/all-with-trashed'.replace('{format}', 'json')
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'GET',
                                    {},  # no path parameters
                                    {},  # no query parameters
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='list[Accommodation]',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def update_accommodation(self, id, **kwargs):
    """
    Update an Accommodation

    Updates the accommodation by id using the specified fields.
    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_accommodation(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: ID of the Accommodation to be updated (required)
    :param str name: Name of the Accommodation
    :param int capacity: Number of rooms the accommodation holds
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the deserialized payload.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Async: return the request thread.
        return self.update_accommodation_with_http_info(id, **kwargs)
    # Sync: block and return just the data.
    return self.update_accommodation_with_http_info(id, **kwargs)
def update_accommodation_with_http_info(self, id, **kwargs):
    """
    Update an Accommodation

    Updates the accommodation by id using the specified fields.
    Synchronous by default; supply a `callback` callable to make the
    request asynchronously.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_accommodation_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: ID of the Accommodation to be updated (required)
    :param str name: Name of the Accommodation
    :param int capacity: Number of rooms the accommodation holds
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'name', 'capacity',
                  'callback', '_return_http_data_only']
    params = locals()
    # Fold **kwargs into params, rejecting anything not declared above.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_accommodation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `update_accommodation`")
    resource_path = '/accommodation/edit'.replace('{format}', 'json')
    # Only forward the update fields the caller actually supplied.
    query_params = {field: params[field]
                    for field in ('id', 'name', 'capacity')
                    if field in params}
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # No authentication is configured for this endpoint.
    return self.api_client.call_api(resource_path, 'PUT',
                                    {},  # no path parameters
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='InlineResponse200',
                                    auth_settings=[],
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
| 41.832472
| 227
| 0.579131
| 4,177
| 40,452
| 5.416088
| 0.061049
| 0.05658
| 0.030058
| 0.025461
| 0.885205
| 0.86001
| 0.853158
| 0.843213
| 0.812978
| 0.781506
| 0
| 0.002509
| 0.339958
| 40,452
| 966
| 228
| 41.875776
| 0.844794
| 0.34693
| 0
| 0.701327
| 0
| 0
| 0.178844
| 0.029451
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037611
| false
| 0
| 0.015487
| 0
| 0.108407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ad3ac30b8104022dc104da85fefbec495973bac1
| 133
|
py
|
Python
|
gmpack/__init__.py
|
odakan/gmpack
|
7d57be190784955cd90b55c4878edf12dabf9e83
|
[
"MIT"
] | null | null | null |
gmpack/__init__.py
|
odakan/gmpack
|
7d57be190784955cd90b55c4878edf12dabf9e83
|
[
"MIT"
] | null | null | null |
gmpack/__init__.py
|
odakan/gmpack
|
7d57be190784955cd90b55c4878edf12dabf9e83
|
[
"MIT"
] | null | null | null |
from gmpack.Rp_Spectra import *
from gmpack.Plot_Spectra import *
from gmpack.Flatten_Record import *
from gmpack.F_Spectra import *
| 26.6
| 35
| 0.819549
| 20
| 133
| 5.25
| 0.45
| 0.380952
| 0.457143
| 0.438095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120301
| 133
| 4
| 36
| 33.25
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ad668919d0466cd06513420076e380158dac6298
| 11,186
|
py
|
Python
|
somaticseq/utilities/dockered_pipelines/somatic_mutations/VarDict.py
|
bioinform/somaticseq
|
71f058dcdfea78ec056aa46f96a40cc737cc559f
|
[
"BSD-2-Clause"
] | 159
|
2015-07-26T15:14:44.000Z
|
2022-03-31T03:29:25.000Z
|
somaticseq/utilities/dockered_pipelines/somatic_mutations/VarDict.py
|
lethalfang/somaticseq
|
e6f5b1c6b98b324d418407154392778164215a65
|
[
"BSD-2-Clause"
] | 77
|
2016-06-12T21:44:43.000Z
|
2022-03-31T19:33:49.000Z
|
somaticseq/utilities/dockered_pipelines/somatic_mutations/VarDict.py
|
lethalfang/somaticseq
|
e6f5b1c6b98b324d418407154392778164215a65
|
[
"BSD-2-Clause"
] | 64
|
2015-10-26T01:34:32.000Z
|
2022-03-14T14:43:08.000Z
|
import sys, argparse, os, re
import subprocess
from datetime import datetime
import somaticseq.utilities.dockered_pipelines.container_option as container
from somaticseq._version import __version__ as VERSION
# Run timestamp with ':' and '-' replaced by '.', used below to give each
# generated command script a unique filename.
ts = re.sub(r'[:-]', '.', datetime.now().isoformat() )
# Defaults for a VarDict run. tumor_normal() (and tumor_only()) copy any
# key missing from the caller-supplied input_parameters dict from here.
DEFAULT_PARAMS = {'vardict_image' : 'lethalfang/vardictjava:1.7.0',
                  'MEM' : '8G',
                  'threads' : 1,
                  'normal_bam' : None,
                  'tumor_bam' : None,
                  'genome_reference' : None,
                  'inclusion_region' : None,
                  'output_directory' : os.curdir,
                  'outfile' : 'VarDict.vcf',
                  'action' : 'echo',  # command prefixed to the script, e.g. 'echo' (dry run) or 'qsub'
                  'vardict_arguments' : '',
                  'extra_docker_options' : '',
                  'script' : 'vardict.{}.cmd'.format(ts),  # per-run unique script name
                  'min_MQ' : 1,
                  'minimum_VAF' : 0.05,
                  'process_bed' : True,
                  }
def tumor_normal(input_parameters, tech='docker' ):
    """Generate (and optionally submit) a VarDict tumor-normal run script.

    Writes a bash script into <output_directory>/logs that runs VarDict in a
    container on the tumor/normal BAM pair, then executes
    "<action> <script>" via the shell (so action='echo' is a dry run).

    :param dict input_parameters: run settings; missing keys are filled in
        from DEFAULT_PARAMS. NOTE: the caller's dict is mutated in place.
        'normal_bam', 'tumor_bam' and 'genome_reference' must exist on disk.
    :param str tech: container technology passed to container.container_params
        (e.g. 'docker').
    :return: path of the generated script file.
    """
    # Fill in any missing settings from the module-level defaults
    # (mutates the caller's dict).
    for param_i in DEFAULT_PARAMS:
        if param_i not in input_parameters:
            input_parameters[param_i] = DEFAULT_PARAMS[param_i]
    # The following are required:
    assert os.path.exists( input_parameters['normal_bam'] )
    assert os.path.exists( input_parameters['tumor_bam'] )
    assert os.path.exists( input_parameters['genome_reference'] )
    logdir = os.path.join( input_parameters['output_directory'], 'logs' )
    outfile = os.path.join( logdir, input_parameters['script'] )
    # Collect every host path that must be mounted into the container;
    # skip entries that are None (e.g. no inclusion_region given).
    all_paths = []
    for path_i in input_parameters['normal_bam'], input_parameters['tumor_bam'], input_parameters['genome_reference'], input_parameters['output_directory'], input_parameters['inclusion_region']:
        if path_i:
            all_paths.append( path_i )
    # container_line is the docker/singularity invocation prefix; fileDict
    # maps each host path to its in-container mount path.
    container_line, fileDict = container.container_params( input_parameters['vardict_image'], tech=tech, files=all_paths, extra_args=input_parameters['extra_docker_options'] )
    minVAF = input_parameters['minimum_VAF']
    # Tally total bases and region count to decide later whether the BED
    # needs splitting into smaller regions.
    total_bases = 0
    num_lines = 0
    if input_parameters['inclusion_region']:
        bed_file = input_parameters['inclusion_region']
        with open(bed_file) as bed:
            line_i = bed.readline().rstrip()
            # Skip UCSC-style 'track' header lines.
            while line_i.startswith('track'):
                line_i = bed.readline().rstrip()
            while line_i:
                item = line_i.rstrip().split('\t')
                # BED is half-open: end - start bases per region.
                total_bases = total_bases + int(item[2]) - int(item[1])
                num_lines += 1
                line_i = bed.readline().rstrip()
    else:
        # No inclusion region: build a whole-genome BED from the .fai index.
        fai_file = input_parameters['genome_reference'] + '.fai'
        bed_file = os.path.join(input_parameters['output_directory'], 'genome.bed')
        with open(fai_file) as fai, open(bed_file, 'w') as wgs_bed:
            for line_i in fai:
                item = line_i.split('\t')
                total_bases += int( item[1] )
                num_lines += 1
                wgs_bed.write( '{}\t{}\t{}\n'.format(item[0], '0', item[1]) )
    # However the "bed_file" is defined here, create a dockered line and mount dictionary for it:
    bed_split_line, bedDict = container.container_params( 'lethalfang/somaticseq:{}'.format(VERSION), tech, (bed_file, input_parameters['output_directory']) )
    # Mounted paths for all the input files and output directory:
    mounted_genome_reference = fileDict[ input_parameters['genome_reference'] ]['mount_path']
    mounted_tumor_bam = fileDict[ input_parameters['tumor_bam'] ]['mount_path']
    mounted_normal_bam = fileDict[ input_parameters['normal_bam'] ]['mount_path']
    mounted_outdir = fileDict[ input_parameters['output_directory'] ]['mount_path']
    mounted_bed = bedDict[ bed_file ]['mount_path']
    # Emit the job script: SGE-style headers, then the containerized
    # VarDict command, then the testsomatic.R / var2vcf_paired.pl pipeline.
    with open(outfile, 'w') as out:
        out.write( "#!/bin/bash\n\n" )
        out.write(f'#$ -o {logdir}\n' )
        out.write(f'#$ -e {logdir}\n' )
        out.write( '#$ -S /bin/bash\n' )
        out.write( '#$ -l h_vmem={}\n'.format( input_parameters['MEM'] ) )
        out.write( 'set -e\n\n' )
        out.write( 'echo -e "Start at `date +"%Y/%m/%d %H:%M:%S"`" 1>&2\n\n' )
        # Decide if Bed file needs to be "split" such that each line has a small enough region
        if input_parameters['process_bed'] or total_bases/num_lines > 50000:
            out.write(f'{bed_split_line} \\\n' )
            out.write( '/opt/somaticseq/somaticseq/utilities/split_mergedBed.py \\\n' )
            out.write( '-infile {} -outfile {}/split_regions.bed\n\n'.format(mounted_bed, bedDict[input_parameters['output_directory']]['mount_path']) )
            # Point VarDict at the split BED inside the container.
            bed_file = '{}/split_regions.bed'.format( mounted_outdir )
        out.write(f'{container_line} bash -c \\\n' )
        out.write( '"/opt/VarDict-1.7.0/bin/VarDict \\\n' )
        if input_parameters['vardict_arguments']:
            out.write( '{} \\\n'.format(input_parameters['vardict_arguments']) )
        out.write( '-G {} \\\n'.format( mounted_genome_reference ) )
        out.write( '-f {} -h \\\n'.format( minVAF ) )
        out.write( '-b \'{}|{}\' \\\n'.format(mounted_tumor_bam, mounted_normal_bam) )
        out.write( '-Q 1 -c 1 -S 2 -E 3 -g 4 {} \\\n'.format(bed_file) )
        out.write( '> {}/vardict.var"\n\n'.format(mounted_outdir) )
        out.write( '\n' )
        out.write(f'{container_line} \\\n' )
        out.write( 'bash -c "cat {}/vardict.var | awk \'NR!=1\' | /opt/VarDict/testsomatic.R | /opt/VarDict/var2vcf_paired.pl -N \'TUMOR|NORMAL\' -f {} \\\n'.format(mounted_outdir, minVAF ) )
        out.write( '> {}/{}"\n\n'.format(mounted_outdir, input_parameters['outfile']) )
        out.write( '\necho -e "Done at `date +"%Y/%m/%d %H:%M:%S"`" 1>&2\n' )
    # "Run" the script that was generated
    # NOTE(review): the exit status is captured but never checked — a failed
    # submission goes unnoticed by the caller.
    command_line = '{} {}'.format( input_parameters['action'], outfile )
    returnCode = subprocess.call( command_line, shell=True )
    return outfile
def tumor_only(input_parameters, tech='docker' ):
    """Generate and launch a VarDict tumor-only variant-calling script.

    Writes a bash script to <output_directory>/logs/<script> that runs
    VarDict in a container on a single tumor BAM, pipes the raw output
    through teststrandbias.R and var2vcf_valid.pl, and then invokes the
    generated script with the configured 'action' (e.g., 'qsub' or 'bash').

    Args:
        input_parameters (dict): run configuration; any missing keys are
            filled in from DEFAULT_PARAMS. 'bam' and 'genome_reference'
            must point to existing files.
        tech (str): container technology forwarded to
            container.container_params (default 'docker').

    Returns:
        str: path to the generated script file.
    """
    # Fill in any unspecified parameters with package-wide defaults.
    for param_i in DEFAULT_PARAMS:
        if param_i not in input_parameters:
            input_parameters[param_i] = DEFAULT_PARAMS[param_i]

    # The following are required:
    assert os.path.exists( input_parameters['bam'] )
    assert os.path.exists( input_parameters['genome_reference'] )

    logdir  = os.path.join( input_parameters['output_directory'], 'logs' )
    outfile = os.path.join( logdir, input_parameters['script'] )

    all_paths = []
    for path_i in input_parameters['bam'], input_parameters['genome_reference'], input_parameters['output_directory'], input_parameters['inclusion_region']:
        if path_i:
            all_paths.append( path_i )

    container_line, fileDict = container.container_params( input_parameters['vardict_image'], tech=tech, files=all_paths, extra_args=input_parameters['extra_docker_options'] )

    minVAF = input_parameters['minimum_VAF']

    # Tally total bases and region count from the bed file (or build a
    # whole-genome bed from the .fai index when none is supplied), so we can
    # decide below whether the regions need to be split into smaller chunks.
    total_bases = 0
    num_lines   = 0

    if input_parameters['inclusion_region']:
        bed_file = input_parameters['inclusion_region']
        with open(bed_file) as bed:
            line_i = bed.readline().rstrip()
            # Skip leading UCSC-style 'track' header lines.
            while line_i.startswith('track'):
                line_i = bed.readline().rstrip()
            while line_i:
                item = line_i.rstrip().split('\t')
                total_bases = total_bases + int(item[2]) - int(item[1])
                num_lines += 1
                line_i = bed.readline().rstrip()
    else:
        fai_file = input_parameters['genome_reference'] + '.fai'
        bed_file = os.path.join(input_parameters['output_directory'], 'genome.bed')
        with open(fai_file) as fai, open(bed_file, 'w') as wgs_bed:
            for line_i in fai:
                item = line_i.split('\t')
                total_bases += int( item[1] )
                num_lines += 1
                wgs_bed.write( '{}\t{}\t{}\n'.format(item[0], '0', item[1]) )

    # However the "bed_file" is defined here, create a dockered line and mount dictionary for it:
    bed_split_line, bedDict = container.container_params( 'lethalfang/somaticseq:{}'.format(VERSION), tech, (bed_file, input_parameters['output_directory']) )

    # Mounted paths for all the input files and output directory:
    mounted_genome_reference = fileDict[ input_parameters['genome_reference'] ]['mount_path']
    mounted_tumor_bam        = fileDict[ input_parameters['bam'] ]['mount_path']
    mounted_outdir           = fileDict[ input_parameters['output_directory'] ]['mount_path']
    mounted_bed              = bedDict[ bed_file ]['mount_path']

    with open(outfile, 'w') as out:

        out.write( "#!/bin/bash\n\n" )

        out.write(f'#$ -o {logdir}\n' )
        out.write(f'#$ -e {logdir}\n' )
        out.write( '#$ -S /bin/bash\n' )
        out.write( '#$ -l h_vmem={}\n'.format( input_parameters['MEM'] ) )
        out.write( 'set -e\n\n' )

        out.write( 'echo -e "Start at `date +"%Y/%m/%d %H:%M:%S"`" 1>&2\n\n' )

        # Decide if Bed file needs to be "split" such that each line has a
        # small enough region.
        # Bugfix: guard against num_lines == 0 (e.g., an inclusion bed that
        # contains only 'track' header lines) to avoid ZeroDivisionError; an
        # empty bed has no regions to split anyway.
        if input_parameters['process_bed'] or ( num_lines and total_bases/num_lines > 50000 ):
            out.write(f'{bed_split_line} \\\n' )
            out.write( '/opt/somaticseq/somaticseq/utilities/split_mergedBed.py \\\n' )
            out.write( '-infile {} -outfile {}/split_regions.bed\n\n'.format(mounted_bed, bedDict[input_parameters['output_directory']]['mount_path']) )

            # NOTE(review): the split bed is written under bedDict's mount of
            # the output directory but read back through fileDict's mount
            # (mounted_outdir); assumes both map the output directory to the
            # same in-container path — TODO confirm.
            bed_file = '{}/split_regions.bed'.format( mounted_outdir )

        out.write(f'{container_line} bash -c \\\n' )
        out.write( '"/opt/VarDict-1.7.0/bin/VarDict \\\n' )

        if input_parameters['vardict_arguments']:
            out.write( '{} \\\n'.format(input_parameters['vardict_arguments']) )

        out.write( '-G {} \\\n'.format( mounted_genome_reference ) )
        out.write( '-f {} -h \\\n'.format( minVAF ) )
        out.write( '-b \'{}\' \\\n'.format(mounted_tumor_bam) )
        out.write( '-Q 1 -c 1 -S 2 -E 3 -g 4 {} \\\n'.format(bed_file) )
        out.write( '> {}/vardict.var"\n\n'.format(mounted_outdir) )

        out.write(f'{container_line} \\\n' )
        out.write( 'bash -c "cat {}/vardict.var | awk \'NR!=1\' | /opt/VarDict/teststrandbias.R | /opt/VarDict/var2vcf_valid.pl -N \'TUMOR\' -f {} \\\n'.format(mounted_outdir, minVAF ) )
        out.write( '> {}/{}"\n\n'.format(mounted_outdir, input_parameters['outfile']) )

        out.write( '\necho -e "Done at `date +"%Y/%m/%d %H:%M:%S"`" 1>&2\n' )

    # "Run" the script that was generated, via the configured action (e.g.,
    # 'qsub' or 'bash'). The return code is intentionally not checked, to
    # preserve the existing fire-and-forget behavior.
    command_line = '{} {}'.format( input_parameters['action'], outfile )
    returnCode   = subprocess.call( command_line, shell=True )

    return outfile
| 42.371212
| 194
| 0.577776
| 1,362
| 11,186
| 4.530837
| 0.138032
| 0.148274
| 0.02771
| 0.058337
| 0.885918
| 0.885918
| 0.885918
| 0.880084
| 0.880084
| 0.880084
| 0
| 0.008256
| 0.27454
| 11,186
| 263
| 195
| 42.532319
| 0.752187
| 0.053728
| 0
| 0.771084
| 0
| 0.036145
| 0.237041
| 0.038782
| 0
| 0
| 0
| 0
| 0.03012
| 1
| 0.012048
| false
| 0
| 0.03012
| 0
| 0.054217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ada030ca64b2dfa74610511f19c39635a53f5378
| 2,828
|
py
|
Python
|
tests/services/admin_test.py
|
lsst/jwt_authorizer
|
76b984c809ad10843f2cd6f543e88ee1002bdb96
|
[
"MIT"
] | null | null | null |
tests/services/admin_test.py
|
lsst/jwt_authorizer
|
76b984c809ad10843f2cd6f543e88ee1002bdb96
|
[
"MIT"
] | 5
|
2019-06-18T17:28:52.000Z
|
2020-04-20T22:15:54.000Z
|
tests/services/admin_test.py
|
lsst/gafaelfawr
|
a9dd91c7770e194d609f64fee6fd955a27231bd6
|
[
"MIT"
] | 1
|
2019-06-25T16:04:57.000Z
|
2019-06-25T16:04:57.000Z
|
"""Tests for the service that handles token administrators."""
from __future__ import annotations
import pytest
from gafaelfawr.exceptions import PermissionDeniedError
from gafaelfawr.factory import Factory
from gafaelfawr.models.admin import Admin
@pytest.mark.asyncio
async def test_add(factory: Factory) -> None:
    """Adding token administrators: permissions and the bootstrap actor."""
    admin_service = factory.create_admin_service()

    # Baseline: only the seeded "admin" user is an administrator; an existing
    # admin may add another one.
    async with factory.session.begin():
        assert await admin_service.get_admins() == [Admin(username="admin")]
        await admin_service.add_admin(
            "example", actor="admin", ip_address="192.168.0.1"
        )

    async with factory.session.begin():
        assert await admin_service.get_admins() == [
            Admin(username="admin"),
            Admin(username="example"),
        ]
        assert await admin_service.is_admin("example")
        assert not await admin_service.is_admin("foo")

    # An actor who is not an admin may not add admins.
    async with factory.session.begin():
        with pytest.raises(PermissionDeniedError):
            await admin_service.add_admin(
                "foo", actor="bar", ip_address="127.0.0.1"
            )

    # The special "<bootstrap>" actor bypasses the permission check...
    async with factory.session.begin():
        await admin_service.add_admin(
            "foo", actor="<bootstrap>", ip_address="127.0.0.1"
        )

    # ...but "<bootstrap>" itself is not recorded as an admin.
    async with factory.session.begin():
        assert await admin_service.is_admin("foo")
        assert not await admin_service.is_admin("<bootstrap>")
@pytest.mark.asyncio
async def test_delete(factory: Factory) -> None:
    """Deleting token administrators: guard rails and the bootstrap actor."""
    admin_service = factory.create_admin_service()

    async with factory.session.begin():
        assert await admin_service.get_admins() == [Admin(username="admin")]

    # Deleting "admin" here is denied — presumably because it is the only
    # remaining administrator (TODO confirm against the service
    # implementation); the same delete succeeds later once another admin
    # exists.
    async with factory.session.begin():
        with pytest.raises(PermissionDeniedError):
            await admin_service.delete_admin(
                "admin", actor="admin", ip_address="127.0.0.1"
            )

    async with factory.session.begin():
        await admin_service.add_admin(
            "example", actor="admin", ip_address="127.0.0.1"
        )

    # With "example" present, removing "admin" is now allowed.
    async with factory.session.begin():
        await admin_service.delete_admin(
            "admin", actor="admin", ip_address="127.0.0.1"
        )

    async with factory.session.begin():
        assert await admin_service.is_admin("example")
        assert not await admin_service.is_admin("admin")
        assert await admin_service.get_admins() == [Admin(username="example")]

    async with factory.session.begin():
        await admin_service.add_admin(
            "other", actor="example", ip_address="127.0.0.1"
        )

    # The "<bootstrap>" actor may delete admins without being one itself.
    async with factory.session.begin():
        await admin_service.delete_admin(
            "other", actor="<bootstrap>", ip_address="127.0.0.1"
        )

    async with factory.session.begin():
        assert await admin_service.get_admins() == [Admin(username="example")]
| 32.883721
| 78
| 0.649576
| 340
| 2,828
| 5.232353
| 0.152941
| 0.155143
| 0.181563
| 0.168072
| 0.841484
| 0.841484
| 0.805509
| 0.776279
| 0.776279
| 0.743114
| 0
| 0.022957
| 0.229844
| 2,828
| 85
| 79
| 33.270588
| 0.793848
| 0.019802
| 0
| 0.546875
| 0
| 0
| 0.086045
| 0
| 0
| 0
| 0
| 0
| 0.171875
| 1
| 0
| false
| 0
| 0.078125
| 0
| 0.078125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a8f1eb55cd06ad9ccf0bfbde01210e56f0d49e97
| 8,718
|
py
|
Python
|
tests/test_resource_token.py
|
andraune/Run4IT_BackEnd
|
a481427a0d1189a1f08c42e7ac1b452af6bbfc8d
|
[
"MIT"
] | 1
|
2022-03-29T06:11:20.000Z
|
2022-03-29T06:11:20.000Z
|
tests/test_resource_token.py
|
andraune/run4it_backend
|
a481427a0d1189a1f08c42e7ac1b452af6bbfc8d
|
[
"MIT"
] | null | null | null |
tests/test_resource_token.py
|
andraune/run4it_backend
|
a481427a0d1189a1f08c42e7ac1b452af6bbfc8d
|
[
"MIT"
] | null | null | null |
import pytest
import datetime as dt
from run4it.api.token.model import TokenRegistry
from run4it.api.token.resource import Token
from .helpers import (
get_response_json,
register_and_login_confirmed_user,
register_and_login_user_with_unfresh_token,
get_authorization_header)
@pytest.mark.usefixtures('db')
class TestTokenResource:
    """HTTP-level tests for the Token resource.

    Exercises GET, DELETE, and PUT on the token endpoint for: anonymous
    access (401), acting on another user's token (403), nonexistent tokens
    (404), unfresh-token restrictions, and the unsupported POST method (405).
    Token ids 1 and 2 are the access/refresh tokens created by the login
    helpers — presumably in that order; TODO confirm against the helpers.
    """

    def test_content_type_is_json(self, api, client):
        # Even unauthenticated responses must be JSON.
        url = api.url_for(Token, token_id=1)
        response = client.get(url)
        assert(response.headers["Content-Type"] == 'application/json')

    def test_get_token_not_logged_in(self, api, client):
        url = api.url_for(Token, token_id=1)
        response = client.get(url)
        response_json = get_response_json(response.data)
        assert(response.status_code == 401)
        assert(response_json["errors"]["auth"] is not None)

    def test_get_token_logged_in(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=1)
        response = client.get(url, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 200)
        # The raw JWT id ("jti") must never be exposed in the payload.
        assert("jti" not in response_json)
        assert(response_json["id"] == 1)
        assert(response_json["tokenType"] in ("access", "refresh"))
        assert(response_json["username"] == "tokenreader")
        assert(response_json["revoked"] == False)
        assert(response_json["expires"] is not None)

    def test_get_token_with_unfresh_token(self, api, client):
        # Unlike DELETE/PUT below, reading a token does not require a fresh
        # token.
        unfresh_token = register_and_login_user_with_unfresh_token(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=3)
        response = client.get(url, headers=get_authorization_header(unfresh_token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 200)

    def test_get_other_user_token(self, api, client):
        # Token owned by 'another_user'; reading it as 'tokenreader' is
        # forbidden.
        new_token = TokenRegistry('12345', 'access', 'another_user', False, dt.datetime.now() + dt.timedelta(hours=1))
        new_token.save()
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=new_token.id)
        response = client.get(url, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 403)
        assert(response_json["errors"]["token"] is not None)

    def test_request_nonexisting_token(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=999)
        response = client.get(url, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 404)
        assert(response_json["errors"]["token"] is not None)

    def test_delete_token_not_logged_in(self, api, client):
        url = api.url_for(Token, token_id=2)
        response = client.delete(url)
        response_json = get_response_json(response.data)
        assert(response.status_code == 401)
        assert(response_json["errors"]["auth"] is not None)

    def test_delete_token_logged_in(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=2)
        response = client.delete(url, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 200)
        assert(response_json["messages"]["token"] is not None)
        # Only the targeted token is removed from the registry.
        assert(TokenRegistry.get_by_id(1) is not None)
        assert(TokenRegistry.get_by_id(2) is None) # should have been deleted

    def test_delete_token_with_unfresh_token(self, api, client):
        # DELETE requires a fresh token, so an unfresh one yields 401.
        unfresh_token = register_and_login_user_with_unfresh_token(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=2)
        response = client.delete(url, headers=get_authorization_header(unfresh_token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 401)
        assert(response_json["errors"]["auth"] is not None)

    def test_delete_other_user_token(self, api, client):
        new_token = TokenRegistry('12345', 'access', 'another_user', False, dt.datetime.now() + dt.timedelta(hours=1))
        new_token.save()
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=new_token.id)
        response = client.delete(url, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 403)
        assert(response_json["errors"]["token"] is not None)

    def test_delete_nonexisting_token(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=999)
        response = client.delete(url, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 404)
        assert(response_json["errors"]["token"] is not None)

    def test_update_token_not_logged_in(self, api, client):
        url = api.url_for(Token, token_id=2)
        response = client.put(url)
        response_json = get_response_json(response.data)
        assert(response.status_code == 401)
        assert(response_json["errors"]["auth"] is not None)

    def test_update_token_revoke(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=2)
        response = client.put(url, data={'revoked' : 'True'}, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 200)
        assert(response_json["revoked"] == True)

    def test_update_token_unrevoke(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        # Pre-revoke the token in the registry so the PUT has something to
        # undo.
        update_token = TokenRegistry.get_by_id(2)
        update_token.revoked = True
        url = api.url_for(Token, token_id=2)
        response = client.put(url, data={'revoked' : 'False'}, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 200)
        assert(response_json["revoked"] == False)

    def test_update_token_actually_saved(self, api, client):
        update_token = TokenRegistry('jti', 'access', 'tokenreader', False, dt.datetime(2001, 1, 2, 12, 11, 10, 9))
        update_token.save()
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=update_token.id)
        # Submit extra fields alongside 'revoked'; only 'revoked' should be
        # persisted, every other attribute must remain untouched.
        response = client.put(url, data={'revoked':'True','jti':'newjti','tokenType':'refresh','username':'newuser','expires':str(dt.datetime(2002, 2, 3, 16, 15, 14, 13))}, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(update_token.jti == 'jti')
        assert(update_token.token_type == 'access')
        assert(update_token.username == 'tokenreader')
        assert(update_token.revoked == True)
        assert(update_token.expires == dt.datetime(2001, 1, 2, 12, 11, 10, 9))

    def test_update_token_with_unfresh_token(self, api, client):
        # PUT requires a fresh token, so an unfresh one yields 401.
        unfresh_token = register_and_login_user_with_unfresh_token(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=2)
        response = client.put(url, data={'revoked' : 'True'}, headers=get_authorization_header(unfresh_token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 401)
        assert(response_json["errors"]["auth"] is not None)

    def test_update_other_user_token(self, api, client):
        new_token = TokenRegistry('12345', 'access', 'another_user', False, dt.datetime.now() + dt.timedelta(hours=1))
        new_token.save()
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=new_token.id)
        response = client.put(url, data={'revoked' : 'True'}, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 403)
        assert(response_json["errors"]["token"] is not None)

    def test_update_nonexisting_token(self, api, client):
        token,_ = register_and_login_confirmed_user(api, client, "tokenreader", "token@reader.com", "passwd")
        url = api.url_for(Token, token_id=999)
        response = client.put(url, data={'revoked' : 'True'}, headers=get_authorization_header(token))
        response_json = get_response_json(response.data)
        assert(response.status_code == 404)
        assert(response_json["errors"]["token"] is not None)

    def test_post_token_not_supported(self, api, client):
        url = api.url_for(Token, token_id=1)
        response = client.post(url)
        assert(response.status_code == 405) # not allowed
| 49.534091
| 207
| 0.757972
| 1,247
| 8,718
| 5.016038
| 0.090617
| 0.105516
| 0.039488
| 0.036451
| 0.844285
| 0.830536
| 0.830536
| 0.820464
| 0.806555
| 0.792486
| 0
| 0.017458
| 0.106446
| 8,718
| 175
| 208
| 49.817143
| 0.785494
| 0.004129
| 0
| 0.61039
| 0
| 0
| 0.105773
| 0
| 0
| 0
| 0
| 0
| 0.292208
| 1
| 0.123377
| false
| 0.090909
| 0.032468
| 0
| 0.162338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d103e9ab2331e1527c962428d6a8839a13359a86
| 14,585
|
py
|
Python
|
playtime/lesson02_99bottles.py
|
govex/python-lessons
|
e692f48b6db008a45df0b941dee1e580f5a6c800
|
[
"MIT"
] | 425
|
2015-01-13T03:19:10.000Z
|
2022-03-13T00:34:44.000Z
|
playtime/lesson02_99bottles.py
|
Supercodero/python-lessons
|
38409c318e7a62d30b2ffd68f8a7a5a5ec00778d
|
[
"MIT"
] | null | null | null |
playtime/lesson02_99bottles.py
|
Supercodero/python-lessons
|
38409c318e7a62d30b2ffd68f8a7a5a5ec00778d
|
[
"MIT"
] | 178
|
2015-01-08T05:01:05.000Z
|
2021-12-02T00:56:58.000Z
|
# Difficulty Level: Beginner
# Can you make Python print out the song for 99 bottles of beer on the wall?
# Note: You can use range() in three different ways
# First:
# range(5) will give you a list containing [0, 1, 2, 3, 4]
# In this case, range assumes you want to start counting at 0, and the parameter you give is the number to stop *just* short of.
# Second:
# range(5, 10) will give you a list containing [5, 6, 7, 8, 9]
# In this case, the two parameters you give to range() are the number to start at and the number to stop *just* short of.
# Helpful mnemonic: range(start, stop)
# Third:
# range(5, 15, 3) will give you a list containing [5, 8, 11, 14]
# In this case, the three parameters you give to range() are the number to start at, the number to stop *just* short of, and the number to increment each time by.
# Note that normally, the number to increment each time by is assumed to be 1. (In other words, you add 1 each time through.)
# That's why it goes [0, 1, 2, 3, 4] unless you specify that third parameter, called the step.
# Helpful mnemonic: range(start, stop, step)
# Using range() and a loop, print out the song. Your output should look like this:
# 99 bottles of beer on the wall, 99 bottles of beer ...
# If one of those bottles should happen to fall, 98 bottles of beer on the wall
# 98 bottles of beer on the wall, 98 bottles of beer ...
# If one of those bottles should happen to fall, 97 bottles of beer on the wall
# 97 bottles of beer on the wall, 97 bottles of beer ...
# If one of those bottles should happen to fall, 96 bottles of beer on the wall
# 96 bottles of beer on the wall, 96 bottles of beer ...
# If one of those bottles should happen to fall, 95 bottles of beer on the wall
# 95 bottles of beer on the wall, 95 bottles of beer ...
# If one of those bottles should happen to fall, 94 bottles of beer on the wall
# 94 bottles of beer on the wall, 94 bottles of beer ...
# If one of those bottles should happen to fall, 93 bottles of beer on the wall
# 93 bottles of beer on the wall, 93 bottles of beer ...
# If one of those bottles should happen to fall, 92 bottles of beer on the wall
# 92 bottles of beer on the wall, 92 bottles of beer ...
# If one of those bottles should happen to fall, 91 bottles of beer on the wall
# 91 bottles of beer on the wall, 91 bottles of beer ...
# If one of those bottles should happen to fall, 90 bottles of beer on the wall
# 90 bottles of beer on the wall, 90 bottles of beer ...
# If one of those bottles should happen to fall, 89 bottles of beer on the wall
# 89 bottles of beer on the wall, 89 bottles of beer ...
# If one of those bottles should happen to fall, 88 bottles of beer on the wall
# 88 bottles of beer on the wall, 88 bottles of beer ...
# If one of those bottles should happen to fall, 87 bottles of beer on the wall
# 87 bottles of beer on the wall, 87 bottles of beer ...
# If one of those bottles should happen to fall, 86 bottles of beer on the wall
# 86 bottles of beer on the wall, 86 bottles of beer ...
# If one of those bottles should happen to fall, 85 bottles of beer on the wall
# 85 bottles of beer on the wall, 85 bottles of beer ...
# If one of those bottles should happen to fall, 84 bottles of beer on the wall
# 84 bottles of beer on the wall, 84 bottles of beer ...
# If one of those bottles should happen to fall, 83 bottles of beer on the wall
# 83 bottles of beer on the wall, 83 bottles of beer ...
# If one of those bottles should happen to fall, 82 bottles of beer on the wall
# 82 bottles of beer on the wall, 82 bottles of beer ...
# If one of those bottles should happen to fall, 81 bottles of beer on the wall
# 81 bottles of beer on the wall, 81 bottles of beer ...
# If one of those bottles should happen to fall, 80 bottles of beer on the wall
# 80 bottles of beer on the wall, 80 bottles of beer ...
# If one of those bottles should happen to fall, 79 bottles of beer on the wall
# 79 bottles of beer on the wall, 79 bottles of beer ...
# If one of those bottles should happen to fall, 78 bottles of beer on the wall
# 78 bottles of beer on the wall, 78 bottles of beer ...
# If one of those bottles should happen to fall, 77 bottles of beer on the wall
# 77 bottles of beer on the wall, 77 bottles of beer ...
# If one of those bottles should happen to fall, 76 bottles of beer on the wall
# 76 bottles of beer on the wall, 76 bottles of beer ...
# If one of those bottles should happen to fall, 75 bottles of beer on the wall
# 75 bottles of beer on the wall, 75 bottles of beer ...
# If one of those bottles should happen to fall, 74 bottles of beer on the wall
# 74 bottles of beer on the wall, 74 bottles of beer ...
# If one of those bottles should happen to fall, 73 bottles of beer on the wall
# 73 bottles of beer on the wall, 73 bottles of beer ...
# If one of those bottles should happen to fall, 72 bottles of beer on the wall
# 72 bottles of beer on the wall, 72 bottles of beer ...
# If one of those bottles should happen to fall, 71 bottles of beer on the wall
# 71 bottles of beer on the wall, 71 bottles of beer ...
# If one of those bottles should happen to fall, 70 bottles of beer on the wall
# 70 bottles of beer on the wall, 70 bottles of beer ...
# If one of those bottles should happen to fall, 69 bottles of beer on the wall
# 69 bottles of beer on the wall, 69 bottles of beer ...
# If one of those bottles should happen to fall, 68 bottles of beer on the wall
# 68 bottles of beer on the wall, 68 bottles of beer ...
# If one of those bottles should happen to fall, 67 bottles of beer on the wall
# 67 bottles of beer on the wall, 67 bottles of beer ...
# If one of those bottles should happen to fall, 66 bottles of beer on the wall
# 66 bottles of beer on the wall, 66 bottles of beer ...
# If one of those bottles should happen to fall, 65 bottles of beer on the wall
# 65 bottles of beer on the wall, 65 bottles of beer ...
# If one of those bottles should happen to fall, 64 bottles of beer on the wall
# 64 bottles of beer on the wall, 64 bottles of beer ...
# If one of those bottles should happen to fall, 63 bottles of beer on the wall
# 63 bottles of beer on the wall, 63 bottles of beer ...
# If one of those bottles should happen to fall, 62 bottles of beer on the wall
# 62 bottles of beer on the wall, 62 bottles of beer ...
# If one of those bottles should happen to fall, 61 bottles of beer on the wall
# 61 bottles of beer on the wall, 61 bottles of beer ...
# If one of those bottles should happen to fall, 60 bottles of beer on the wall
# 60 bottles of beer on the wall, 60 bottles of beer ...
# If one of those bottles should happen to fall, 59 bottles of beer on the wall
# 59 bottles of beer on the wall, 59 bottles of beer ...
# If one of those bottles should happen to fall, 58 bottles of beer on the wall
# 58 bottles of beer on the wall, 58 bottles of beer ...
# If one of those bottles should happen to fall, 57 bottles of beer on the wall
# 57 bottles of beer on the wall, 57 bottles of beer ...
# If one of those bottles should happen to fall, 56 bottles of beer on the wall
# 56 bottles of beer on the wall, 56 bottles of beer ...
# If one of those bottles should happen to fall, 55 bottles of beer on the wall
# 55 bottles of beer on the wall, 55 bottles of beer ...
# If one of those bottles should happen to fall, 54 bottles of beer on the wall
# 54 bottles of beer on the wall, 54 bottles of beer ...
# If one of those bottles should happen to fall, 53 bottles of beer on the wall
# 53 bottles of beer on the wall, 53 bottles of beer ...
# If one of those bottles should happen to fall, 52 bottles of beer on the wall
# 52 bottles of beer on the wall, 52 bottles of beer ...
# If one of those bottles should happen to fall, 51 bottles of beer on the wall
# 51 bottles of beer on the wall, 51 bottles of beer ...
# If one of those bottles should happen to fall, 50 bottles of beer on the wall
# 50 bottles of beer on the wall, 50 bottles of beer ...
# If one of those bottles should happen to fall, 49 bottles of beer on the wall
# 49 bottles of beer on the wall, 49 bottles of beer ...
# If one of those bottles should happen to fall, 48 bottles of beer on the wall
# 48 bottles of beer on the wall, 48 bottles of beer ...
# If one of those bottles should happen to fall, 47 bottles of beer on the wall
# 47 bottles of beer on the wall, 47 bottles of beer ...
# If one of those bottles should happen to fall, 46 bottles of beer on the wall
# 46 bottles of beer on the wall, 46 bottles of beer ...
# If one of those bottles should happen to fall, 45 bottles of beer on the wall
# 45 bottles of beer on the wall, 45 bottles of beer ...
# If one of those bottles should happen to fall, 44 bottles of beer on the wall
# 44 bottles of beer on the wall, 44 bottles of beer ...
# If one of those bottles should happen to fall, 43 bottles of beer on the wall
# 43 bottles of beer on the wall, 43 bottles of beer ...
# If one of those bottles should happen to fall, 42 bottles of beer on the wall
# 42 bottles of beer on the wall, 42 bottles of beer ...
# If one of those bottles should happen to fall, 41 bottles of beer on the wall
# 41 bottles of beer on the wall, 41 bottles of beer ...
# If one of those bottles should happen to fall, 40 bottles of beer on the wall
# 40 bottles of beer on the wall, 40 bottles of beer ...
# If one of those bottles should happen to fall, 39 bottles of beer on the wall
# 39 bottles of beer on the wall, 39 bottles of beer ...
# If one of those bottles should happen to fall, 38 bottles of beer on the wall
# 38 bottles of beer on the wall, 38 bottles of beer ...
# If one of those bottles should happen to fall, 37 bottles of beer on the wall
# 37 bottles of beer on the wall, 37 bottles of beer ...
# If one of those bottles should happen to fall, 36 bottles of beer on the wall
# 36 bottles of beer on the wall, 36 bottles of beer ...
# If one of those bottles should happen to fall, 35 bottles of beer on the wall
# 35 bottles of beer on the wall, 35 bottles of beer ...
# If one of those bottles should happen to fall, 34 bottles of beer on the wall
# 34 bottles of beer on the wall, 34 bottles of beer ...
# If one of those bottles should happen to fall, 33 bottles of beer on the wall
# 33 bottles of beer on the wall, 33 bottles of beer ...
# If one of those bottles should happen to fall, 32 bottles of beer on the wall
# 32 bottles of beer on the wall, 32 bottles of beer ...
# If one of those bottles should happen to fall, 31 bottles of beer on the wall
# 31 bottles of beer on the wall, 31 bottles of beer ...
# If one of those bottles should happen to fall, 30 bottles of beer on the wall
# 30 bottles of beer on the wall, 30 bottles of beer ...
# If one of those bottles should happen to fall, 29 bottles of beer on the wall
# 29 bottles of beer on the wall, 29 bottles of beer ...
# If one of those bottles should happen to fall, 28 bottles of beer on the wall
# 28 bottles of beer on the wall, 28 bottles of beer ...
# If one of those bottles should happen to fall, 27 bottles of beer on the wall
# 27 bottles of beer on the wall, 27 bottles of beer ...
# If one of those bottles should happen to fall, 26 bottles of beer on the wall
# 26 bottles of beer on the wall, 26 bottles of beer ...
# If one of those bottles should happen to fall, 25 bottles of beer on the wall
# 25 bottles of beer on the wall, 25 bottles of beer ...
# If one of those bottles should happen to fall, 24 bottles of beer on the wall
# 24 bottles of beer on the wall, 24 bottles of beer ...
# If one of those bottles should happen to fall, 23 bottles of beer on the wall
# 23 bottles of beer on the wall, 23 bottles of beer ...
# If one of those bottles should happen to fall, 22 bottles of beer on the wall
# 22 bottles of beer on the wall, 22 bottles of beer ...
# If one of those bottles should happen to fall, 21 bottles of beer on the wall
# 21 bottles of beer on the wall, 21 bottles of beer ...
# If one of those bottles should happen to fall, 20 bottles of beer on the wall
# 20 bottles of beer on the wall, 20 bottles of beer ...
# If one of those bottles should happen to fall, 19 bottles of beer on the wall
# 19 bottles of beer on the wall, 19 bottles of beer ...
# If one of those bottles should happen to fall, 18 bottles of beer on the wall
# 18 bottles of beer on the wall, 18 bottles of beer ...
# If one of those bottles should happen to fall, 17 bottles of beer on the wall
# 17 bottles of beer on the wall, 17 bottles of beer ...
# If one of those bottles should happen to fall, 16 bottles of beer on the wall
# 16 bottles of beer on the wall, 16 bottles of beer ...
# If one of those bottles should happen to fall, 15 bottles of beer on the wall
# 15 bottles of beer on the wall, 15 bottles of beer ...
# If one of those bottles should happen to fall, 14 bottles of beer on the wall
# 14 bottles of beer on the wall, 14 bottles of beer ...
# If one of those bottles should happen to fall, 13 bottles of beer on the wall
# 13 bottles of beer on the wall, 13 bottles of beer ...
# If one of those bottles should happen to fall, 12 bottles of beer on the wall
# 12 bottles of beer on the wall, 12 bottles of beer ...
# If one of those bottles should happen to fall, 11 bottles of beer on the wall
# 11 bottles of beer on the wall, 11 bottles of beer ...
# If one of those bottles should happen to fall, 10 bottles of beer on the wall
# 10 bottles of beer on the wall, 10 bottles of beer ...
# If one of those bottles should happen to fall, 9 bottles of beer on the wall
# 9 bottles of beer on the wall, 9 bottles of beer ...
# If one of those bottles should happen to fall, 8 bottles of beer on the wall
# 8 bottles of beer on the wall, 8 bottles of beer ...
# If one of those bottles should happen to fall, 7 bottles of beer on the wall
# 7 bottles of beer on the wall, 7 bottles of beer ...
# If one of those bottles should happen to fall, 6 bottles of beer on the wall
# 6 bottles of beer on the wall, 6 bottles of beer ...
# If one of those bottles should happen to fall, 5 bottles of beer on the wall
# 5 bottles of beer on the wall, 5 bottles of beer ...
# If one of those bottles should happen to fall, 4 bottles of beer on the wall
# 4 bottles of beer on the wall, 4 bottles of beer ...
# If one of those bottles should happen to fall, 3 bottles of beer on the wall
# 3 bottles of beer on the wall, 3 bottles of beer ...
# If one of those bottles should happen to fall, 2 bottles of beer on the wall
# 2 bottles of beer on the wall, 2 bottles of beer ...
# If one of those bottles should happen to fall, 1 bottles of beer on the wall
| 65.995475
| 162
| 0.728077
| 2,867
| 14,585
| 3.703872
| 0.064179
| 0.250024
| 0.361145
| 0.278275
| 0.957906
| 0.951502
| 0.946982
| 0.924381
| 0.924381
| 0.924381
| 0
| 0.05235
| 0.218101
| 14,585
| 220
| 163
| 66.295455
| 0.878814
| 0.970243
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
0f5ade2432db14638fdf49ac70cdcac799bd71ff
| 208
|
py
|
Python
|
bin/setup.py
|
bb12489/slideshow
|
7c9c803ca29d8660490fa24cdaccf807e1fe3a50
|
[
"MIT"
] | 3
|
2018-05-15T07:52:15.000Z
|
2019-08-18T20:54:46.000Z
|
bin/setup.py
|
bb12489/slideshow
|
7c9c803ca29d8660490fa24cdaccf807e1fe3a50
|
[
"MIT"
] | null | null | null |
bin/setup.py
|
bb12489/slideshow
|
7c9c803ca29d8660490fa24cdaccf807e1fe3a50
|
[
"MIT"
] | 1
|
2019-07-09T12:12:01.000Z
|
2019-07-09T12:12:01.000Z
|
"""Setup helper: intended to (optionally) update apt packages and touch the
slideshow working directory.

Fix: the original called ``subprocess.call`` but only imported names *from*
``subprocess`` (``from subprocess import STDOUT, check_call``), so the bare
``subprocess`` module was never bound and the script died with a NameError.
"""
from subprocess import STDOUT, check_call
import subprocess  # needed: subprocess.call is used below
import os
#check_call(['sudo', 'apt-get', 'update', '-y'])
#check_call(['sudo', 'apt-get', 'upgrade', '-y'])
# NOTE(review): the parenthesised "cd" runs in a throwaway subshell, so it
# changes no state visible to this script; kept as-is for compatibility.
subprocess.call("(cd ~/temp/slideshow/)", shell=True)
| 26
| 53
| 0.658654
| 29
| 208
| 4.62069
| 0.62069
| 0.201493
| 0.19403
| 0.238806
| 0.283582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100962
| 208
| 7
| 54
| 29.714286
| 0.716578
| 0.456731
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0f60b6566edc7032066ce9d760bf91c4f9cedc88
| 141
|
py
|
Python
|
aws_managers/athena/__init__.py
|
vahndi/aws-managers
|
bdbfb2b8a9258a53e3ea4dfbbfe5491a34113899
|
[
"MIT"
] | null | null | null |
aws_managers/athena/__init__.py
|
vahndi/aws-managers
|
bdbfb2b8a9258a53e3ea4dfbbfe5491a34113899
|
[
"MIT"
] | null | null | null |
aws_managers/athena/__init__.py
|
vahndi/aws-managers
|
bdbfb2b8a9258a53e3ea4dfbbfe5491a34113899
|
[
"MIT"
] | null | null | null |
from aws_managers.athena.athena_frame import AthenaFrame
from aws_managers.athena.queries.athena_query_generator import AthenaQueryGenerator
| 47
| 83
| 0.907801
| 18
| 141
| 6.833333
| 0.611111
| 0.113821
| 0.243902
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056738
| 141
| 2
| 84
| 70.5
| 0.924812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0f76c60fee6036da1d73316412c2eaeff07d6819
| 17,815
|
py
|
Python
|
sam/sim/test/unit-apps/test_unit_bv_vec_elemmul.py
|
weiya711/scadi_graph
|
5ce78fe09286e9722c2627f7f959c4ad3f2288eb
|
[
"MIT"
] | null | null | null |
sam/sim/test/unit-apps/test_unit_bv_vec_elemmul.py
|
weiya711/scadi_graph
|
5ce78fe09286e9722c2627f7f959c4ad3f2288eb
|
[
"MIT"
] | null | null | null |
sam/sim/test/unit-apps/test_unit_bv_vec_elemmul.py
|
weiya711/scadi_graph
|
5ce78fe09286e9722c2627f7f959c4ad3f2288eb
|
[
"MIT"
] | null | null | null |
import pytest
import random
from sam.sim.src.rd_scanner import BVRdScan, CompressedCrdRdScan
from sam.sim.src.bitvector import BV, BVDrop
from sam.sim.src.wr_scanner import ValsWrScan, CompressWrScan
from sam.sim.src.joiner import IntersectBV2
from sam.sim.src.compute import Multiply2
from sam.sim.src.array import Array
from sam.sim.src.split import Split
from sam.sim.src.base import remove_emptystr
from sam.sim.test.test import TIMEOUT, check_arr, check_seg_arr, remove_zeros
def bv(ll):
    """Pack a collection of bit positions into a single integer bitvector.

    Each element of *ll* is treated as a bit index; the returned integer has
    exactly those bits set (duplicates are harmless, since OR is idempotent).
    """
    bits = 0
    for position in set(ll):
        bits |= 1 << position
    return bits
def inner_bv(ll, size, sf):
    """Build the inner-level bitvector words for coordinate list *ll*.

    The coordinate space of extent *size* is cut into blocks of *sf*
    positions; for every block window that contains at least one
    coordinate, a packed word (via bv()) of the in-block offsets is
    appended.  Empty windows produce no word.
    """
    words = []
    num_windows = int(size / sf) + 2
    for window in range(num_windows):
        lo = max((window - 1) * sf, 0)
        hi = window * sf
        packed = bv([e % sf for e in ll if lo <= e < hi])
        if packed:
            words.append(packed)
    return words
@pytest.mark.parametrize("nnz", [1, 10, 100, 500, 1000])
def test_vec_bv_split(nnz, debug_sim, max_val=999, size=1000, fill=0):
    """Stream two random compressed vectors through CompressedCrdRdScan ->
    Split -> BV and compare the written bitvectors against the software
    golds produced by bv()/inner_bv().

    nnz       -- how many random coordinates to draw per vector (duplicates
                 are dropped, so the actual vector may be shorter)
    debug_sim -- fixture flag; enables verbose tracing in the sim blocks
    """
    sf = 32  # split factor: coordinates are grouped into blocks of 32
    # Random compressed vector 1: sorted unique coords + seg/vals arrays.
    crd_arr1 = [random.randint(0, max_val) for _ in range(nnz)]
    crd_arr1 = sorted(set(crd_arr1))
    seg_arr1 = [0, len(crd_arr1)]
    vals_arr1 = [random.randint(0, max_val) for _ in range(len(crd_arr1))]
    # Random compressed vector 2, built the same way.
    crd_arr2 = [random.randint(0, max_val) for _ in range(nnz)]
    crd_arr2 = sorted(set(crd_arr2))
    seg_arr2 = [0, len(crd_arr2)]
    vals_arr2 = [random.randint(0, max_val) for _ in range(len(crd_arr2))]
    if debug_sim:
        print("Compressed VECTOR 1:\n", seg_arr1, "\n", crd_arr1, "\n", vals_arr1)
        print("Compressed VECTOR 2:\n", seg_arr2, "\n", crd_arr2, "\n", vals_arr2)
    # Software gold: one outer bitvector word per vector, plus the inner
    # bitvector words zero-padded to 32 entries to match the writer size.
    gold_bv1_1 = [bv([int(elem / sf) for elem in crd_arr1])]
    gold_bv1_0 = inner_bv(crd_arr1, size, sf)
    gold_bv1_0 += (32 - len(gold_bv1_0)) * [0]
    gold_bv2_1 = [bv([int(elem / sf) for elem in crd_arr2])]
    gold_bv2_0 = inner_bv(crd_arr2, size, sf)
    gold_bv2_0 += (32 - len(gold_bv2_0)) * [0]
    # Simulated pipeline: rd-scan -> split -> BV -> wr-scan, one lane per
    # vector, with inner (*_0) and outer (*_1) streams each.
    crdscan1 = CompressedCrdRdScan(seg_arr=seg_arr1, crd_arr=crd_arr1, debug=debug_sim)
    crdscan2 = CompressedCrdRdScan(seg_arr=seg_arr2, crd_arr=crd_arr2, debug=debug_sim)
    split1 = Split(split_factor=sf, orig_crd=False, debug=debug_sim)
    split2 = Split(split_factor=sf, orig_crd=False, debug=debug_sim)
    bv1_0 = BV(debug=debug_sim)
    bv1_1 = BV(debug=debug_sim)
    bv2_0 = BV(debug=debug_sim)
    bv2_1 = BV(debug=debug_sim)
    wrscan1_0 = ValsWrScan(size=sf, fill=fill)
    wrscan1_1 = ValsWrScan(size=1, fill=fill)
    wrscan2_0 = ValsWrScan(size=sf, fill=fill)
    wrscan2_1 = ValsWrScan(size=1, fill=fill)
    # Input reference streams: one root reference then the done token 'D'.
    in_ref1 = [0, 'D']
    in_ref2 = [0, 'D']
    done = False
    time = 0
    out_split1_0 = []
    out_split1_1 = []
    out_split2_0 = []
    out_split2_1 = []
    # Cycle-accurate loop: every block is fed and updated once per cycle,
    # so statement order here defines the dataflow through the stages.
    while not done and time < TIMEOUT:
        if len(in_ref1) > 0:
            crdscan1.set_in_ref(in_ref1.pop(0))
        crdscan1.update()
        if len(in_ref2) > 0:
            crdscan2.set_in_ref(in_ref2.pop(0))
        crdscan2.update()
        split1.set_in_crd(crdscan1.out_crd())
        split1.update()
        split2.set_in_crd(crdscan2.out_crd())
        split2.update()
        # Record the raw split streams for the optional debug dump below.
        out_split1_0.append(split1.out_inner_crd())
        out_split1_1.append(split1.out_outer_crd())
        out_split2_0.append(split2.out_inner_crd())
        out_split2_1.append(split2.out_outer_crd())
        bv1_0.set_in_crd(split1.out_inner_crd())
        bv1_1.set_in_crd(split1.out_outer_crd())
        bv2_0.set_in_crd(split2.out_inner_crd())
        bv2_1.set_in_crd(split2.out_outer_crd())
        bv1_0.update()
        bv1_1.update()
        bv2_0.update()
        bv2_1.update()
        wrscan1_0.set_input(bv1_0.out_bv_int())
        wrscan1_1.set_input(bv1_1.out_bv_int())
        wrscan2_0.set_input(bv2_0.out_bv_int())
        wrscan2_1.set_input(bv2_1.out_bv_int())
        wrscan1_0.update()
        wrscan1_1.update()
        wrscan2_0.update()
        wrscan2_1.update()
        # NOTE(review): this trace print is unconditional (not gated on
        # debug_sim), consistent with the other tests in this file.
        print("Timestep", time, "\t Done -- \n",
              "\nRdScan1:", crdscan1.out_done(), "\tRdScan2:", crdscan2.out_done(),
              "\nSplit1:", split1.out_done(), "\tSplit2:", split2.out_done(),
              "\nBV:", bv1_0.out_done(), bv1_1.out_done(), bv2_0.out_done(), bv2_1.out_done(),
              "\nWrScan:", wrscan1_0.out_done(), wrscan1_1.out_done(), wrscan2_0.out_done(), wrscan2_1.out_done()
              )
        # Finished once all four writers have drained their streams.
        done = wrscan2_0.out_done() and wrscan2_1.out_done() and wrscan1_1.out_done() and wrscan1_0.out_done()
        time += 1
    if debug_sim:
        print(remove_emptystr(out_split1_0))
        print(remove_emptystr(out_split1_1))
        print(remove_emptystr(out_split2_0))
        print(remove_emptystr(out_split2_1))
        print([bin(i) for i in wrscan1_0.get_arr()])
        print([bin(i) for i in wrscan1_1.get_arr()])
        print([bin(i) for i in wrscan2_0.get_arr()])
        print([bin(i) for i in wrscan2_1.get_arr()])
    # Written bitvectors must match the software golds exactly.
    check_arr(wrscan1_0, gold_bv1_0)
    check_arr(wrscan1_1, gold_bv1_1)
    check_arr(wrscan2_0, gold_bv2_0)
    check_arr(wrscan2_1, gold_bv2_1)
# TODO: BV already set vecmul ONLY and then combined
@pytest.mark.parametrize("nnz", [1, 10, 100, 500, 1000])
def test_mat_elemmul_bvonly(nnz, debug_sim, max_val=1000, size=1001, fill=0):
    """Elementwise-multiply two random sparse vectors entirely in bitvector
    form: BVRdScan (outer + inner) -> IntersectBV2 -> Array value loads ->
    Multiply2, with BVDrop trimming the result bitvectors.

    The input bitvectors are precomputed in software (bv()/inner_bv()) and
    replayed through BVRdScan, so only the intersect/multiply path is
    exercised here.
    """
    assert (size > max_val)  # coordinate space must cover every drawn coord
    sf = 32  # split factor: 32 coordinates per bitvector word
    # Random compressed vector 1 (sorted unique coords, parallel values).
    crd_arr1 = [random.randint(0, max_val) for _ in range(nnz)]
    crd_arr1 = sorted(set(crd_arr1))
    seg_arr1 = [0, len(crd_arr1)]
    vals_arr1 = [random.randint(0, max_val) for _ in range(len(crd_arr1))]
    # Random compressed vector 2.
    crd_arr2 = [random.randint(0, max_val) for _ in range(nnz)]
    crd_arr2 = sorted(set(crd_arr2))
    seg_arr2 = [0, len(crd_arr2)]
    vals_arr2 = [random.randint(0, max_val) for _ in range(len(crd_arr2))]
    if debug_sim:
        print("Compressed VECTOR 1:\n", seg_arr1, "\n", crd_arr1, "\n", vals_arr1)
        print("Compressed VECTOR 2:\n", seg_arr2, "\n", crd_arr2, "\n", vals_arr2)
    # Software-gold input bitvectors, zero-padded to 32 inner words.
    gold_bv1_1 = [bv([int(elem / sf) for elem in crd_arr1])]
    gold_bv1_0 = inner_bv(crd_arr1, size, sf)
    gold_bv1_0 += (32 - len(gold_bv1_0)) * [0]
    gold_bv2_1 = [bv([int(elem / sf) for elem in crd_arr2])]
    gold_bv2_0 = inner_bv(crd_arr2, size, sf)
    gold_bv2_0 += (32 - len(gold_bv2_0)) * [0]
    # Software-gold result: coordinate intersection and per-coord products.
    gold_crd = sorted(set(crd_arr1) & set(crd_arr2))
    gold_seg = [0, len(gold_crd)]
    gold_vals = []
    gold_bv1 = []
    gold_bv0 = []
    if gold_crd:
        gold_vals = [vals_arr1[crd_arr1.index(i)] * vals_arr2[crd_arr2.index(i)] for i in gold_crd]
        gold_bv1 = [bv([int(elem / sf) for elem in gold_crd])]
        gold_bv0 = inner_bv(gold_crd, size, sf)
    if debug_sim:
        print("Compressed RESULT :\n", gold_seg, "\n", gold_crd, "\n", gold_vals)
        print("BV arr1 0", gold_bv1_0)
        print("BV arr1 1", gold_bv1_1)
        print("BV arr2 0", gold_bv2_0)
        print("BV arr2 1", gold_bv2_1)
    # Pipeline blocks: readers feed two intersect levels (outer inter1,
    # inner inter0); inter0's refs load values that are multiplied and
    # written out, while BVDrop trims the result bitvectors.
    bvscan1_0 = BVRdScan(bv_arr=gold_bv1_0, debug=debug_sim)
    bvscan1_1 = BVRdScan(bv_arr=gold_bv1_1, debug=debug_sim)
    bvscan2_0 = BVRdScan(bv_arr=gold_bv2_0, debug=debug_sim)
    bvscan2_1 = BVRdScan(bv_arr=gold_bv2_1, debug=debug_sim)
    inter0 = IntersectBV2(debug=debug_sim)
    inter1 = IntersectBV2(debug=debug_sim)
    val1 = Array(init_arr=vals_arr1, debug=debug_sim)
    val2 = Array(init_arr=vals_arr2, debug=debug_sim)
    mul = Multiply2(debug=debug_sim)
    bvdrop = BVDrop(debug=debug_sim)
    oval_wrscan = ValsWrScan(size=size, fill=fill)
    wrscan0 = ValsWrScan(size=size, fill=fill)
    wrscan1 = ValsWrScan(size=1, fill=fill)
    # Traces of intersect outputs, kept for the debug dump below.
    temp1 = []
    temp2 = []
    temp3 = []
    temp4 = []
    in_ref1 = [0, 'D']  # root reference followed by the done token
    in_ref2 = [0, 'D']
    done = False
    time = 0
    # Cycle-accurate loop; each block is fed and updated once per cycle.
    while not done and time < TIMEOUT:
        if len(in_ref1) > 0:
            bvscan1_1.set_in_ref(in_ref1.pop(0))
        bvscan1_1.update()
        if len(in_ref2) > 0:
            bvscan2_1.set_in_ref(in_ref2.pop(0))
        bvscan2_1.update()
        inter1.set_in1(bvscan1_1.out_ref(), bvscan1_1.out_bv())
        inter1.set_in2(bvscan2_1.out_ref(), bvscan2_1.out_bv())
        inter1.update()
        # NOTE(review): inter0's outputs are sampled here, before inter0 is
        # updated this cycle, so these record the previous cycle's values;
        # the prints are unconditional (not gated on debug_sim).
        temp1.append(inter0.out_ref1())
        print(remove_emptystr(temp1))
        temp2.append(inter0.out_ref2())
        print(remove_emptystr(temp2))
        bvscan1_0.set_in_ref(inter1.out_ref1())
        bvscan1_0.update()
        bvscan2_0.set_in_ref(inter1.out_ref2())
        bvscan2_0.update()
        inter0.set_in1(bvscan1_0.out_ref(), bvscan1_0.out_bv())
        inter0.set_in2(bvscan2_0.out_ref(), bvscan2_0.out_bv())
        inter0.update()
        val1.set_load(inter0.out_ref1())
        val2.set_load(inter0.out_ref2())
        val1.update()
        val2.update()
        mul.set_in1(val1.out_load())
        mul.set_in2(val2.out_load())
        mul.update()
        oval_wrscan.set_input(mul.out_val())
        oval_wrscan.update()
        temp3.append(inter0.out_bv())
        temp4.append(inter1.out_bv())
        bvdrop.set_inner_bv(inter0.out_bv())
        bvdrop.set_outer_bv(inter1.out_bv())
        bvdrop.update()
        wrscan0.set_input(bvdrop.out_bv_inner())
        wrscan0.update()
        wrscan1.set_input(bvdrop.out_bv_outer())
        wrscan1.update()
        print("Timestep", time, "\t Done --",
              "\nRdScan1:", bvscan1_0.out_done(), bvscan2_0.out_done(), bvscan1_1.out_done(), bvscan2_1.out_done(),
              "\nInter:", inter0.out_done(), inter1.out_done(),
              "\nArr:", val1.out_done(), val2.out_done(),
              "\tMul:", mul.out_done(),
              "\nOutVal:", oval_wrscan.out_done(),
              "\tOutBV1:", wrscan1.out_done(), "\tOutBV0:", wrscan0.out_done()
              )
        done = wrscan0.out_done() and wrscan1.out_done() and oval_wrscan.out_done()
        time += 1
    if debug_sim:
        print(oval_wrscan.get_arr())
        print(temp3)
        print(temp4)
        print(wrscan0.get_arr())
        print(gold_bv0)
        print(wrscan1.get_arr())
        print(gold_bv1)
    # Products must match; bitvector checks only apply to a nonempty result.
    check_arr(oval_wrscan, gold_vals)
    if gold_crd:
        check_arr(wrscan0, gold_bv0)
        check_arr(wrscan1, gold_bv1)
# NOTE: This is the full vector elementwise multiplication as a bitvector
@pytest.mark.parametrize("nnz", [1, 10, 100, 500, 1000])
def test_vec_elemmul_bv_split(nnz, debug_sim, max_val=999, size=1000, fill=0):
    """Full bitvector elementwise multiply in two simulated phases.

    Phase 1: stream both compressed vectors through CompressedCrdRdScan ->
    Split -> BV and check the produced bitvectors against the software
    golds.  Phase 2: replay those bitvectors through BVRdScan ->
    IntersectBV2 -> Array -> Multiply2 (+ BVDrop) and check the products
    and result bitvectors.

    Fix: the phase-2 loop previously guarded on ``time1 < TIMEOUT`` even
    though only ``time2`` is incremented inside it, so the timeout guard
    was dead (a hung phase 2 would spin forever, or be skipped outright if
    phase 1 had already consumed TIMEOUT cycles).  It now guards on
    ``time2``, mirroring phase 1's use of ``time1``.
    """
    sf = 32  # split factor: 32 coordinates per bitvector word
    # Random compressed vector 1 (sorted unique coords, parallel values).
    crd_arr1 = [random.randint(0, max_val) for _ in range(nnz)]
    crd_arr1 = sorted(set(crd_arr1))
    seg_arr1 = [0, len(crd_arr1)]
    vals_arr1 = [random.randint(0, max_val) for _ in range(len(crd_arr1))]
    # Random compressed vector 2.
    crd_arr2 = [random.randint(0, max_val) for _ in range(nnz)]
    crd_arr2 = sorted(set(crd_arr2))
    seg_arr2 = [0, len(crd_arr2)]
    vals_arr2 = [random.randint(0, max_val) for _ in range(len(crd_arr2))]
    if debug_sim:
        print("Compressed VECTOR 1:\n", seg_arr1, "\n", crd_arr1, "\n", vals_arr1)
        print("Compressed VECTOR 2:\n", seg_arr2, "\n", crd_arr2, "\n", vals_arr2)
    # Software-gold input bitvectors, zero-padded to 32 inner words.
    gold_bv1_1 = [bv([int(elem / sf) for elem in crd_arr1])]
    gold_bv1_0 = inner_bv(crd_arr1, size, sf)
    gold_bv1_0 += (32 - len(gold_bv1_0)) * [0]
    gold_bv2_1 = [bv([int(elem / sf) for elem in crd_arr2])]
    gold_bv2_0 = inner_bv(crd_arr2, size, sf)
    gold_bv2_0 += (32 - len(gold_bv2_0)) * [0]
    # Software-gold result: coordinate intersection and per-coord products.
    gold_crd = sorted(set(crd_arr1) & set(crd_arr2))
    gold_seg = [0, len(gold_crd)]
    gold_vals = []
    gold_bv1 = []
    gold_bv0 = []
    if gold_crd:
        gold_vals = [vals_arr1[crd_arr1.index(i)] * vals_arr2[crd_arr2.index(i)] for i in gold_crd]
        gold_bv1 = [bv([int(elem / sf) for elem in gold_crd])]
        gold_bv0 = inner_bv(gold_crd, size, sf)
    if debug_sim:
        print("Compressed RESULT :\n", gold_seg, "\n", gold_crd, "\n", gold_vals)
        print("BV arr1 0", gold_bv1_0)
        print("BV arr1 1", gold_bv1_1)
        print("BV arr2 0", gold_bv2_0)
        print("BV arr2 1", gold_bv2_1)
    # ---- Phase 1: build the bitvectors with rd-scan -> split -> BV ----
    crdscan1 = CompressedCrdRdScan(seg_arr=seg_arr1, crd_arr=crd_arr1, debug=debug_sim)
    crdscan2 = CompressedCrdRdScan(seg_arr=seg_arr2, crd_arr=crd_arr2, debug=debug_sim)
    split1 = Split(split_factor=sf, orig_crd=False, debug=debug_sim)
    split2 = Split(split_factor=sf, orig_crd=False, debug=debug_sim)
    bv1_0 = BV(debug=debug_sim)
    bv1_1 = BV(debug=debug_sim)
    bv2_0 = BV(debug=debug_sim)
    bv2_1 = BV(debug=debug_sim)
    wrscan1_0 = ValsWrScan(size=sf, fill=fill)
    wrscan1_1 = ValsWrScan(size=1, fill=fill)
    wrscan2_0 = ValsWrScan(size=sf, fill=fill)
    wrscan2_1 = ValsWrScan(size=1, fill=fill)
    in_ref1 = [0, 'D']  # root reference followed by the done token
    in_ref2 = [0, 'D']
    done = False
    time1 = 0
    out_split1_0 = []
    out_split1_1 = []
    out_split2_0 = []
    out_split2_1 = []
    # Cycle-accurate loop; each block is fed and updated once per cycle.
    while not done and time1 < TIMEOUT:
        if len(in_ref1) > 0:
            crdscan1.set_in_ref(in_ref1.pop(0))
        crdscan1.update()
        if len(in_ref2) > 0:
            crdscan2.set_in_ref(in_ref2.pop(0))
        crdscan2.update()
        split1.set_in_crd(crdscan1.out_crd())
        split1.update()
        split2.set_in_crd(crdscan2.out_crd())
        split2.update()
        # Record the raw split streams for the optional debug dump below.
        out_split1_0.append(split1.out_inner_crd())
        out_split1_1.append(split1.out_outer_crd())
        out_split2_0.append(split2.out_inner_crd())
        out_split2_1.append(split2.out_outer_crd())
        bv1_0.set_in_crd(split1.out_inner_crd())
        bv1_1.set_in_crd(split1.out_outer_crd())
        bv2_0.set_in_crd(split2.out_inner_crd())
        bv2_1.set_in_crd(split2.out_outer_crd())
        bv1_0.update()
        bv1_1.update()
        bv2_0.update()
        bv2_1.update()
        wrscan1_0.set_input(bv1_0.out_bv_int())
        wrscan1_1.set_input(bv1_1.out_bv_int())
        wrscan2_0.set_input(bv2_0.out_bv_int())
        wrscan2_1.set_input(bv2_1.out_bv_int())
        wrscan1_0.update()
        wrscan1_1.update()
        wrscan2_0.update()
        wrscan2_1.update()
        print("Timestep", time1, "\t Done -- \n",
              "\nRdScan1:", crdscan1.out_done(), "\tRdScan2:", crdscan2.out_done(),
              "\nSplit1:", split1.out_done(), "\tSplit2:", split2.out_done(),
              "\nBV:", bv1_0.out_done(), bv1_1.out_done(), bv2_0.out_done(), bv2_1.out_done(),
              "\nWrScan:", wrscan1_0.out_done(), wrscan1_1.out_done(), wrscan2_0.out_done(), wrscan2_1.out_done()
              )
        done = wrscan2_0.out_done() and wrscan2_1.out_done() and wrscan1_1.out_done() and wrscan1_0.out_done()
        time1 += 1
    if debug_sim:
        print(remove_emptystr(out_split1_0))
        print(remove_emptystr(out_split1_1))
        print(remove_emptystr(out_split2_0))
        print(remove_emptystr(out_split2_1))
        print([bin(i) for i in wrscan1_0.get_arr()])
        print([bin(i) for i in wrscan1_1.get_arr()])
        print([bin(i) for i in wrscan2_0.get_arr()])
        print([bin(i) for i in wrscan2_1.get_arr()])
    # Phase-1 bitvectors must match the software golds before phase 2 runs.
    check_arr(wrscan1_0, gold_bv1_0)
    check_arr(wrscan1_1, gold_bv1_1)
    check_arr(wrscan2_0, gold_bv2_0)
    check_arr(wrscan2_1, gold_bv2_1)
    # ---- Phase 2: intersect/multiply using the phase-1 bitvectors ----
    bvscan1_0 = BVRdScan(bv_arr=wrscan1_0.get_arr(), debug=debug_sim)
    bvscan1_1 = BVRdScan(bv_arr=wrscan1_1.get_arr(), debug=debug_sim)
    bvscan2_0 = BVRdScan(bv_arr=wrscan2_0.get_arr(), debug=debug_sim)
    bvscan2_1 = BVRdScan(bv_arr=wrscan2_1.get_arr(), debug=debug_sim)
    inter0 = IntersectBV2(debug=debug_sim)
    inter1 = IntersectBV2(debug=debug_sim)
    val1 = Array(init_arr=vals_arr1, debug=debug_sim)
    val2 = Array(init_arr=vals_arr2, debug=debug_sim)
    mul = Multiply2(debug=debug_sim)
    bvdrop = BVDrop(debug=debug_sim)
    oval_wrscan = ValsWrScan(size=size, fill=fill)
    wrscan0 = ValsWrScan(size=size, fill=fill)
    wrscan1 = ValsWrScan(size=1, fill=fill)
    in_ref1 = [0, 'D']
    in_ref2 = [0, 'D']
    done = False
    time2 = 0
    # BUGFIX: guard on time2 (this loop's counter), not time1, so the
    # timeout can actually fire in phase 2.
    while not done and time2 < TIMEOUT:
        if len(in_ref1) > 0:
            bvscan1_1.set_in_ref(in_ref1.pop(0))
        bvscan1_1.update()
        if len(in_ref2) > 0:
            bvscan2_1.set_in_ref(in_ref2.pop(0))
        bvscan2_1.update()
        inter1.set_in1(bvscan1_1.out_ref(), bvscan1_1.out_bv())
        inter1.set_in2(bvscan2_1.out_ref(), bvscan2_1.out_bv())
        inter1.update()
        bvscan1_0.set_in_ref(inter1.out_ref1())
        bvscan1_0.update()
        bvscan2_0.set_in_ref(inter1.out_ref2())
        bvscan2_0.update()
        inter0.set_in1(bvscan1_0.out_ref(), bvscan1_0.out_bv())
        inter0.set_in2(bvscan2_0.out_ref(), bvscan2_0.out_bv())
        inter0.update()
        val1.set_load(inter0.out_ref1())
        val2.set_load(inter0.out_ref2())
        val1.update()
        val2.update()
        mul.set_in1(val1.out_load())
        mul.set_in2(val2.out_load())
        mul.update()
        oval_wrscan.set_input(mul.out_val())
        oval_wrscan.update()
        bvdrop.set_inner_bv(inter0.out_bv())
        bvdrop.set_outer_bv(inter1.out_bv())
        bvdrop.update()
        wrscan0.set_input(bvdrop.out_bv_inner())
        wrscan0.update()
        wrscan1.set_input(bvdrop.out_bv_outer())
        wrscan1.update()
        print("Timestep", time2, "\t Done --",
              "\nRdScan1:", bvscan1_0.out_done(), bvscan2_0.out_done(), bvscan1_1.out_done(), bvscan2_1.out_done(),
              "\nInter:", inter0.out_done(), inter1.out_done(),
              "\nArr:", val1.out_done(), val2.out_done(),
              "\tMul:", mul.out_done(),
              "\nOutVal:", oval_wrscan.out_done(),
              "\tOutBV1:", wrscan1.out_done(), "\tOutBV0:", wrscan0.out_done()
              )
        done = wrscan0.out_done() and wrscan1.out_done() and oval_wrscan.out_done()
        time2 += 1
    if debug_sim:
        print("TOTAL TIME:", time1 + time2)
        print(oval_wrscan.get_arr())
        print(wrscan0.get_arr())
        print(gold_bv0)
        print(wrscan1.get_arr())
        print(gold_bv1)
    # Products must match; bitvector checks only apply to a nonempty result.
    check_arr(oval_wrscan, gold_vals)
    if gold_crd:
        check_arr(wrscan0, gold_bv0)
        check_arr(wrscan1, gold_bv1)
| 35.417495
| 115
| 0.633062
| 2,744
| 17,815
| 3.789359
| 0.059038
| 0.041739
| 0.045009
| 0.019619
| 0.907675
| 0.900269
| 0.892768
| 0.892768
| 0.870841
| 0.866801
| 0
| 0.068722
| 0.22975
| 17,815
| 502
| 116
| 35.488048
| 0.68904
| 0.006848
| 0
| 0.858881
| 0
| 0
| 0.034144
| 0
| 0
| 0
| 0
| 0.001992
| 0.002433
| 1
| 0.012165
| false
| 0
| 0.026764
| 0
| 0.043796
| 0.124088
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f7b259d190892996744486c93ef886a56611948
| 578
|
py
|
Python
|
Funny_Js_Crack/91-wallhere_spider/use_pypackage_anti_cloudflare.py
|
qqizai/Func_Js_Crack
|
8cc8586107fecace4b71d0519cfbc760584171b1
|
[
"MIT"
] | 18
|
2020-12-09T06:49:46.000Z
|
2022-01-27T03:20:36.000Z
|
Funny_Js_Crack/91-wallhere_spider/use_pypackage_anti_cloudflare.py
|
useafter/Func_Js_Crack
|
8cc8586107fecace4b71d0519cfbc760584171b1
|
[
"MIT"
] | null | null | null |
Funny_Js_Crack/91-wallhere_spider/use_pypackage_anti_cloudflare.py
|
useafter/Func_Js_Crack
|
8cc8586107fecace4b71d0519cfbc760584171b1
|
[
"MIT"
] | 9
|
2020-12-20T08:52:09.000Z
|
2021-12-19T09:13:09.000Z
|
# Fetch a Cloudflare-protected page using the cfscrape package.
import cfscrape
# Instantiate a CloudflareScraper object
scraper = cfscrape.create_scraper()
# Alternatively, scraper = cfscrape.CloudflareScraper() also works
# If errors occur, a delay can be added:
# scraper = cfscrape.create_scraper(delay = 10)
# Fetch the real page source (past the Cloudflare challenge)
web_data = scraper.get("https://wallhere.com/").content
print(web_data)
# pip install cfscrape
# Handling Cloudflare for POST requests:
# import cfscrape
# Instantiate a CloudflareScraper object
# scraper = cfscrape.create_scraper()
# Alternatively, scraper = cfscrape.CloudflareScraper() also works
# If errors occur, a delay can be added:
# scraper = cfscrape.create_scraper(delay = 10)
# Fetch the real page source
# web_data = scraper.post("http://example.com").content
| 25.130435
| 55
| 0.780277
| 61
| 578
| 7.278689
| 0.42623
| 0.135135
| 0.189189
| 0.252252
| 0.779279
| 0.779279
| 0.779279
| 0.779279
| 0.779279
| 0.779279
| 0
| 0.007737
| 0.105536
| 578
| 22
| 56
| 26.272727
| 0.851064
| 0.726644
| 0
| 0
| 0
| 0
| 0.148936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.25
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7e661bf7c03da5cfc2cf394cf35c30237a5fbced
| 365
|
py
|
Python
|
Website_to_CSV_Functions/__init__.py
|
dniamir/FantasyFootballScraper
|
3ba86cb8a976e45a852b32bc6450264e072259ba
|
[
"MIT"
] | null | null | null |
Website_to_CSV_Functions/__init__.py
|
dniamir/FantasyFootballScraper
|
3ba86cb8a976e45a852b32bc6450264e072259ba
|
[
"MIT"
] | null | null | null |
Website_to_CSV_Functions/__init__.py
|
dniamir/FantasyFootballScraper
|
3ba86cb8a976e45a852b32bc6450264e072259ba
|
[
"MIT"
] | null | null | null |
from Website_to_CSV_Functions.Basic_Stats_Class import *
from Website_to_CSV_Functions.Career_Stats_Class import *
from Website_to_CSV_Functions.Game_Logs_Class import *
from Website_to_CSV_Functions.Obtain_Players_from_Website import *
from Website_to_CSV_Functions.NFL_Glossary import *
from Website_to_CSV_Functions.Functions_Needed_For_All_Stats import *
| 52.142857
| 70
| 0.884932
| 56
| 365
| 5.196429
| 0.339286
| 0.264605
| 0.268041
| 0.329897
| 0.704467
| 0.618557
| 0.405498
| 0.281787
| 0
| 0
| 0
| 0
| 0.082192
| 365
| 6
| 71
| 60.833333
| 0.868657
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7e70d80fc9d9ef5824c8c5f79fd3447dec5ed5ce
| 67,154
|
py
|
Python
|
tests/test_path.py
|
sobolevn/action-hero
|
75ff10dc8b01ee8d00367c63e8eccbbee9cc7d42
|
[
"MIT"
] | 91
|
2019-09-26T20:55:55.000Z
|
2021-10-19T22:27:21.000Z
|
tests/test_path.py
|
kadimisetty/action-heroes
|
f203e2e130dce970db803d6447bf7518c5e54285
|
[
"MIT"
] | 1
|
2020-02-07T02:51:40.000Z
|
2020-02-07T02:51:40.000Z
|
tests/test_path.py
|
kadimisetty/action-heroes
|
f203e2e130dce970db803d6447bf7518c5e54285
|
[
"MIT"
] | 5
|
2020-02-02T03:40:47.000Z
|
2020-11-05T10:53:11.000Z
|
import os
import tempfile
from action_hero.utils import ActionHeroTestCase
from action_hero import (
DirectoryDoesNotExistAction,
DirectoryExistsAction,
DirectoryIsExecutableAction,
DirectoryIsNotExecutableAction,
DirectoryIsNotReadableAction,
DirectoryIsNotWritableAction,
DirectoryIsReadableAction,
DirectoryIsValidAction,
DirectoryIsWritableAction,
EnsureDirectoryAction,
EnsureFileAction,
FileDoesNotExistAction,
FileExistsAction,
FileIsEmptyAction,
FileIsExecutableAction,
FileIsNotEmptyAction,
FileIsNotExecutableAction,
FileIsNotReadableAction,
FileIsNotWritableAction,
FileIsReadableAction,
FileIsWritableAction,
FileHasExtensionAction,
PathDoesNotExistsAction,
PathExistsAction,
PathIsExecutableAction,
PathIsNotExecutableAction,
PathIsNotReadableAction,
PathIsNotWritableAction,
PathIsReadableAction,
PathIsValidAction,
PathIsWritableAction,
ResolvePathAction,
)
from action_hero.path_utils import (
add_execute_permission,
is_empty_file,
is_executable_directory,
is_executable_file,
is_existing_directory,
is_existing_file,
is_existing_path,
is_readable_directory,
is_readable_file,
is_readable_path,
is_valid_directory,
is_valid_file,
is_valid_path,
is_writable_file,
remove_execute_permission,
remove_read_permission,
remove_write_permission,
resolve_path,
)
class TestResolvePathAction(ActionHeroTestCase):
    """ResolvePathAction: parsed path arguments are run through resolve_path."""

    def test_processes_path(self):
        """Parsing yields a namespace that carries a `path` attribute."""
        self.parser.add_argument("--path", action=ResolvePathAction)
        with tempfile.TemporaryDirectory() as tmpdir:
            namespace = self.parser.parse_args(["--path", tmpdir])
            self.assertIn("path", namespace)

    def test_on_single_path(self):
        """A single path argument is replaced by its resolved form."""
        self.parser.add_argument("--path", action=ResolvePathAction)
        with tempfile.TemporaryDirectory() as tmpdir:
            namespace = self.parser.parse_args(["--path", tmpdir])
            self.assertEqual(namespace.path, resolve_path(tmpdir))

    def test_on_list_of_paths(self):
        """Every entry of a multi-path argument gets resolved."""
        self.parser.add_argument("--path", nargs="+", action=ResolvePathAction)
        # A few scratch directories to feed the parser
        scratch_dirs = [tempfile.mkdtemp() for _ in range(3)]
        namespace = self.parser.parse_args(["--path", *scratch_dirs])
        self.assertEqual(
            namespace.path, [resolve_path(d) for d in scratch_dirs]
        )
        # Tear down the scratch directories
        for d in scratch_dirs:
            os.rmdir(d)
class TestEnsureDirectoryAction(ActionHeroTestCase):
    """EnsureDirectoryAction: parsing creates any missing directories."""

    def test_on_nonexisting_directory(self):
        """A directory that does not exist is created during parsing."""
        self.parser.add_argument("--path", action=EnsureDirectoryAction)
        with tempfile.TemporaryDirectory() as parent:
            target = os.path.join(parent, "NEWDIRT")
            self.assertFalse(os.path.isdir(target))
            self.parser.parse_args(["--path", target])
            self.assertTrue(os.path.isdir(target))

    def test_on_existing_directory(self):
        """A directory that already exists is left in place."""
        self.parser.add_argument("--path", action=EnsureDirectoryAction)
        with tempfile.TemporaryDirectory() as existing:
            self.assertTrue(os.path.isdir(existing))
            self.parser.parse_args(["--path", existing])
            self.assertTrue(os.path.isdir(existing))

    def test_on_multiple_mixed_existing_directories(self):
        """A mix of present and absent directories all exist after parsing."""
        self.parser.add_argument(
            "--path", nargs="+", action=EnsureDirectoryAction
        )
        # Two directories that exist ...
        present = [tempfile.mkdtemp() for _ in range(2)]
        # ... and two removed immediately so they no longer exist
        absent = []
        for _ in range(2):
            d = tempfile.mkdtemp()
            os.rmdir(d)
            absent.append(d)
        mixed = present + absent
        # Sanity: the pool really holds both kinds
        states = [os.path.isdir(d) for d in mixed]
        self.assertIn(True, states)
        self.assertIn(False, states)
        self.parser.parse_args(["--path", *mixed])
        # Parsing must have created the absent directories
        self.assertNotIn(False, [os.path.isdir(d) for d in mixed])
        # Tear down every directory
        for d in mixed:
            os.rmdir(d)
class TestEnsureFileAction(ActionHeroTestCase):
    """EnsureFileAction: parsing creates any missing files."""

    def test_on_nonexisting_file(self):
        """A deleted file is recreated during parsing."""
        self.parser.add_argument("--path", action=EnsureFileAction)
        with tempfile.TemporaryDirectory() as scratch:
            target = tempfile.mkstemp(dir=scratch)[1]
            self.assertTrue(os.path.isfile(target))
            # Delete it so the action has something to recreate
            os.remove(target)
            self.assertFalse(os.path.isfile(target))
            self.parser.parse_args(["--path", target])
            self.assertTrue(os.path.isfile(target))

    def test_on_existing_file(self):
        """A file that already exists survives parsing untouched."""
        self.parser.add_argument("--path", action=EnsureFileAction)
        with tempfile.NamedTemporaryFile() as handle:
            self.assertTrue(os.path.isfile(handle.name))
            self.parser.parse_args(["--path", handle.name])
            self.assertTrue(os.path.isfile(handle.name))

    def test_on_multiple_mixed_existing_files(self):
        """A mix of present and absent files all exist after parsing."""
        self.parser.add_argument("--path", nargs="+", action=EnsureFileAction)
        with tempfile.TemporaryDirectory() as scratch:
            # Two files that exist ...
            present = [tempfile.mkstemp(dir=scratch)[1] for _ in range(2)]
            # ... and two removed immediately so they no longer exist
            absent = []
            for _ in range(2):
                f = tempfile.mkstemp(dir=scratch)[1]
                os.remove(f)
                absent.append(f)
            mixed = present + absent
            # Sanity: the pool really holds both kinds
            states = [os.path.isfile(f) for f in mixed]
            self.assertIn(True, states)
            self.assertIn(False, states)
            self.parser.parse_args(["--path", *mixed])
            # Parsing must have created the absent files
            self.assertNotIn(False, [os.path.isfile(f) for f in mixed])
            # Tear down every file
            for f in mixed:
                os.remove(f)
class TestPathIsValidAction(ActionHeroTestCase):
    """PathIsValidAction: names with forbidden characters abort parsing."""

    def test_on_valid_path(self):
        """A real directory passes validation and survives parsing."""
        self.parser.add_argument("--path", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as tmpdir:
            self.assertTrue(is_valid_path(tmpdir))
            namespace = self.parser.parse_args(["--path", tmpdir])
            self.assertTrue(is_valid_path(namespace.path))

    def test_on_multiple_valid_paths(self):
        """Several valid paths parse without raising."""
        self.parser.add_argument("--path", nargs="+", action=PathIsValidAction)
        scratch_dirs = [tempfile.mkdtemp() for _ in range(3)]
        self.parser.parse_args(["--path", *scratch_dirs])
        self.assertNotIn(False, [is_valid_path(p) for p in scratch_dirs])
        # Tear down the scratch directories
        for d in scratch_dirs:
            os.rmdir(d)

    def test_on_invalid_path(self):
        """A name containing a forbidden character makes parsing raise."""
        self.parser.add_argument("--path", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as parent:
            # "/" is forbidden inside a file name on POSIX and WIN*
            forbidden_char = "/"
            bad_path = os.path.join(
                parent, "SOMEFILE{}".format(forbidden_char)
            )
            self.assertFalse(is_valid_path(bad_path))
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", bad_path])

    def test_on_mixed_valid_and_invalid_path(self):
        """One invalid entry among valid ones still makes parsing raise."""
        self.parser.add_argument("--path", nargs="+", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as parent:
            # "/" is forbidden inside a file name on POSIX and WIN*
            forbidden_char = "/"
            bad_path = os.path.join(
                parent, "SOMEFILE{}".format(forbidden_char)
            )
            self.assertFalse(is_valid_path(bad_path))
            with tempfile.TemporaryDirectory() as good_dir:
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", bad_path, good_dir])
class TestFileIsValidAction(ActionHeroTestCase):
    """File-path validity tests (routed through PathIsValidAction, as the
    original suite does)."""

    def test_on_valid_file_path(self):
        """A real file passes validation and survives parsing."""
        self.parser.add_argument("--path", action=PathIsValidAction)
        with tempfile.NamedTemporaryFile() as handle:
            self.assertTrue(is_valid_file(handle.name))
            namespace = self.parser.parse_args(["--path", handle.name])
            self.assertTrue(is_valid_file(namespace.path))

    def test_on_multiple_valid_file_paths(self):
        """Several valid file paths parse without raising."""
        self.parser.add_argument("--path", nargs="+", action=PathIsValidAction)
        scratch_files = [tempfile.mkstemp()[1] for _ in range(3)]
        self.parser.parse_args(["--path", *scratch_files])
        self.assertNotIn(False, [is_valid_path(p) for p in scratch_files])
        # Tear down the scratch files
        for f in scratch_files:
            os.remove(f)

    def test_on_invalid_file_path(self):
        """A file name with a forbidden character makes parsing raise."""
        self.parser.add_argument("--path", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as parent:
            # "/" is forbidden inside a file name on POSIX and WIN*
            forbidden_char = "/"
            bad_file = os.path.join(
                parent, "SOMEFILE{}".format(forbidden_char)
            )
            self.assertFalse(is_valid_file(bad_file))
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", bad_file])

    def test_on_mixed_valid_and_invalid_file_path(self):
        """One invalid entry among valid ones still makes parsing raise."""
        self.parser.add_argument("--path", nargs="+", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as parent:
            # "/" is forbidden inside a file name on POSIX and WIN*
            forbidden_char = "/"
            bad_file = os.path.join(
                parent, "SOMEFILE{}".format(forbidden_char)
            )
            self.assertFalse(is_valid_file(bad_file))
            with tempfile.NamedTemporaryFile() as good_file:
                with self.assertRaises(ValueError):
                    self.parser.parse_args(
                        ["--path", bad_file, good_file.name]
                    )
class TestDirectoryIsValidAction(ActionHeroTestCase):
    """Tests for DirectoryIsValidAction."""

    def test_on_valid_directory_path(self):
        self.parser.add_argument("--path", action=DirectoryIsValidAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Assert path is valid path
            self.assertTrue(is_valid_directory(dir1))
            # Parse args with the directory path
            args = self.parser.parse_args(["--path", dir1])
            # Assert path from args is valid path
            self.assertTrue(is_valid_directory(args.path))

    def test_on_multiple_valid_directory_paths(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsValidAction
        )
        # Create a few temporary directories
        dirs = [tempfile.mkdtemp() for _ in range(3)]
        try:
            # Parse args with list of paths
            self.parser.parse_args(["--path", *dirs])
            self.assertNotIn(False, [is_valid_directory(d) for d in dirs])
        finally:
            # Delete all temporary directories, even on assertion failure
            for d in dirs:
                os.rmdir(d)

    def test_on_invalid_directory_path(self):
        # NOTE(review): this test uses PathIsValidAction/is_valid_file even
        # though the class exercises DirectoryIsValidAction — looks like a
        # copy-paste from the file-path test class. Confirm intent before
        # switching to DirectoryIsValidAction/is_valid_directory: a trailing
        # "/" may be legal in a *directory* path, so swapping could change
        # the expected outcome.
        self.parser.add_argument("--path", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as parent_directory:
            # Create a name with a char forbidden in POSIX and WIN*
            forbidden_char = "/"
            dir_name = "SOMEFILE{}".format(forbidden_char)
            dir_path = os.path.join(parent_directory, dir_name)
            # Assert that the forbidden character prohibited path creation
            self.assertFalse(is_valid_file(dir_path))
            with self.assertRaises(ValueError):
                # Parse args with prohibited filename
                self.parser.parse_args(["--path", dir_path])

    def test_on_mixed_valid_and_invalid_file_path(self):
        # NOTE(review): same suspected copy-paste as above — uses
        # PathIsValidAction/is_valid_file inside the directory test class.
        self.parser.add_argument("--path", nargs="+", action=PathIsValidAction)
        with tempfile.TemporaryDirectory() as parent_directory:
            # Create a name with a char forbidden in POSIX and WIN*
            forbidden_char = "/"
            invalid_dir_name = "SOMEDIR{}".format(forbidden_char)
            invalid_dir_path = os.path.join(parent_directory, invalid_dir_name)
            # Assert that the forbidden character prohibited path creation
            self.assertFalse(is_valid_file(invalid_dir_path))
            # Create valid path
            with tempfile.TemporaryDirectory() as valid_dir_path:
                # Assemble mixed valid and invalid path
                paths = [invalid_dir_path, valid_dir_path]
                with self.assertRaises(ValueError):
                    # Parse args with list of paths
                    self.parser.parse_args(["--path", *paths])
class TestPathExistsAction(ActionHeroTestCase):
    """Tests for PathExistsAction: parsing fails unless the path exists."""

    def test_on_existing_path(self):
        self.parser.add_argument("--path", action=PathExistsAction)
        # Specify file to check
        with tempfile.NamedTemporaryFile() as file1:
            # Assert specified file exists
            self.assertTrue(is_existing_path(file1.name))
            # Parse args with --path as specified file
            self.parser.parse_args(["--path", file1.name])
            # Assert specified file still exists
            self.assertTrue(is_existing_path(file1.name))

    def test_on_nonexisting_path(self):
        self.parser.add_argument("--path", action=PathExistsAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a file to check; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert specified file exists
            self.assertTrue(is_existing_path(file1))
            # Remove file
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_path(file1))
            with self.assertRaises(ValueError):
                # Parse args with --path as specified file that does not exist
                self.parser.parse_args(["--path", file1])

    def test_on_mixed_existing_and_nonexisting_path(self):
        self.parser.add_argument("--path", nargs="+", action=PathExistsAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a file to check; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert specified file exists
            self.assertTrue(is_existing_path(file1))
            # Remove file
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_path(file1))
            # Assemble mixed list of existing and nonexisting paths
            paths = [dir1, file1]
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", *paths])
class TestPathDoesNotExistsAction(ActionHeroTestCase):
    """Tests for PathDoesNotExistsAction: parsing fails if the path exists."""

    def test_on_existing_path(self):
        self.parser.add_argument("--path", action=PathDoesNotExistsAction)
        # Specify file to check
        with tempfile.NamedTemporaryFile() as file1:
            with self.assertRaises(ValueError):
                # Parse args with --path as a file that exists
                self.parser.parse_args(["--path", file1.name])

    def test_on_nonexisting_path(self):
        self.parser.add_argument("--path", action=PathDoesNotExistsAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a file to check; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert specified file exists
            self.assertTrue(is_existing_path(file1))
            # Remove file
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_path(file1))
            # Parse args with --path as specified file
            self.parser.parse_args(["--path", file1])
            # Assert specified file still does not exist
            self.assertFalse(is_existing_path(file1))

    def test_on_mixed_existing_and_nonexisting_path(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathDoesNotExistsAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a file to check; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert specified file exists
            self.assertTrue(is_existing_path(file1))
            # Remove file
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_path(file1))
            # Assemble mixed list of existing and nonexisting paths
            paths = [dir1, file1]
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", *paths])
class TestFileExistsAction(ActionHeroTestCase):
    """Tests for FileExistsAction."""

    def test_on_existing_path(self):
        self.parser.add_argument("--path", action=FileExistsAction)
        # Specify file to check
        with tempfile.NamedTemporaryFile() as file1:
            # Assert specified file exists
            self.assertTrue(is_existing_file(file1.name))
            # Parse args with --path as specified file
            self.parser.parse_args(["--path", file1.name])
            # Assert specified file still exists
            self.assertTrue(is_existing_file(file1.name))

    def test_on_nonexisting_file(self):
        self.parser.add_argument("--path", action=FileExistsAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a file to check; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert specified file exists
            self.assertTrue(is_existing_file(file1))
            # Remove file
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_file(file1))
            with self.assertRaises(ValueError):
                # Parse args with --path as specified file that does not exist
                self.parser.parse_args(["--path", file1])

    def test_on_mixed_existing_and_nonexisting_path(self):
        self.parser.add_argument("--path", nargs="+", action=FileExistsAction)
        with tempfile.TemporaryDirectory() as directory:
            # Specify files to check; close each mkstemp fd to avoid leaks
            files = []
            for _ in range(3):
                fd, path = tempfile.mkstemp(dir=directory)
                os.close(fd)
                files.append(path)
            file1, file2, file3 = files
            # Assert specified files exist
            self.assertTrue(is_existing_file(file1))
            self.assertTrue(is_existing_file(file2))
            self.assertTrue(is_existing_file(file3))
            # Remove file 1
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_file(file1))
            # Mixed list of existing and nonexisting paths raises
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2, file3])
class TestFileDoesNotExistsAction(ActionHeroTestCase):
    """Tests for FileDoesNotExistAction."""

    def test_on_existing_path(self):
        self.parser.add_argument("--path", action=FileDoesNotExistAction)
        # Specify file to check
        with tempfile.NamedTemporaryFile() as file1:
            with self.assertRaises(ValueError):
                # Parse args with --path as a file that exists
                self.parser.parse_args(["--path", file1.name])

    def test_on_nonexisting_path(self):
        self.parser.add_argument("--path", action=FileDoesNotExistAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a file to check; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert specified file exists
            self.assertTrue(is_existing_file(file1))
            # Remove file
            os.remove(file1)
            # Assert specified file no longer exists
            self.assertFalse(is_existing_file(file1))
            # Parse args with --path as specified file
            self.parser.parse_args(["--path", file1])
            # Assert specified file still does not exist
            self.assertFalse(is_existing_file(file1))

    def test_on_mixed_existing_and_nonexisting_path(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileDoesNotExistAction
        )
        # Create two temporary files; close the mkstemp fds to avoid leaks
        fd1, file1 = tempfile.mkstemp()
        os.close(fd1)
        fd2, file2 = tempfile.mkstemp()
        os.close(fd2)
        try:
            # Assert specified files exist
            self.assertTrue(is_existing_file(file1))
            self.assertTrue(is_existing_file(file2))
            # Remove one file
            os.remove(file1)
            self.assertFalse(is_existing_file(file1))
            # Assemble mixed list of existing and nonexisting paths
            paths = [file1, file2]
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", *paths])
        finally:
            # Tear down remaining temporary file, even on assertion failure
            os.remove(file2)
class TestDirectoryExistsAction(ActionHeroTestCase):
    """Tests for DirectoryExistsAction."""

    def test_on_existing_path(self):
        self.parser.add_argument("--path", action=DirectoryExistsAction)
        # Specify directory to check
        with tempfile.TemporaryDirectory() as dir1:
            # Assert specified directory exists
            self.assertTrue(is_existing_directory(dir1))
            # Parse args with --path as specified directory
            args = self.parser.parse_args(["--path", dir1])
            # Assert specified directory still exists
            self.assertTrue(is_existing_path(dir1))
            self.assertTrue(is_existing_path(args.path))

    def test_on_nonexisting_path(self):
        self.parser.add_argument("--path", action=DirectoryExistsAction)
        # Specify directory
        dir1 = tempfile.mkdtemp()
        # Assert specified directory exists
        self.assertTrue(is_existing_directory(dir1))
        # Remove specified directory
        os.rmdir(dir1)
        # Assert specified directory doesnt exist
        self.assertFalse(is_existing_directory(dir1))
        with self.assertRaises(ValueError):
            # Parse args with removed directory
            self.parser.parse_args(["--path", dir1])

    def test_on_mixed_existing_and_nonexisting_path(self):
        # BUGFIX: nargs="+" was missing even though two path values are
        # supplied; added to match the sibling mixed-path tests.
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryExistsAction
        )
        # Specify directories
        dir1 = tempfile.mkdtemp()
        dir2 = tempfile.mkdtemp()
        try:
            # Assert specified directories exist
            self.assertTrue(is_existing_directory(dir1))
            self.assertTrue(is_existing_directory(dir2))
            # Remove one specified directory
            os.rmdir(dir1)
            # Assert removed directory doesnt exist
            self.assertFalse(is_existing_directory(dir1))
            with self.assertRaises(ValueError):
                # Parse args with list of paths
                self.parser.parse_args(["--path", dir1, dir2])
        finally:
            # BUGFIX: dir2 was previously never deleted (leaked temp dir)
            os.rmdir(dir2)
class TestDirectoryDoesNotExistsAction(ActionHeroTestCase):
    """Tests for DirectoryDoesNotExistAction."""

    def test_on_existing_path(self):
        self.parser.add_argument("--path", action=DirectoryDoesNotExistAction)
        # An existing directory must be rejected
        with tempfile.TemporaryDirectory() as existing_dir:
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", existing_dir])

    def test_on_nonexisting_path(self):
        self.parser.add_argument("--path", action=DirectoryDoesNotExistAction)
        # Make a directory, confirm it exists, then delete it
        removed_dir = tempfile.mkdtemp()
        self.assertTrue(is_existing_directory(removed_dir))
        os.rmdir(removed_dir)
        # Parsing the now-absent directory succeeds
        self.parser.parse_args(["--path", removed_dir])

    def test_on_mixed_existing_and_nonexisting_path(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryDoesNotExistAction
        )
        # Make two directories and confirm both exist
        removed_dir = tempfile.mkdtemp()
        kept_dir = tempfile.mkdtemp()
        self.assertTrue(is_existing_directory(removed_dir))
        self.assertTrue(is_existing_directory(kept_dir))
        # Delete one of them
        os.rmdir(removed_dir)
        # The surviving directory causes a rejection
        with self.assertRaises(ValueError):
            self.parser.parse_args(["--path", removed_dir, kept_dir])
        # Tear down remaining temporary directory
        os.rmdir(kept_dir)
class TestFileIsWritableAction(ActionHeroTestCase):
    """Tests for FileIsWritableAction."""

    def test_on_writable_file(self):
        self.parser.add_argument("--path", action=FileIsWritableAction)
        # Specify file
        with tempfile.NamedTemporaryFile() as file1:
            # Assert file is writable
            self.assertTrue(is_writable_file(file1.name))
            # No errors when parsing args
            self.parser.parse_args(["--path", file1.name])
            # Assert file is still writable
            self.assertTrue(is_writable_file(file1.name))

    def test_on_unwritable_file(self):
        # BUGFIX: this test previously parsed a *writable* file and never
        # exercised the unwritable case; now it strips write permission
        # and expects parsing to fail.
        self.parser.add_argument("--path", action=FileIsWritableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify file and remove its write permission; close the
            # mkstemp fd to avoid leaking it
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            remove_write_permission(file1)
            # Assert file is not writable
            self.assertFalse(is_writable_file(file1))
            # Assert ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1])

    def test_on_mixed_writable_and_unwritable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsWritableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify writable and unwritable file; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            remove_write_permission(file2)
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])
class TestFileIsNotWritableAction(ActionHeroTestCase):
    """Tests for FileIsNotWritableAction."""

    def test_on_writable_file(self):
        self.parser.add_argument("--path", action=FileIsNotWritableAction)
        # Specify file
        with tempfile.NamedTemporaryFile() as file1:
            # Assert file is writable
            self.assertTrue(is_writable_file(file1.name))
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_unwritable_file(self):
        self.parser.add_argument("--path", action=FileIsNotWritableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify file and remove write permission; close the mkstemp
            # fd to avoid leaking it
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            remove_write_permission(file1)
            # Assert file is unwritable
            self.assertFalse(is_writable_file(file1))
            # No Error on parse args
            self.parser.parse_args(["--path", file1])
            # Assert file is still unwritable
            self.assertFalse(is_writable_file(file1))

    def test_on_mixed_writable_and_unwritable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsNotWritableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify writable and unwritable files; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            remove_write_permission(file2)
            # Check if ValueError raised on parse (file1 is writable)
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])
class TestDirectoryIsWritableAction(ActionHeroTestCase):
    """Tests for DirectoryIsWritableAction."""

    def test_on_writable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsWritableAction)
        # Specify writable directory
        with tempfile.TemporaryDirectory() as dir1:
            # Parse with writable directory
            self.parser.parse_args(["--path", dir1])

    def test_on_unwritable_directory(self):
        # BUGFIX: renamed from "test_on_wunwritable_directory" (typo)
        self.parser.add_argument("--path", action=DirectoryIsWritableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify unwritable directory and remove write permissions
            dir2 = tempfile.mkdtemp(dir=dir1)
            remove_write_permission(dir2)
            # Asserts error on parsing unwritable directory
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir2])

    def test_on_writable_and_unwritable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsWritableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify unwritable directory and remove write permissions
            dir2 = tempfile.mkdtemp(dir=dir1)
            remove_write_permission(dir2)
            # Asserts error on parsing unwritable directory
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1, dir2])
class TestDirectoryIsNotWritableAction(ActionHeroTestCase):
    """Tests for DirectoryIsNotWritableAction."""

    def test_on_writable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsNotWritableAction)
        # A writable directory must be rejected
        with tempfile.TemporaryDirectory() as writable_dir:
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", writable_dir])

    def test_on_unwritable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsNotWritableAction)
        with tempfile.TemporaryDirectory() as outer_dir:
            # Make a nested directory and strip its write permission
            locked_dir = tempfile.mkdtemp(dir=outer_dir)
            remove_write_permission(locked_dir)
            # Parsing the unwritable directory succeeds
            self.parser.parse_args(["--path", locked_dir])

    def test_on_writable_and_unwritable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsNotWritableAction
        )
        with tempfile.TemporaryDirectory() as outer_dir:
            # Make a nested directory and strip its write permission
            locked_dir = tempfile.mkdtemp(dir=outer_dir)
            remove_write_permission(locked_dir)
            # The writable outer directory causes a rejection
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", outer_dir, locked_dir])
class TestPathIsWritableAction(ActionHeroTestCase):
    """Tests for PathIsWritableAction."""

    def test_on_writable_directory(self):
        self.parser.add_argument("--path", action=PathIsWritableAction)
        # A writable directory parses without error
        with tempfile.TemporaryDirectory() as writable_dir:
            self.parser.parse_args(["--path", writable_dir])

    def test_on_unwritable_directory(self):
        self.parser.add_argument("--path", action=PathIsWritableAction)
        with tempfile.TemporaryDirectory() as outer_dir:
            # Make a nested directory and strip its write permission
            locked_dir = tempfile.mkdtemp(dir=outer_dir)
            remove_write_permission(locked_dir)
            # Parsing the unwritable directory raises
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", locked_dir])

    def test_on_writable_and_unwritable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsWritableAction
        )
        with tempfile.TemporaryDirectory() as outer_dir:
            # Make a nested directory and strip its write permission
            locked_dir = tempfile.mkdtemp(dir=outer_dir)
            remove_write_permission(locked_dir)
            # The unwritable entry causes a rejection
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", outer_dir, locked_dir])
class TestPathIsNotWritableAction(ActionHeroTestCase):
    """Tests for PathIsNotWritableAction."""

    def test_on_writable_directory(self):
        self.parser.add_argument("--path", action=PathIsNotWritableAction)
        # A writable directory must be rejected
        with tempfile.TemporaryDirectory() as writable_dir:
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", writable_dir])

    def test_on_unwritable_directory(self):
        self.parser.add_argument("--path", action=PathIsNotWritableAction)
        with tempfile.TemporaryDirectory() as outer_dir:
            # Make a nested directory and strip its write permission
            locked_dir = tempfile.mkdtemp(dir=outer_dir)
            remove_write_permission(locked_dir)
            # Parsing the unwritable directory succeeds
            self.parser.parse_args(["--path", locked_dir])

    def test_on_writable_and_unwritable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsNotWritableAction
        )
        with tempfile.TemporaryDirectory() as outer_dir:
            # Make a nested directory and strip its write permission
            locked_dir = tempfile.mkdtemp(dir=outer_dir)
            remove_write_permission(locked_dir)
            # The writable outer directory causes a rejection
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", outer_dir, locked_dir])
class TestFileIsReadableAction(ActionHeroTestCase):
    """Tests for FileIsReadableAction."""

    def test_on_readable_file(self):
        self.parser.add_argument("--path", action=FileIsReadableAction)
        # Specify file
        with tempfile.NamedTemporaryFile() as file1:
            # Assert file is readable
            self.assertTrue(is_readable_file(file1.name))
            # No errors when parsing args
            self.parser.parse_args(["--path", file1.name])
            # Assert file is still readable
            self.assertTrue(is_readable_file(file1.name))

    def test_on_unreadable_file(self):
        # BUGFIX: previously parsed a *readable* file and never exercised
        # the unreadable case; now strips read permission and expects a
        # ValueError on parse.
        self.parser.add_argument("--path", action=FileIsReadableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify file and remove its read permission; close the
            # mkstemp fd to avoid leaking it
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            remove_read_permission(file1)
            # Assert file is unreadable
            self.assertFalse(is_readable_file(file1))
            # Assert ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1])

    def test_on_mixed_readable_and_unreadable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsReadableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify readable and unreadable file; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            remove_read_permission(file2)
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])
class TestFileIsNotReadableAction(ActionHeroTestCase):
    """Tests for FileIsNotReadableAction."""

    def test_on_readable_file(self):
        self.parser.add_argument("--path", action=FileIsNotReadableAction)
        # Specify file
        with tempfile.NamedTemporaryFile() as file1:
            # Assert file is readable
            self.assertTrue(is_readable_file(file1.name))
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_unreadable_file(self):
        self.parser.add_argument("--path", action=FileIsNotReadableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify file and remove read permission; close the mkstemp
            # fd to avoid leaking it
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            remove_read_permission(file1)
            # Assert file is unreadable
            self.assertFalse(is_readable_file(file1))
            # No Error on parse args
            self.parser.parse_args(["--path", file1])
            # Assert file is still unreadable
            self.assertFalse(is_readable_file(file1))

    def test_on_mixed_readable_and_unreadable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsNotReadableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify readable and unreadable files; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            # BUGFIX: was remove_write_permission — this class tests
            # readability, so strip *read* permission from file2
            remove_read_permission(file2)
            # Check if ValueError raised on parse (file1 is readable)
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])
class TestDirectoryIsReadableAction(ActionHeroTestCase):
    """Tests for DirectoryIsReadableAction."""

    def test_on_readable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsReadableAction)
        # Specify directory
        with tempfile.TemporaryDirectory() as dir1:
            # Assert directory is readable
            self.assertTrue(is_readable_directory(dir1))
            # No errors when parsing args
            self.parser.parse_args(["--path", dir1])
            # Assert directory is still readable
            self.assertTrue(is_readable_directory(dir1))

    def test_on_unreadable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsReadableAction)
        # Specify dir and make unreadable
        dir1 = tempfile.mkdtemp()
        try:
            remove_read_permission(dir1)
            # Assert ValueError raised when parsing args
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])
        finally:
            # Tear down temp dir even if an assertion failed
            os.rmdir(dir1)

    def test_on_mixed_readable_and_unreadable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsReadableAction
        )
        # Specify readable and unreadable dirs
        with tempfile.TemporaryDirectory() as dir1:
            dir2 = tempfile.mkdtemp()
            try:
                remove_read_permission(dir2)
                # Assert ValueError raised when parsing args
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                # BUGFIX: dir2 previously leaked (never deleted)
                os.rmdir(dir2)
class TestDirectoryIsNotReadableAction(ActionHeroTestCase):
    """Tests for DirectoryIsNotReadableAction."""

    def test_on_readable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsNotReadableAction)
        # Specify directory
        with tempfile.TemporaryDirectory() as dir1:
            # Assert directory is readable
            self.assertTrue(is_readable_directory(dir1))
            # Assert ValueError raised when parsing args
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])

    def test_on_unreadable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsNotReadableAction)
        # Specify dir and make unreadable
        dir1 = tempfile.mkdtemp()
        try:
            remove_read_permission(dir1)
            # No errors when parsing args
            self.parser.parse_args(["--path", dir1])
            # Assert directory is still unreadable
            self.assertFalse(is_readable_directory(dir1))
        finally:
            # Tear down temp dir even if an assertion failed
            os.rmdir(dir1)

    def test_on_mixed_readable_and_unreadable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsNotReadableAction
        )
        # Specify readable and unreadable dirs
        with tempfile.TemporaryDirectory() as dir1:
            dir2 = tempfile.mkdtemp()
            try:
                remove_read_permission(dir2)
                # Assert ValueError raised when parsing args (dir1 readable)
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                # BUGFIX: dir2 previously leaked (never deleted)
                os.rmdir(dir2)
class TestPathIsReadableAction(ActionHeroTestCase):
    """Tests for PathIsReadableAction on both files and directories."""

    def test_on_readable_file(self):
        self.parser.add_argument("--path", action=PathIsReadableAction)
        # Specify file
        with tempfile.NamedTemporaryFile() as file1:
            # Assert file is readable
            self.assertTrue(is_readable_file(file1.name))
            # No errors when parsing args
            self.parser.parse_args(["--path", file1.name])
            # Assert file is still readable
            self.assertTrue(is_readable_file(file1.name))

    def test_on_unreadable_file(self):
        # BUGFIX: previously parsed a *readable* file and never exercised
        # the unreadable case; now strips read permission and expects a
        # ValueError on parse.
        self.parser.add_argument("--path", action=PathIsReadableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify file and remove its read permission; close the
            # mkstemp fd to avoid leaking it
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            remove_read_permission(file1)
            # Assert file is unreadable
            self.assertFalse(is_readable_file(file1))
            # Assert ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1])

    def test_on_mixed_readable_and_unreadable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsReadableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify readable and unreadable file; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            remove_read_permission(file2)
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])

    def test_on_readable_directory(self):
        self.parser.add_argument("--path", action=PathIsReadableAction)
        # Specify directory
        with tempfile.TemporaryDirectory() as dir1:
            # Assert directory is readable
            self.assertTrue(is_readable_directory(dir1))
            # No errors when parsing args
            self.parser.parse_args(["--path", dir1])
            # Assert directory is still readable
            self.assertTrue(is_readable_directory(dir1))

    def test_on_unreadable_directory(self):
        self.parser.add_argument("--path", action=PathIsReadableAction)
        # Specify dir and make unreadable
        dir1 = tempfile.mkdtemp()
        try:
            remove_read_permission(dir1)
            # Assert ValueError raised when parsing args
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])
        finally:
            # Tear down temp dir even if an assertion failed
            os.rmdir(dir1)

    def test_on_mixed_readable_and_unreadable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsReadableAction
        )
        # Specify readable and unreadable dirs
        with tempfile.TemporaryDirectory() as dir1:
            dir2 = tempfile.mkdtemp()
            try:
                remove_read_permission(dir2)
                # Assert ValueError raised when parsing args
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                # BUGFIX: dir2 previously leaked (never deleted)
                os.rmdir(dir2)
class TestPathIsNotReadableAction(ActionHeroTestCase):
    """Tests for PathIsNotReadableAction on both files and directories."""

    def test_on_readable_file(self):
        self.parser.add_argument("--path", action=PathIsNotReadableAction)
        # Specify file
        with tempfile.NamedTemporaryFile() as file1:
            # Assert file is readable
            self.assertTrue(is_readable_file(file1.name))
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_unreadable_file(self):
        self.parser.add_argument("--path", action=PathIsNotReadableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify file and remove read permission; close the mkstemp
            # fd to avoid leaking it
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            remove_read_permission(file1)
            # Assert file is unreadable
            self.assertFalse(is_readable_file(file1))
            # No Error on parse args
            self.parser.parse_args(["--path", file1])
            # Assert file is still unreadable
            # (CONSISTENCY: was is_readable_path; use is_readable_file to
            # match the pre-parse assertion above)
            self.assertFalse(is_readable_file(file1))

    def test_on_mixed_readable_and_unreadable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsNotReadableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify readable and unreadable files; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            # BUGFIX: was remove_write_permission — this class tests
            # readability, so strip *read* permission from file2
            remove_read_permission(file2)
            # Check if ValueError raised on parse (file1 is readable)
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])

    def test_on_readable_directory(self):
        self.parser.add_argument("--path", action=PathIsNotReadableAction)
        # Specify directory
        with tempfile.TemporaryDirectory() as dir1:
            # Assert directory is readable
            self.assertTrue(is_readable_directory(dir1))
            # Assert ValueError raised when parsing args
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])

    def test_on_unreadable_directory(self):
        self.parser.add_argument("--path", action=PathIsNotReadableAction)
        # Specify dir and make unreadable
        dir1 = tempfile.mkdtemp()
        try:
            remove_read_permission(dir1)
            # No errors when parsing args
            self.parser.parse_args(["--path", dir1])
            # Assert directory is still unreadable
            self.assertFalse(is_readable_directory(dir1))
        finally:
            # Tear down temp dir even if an assertion failed
            os.rmdir(dir1)

    def test_on_mixed_readable_and_unreadable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsNotReadableAction
        )
        # Specify readable and unreadable dirs
        with tempfile.TemporaryDirectory() as dir1:
            dir2 = tempfile.mkdtemp()
            try:
                remove_read_permission(dir2)
                # Assert ValueError raised when parsing args (dir1 readable)
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                # BUGFIX: dir2 previously leaked (never deleted)
                os.rmdir(dir2)
class TestFileIsExecutableAction(ActionHeroTestCase):
    """Tests for FileIsExecutableAction."""

    def test_on_executable_file(self):
        self.parser.add_argument("--path", action=FileIsExecutableAction)
        # Specify file and make executable
        with tempfile.NamedTemporaryFile() as file1:
            add_execute_permission(file1.name)
            self.assertTrue(is_executable_file(file1.name))
            # No errors when parsing args
            self.parser.parse_args(["--path", file1.name])

    def test_on_unexecutable_file(self):
        self.parser.add_argument("--path", action=FileIsExecutableAction)
        # Specify file and assert not executable
        with tempfile.NamedTemporaryFile() as file1:
            self.assertFalse(is_executable_file(file1.name))
            # Assert ValueError on parsing
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_mixed_executable_and_unexecutable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsExecutableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify unexecutable and executable files; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            add_execute_permission(file2)
            # ValueError raised on parse (file1 is not executable)
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])
class TestFileIsNotExecutableAction(ActionHeroTestCase):
    """Tests for FileIsNotExecutableAction."""

    def test_on_executable_file(self):
        self.parser.add_argument("--path", action=FileIsNotExecutableAction)
        # Specify file and make executable
        with tempfile.NamedTemporaryFile() as file1:
            add_execute_permission(file1.name)
            self.assertTrue(is_executable_file(file1.name))
            # Check if ValueError raised on parse
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_unexecutable_file(self):
        # BUGFIX: renamed from "test_on_unreadable_file" — a copy-paste
        # from the readable test classes; this class tests executability.
        self.parser.add_argument("--path", action=FileIsNotExecutableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Specify a plain file; close the mkstemp fd to avoid a leak
            fd, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd)
            # Assert file is unexecutable
            self.assertFalse(is_executable_file(file1))
            # No Error on parse args
            self.parser.parse_args(["--path", file1])
            # Assert file is still unexecutable
            self.assertFalse(is_executable_file(file1))

    def test_on_mixed_executable_and_unexecutable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsNotExecutableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # Specify executable and unexecutable files; close mkstemp fds
            fd1, file1 = tempfile.mkstemp(dir=dir1)
            os.close(fd1)
            add_execute_permission(file1)
            fd2, file2 = tempfile.mkstemp(dir=dir1)
            os.close(fd2)
            # Check if ValueError raised on parse (file1 is executable)
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])
class TestDirectoryIsExecutableAction(ActionHeroTestCase):
    """Tests for DirectoryIsExecutableAction."""

    def test_on_executable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsExecutableAction)
        # A fresh temporary directory is executable (searchable)
        with tempfile.TemporaryDirectory() as dir1:
            self.assertTrue(is_executable_directory(dir1))
            # No errors when parsing args
            self.parser.parse_args(["--path", dir1])

    def test_on_unexecutable_directory(self):
        self.parser.add_argument("--path", action=DirectoryIsExecutableAction)
        # Specify dir and make unexecutable
        with tempfile.TemporaryDirectory() as dir1:
            remove_execute_permission(dir1)
            # Assert ValueError raised when parsing args
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])

    def test_on_mixed_executable_and_unexecutable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsExecutableAction
        )
        # Specify executable and unexecutable dirs
        with tempfile.TemporaryDirectory() as dir1:
            dir2 = tempfile.mkdtemp()
            try:
                remove_execute_permission(dir2)
                # Assert ValueError raised when parsing args
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                # BUGFIX: dir2 previously leaked (never deleted)
                os.rmdir(dir2)
class TestDirectoryIsNotExecutableAction(ActionHeroTestCase):
    """Tests for DirectoryIsNotExecutableAction."""

    def test_on_executable_directory(self):
        self.parser.add_argument(
            "--path", action=DirectoryIsNotExecutableAction
        )
        # Specify directory
        with tempfile.TemporaryDirectory() as dir1:
            # Assert directory is executable
            self.assertTrue(is_executable_directory(dir1))
            # Assert ValueError raised when parsing args
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])

    def test_on_unexecutable_directory(self):
        self.parser.add_argument(
            "--path", action=DirectoryIsNotExecutableAction
        )
        # BUGFIX: previously created an unused TemporaryDirectory and then
        # shadowed its name with a mkdtemp directory that was never
        # deleted; the mkdtemp directory is now torn down.
        dir1 = tempfile.mkdtemp()
        try:
            remove_execute_permission(dir1)
            # No errors when parsing args
            self.parser.parse_args(["--path", dir1])
            # Assert directory is still unexecutable
            self.assertFalse(is_executable_directory(dir1))
        finally:
            # Tear down temp dir
            os.rmdir(dir1)

    def test_on_mixed_executable_and_unexecutable_directories(self):
        self.parser.add_argument(
            "--path", nargs="+", action=DirectoryIsNotExecutableAction
        )
        # Specify executable and unexecutable dirs
        with tempfile.TemporaryDirectory() as dir1:
            dir2 = tempfile.mkdtemp()
            try:
                remove_execute_permission(dir2)
                # Assert ValueError raised when parsing args (dir1 executable)
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                # BUGFIX: dir2 previously leaked (never deleted)
                os.rmdir(dir2)
class TestPathIsExecutableAction(ActionHeroTestCase):
    """Tests for PathIsExecutableAction: accepts only executable paths."""

    def test_on_executable_file(self):
        self.parser.add_argument("--path", action=PathIsExecutableAction)
        with tempfile.NamedTemporaryFile() as file1:
            add_execute_permission(file1.name)
            self.assertTrue(is_executable_file(file1.name))
            # Executable file parses without error
            self.parser.parse_args(["--path", file1.name])

    def test_on_unexecutable_file(self):
        self.parser.add_argument("--path", action=PathIsExecutableAction)
        with tempfile.NamedTemporaryFile() as file1:
            # Fresh temp files are not executable
            self.assertFalse(is_executable_file(file1.name))
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_mixed_executable_and_unexecutable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsExecutableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # One unexecutable and one executable file
            # (comment fixed: these are executable/unexecutable, not
            # readable/unreadable)
            file1 = tempfile.mkstemp(dir=dir1)[1]
            file2 = tempfile.mkstemp(dir=dir1)[1]
            add_execute_permission(file2)
            # The unexecutable entry rejects the whole list
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])

    def test_on_executable_directory(self):
        self.parser.add_argument("--path", action=PathIsExecutableAction)
        with tempfile.TemporaryDirectory() as dir1:
            self.assertTrue(is_executable_directory(dir1))
            # Executable directory parses without error
            self.parser.parse_args(["--path", dir1])

    def test_on_unexecutable_directory(self):
        self.parser.add_argument("--path", action=PathIsExecutableAction)
        with tempfile.TemporaryDirectory() as dir1:
            remove_execute_permission(dir1)
            try:
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1])
            finally:
                # BUGFIX: restore the permission so TemporaryDirectory
                # cleanup can traverse and remove the directory.
                add_execute_permission(dir1)

    def test_on_mixed_executable_and_unexecutable_directories(self):
        import os

        self.parser.add_argument(
            "--path", nargs="+", action=PathIsExecutableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # BUGFIX: dir2 was created with mkdtemp() and leaked; remove it.
            dir2 = tempfile.mkdtemp()
            try:
                remove_execute_permission(dir2)
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                os.rmdir(dir2)
class TestPathIsNotExecutableAction(ActionHeroTestCase):
    """Tests for PathIsNotExecutableAction: rejects executable paths."""

    def test_on_executable_file(self):
        self.parser.add_argument("--path", action=PathIsNotExecutableAction)
        with tempfile.NamedTemporaryFile() as file1:
            add_execute_permission(file1.name)
            self.assertTrue(is_executable_file(file1.name))
            # Executable file must be rejected
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1.name])

    def test_on_unreadable_file(self):
        # NOTE: method name kept for backward compatibility (test IDs);
        # this actually exercises an *unexecutable* file.
        self.parser.add_argument("--path", action=PathIsNotExecutableAction)
        with tempfile.TemporaryDirectory() as dir1:
            file1 = tempfile.mkstemp(dir=dir1)[1]
            # Fresh temp files are not executable
            self.assertFalse(is_executable_file(file1))
            # Unexecutable file parses without error
            self.parser.parse_args(["--path", file1])
            # Still unexecutable after parsing
            self.assertFalse(is_executable_file(file1))

    def test_on_mixed_executable_and_unexecutable_file(self):
        self.parser.add_argument(
            "--path", nargs="+", action=PathIsNotExecutableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # One executable and one plain file
            file1 = tempfile.mkstemp(dir=dir1)[1]
            add_execute_permission(file1)
            file2 = tempfile.mkstemp(dir=dir1)[1]
            # The executable entry rejects the whole list
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", file1, file2])

    def test_on_executable_directory(self):
        self.parser.add_argument("--path", action=PathIsNotExecutableAction)
        with tempfile.TemporaryDirectory() as dir1:
            # Fresh temp dirs are executable, so parsing must fail
            self.assertTrue(is_executable_directory(dir1))
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", dir1])

    def test_on_unexecutable_directory(self):
        import os

        self.parser.add_argument("--path", action=PathIsNotExecutableAction)
        # BUGFIX: the original shadowed the context-managed directory with
        # an extra mkdtemp() directory that was never removed.
        unexec_dir = tempfile.mkdtemp()
        try:
            remove_execute_permission(unexec_dir)
            # Unexecutable directory parses without error
            self.parser.parse_args(["--path", unexec_dir])
            # Still unexecutable after parsing (comment fixed: this checks
            # executability, not readability)
            self.assertFalse(is_executable_directory(unexec_dir))
        finally:
            os.rmdir(unexec_dir)

    def test_on_mixed_executable_and_unexecutable_directories(self):
        import os

        self.parser.add_argument(
            "--path", nargs="+", action=PathIsNotExecutableAction
        )
        with tempfile.TemporaryDirectory() as dir1:
            # BUGFIX: dir2 was created with mkdtemp() and leaked; remove it.
            dir2 = tempfile.mkdtemp()
            try:
                remove_execute_permission(dir2)
                # The executable entry (dir1) rejects the whole list
                with self.assertRaises(ValueError):
                    self.parser.parse_args(["--path", dir1, dir2])
            finally:
                os.rmdir(dir2)
class TestFileIsEmptyAction(ActionHeroTestCase):
    """Tests for FileIsEmptyAction: accepts only zero-length files."""

    def test_on_empty_file(self):
        self.parser.add_argument("--path", action=FileIsEmptyAction)
        with tempfile.NamedTemporaryFile() as empty:
            self.assertTrue(is_empty_file(empty.name))
            # An empty file parses without error
            self.parser.parse_args(["--path", empty.name])

    def test_on_nonempty_file(self):
        self.parser.add_argument("--path", action=FileIsEmptyAction)
        with tempfile.NamedTemporaryFile() as nonempty:
            with open(nonempty.name, "a") as writer:
                writer.write("SOME TEXT")
            self.assertFalse(is_empty_file(nonempty.name))
            # A non-empty file must be rejected
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", nonempty.name])

    def test_on_list_of_empty_and_nonempty_files(self):
        self.parser.add_argument("--path", nargs="+", action=FileIsEmptyAction)
        with tempfile.NamedTemporaryFile() as empty, \
                tempfile.NamedTemporaryFile() as nonempty:
            with open(nonempty.name, "a") as writer:
                writer.write("SOME TEXT")
            self.assertTrue(is_empty_file(empty.name))
            self.assertFalse(is_empty_file(nonempty.name))
            # The non-empty entry rejects the whole list
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", empty.name, nonempty.name])
class TestFileIsNotEmptyAction(ActionHeroTestCase):
    """Tests for FileIsNotEmptyAction: rejects zero-length files."""

    def test_on_empty_file(self):
        self.parser.add_argument("--path", action=FileIsNotEmptyAction)
        with tempfile.NamedTemporaryFile() as empty:
            self.assertTrue(is_empty_file(empty.name))
            # An empty file must be rejected
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", empty.name])

    def test_on_nonempty_file(self):
        self.parser.add_argument("--path", action=FileIsNotEmptyAction)
        with tempfile.NamedTemporaryFile() as nonempty:
            with open(nonempty.name, "a") as writer:
                writer.write("SOME TEXT")
            self.assertFalse(is_empty_file(nonempty.name))
            # A non-empty file parses without error
            self.parser.parse_args(["--path", nonempty.name])

    def test_on_list_of_empty_and_nonempty_files(self):
        self.parser.add_argument(
            "--path", nargs="+", action=FileIsNotEmptyAction
        )
        with tempfile.NamedTemporaryFile() as empty, \
                tempfile.NamedTemporaryFile() as nonempty:
            with open(nonempty.name, "a") as writer:
                writer.write("SOME TEXT")
            self.assertTrue(is_empty_file(empty.name))
            self.assertFalse(is_empty_file(nonempty.name))
            # The empty entry rejects the whole list
            with self.assertRaises(ValueError):
                self.parser.parse_args(["--path", empty.name, nonempty.name])
class TestFileHasExtensionAction(ActionHeroTestCase):
    """Tests for FileHasExtensionAction: filenames must carry an allowed extension."""

    def _add_filename_arg(self, extensions, multiple=False):
        # Helper: register --filename restricted to the given extensions
        kwargs = dict(action=FileHasExtensionAction, action_values=extensions)
        if multiple:
            kwargs["nargs"] = "+"
        self.parser.add_argument("--filename", **kwargs)

    def test_on_parser_with_extension(self):
        # Registering with action_values is accepted
        self._add_filename_arg(["txt"])

    def test_on_parser_without_extension(self):
        # Omitting action_values raises at argument-registration time
        with self.assertRaises(ValueError):
            self.parser.add_argument(
                "--filename", action=FileHasExtensionAction
            )

    def test_on_filename_with_matching_extension(self):
        self._add_filename_arg(["txt"])
        self.parser.parse_args(["--filename", "diary.txt"])

    def test_on_filename_with_nonmatching_extension(self):
        self._add_filename_arg(["txt"])
        with self.assertRaises(ValueError):
            self.parser.parse_args(["--filename", "diary.md"])

    def test_on_list_of_filenames_with_matching_extension(self):
        self._add_filename_arg(["txt"], multiple=True)
        self.parser.parse_args(
            ["--filename", "diary.txt", "log.txt", "lyrics.txt"]
        )

    def test_on_list_of_filenames_with_nonmatching_extension(self):
        self._add_filename_arg(["txt"], multiple=True)
        with self.assertRaises(ValueError):
            self.parser.parse_args(
                ["--filename", "diary.md", "README.rst", "history.sh"]
            )

    def test_on_list_of_filenames_with_mixed_matching_extensions(self):
        self._add_filename_arg(["txt"], multiple=True)
        with self.assertRaises(ValueError):
            self.parser.parse_args(
                [
                    "--filename",
                    "notes.txt",
                    "diary.md",
                    "README.rst",
                    "history.sh",
                ]
            )

    def test_on_multiple_action_values(self):
        self._add_filename_arg(["md", "markdown"])
        self.parser.parse_args(["--filename", "diary.md"])

    def test_on_multiple_action_values_with_expected_filenames(self):
        self._add_filename_arg(["md", "markdown"])
        with self.assertRaises(ValueError):
            self.parser.parse_args(
                ["--filename", "README.md", "blogentry.markdown"]
            )

    def test_on_multiple_action_values_with_unexpected_filenames(self):
        self._add_filename_arg(["md", "markdown"])
        with self.assertRaises(ValueError):
            self.parser.parse_args(["--filename", "config.yml"])

    def test_on_multiple_action_values_with_mixed_expected_filenames(self):
        self._add_filename_arg(["md", "markdown"], multiple=True)
        with self.assertRaises(ValueError):
            self.parser.parse_args(["--filename", "config.yml", "README.rst"])
| 42.155681
| 79
| 0.643327
| 7,069
| 67,154
| 5.938605
| 0.035366
| 0.057647
| 0.03778
| 0.061029
| 0.909671
| 0.892306
| 0.876822
| 0.856908
| 0.841591
| 0.803144
| 0
| 0.011638
| 0.265524
| 67,154
| 1,592
| 80
| 42.182161
| 0.839487
| 0.164666
| 0
| 0.722117
| 0
| 0
| 0.034443
| 0
| 0
| 0
| 0
| 0
| 0.178639
| 1
| 0.115312
| false
| 0
| 0.004726
| 0
| 0.151229
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ead05d3f9b40123d5b838866a8a290b3a4e5104
| 21,378
|
py
|
Python
|
project/grid.py
|
Kyle-Tran/Game-of-Life
|
ebb65956e42393444857e3fcc9389f8dae2a62f0
|
[
"MIT"
] | 1
|
2021-07-19T16:29:28.000Z
|
2021-07-19T16:29:28.000Z
|
project/grid.py
|
Kyle-Tran/Game-of-Life
|
ebb65956e42393444857e3fcc9389f8dae2a62f0
|
[
"MIT"
] | null | null | null |
project/grid.py
|
Kyle-Tran/Game-of-Life
|
ebb65956e42393444857e3fcc9389f8dae2a62f0
|
[
"MIT"
] | null | null | null |
import pygame, random
import numpy as np
class Conway:
    ###################################
    # Class for Conway's Game of Life #
    ###################################
    """Conway's Game of Life on a toroidal (wrap-around) grid.

    The field is a 2-D numpy array: 1 = live cell, 0 = dead cell.
    """

    def __init__(self, width, height, scale, border, percentRandom):
        """
        width/height: surface size in pixels
        scale: pixel size of one cell
        border: gap in pixels drawn between cells
        percentRandom: probability (0-1) a cell starts live in random_field()
        """
        self.scale = scale
        self.rows = int(width / scale)
        self.columns = int(height / scale)
        self.size = (self.rows, self.columns)
        # BUGFIX: np.ndarray(shape=...) leaves the field uninitialized
        # (arbitrary memory); start all-dead, matching reset().
        self.curr_array = np.zeros(shape=self.size)  # Field as 2d array
        self.border = border  # Lines between cells
        self.percentRandom = percentRandom

    def update(self, dead, live, surface):
        """
        Updates cells color correlating to dead or live
        """
        for x in range(self.rows):
            for y in range(self.columns):
                x_pos, y_pos = x * self.scale, y * self.scale
                # Pick the color once instead of duplicating the draw call
                color = live if self.curr_array[x][y] == 1 else dead
                pygame.draw.rect(surface, color,
                                 [x_pos, y_pos, self.scale - self.border, self.scale - self.border])

    def transition(self):
        """
        Rules for transitions between generations
        """
        # Start from all-dead; only births/survivals need to be written
        new_array = np.zeros(shape=self.size)
        for x in range(self.rows):
            for y in range(self.columns):
                state = self.curr_array[x][y]
                neighbors = self.get_neighbors(x, y)
                # Any live cell with two or three live neighbors survives.
                if state == 1 and neighbors in (2, 3):
                    new_array[x][y] = 1
                # Any dead cell with three live neighbors becomes a live cell.
                elif state == 0 and neighbors == 3:
                    new_array[x][y] = 1
                # All other cells are dead next generation (already zero).
        # update previous field with next generation's field
        self.curr_array = new_array

    def get_neighbors(self, x, y):
        """
        Check state of 8 cells around current cell
        Returns number of live neighbors
        """
        neighbors = 0
        for n in range(-1, 2):
            for m in range(-1, 2):
                if not (n == m == 0):  # Ignore self during check
                    # Since field is finite, stitch edges to yield toroidal array
                    x_edge = (x + n + self.rows) % self.rows
                    y_edge = (y + m + self.columns) % self.columns
                    neighbors += self.curr_array[x_edge][y_edge]
        return neighbors

    def click(self, pos):
        """
        Clicking on cell will change its state from dead to live or vice versa
        """
        x, y = int(pos[0] / self.scale), int(pos[1] / self.scale)
        self.curr_array[x][y] = 0 if self.curr_array[x][y] == 1 else 1

    def random_field(self):
        """
        Generates random field of cells
        """
        for x in range(self.rows):
            for y in range(self.columns):
                # Each cell is live with probability percentRandom
                self.curr_array[x][y] = random.choices(
                    [0, 1], [1 - self.percentRandom, self.percentRandom])[0]

    def reset(self):
        """
        Clears entire field to all dead cells
        """
        self.curr_array.fill(0)
class RPS:
    ###############################################
    # Class for ternary/quinary multi-state world #
    ###############################################
    # Rock = -1, White = 0, Paper = 1, Scissors = 2, Lizard = 3, Spock = 4
    """Rock-paper-scissors(-lizard-spock) cellular automaton on a torus."""

    def __init__(self, width, height, scale, border, numColors):
        self.scale = scale
        self.rows = int(width / scale)
        self.columns = int(height / scale)
        self.size = (self.rows, self.columns)
        # BUGFIX: start from an all-white (0) field instead of the
        # uninitialized memory np.ndarray(shape=...) returns; matches reset().
        self.curr_array = np.zeros(shape=self.size)  # Field as 2d array
        self.border = border  # Lines between cells
        self.numColors = numColors  # 3 = RPS rules, 5 = RPSLS rules

    def update(self, rock, paper, scissors, lizard, spock, surface):
        """
        Updates cells colors on field
        """
        for x in range(self.rows):
            for y in range(self.columns):
                x_pos, y_pos = x * self.scale, y * self.scale
                curr = self.curr_array[x][y]
                # Resolve the cell's color once; draw with a single call
                color = None
                if -1 < curr < 1:  # Fix rounding error near 0 for numpy array
                    color = (255, 255, 255)
                elif curr == -1:
                    color = rock
                elif curr == 1:
                    color = paper
                elif curr == 2:
                    color = scissors
                elif curr == 3 and self.numColors == 5:
                    color = lizard
                elif curr == 4 and self.numColors == 5:
                    color = spock
                if color is not None:
                    pygame.draw.rect(surface, color,
                                     [x_pos, y_pos, self.scale - self.border, self.scale - self.border])

    def transition(self):
        """
        Updates current field with next generation field
        """
        new_array = np.zeros(shape=self.size)
        for x in range(self.rows):
            for y in range(self.columns):
                state = self.curr_array[x][y]
                neighbors, dominating = self.get_neighbors(x, y, state)
                if neighbors > 2:  # if more than 2 cells dominate current cell, convert cell
                    # Convert to the dominant color with greater frequency.
                    # Example: Rock surrounded by 3 paper and 5 spock, rock becomes spock
                    new_array[x][y] = most_freq(dominating)
                else:  # otherwise cell stays the same
                    new_array[x][y] = state
        # update previous field with next generation's field
        self.curr_array = new_array

    def get_neighbors(self, x, y, state):
        """
        Rules for transitions between generations
        total: number of dominant neighbors
        num_dominating: list of dominant neighbors
        Example: state = rock, total = 8
        num_dominating = [paper, paper, paper, spock, spock, spock, spock, spock]
        """
        # Map each state to the tuple of states that beat it.
        if self.numColors == 3:  # Normal Rock, Paper, Scissor rules
            beats = {-1: (1,), 1: (2,), 2: (-1,)}
        elif self.numColors == 5:
            # Rock (-1), Paper (1), Scissor(2), Lizard(3), Spock(4) rules
            # Rock > Scissors, Lizard;  Paper > Rock, Spock
            # Scissors > Paper, Lizard; Lizard > Paper, Spock
            # Spock > Scissors, Rock
            beats = {-1: (1, 4), 1: (2, 3), 2: (-1, 4), 3: (-1, 2), 4: (1, 3)}
        else:
            beats = {}
        dominators = beats.get(state, ())
        total, num_dominating = 0, []
        # check 8 cells around current cell
        for n in range(-1, 2):
            for m in range(-1, 2):
                if not (n == m == 0):  # Ignore self during check
                    # Since field is finite, stitch edges to yield toroidal array
                    x_edge = (x + n + self.rows) % self.rows
                    y_edge = (y + m + self.columns) % self.columns
                    neighbor = self.curr_array[x_edge][y_edge]
                    if neighbor in dominators:
                        total += 1
                        num_dominating.append(neighbor)
        return total, num_dominating

    def click(self, pos, choice):
        """
        Clicking on cell will change its state to choice of color currently pressed (1-5)
        Default is white
        """
        x, y = int(pos[0] / self.scale), int(pos[1] / self.scale)
        self.curr_array[x][y] = choice

    def random_field(self):
        """
        Generates random field of cells
        """
        for x in range(self.rows):
            for y in range(self.columns):
                if self.numColors == 3:
                    self.curr_array[x][y] = random.choices([-1, 1, 2])[0]
                elif self.numColors == 5:
                    self.curr_array[x][y] = random.choices([-1, 1, 2, 3, 4])[0]

    def reset(self):
        """
        Clears entire field to all white cells
        """
        self.curr_array.fill(0)
def most_freq(neighbors):
    """
    Returns the dominant color with the greater frequency
    Example: num_dominating = [paper, paper, paper, spock, spock, spock, spock, spock]
    Returns: spock
    """
    # Scan the distinct values (same set iteration order as max(set, key=...))
    # and keep the first one with the highest count.
    winner, top_count = None, -1
    for candidate in set(neighbors):
        occurrences = neighbors.count(candidate)
        if occurrences > top_count:
            winner, top_count = candidate, occurrences
    return winner
class Langton:
    ###########################
    # Class for Langton's Ant #
    ###########################
    """Langton's Ant generalized to multiple colors on a toroidal grid.

    Cell values: -1 = empty/white, otherwise an index into `colors`.
    `rules` is a string of "R"/"L" turns, one per color.
    """

    def __init__(self, width, height, scale, border, colors, rules):
        self.scale = scale
        self.rows = int(width / scale)
        self.columns = int(height / scale)
        self.size = (self.rows, self.columns)
        # BUGFIX: start with an all-empty (-1) field; np.ndarray(shape=...)
        # leaves arbitrary values that transition() would misread as colors.
        self.curr_array = np.full(self.size, -1.0)  # Field as 2d array
        self.border = border  # Lines between cells
        self.direction = "N"  # N, E, S, W
        self.ant = (-1, -1)  # Ant current position on field (none placed yet)
        # Colors [(x,y,z), ..., (xn,yn,zn)], Rules = "RL..."
        self.rules = rules
        self.colors = colors

    def transition(self, surface):
        """
        Updates cells colors on field between each transition
        """
        ant_xpos, ant_ypos = self.ant[0], self.ant[1]
        ant_xcoord, ant_ycoord = ant_xpos * self.scale, ant_ypos * self.scale
        # Next color index: an empty (-1) cell becomes color 0, otherwise
        # cycle to the next color in the list. This deduplicates the two
        # near-identical branches of the original implementation.
        if self.curr_array[ant_xpos][ant_ypos] == -1:
            update_idx = 0
        else:
            update_idx = (int(self.curr_array[ant_xpos][ant_ypos]) + 1) % len(self.colors)
        self.curr_array[ant_xpos][ant_ypos] = update_idx
        # rotate direction based on rule of the new color
        self.rotate(self.rules[update_idx])
        # Update color of cell ant is currently on, move ant forward in
        # current direction, then draw ant (red) on that forward cell
        pygame.draw.rect(surface, self.colors[update_idx],
                         [ant_xcoord, ant_ycoord, self.scale - self.border, self.scale - self.border])
        self.move()
        pygame.draw.rect(surface, (255, 0, 0),
                         [self.ant[0] * self.scale, self.ant[1] * self.scale,
                          self.scale - self.border, self.scale - self.border])

    def move(self):
        """
        Changes ant's current location to cell 1 away in its current direction
        """
        x, y = self.ant[0], self.ant[1]
        # Stitches field for when ant is at edge (toroidal wrap)
        steps = {
            "N": (x, (y - 1) % self.columns),
            "E": ((x + 1) % self.rows, y),
            "S": (x, (y + 1) % self.columns),
            "W": ((x - 1) % self.rows, y),
        }
        # Unknown direction leaves the ant in place (same as original)
        self.ant = steps.get(self.direction, self.ant)

    def rotate(self, rule):
        """
        Changes current direction based on rule ("R" = clockwise, "L" = counterclockwise)
        """
        clockwise = {"N": "E", "E": "S", "S": "W", "W": "N"}
        if rule == "R":
            self.direction = clockwise.get(self.direction, self.direction)
        elif rule == "L":
            counterclockwise = {v: k for k, v in clockwise.items()}
            self.direction = counterclockwise.get(self.direction, self.direction)

    def click(self, pos, direction, surface):
        """
        Clicking on cell spawns ant in specified direction
        """
        x, y = int(pos[0] / self.scale), int(pos[1] / self.scale)
        prev_x, prev_y = self.ant[0] * self.scale, self.ant[1] * self.scale
        new_x, new_y = x * self.scale, y * self.scale
        if self.ant != (-1, -1):  # There is an ant on the field currently
            # make sure previous cell where ant was can still update later
            # by making that cell empty (white)
            self.curr_array[self.ant[0], self.ant[1]] = -1
            # On clicking, deletes previous ant
            pygame.draw.rect(surface, (255, 255, 255),
                             [prev_x, prev_y, self.scale - self.border, self.scale - self.border])
        # Creates new ant in specified direction
        self.ant = (x, y)
        pygame.draw.rect(surface, (255, 0, 0),
                         [new_x, new_y, self.scale - self.border, self.scale - self.border])
        self.direction = direction

    def reset(self, surface):
        """
        Clears entire field to all empty (white) cells
        """
        for x in range(self.rows):
            for y in range(self.columns):
                x_pos, y_pos = x * self.scale, y * self.scale
                self.curr_array[x][y] = -1  # initial field array to all empty (-1)
                pygame.draw.rect(surface, (255, 255, 255),
                                 [x_pos, y_pos, self.scale - self.border, self.scale - self.border])
class Turmite:
    ######################
    # Class for Turmites #
    ######################
    """Turmite (generalized Langton's Ant) on a toroidal grid.

    NOTE: this class is currently a byte-for-byte duplicate of Langton;
    consider sharing a common base class in a future refactor.
    Cell values: -1 = empty/white, otherwise an index into `colors`.
    """

    def __init__(self, width, height, scale, border, colors, rules):
        self.scale = scale
        self.rows = int(width / scale)
        self.columns = int(height / scale)
        self.size = (self.rows, self.columns)
        # BUGFIX: start with an all-empty (-1) field; np.ndarray(shape=...)
        # leaves arbitrary values that transition() would misread as colors.
        self.curr_array = np.full(self.size, -1.0)  # Field as 2d array
        self.border = border  # Lines between cells
        self.direction = "N"  # N, E, S, W
        self.ant = (-1, -1)  # Ant current position on field (none placed yet)
        # Colors [(x,y,z), ..., (xn,yn,zn)], Rules = "RL..."
        self.rules = rules
        self.colors = colors

    def transition(self, surface):
        """
        Updates cells colors on field between each transition
        """
        ant_xpos, ant_ypos = self.ant[0], self.ant[1]
        ant_xcoord, ant_ycoord = ant_xpos * self.scale, ant_ypos * self.scale
        # Next color index: an empty (-1) cell becomes color 0, otherwise
        # cycle to the next color (deduplicates the two original branches).
        if self.curr_array[ant_xpos][ant_ypos] == -1:
            update_idx = 0
        else:
            update_idx = (int(self.curr_array[ant_xpos][ant_ypos]) + 1) % len(self.colors)
        self.curr_array[ant_xpos][ant_ypos] = update_idx
        # rotate direction based on rule of the new color
        self.rotate(self.rules[update_idx])
        # Update color of cell ant is currently on, move ant forward in
        # current direction, then draw ant (red) on that forward cell
        pygame.draw.rect(surface, self.colors[update_idx],
                         [ant_xcoord, ant_ycoord, self.scale - self.border, self.scale - self.border])
        self.move()
        pygame.draw.rect(surface, (255, 0, 0),
                         [self.ant[0] * self.scale, self.ant[1] * self.scale,
                          self.scale - self.border, self.scale - self.border])

    def move(self):
        """
        Changes ant's current location to cell 1 away in its current direction
        """
        x, y = self.ant[0], self.ant[1]
        # Stitches field for when ant is at edge (toroidal wrap)
        steps = {
            "N": (x, (y - 1) % self.columns),
            "E": ((x + 1) % self.rows, y),
            "S": (x, (y + 1) % self.columns),
            "W": ((x - 1) % self.rows, y),
        }
        # Unknown direction leaves the ant in place (same as original)
        self.ant = steps.get(self.direction, self.ant)

    def rotate(self, rule):
        """
        Changes current direction based on rule ("R" = clockwise, "L" = counterclockwise)
        """
        clockwise = {"N": "E", "E": "S", "S": "W", "W": "N"}
        if rule == "R":
            self.direction = clockwise.get(self.direction, self.direction)
        elif rule == "L":
            counterclockwise = {v: k for k, v in clockwise.items()}
            self.direction = counterclockwise.get(self.direction, self.direction)

    def click(self, pos, direction, surface):
        """
        Clicking on cell spawns ant in specified direction
        """
        x, y = int(pos[0] / self.scale), int(pos[1] / self.scale)
        prev_x, prev_y = self.ant[0] * self.scale, self.ant[1] * self.scale
        new_x, new_y = x * self.scale, y * self.scale
        if self.ant != (-1, -1):  # There is an ant on the field currently
            # make sure previous cell where ant was can still update later
            # by making that cell empty (white)
            self.curr_array[self.ant[0], self.ant[1]] = -1
            # On clicking, deletes previous ant
            pygame.draw.rect(surface, (255, 255, 255),
                             [prev_x, prev_y, self.scale - self.border, self.scale - self.border])
        # Creates new ant in specified direction
        self.ant = (x, y)
        pygame.draw.rect(surface, (255, 0, 0),
                         [new_x, new_y, self.scale - self.border, self.scale - self.border])
        self.direction = direction

    def reset(self, surface):
        """
        Clears entire field to all empty (white) cells
        """
        for x in range(self.rows):
            for y in range(self.columns):
                x_pos, y_pos = x * self.scale, y * self.scale
                self.curr_array[x][y] = -1  # initial field array to all empty (-1)
                pygame.draw.rect(surface, (255, 255, 255),
                                 [x_pos, y_pos, self.scale - self.border, self.scale - self.border])
| 41.59144
| 120
| 0.511975
| 2,643
| 21,378
| 4.076807
| 0.088157
| 0.070162
| 0.06877
| 0.077587
| 0.836009
| 0.811508
| 0.796381
| 0.772065
| 0.740696
| 0.736984
| 0
| 0.018397
| 0.366872
| 21,378
| 514
| 121
| 41.59144
| 0.777687
| 0.214286
| 0
| 0.807818
| 0
| 0
| 0.002898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087948
| false
| 0
| 0.006515
| 0
| 0.117264
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e3e240ac39b8e648c6ed7956774dce3d4bcc62c
| 503
|
py
|
Python
|
pyais/ais/binary_broadcast.py
|
reinderien/pyais
|
c65ad5296966b2c69da7307b8f43e991aa285cb0
|
[
"MIT"
] | null | null | null |
pyais/ais/binary_broadcast.py
|
reinderien/pyais
|
c65ad5296966b2c69da7307b8f43e991aa285cb0
|
[
"MIT"
] | null | null | null |
pyais/ais/binary_broadcast.py
|
reinderien/pyais
|
c65ad5296966b2c69da7307b8f43e991aa285cb0
|
[
"MIT"
] | null | null | null |
from ..bits import Bits
def decode(bits: Bits) -> dict:
    """Decode a type-8 binary broadcast payload.

    Currently a stub: no fields are extracted yet, so an empty mapping is
    returned for every message.
    """
    return {}
# NOTE(review): the block below is a module-level string literal that keeps
# the legacy bit-vector implementation around for reference only — it is
# never executed. Re-enable it once DAC/FID-dependent interpretation of the
# payload is implemented in decode() above.
'''
def decode_msg_8(bit_vector):
    """
    Binary Broadcast Message
    TODO: data needs to be interpreted depending DAC-FID
    """
    return {
        'type': to_int(bit_vector[0:6], 2),
        'repeat': to_int(bit_vector[6:8], 2),
        'mmsi': to_int(bit_vector[8:38], 2),
        'dac': to_int(bit_vector[40:50], 2),
        'fid': to_int(bit_vector[50:56], 2),
        'data': to_int(bit_vector[56::], 2),
    }
'''
| 21.869565
| 56
| 0.564612
| 76
| 503
| 3.539474
| 0.447368
| 0.234201
| 0.178439
| 0.312268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064171
| 0.256461
| 503
| 22
| 57
| 22.863636
| 0.65508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
0e44ae8236f3e4fbeed7b1bb625d36523b32f807
| 232,192
|
py
|
Python
|
lib/quantile_airs.py
|
L2UQ/airs_uq
|
fa12000432ba20eae6fe2222af639532528ca3b1
|
[
"Apache-2.0"
] | null | null | null |
lib/quantile_airs.py
|
L2UQ/airs_uq
|
fa12000432ba20eae6fe2222af639532528ca3b1
|
[
"Apache-2.0"
] | null | null | null |
lib/quantile_airs.py
|
L2UQ/airs_uq
|
fa12000432ba20eae6fe2222af639532528ca3b1
|
[
"Apache-2.0"
] | null | null | null |
# Quantile utilities for processing MERRA/AIRS data
import numpy
import numpy.ma as ma
import calculate_VPD
import netCDF4
from netCDF4 import Dataset
from numpy import random, linalg
import datetime
import pandas
import os, sys
from scipy import stats
import h5py
def quantile_cloud_locmask(airsdr, mtdr, indr, dtdr, yrlst, mnst, mnfn, hrchc, rgchc, msk):
    # Construct cloud variable quantiles and z-scores, with a possibly irregular location mask
    #
    # Parameters
    #   airsdr : directory holding the AIRS_Levels_Quantiles.nc reference file
    #   mtdr   : directory holding the interpolated MERRA-2 NetCDF files
    #   indr   : directory holding the MERRA-2 HDF5 files with cloud parameters
    #   dtdr   : output directory for the quantile / transformed-sample files
    #   yrlst  : list of years to process
    #   mnst   : starting month (1-12)
    #   mnfn   : final month (1-12); if 12, the span runs to Dec 31 inclusive
    #   hrchc  : UTC hour choice, interpolated into the input/output file names
    #   rgchc  : region tag string, interpolated into the output file names
    #   msk    : name of the 2-D mask variable in the MERRA-2 NetCDF file
    #
    # Side effects: writes two NetCDF files (quantiles and standard-Gaussian
    # transformed samples) named after the last year in yrlst. Returns None.
    #
    # NOTE(review): missing data are flagged with -9999.0 (floats) / -99 (ints)
    # throughout; the quantile helpers in calculate_VPD are assumed to honor
    # those sentinels (msgval arguments below) — confirm against that module.

    # Read probs and pressure levels
    rnm = '%s/AIRS_Levels_Quantiles.nc' % (airsdr)
    f = Dataset(rnm,'r')
    plev = f['level'][:]
    prbs = f['probability'][:]
    alts = f['altitude'][:]
    f.close()
    nyr = len(yrlst)
    nprb = prbs.shape[0]

    # Mask, lat, lon
    fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_South_Southeast_US_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[0],hrchc)
    f = Dataset(fnm,'r')
    mask = f[msk][:,:]
    latmet = f['plat'][:]
    lonmet = f['plon'][:]
    f.close()

    mask[mask <= 0] = 0
    lnsq = numpy.arange(lonmet.shape[0])
    ltsq = numpy.arange(latmet.shape[0])

    # Subset a bit: find the bounding box of grid cells where the mask is
    # active, so only that rectangle is read from the (large) daily files.
    lnsm = numpy.sum(mask,axis=0)
    print(lnsq.shape)
    print(lnsm.shape)
    print(lnsm)
    ltsm = numpy.sum(mask,axis=1)
    print(ltsq.shape)
    print(ltsm.shape)
    print(ltsm)

    lnmn = numpy.amin(lnsq[lnsm > 0])
    lnmx = numpy.amax(lnsq[lnsm > 0]) + 1
    ltmn = numpy.amin(ltsq[ltsm > 0])
    ltmx = numpy.amax(ltsq[ltsm > 0]) + 1

    stridx = 'Lon Range: %d, %d\nLat Range: %d, %d \n' % (lnmn,lnmx,ltmn,ltmx)
    print(stridx)

    #latflt = latin.flatten()
    #lonflt = lonin.flatten()
    #mskflt = mask.flatten()
    #lcsq = numpy.arange(mskflt.shape[0])
    #lcsb = lcsq[mskflt > 0]

    nx = lnmx - lnmn
    ny = ltmx - ltmn
    # Per-time-step lon/lat vectors for the flattened bounding box
    lnrp = numpy.tile(lonmet[lnmn:lnmx],ny)
    ltrp = numpy.repeat(latmet[ltmn:ltmx],nx)
    mskblk = mask[ltmn:ltmx,lnmn:lnmx]
    mskflt = mskblk.flatten()

    # tsmp accumulates the total number of masked samples across all years
    tsmp = 0
    for k in range(nyr):
        # Day-of-year bookkeeping: dyinit (June 1) is the first record in the
        # JJA files, so day indices are offsets from it.
        dyinit = datetime.date(yrlst[k],6,1)

        dyst = datetime.date(yrlst[k],mnst,1)
        ttst = dyst.timetuple()
        jst = ttst.tm_yday
        if mnfn < 12:
            dyfn = datetime.date(yrlst[k],mnfn+1,1)
            ttfn = dyfn.timetuple()
            jfn = ttfn.tm_yday
        else:
            # December: end at Jan 1 of the next year (Julian day 366/367)
            dyfn = datetime.date(yrlst[k]+1,1,1)
            dy31 = datetime.date(yrlst[k],12,31)
            tt31 = dy31.timetuple()
            jfn = tt31.tm_yday + 1

        dystidx = abs((dyst-dyinit).days)
        dyfnidx = abs((dyfn-dyinit).days)

        jdsq = numpy.arange(jst,jfn)
        print(jdsq)
        tmhld = numpy.repeat(jdsq,nx*ny)
        print(tmhld.shape)
        print(numpy.amin(tmhld))
        print(numpy.amax(tmhld))

        stridx = 'Day Range: %d, %d\n' % (dystidx,dyfnidx)
        print(stridx)

        # Read the two-slab cloud parameters for this year, bounding box only
        fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_South_Southeast_US_%02dUTC_no_vertical_variation_for_missing_IncludesCloudParams.h5' % (indr,yrlst[k],hrchc)
        f = h5py.File(fnm,'r')
        ctyp1 = f['/ctype'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        ctyp2 = f['/ctype2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprt1 = f['/cprtop'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprt2 = f['/cprtop2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprb1 = f['/cprbot'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprb2 = f['/cprbot2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cfrc1 = f['/cfrac'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cfrc2 = f['/cfrac2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cfrc12 = f['/cfrac12'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cngwt1 = f['/cngwat'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cngwt2 = f['/cngwat2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cttp1 = f['/cstemp'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cttp2 = f['/cstemp2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        f.close()

        # Surface pressure from the matching MERRA-2 NetCDF file
        mtnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_South_Southeast_US_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = Dataset(mtnm,'r')
        psfc = f.variables['spres'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        f.close()

        nt = ctyp1.shape[0]

        # Replicate the spatial mask for every time step and keep the indices
        # (msksb) of the flattened samples inside the mask
        mskall = numpy.tile(mskflt,nt)
        msksq = numpy.arange(mskall.shape[0])
        msksb = msksq[mskall > 0]
        mskstr = 'Total Obs: %d, Within Mask: %d \n' % (msksq.shape[0],msksb.shape[0])
        print(mskstr)

        lthld = numpy.tile(ltrp,nt)
        lnhld = numpy.tile(lnrp,nt)

        ctyp1 = ctyp1.flatten()
        ctyp2 = ctyp2.flatten()
        cfrc1 = cfrc1.flatten()
        cfrc2 = cfrc2.flatten()
        cfrc12 = cfrc12.flatten()
        cngwt1 = cngwt1.flatten()
        cngwt2 = cngwt2.flatten()
        cttp1 = cttp1.flatten()
        cttp2 = cttp2.flatten()
        psfc = psfc.flatten()

        # Number of slabs
        # Cloud-type codes > 100 indicate a valid slab (101 liquid, 201 ice);
        # both valid -> 2 slabs, only slab 1 valid -> 1 slab, else 0.
        nslbtmp = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16)
        nslbtmp[(ctyp1 > 100) & (ctyp2 > 100)] = 2
        nslbtmp[(ctyp1 > 100) & (ctyp2 < 100)] = 1

        # First year initializes the accumulator arrays; later years append
        if tsmp == 0:
            nslabout = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            nslabout[:] = nslbtmp[msksb]
        else:
            nslabout = numpy.append(nslabout,nslbtmp[msksb])

        flsq = numpy.arange(ctyp1.shape[0])

        # For two slabs, slab 1 must have highest cloud bottom pressure
        # (i.e. slab 1 is the lower cloud); swpsq marks samples to swap.
        cprt1 = cprt1.flatten()
        cprt2 = cprt2.flatten()
        cprb1 = cprb1.flatten()
        cprb2 = cprb2.flatten()
        slabswap = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16)
        swpsq = flsq[(nslbtmp == 2) & (cprb1 < cprb2)]
        slabswap[swpsq] = 1
        print(numpy.mean(slabswap))

        # Cloud Pressure variables
        # Assign slab 1/2 bottom and top pressures, applying the swap above
        pbttmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        pbttmp1[nslbtmp >= 1] = cprb1[nslbtmp >= 1]
        pbttmp1[swpsq] = cprb2[swpsq]

        ptptmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        ptptmp1[nslbtmp >= 1] = cprt1[nslbtmp >= 1]
        ptptmp1[swpsq] = cprt2[swpsq]

        pbttmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        pbttmp2[nslbtmp == 2] = cprb2[nslbtmp == 2]
        pbttmp2[swpsq] = cprb1[swpsq]

        ptptmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        ptptmp2[nslbtmp == 2] = cprt2[nslbtmp == 2]
        ptptmp2[swpsq] = cprt1[swpsq]

        # DP Cloud transformation: work with pressure depths instead of
        # absolute top/bottom pressures.
        #   dptmp1   = slab 1 thickness (bottom - top)
        #   dpslbtmp = gap between slab 1 top and slab 2 bottom
        #   dptmp2   = slab 2 thickness
        dptmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        dptmp1[nslbtmp >= 1] = pbttmp1[nslbtmp >= 1] - ptptmp1[nslbtmp >= 1]

        dpslbtmp = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        dpslbtmp[nslbtmp == 2] = ptptmp1[nslbtmp == 2] - pbttmp2[nslbtmp == 2]

        dptmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        dptmp2[nslbtmp == 2] = pbttmp2[nslbtmp == 2] - ptptmp2[nslbtmp == 2]

        # Adjust negative DPSlab values: overlapping slabs get a nominal 1 hPa
        # separation, with the overlap split evenly between the two depths
        dpnsq = flsq[(nslbtmp == 2) & (dpslbtmp < 0.0) & (dpslbtmp > -1000.0)]
        dpadj = numpy.zeros((ctyp1.shape[0],))
        dpadj[dpnsq] = numpy.absolute(dpslbtmp[dpnsq])

        dpslbtmp[dpnsq] = 1.0
        dptmp1[dpnsq] = dptmp1[dpnsq] / 2.0
        dptmp2[dpnsq] = dptmp2[dpnsq] / 2.0

        # Sigma / Logit Adjustments: express the pressure pieces as fractions
        # of surface pressure and map them through calculate_VPD.lgtzs.
        zpbtmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        zdp1tmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        zdslbtmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        zdp2tmp = numpy.zeros((psfc.shape[0],)) - 9999.0

        # ncldct counts samples whose cloud bottom lies below the surface
        ncldct = 0
        for t in range(psfc.shape[0]):
            if ( (pbttmp1[t] >= 0.0) and (dpslbtmp[t] >= 0.0) ):
                # Two-slab case: 5-component composition (below-cloud, slab 1,
                # gap, slab 2, remainder), must sum to 1 for the logit
                prptmp = numpy.array( [ (psfc[t] - pbttmp1[t]) / psfc[t], \
                                        dptmp1[t] / psfc[t], dpslbtmp[t] / psfc[t], \
                                        dptmp2[t] / psfc[t], 0.0 ] )
                if (prptmp[0] < 0.0):
                    # Adjustment needed: cloud bottom below surface; clamp the
                    # first component and shrink the others proportionally
                    prpadj = prptmp[0]
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    prptmp[2] = prptmp[2] + prpadj*prptmp[2]
                    prptmp[3] = prptmp[3] + prpadj*prptmp[3]
                    ncldct = ncldct + 1
                elif (prptmp[0] == 0.0):
                    # Adjustment needed: avoid an exact zero in the logit
                    prpadj = -0.01
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    prptmp[2] = prptmp[2] + prpadj*prptmp[2]
                    prptmp[3] = prptmp[3] + prpadj*prptmp[3]
                    ncldct = ncldct + 1
                prptmp[4] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2] - prptmp[3]
                ztmp = calculate_VPD.lgtzs(prptmp)
                zpbtmp[t] = ztmp[0]
                zdp1tmp[t] = ztmp[1]
                zdslbtmp[t] = ztmp[2]
                zdp2tmp[t] = ztmp[3]
            elif ( pbttmp1[t] >= 0.0 ):
                # One-slab case: 3-component composition
                prptmp = numpy.array( [ (psfc[t] - pbttmp1[t]) / psfc[t], \
                                        dptmp1[t] / psfc[t], 0.0 ] )
                if (prptmp[0] < 0.0):
                    # Adjustment needed
                    prpadj = prptmp[0]
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    ncldct = ncldct + 1
                elif (prptmp[0] == 0.0):
                    # Adjustment needed
                    prpadj = -0.01
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    ncldct = ncldct + 1
                prptmp[2] = 1.0 - prptmp[0] - prptmp[1]
                ztmp = calculate_VPD.lgtzs(prptmp)
                zpbtmp[t] = ztmp[0]
                zdp1tmp[t] = ztmp[1]
                zdslbtmp[t] = -9999.0
                zdp2tmp[t] = -9999.0
            else:
                # Clear sky: all components missing
                zpbtmp[t] = -9999.0
                zdp1tmp[t] = -9999.0
                zdslbtmp[t] = -9999.0
                zdp2tmp[t] = -9999.0
        str1 = 'Cloud Bot Pres Below Sfc: %d ' % (ncldct)
        print(str1)

        if tsmp == 0:
            psfcout = numpy.zeros((msksb.shape[0],)) - 9999.0
            psfcout[:] = psfc[msksb]
            prsbot1out = numpy.zeros((msksb.shape[0],)) - 9999.0
            prsbot1out[:] = zpbtmp[msksb]
            dpcld1out = numpy.zeros((msksb.shape[0],)) - 9999.0
            dpcld1out[:] = zdp1tmp[msksb]
            dpslbout = numpy.zeros((msksb.shape[0],)) - 9999.0
            dpslbout[:] = zdslbtmp[msksb]
            dpcld2out = numpy.zeros((msksb.shape[0],)) - 9999.0
            dpcld2out[:] = zdp2tmp[msksb]
        else:
            psfcout = numpy.append(psfcout,psfc[msksb])
            prsbot1out = numpy.append(prsbot1out,zpbtmp[msksb])
            dpcld1out = numpy.append(dpcld1out,zdp1tmp[msksb])
            dpslbout = numpy.append(dpslbout,zdslbtmp[msksb])
            dpcld2out = numpy.append(dpcld2out,zdp2tmp[msksb])

        # Slab Types: 101.0 = Liquid, 201.0 = Ice, None else
        # Output: 0 = Liquid, 1 = Ice
        typtmp1 = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16) - 99
        typtmp1[nslbtmp >= 1] = (ctyp1[nslbtmp >= 1] - 1.0) / 100.0 - 1.0
        typtmp1[swpsq] = (ctyp2[swpsq] - 1.0) / 100.0 - 1.0

        typtmp2 = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16) - 99
        typtmp2[nslbtmp == 2] = (ctyp2[nslbtmp == 2] - 1.0) / 100.0 - 1.0
        typtmp2[swpsq] = (ctyp1[swpsq] - 1.0) / 100.0 - 1.0

        if tsmp == 0:
            slbtyp1out = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            slbtyp1out[:] = typtmp1[msksb]
            slbtyp2out = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            slbtyp2out[:] = typtmp2[msksb]
        else:
            slbtyp1out = numpy.append(slbtyp1out,typtmp1[msksb])
            slbtyp2out = numpy.append(slbtyp2out,typtmp2[msksb])

        # Cloud Fraction Logit, still account for swapping
        z1tmp = numpy.zeros((cfrc1.shape[0],)) - 9999.0
        z2tmp = numpy.zeros((cfrc1.shape[0],)) - 9999.0
        z12tmp = numpy.zeros((cfrc1.shape[0],)) - 9999.0

        for t in range(z1tmp.shape[0]):
            if ( (cfrc1[t] > 0.0) and (cfrc2[t] > 0.0) and (cfrc12[t] > 0.0) ):
                # Must adjust amounts: remove the overlap from each slab's
                # fraction so the four pieces form a composition
                if (slabswap[t] == 0):
                    prptmp = numpy.array( [cfrc1[t]-cfrc12[t], cfrc2[t]-cfrc12[t], cfrc12[t], 0.0] )
                else:
                    prptmp = numpy.array( [cfrc2[t]-cfrc12[t], cfrc1[t]-cfrc12[t], cfrc12[t], 0.0] )
                prptmp[3] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2]
                ztmp = calculate_VPD.lgtzs(prptmp)
                z1tmp[t] = ztmp[0]
                z2tmp[t] = ztmp[1]
                z12tmp[t] = ztmp[2]
            elif ( (cfrc1[t] > 0.0) and (cfrc2[t] > 0.0) ):
                # Two slabs, no overlap
                if (slabswap[t] == 0):
                    prptmp = numpy.array( [cfrc1[t], cfrc2[t], 0.0] )
                else:
                    prptmp = numpy.array( [cfrc2[t], cfrc1[t], 0.0] )
                prptmp[2] = 1.0 - prptmp[0] - prptmp[1]
                ztmp = calculate_VPD.lgtzs(prptmp)
                z1tmp[t] = ztmp[0]
                z2tmp[t] = ztmp[1]
                z12tmp[t] = -9999.0
            elif ( cfrc1[t] > 0.0 ):
                # Single slab
                prptmp = numpy.array( [cfrc1[t], 1.0 - cfrc1[t] ] )
                ztmp = calculate_VPD.lgtzs(prptmp)
                z1tmp[t] = ztmp[0]
                z2tmp[t] = -9999.0
                z12tmp[t] = -9999.0
            else:
                # Clear sky
                z1tmp[t] = -9999.0
                z2tmp[t] = -9999.0
                z12tmp[t] = -9999.0

        if tsmp == 0:
            cfclgt1out = numpy.zeros((msksb.shape[0],)) - 9999.0
            cfclgt1out[:] = z1tmp[msksb]
            cfclgt2out = numpy.zeros((msksb.shape[0],)) - 9999.0
            cfclgt2out[:] = z2tmp[msksb]
            cfclgt12out = numpy.zeros((msksb.shape[0],)) - 9999.0
            cfclgt12out[:] = z12tmp[msksb]
        else:
            cfclgt1out = numpy.append(cfclgt1out,z1tmp[msksb])
            cfclgt2out = numpy.append(cfclgt2out,z2tmp[msksb])
            cfclgt12out = numpy.append(cfclgt12out,z12tmp[msksb])

        # Cloud Non-Gas Water
        ngwttmp1 = numpy.zeros(cngwt1.shape[0]) - 9999.0
        ngwttmp1[nslbtmp >= 1] = cngwt1[nslbtmp >= 1]
        ngwttmp1[swpsq] = cngwt2[swpsq]

        ngwttmp2 = numpy.zeros(cngwt1.shape[0]) - 9999.0
        ngwttmp2[nslbtmp == 2] = cngwt2[nslbtmp == 2]
        ngwttmp2[swpsq] = cngwt1[swpsq]

        if tsmp == 0:
            ngwt1out = numpy.zeros((msksb.shape[0],)) - 9999.0
            ngwt1out[:] = ngwttmp1[msksb]
            ngwt2out = numpy.zeros((msksb.shape[0],)) - 9999.0
            ngwt2out[:] = ngwttmp2[msksb]
        else:
            ngwt1out = numpy.append(ngwt1out,ngwttmp1[msksb])
            ngwt2out = numpy.append(ngwt2out,ngwttmp2[msksb])

        # Cloud Top Temperature
        cttptmp1 = numpy.zeros(cttp1.shape[0]) - 9999.0
        cttptmp1[nslbtmp >= 1] = cttp1[nslbtmp >= 1]
        cttptmp1[swpsq] = cttp2[swpsq]

        cttptmp2 = numpy.zeros(cttp1.shape[0]) - 9999.0
        cttptmp2[nslbtmp == 2] = cttp2[nslbtmp == 2]
        cttptmp2[swpsq] = cttp1[swpsq]

        if tsmp == 0:
            cttp1out = numpy.zeros((msksb.shape[0],)) - 9999.0
            cttp1out[:] = cttptmp1[msksb]
            cttp2out = numpy.zeros((msksb.shape[0],)) - 9999.0
            cttp2out[:] = cttptmp2[msksb]
        else:
            cttp1out = numpy.append(cttp1out,cttptmp1[msksb])
            cttp2out = numpy.append(cttp2out,cttptmp2[msksb])

        # Loc/Time
        if tsmp == 0:
            latout = numpy.zeros((msksb.shape[0],)) - 9999.0
            latout[:] = lthld[msksb]
            lonout = numpy.zeros((msksb.shape[0],)) - 9999.0
            lonout[:] = lnhld[msksb]
            yrout = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            yrout[:] = yrlst[k]
            jdyout = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            jdyout[:] = tmhld[msksb]
        else:
            latout = numpy.append(latout,lthld[msksb])
            lonout = numpy.append(lonout,lnhld[msksb])
            yrtmp = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            yrtmp[:] = yrlst[k]
            yrout = numpy.append(yrout,yrtmp)
            jdyout = numpy.append(jdyout,tmhld[msksb])

        tsmp = tsmp + msksb.shape[0]

    # Process quantiles over the full multi-year sample.
    # NOTE(review): index 53 below is treated as a reference probability for
    # the diagnostic prints — assumes the probability grid has > 53 entries;
    # confirm against AIRS_Levels_Quantiles.nc.
    nslbqs = calculate_VPD.quantile_msgdat_discrete(nslabout,prbs)
    str1 = '%.2f Number Slab Quantile: %d' % (prbs[53],nslbqs[53])
    print(str1)
    print(nslbqs)

    psfcqs = calculate_VPD.quantile_msgdat(psfcout,prbs)
    str1 = '%.2f Surface Pressure Quantile: %.3f' % (prbs[53],psfcqs[53])
    print(str1)

    prsbt1qs = calculate_VPD.quantile_msgdat(prsbot1out,prbs)
    str1 = '%.2f CldBot1 Pressure Quantile: %.3f' % (prbs[53],prsbt1qs[53])
    print(str1)

    dpcld1qs = calculate_VPD.quantile_msgdat(dpcld1out,prbs)
    str1 = '%.2f DPCloud1 Quantile: %.3f' % (prbs[53],dpcld1qs[53])
    print(str1)

    dpslbqs = calculate_VPD.quantile_msgdat(dpslbout,prbs)
    str1 = '%.2f DPSlab Quantile: %.3f' % (prbs[53],dpslbqs[53])
    print(str1)

    dpcld2qs = calculate_VPD.quantile_msgdat(dpcld2out,prbs)
    str1 = '%.2f DPCloud2 Quantile: %.3f' % (prbs[53],dpcld2qs[53])
    print(str1)

    slb1qs = calculate_VPD.quantile_msgdat_discrete(slbtyp1out,prbs)
    str1 = '%.2f Type1 Quantile: %d' % (prbs[53],slb1qs[53])
    print(str1)

    slb2qs = calculate_VPD.quantile_msgdat_discrete(slbtyp2out,prbs)
    str1 = '%.2f Type2 Quantile: %d' % (prbs[53],slb2qs[53])
    print(str1)

    lgt1qs = calculate_VPD.quantile_msgdat(cfclgt1out,prbs)
    str1 = '%.2f Logit 1 Quantile: %.3f' % (prbs[53],lgt1qs[53])
    print(str1)

    lgt2qs = calculate_VPD.quantile_msgdat(cfclgt2out,prbs)
    str1 = '%.2f Logit 2 Quantile: %.3f' % (prbs[53],lgt2qs[53])
    print(str1)

    lgt12qs = calculate_VPD.quantile_msgdat(cfclgt12out,prbs)
    str1 = '%.2f Logit 1/2 Quantile: %.3f' % (prbs[53],lgt12qs[53])
    print(str1)

    ngwt1qs = calculate_VPD.quantile_msgdat(ngwt1out,prbs)
    str1 = '%.2f NGWater1 Quantile: %.3f' % (prbs[53],ngwt1qs[53])
    print(str1)

    ngwt2qs = calculate_VPD.quantile_msgdat(ngwt2out,prbs)
    str1 = '%.2f NGWater2 Quantile: %.3f' % (prbs[53],ngwt2qs[53])
    print(str1)

    cttp1qs = calculate_VPD.quantile_msgdat(cttp1out,prbs)
    str1 = '%.2f CTTemp1 Quantile: %.3f' % (prbs[53],cttp1qs[53])
    print(str1)

    cttp2qs = calculate_VPD.quantile_msgdat(cttp2out,prbs)
    str1 = '%.2f CTTemp2 Quantile: %.3f' % (prbs[53],cttp2qs[53])
    print(str1)

    # Should be no missing for number of slabs
    print('Slab summary')
    print(numpy.amin(nslabout))
    print(numpy.amax(nslabout))
    print(tsmp)

    # Output Quantiles
    # NOTE(review): k here is the leftover loop index, so the file is named
    # after the LAST year in yrlst — confirm that is intended.
    mstr = dyst.strftime('%b')
    qfnm = '%s/%s_US_JJA_%02dUTC_%04d_Cloud_Quantile.nc' % (dtdr,rgchc,hrchc,yrlst[k])
    qout = Dataset(qfnm,'w')
    dimp = qout.createDimension('probability',nprb)

    varprb = qout.createVariable('probability','f4',['probability'], fill_value = -9999)
    varprb[:] = prbs
    varprb.long_name = 'Probability break points'
    varprb.units = 'none'
    varprb.missing_value = -9999

    varnslb = qout.createVariable('NumberSlab_quantile','i2',['probability'], fill_value = -99)
    varnslb[:] = nslbqs
    varnslb.long_name = 'Number of cloud slabs quantiles'
    varnslb.units = 'Count'
    varnslb.missing_value = -99

    varcbprs = qout.createVariable('CloudBot1Logit_quantile','f4',['probability'], fill_value = -9999)
    varcbprs[:] = prsbt1qs
    varcbprs.long_name = 'Slab 1 cloud bottom pressure logit quantiles'
    varcbprs.units = 'hPa'
    varcbprs.missing_value = -9999

    vardpc1 = qout.createVariable('DPCloud1Logit_quantile','f4',['probability'], fill_value = -9999)
    vardpc1[:] = dpcld1qs
    vardpc1.long_name = 'Slab 1 cloud pressure depth logit quantiles'
    vardpc1.units = 'hPa'
    vardpc1.missing_value = -9999

    vardpslb = qout.createVariable('DPSlabLogit_quantile','f4',['probability'], fill_value = -9999)
    vardpslb[:] = dpslbqs
    vardpslb.long_name = 'Two-slab vertical separation logit quantiles'
    vardpslb.units = 'hPa'
    vardpslb.missing_value = -9999

    vardpc2 = qout.createVariable('DPCloud2Logit_quantile','f4',['probability'], fill_value = -9999)
    vardpc2[:] = dpcld2qs
    vardpc2.long_name = 'Slab 2 cloud pressure depth logit quantiles'
    vardpc2.units = 'hPa'
    vardpc2.missing_value = -9999

    vartyp1 = qout.createVariable('CType1_quantile','i2',['probability'], fill_value = -99)
    vartyp1[:] = slb1qs
    vartyp1.long_name = 'Slab 1 cloud type quantiles'
    vartyp1.units = 'None'
    vartyp1.missing_value = -99
    vartyp1.comment = 'Cloud slab type: 0=Liquid, 1=Ice'

    vartyp2 = qout.createVariable('CType2_quantile','i2',['probability'], fill_value = -99)
    vartyp2[:] = slb2qs
    vartyp2.long_name = 'Slab 2 cloud type quantiles'
    vartyp2.units = 'None'
    vartyp2.missing_value = -99
    vartyp2.comment = 'Cloud slab type: 0=Liquid, 1=Ice'

    varlgt1 = qout.createVariable('CFrcLogit1_quantile','f4',['probability'], fill_value = -9999)
    varlgt1[:] = lgt1qs
    varlgt1.long_name = 'Slab 1 cloud fraction (cfrac1x) logit quantiles'
    varlgt1.units = 'None'
    varlgt1.missing_value = -9999

    varlgt2 = qout.createVariable('CFrcLogit2_quantile','f4',['probability'], fill_value = -9999)
    varlgt2[:] = lgt2qs
    varlgt2.long_name = 'Slab 2 cloud fraction (cfrac2x) logit quantiles'
    varlgt2.units = 'None'
    varlgt2.missing_value = -9999

    varlgt12 = qout.createVariable('CFrcLogit12_quantile','f4',['probability'], fill_value = -9999)
    varlgt12[:] = lgt12qs
    varlgt12.long_name = 'Slab 1/2 overlap fraction (cfrac12) logit quantiles'
    varlgt12.units = 'None'
    varlgt12.missing_value = -9999

    varngwt1 = qout.createVariable('NGWater1_quantile','f4',['probability'], fill_value = -9999)
    varngwt1[:] = ngwt1qs
    varngwt1.long_name = 'Slab 1 cloud non-gas water quantiles'
    varngwt1.units = 'g m^-2'
    varngwt1.missing_value = -9999

    varngwt2 = qout.createVariable('NGWater2_quantile','f4',['probability'], fill_value = -9999)
    varngwt2[:] = ngwt2qs
    varngwt2.long_name = 'Slab 2 cloud non-gas water quantiles'
    varngwt2.units = 'g m^-2'
    varngwt2.missing_value = -9999

    varcttp1 = qout.createVariable('CTTemp1_quantile','f4',['probability'], fill_value = -9999)
    varcttp1[:] = cttp1qs
    varcttp1.long_name = 'Slab 1 cloud top temperature'
    varcttp1.units = 'K'
    varcttp1.missing_value = -9999

    varcttp2 = qout.createVariable('CTTemp2_quantile','f4',['probability'], fill_value = -9999)
    varcttp2[:] = cttp2qs
    varcttp2.long_name = 'Slab 2 cloud top temperature'
    varcttp2.units = 'K'
    varcttp2.missing_value = -9999

    qout.close()

    # Set up transformations: map each sample to a standard normal z-score
    # via its empirical quantile (missing values propagate / are filled by
    # the *_fill_msg variants — see calculate_VPD)
    znslb = calculate_VPD.std_norm_quantile_from_obs(nslabout, nslbqs, prbs, msgval=-99)
    zpsfc = calculate_VPD.std_norm_quantile_from_obs(psfcout, psfcqs, prbs, msgval=-9999.)
    zprsbt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(prsbot1out, prsbt1qs, prbs, msgval=-9999.)
    zdpcld1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpcld1out, dpcld1qs, prbs, msgval=-9999.)
    zdpslb = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpslbout, dpslbqs, prbs, msgval=-9999.)
    zdpcld2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpcld2out, dpcld2qs, prbs, msgval=-9999.)
    zctyp1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(slbtyp1out, slb1qs, prbs, msgval=-99)
    zctyp2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(slbtyp2out, slb2qs, prbs, msgval=-99)
    zlgt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt1out, lgt1qs, prbs, msgval=-9999.)
    zlgt2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt2out, lgt2qs, prbs, msgval=-9999.)
    zlgt12 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt12out, lgt12qs, prbs, msgval=-9999.)
    zngwt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(ngwt1out, ngwt1qs, prbs, msgval=-9999.)
    zngwt2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(ngwt2out, ngwt2qs, prbs, msgval=-9999.)
    zcttp1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cttp1out, cttp1qs, prbs, msgval=-9999.)
    zcttp2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cttp2out, cttp2qs, prbs, msgval=-9999.)

    # Output transformed quantile samples
    zfnm = '%s/%s_US_JJA_%02dUTC_%04d_Cloud_StdGausTrans.nc' % (dtdr,rgchc,hrchc,yrlst[k])
    zout = Dataset(zfnm,'w')
    dimsmp = zout.createDimension('sample',tsmp)

    varlon = zout.createVariable('Longitude','f4',['sample'])
    varlon[:] = lonout
    varlon.long_name = 'Longitude'
    varlon.units = 'degrees_east'

    varlat = zout.createVariable('Latitude','f4',['sample'])
    varlat[:] = latout
    varlat.long_name = 'Latitude'
    varlat.units = 'degrees_north'

    varjdy = zout.createVariable('JulianDay','i2',['sample'])
    varjdy[:] = jdyout
    varjdy.long_name = 'JulianDay'
    varjdy.units = 'day'

    varyr = zout.createVariable('Year','i2',['sample'])
    varyr[:] = yrout
    varyr.long_name = 'Year'
    varyr.units = 'year'

    varnslb = zout.createVariable('NumberSlab_StdGaus','f4',['sample'], fill_value = -9999)
    varnslb[:] = znslb
    varnslb.long_name = 'Quantile transformed number of cloud slabs'
    varnslb.units = 'None'
    varnslb.missing_value = -9999.

    varcbprs = zout.createVariable('CloudBot1Logit_StdGaus','f4',['sample'], fill_value = -9999)
    varcbprs[:] = zprsbt1
    varcbprs.long_name = 'Quantile transformed slab 1 cloud bottom pressure logit'
    varcbprs.units = 'None'
    varcbprs.missing_value = -9999.

    vardpc1 = zout.createVariable('DPCloud1Logit_StdGaus','f4',['sample'], fill_value = -9999)
    vardpc1[:] = zdpcld1
    vardpc1.long_name = 'Quantile transformed slab 1 cloud pressure depth logit'
    vardpc1.units = 'None'
    vardpc1.missing_value = -9999.

    vardpslb = zout.createVariable('DPSlabLogit_StdGaus','f4',['sample'], fill_value = -9999)
    vardpslb[:] = zdpslb
    vardpslb.long_name = 'Quantile transformed two-slab vertical separation logit'
    vardpslb.units = 'None'
    vardpslb.missing_value = -9999.

    vardpc2 = zout.createVariable('DPCloud2Logit_StdGaus','f4',['sample'], fill_value = -9999)
    vardpc2[:] = zdpcld2
    vardpc2.long_name = 'Quantile transformed slab 2 cloud pressure depth logit'
    vardpc2.units = 'None'
    vardpc2.missing_value = -9999.

    vartyp1 = zout.createVariable('CType1_StdGaus','f4',['sample'], fill_value = -9999)
    vartyp1[:] = zctyp1
    vartyp1.long_name = 'Quantile transformed slab 1 cloud type logit'
    vartyp1.units = 'None'
    vartyp1.missing_value = -9999.

    vartyp2 = zout.createVariable('CType2_StdGaus','f4',['sample'], fill_value = -9999)
    vartyp2[:] = zctyp2
    vartyp2.long_name = 'Quantile transformed slab 2 cloud type'
    vartyp2.units = 'None'
    vartyp2.missing_value = -9999.

    varlgt1 = zout.createVariable('CFrcLogit1_StdGaus','f4',['sample'], fill_value = -9999)
    varlgt1[:] = zlgt1
    varlgt1.long_name = 'Quantile transformed slab 1 cloud fraction logit'
    varlgt1.units = 'None'
    varlgt1.missing_value = -9999.

    varlgt2 = zout.createVariable('CFrcLogit2_StdGaus','f4',['sample'], fill_value = -9999)
    varlgt2[:] = zlgt2
    varlgt2.long_name = 'Quantile transformed slab 2 cloud fraction logit'
    varlgt2.units = 'None'
    varlgt2.missing_value = -9999.

    varlgt12 = zout.createVariable('CFrcLogit12_StdGaus','f4',['sample'], fill_value = -9999)
    varlgt12[:] = zlgt12
    varlgt12.long_name = 'Quantile transformed slab 1/2 overlap fraction logit'
    varlgt12.units = 'None'
    varlgt12.missing_value = -9999.

    varngwt1 = zout.createVariable('NGWater1_StdGaus','f4',['sample'], fill_value = -9999)
    varngwt1[:] = zngwt1
    varngwt1.long_name = 'Quantile transformed slab 1 non-gas water'
    varngwt1.units = 'None'
    varngwt1.missing_value = -9999.

    varngwt2 = zout.createVariable('NGWater2_StdGaus','f4',['sample'], fill_value = -9999)
    varngwt2[:] = zngwt2
    varngwt2.long_name = 'Quantile transformed slab 2 non-gas water'
    varngwt2.units = 'None'
    varngwt2.missing_value = -9999.

    varcttp1 = zout.createVariable('CTTemp1_StdGaus','f4',['sample'], fill_value = -9999)
    varcttp1[:] = zcttp1
    varcttp1.long_name = 'Quantile transformed slab 1 cloud top temperature'
    varcttp1.units = 'None'
    varcttp1.missing_value = -9999.

    varcttp2 = zout.createVariable('CTTemp2_StdGaus','f4',['sample'], fill_value = -9999)
    varcttp2[:] = zcttp2
    varcttp2.long_name = 'Quantile transformed slab 2 cloud top temperature'
    varcttp2.units = 'None'
    varcttp2.missing_value = -9999.

    zout.close()

    return
# Temp/RH Quantiles
def quantile_profile_locmask(airsdr, mtdr, indr, dtdr, yrlst, mnst, mnfn, hrchc, rgchc, msk):
    # Construct profile/sfc variable quantiles and z-scores, with a possibly irregular location mask
    #
    # Parameters
    #   airsdr : directory holding the AIRS_Levels_Quantiles.nc reference file
    #   mtdr   : directory holding the interpolated MERRA-2 files
    #   indr   : not used in this routine; kept for signature consistency
    #            with quantile_cloud_locmask
    #   dtdr   : output directory for the quantile / transformed-sample files
    #   yrlst  : list of years to process
    #   mnst   : starting month (1-12)
    #   mnfn   : final month (1-12); if 12, the span runs to Dec 31 inclusive
    #   hrchc  : UTC hour choice, interpolated into the input/output file names
    #   rgchc  : region tag string, interpolated into the output file names
    #   msk    : name of the 2-D mask variable in the MERRA-2 file
    #
    # Side effects: writes a quantile NetCDF file and a standard-Gaussian
    # transformed sample NetCDF file, named after the LAST year in yrlst.
    # Returns None.
    #
    # Fix vs. original: the SfcAlt_StdGaus output variable's long_name said
    # 'Quantile transformed surface pressure' (copy-paste from SfcPres);
    # corrected to 'surface altitude'.

    # Read probs and pressure levels
    rnm = '%s/AIRS_Levels_Quantiles.nc' % (airsdr)
    f = Dataset(rnm,'r')
    plev = f['level'][:]
    prbs = f['probability'][:]
    alts = f['altitude'][:]   # read but not used below
    f.close()

    nyr = len(yrlst)
    nprb = prbs.shape[0]

    # 101-level output grid; quantile arrays initialized to the missing value
    nzout = 101
    tmpqout = numpy.zeros((nzout,nprb)) - 9999.
    rhqout = numpy.zeros((nzout,nprb)) - 9999.
    sftmpqs = numpy.zeros((nprb,)) - 9999.
    sfaltqs = numpy.zeros((nprb,)) - 9999.
    psfcqs = numpy.zeros((nprb,)) - 9999.
    altmed = numpy.zeros((nzout,)) - 9999.

    # Mask, lat, lon
    fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_South_Southeast_US_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[0],hrchc)
    f = Dataset(fnm,'r')
    mask = f[msk][:,:]
    latmet = f['plat'][:]
    lonmet = f['plon'][:]
    f.close()

    mask[mask <= 0] = 0
    lnsq = numpy.arange(lonmet.shape[0])
    ltsq = numpy.arange(latmet.shape[0])

    # Subset a bit: bounding box of active mask cells, so only that
    # rectangle is read from the (large) daily files
    lnsm = numpy.sum(mask,axis=0)
    print(lnsq.shape)
    print(lnsm.shape)
    print(lnsm)
    ltsm = numpy.sum(mask,axis=1)
    print(ltsq.shape)
    print(ltsm.shape)
    print(ltsm)

    lnmn = numpy.amin(lnsq[lnsm > 0])
    lnmx = numpy.amax(lnsq[lnsm > 0]) + 1
    ltmn = numpy.amin(ltsq[ltsm > 0])
    ltmx = numpy.amax(ltsq[ltsm > 0]) + 1

    stridx = 'Lon Range: %d, %d\nLat Range: %d, %d \n' % (lnmn,lnmx,ltmn,ltmx)
    print(stridx)

    nx = lnmx - lnmn
    ny = ltmx - ltmn
    lnrp = numpy.tile(lonmet[lnmn:lnmx],ny)
    ltrp = numpy.repeat(latmet[ltmn:ltmx],nx)
    mskblk = mask[ltmn:ltmx,lnmn:lnmx]
    mskflt = mskblk.flatten()

    # First pass over years: surface variables (temperature, pressure,
    # altitude) plus location/time metadata. tsmp accumulates the total
    # masked sample count.
    tsmp = 0
    for k in range(nyr):
        # Day-of-year bookkeeping: dyinit (June 1) is record 0 in the files
        dyinit = datetime.date(yrlst[k],6,1)

        dyst = datetime.date(yrlst[k],mnst,1)
        ttst = dyst.timetuple()
        jst = ttst.tm_yday
        if mnfn < 12:
            dyfn = datetime.date(yrlst[k],mnfn+1,1)
            ttfn = dyfn.timetuple()
            jfn = ttfn.tm_yday
        else:
            # December: end at Jan 1 of the next year
            dyfn = datetime.date(yrlst[k]+1,1,1)
            dy31 = datetime.date(yrlst[k],12,31)
            tt31 = dy31.timetuple()
            jfn = tt31.tm_yday + 1

        dystidx = abs((dyst-dyinit).days)
        dyfnidx = abs((dyfn-dyinit).days)

        jdsq = numpy.arange(jst,jfn)
        tmhld = numpy.repeat(jdsq,nx*ny)

        stridx = 'Day Range: %d, %d\n' % (dystidx,dyfnidx)
        print(stridx)

        mtnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_South_Southeast_US_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = h5py.File(mtnm,'r')
        stparr = f['/stemp'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        psfarr = f['/spres'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        salarr = f['/salti'][ltmn:ltmx,lnmn:lnmx]   # static (no time axis)
        f.close()

        nt = psfarr.shape[0]

        # msksb1 indexes the static (per-grid-cell) mask; msksb indexes the
        # mask replicated over all nt time steps
        msksq1 = numpy.arange(mskflt.shape[0])
        msksb1 = msksq1[mskflt > 0]
        mskall = numpy.tile(mskflt,nt)
        msksq = numpy.arange(mskall.shape[0])
        msksb = msksq[mskall > 0]
        mskstr = 'Total Obs: %d, Within Mask: %d \n' % (msksq.shape[0],msksb.shape[0])
        print(mskstr)

        lthld = numpy.tile(ltrp,nt)
        lnhld = numpy.tile(lnrp,nt)

        stparr = stparr.flatten()
        psfarr = psfarr.flatten()
        salarr = salarr.flatten()

        # First year initializes the accumulators; later years append
        if tsmp == 0:
            sftmpout = numpy.zeros((msksb.shape[0],)) - 9999.0
            sftmpout[:] = stparr[msksb]
            psfcout = numpy.zeros((msksb.shape[0],)) - 9999.0
            psfcout[:] = psfarr[msksb]
            sfaltout = numpy.zeros((msksb.shape[0],)) - 9999.0
            sfaltout[:] = numpy.tile(salarr[msksb1],nt)
        else:
            sftmpout = numpy.append(sftmpout,stparr[msksb])
            psfcout = numpy.append(psfcout,psfarr[msksb])
            sfaltout = numpy.append(sfaltout,numpy.tile(salarr[msksb1],nt))

        # Loc/Time
        if tsmp == 0:
            latout = numpy.zeros((msksb.shape[0],)) - 9999.0
            latout[:] = lthld[msksb]
            lonout = numpy.zeros((msksb.shape[0],)) - 9999.0
            lonout[:] = lnhld[msksb]
            yrout = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            yrout[:] = yrlst[k]
            jdyout = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            jdyout[:] = tmhld[msksb]
        else:
            latout = numpy.append(latout,lthld[msksb])
            lonout = numpy.append(lonout,lnhld[msksb])
            yrtmp = numpy.zeros((msksb.shape[0],),dtype=numpy.int16)
            yrtmp[:] = yrlst[k]
            yrout = numpy.append(yrout,yrtmp)
            jdyout = numpy.append(jdyout,tmhld[msksb])

        tsmp = tsmp + msksb.shape[0]

    # Vertical profiles: second pass over years, now that tsmp is known,
    # filling pre-sized (tsmp, nzout) arrays level by level
    tmpmerout = numpy.zeros((tsmp,nzout)) - 9999.
    h2omerout = numpy.zeros((tsmp,nzout)) - 9999.
    altout = numpy.zeros((tsmp,nzout)) - 9999.

    sidx = 0
    for k in range(nyr):
        dyinit = datetime.date(yrlst[k],6,1)

        dyst = datetime.date(yrlst[k],mnst,1)
        ttst = dyst.timetuple()
        jst = ttst.tm_yday
        if mnfn < 12:
            dyfn = datetime.date(yrlst[k],mnfn+1,1)
            ttfn = dyfn.timetuple()
            jfn = ttfn.tm_yday
        else:
            dyfn = datetime.date(yrlst[k]+1,1,1)
            dy31 = datetime.date(yrlst[k],12,31)
            tt31 = dy31.timetuple()
            jfn = tt31.tm_yday + 1

        dystidx = abs((dyst-dyinit).days)
        dyfnidx = abs((dyfn-dyinit).days)

        jdsq = numpy.arange(jst,jfn)
        tmhld = numpy.repeat(jdsq,nx*ny)

        stridx = 'Day Range: %d, %d\n' % (dystidx,dyfnidx)
        print(stridx)

        mtnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_South_Southeast_US_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = h5py.File(mtnm,'r')
        tmparr = f['/ptemp'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
        h2oarr = f['/rh'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
        altarr = f['/palts'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
        f.close()

        nt = tmparr.shape[0]

        mskall = numpy.tile(mskflt,nt)
        msksq = numpy.arange(mskall.shape[0])
        msksb = msksq[mskall > 0]

        lthld = numpy.tile(ltrp,nt)
        lnhld = numpy.tile(lnrp,nt)

        # This year's samples occupy rows [sidx, fidx) of the output arrays
        fidx = sidx + msksb.shape[0]
        for j in range(nzout):
            tmpvec = tmparr[:,j,:,:].flatten()
            # Values > 1e30 are the file's missing flag; remap to -9999.
            tmpvec[tmpvec > 1e30] = -9999.
            tmpmerout[sidx:fidx,j] = tmpvec[msksb]

            altvec = altarr[:,j,:,:].flatten()
            altout[sidx:fidx,j] = altvec[msksb]

            h2ovec = h2oarr[:,j,:,:].flatten()
            h2ovec[h2ovec > 1e30] = -9999.
            h2omerout[sidx:fidx,j] = h2ovec[msksb]

        sidx = sidx + msksb.shape[0]

    # Quantiles and standard-Gaussian transforms, level by level.
    # NOTE(review): index 103 below is treated as a reference probability
    # (apparently the median) for diagnostics and the altitude grid —
    # assumes the probability grid has > 103 entries; confirm against
    # AIRS_Levels_Quantiles.nc.
    ztmpout = numpy.zeros((tsmp,nzout)) - 9999.
    zrhout = numpy.zeros((tsmp,nzout)) - 9999.
    zsftmpout = numpy.zeros((tsmp,)) - 9999.
    zsfaltout = numpy.zeros((tsmp,)) - 9999.
    zpsfcout = numpy.zeros((tsmp,)) - 9999.

    for j in range(nzout):
        tmptmp = calculate_VPD.quantile_msgdat(tmpmerout[:,j],prbs)
        tmpqout[j,:] = tmptmp[:]
        str1 = 'Plev %.2f, %.2f Temp Quantile: %.3f' % (plev[j],prbs[103],tmptmp[103])
        print(str1)

        # Transform
        ztmp = calculate_VPD.std_norm_quantile_from_obs(tmpmerout[:,j], tmptmp, prbs, msgval=-9999.)
        ztmpout[:,j] = ztmp[:]

        alttmp = calculate_VPD.quantile_msgdat(altout[:,j],prbs)
        altmed[j] = alttmp[103]
        str1 = 'Plev %.2f, %.2f Alt Quantile: %.3f' % (plev[j],prbs[103],alttmp[103])
        print(str1)

        # Adjust RH over 100 (rhadj is a view: this also caps h2omerout)
        rhadj = h2omerout[:,j]
        rhadj[rhadj > 1.0] = 1.0
        rhqtmp = calculate_VPD.quantile_msgdat(rhadj,prbs)
        rhqout[j,:] = rhqtmp[:]
        str1 = 'Plev %.2f, %.2f RH Quantile: %.4f' % (plev[j],prbs[103],rhqtmp[103])
        print(str1)

        zrh = calculate_VPD.std_norm_quantile_from_obs(rhadj, rhqtmp, prbs, msgval=-9999.)
        zrhout[:,j] = zrh[:]

    # Surface variable quantiles and transforms
    psfcqs = calculate_VPD.quantile_msgdat(psfcout,prbs)
    str1 = '%.2f PSfc Quantile: %.2f' % (prbs[103],psfcqs[103])
    print(str1)
    zpsfcout = calculate_VPD.std_norm_quantile_from_obs(psfcout, psfcqs, prbs, msgval=-9999.)

    sftpqs = calculate_VPD.quantile_msgdat(sftmpout,prbs)
    str1 = '%.2f SfcTmp Quantile: %.2f' % (prbs[103],sftpqs[103])
    print(str1)
    zsftmpout = calculate_VPD.std_norm_quantile_from_obs(sftmpout, sftpqs, prbs, msgval=-9999.)

    sfalqs = calculate_VPD.quantile_msgdat(sfaltout,prbs)
    str1 = '%.2f SfcAlt Quantile: %.2f' % (prbs[103],sfalqs[103])
    print(str1)
    zsfaltout = calculate_VPD.std_norm_quantile_from_obs(sfaltout, sfalqs, prbs, msgval=-9999.)

    # Output Quantiles
    # NOTE(review): k is the leftover loop index, so the file is named after
    # the LAST year in yrlst — confirm that is intended.
    qfnm = '%s/%s_US_JJA_%02dUTC_%04d_TempRHSfc_Quantile.nc' % (dtdr,rgchc,hrchc,yrlst[k])
    qout = Dataset(qfnm,'w')
    dimz = qout.createDimension('level',nzout)
    dimp = qout.createDimension('probability',nprb)

    varlvl = qout.createVariable('level','f4',['level'], fill_value = -9999)
    varlvl[:] = plev
    varlvl.long_name = 'AIRS/SARTA pressure levels'
    varlvl.units = 'hPa'
    varlvl.missing_value = -9999

    varprb = qout.createVariable('probability','f4',['probability'], fill_value = -9999)
    varprb[:] = prbs
    varprb.long_name = 'Probability break points'
    varprb.units = 'none'
    varprb.missing_value = -9999

    # Altitude grid
    varalt = qout.createVariable('Altitude_median', 'f4', ['level'], fill_value = -9999)
    varalt[:] = altmed
    varalt.long_name = 'Altitude median value'
    varalt.units = 'm'
    varalt.missing_value = -9999

    vartmp = qout.createVariable('Temperature_quantile', 'f4', ['level','probability'], fill_value = -9999)
    vartmp[:] = tmpqout
    vartmp.long_name = 'Temperature quantiles'
    vartmp.units = 'K'
    vartmp.missing_value = -9999.

    varrh = qout.createVariable('RH_quantile', 'f4', ['level','probability'], fill_value = -9999)
    varrh[:] = rhqout
    varrh.long_name = 'Relative humidity quantiles'
    varrh.units = 'Unitless'
    varrh.missing_value = -9999.

    varstmp = qout.createVariable('SfcTemp_quantile', 'f4', ['probability'], fill_value = -9999)
    varstmp[:] = sftpqs
    varstmp.long_name = 'Surface temperature quantiles'
    varstmp.units = 'K'
    varstmp.missing_value = -9999.

    varpsfc = qout.createVariable('SfcPres_quantile', 'f4', ['probability'], fill_value = -9999)
    varpsfc[:] = psfcqs
    varpsfc.long_name = 'Surface pressure quantiles'
    varpsfc.units = 'hPa'
    varpsfc.missing_value = -9999.

    varsalt = qout.createVariable('SfcAlt_quantile', 'f4', ['probability'], fill_value = -9999)
    varsalt[:] = sfalqs
    varsalt.long_name = 'Surface altitude quantiles'
    varsalt.units = 'm'
    varsalt.missing_value = -9999.

    qout.close()

    # Output transformed quantile samples
    zfnm = '%s/%s_US_JJA_%02dUTC_%04d_TempRHSfc_StdGausTrans.nc' % (dtdr,rgchc,hrchc,yrlst[k])
    zout = Dataset(zfnm,'w')
    dimz = zout.createDimension('level',nzout)
    dimsmp = zout.createDimension('sample',tsmp)

    varlvl = zout.createVariable('level','f4',['level'], fill_value = -9999)
    varlvl[:] = plev
    varlvl.long_name = 'AIRS/SARTA pressure levels'
    varlvl.units = 'hPa'
    varlvl.missing_value = -9999

    varlon = zout.createVariable('Longitude','f4',['sample'])
    varlon[:] = lonout
    varlon.long_name = 'Longitude'
    varlon.units = 'degrees_east'

    varlat = zout.createVariable('Latitude','f4',['sample'])
    varlat[:] = latout
    varlat.long_name = 'Latitude'
    varlat.units = 'degrees_north'

    varjdy = zout.createVariable('JulianDay','i2',['sample'])
    varjdy[:] = jdyout
    varjdy.long_name = 'JulianDay'
    varjdy.units = 'day'

    varyr = zout.createVariable('Year','i2',['sample'])
    varyr[:] = yrout
    varyr.long_name = 'Year'
    varyr.units = 'year'

    varsrt3 = zout.createVariable('Temperature_StdGaus', 'f4', ['sample','level'], fill_value = -9999)
    varsrt3[:] = ztmpout
    varsrt3.long_name = 'Quantile transformed temperature'
    varsrt3.units = 'None'
    varsrt3.missing_value = -9999.

    varsrt4 = zout.createVariable('RH_StdGaus', 'f4', ['sample','level'], fill_value = -9999)
    varsrt4[:] = zrhout
    varsrt4.long_name = 'Quantile transformed relative humidity'
    varsrt4.units = 'None'
    varsrt4.missing_value = -9999.

    varsrts1 = zout.createVariable('SfcTemp_StdGaus', 'f4', ['sample'], fill_value = -9999)
    varsrts1[:] = zsftmpout
    varsrts1.long_name = 'Quantile transformed surface temperature'
    varsrts1.units = 'None'
    varsrts1.missing_value = -9999.

    varsrts2 = zout.createVariable('SfcPres_StdGaus', 'f4', ['sample'], fill_value = -9999)
    varsrts2[:] = zpsfcout
    varsrts2.long_name = 'Quantile transformed surface pressure'
    varsrts2.units = 'None'
    varsrts2.missing_value = -9999.

    varsrts3 = zout.createVariable('SfcAlt_StdGaus', 'f4', ['sample'], fill_value = -9999)
    varsrts3[:] = zsfaltout
    # Fixed copy-paste error: this is surface altitude, not pressure
    varsrts3.long_name = 'Quantile transformed surface altitude'
    varsrts3.units = 'None'
    varsrts3.missing_value = -9999.

    zout.close()

    return
def expt_near_sfc_summary(inpdr, outdr, expfl, qclrfl, outfnm):
    """Produce near-surface summaries for a retrieval experiment.

    Reads true and retrieved temperature/water-vapor profiles from the
    experiment HDF5 file, derives near-surface air temperature, specific
    humidity (QV) and relative humidity (RH) with the calculate_VPD
    helpers, and writes the paired true/retrieved summaries plus QC
    flags to an HDF5 output file.

    Parameters
    ----------
    inpdr : str
        Name of input directory (currently unused in this routine).
    outdr : str
        Name of output directory (currently unused in this routine).
    expfl : str
        Name of file with experiment results.
    qclrfl : str
        Input quantile file providing median level altitudes.
    outfnm : str
        Output file name.
    """
    nzsrt = 101
    # Read simulation results; drop the lowest SARTA level (index 0) so
    # profiles align with the 100-level AIRS arrays.
    f = h5py.File(expfl,'r')
    tmprtr = f['airs_ptemp'][:,:]
    h2ortr = f['airs_h2o'][:,:]
    tqflg = f['airs_ptemp_qc'][:,:]
    hqflg = f['airs_h2o_qc'][:,:]
    tmpsrt = f['ptemp'][:,1:nzsrt]
    h2osrt = f['gas_1'][:,1:nzsrt]
    psfc = f['spres'][:]
    lvs = f['level'][1:nzsrt]
    f.close()
    nszout = tmprtr.shape[0]
    tqflg = tqflg.astype(numpy.int16)
    hqflg = hqflg.astype(numpy.int16)
    # Altitude info: median level altitudes from the quantile reference file
    qin = Dataset(qclrfl,'r')
    alts = qin['Altitude_median'][:]
    qin.close()
    # Build a per-sample altitude array. Working downward in index from
    # level nzsrt-4, keep the median altitude where it increases upward;
    # otherwise extrapolate by doubling the last positive increment.
    alth2o = numpy.zeros((nszout,nzsrt))
    alth2o[:,nzsrt-4] = alts[nzsrt-4]
    curdlt = 0.0
    for j in range(nzsrt-5,-1,-1):
        if (alts[j] > alts[j+1]):
            curdlt = alts[j] - alts[j+1]
            alth2o[:,j] = alts[j]
        else:
            alth2o[:,j] = alts[j+1] + curdlt * 2.0
            curdlt = curdlt * 2.0
    # NOTE(review): level index 97 is pinned to zero altitude here --
    # presumably the near-surface reference level; confirm against the
    # level definitions.
    alth2o[:,97] = 0.0
    # Near-surface temperature: truth without QC, retrieval with QC flags
    tsfcsrt = calculate_VPD.near_sfc_temp(tmpsrt, lvs, psfc, passqual = False, qual = None)
    print(tsfcsrt[0:10])
    tsfcrtr, tqflgsfc = calculate_VPD.near_sfc_temp(tmprtr, lvs, psfc, passqual = True, qual = tqflg)
    print(tsfcrtr[0:10])
    print(tqflgsfc[0:10])
    # Moisture diagnostics (VPD results are computed but not written out)
    qvsrt, rhsrt, vpdsrt = calculate_VPD.calculate_QV_and_VPD(h2osrt,tmpsrt,lvs,alth2o[:,1:nzsrt])
    qvrtr, rhrtr, vpdrtr = calculate_VPD.calculate_QV_and_VPD(h2ortr,tmprtr,lvs,alth2o[:,1:nzsrt])
    qsfsrt, rhsfsrt = calculate_VPD.near_sfc_qv_rh(qvsrt, tsfcsrt, lvs, psfc, passqual = False, qual = None)
    qsfrtr, rhsfrtr, qflgsfc = calculate_VPD.near_sfc_qv_rh(qvrtr, tsfcrtr, lvs, psfc, passqual = True, qual = hqflg)
    print(tqflgsfc.dtype)
    print(qflgsfc.dtype)
    # Output: Sfc Temp and qflg, Sfc QV, RH and qflg
    fldbl = numpy.array([-9999.],dtype=numpy.float64)
    f = h5py.File(outfnm,'w')
    dft1 = f.create_dataset('TSfcAir_True',data=tsfcsrt)
    dft1.attrs['missing_value'] = fldbl
    dft1.attrs['_FillValue'] = fldbl
    dft2 = f.create_dataset('TSfcAir_Retrieved',data=tsfcrtr)
    dft2.attrs['missing_value'] = fldbl
    dft2.attrs['_FillValue'] = fldbl
    dft3 = f.create_dataset('TSfcAir_QC',data=tqflgsfc)
    dfq1 = f.create_dataset('QVSfcAir_True',data=qsfsrt)
    dfq1.attrs['missing_value'] = fldbl
    dfq1.attrs['_FillValue'] = fldbl
    dfq2 = f.create_dataset('QVSfcAir_Retrieved',data=qsfrtr)
    dfq2.attrs['missing_value'] = fldbl
    dfq2.attrs['_FillValue'] = fldbl
    dfq3 = f.create_dataset('RHSfcAir_True',data=rhsfsrt)
    dfq3.attrs['missing_value'] = fldbl
    dfq3.attrs['_FillValue'] = fldbl
    dfq4 = f.create_dataset('RHSfcAir_Retrieved',data=rhsfrtr)
    dfq4.attrs['missing_value'] = fldbl
    dfq4.attrs['_FillValue'] = fldbl
    dfq5 = f.create_dataset('RHSfcAir_QC',data=qflgsfc)
    dfp1 = f.create_dataset('SfcPres',data=psfc)
    dfp1.attrs['missing_value'] = fldbl
    dfp1.attrs['_FillValue'] = fldbl
    f.close()
    return
def quantile_cfrac_locmask_conus(rfdr, mtdr, csdr, airdr, dtdr, yrlst, mnst, mnfn, hrchc, rgchc, mskvr, mskvl):
    """Construct cloud variable quantiles and z-scores over a location mask.

    Accumulates MERRA/cloud-slab/AIRS cloud variables within a (possibly
    irregular) region mask over the requested years, applies logit
    transforms to cloud pressure and cloud fraction variables, computes
    quantiles at the reference probability levels, and writes two NetCDF
    files: the quantiles and the standard-normal (z-score) transformed
    samples.

    Parameters
    ----------
    rfdr : str    Directory for reference data (Levels/Quantiles)
    mtdr : str    Directory for MERRA data
    csdr : str    Directory for cloud slab data
    airdr : str   Directory for AIRS cloud fraction
    dtdr : str    Output directory
    yrlst : list  List of years to process
    mnst : int    Starting month
    mnfn : int    Ending month
    hrchc : int   Template hour choice
    rgchc : str   Template region choice
    mskvr : str   Name of region mask variable
    mskvl :       Value of region mask for the chosen region
    """
    # Read probs and pressure levels
    rnm = '%s/AIRS_Levels_Quantiles.nc' % (rfdr)
    f = Dataset(rnm,'r')
    plev = f['level'][:]
    prbs = f['probability'][:]
    alts = f['altitude'][:]
    f.close()
    nyr = len(yrlst)
    nprb = prbs.shape[0]
    # RN generator: seed depends on first year and hour for reproducibility
    sdchc = 542354 + yrlst[0] + hrchc
    random.seed(sdchc)
    # Mask, lat, lon
    fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[0],hrchc)
    f = Dataset(fnm,'r')
    mask = f.variables[mskvr][:,:]
    latmet = f.variables['plat'][:]
    lonmet = f.variables['plon'][:]
    tminf = f.variables['time'][:]
    tmunit = f.variables['time'].units[:]
    f.close()
    mskind = numpy.zeros((mask.shape),dtype=mask.dtype)
    print(mskvl)
    mskind[mask == mskvl] = 1
    lnsq = numpy.arange(lonmet.shape[0])
    ltsq = numpy.arange(latmet.shape[0])
    # Subset to the bounding box of the mask
    lnsm = numpy.sum(mskind,axis=0)
    ltsm = numpy.sum(mskind,axis=1)
    lnmn = numpy.amin(lnsq[lnsm > 0])
    lnmx = numpy.amax(lnsq[lnsm > 0]) + 1
    ltmn = numpy.amin(ltsq[ltsm > 0])
    ltmx = numpy.amax(ltsq[ltsm > 0]) + 1
    stridx = 'Lon Range: %d, %d\nLat Range: %d, %d \n' % (lnmn,lnmx,ltmn,ltmx)
    print(stridx)
    nx = lnmx - lnmn
    ny = ltmx - ltmn
    lnrp = numpy.tile(lonmet[lnmn:lnmx],ny)
    ltrp = numpy.repeat(latmet[ltmn:ltmx],nx)
    mskblk = mskind[ltmn:ltmx,lnmn:lnmx]
    mskflt = mskblk.flatten()
    tsmp = 0
    for k in range(nyr):
        # Date/time bookkeeping for this year
        fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = Dataset(fnm,'r')
        tminf = f.variables['time'][:]
        tmunit = f.variables['time'].units[:]
        f.close()
        tmunit = tmunit.replace("days since ","")
        dybs = datetime.datetime.strptime(tmunit,"%Y-%m-%d %H:%M:%S")
        print(dybs)
        dy0 = dybs + datetime.timedelta(days=tminf[0])
        dyinit = datetime.date(dy0.year,dy0.month,dy0.day)
        print(dyinit)
        dyst = datetime.date(yrlst[k],mnst,1)
        ttst = dyst.timetuple()
        jst = ttst.tm_yday
        if mnfn < 12:
            dyfn = datetime.date(yrlst[k],mnfn+1,1)
            ttfn = dyfn.timetuple()
            jfn = ttfn.tm_yday
        else:
            # Ending month is December: stop at Jan 1 of the next year
            dyfn = datetime.date(yrlst[k]+1,1,1)
            dy31 = datetime.date(yrlst[k],12,31)
            tt31 = dy31.timetuple()
            jfn = tt31.tm_yday + 1
        dystidx = abs((dyst-dyinit).days)
        dyfnidx = abs((dyfn-dyinit).days)
        jdsq = numpy.arange(jst,jfn)
        print(jdsq)
        tmhld = numpy.repeat(jdsq,nx*ny)
        stridx = 'Day Range: %d, %d\n' % (dystidx,dyfnidx)
        print(stridx)
        # Cloud slab parameters, subset to the mask bounding box
        fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing_IncludesCloudParams.h5' % (csdr,yrlst[k],hrchc)
        f = h5py.File(fnm,'r')
        tms = f['/time'][:,dystidx:dyfnidx]
        ctyp1 = f['/ctype'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        ctyp2 = f['/ctype2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprt1 = f['/cprtop'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprt2 = f['/cprtop2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprb1 = f['/cprbot'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cprb2 = f['/cprbot2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cfrc1 = f['/cfrac'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cfrc2 = f['/cfrac2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cfrc12 = f['/cfrac12'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cngwt1 = f['/cngwat'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cngwt2 = f['/cngwat2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cttp1 = f['/cstemp'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        cttp2 = f['/cstemp2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        f.close()
        tmflt = tms.flatten()
        nt = tmflt.shape[0]
        lnhld = numpy.tile(lnrp,nt)
        lthld = numpy.tile(ltrp,nt)
        # Surface pressure from MERRA
        mtnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = Dataset(mtnm,'r')
        psfc = f.variables['spres'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        f.close()
        nt = ctyp1.shape[0]
        mskall = numpy.tile(mskflt,nt)
        msksq = numpy.arange(mskall.shape[0])
        msksb = msksq[mskall > 0]
        mskstr = 'Total Obs: %d, Within Mask: %d \n' % (msksq.shape[0],msksb.shape[0])
        print(mskstr)
        # Number of cloud slabs: ctype > 100 indicates a valid slab
        nslbtmp = numpy.zeros((ctyp1.shape),dtype=numpy.int16)
        nslbtmp[(ctyp1 > 100) & (ctyp2 > 100)] = 2
        nslbtmp[(ctyp1 > 100) & (ctyp2 < 100)] = 1
        # AIRS clouds
        anm = '%s/CONUS_AIRS_CldFrc_Match_JJA_%d_%02d_UTC.nc' % (airdr,yrlst[k],hrchc)
        f = Dataset(anm,'r')
        arsfrc1 = f.variables['AIRS_CldFrac_1'][:,dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        arsfrc2 = f.variables['AIRS_CldFrac_2'][:,dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        f.close()
        # Sum
        frctot = arsfrc1 + arsfrc2
        # Construct Clr/PC/Ovc indicator for AIRS total cloud frac
        totclr = numpy.zeros(frctot.shape,dtype=numpy.int16)
        totclr[frctot == 0.0] = -1
        totclr[frctot == 1.0] = 1
        totclr = ma.masked_array(totclr, mask = frctot.mask)
        frc0 = frctot[0,:,:,:]
        frc0 = frc0.flatten()
        frcsq = numpy.arange(tmhld.shape[0])
        # Subset by AIRS matchup and location masks
        frcsb = frcsq[(numpy.logical_not(frc0.mask)) & (mskall > 0)]
        nairs = frcsb.shape[0]
        print(tmhld.shape)
        print(frcsb.shape)
        ctyp1 = ctyp1.flatten()
        ctyp2 = ctyp2.flatten()
        nslbtmp = nslbtmp.flatten()
        cngwt1 = cngwt1.flatten()
        cngwt2 = cngwt2.flatten()
        cttp1 = cttp1.flatten()
        cttp2 = cttp2.flatten()
        psfc = psfc.flatten()
        # Number of slabs
        if tsmp == 0:
            nslabout = numpy.zeros((nairs,),dtype=numpy.int16)
            nslabout[:] = nslbtmp[frcsb]
        else:
            nslabout = numpy.append(nslabout,nslbtmp[frcsb])
        # For two slabs, slab 1 must have highest cloud bottom pressure
        cprt1 = cprt1.flatten()
        cprt2 = cprt2.flatten()
        cprb1 = cprb1.flatten()
        cprb2 = cprb2.flatten()
        slabswap = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16)
        swpsq = frcsq[(nslbtmp == 2) & (cprb1 < cprb2)]
        slabswap[swpsq] = 1
        # Cloud Pressure variables (swap slab 1/2 where flagged)
        pbttmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        pbttmp1[nslbtmp >= 1] = cprb1[nslbtmp >= 1]
        pbttmp1[swpsq] = cprb2[swpsq]
        ptptmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        ptptmp1[nslbtmp >= 1] = cprt1[nslbtmp >= 1]
        ptptmp1[swpsq] = cprt2[swpsq]
        pbttmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        pbttmp2[nslbtmp == 2] = cprb2[nslbtmp == 2]
        pbttmp2[swpsq] = cprb1[swpsq]
        ptptmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        ptptmp2[nslbtmp == 2] = cprt2[nslbtmp == 2]
        ptptmp2[swpsq] = cprt1[swpsq]
        # DP Cloud transformation: slab depths and inter-slab separation
        dptmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        dptmp1[nslbtmp >= 1] = pbttmp1[nslbtmp >= 1] - ptptmp1[nslbtmp >= 1]
        dpslbtmp = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        dpslbtmp[nslbtmp == 2] = ptptmp1[nslbtmp == 2] - pbttmp2[nslbtmp == 2]
        dptmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
        dptmp2[nslbtmp == 2] = pbttmp2[nslbtmp == 2] - ptptmp2[nslbtmp == 2]
        # Adjust negative DPSlab values: force a 1 hPa separation and
        # split the absorbed depth between the two slabs
        dpnsq = frcsq[(nslbtmp == 2) & (dpslbtmp < 0.0) & (dpslbtmp > -1000.0)]
        dpadj = numpy.zeros((ctyp1.shape[0],))
        dpadj[dpnsq] = numpy.absolute(dpslbtmp[dpnsq])
        dpslbtmp[dpnsq] = 1.0
        dptmp1[dpnsq] = dptmp1[dpnsq] / 2.0
        dptmp2[dpnsq] = dptmp2[dpnsq] / 2.0
        # Sigma / Logit Adjustments: express pressures as proportions of
        # the surface pressure and logit-transform them
        zpbtmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        zdp1tmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        zdslbtmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        zdp2tmp = numpy.zeros((psfc.shape[0],)) - 9999.0
        ncldct = 0
        for t in range(psfc.shape[0]):
            if ( (pbttmp1[t] >= 0.0) and (dpslbtmp[t] >= 0.0) ):
                # Two slabs present
                prptmp = numpy.array( [ (psfc[t] - pbttmp1[t]) / psfc[t], \
                                        dptmp1[t] / psfc[t], dpslbtmp[t] / psfc[t], \
                                        dptmp2[t] / psfc[t], 0.0 ] )
                if (prptmp[0] < 0.0):
                    # Adjustment needed: cloud bottom below surface
                    prpadj = prptmp[0]
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    prptmp[2] = prptmp[2] + prpadj*prptmp[2]
                    prptmp[3] = prptmp[3] + prpadj*prptmp[3]
                    ncldct = ncldct + 1
                elif (prptmp[0] == 0.0):
                    # Adjustment needed: cloud bottom exactly at surface
                    prpadj = -0.01
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    prptmp[2] = prptmp[2] + prpadj*prptmp[2]
                    prptmp[3] = prptmp[3] + prpadj*prptmp[3]
                    ncldct = ncldct + 1
                prptmp[4] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2] - prptmp[3]
                ztmp = calculate_VPD.lgtzs(prptmp)
                zpbtmp[t] = ztmp[0]
                zdp1tmp[t] = ztmp[1]
                zdslbtmp[t] = ztmp[2]
                zdp2tmp[t] = ztmp[3]
            elif ( pbttmp1[t] >= 0.0 ):
                # Single slab present
                prptmp = numpy.array( [ (psfc[t] - pbttmp1[t]) / psfc[t], \
                                        dptmp1[t] / psfc[t], 0.0 ] )
                if (prptmp[0] < 0.0):
                    # Adjustment needed: cloud bottom below surface
                    prpadj = prptmp[0]
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    ncldct = ncldct + 1
                elif (prptmp[0] == 0.0):
                    # Adjustment needed: cloud bottom exactly at surface
                    prpadj = -0.01
                    prptmp[0] = 0.01
                    prptmp[1] = prptmp[1] + prpadj*prptmp[1]
                    ncldct = ncldct + 1
                prptmp[2] = 1.0 - prptmp[0] - prptmp[1]
                ztmp = calculate_VPD.lgtzs(prptmp)
                zpbtmp[t] = ztmp[0]
                zdp1tmp[t] = ztmp[1]
                zdslbtmp[t] = -9999.0
                zdp2tmp[t] = -9999.0
            else:
                # No slabs: all missing
                zpbtmp[t] = -9999.0
                zdp1tmp[t] = -9999.0
                zdslbtmp[t] = -9999.0
                zdp2tmp[t] = -9999.0
        str1 = 'Cloud Bot Pres Below Sfc: %d ' % (ncldct)
        print(str1)
        if tsmp == 0:
            psfcout = numpy.zeros((frcsb.shape[0],)) - 9999.0
            psfcout[:] = psfc[frcsb]
            prsbot1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            prsbot1out[:] = zpbtmp[frcsb]
            dpcld1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            dpcld1out[:] = zdp1tmp[frcsb]
            dpslbout = numpy.zeros((frcsb.shape[0],)) - 9999.0
            dpslbout[:] = zdslbtmp[frcsb]
            dpcld2out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            dpcld2out[:] = zdp2tmp[frcsb]
        else:
            psfcout = numpy.append(psfcout,psfc[frcsb])
            prsbot1out = numpy.append(prsbot1out,zpbtmp[frcsb])
            dpcld1out = numpy.append(dpcld1out,zdp1tmp[frcsb])
            dpslbout = numpy.append(dpslbout,zdslbtmp[frcsb])
            dpcld2out = numpy.append(dpcld2out,zdp2tmp[frcsb])
        # Slab Types: 101.0 = Liquid, 201.0 = Ice, None else
        # Output: 0 = Liquid, 1 = Ice
        typtmp1 = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16) - 99
        typtmp1[nslbtmp >= 1] = (ctyp1[nslbtmp >= 1] - 1.0) / 100.0 - 1.0
        typtmp1[swpsq] = (ctyp2[swpsq] - 1.0) / 100.0 - 1.0
        typtmp2 = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16) - 99
        typtmp2[nslbtmp == 2] = (ctyp2[nslbtmp == 2] - 1.0) / 100.0 - 1.0
        typtmp2[swpsq] = (ctyp1[swpsq] - 1.0) / 100.0 - 1.0
        if tsmp == 0:
            slbtyp1out = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
            slbtyp1out[:] = typtmp1[frcsb]
            slbtyp2out = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
            slbtyp2out[:] = typtmp2[frcsb]
        else:
            slbtyp1out = numpy.append(slbtyp1out,typtmp1[frcsb])
            slbtyp2out = numpy.append(slbtyp2out,typtmp2[frcsb])
        # Cloud Cover Indicators over the 3x3 AIRS FOV grid
        totclrtmp = numpy.zeros((frcsb.shape[0],3,3),dtype=numpy.int16)
        cctr = 0
        for frw in range(3):
            for fcl in range(3):
                clrvec = totclr[cctr,:,:,:].flatten()
                totclrtmp[:,frw,fcl] = clrvec[frcsb]
                cctr = cctr + 1
        if tsmp == 0:
            totclrout = numpy.zeros(totclrtmp.shape,dtype=numpy.int16)
            totclrout[:,:,:] = totclrtmp
        else:
            totclrout = numpy.append(totclrout,totclrtmp,axis=0)
        # Cloud Fraction Logit, still account for swapping
        z1tmp = numpy.zeros((frcsb.shape[0],3,3)) - 9999.0
        z2tmp = numpy.zeros((frcsb.shape[0],3,3)) - 9999.0
        z12tmp = numpy.zeros((frcsb.shape[0],3,3)) - 9999.0
        # Cloud Fraction
        cctr = 0
        for frw in range(3):
            for fcl in range(3):
                frcvect = frctot[cctr,:,:,:].flatten()
                frcvec1 = arsfrc1[cctr,:,:,:].flatten()
                frcvec2 = arsfrc2[cctr,:,:,:].flatten()
                # Quick fix for totals over 1.0
                # NOTE(review): dividing by 1.0 is a no-op, so this
                # "fix" does nothing; it was presumably meant to
                # renormalize by the total (frcvect[fvsq2]). Confirm the
                # intent before changing behavior.
                fvsq = numpy.arange(frcvect.shape[0])
                fvsq2 = fvsq[frcvect > 1.0]
                frcvect[fvsq2] = frcvect[fvsq2] / 1.0
                frcvec1[fvsq2] = frcvec1[fvsq2] / 1.0
                frcvec2[fvsq2] = frcvec2[fvsq2] / 1.0
                for t in range(nairs):
                    crslb = nslbtmp[frcsb[t]]
                    crclr = totclrtmp[t,frw,fcl]
                    if ( (crslb == 0) or (crclr == -1) ):
                        z1tmp[t,frw,fcl] = -9999.0
                        z2tmp[t,frw,fcl] = -9999.0
                        z12tmp[t,frw,fcl] = -9999.0
                    elif ( (crslb == 1) and (crclr == 1) ):
                        z1tmp[t,frw,fcl] = -9999.0
                        z2tmp[t,frw,fcl] = -9999.0
                        z12tmp[t,frw,fcl] = -9999.0
                    elif ( (crslb == 1) and (crclr == 0) ):
                        prptmp = numpy.array( [frcvect[frcsb[t]], 1.0 - frcvect[frcsb[t]] ] )
                        ztmp = calculate_VPD.lgtzs(prptmp)
                        z1tmp[t,frw,fcl] = ztmp[0]
                        z2tmp[t,frw,fcl] = -9999.0
                        z12tmp[t,frw,fcl] = -9999.0
                    # For 2 slabs, recall AIRS cloud layers go upper/lower, ours is opposite
                    # Also apply random overlap adjust AIRS zero values
                    elif ( (crslb == 2) and (crclr == 0) ):
                        frcs = numpy.array([frcvec2[frcsb[t]],frcvec1[frcsb[t]]])
                        if (numpy.sum(frcs) < 0.01):
                            frcs[0] = 0.005
                            frcs[1] = 0.005
                        elif frcs[0] < 0.005:
                            frcs[0] = 0.005
                            frcs[1] = frcs[1] - 0.005
                        elif frcs[1] < 0.005:
                            frcs[1] = 0.005
                            frcs[0] = frcs[0] - 0.005
                        mnfrc = numpy.amin(frcs)
                        c12tmp = random.uniform(0.0,mnfrc,size=1)
                        prptmp = numpy.array( [frcs[0] - c12tmp[0]*frcs[1], \
                                               frcs[1] - c12tmp[0]*frcs[0], c12tmp[0], 0.0])
                        prptmp[3] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2]
                        ztmp = calculate_VPD.lgtzs(prptmp)
                        z1tmp[t,frw,fcl] = ztmp[0]
                        z2tmp[t,frw,fcl] = ztmp[1]
                        z12tmp[t,frw,fcl] = ztmp[2]
                    elif ( (crslb == 2) and (crclr == 1) ):
                        frcs = numpy.array([frcvec2[frcsb[t]],frcvec1[frcsb[t]]])
                        if frcs[0] < 0.005:
                            frcs[0] = 0.005
                            frcs[1] = frcs[1] - 0.005
                        elif frcs[1] < 0.005:
                            frcs[1] = 0.005
                            frcs[0] = frcs[0] - 0.005
                        mnfrc = numpy.amin(frcs)
                        c12tmp = random.uniform(0.0,mnfrc,size=1)
                        # Scale by 0.999 so the complement stays positive
                        prptmp = numpy.array( [0.999 * (frcs[0] - c12tmp[0]*frcs[1]), \
                                               0.999 * (frcs[1] - c12tmp[0]*frcs[0]), 0.999 * c12tmp[0], 0.001])
                        prptmp[3] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2]
                        ztmp = calculate_VPD.lgtzs(prptmp)
                        z1tmp[t,frw,fcl] = ztmp[0]
                        z2tmp[t,frw,fcl] = ztmp[1]
                        z12tmp[t,frw,fcl] = ztmp[2]
                cctr = cctr + 1
        if tsmp == 0:
            cfclgt1out = numpy.zeros(z1tmp.shape)
            cfclgt1out[:,:,:] = z1tmp
            cfclgt2out = numpy.zeros(z2tmp.shape)
            cfclgt2out[:,:,:] = z2tmp
            cfclgt12out = numpy.zeros(z12tmp.shape)
            cfclgt12out[:,:,:] = z12tmp
        else:
            cfclgt1out = numpy.append(cfclgt1out,z1tmp,axis=0)
            cfclgt2out = numpy.append(cfclgt2out,z2tmp,axis=0)
            cfclgt12out = numpy.append(cfclgt12out,z12tmp,axis=0)
        # Cloud Non-Gas Water
        ngwttmp1 = numpy.zeros(cngwt1.shape[0]) - 9999.0
        ngwttmp1[nslbtmp >= 1] = cngwt1[nslbtmp >= 1]
        ngwttmp1[swpsq] = cngwt2[swpsq]
        ngwttmp2 = numpy.zeros(cngwt1.shape[0]) - 9999.0
        ngwttmp2[nslbtmp == 2] = cngwt2[nslbtmp == 2]
        ngwttmp2[swpsq] = cngwt1[swpsq]
        if tsmp == 0:
            ngwt1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            ngwt1out[:] = ngwttmp1[frcsb]
            ngwt2out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            ngwt2out[:] = ngwttmp2[frcsb]
        else:
            ngwt1out = numpy.append(ngwt1out,ngwttmp1[frcsb])
            ngwt2out = numpy.append(ngwt2out,ngwttmp2[frcsb])
        # Cloud Top Temperature
        cttptmp1 = numpy.zeros(cttp1.shape[0]) - 9999.0
        cttptmp1[nslbtmp >= 1] = cttp1[nslbtmp >= 1]
        cttptmp1[swpsq] = cttp2[swpsq]
        cttptmp2 = numpy.zeros(cttp1.shape[0]) - 9999.0
        cttptmp2[nslbtmp == 2] = cttp2[nslbtmp == 2]
        cttptmp2[swpsq] = cttp1[swpsq]
        if tsmp == 0:
            cttp1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            cttp1out[:] = cttptmp1[frcsb]
            cttp2out = numpy.zeros((frcsb.shape[0],)) - 9999.0
            cttp2out[:] = cttptmp2[frcsb]
        else:
            cttp1out = numpy.append(cttp1out,cttptmp1[frcsb])
            cttp2out = numpy.append(cttp2out,cttptmp2[frcsb])
        # Loc/Time
        if tsmp == 0:
            latout = numpy.zeros((frcsb.shape[0],)) - 9999.0
            latout[:] = lthld[frcsb]
            lonout = numpy.zeros((frcsb.shape[0],)) - 9999.0
            lonout[:] = lnhld[frcsb]
            yrout = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
            yrout[:] = yrlst[k]
            jdyout = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
            jdyout[:] = tmhld[frcsb]
        else:
            latout = numpy.append(latout,lthld[frcsb])
            lonout = numpy.append(lonout,lnhld[frcsb])
            yrtmp = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
            yrtmp[:] = yrlst[k]
            yrout = numpy.append(yrout,yrtmp)
            jdyout = numpy.append(jdyout,tmhld[frcsb])
        tsmp = tsmp + nairs
    # Process quantiles over the accumulated samples
    nslbqs = calculate_VPD.quantile_msgdat_discrete(nslabout,prbs)
    str1 = '%.2f Number Slab Quantile: %d' % (prbs[103],nslbqs[103])
    print(str1)
    print(nslbqs)
    prsbt1qs = calculate_VPD.quantile_msgdat(prsbot1out,prbs)
    str1 = '%.2f CldBot1 Pressure Quantile: %.3f' % (prbs[103],prsbt1qs[103])
    print(str1)
    dpcld1qs = calculate_VPD.quantile_msgdat(dpcld1out,prbs)
    str1 = '%.2f DPCloud1 Quantile: %.3f' % (prbs[103],dpcld1qs[103])
    print(str1)
    dpslbqs = calculate_VPD.quantile_msgdat(dpslbout,prbs)
    str1 = '%.2f DPSlab Quantile: %.3f' % (prbs[103],dpslbqs[103])
    print(str1)
    dpcld2qs = calculate_VPD.quantile_msgdat(dpcld2out,prbs)
    str1 = '%.2f DPCloud2 Quantile: %.3f' % (prbs[103],dpcld2qs[103])
    print(str1)
    slb1qs = calculate_VPD.quantile_msgdat_discrete(slbtyp1out,prbs)
    str1 = '%.2f Type1 Quantile: %d' % (prbs[103],slb1qs[103])
    print(str1)
    slb2qs = calculate_VPD.quantile_msgdat_discrete(slbtyp2out,prbs)
    str1 = '%.2f Type2 Quantile: %d' % (prbs[103],slb2qs[103])
    print(str1)
    # Indicators: per-FOV quantiles for the 3x3 grid
    totclrqout = numpy.zeros((3,3,nprb)) - 99
    lgt1qs = numpy.zeros((3,3,nprb)) - 9999.0
    lgt2qs = numpy.zeros((3,3,nprb)) - 9999.0
    lgt12qs = numpy.zeros((3,3,nprb)) - 9999.0
    for frw in range(3):
        for fcl in range(3):
            tmpclr = calculate_VPD.quantile_msgdat_discrete(totclrout[:,frw,fcl],prbs)
            totclrqout[frw,fcl,:] = tmpclr[:]
            str1 = 'Clr/Ovc Indicator %d, %d %.2f Quantile: %d' % (frw,fcl,prbs[103],tmpclr[103])
            print(str1)
            tmplgtq = calculate_VPD.quantile_msgdat(cfclgt1out[:,frw,fcl],prbs)
            lgt1qs[frw,fcl,:] = tmplgtq[:]
            tmplgtq = calculate_VPD.quantile_msgdat(cfclgt2out[:,frw,fcl],prbs)
            lgt2qs[frw,fcl,:] = tmplgtq[:]
            tmplgtq = calculate_VPD.quantile_msgdat(cfclgt12out[:,frw,fcl],prbs)
            lgt12qs[frw,fcl,:] = tmplgtq[:]
            str1 = 'CFrac Logit %d, %d %.2f Quantile: %.3f, %.3f, %.3f' % (frw,fcl,prbs[103], \
                   lgt1qs[frw,fcl,103],lgt2qs[frw,fcl,103],lgt12qs[frw,fcl,103])
            print(str1)
    ngwt1qs = calculate_VPD.quantile_msgdat(ngwt1out,prbs)
    str1 = '%.2f NGWater1 Quantile: %.3f' % (prbs[103],ngwt1qs[103])
    print(str1)
    ngwt2qs = calculate_VPD.quantile_msgdat(ngwt2out,prbs)
    str1 = '%.2f NGWater2 Quantile: %.3f' % (prbs[103],ngwt2qs[103])
    print(str1)
    cttp1qs = calculate_VPD.quantile_msgdat(cttp1out,prbs)
    str1 = '%.2f CTTemp1 Quantile: %.3f' % (prbs[103],cttp1qs[103])
    print(str1)
    cttp2qs = calculate_VPD.quantile_msgdat(cttp2out,prbs)
    str1 = '%.2f CTTemp2 Quantile: %.3f' % (prbs[103],cttp2qs[103])
    print(str1)
    # Output Quantiles
    # NOTE(review): yrlst[k] below relies on k leaking from the year
    # loop above, so the filename reflects the last year processed.
    qfnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_Cloud_Quantile.nc' % (dtdr,yrlst[k],hrchc,rgchc)
    qout = Dataset(qfnm,'w')
    dimp = qout.createDimension('probability',nprb)
    dimfov1 = qout.createDimension('fovrow',3)
    dimfov2 = qout.createDimension('fovcol',3)
    varprb = qout.createVariable('probability','f4',['probability'], fill_value = -9999)
    varprb[:] = prbs
    varprb.long_name = 'Probability break points'
    varprb.units = 'none'
    varprb.missing_value = -9999
    varnslb = qout.createVariable('NumberSlab_quantile','i2',['probability'], fill_value = -99)
    varnslb[:] = nslbqs
    varnslb.long_name = 'Number of cloud slabs quantiles'
    varnslb.units = 'Count'
    varnslb.missing_value = -99
    varcbprs = qout.createVariable('CloudBot1Logit_quantile','f4',['probability'], fill_value = -9999)
    varcbprs[:] = prsbt1qs
    varcbprs.long_name = 'Slab 1 cloud bottom pressure logit quantiles'
    varcbprs.units = 'hPa'
    varcbprs.missing_value = -9999
    vardpc1 = qout.createVariable('DPCloud1Logit_quantile','f4',['probability'], fill_value = -9999)
    vardpc1[:] = dpcld1qs
    vardpc1.long_name = 'Slab 1 cloud pressure depth logit quantiles'
    vardpc1.units = 'hPa'
    vardpc1.missing_value = -9999
    vardpslb = qout.createVariable('DPSlabLogit_quantile','f4',['probability'], fill_value = -9999)
    vardpslb[:] = dpslbqs
    vardpslb.long_name = 'Two-slab vertical separation logit quantiles'
    vardpslb.units = 'hPa'
    vardpslb.missing_value = -9999
    vardpc2 = qout.createVariable('DPCloud2Logit_quantile','f4',['probability'], fill_value = -9999)
    vardpc2[:] = dpcld2qs
    vardpc2.long_name = 'Slab 2 cloud pressure depth logit quantiles'
    vardpc2.units = 'hPa'
    vardpc2.missing_value = -9999
    vartyp1 = qout.createVariable('CType1_quantile','i2',['probability'], fill_value = -99)
    vartyp1[:] = slb1qs
    vartyp1.long_name = 'Slab 1 cloud type quantiles'
    vartyp1.units = 'None'
    vartyp1.missing_value = -99
    vartyp1.comment = 'Cloud slab type: 0=Liquid, 1=Ice'
    vartyp2 = qout.createVariable('CType2_quantile','i2',['probability'], fill_value = -99)
    vartyp2[:] = slb2qs
    vartyp2.long_name = 'Slab 2 cloud type quantiles'
    vartyp2.units = 'None'
    vartyp2.missing_value = -99
    vartyp2.comment = 'Cloud slab type: 0=Liquid, 1=Ice'
    # Fix: fill_value was +99, inconsistent with the -99 fill used in
    # totclrqout and the declared missing_value below.
    varcvr = qout.createVariable('CCoverInd_quantile','i2',['fovrow','fovcol','probability'], fill_value = -99)
    varcvr[:] = totclrqout
    varcvr.long_name = 'Cloud cover indicator quantiles'
    varcvr.units = 'None'
    varcvr.missing_value = -99
    varcvr.comment = 'Cloud cover indicators: -1=Clear, 0=Partly cloudy, 1=Overcast'
    varlgt1 = qout.createVariable('CFrcLogit1_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
    varlgt1[:] = lgt1qs
    varlgt1.long_name = 'Slab 1 cloud fraction (cfrac1x) logit quantiles'
    varlgt1.units = 'None'
    varlgt1.missing_value = -9999
    varlgt2 = qout.createVariable('CFrcLogit2_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
    varlgt2[:] = lgt2qs
    varlgt2.long_name = 'Slab 2 cloud fraction (cfrac2x) logit quantiles'
    varlgt2.units = 'None'
    varlgt2.missing_value = -9999
    varlgt12 = qout.createVariable('CFrcLogit12_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
    varlgt12[:] = lgt12qs
    varlgt12.long_name = 'Slab 1/2 overlap fraction (cfrac12) logit quantiles'
    varlgt12.units = 'None'
    varlgt12.missing_value = -9999
    varngwt1 = qout.createVariable('NGWater1_quantile','f4',['probability'], fill_value = -9999)
    varngwt1[:] = ngwt1qs
    varngwt1.long_name = 'Slab 1 cloud non-gas water quantiles'
    varngwt1.units = 'g m^-2'
    varngwt1.missing_value = -9999
    varngwt2 = qout.createVariable('NGWater2_quantile','f4',['probability'], fill_value = -9999)
    varngwt2[:] = ngwt2qs
    varngwt2.long_name = 'Slab 2 cloud non-gas water quantiles'
    varngwt2.units = 'g m^-2'
    varngwt2.missing_value = -9999
    varcttp1 = qout.createVariable('CTTemp1_quantile','f4',['probability'], fill_value = -9999)
    varcttp1[:] = cttp1qs
    varcttp1.long_name = 'Slab 1 cloud top temperature'
    varcttp1.units = 'K'
    varcttp1.missing_value = -9999
    varcttp2 = qout.createVariable('CTTemp2_quantile','f4',['probability'], fill_value = -9999)
    varcttp2[:] = cttp2qs
    varcttp2.long_name = 'Slab 2 cloud top temperature'
    varcttp2.units = 'K'
    varcttp2.missing_value = -9999
    qout.close()
    # Set up transformations to standard normal z-scores
    zccvout = numpy.zeros((tsmp,3,3,)) - 9999.
    zlgt1 = numpy.zeros((tsmp,3,3)) - 9999.
    zlgt2 = numpy.zeros((tsmp,3,3)) - 9999.
    zlgt12 = numpy.zeros((tsmp,3,3)) - 9999.
    znslb = calculate_VPD.std_norm_quantile_from_obs(nslabout, nslbqs, prbs, msgval=-99)
    zprsbt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(prsbot1out, prsbt1qs, prbs, msgval=-9999.)
    zdpcld1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpcld1out, dpcld1qs, prbs, msgval=-9999.)
    zdpslb = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpslbout, dpslbqs, prbs, msgval=-9999.)
    zdpcld2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpcld2out, dpcld2qs, prbs, msgval=-9999.)
    zctyp1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(slbtyp1out, slb1qs, prbs, msgval=-99)
    zctyp2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(slbtyp2out, slb2qs, prbs, msgval=-99)
    for frw in range(3):
        for fcl in range(3):
            ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(totclrout[:,frw,fcl], totclrqout[frw,fcl,:], \
                                                                     prbs, msgval=-99)
            zccvout[:,frw,fcl] = ztmp[:]
            ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt1out[:,frw,fcl], lgt1qs[frw,fcl,:], \
                                                                     prbs, msgval=-9999.)
            zlgt1[:,frw,fcl] = ztmp[:]
            ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt2out[:,frw,fcl], lgt2qs[frw,fcl,:], \
                                                                     prbs, msgval=-9999.)
            zlgt2[:,frw,fcl] = ztmp[:]
            ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt12out[:,frw,fcl], lgt12qs[frw,fcl,:], \
                                                                     prbs, msgval=-9999.)
            zlgt12[:,frw,fcl] = ztmp[:]
    zngwt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(ngwt1out, ngwt1qs, prbs, msgval=-9999.)
    zngwt2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(ngwt2out, ngwt2qs, prbs, msgval=-9999.)
    zcttp1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cttp1out, cttp1qs, prbs, msgval=-9999.)
    zcttp2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cttp2out, cttp2qs, prbs, msgval=-9999.)
    # Output transformed quantile samples
    zfnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_Cloud_StdGausTrans.nc' % (dtdr,yrlst[k],hrchc,rgchc)
    zout = Dataset(zfnm,'w')
    dimsmp = zout.createDimension('sample',tsmp)
    dimfov1 = zout.createDimension('fovrow',3)
    dimfov2 = zout.createDimension('fovcol',3)
    varlon = zout.createVariable('Longitude','f4',['sample'])
    varlon[:] = lonout
    varlon.long_name = 'Longitude'
    varlon.units = 'degrees_east'
    varlat = zout.createVariable('Latitude','f4',['sample'])
    varlat[:] = latout
    varlat.long_name = 'Latitude'
    varlat.units = 'degrees_north'
    varjdy = zout.createVariable('JulianDay','i2',['sample'])
    varjdy[:] = jdyout
    varjdy.long_name = 'JulianDay'
    varjdy.units = 'day'
    varyr = zout.createVariable('Year','i2',['sample'])
    varyr[:] = yrout
    varyr.long_name = 'Year'
    varyr.units = 'year'
    varnslb = zout.createVariable('NumberSlab_StdGaus','f4',['sample'], fill_value = -9999)
    varnslb[:] = znslb
    varnslb.long_name = 'Quantile transformed number of cloud slabs'
    varnslb.units = 'None'
    varnslb.missing_value = -9999.
    varcbprs = zout.createVariable('CloudBot1Logit_StdGaus','f4',['sample'], fill_value = -9999)
    varcbprs[:] = zprsbt1
    varcbprs.long_name = 'Quantile transformed slab 1 cloud bottom pressure logit'
    varcbprs.units = 'None'
    varcbprs.missing_value = -9999.
    vardpc1 = zout.createVariable('DPCloud1Logit_StdGaus','f4',['sample'], fill_value = -9999)
    vardpc1[:] = zdpcld1
    vardpc1.long_name = 'Quantile transformed slab 1 cloud pressure depth logit'
    vardpc1.units = 'None'
    vardpc1.missing_value = -9999.
    vardpslb = zout.createVariable('DPSlabLogit_StdGaus','f4',['sample'], fill_value = -9999)
    vardpslb[:] = zdpslb
    vardpslb.long_name = 'Quantile transformed two-slab vertical separation logit'
    vardpslb.units = 'None'
    vardpslb.missing_value = -9999.
    vardpc2 = zout.createVariable('DPCloud2Logit_StdGaus','f4',['sample'], fill_value = -9999)
    vardpc2[:] = zdpcld2
    vardpc2.long_name = 'Quantile transformed slab 2 cloud pressure depth logit'
    vardpc2.units = 'None'
    vardpc2.missing_value = -9999.
    vartyp1 = zout.createVariable('CType1_StdGaus','f4',['sample'], fill_value = -9999)
    vartyp1[:] = zctyp1
    vartyp1.long_name = 'Quantile transformed slab 1 cloud type logit'
    vartyp1.units = 'None'
    vartyp1.missing_value = -9999.
    vartyp2 = zout.createVariable('CType2_StdGaus','f4',['sample'], fill_value = -9999)
    vartyp2[:] = zctyp2
    vartyp2.long_name = 'Quantile transformed slab 2 cloud type'
    vartyp2.units = 'None'
    vartyp2.missing_value = -9999.
    varcov = zout.createVariable('CCoverInd_StdGaus','f4',['sample','fovrow','fovcol'], fill_value= -9999)
    varcov[:] = zccvout
    varcov.long_name = 'Quantile transformed cloud cover indicator'
    varcov.units = 'None'
    varcov.missing_value = -9999.
    varlgt1 = zout.createVariable('CFrcLogit1_StdGaus','f4',['sample','fovrow','fovcol'], fill_value = -9999)
    varlgt1[:] = zlgt1
    varlgt1.long_name = 'Quantile transformed slab 1 cloud fraction logit'
    varlgt1.units = 'None'
    varlgt1.missing_value = -9999.
    varlgt2 = zout.createVariable('CFrcLogit2_StdGaus','f4',['sample','fovrow','fovcol'], fill_value = -9999)
    varlgt2[:] = zlgt2
    varlgt2.long_name = 'Quantile transformed slab 2 cloud fraction logit'
    varlgt2.units = 'None'
    varlgt2.missing_value = -9999.
    varlgt12 = zout.createVariable('CFrcLogit12_StdGaus','f4',['sample','fovrow','fovcol'], fill_value = -9999)
    varlgt12[:] = zlgt12
    varlgt12.long_name = 'Quantile transformed slab 1/2 overlap fraction logit'
    varlgt12.units = 'None'
    varlgt12.missing_value = -9999.
    varngwt1 = zout.createVariable('NGWater1_StdGaus','f4',['sample'], fill_value = -9999)
    varngwt1[:] = zngwt1
    varngwt1.long_name = 'Quantile transformed slab 1 non-gas water'
    varngwt1.units = 'None'
    varngwt1.missing_value = -9999.
    varngwt2 = zout.createVariable('NGWater2_StdGaus','f4',['sample'], fill_value = -9999)
    varngwt2[:] = zngwt2
    varngwt2.long_name = 'Quantile transformed slab 2 non-gas water'
    varngwt2.units = 'None'
    varngwt2.missing_value = -9999.
    varcttp1 = zout.createVariable('CTTemp1_StdGaus','f4',['sample'], fill_value = -9999)
    varcttp1[:] = zcttp1
    varcttp1.long_name = 'Quantile transformed slab 1 cloud top temperature'
    varcttp1.units = 'None'
    varcttp1.missing_value = -9999.
    varcttp2 = zout.createVariable('CTTemp2_StdGaus','f4',['sample'], fill_value = -9999)
    varcttp2[:] = zcttp2
    varcttp2.long_name = 'Quantile transformed slab 2 cloud top temperature'
    varcttp2.units = 'None'
    varcttp2.missing_value = -9999.
    zout.close()
    return
def quantile_profile_locmask_conus(rfdr, mtdr, csdr, airdr, dtdr, yrlst, mnst, mnfn, hrchc, rgchc, mskvr, mskvl):
    """Construct profile/surface variable quantiles and standard-normal
    z-scores over a possibly irregular location mask, and write two netCDF
    files: a quantile lookup file and a transformed-sample file.

    rfdr:  Directory for reference data (Levels/Quantiles)
    mtdr:  Directory for MERRA data
    csdr:  Directory for cloud slab data (accepted for interface
           compatibility; not used in this routine)
    airdr: Directory for AIRS cloud fraction
    dtdr:  Output directory
    yrlst: List of years to process
    mnst:  Starting month
    mnfn:  Ending month
    hrchc: Template hour choice
    rgchc: Template region choice
    mskvr: Name of region mask variable
    mskvl: Value of region mask for region choice
    """
    # Read probability break points and AIRS pressure levels
    rnm = '%s/AIRS_Levels_Quantiles.nc' % (rfdr)
    f = Dataset(rnm,'r')
    plev = f['level'][:]
    prbs = f['probability'][:]
    alts = f['altitude'][:]
    f.close()

    nyr = len(yrlst)
    nprb = prbs.shape[0]
    nzout = 101

    # Mask, lat, lon from the first year's MERRA file
    fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[0],hrchc)
    f = Dataset(fnm,'r')
    mask = f.variables[mskvr][:,:]
    latmet = f.variables['plat'][:]
    lonmet = f.variables['plon'][:]
    tminf = f.variables['time'][:]
    tmunit = f.variables['time'].units[:]
    f.close()

    # Indicator: 1 inside the chosen region, 0 elsewhere
    mskind = numpy.zeros((mask.shape),dtype=mask.dtype)
    print(mskvl)
    mskind[mask == mskvl] = 1
    lnsq = numpy.arange(lonmet.shape[0])
    ltsq = numpy.arange(latmet.shape[0])

    # Subset to the bounding box of the mask
    lnsm = numpy.sum(mskind,axis=0)
    ltsm = numpy.sum(mskind,axis=1)
    lnmn = numpy.amin(lnsq[lnsm > 0])
    lnmx = numpy.amax(lnsq[lnsm > 0]) + 1
    ltmn = numpy.amin(ltsq[ltsm > 0])
    ltmx = numpy.amax(ltsq[ltsm > 0]) + 1
    stridx = 'Lon Range: %d, %d\nLat Range: %d, %d \n' % (lnmn,lnmx,ltmn,ltmx)
    print(stridx)

    nx = lnmx - lnmn
    ny = ltmx - ltmn
    lnrp = numpy.tile(lonmet[lnmn:lnmx],ny)
    ltrp = numpy.repeat(latmet[ltmn:ltmx],nx)
    mskblk = mskind[ltmn:ltmx,lnmn:lnmx]
    mskflt = mskblk.flatten()

    # tsmp accumulates the total number of matched samples across years
    tsmp = 0
    for k in range(nyr):
        # Recover the calendar date of the file's time origin
        fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = Dataset(fnm,'r')
        tminf = f.variables['time'][:]
        tmunit = f.variables['time'].units[:]
        f.close()

        tmunit = tmunit.replace("days since ","")
        dybs = datetime.datetime.strptime(tmunit,"%Y-%m-%d %H:%M:%S")
        print(dybs)
        dy0 = dybs + datetime.timedelta(days=tminf[0])
        dyinit = datetime.date(dy0.year,dy0.month,dy0.day)
        print(dyinit)

        # Day-of-year range for the requested months
        dyst = datetime.date(yrlst[k],mnst,1)
        ttst = dyst.timetuple()
        jst = ttst.tm_yday
        if mnfn < 12:
            dyfn = datetime.date(yrlst[k],mnfn+1,1)
            ttfn = dyfn.timetuple()
            jfn = ttfn.tm_yday
        else:
            # December: end bound wraps to Jan 1 of the following year
            dyfn = datetime.date(yrlst[k]+1,1,1)
            dy31 = datetime.date(yrlst[k],12,31)
            tt31 = dy31.timetuple()
            jfn = tt31.tm_yday + 1

        # Offsets of the start/end days within the file's time dimension
        dystidx = abs((dyst-dyinit).days)
        dyfnidx = abs((dyfn-dyinit).days)

        jdsq = numpy.arange(jst,jfn)
        print(jdsq)
        # Julian day replicated over the spatial grid
        tmhld = numpy.repeat(jdsq,nx*ny)

        stridx = 'Day Range: %d, %d\n' % (dystidx,dyfnidx)
        print(stridx)

        # MERRA variables
        fnm = '%s/interpolated_merra2_for_SARTA_two_slab_%d_JJA_CONUS_with_NCA_regions_%02dUTC_no_vertical_variation_for_missing.nc' % (mtdr,yrlst[k],hrchc)
        f = Dataset(fnm,'r')
        tms = f.variables['time'][:]
        stparr = f['/stemp'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        psfarr = f['/spres'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        salarr = f['/salti'][ltmn:ltmx,lnmn:lnmx]
        tmparr = f['/ptemp'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
        h2oarr = f['/rh'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
        altarr = f['/palts'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
        f.close()

        tmflt = tms.flatten()
        nt = tmflt.shape[0]
        lnhld = numpy.tile(lnrp,nt)
        lthld = numpy.tile(ltrp,nt)

        # Replicate the spatial mask over all time steps
        mskall = numpy.tile(mskflt,nt)
        msksq = numpy.arange(mskall.shape[0])
        msksb = msksq[mskall > 0]
        mskstr = 'Total Obs: %d, Within Mask: %d \n' % (msksq.shape[0],msksb.shape[0])
        print(mskstr)

        # AIRS clouds
        anm = '%s/CONUS_AIRS_CldFrc_Match_JJA_%d_%02d_UTC.nc' % (airdr,yrlst[k],hrchc)
        f = Dataset(anm,'r')
        arsfrc1 = f.variables['AIRS_CldFrac_1'][:,dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        arsfrc2 = f.variables['AIRS_CldFrac_2'][:,dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
        f.close()

        # Total two-layer cloud fraction; the first FOV supplies the matchup mask
        frctot = arsfrc1 + arsfrc2
        frc0 = frctot[0,:,:,:]
        frc0 = frc0.flatten()
        frcsq = numpy.arange(tmhld.shape[0])
        # Subset by AIRS matchup availability and location mask
        # (frc0 is assumed to be a masked array — TODO confirm upstream)
        frcsb = frcsq[(numpy.logical_not(frc0.mask)) & (mskall > 0)]
        nairs = frcsb.shape[0]
        print(tmhld.shape)
        print(frcsb.shape)

        # Pull matched profile samples level by level
        tmptmp = numpy.zeros((nairs,nzout))
        h2otmp = numpy.zeros((nairs,nzout))
        alttmp = numpy.zeros((nairs,nzout))
        for j in range(nzout):
            tmpvec = tmparr[:,j,:,:].flatten()
            tmpvec[tmpvec > 1e30] = -9999.
            tmptmp[:,j] = tmpvec[frcsb]
            altvec = altarr[:,j,:,:].flatten()
            alttmp[:,j] = altvec[frcsb]
            h2ovec = h2oarr[:,j,:,:].flatten()
            h2ovec[h2ovec > 1e30] = -9999.
            h2otmp[:,j] = h2ovec[frcsb]

        # Initialize accumulators on the first year, append afterwards
        if tsmp == 0:
            tmpmerout = numpy.zeros(tmptmp.shape)
            tmpmerout[:,:] = tmptmp
            h2omerout = numpy.zeros(h2otmp.shape)
            h2omerout[:,:] = h2otmp
            altout = numpy.zeros(alttmp.shape)
            altout[:,:] = alttmp
        else:
            tmpmerout = numpy.append(tmpmerout,tmptmp,axis=0)
            h2omerout = numpy.append(h2omerout,h2otmp,axis=0)
            altout = numpy.append(altout,alttmp,axis=0)

        # Surface variables (altitude is time-invariant, so tile over time)
        stparr = stparr.flatten()
        psfarr = psfarr.flatten()
        salarr = salarr.flatten()
        salfl = numpy.tile(salarr[:],nt)
        if tsmp == 0:
            sftmpout = numpy.zeros((nairs,)) - 9999.0
            sftmpout[:] = stparr[frcsb]
            psfcout = numpy.zeros((nairs,)) - 9999.0
            psfcout[:] = psfarr[frcsb]
            sfaltout = numpy.zeros((nairs,)) - 9999.0
            sfaltout[:] = salfl[frcsb]
        else:
            sftmpout = numpy.append(sftmpout,stparr[frcsb])
            psfcout = numpy.append(psfcout,psfarr[frcsb])
            sfaltout = numpy.append(sfaltout,salfl[frcsb])

        # Loc/Time
        if tsmp == 0:
            latout = numpy.zeros((nairs,)) - 9999.0
            latout[:] = lthld[frcsb]
            lonout = numpy.zeros((nairs,)) - 9999.0
            lonout[:] = lnhld[frcsb]
            yrout = numpy.zeros((nairs,),dtype=numpy.int16)
            yrout[:] = yrlst[k]
            jdyout = numpy.zeros((nairs,),dtype=numpy.int16)
            jdyout[:] = tmhld[frcsb]
        else:
            latout = numpy.append(latout,lthld[frcsb])
            lonout = numpy.append(lonout,lnhld[frcsb])
            yrtmp = numpy.zeros((nairs,),dtype=numpy.int16)
            yrtmp[:] = yrlst[k]
            yrout = numpy.append(yrout,yrtmp)
            jdyout = numpy.append(jdyout,tmhld[frcsb])

        tsmp = tsmp + nairs

    # Quantile and z-score arrays. (The original code pre-allocated several
    # arrays twice and allocated sftmpqs/sfaltqs/psfcqs and zsftmpout/
    # zsfaltout/zpsfcout placeholders that were never used; the dead
    # allocations have been removed.)
    tmpqout = numpy.zeros((nzout,nprb)) - 9999.
    rhqout = numpy.zeros((nzout,nprb)) - 9999.
    altmed = numpy.zeros((nzout,)) - 9999.
    ztmpout = numpy.zeros((tsmp,nzout)) - 9999.
    zrhout = numpy.zeros((tsmp,nzout)) - 9999.

    # Per-level quantiles and standard-normal transforms
    # (index 103 is presumably the median probability — TODO confirm against
    # the probability break points in the reference file)
    for j in range(nzout):
        tmptmp = calculate_VPD.quantile_msgdat(tmpmerout[:,j],prbs)
        tmpqout[j,:] = tmptmp[:]
        str1 = 'Plev %.2f, %.2f Temp Quantile: %.3f' % (plev[j],prbs[103],tmptmp[103])
        print(str1)
        # Transform observations to standard-normal scores
        ztmp = calculate_VPD.std_norm_quantile_from_obs(tmpmerout[:,j], tmptmp, prbs, msgval=-9999.)
        ztmpout[:,j] = ztmp[:]

        alttmp = calculate_VPD.quantile_msgdat(altout[:,j],prbs)
        altmed[j] = alttmp[103]
        str1 = 'Plev %.2f, %.2f Alt Quantile: %.3f' % (plev[j],prbs[103],alttmp[103])
        print(str1)

        # Clamp RH at 100% before computing quantiles
        rhadj = h2omerout[:,j]
        rhadj[rhadj > 1.0] = 1.0
        rhqtmp = calculate_VPD.quantile_msgdat(rhadj,prbs)
        rhqout[j,:] = rhqtmp[:]
        str1 = 'Plev %.2f, %.2f RH Quantile: %.4f' % (plev[j],prbs[103],rhqtmp[103])
        print(str1)
        zrh = calculate_VPD.std_norm_quantile_from_obs(rhadj, rhqtmp, prbs, msgval=-9999.)
        zrhout[:,j] = zrh[:]

    # Surface variable quantiles and transforms
    psfcqs = calculate_VPD.quantile_msgdat(psfcout,prbs)
    str1 = '%.2f PSfc Quantile: %.2f' % (prbs[103],psfcqs[103])
    print(str1)
    zpsfcout = calculate_VPD.std_norm_quantile_from_obs(psfcout, psfcqs, prbs, msgval=-9999.)

    sftpqs = calculate_VPD.quantile_msgdat(sftmpout,prbs)
    str1 = '%.2f SfcTmp Quantile: %.2f' % (prbs[103],sftpqs[103])
    print(str1)
    zsftmpout = calculate_VPD.std_norm_quantile_from_obs(sftmpout, sftpqs, prbs, msgval=-9999.)

    sfalqs = calculate_VPD.quantile_msgdat(sfaltout,prbs)
    str1 = '%.2f SfcAlt Quantile: %.2f' % (prbs[103],sfalqs[103])
    print(str1)
    zsfaltout = calculate_VPD.std_norm_quantile_from_obs(sfaltout, sfalqs, prbs, msgval=-9999.)

    # Output quantiles
    qfnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_TempRHSfc_Quantile.nc' % (dtdr,yrlst[k],hrchc,rgchc)
    qout = Dataset(qfnm,'w')
    dimz = qout.createDimension('level',nzout)
    dimp = qout.createDimension('probability',nprb)

    varlvl = qout.createVariable('level','f4',['level'], fill_value = -9999)
    varlvl[:] = plev
    varlvl.long_name = 'AIRS/SARTA pressure levels'
    varlvl.units = 'hPa'
    varlvl.missing_value = -9999

    varprb = qout.createVariable('probability','f4',['probability'], fill_value = -9999)
    varprb[:] = prbs
    varprb.long_name = 'Probability break points'
    varprb.units = 'none'
    varprb.missing_value = -9999

    # Altitude grid
    varalt = qout.createVariable('Altitude_median', 'f4', ['level'], fill_value = -9999)
    varalt[:] = altmed
    varalt.long_name = 'Altitude median value'
    varalt.units = 'm'
    varalt.missing_value = -9999

    vartmp = qout.createVariable('Temperature_quantile', 'f4', ['level','probability'], fill_value = -9999)
    vartmp[:] = tmpqout
    vartmp.long_name = 'Temperature quantiles'
    vartmp.units = 'K'
    vartmp.missing_value = -9999.

    varrh = qout.createVariable('RH_quantile', 'f4', ['level','probability'], fill_value = -9999)
    varrh[:] = rhqout
    varrh.long_name = 'Relative humidity quantiles'
    varrh.units = 'Unitless'
    varrh.missing_value = -9999.

    varstmp = qout.createVariable('SfcTemp_quantile', 'f4', ['probability'], fill_value = -9999)
    varstmp[:] = sftpqs
    varstmp.long_name = 'Surface temperature quantiles'
    varstmp.units = 'K'
    varstmp.missing_value = -9999.

    varpsfc = qout.createVariable('SfcPres_quantile', 'f4', ['probability'], fill_value = -9999)
    varpsfc[:] = psfcqs
    varpsfc.long_name = 'Surface pressure quantiles'
    varpsfc.units = 'hPa'
    varpsfc.missing_value = -9999.

    varsalt = qout.createVariable('SfcAlt_quantile', 'f4', ['probability'], fill_value = -9999)
    varsalt[:] = sfalqs
    varsalt.long_name = 'Surface altitude quantiles'
    varsalt.units = 'm'
    varsalt.missing_value = -9999.
    qout.close()

    # Output transformed quantile samples
    zfnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_TempRHSfc_StdGausTrans.nc' % (dtdr,yrlst[k],hrchc,rgchc)
    zout = Dataset(zfnm,'w')
    dimz = zout.createDimension('level',nzout)
    dimsmp = zout.createDimension('sample',tsmp)

    varlvl = zout.createVariable('level','f4',['level'], fill_value = -9999)
    varlvl[:] = plev
    varlvl.long_name = 'AIRS/SARTA pressure levels'
    varlvl.units = 'hPa'
    varlvl.missing_value = -9999

    varlon = zout.createVariable('Longitude','f4',['sample'])
    varlon[:] = lonout
    varlon.long_name = 'Longitude'
    varlon.units = 'degrees_east'

    varlat = zout.createVariable('Latitude','f4',['sample'])
    varlat[:] = latout
    varlat.long_name = 'Latitude'
    varlat.units = 'degrees_north'

    varjdy = zout.createVariable('JulianDay','i2',['sample'])
    varjdy[:] = jdyout
    varjdy.long_name = 'JulianDay'
    varjdy.units = 'day'

    varyr = zout.createVariable('Year','i2',['sample'])
    varyr[:] = yrout
    varyr.long_name = 'Year'
    varyr.units = 'year'

    varsrt3 = zout.createVariable('Temperature_StdGaus', 'f4', ['sample','level'], fill_value = -9999)
    varsrt3[:] = ztmpout
    varsrt3.long_name = 'Quantile transformed temperature'
    varsrt3.units = 'None'
    varsrt3.missing_value = -9999.

    varsrt4 = zout.createVariable('RH_StdGaus', 'f4', ['sample','level'], fill_value = -9999)
    varsrt4[:] = zrhout
    varsrt4.long_name = 'Quantile transformed relative humidity'
    varsrt4.units = 'None'
    varsrt4.missing_value = -9999.

    varsrts1 = zout.createVariable('SfcTemp_StdGaus', 'f4', ['sample'], fill_value = -9999)
    varsrts1[:] = zsftmpout
    varsrts1.long_name = 'Quantile transformed surface temperature'
    varsrts1.units = 'None'
    varsrts1.missing_value = -9999.

    varsrts2 = zout.createVariable('SfcPres_StdGaus', 'f4', ['sample'], fill_value = -9999)
    varsrts2[:] = zpsfcout
    varsrts2.long_name = 'Quantile transformed surface pressure'
    varsrts2.units = 'None'
    varsrts2.missing_value = -9999.

    varsrts3 = zout.createVariable('SfcAlt_StdGaus', 'f4', ['sample'], fill_value = -9999)
    varsrts3[:] = zsfaltout
    # Bug fix: long_name previously said 'surface pressure' (copy-paste)
    varsrts3.long_name = 'Quantile transformed surface altitude'
    varsrts3.units = 'None'
    varsrts3.missing_value = -9999.
    zout.close()

    return
def airscld_invtransf_mix_cloud9_conus_nosfc(rfdr, dtdr, yrchc, hrchc, rgchc, rfmn, rfdy, rfgrn, scnrw, nrep = 10, \
                                             l2dir = '/archive/AIRSOps/airs/gdaac/v6'):
    """Read Gaussian mixture model parameters, draw random state-vector
    samples, inverse-transform them through the saved quantiles, and set up
    SARTA input files (HDF5). Uses AIRS FOV cloud fraction information and a
    designated AIRS reference granule for surface pressure/altitude.

    rfdr:  Directory for reference data (Levels/Quantiles)
    dtdr:  Output directory
    yrchc: Template year choice
    hrchc: Template hour choice
    rgchc: Template region choice
    rfmn:  Month for reference granule
    rfdy:  Day for reference granule
    rfgrn: Reference granule number
    scnrw: Scan row for experiment
    nrep:  Number of replicate granules
    l2dir: Local AIRS Level 2 directory (to retrieve reference info)
    """
    # Seed the RN generator deterministically from the template year/hour
    sdchc = 165434 + yrchc + hrchc
    random.seed(sdchc)
    nszout = 45 * 30 * nrep    # total samples: 45 x 30 granule grid x replicates
    sfrps = 45 * nrep          # surface replicates
    nlvsrt = 98
    msgdbl = -9999.0

    # Read pressure levels
    rnm = '%s/AIRS_Levels_Quantiles.nc' % (rfdr)
    f = Dataset(rnm,'r')
    airs_sarta_levs = f['level'][:]
    f.close()

    # Get reference granule info. NOTE(review): if the directory or granule
    # is not found, psfc/topg stay undefined and a NameError follows below —
    # preserved from the original behavior.
    airsdr = '%s/%04d/%02d/%02d/airs2sup' % (l2dir,yrchc,rfmn,rfdy)
    if (os.path.exists(airsdr)):
        fllst = os.listdir(airsdr)
        l2str = 'AIRS.%04d.%02d.%02d.%03d' % (yrchc,rfmn,rfdy,rfgrn)
        rffd = -1
        j = 0
        while ( (j < len(fllst)) and (rffd < 0) ):
            lncr = len(fllst[j])
            l4 = lncr - 4
            if ( (fllst[j][l4:lncr] == '.hdf') and (l2str in fllst[j])):
                l2fl = '%s/%s' % (airsdr,fllst[j])
                ncl2 = Dataset(l2fl)
                psfc = ncl2.variables['PSurfStd'][:,:]
                topg = ncl2.variables['topog'][:,:]
                ncl2.close()
                rffd = j
            j = j + 1
    else:
        print('L2 directory not found')

    # Surface replicates from the chosen scan row
    psfcvc = psfc[scnrw-1,:]
    topgvc = topg[scnrw-1,:]
    spres = numpy.tile(psfcvc,(sfrps,))
    salti = numpy.tile(topgvc,(sfrps,))

    # Variable lists: clear-sky entries followed by cloud entries.
    # clrst: starting level (1-based; 0 = scalar); clrct: element count.
    clrlst = ['Temperature','RH','SfcTemp']
    clrst = [1,64,0]
    clrct = [98,35,1]
    cldlst = ['NumberSlab','CloudBot1Logit','DPCloud1Logit','DPSlabLogit','DPCloud2Logit', \
              'CType1','CType2','CCoverInd','CFrcLogit1','CFrcLogit2','CFrcLogit12', \
              'NGWater1','NGWater2','CTTemp1','CTTemp2']
    cldst = [0,0,0,0,0, 0,0,0,0,0,0, 0,0,0,0]
    cldct = [1,1,1,1,1, 1,1,9,9,9,9, 1,1,1,1]

    # Total state-vector length
    nvar = 0
    for q in range(len(clrct)):
        nvar = nvar + clrct[q]
    nclr = nvar
    for q in range(len(cldlst)):
        nvar = nvar + cldct[q]
    ncld = nvar - nclr

    # Discrete/Continuous indicator per state-vector element
    typind = []
    for q in range(len(clrct)):
        for p in range(clrct[q]):
            typind.append('Continuous')
    cldtypind = ['Discrete','Continuous','Continuous','Continuous','Continuous', \
                 'Discrete','Discrete','Discrete','Continuous','Continuous','Continuous', \
                 'Continuous','Continuous','Continuous','Continuous']
    for q in range(len(cldct)):
        for p in range(cldct[q]):
            typind.append(cldtypind[q])

    # Quantile files
    qclrnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_TempRHSfc_Quantile.nc' % (dtdr,yrchc,hrchc,rgchc)
    qcldnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_Cloud_Quantile.nc' % (dtdr,yrchc,hrchc,rgchc)

    # Clear-sky quantile table
    qin = Dataset(qclrnm,'r')
    prbs = qin.variables['probability'][:]
    nprb = prbs.shape[0]
    qsclr = numpy.zeros((nclr,nprb))
    lvs = qin.variables['level'][:]
    alts = qin.variables['Altitude_median'][:]
    # Index 103 is presumably the median probability — TODO confirm
    rhmd = qin.variables['RH_quantile'][:,103]
    nlvl = lvs.shape[0]
    cctr = 0
    for j in range(len(clrlst)):
        print(clrlst[j])
        if clrst[j] == 0:
            # Scalar (surface) variable
            vr1 = '%s_quantile' % (clrlst[j])
            qsclr[cctr,:] = qin.variables[vr1][:]
        else:
            # Level-resolved variable: copy the requested level range
            inst = clrst[j] - 1
            infn = inst + clrct[j]
            otst = cctr
            otfn = cctr + clrct[j]
            vr1 = '%s_quantile' % (clrlst[j])
            qsclr[otst:otfn,:] = qin.variables[vr1][inst:infn,:]
        cctr = cctr + clrct[j]
    qin.close()
    print('Clear medians')
    print(qsclr[:,103])

    # Cloud quantile table
    cldnmout = []
    qin = Dataset(qcldnm,'r')
    qscld = numpy.zeros((ncld,nprb))
    dctr = 0
    for j in range(len(cldlst)):
        print(cldlst[j])
        vr1 = '%s_quantile' % (cldlst[j])
        vrinf = qin.variables[vr1]
        if cldct[j] == 1:
            # Scalar cloud variable
            qscld[dctr,:] = qin.variables[vr1][:]
            dctr = dctr + 1
            cldnmout.append(cldlst[j])
        elif (len(vrinf.shape) == 2):
            inst = cldst[j]
            infn = inst + cldct[j]
            for n2 in range(inst,infn):
                clnm = '%s_%d' % (cldlst[j],n2)
                cldnmout.append(clnm)
            otst = dctr
            otfn = dctr + cldct[j]
            # Bug fix: the original rebuilt vr1 here from clrlst (the
            # clear-sky name list); vr1 already holds the cloud variable name
            qscld[otst:otfn,:] = qin.variables[vr1][inst:infn,:]
            dctr = dctr + cldct[j]
        elif (len(vrinf.shape) == 3):
            # FOV-resolved (3 x 3) cloud variable: flatten to 9 entries
            for cl0 in range(vrinf.shape[0]):
                for rw0 in range(vrinf.shape[1]):
                    otst = dctr
                    otfn = dctr + 1
                    qscld[otst:otfn,:] = qin.variables[vr1][cl0,rw0,:]
                    clnm = '%s_%d_%d' % (cldlst[j],cl0,rw0)
                    cldnmout.append(clnm)
                    dctr = dctr + 1
    qin.close()
    print('Cloud medians')
    print(qscld[:,103])

    # Read GMM results
    gmmnm = '%s/CONUS_AIRS_JJA_%04d_%02dUTC_%s_GMM_parameters.nc' % (dtdr,yrchc,hrchc,rgchc)
    gmin = Dataset(gmmnm,'r')
    gmnms = gmin['State_Vector_Names'][:,:]
    gmmean = gmin['Mean'][:,:]
    gmpkcv = gmin['Packed_Covariance'][:,:]
    gmprps = gmin['Mixture_Proportion'][:]
    gmin.close()

    nmclps = gmnms.tolist()
    strvrs = list(map(calculate_VPD.clean_byte_list,nmclps))
    if sys.version_info[0] < 3:
        print('Version 2')
        strvrs = map(str,strvrs)

    nmix = gmmean.shape[0]
    nmxvar = gmmean.shape[1]
    # Unpack per-component covariance matrices
    mrgcv = numpy.zeros((nmix,nmxvar,nmxvar),dtype=numpy.float64)
    for j in range(nmix):
        mrgcv[j,:,:] = calculate_VPD.unpackcov(gmpkcv[j,:], nelm=nmxvar)

    # Component sizes: multinomial draw by mixture proportion.
    # (dtype was numpy.float, an alias removed in NumPy 1.24.)
    dtall = numpy.zeros((nszout,nmxvar),dtype=numpy.float64)
    cmpidx = numpy.zeros((nszout,),dtype=numpy.int16)
    csmp = random.multinomial(nszout,pvals=gmprps)
    cmsz = 0
    for j in range(nmix):
        cvfl = mrgcv[j,:,:]
        # Sample on the correlation scale, then rescale by the std devs
        crmt = calculate_VPD.cov2cor(cvfl)
        sdmt = numpy.diag(numpy.sqrt(cvfl.diagonal()))
        w, v = linalg.eig(crmt)
        print(numpy.amin(w))
        sdfn = cmsz + csmp[j]
        dtz = random.multivariate_normal(numpy.zeros((nmxvar,)),crmt,size=csmp[j])
        dttmp = numpy.tile(gmmean[j,:],(csmp[j],1)) + numpy.dot(dtz,sdmt)
        dtall[cmsz:sdfn,:] = dttmp[:,:]
        cmpidx[cmsz:sdfn] = j + 1
        cmsz = cmsz + csmp[j]

    # Re-shuffle so components are interleaved
    ssq = numpy.arange(nszout)
    sqsmp = random.choice(ssq,size=nszout,replace=False)
    csmpshf = cmpidx[sqsmp]
    dtshf = dtall[sqsmp,:]
    print(dtshf.shape)

    ### Inverse transform: map standard-normal draws back to data quantiles
    qout = numpy.zeros(dtshf.shape)
    for j in range(nclr):
        if typind[j] == 'Discrete':
            qout[:,j] = calculate_VPD.data_quantile_from_std_norm_discrete(dtshf[:,j],qsclr[j,:],prbs,minval=qsclr[j,0],maxval=qsclr[j,nprb-1])
        else:
            qout[:,j] = calculate_VPD.data_quantile_from_std_norm(dtshf[:,j],qsclr[j,:],prbs,minval=qsclr[j,0],maxval=qsclr[j,nprb-1])
    for j in range(nclr,nvar):
        # Bug fix: min/max bounds for cloud variables now come from the cloud
        # quantile table qscld (the original mixed in qsclr entries)
        if typind[j] == 'Discrete':
            qout[:,j] = calculate_VPD.data_quantile_from_std_norm_discrete(dtshf[:,j],qscld[j-nclr,:],prbs,minval=qscld[j-nclr,0],maxval=qscld[j-nclr,nprb-1])
        else:
            qout[:,j] = calculate_VPD.data_quantile_from_std_norm(dtshf[:,j],qscld[j-nclr,:],prbs,minval=qscld[j-nclr,0],maxval=qscld[j-nclr,nprb-1])

    ### Prepare for SARTA
    varlstout = ['cngwat','cngwat2','cprbot','cprbot2','cprtop','cprtop2', \
                 'cpsize','cpsize2','cstemp','cstemp2','ctype','ctype2','salti','spres','stemp']

    # Adjust altitudes: enforce monotone increase toward the top by doubling
    # the last positive increment wherever the median altitudes invert
    alth2o = numpy.zeros((nszout,nlvsrt+3))
    alth2o[:,nlvsrt-1] = alts[nlvsrt-1]
    curdlt = 0.0
    for j in range(nlvsrt-2,-1,-1):
        str1 = 'Level %d: %.4f' % (j,curdlt)
        print(str1)
        if (alts[j] > alts[j+1]):
            curdlt = alts[j] - alts[j+1]
            alth2o[:,j] = alts[j]
        else:
            alth2o[:,j] = alts[j+1] + curdlt * 2.0
            curdlt = curdlt * 2.0
    alth2o[:,97] = 0.0

    # Convert cloud items to data frame
    smpfrm = pandas.DataFrame(data=qout[:,nclr:nvar],columns=cldnmout)
    dtout = numpy.zeros((nszout,len(varlstout)), dtype=numpy.float64)
    frmout = pandas.DataFrame(data=dtout,columns=varlstout)

    # Cloud types: encode as 101/201, missing where the slab is absent
    frmout['ctype'] = (smpfrm['CType1'] + 1.0) * 100.0 + 1.0
    frmout['ctype2'] = (smpfrm['CType2'] + 1.0) * 100.0 + 1.0
    frmout.loc[(smpfrm.NumberSlab == 0),'ctype'] = msgdbl
    frmout.loc[(smpfrm.NumberSlab < 2),'ctype2'] = msgdbl

    # Met/Sfc components, arrays sized for SARTA and AIRS
    cctr = 0
    prhout = numpy.zeros((nszout,nlvsrt+3)) - 9999.0
    ptmpout = numpy.zeros((nszout,nlvsrt+3)) - 9999.0
    for j in range(len(clrst)):
        if clrst[j] == 0:
            if clrlst[j] == 'SfcTemp':
                frmout['stemp'] = qout[:,cctr]
        elif clrlst[j] == 'Temperature':
            inst = clrst[j] - 1
            infn = inst + clrct[j]
            otst = cctr
            otfn = cctr + clrct[j]
            ptmpout[:,inst:infn] = qout[:,otst:otfn]
        elif clrlst[j] == 'RH':
            inst = clrst[j] - 1
            infn = inst + clrct[j]
            otst = cctr
            otfn = cctr + clrct[j]
            prhout[:,inst:infn] = qout[:,otst:otfn]
            # Fill levels above the sampled range from the median RH profile,
            # halving at masked levels
            bsrh = rhmd[inst]
            for k in range(inst-1,-1,-1):
                if ma.is_masked(rhmd[k]):
                    prhout[:,k] = bsrh / 2.0
                    t2 = 'RH masked: %d' % (k)
                    print(t2)
                elif rhmd[k] < 0:
                    t2 = 'RH below 0: %d' % (k)
                    print(t2)
                    prhout[:,k] = bsrh
                else:
                    prhout[:,k] = rhmd[k]
                    bsrh = rhmd[k]
        cctr = cctr + clrct[j]

    str1 = '''RH at Level 1: %.4e, %.4e ''' % (numpy.amin(prhout[:,0]),rhmd[0])
    str2 = '''RH at Level 2: %.4e, %.4e ''' % (numpy.amin(prhout[:,1]),rhmd[1])
    print(str1)
    print(str2)

    h2oout = calculate_VPD.calculate_h2odens(prhout,ptmpout,airs_sarta_levs,alth2o)

    # Surface from reference granule (also needed for cloud pressure levels)
    frmout['salti'] = salti
    frmout['spres'] = spres
    smpfrm['SfcPres'] = spres

    # Pressure variables: slab bottoms/tops as fractions of surface pressure
    for i in range(nszout):
        if smpfrm['NumberSlab'][smpfrm.index[i]] == 0:
            frmout.at[i,'cprbot'] = msgdbl
            frmout.at[i,'cprtop'] = msgdbl
            frmout.at[i,'cprbot2'] = msgdbl
            frmout.at[i,'cprtop2'] = msgdbl
        elif smpfrm['NumberSlab'][smpfrm.index[i]] == 1:
            tmplgts = numpy.array( [smpfrm['CloudBot1Logit'][smpfrm.index[i]], \
                                    smpfrm['DPCloud1Logit'][smpfrm.index[i]] ] )
            frctmp = calculate_VPD.lgttoprp(tmplgts)
            frmout.at[i,'cprbot'] = smpfrm['SfcPres'][smpfrm.index[i]] * (1.0 - frctmp[0])
            frmout.at[i,'cprtop'] = smpfrm['SfcPres'][smpfrm.index[i]] * (1.0 - frctmp[0] - frctmp[1])
            frmout.at[i,'cprbot2'] = msgdbl
            frmout.at[i,'cprtop2'] = msgdbl
        elif smpfrm['NumberSlab'][smpfrm.index[i]] == 2:
            tmplgts = numpy.array( [smpfrm['CloudBot1Logit'][smpfrm.index[i]], \
                                    smpfrm['DPCloud1Logit'][smpfrm.index[i]], \
                                    smpfrm['DPSlabLogit'][smpfrm.index[i]], \
                                    smpfrm['DPCloud2Logit'][smpfrm.index[i]] ] )
            frctmp = calculate_VPD.lgttoprp(tmplgts)
            frmout.at[i,'cprbot'] = smpfrm['SfcPres'][smpfrm.index[i]] * (1.0 - frctmp[0])
            frmout.at[i,'cprtop'] = smpfrm['SfcPres'][smpfrm.index[i]] * (1.0 - frctmp[0] - frctmp[1])
            frmout.at[i,'cprbot2'] = smpfrm['SfcPres'][smpfrm.index[i]] * (1.0 - frctmp[0] - frctmp[1] - frctmp[2])
            frmout.at[i,'cprtop2'] = smpfrm['SfcPres'][smpfrm.index[i]] * (1.0 - frctmp[0] - frctmp[1] - frctmp[2] - frctmp[3])

    # Non-gas water
    frmout['cngwat'] = smpfrm['NGWater1']
    frmout.loc[(smpfrm.NumberSlab == 0),'cngwat'] = msgdbl
    frmout['cngwat2'] = smpfrm['NGWater2']
    frmout.loc[(smpfrm.NumberSlab < 2),'cngwat2'] = msgdbl

    # Cloud top temperature
    frmout['cstemp'] = smpfrm['CTTemp1']
    frmout.loc[(smpfrm.NumberSlab == 0),'cstemp'] = msgdbl
    frmout['cstemp2'] = smpfrm['CTTemp2']
    frmout.loc[(smpfrm.NumberSlab < 2),'cstemp2'] = msgdbl

    # Particle size, from Sergio's paper: 20 for water (101), 80 for ice (201)
    frmout.loc[(frmout.ctype == 101.0),'cpsize'] = 20
    frmout.loc[(frmout.ctype == 201.0),'cpsize'] = 80
    frmout.loc[(frmout.ctype < 0.0),'cpsize'] = msgdbl
    frmout.loc[(frmout.ctype2 == 101.0),'cpsize2'] = 20
    frmout.loc[(frmout.ctype2 == 201.0),'cpsize2'] = 80
    frmout.loc[(frmout.ctype2 < 0.0),'cpsize2'] = msgdbl

    # Cloud fractions, 3 x 3 FOV arrays per sample
    cfrc1out = numpy.zeros((nszout,3,3)) - 9999.0
    cfrc2out = numpy.zeros((nszout,3,3)) - 9999.0
    cfrc12out = numpy.zeros((nszout,3,3)) - 9999.0
    for i in range(nszout):
        if smpfrm['NumberSlab'][smpfrm.index[i]] == 0:
            cfrc1out[i,:,:] = 0.0
            cfrc2out[i,:,:] = 0.0
            cfrc12out[i,:,:] = 0.0
        elif smpfrm['NumberSlab'][smpfrm.index[i]] == 1:
            for q in range(3):
                for p in range(3):
                    ccvnm = 'CCoverInd_%d_%d' % (q,p)
                    lgtnm1 = 'CFrcLogit1_%d_%d' % (q,p)
                    # Cover indicator: -1 = clear, 1 = overcast, else logit
                    if (smpfrm[ccvnm][smpfrm.index[i]] == -1):
                        cfrc1out[i,q,p] = 0.0
                    elif (smpfrm[ccvnm][smpfrm.index[i]] == 1):
                        cfrc1out[i,q,p] = 1.0
                    else:
                        tmplgts = numpy.array( [smpfrm[lgtnm1][smpfrm.index[i]]] )
                        frctmp = calculate_VPD.lgttoprp(tmplgts)
                        cfrc1out[i,q,p] = frctmp[0]
            cfrc2out[i,:,:] = 0.0
            cfrc12out[i,:,:] = 0.0
        elif smpfrm['NumberSlab'][smpfrm.index[i]] == 2:
            for q in range(3):
                for p in range(3):
                    ccvnm = 'CCoverInd_%d_%d' % (q,p)
                    lgtnm1 = 'CFrcLogit1_%d_%d' % (q,p)
                    lgtnm2 = 'CFrcLogit2_%d_%d' % (q,p)
                    lgtnm12 = 'CFrcLogit12_%d_%d' % (q,p)
                    if (smpfrm[ccvnm][smpfrm.index[i]] == -1):
                        cfrc1out[i,q,p] = 0.0
                        cfrc2out[i,q,p] = 0.0
                        cfrc12out[i,q,p] = 0.0
                    elif (smpfrm[ccvnm][smpfrm.index[i]] == 1):
                        # Overcast: renormalize so the clear proportion vanishes
                        tmplgts = numpy.array( [smpfrm[lgtnm1][smpfrm.index[i]], \
                                                smpfrm[lgtnm2][smpfrm.index[i]], \
                                                smpfrm[lgtnm12][smpfrm.index[i]]] )
                        frctmp = calculate_VPD.lgttoprp(tmplgts)
                        frcadj = 1.0 - frctmp[3]
                        cfrc1out[i,q,p] = (frctmp[0] + frctmp[2]) / frcadj
                        cfrc2out[i,q,p] = (frctmp[1] + frctmp[2]) / frcadj
                        cfrc12out[i,q,p] = frctmp[2] / frcadj
                    else:
                        tmplgts = numpy.array( [smpfrm[lgtnm1][smpfrm.index[i]], \
                                                smpfrm[lgtnm2][smpfrm.index[i]], \
                                                smpfrm[lgtnm12][smpfrm.index[i]]] )
                        frctmp = calculate_VPD.lgttoprp(tmplgts)
                        cfrc1out[i,q,p] = frctmp[0] + frctmp[2]
                        cfrc2out[i,q,p] = frctmp[1] + frctmp[2]
                        cfrc12out[i,q,p] = frctmp[2]

    # Write sample output (HDF5)
    print(frmout[166:180])
    fldbl = numpy.array([-9999.],dtype=numpy.float64)
    flshrt = numpy.array([-99],dtype=numpy.int16)

    dfnm = '%s/SampledStateVectors/CONUS_AIRS_JJA_%04d_%02dUTC_%s_SR%02d_SimSARTAStates_Mix_CloudFOV.h5' % (dtdr,yrchc,hrchc,rgchc,scnrw)
    f = h5py.File(dfnm,'w')
    for j in range(len(varlstout)):
        dftmp = f.create_dataset(varlstout[j],data=frmout[varlstout[j]])
        dftmp.attrs['missing_value'] = -9999.
        dftmp.attrs['_FillValue'] = -9999.
    dfpt = f.create_dataset('ptemp',data=ptmpout)
    dfpt.attrs['missing_value'] = fldbl
    dfpt.attrs['_FillValue'] = fldbl
    dfrh = f.create_dataset('relative_humidity',data=prhout)
    dfrh.attrs['missing_value'] = fldbl
    dfrh.attrs['_FillValue'] = fldbl
    dfgs = f.create_dataset('gas_1',data=h2oout)
    dfgs.attrs['missing_value'] = fldbl
    dfgs.attrs['_FillValue'] = fldbl
    dfcf1 = f.create_dataset('cfrac',data=cfrc1out)
    dfcf1.attrs['missing_value'] = fldbl
    dfcf1.attrs['_FillValue'] = fldbl
    dfcf2 = f.create_dataset('cfrac2',data=cfrc2out)
    dfcf2.attrs['missing_value'] = fldbl
    dfcf2.attrs['_FillValue'] = fldbl
    dfcf12 = f.create_dataset('cfrac12',data=cfrc12out)
    dfcf12.attrs['missing_value'] = fldbl
    dfcf12.attrs['_FillValue'] = fldbl
    dfcsmp = f.create_dataset('mixture_component',data=csmpshf)
    dfcsmp.attrs['missing_value'] = flshrt
    dfcsmp.attrs['_FillValue'] = flshrt
    dflv = f.create_dataset('level',data=airs_sarta_levs)
    f.close()

    return
def setup_airs_cloud(flnm, tms, lats, lons, tmunit = 'Seconds since 1993-01-01 00:00:00'):
    """Create the matched AIRS/MERRA cloud netCDF file: write the coordinate
    variables and declare empty cloud-fraction variables to be filled later
    (see airs_cfrac_match_merra).

    flnm:   Name of output file
    tms:    Time variable array
    lats:   Latitude variable array
    lons:   Longitude variable array
    tmunit: Units string for the time coordinate
    """
    ntm = tms.shape[0]
    nlat = lats.shape[0]
    nlon = lons.shape[0]

    # Create output file; AIRSFOV is the 3x3 footprint block flattened to 9
    qout = Dataset(flnm,'w')
    dimln = qout.createDimension('lon',nlon)
    dimlt = qout.createDimension('lat',nlat)
    dimtm = qout.createDimension('time',ntm)
    dimtrk = qout.createDimension('AIRSFOV',9)

    # Preserve single/double precision of each input coordinate array
    if (lons.dtype == numpy.float32):
        lntp = 'f4'
    else:
        lntp = 'f8'
    varlon = qout.createVariable('lon',lntp,['lon'], fill_value = -9999)
    varlon[:] = lons
    varlon.long_name = 'longitude'
    varlon.units='degrees_east'
    varlon.missing_value = -9999

    if (lats.dtype == numpy.float32):
        lttp = 'f4'
    else:
        lttp = 'f8'
    varlat = qout.createVariable('lat',lttp,['lat'], fill_value = -9999)
    varlat[:] = lats
    varlat.long_name = 'latitude'
    varlat.units='degrees_north'
    varlat.missing_value = -9999

    if (tms.dtype == numpy.float32):
        tmtp = 'f4'
    else:
        tmtp = 'f8'
    # Bug fix: the original passed lttp (latitude type) here, leaving the
    # freshly computed tmtp unused
    vartm = qout.createVariable('time',tmtp,['time'], fill_value = -9999)
    vartm[:] = tms
    vartm.long_name = 'time'
    vartm.units = tmunit
    vartm.missing_value = -9999

    # Other output variables (filled by the matchup routine)
    varcfrc1 = qout.createVariable('AIRS_CldFrac_1','f4',['time','lat','lon','AIRSFOV'], fill_value = -9999)
    varcfrc1.long_name = 'AIRS cloud fraction, upper level'
    varcfrc1.units = 'unitless'
    varcfrc1.missing_value = -9999

    varcfrc2 = qout.createVariable('AIRS_CldFrac_2','f4',['time','lat','lon','AIRSFOV'], fill_value = -9999)
    varcfrc2.long_name = 'AIRS cloud fraction, lower level'
    varcfrc2.units = 'unitless'
    varcfrc2.missing_value = -9999

    # Bug fix: the integer variables are filled with -99 (their fill_value and
    # the value the matchup routine writes), so missing_value now agrees
    varcqc1 = qout.createVariable('AIRS_CldFrac_QC_1','i2',['time','lat','lon','AIRSFOV'], fill_value = -99)
    varcqc1.long_name = 'AIRS cloud fraction quality flag, upper level'
    varcqc1.units = 'unitless'
    varcqc1.missing_value = -99

    varcqc2 = qout.createVariable('AIRS_CldFrac_QC_2','i2',['time','lat','lon','AIRSFOV'], fill_value = -99)
    varcqc2.long_name = 'AIRS cloud fraction quality flag, lower level'
    varcqc2.units = 'unitless'
    varcqc2.missing_value = -99

    varncld = qout.createVariable('AIRS_nCld','i2',['time','lat','lon','AIRSFOV'], fill_value = -99)
    varncld.long_name = 'AIRS number of cloud layers'
    varncld.units = 'unitless'
    varncld.missing_value = -99

    qout.close()
    return
def airs_cfrac_match_merra(flnm, tmidx, tmday, lats, lons, msgvl = -9999, \
                           l2srch = '/archive/AIRSOps/airs/gdaac/v6'):
    """Match AIRS Level 2 cloud fractions to the MERRA grid for one time step
    and write them into an existing output file (created by setup_airs_cloud).

    flnm:   Name of output file
    tmidx:  Time index in output
    tmday:  Datetime object with time information
    lats:   Latitude variable array
    lons:   Longitude variable array
    msgvl:  Missing value for the cloud fraction arrays
    l2srch: AIRS Level 2 search directory root
    """
    # Search AIRS Level 2 for the day in question
    airsdr = '%s/%04d/%02d/%02d/airs2ret' % (l2srch,tmday.year,tmday.month,tmday.day)
    asclst = []
    nlat = lats.shape[0]
    nlon = lons.shape[0]
    # Lat/lon search window: grid extent padded by 5 degrees
    lonmn = lons[0] - 5.0
    lonmx = lons[nlon-1] + 5.0
    latmn = lats[0] - 5.0
    latmx = lats[nlat-1] + 5.0

    # AIRS times are seconds since the 1993-01-01 TAI epoch
    d0 = datetime.datetime(1993,1,1,0,0,0)
    ddif = tmday - d0
    bsdif = ddif.total_seconds()

    # Reference frame for the MERRA grid
    ltrp = numpy.repeat(lats,nlon)
    ltidx = numpy.repeat(numpy.arange(nlat),nlon)
    lnrp = numpy.tile(lons,nlat)
    lnidx = numpy.tile(numpy.arange(nlon),nlat)
    merfrm = pandas.DataFrame({'GridLonIdx': lnidx, 'GridLatIdx': ltidx, \
                               'GridLon': lnrp, 'GridLat': ltrp})

    if (os.path.exists(airsdr)):
        fllst = os.listdir(airsdr)
        for j in range(len(fllst)):
            lncr = len(fllst[j])
            l4 = lncr - 4
            if (fllst[j][l4:lncr] == '.hdf'):
                l2fl = '%s/%s' % (airsdr,fllst[j])
                ncl2 = Dataset(l2fl)
                slrzn = ncl2.variables['solzen'][:,:]
                l2lat = ncl2.variables['Latitude'][:,:]
                l2lon = ncl2.variables['Longitude'][:,:]
                l2tm = ncl2.variables['Time'][:,:]
                ncl2.close()
                # Check time proximity and lat/lon overlap
                l2tmdf = numpy.absolute(l2tm - bsdif)
                l2mntm = numpy.min(l2tmdf)
                # Within 4 hours of the requested time
                if l2mntm < 14400.0:
                    ltflt = l2lat.flatten()
                    lnflt = l2lon.flatten()
                    latsb = ltflt[(ltflt >= latmn) & (ltflt <= latmx)]
                    lonsb = lnflt[(lnflt >= lonmn) & (lnflt <= lonmx)]
                    if ( (latsb.shape[0] > 0) and (lonsb.shape[0] > 0) ):
                        asclst.append(fllst[j])
                        sstr = '%s %.2f' % (fllst[j], l2mntm)
                        print(sstr)

    # Set up outputs, initialized to missing
    cld1arr = numpy.zeros((nlat,nlon,9),dtype=numpy.float32) + msgvl
    cld2arr = numpy.zeros((nlat,nlon,9),dtype=numpy.float32) + msgvl
    cld1qc = numpy.zeros((nlat,nlon,9),dtype=numpy.int16) - 99
    cld2qc = numpy.zeros((nlat,nlon,9),dtype=numpy.int16) - 99
    ncldarr = numpy.zeros((nlat,nlon,9),dtype=numpy.int16) - 99

    if (len(asclst) > 0):
        # Start matchups
        for j in range(len(asclst)):
            l2fl = '%s/%s' % (airsdr,asclst[j])
            ncl2 = Dataset(l2fl)
            l2lat = ncl2.variables['Latitude'][:,:]
            l2lon = ncl2.variables['Longitude'][:,:]
            cfrcair = ncl2.variables['CldFrcStd'][:,:,:,:,:]
            cfrcaqc = ncl2.variables['CldFrcStd_QC'][:,:,:,:,:]
            ncldair = ncl2.variables['nCld'][:,:,:,:]
            ncl2.close()

            nairtrk = l2lat.shape[0]
            nairxtk = l2lat.shape[1]
            # Snap L2 footprints to the MERRA grid (0.625 x 0.5 degree)
            tkidx = numpy.repeat(numpy.arange(nairtrk),nairxtk)
            xtidx = numpy.tile(numpy.arange(nairxtk),nairtrk)
            l2lnflt = l2lon.flatten().astype(numpy.float64)
            l2ltflt = l2lat.flatten().astype(numpy.float64)
            l2frm = pandas.DataFrame({'L2LonIdx': xtidx, 'L2LatIdx': tkidx, \
                                      'L2Lon': l2lnflt, 'L2Lat': l2ltflt})
            l2frm['GridLon'] = numpy.around(l2frm['L2Lon']/0.625) * 0.625
            l2frm['GridLat'] = numpy.around(l2frm['L2Lat']/0.5) * 0.5
            l2mrg = pandas.merge(l2frm,merfrm,on=['GridLon','GridLat'])
            print(l2mrg.shape)

            # Copy matched 3x3 FOV blocks into the grid arrays
            for k in range(l2mrg.shape[0]):
                yidxout = l2mrg['GridLatIdx'].values[k]
                # Bug fix: the x coordinate now uses the longitude index
                # (the original reused GridLatIdx for both axes)
                xidxout = l2mrg['GridLonIdx'].values[k]
                yidxl2 = l2mrg['L2LatIdx'].values[k]
                xidxl2 = l2mrg['L2LonIdx'].values[k]
                cld1arr[yidxout,xidxout,:] = cfrcair[yidxl2,xidxl2,:,:,0].flatten().astype(numpy.float32)
                cld2arr[yidxout,xidxout,:] = cfrcair[yidxl2,xidxl2,:,:,1].flatten().astype(numpy.float32)
                cld1qc[yidxout,xidxout,:] = cfrcaqc[yidxl2,xidxl2,:,:,0].flatten().astype(numpy.int16)
                cld2qc[yidxout,xidxout,:] = cfrcaqc[yidxl2,xidxl2,:,:,1].flatten().astype(numpy.int16)
                ncldarr[yidxout,xidxout,:] = ncldair[yidxl2,xidxl2,:,:].flatten().astype(numpy.int16)

    # Write this time step into the existing output file
    qout = Dataset(flnm,'r+')
    varcfrc1 = qout.variables['AIRS_CldFrac_1']
    varcfrc1[tmidx,:,:,:] = cld1arr[:,:,:]
    varcfrc2 = qout.variables['AIRS_CldFrac_2']
    varcfrc2[tmidx,:,:,:] = cld2arr[:,:,:]
    varcfqc1 = qout.variables['AIRS_CldFrac_QC_1']
    varcfqc1[tmidx,:,:,:] = cld1qc[:,:,:]
    varcfqc2 = qout.variables['AIRS_CldFrac_QC_2']
    varcfqc2[tmidx,:,:,:] = cld2qc[:,:,:]
    varncld = qout.variables['AIRS_nCld']
    varncld[tmidx,:,:,:] = ncldarr[:,:,:]
    qout.close()
    return
def quantile_allstate_locmask_conus(rfdr, mtlst, cslst, airslst, dtdr, yrlst, mnst, mnfn, hrchc, rgchc, sstr, mskvr, mskvl):
# Construct quantiles and z-scores, with a possibly irregular location mask,
# for joint atmospheric state (AIRS/SARTA)
# rfdr: Directory for reference data (Levels/Quantiles)
# mtlst: Meteorology (MERRA) file list
# cslst: Cloud slab file list
# airslst: AIRS cloud fraction file list
# dtdr: Output directory
# yrlst: List of years to process
# mnst: Starting Month
# mnfn: Ending Month
# hrchc: Template Hour Choice
# rgchc: Template Region Choice
# sstr: Season string
# mskvr: Name of region mask variable
# mskvl: Value of region mask for Region Choice
# Read probs and pressure levels
rnm = '%s/AIRS_Levels_Quantiles.nc' % (rfdr)
f = Dataset(rnm,'r')
plev = f['level'][:]
prbs = f['probability'][:]
alts = f['altitude'][:]
f.close()
nyr = len(yrlst)
nprb = prbs.shape[0]
# RN generator
sdchc = 542354 + yrlst[0] + hrchc
random.seed(sdchc)
# Mask, lat, lon
f = Dataset(mtlst[0],'r')
mask = f.variables[mskvr][:,:]
latmet = f.variables['lat'][:]
lonmet = f.variables['lon'][:]
tminf = f.variables['time'][:]
tmunit = f.variables['time'].units[:]
f.close()
mskind = numpy.zeros((mask.shape),dtype=mask.dtype)
print(mskvl)
mskind[mask == mskvl] = 1
lnsq = numpy.arange(lonmet.shape[0])
ltsq = numpy.arange(latmet.shape[0])
# Subset a bit
lnsm = numpy.sum(mskind,axis=0)
ltsm = numpy.sum(mskind,axis=1)
lnmn = numpy.amin(lnsq[lnsm > 0])
lnmx = numpy.amax(lnsq[lnsm > 0]) + 1
ltmn = numpy.amin(ltsq[ltsm > 0])
ltmx = numpy.amax(ltsq[ltsm > 0]) + 1
stridx = 'Lon Range: %d, %d\nLat Range: %d, %d \n' % (lnmn,lnmx,ltmn,ltmx)
print(stridx)
nx = lnmx - lnmn
ny = ltmx - ltmn
nzout = 101
lnrp = numpy.tile(lonmet[lnmn:lnmx],ny)
ltrp = numpy.repeat(latmet[ltmn:ltmx],nx)
mskblk = mskind[ltmn:ltmx,lnmn:lnmx]
mskflt = mskblk.flatten()
tsmp = 0
for k in range(nyr):
f = Dataset(mtlst[k],'r')
tminf = f.variables['time'][:]
tmunit = f.variables['time'].units[:]
f.close()
tmunit = tmunit.replace("days since ","")
dybs = datetime.datetime.strptime(tmunit,"%Y-%m-%d %H:%M:%S")
print(dybs)
dy0 = dybs + datetime.timedelta(days=tminf[0])
dyinit = datetime.date(dy0.year,dy0.month,dy0.day)
print(dyinit)
dyst = datetime.date(yrlst[k],mnst,1)
ttst = dyst.timetuple()
jst = ttst.tm_yday
if mnfn < mnst:
dyfn = datetime.date(yrlst[k]+1,mnfn+1,1)
ttfn = dyfn.timetuple()
jfn = ttfn.tm_yday
dy31 = datetime.date(yrlst[k],12,31)
tt31 = dy31.timetuple()
jftmp = tt31.tm_yday + 1
jsq1 = numpy.arange(jst,jftmp)
jsq2 = numpy.arange(1,jfn)
jdsq = numpy.append(jsq1,jsq2)
elif mnfn < 12:
dyfn = datetime.date(yrlst[k],mnfn+1,1)
ttfn = dyfn.timetuple()
jfn = ttfn.tm_yday
jdsq = numpy.arange(jst,jfn)
else:
dyfn = datetime.date(yrlst[k]+1,1,1)
dy31 = datetime.date(yrlst[k],12,31)
tt31 = dy31.timetuple()
jfn = tt31.tm_yday + 1
print(dyst)
print(dyfn)
dystidx = abs((dyst-dyinit).days)
dyfnidx = abs((dyfn-dyinit).days)
print(jdsq)
tmhld = numpy.repeat(jdsq,nx*ny)
stridx = 'Day Range: %d, %d\n' % (dystidx,dyfnidx)
print(stridx)
# Cloud slab: HDF5 or NetCDF
lncr = len(cslst[k])
l3 = lncr - 3
if (cslst[k][l3:lncr] == '.h5'):
f = h5py.File(cslst[k],'r')
tms = f['/time'][:,dystidx:dyfnidx]
ctyp1 = f['/ctype'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
ctyp2 = f['/ctype2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprt1 = f['/cprtop'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprt2 = f['/cprtop2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprb1 = f['/cprbot'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprb2 = f['/cprbot2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cfrc1 = f['/cfrac'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cfrc2 = f['/cfrac2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cfrc12 = f['/cfrac12'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cngwt1 = f['/cngwat'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cngwt2 = f['/cngwat2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cttp1 = f['/cstemp'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cttp2 = f['/cstemp2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
f.close()
elif (cslst[k][l3:lncr] == '.nc'):
f = Dataset(cslst[k],'r')
tms = f.variables['time'][dystidx:dyfnidx]
ctyp1 = f.variables['ctype1'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
ctyp2 = f.variables['ctype2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprt1 = f.variables['cprtop1'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprt2 = f.variables['cprtop2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprb1 = f.variables['cprbot1'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cprb2 = f.variables['cprbot2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cngwt1 = f.variables['cngwat1'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cngwt2 = f.variables['cngwat2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cttp1 = f.variables['cstemp1'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
cttp2 = f.variables['cstemp2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
f.close()
tmflt = tms.flatten()
nt = tmflt.shape[0]
lnhld = numpy.tile(lnrp,nt)
lthld = numpy.tile(ltrp,nt)
# MERRA variables
f = Dataset(mtlst[k],'r')
psfc = f.variables['spres'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
stparr = f.variables['stemp'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
salinf = f.variables['salti']
if salinf.ndim == 3:
salarr = f.variables['salti'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx]
elif salinf.ndim == 2:
salarr = f.variables['salti'][ltmn:ltmx,lnmn:lnmx]
tmparr = f.variables['ptemp'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
h2oarr = f.variables['rh'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
altarr = f.variables['palts'][dystidx:dyfnidx,:,ltmn:ltmx,lnmn:lnmx]
f.close()
# Mask
print(ctyp1.shape)
nt = ctyp1.shape[0]
mskall = numpy.tile(mskflt,nt)
msksq = numpy.arange(mskall.shape[0])
msksb = msksq[mskall > 0]
mskstr = 'Total Obs: %d, Within Mask: %d \n' % (msksq.shape[0],msksb.shape[0])
print(mskstr)
nslbtmp = numpy.zeros((ctyp1.shape),dtype=numpy.int16)
nslbtmp[(ctyp1 > 100) & (ctyp2 > 100)] = 2
nslbtmp[(ctyp1 > 100) & (ctyp2 < 100)] = 1
# AIRS clouds
f = Dataset(airslst[k],'r')
arsfrc1 = f.variables['AIRS_CldFrac_1'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx,:]
arsfrc2 = f.variables['AIRS_CldFrac_2'][dystidx:dyfnidx,ltmn:ltmx,lnmn:lnmx,:]
f.close()
# Sum
frctot = arsfrc1 + arsfrc2
# Construct Clr/PC/Ovc indicator for AIRS total cloud frac
totclr = numpy.zeros(frctot.shape,dtype=numpy.int16)
totclr[frctot == 0.0] = -1
totclr[frctot == 1.0] = 1
totclr = ma.masked_array(totclr, mask = frctot.mask)
frc0 = frctot[:,:,:,0]
frc0 = frc0.flatten()
frcsq = numpy.arange(tmhld.shape[0])
# Subset by AIRS matchup and location masks
frcsb = frcsq[(numpy.logical_not(frc0.mask)) & (mskall > 0)]
nairs = frcsb.shape[0]
print(tmhld.shape)
print(frcsb.shape)
ctyp1 = ctyp1.flatten()
ctyp2 = ctyp2.flatten()
nslbtmp = nslbtmp.flatten()
cngwt1 = cngwt1.flatten()
cngwt2 = cngwt2.flatten()
cttp1 = cttp1.flatten()
cttp2 = cttp2.flatten()
psfc = psfc.flatten()
# Number of slabs
if tsmp == 0:
nslabout = numpy.zeros((nairs,),dtype=numpy.int16)
nslabout[:] = nslbtmp[frcsb]
else:
nslabout = numpy.append(nslabout,nslbtmp[frcsb])
# For two slabs, slab 1 must have highest cloud bottom pressure
cprt1 = cprt1.flatten()
cprt2 = cprt2.flatten()
cprb1 = cprb1.flatten()
cprb2 = cprb2.flatten()
slabswap = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16)
swpsq = frcsq[(nslbtmp == 2) & (cprb1 < cprb2)]
slabswap[swpsq] = 1
# Cloud Pressure variables
pbttmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
pbttmp1[nslbtmp >= 1] = cprb1[nslbtmp >= 1]
pbttmp1[swpsq] = cprb2[swpsq]
ptptmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
ptptmp1[nslbtmp >= 1] = cprt1[nslbtmp >= 1]
ptptmp1[swpsq] = cprt2[swpsq]
pbttmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
pbttmp2[nslbtmp == 2] = cprb2[nslbtmp == 2]
pbttmp2[swpsq] = cprb1[swpsq]
ptptmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
ptptmp2[nslbtmp == 2] = cprt2[nslbtmp == 2]
ptptmp2[swpsq] = cprt1[swpsq]
# DP Cloud transformation
dptmp1 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
dptmp1[nslbtmp >= 1] = pbttmp1[nslbtmp >= 1] - ptptmp1[nslbtmp >= 1]
dpslbtmp = numpy.zeros((ctyp1.shape[0],)) - 9999.0
dpslbtmp[nslbtmp == 2] = ptptmp1[nslbtmp == 2] - pbttmp2[nslbtmp == 2]
dptmp2 = numpy.zeros((ctyp1.shape[0],)) - 9999.0
dptmp2[nslbtmp == 2] = pbttmp2[nslbtmp == 2] - ptptmp2[nslbtmp == 2]
# Adjust negative DPSlab values
dpnsq = frcsq[(nslbtmp == 2) & (dpslbtmp <= 0.0) & (dpslbtmp > -1000.0)]
dpadj = numpy.zeros((ctyp1.shape[0],))
dpadj[dpnsq] = numpy.absolute(dpslbtmp[dpnsq])
dpslbtmp[dpnsq] = 10.0
dptmp1[dpnsq] = dptmp1[dpnsq] / 2.0
dptmp2[dpnsq] = dptmp2[dpnsq] / 2.0
# Sigma / Logit Adjustments
zpbtmp = numpy.zeros((psfc.shape[0],)) - 9999.0
zdp1tmp = numpy.zeros((psfc.shape[0],)) - 9999.0
zdslbtmp = numpy.zeros((psfc.shape[0],)) - 9999.0
zdp2tmp = numpy.zeros((psfc.shape[0],)) - 9999.0
ncldct = 0
for t in range(psfc.shape[0]):
if ( (pbttmp1[t] >= 0.0) and (dpslbtmp[t] >= 0.0) ):
prptmp = numpy.array( [ (psfc[t] - pbttmp1[t]) / psfc[t], \
dptmp1[t] / psfc[t], dpslbtmp[t] / psfc[t], \
dptmp2[t] / psfc[t], 0.0 ] )
if (prptmp[0] < 0.0):
# Adjustment needed
prpadj = prptmp[0]
prptmp[0] = 0.01
prptmp[1] = prptmp[1] + prpadj*prptmp[1]
prptmp[2] = prptmp[2] + prpadj*prptmp[2]
prptmp[3] = prptmp[3] + prpadj*prptmp[3]
ncldct = ncldct + 1
elif (prptmp[0] == 0.0):
# Adjustment needed
prpadj = -0.01
prptmp[0] = 0.01
prptmp[1] = prptmp[1] + prpadj*prptmp[1]
prptmp[2] = prptmp[2] + prpadj*prptmp[2]
prptmp[3] = prptmp[3] + prpadj*prptmp[3]
ncldct = ncldct + 1
prptmp[4] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2] - prptmp[3]
ztmp = calculate_VPD.lgtzs(prptmp)
zpbtmp[t] = ztmp[0]
zdp1tmp[t] = ztmp[1]
zdslbtmp[t] = ztmp[2]
zdp2tmp[t] = ztmp[3]
elif ( pbttmp1[t] >= 0.0 ):
prptmp = numpy.array( [ (psfc[t] - pbttmp1[t]) / psfc[t], \
dptmp1[t] / psfc[t], 0.0 ] )
if (prptmp[0] < 0.0):
# Adjustment needed
prpadj = prptmp[0]
prptmp[0] = 0.01
prptmp[1] = prptmp[1] + prpadj*prptmp[1]
ncldct = ncldct + 1
elif (prptmp[0] == 0.0):
# Adjustment needed
prpadj = -0.01
prptmp[0] = 0.01
prptmp[1] = prptmp[1] + prpadj*prptmp[1]
ncldct = ncldct + 1
prptmp[2] = 1.0 - prptmp[0] - prptmp[1]
ztmp = calculate_VPD.lgtzs(prptmp)
zpbtmp[t] = ztmp[0]
zdp1tmp[t] = ztmp[1]
zdslbtmp[t] = -9999.0
zdp2tmp[t] = -9999.0
else:
zpbtmp[t] = -9999.0
zdp1tmp[t] = -9999.0
zdslbtmp[t] = -9999.0
zdp2tmp[t] = -9999.0
str1 = 'Cloud Bot Pres Below Sfc: %d ' % (ncldct)
print(str1)
if tsmp == 0:
psfcout = numpy.zeros((frcsb.shape[0],)) - 9999.0
psfcout[:] = psfc[frcsb]
prsbot1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
prsbot1out[:] = zpbtmp[frcsb]
dpcld1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
dpcld1out[:] = zdp1tmp[frcsb]
dpslbout = numpy.zeros((frcsb.shape[0],)) - 9999.0
dpslbout[:] = zdslbtmp[frcsb]
dpcld2out = numpy.zeros((frcsb.shape[0],)) - 9999.0
dpcld2out[:] = zdp2tmp[frcsb]
else:
psfcout = numpy.append(psfcout,psfc[frcsb])
prsbot1out = numpy.append(prsbot1out,zpbtmp[frcsb])
dpcld1out = numpy.append(dpcld1out,zdp1tmp[frcsb])
dpslbout = numpy.append(dpslbout,zdslbtmp[frcsb])
dpcld2out = numpy.append(dpcld2out,zdp2tmp[frcsb])
# Slab Types: 101.0 = Liquid, 201.0 = Ice, None else
# Output: 0 = Liquid, 1 = Ice
typtmp1 = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16) - 99
typtmp1[nslbtmp >= 1] = (ctyp1[nslbtmp >= 1] - 1.0) / 100.0 - 1.0
typtmp1[swpsq] = (ctyp2[swpsq] - 1.0) / 100.0 - 1.0
typtmp2 = numpy.zeros((ctyp1.shape[0],),dtype=numpy.int16) - 99
typtmp2[nslbtmp == 2] = (ctyp2[nslbtmp == 2] - 1.0) / 100.0 - 1.0
typtmp2[swpsq] = (ctyp1[swpsq] - 1.0) / 100.0 - 1.0
if tsmp == 0:
slbtyp1out = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
slbtyp1out[:] = typtmp1[frcsb]
slbtyp2out = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
slbtyp2out[:] = typtmp2[frcsb]
else:
slbtyp1out = numpy.append(slbtyp1out,typtmp1[frcsb])
slbtyp2out = numpy.append(slbtyp2out,typtmp2[frcsb])
# Cloud Cover Indicators
totclrtmp = numpy.zeros((frcsb.shape[0],3,3),dtype=numpy.int16)
frctottmp = numpy.zeros((frcsb.shape[0],3,3),dtype=frctot.dtype)
cctr = 0
for frw in range(3):
for fcl in range(3):
clrvec = totclr[:,:,:,cctr].flatten()
frcvec = frctot[:,:,:,cctr].flatten()
totclrtmp[:,frw,fcl] = clrvec[frcsb]
frctottmp[:,frw,fcl] = frcvec[frcsb]
cctr = cctr + 1
if tsmp == 0:
totclrout = numpy.zeros(totclrtmp.shape,dtype=numpy.int16)
totclrout[:,:,:] = totclrtmp
frctotout = numpy.zeros(frctottmp.shape,dtype=frctottmp.dtype)
frctotout[:,:,:] = frctottmp
else:
totclrout = numpy.append(totclrout,totclrtmp,axis=0)
frctotout = numpy.append(frctotout,frctottmp,axis=0)
# Cloud Fraction Logit, still account for swapping
#z1tmp = numpy.zeros((frcsb.shape[0],3,3)) - 9999.0
z2tmp = numpy.zeros((frcsb.shape[0],3,3)) - 9999.0
z12tmp = numpy.zeros((frcsb.shape[0],3,3)) - 9999.0
# Cloud Fraction
cctr = 0
for frw in range(3):
for fcl in range(3):
frcvect = frctot[:,:,:,cctr].flatten()
frcvec1 = arsfrc1[:,:,:,cctr].flatten()
frcvec2 = arsfrc2[:,:,:,cctr].flatten()
# Quick fix for totals over 1.0
fvsq = numpy.arange(frcvect.shape[0])
fvsq2 = fvsq[frcvect > 1.0]
frcvect[fvsq2] = frcvect[fvsq2] / 1.0
frcvec1[fvsq2] = frcvec1[fvsq2] / 1.0
frcvec2[fvsq2] = frcvec2[fvsq2] / 1.0
for t in range(nairs):
crslb = nslbtmp[frcsb[t]]
crclr = totclrtmp[t,frw,fcl]
if ( (crslb == 0) or (crclr == -1) ):
#z1tmp[t,frw,fcl] = -9999.0
z2tmp[t,frw,fcl] = -9999.0
z12tmp[t,frw,fcl] = -9999.0
elif ( (crslb == 1) and (crclr == 1) ):
#z1tmp[t,frw,fcl] = -9999.0
z2tmp[t,frw,fcl] = -9999.0
z12tmp[t,frw,fcl] = -9999.0
elif ( (crslb == 1) and (crclr == 0) ):
#prptmp = numpy.array( [frcvect[frcsb[t]], 1.0 - frcvect[frcsb[t]] ] )
#ztmp = calculate_VPD.lgtzs(prptmp)
#z1tmp[t,frw,fcl] = ztmp[0]
z2tmp[t,frw,fcl] = -9999.0
z12tmp[t,frw,fcl] = -9999.0
# For 2 slabs, recall AIRS cloud layers go upper/lower, ours is opposite
# Also apply random overlap adjust AIRS zero values
elif ( (crslb == 2) and (crclr == 0) ):
frcs = numpy.array([frcvec2[frcsb[t]],frcvec1[frcsb[t]]])
if (numpy.sum(frcs) < 0.01):
frcs[0] = 0.005
frcs[1] = 0.005
elif frcs[0] < 0.005:
frcs[0] = 0.005
frcs[1] = frcs[1] - 0.005
elif frcs[1] < 0.005:
frcs[1] = 0.005
frcs[0] = frcs[0] - 0.005
mnfrc = numpy.amin(frcs)
c12tmp = random.uniform(0.0,mnfrc,size=1)
prptmp = numpy.array( [frcs[0] - c12tmp[0]*frcs[1], \
frcs[1] - c12tmp[0]*frcs[0], c12tmp[0], 0.0])
prptmp[3] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2]
prpcld = (prptmp[0] + prptmp[1] + prptmp[2])
prpfnl = numpy.array([prptmp[1] / prpcld, prptmp[2] / prpcld, prptmp[0] / prpcld])
ztmp = calculate_VPD.lgtzs(prpfnl)
#z1tmp[t,frw,fcl] = ztmp[0]
z2tmp[t,frw,fcl] = ztmp[0]
z12tmp[t,frw,fcl] = ztmp[1]
elif ( (crslb == 2) and (crclr == 1) ):
frcs = numpy.array([frcvec2[frcsb[t]],frcvec1[frcsb[t]]])
if frcs[0] < 0.005:
frcs[0] = 0.005
frcs[1] = frcs[1] - 0.005
elif frcs[1] < 0.005:
frcs[1] = 0.005
frcs[0] = frcs[0] - 0.005
mnfrc = numpy.amin(frcs)
c12tmp = random.uniform(0.0,mnfrc,size=1)
prptmp = numpy.array( [0.999 * (frcs[0] - c12tmp[0]*frcs[1]), \
0.999 * (frcs[1] - c12tmp[0]*frcs[0]), 0.999 * c12tmp[0], 0.001])
prptmp[3] = 1.0 - prptmp[0] - prptmp[1] - prptmp[2]
prpcld = (prptmp[0] + prptmp[1] + prptmp[2])
prpfnl = numpy.array([prptmp[1] / prpcld, prptmp[2] / prpcld, prptmp[0] / prpcld])
ztmp = calculate_VPD.lgtzs(prpfnl)
#z1tmp[t,frw,fcl] = ztmp[0]
z2tmp[t,frw,fcl] = ztmp[0]
z12tmp[t,frw,fcl] = ztmp[1]
cctr = cctr + 1
if tsmp == 0:
#cfclgt1out = numpy.zeros(z1tmp.shape)
#cfclgt1out[:,:,:] = z1tmp
cfclgt2out = numpy.zeros(z2tmp.shape)
cfclgt2out[:,:,:] = z2tmp
cfclgt12out = numpy.zeros(z12tmp.shape)
cfclgt12out[:,:,:] = z12tmp
else:
#cfclgt1out = numpy.append(cfclgt1out,z1tmp,axis=0)
cfclgt2out = numpy.append(cfclgt2out,z2tmp,axis=0)
cfclgt12out = numpy.append(cfclgt12out,z12tmp,axis=0)
# Cloud Non-Gas Water
ngwttmp1 = numpy.zeros(cngwt1.shape[0]) - 9999.0
ngwttmp1[nslbtmp >= 1] = cngwt1[nslbtmp >= 1]
ngwttmp1[swpsq] = cngwt2[swpsq]
ngwttmp2 = numpy.zeros(cngwt1.shape[0]) - 9999.0
ngwttmp2[nslbtmp == 2] = cngwt2[nslbtmp == 2]
ngwttmp2[swpsq] = cngwt1[swpsq]
if tsmp == 0:
ngwt1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
ngwt1out[:] = ngwttmp1[frcsb]
ngwt2out = numpy.zeros((frcsb.shape[0],)) - 9999.0
ngwt2out[:] = ngwttmp2[frcsb]
else:
ngwt1out = numpy.append(ngwt1out,ngwttmp1[frcsb])
ngwt2out = numpy.append(ngwt2out,ngwttmp2[frcsb])
# Cloud Top Temperature
cttptmp1 = numpy.zeros(cttp1.shape[0]) - 9999.0
cttptmp1[nslbtmp >= 1] = cttp1[nslbtmp >= 1]
cttptmp1[swpsq] = cttp2[swpsq]
cttptmp2 = numpy.zeros(cttp1.shape[0]) - 9999.0
cttptmp2[nslbtmp == 2] = cttp2[nslbtmp == 2]
cttptmp2[swpsq] = cttp1[swpsq]
if tsmp == 0:
cttp1out = numpy.zeros((frcsb.shape[0],)) - 9999.0
cttp1out[:] = cttptmp1[frcsb]
cttp2out = numpy.zeros((frcsb.shape[0],)) - 9999.0
cttp2out[:] = cttptmp2[frcsb]
else:
cttp1out = numpy.append(cttp1out,cttptmp1[frcsb])
cttp2out = numpy.append(cttp2out,cttptmp2[frcsb])
# Temp/RH profiles
tmptmp = numpy.zeros((nairs,nzout))
h2otmp = numpy.zeros((nairs,nzout))
alttmp = numpy.zeros((nairs,nzout))
for j in range(nzout):
tmpvec = tmparr[:,j,:,:].flatten()
tmpvec[tmpvec > 1e30] = -9999.
tmptmp[:,j] = tmpvec[frcsb]
altvec = altarr[:,j,:,:].flatten()
alttmp[:,j] = altvec[frcsb]
h2ovec = h2oarr[:,j,:,:].flatten()
h2ovec[h2ovec > 1e30] = -9999.
h2otmp[:,j] = h2ovec[frcsb]
if tsmp == 0:
tmpmerout = numpy.zeros(tmptmp.shape)
tmpmerout[:,:] = tmptmp
h2omerout = numpy.zeros(h2otmp.shape)
h2omerout[:,:] = h2otmp
altout = numpy.zeros(alttmp.shape)
altout[:,:] = alttmp
else:
tmpmerout = numpy.append(tmpmerout,tmptmp,axis=0)
h2omerout = numpy.append(h2omerout,h2otmp,axis=0)
altout = numpy.append(altout,alttmp,axis=0)
# Surface
stparr = stparr.flatten()
psfarr = psfc.flatten()
if salarr.ndim == 2:
salarr = salarr.flatten()
salfl = numpy.tile(salarr[:],nt)
elif salarr.ndim == 3:
salfl = salarr.flatten()
if tsmp == 0:
sftmpout = numpy.zeros((nairs,)) - 9999.0
sftmpout[:] = stparr[frcsb]
psfcout = numpy.zeros((nairs,)) - 9999.0
psfcout[:] = psfarr[frcsb]
sfaltout = numpy.zeros((nairs,)) - 9999.0
sfaltout[:] = salfl[frcsb]
else:
sftmpout = numpy.append(sftmpout,stparr[frcsb])
psfcout = numpy.append(psfcout,psfarr[frcsb])
sfaltout = numpy.append(sfaltout,salfl[frcsb])
# Loc/Time
if tsmp == 0:
latout = numpy.zeros((frcsb.shape[0],)) - 9999.0
latout[:] = lthld[frcsb]
lonout = numpy.zeros((frcsb.shape[0],)) - 9999.0
lonout[:] = lnhld[frcsb]
yrout = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
yrout[:] = yrlst[k]
jdyout = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
jdyout[:] = tmhld[frcsb]
else:
latout = numpy.append(latout,lthld[frcsb])
lonout = numpy.append(lonout,lnhld[frcsb])
yrtmp = numpy.zeros((frcsb.shape[0],),dtype=numpy.int16)
yrtmp[:] = yrlst[k]
yrout = numpy.append(yrout,yrtmp)
jdyout = numpy.append(jdyout,tmhld[frcsb])
tsmp = tsmp + nairs
# Process quantiles
nslbqs = calculate_VPD.quantile_msgdat_discrete(nslabout,prbs)
str1 = '%.2f Number Slab Quantile: %d' % (prbs[103],nslbqs[103])
print(str1)
print(nslbqs)
# psfcqs = calculate_VPD.quantile_msgdat(psfcout,prbs)
# str1 = '%.2f Surface Pressure Quantile: %.3f' % (prbs[53],psfcqs[53])
# print(str1)
prsbt1qs = calculate_VPD.quantile_msgdat(prsbot1out,prbs)
str1 = '%.2f CldBot1 Pressure Quantile: %.3f' % (prbs[103],prsbt1qs[103])
print(str1)
dpcld1qs = calculate_VPD.quantile_msgdat(dpcld1out,prbs)
str1 = '%.2f DPCloud1 Quantile: %.3f' % (prbs[103],dpcld1qs[103])
print(str1)
dpslbqs = calculate_VPD.quantile_msgdat(dpslbout,prbs)
str1 = '%.2f DPSlab Quantile: %.3f' % (prbs[103],dpslbqs[103])
print(str1)
dpcld2qs = calculate_VPD.quantile_msgdat(dpcld2out,prbs)
str1 = '%.2f DPCloud2 Quantile: %.3f' % (prbs[103],dpcld2qs[103])
print(str1)
slb1qs = calculate_VPD.quantile_msgdat_discrete(slbtyp1out,prbs)
str1 = '%.2f Type1 Quantile: %d' % (prbs[103],slb1qs[103])
print(str1)
slb2qs = calculate_VPD.quantile_msgdat_discrete(slbtyp2out,prbs)
str1 = '%.2f Type2 Quantile: %d' % (prbs[103],slb2qs[103])
print(str1)
# Indicators
totclrqout = numpy.zeros((3,3,nprb)) - 99
frctotqout = numpy.zeros((3,3,nprb)) - 9999.0
#lgt1qs = numpy.zeros((3,3,nprb)) - 9999.0
lgt2qs = numpy.zeros((3,3,nprb)) - 9999.0
lgt12qs = numpy.zeros((3,3,nprb)) - 9999.0
for frw in range(3):
for fcl in range(3):
tmpclr = calculate_VPD.quantile_msgdat_discrete(totclrout[:,frw,fcl],prbs)
totclrqout[frw,fcl,:] = tmpclr[:]
str1 = 'Clr/Ovc Indicator %d, %d %.2f Quantile: %d' % (frw,fcl,prbs[103],tmpclr[103])
print(str1)
tmpfrcq = calculate_VPD.quantile_msgdat(frctotout[:,frw,fcl],prbs)
frctotqout[frw,fcl,:] = tmpfrcq[:]
str1 = 'Tot Cld Frac %d, %d %.2f Quantile: %.4f' % (frw,fcl,prbs[103],tmpfrcq[103])
print(str1)
#tmplgtq = calculate_VPD.quantile_msgdat(cfclgt1out[:,frw,fcl],prbs)
#lgt1qs[frw,fcl,:] = tmplgtq[:]
tmplgtq = calculate_VPD.quantile_msgdat(cfclgt2out[:,frw,fcl],prbs)
lgt2qs[frw,fcl,:] = tmplgtq[:]
tmplgtq = calculate_VPD.quantile_msgdat(cfclgt12out[:,frw,fcl],prbs)
lgt12qs[frw,fcl,:] = tmplgtq[:]
str1 = 'CFrac Logit %d, %d %.2f Quantile: %.3f, %.3f' % (frw,fcl,prbs[103], \
lgt2qs[frw,fcl,103],lgt12qs[frw,fcl,103])
print(str1)
ngwt1qs = calculate_VPD.quantile_msgdat(ngwt1out,prbs)
str1 = '%.2f NGWater1 Quantile: %.3f' % (prbs[103],ngwt1qs[103])
print(str1)
ngwt2qs = calculate_VPD.quantile_msgdat(ngwt2out,prbs)
str1 = '%.2f NGWater2 Quantile: %.3f' % (prbs[103],ngwt2qs[103])
print(str1)
cttp1qs = calculate_VPD.quantile_msgdat(cttp1out,prbs)
str1 = '%.2f CTTemp1 Quantile: %.3f' % (prbs[103],cttp1qs[103])
print(str1)
cttp2qs = calculate_VPD.quantile_msgdat(cttp2out,prbs)
str1 = '%.2f CTTemp2 Quantile: %.3f' % (prbs[103],cttp2qs[103])
print(str1)
# Temp/RH Quantiles
tmpqout = numpy.zeros((nzout,nprb)) - 9999.
rhqout = numpy.zeros((nzout,nprb)) - 9999.
sftmpqs = numpy.zeros((nprb,)) - 9999.
sfaltqs = numpy.zeros((nprb,)) - 9999.
psfcqs = numpy.zeros((nprb,)) - 9999.
altmed = numpy.zeros((nzout,)) - 9999.
ztmpout = numpy.zeros((tsmp,nzout)) - 9999.
zrhout = numpy.zeros((tsmp,nzout)) - 9999.
zsftmpout = numpy.zeros((tsmp,)) - 9999.
zsfaltout = numpy.zeros((tsmp,)) - 9999.
zpsfcout = numpy.zeros((tsmp,)) - 9999.
# Quantiles
for j in range(nzout):
tmptmp = calculate_VPD.quantile_msgdat(tmpmerout[:,j],prbs)
tmpqout[j,:] = tmptmp[:]
str1 = 'Plev %.2f, %.2f Temp Quantile: %.3f' % (plev[j],prbs[103],tmptmp[103])
print(str1)
# Transform if some not missing
if (tmptmp[0] != -9999.):
ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(tmpmerout[:,j], tmptmp, prbs, msgval=-9999.)
ztmpout[:,j] = ztmp[:]
alttmp = calculate_VPD.quantile_msgdat(altout[:,j],prbs)
altmed[j] = alttmp[103]
str1 = 'Plev %.2f, %.2f Alt Quantile: %.3f' % (plev[j],prbs[103],alttmp[103])
print(str1)
# Adjust RH over 100
rhadj = h2omerout[:,j]
rhadj[rhadj > 1.0] = 1.0
rhqtmp = calculate_VPD.quantile_msgdat(rhadj,prbs)
rhqout[j,:] = rhqtmp[:]
str1 = 'Plev %.2f, %.2f RH Quantile: %.4f' % (plev[j],prbs[103],rhqtmp[103])
print(str1)
if (rhqtmp[0] != -9999.):
zrh = calculate_VPD.std_norm_quantile_from_obs_fill_msg(rhadj, rhqtmp, prbs, msgval=-9999.)
zrhout[:,j] = zrh[:]
h2omerout[:,j] = rhadj
psfcqs = calculate_VPD.quantile_msgdat(psfcout,prbs)
str1 = '%.2f PSfc Quantile: %.2f' % (prbs[103],psfcqs[103])
print(str1)
zpsfcout = calculate_VPD.std_norm_quantile_from_obs(psfcout, psfcqs, prbs, msgval=-9999.)
sftpqs = calculate_VPD.quantile_msgdat(sftmpout,prbs)
str1 = '%.2f SfcTmp Quantile: %.2f' % (prbs[103],sftpqs[103])
print(str1)
zsftmpout = calculate_VPD.std_norm_quantile_from_obs(sftmpout, sftpqs, prbs, msgval=-9999.)
sfalqs = calculate_VPD.quantile_msgdat(sfaltout,prbs)
str1 = '%.2f SfcAlt Quantile: %.2f' % (prbs[103],sfalqs[103])
print(str1)
zsfaltout = calculate_VPD.std_norm_quantile_from_obs(sfaltout, sfalqs, prbs, msgval=-9999.)
# Output Quantiles
qfnm = '%s/CONUS_AIRS_%s_%04d_%02dUTC_%s_State_Quantile.nc' % (dtdr,sstr,yrlst[k],hrchc,rgchc)
qout = Dataset(qfnm,'w')
dimp = qout.createDimension('probability',nprb)
dimfov1 = qout.createDimension('fovrow',3)
dimfov2 = qout.createDimension('fovcol',3)
dimz = qout.createDimension('level',nzout)
varlvl = qout.createVariable('level','f4',['level'], fill_value = -9999)
varlvl[:] = plev
varlvl.long_name = 'AIRS/SARTA pressure levels'
varlvl.units = 'hPa'
varlvl.missing_value = -9999
varprb = qout.createVariable('probability','f4',['probability'], fill_value = -9999)
varprb[:] = prbs
varprb.long_name = 'Probability break points'
varprb.units = 'none'
varprb.missing_value = -9999
varnslb = qout.createVariable('NumberSlab_quantile','i2',['probability'], fill_value = -99)
varnslb[:] = nslbqs
varnslb.long_name = 'Number of cloud slabs quantiles'
varnslb.units = 'Count'
varnslb.missing_value = -99
varcbprs = qout.createVariable('CloudBot1Logit_quantile','f4',['probability'], fill_value = -9999)
varcbprs[:] = prsbt1qs
varcbprs.long_name = 'Slab 1 cloud bottom pressure logit quantiles'
varcbprs.units = 'hPa'
varcbprs.missing_value = -9999
vardpc1 = qout.createVariable('DPCloud1Logit_quantile','f4',['probability'], fill_value = -9999)
vardpc1[:] = dpcld1qs
vardpc1.long_name = 'Slab 1 cloud pressure depth logit quantiles'
vardpc1.units = 'hPa'
vardpc1.missing_value = -9999
vardpslb = qout.createVariable('DPSlabLogit_quantile','f4',['probability'], fill_value = -9999)
vardpslb[:] = dpslbqs
vardpslb.long_name = 'Two-slab vertical separation logit quantiles'
vardpslb.units = 'hPa'
vardpslb.missing_value = -9999
vardpc2 = qout.createVariable('DPCloud2Logit_quantile','f4',['probability'], fill_value = -9999)
vardpc2[:] = dpcld2qs
vardpc2.long_name = 'Slab 2 cloud pressure depth logit quantiles'
vardpc2.units = 'hPa'
vardpc2.missing_value = -9999
vartyp1 = qout.createVariable('CType1_quantile','i2',['probability'], fill_value = -99)
vartyp1[:] = slb1qs
vartyp1.long_name = 'Slab 1 cloud type quantiles'
vartyp1.units = 'None'
vartyp1.missing_value = -99
vartyp1.comment = 'Cloud slab type: 0=Liquid, 1=Ice'
vartyp2 = qout.createVariable('CType2_quantile','i2',['probability'], fill_value = -99)
vartyp2[:] = slb2qs
vartyp2.long_name = 'Slab 2 cloud type quantiles'
vartyp2.units = 'None'
vartyp2.missing_value = -99
vartyp2.comment = 'Cloud slab type: 0=Liquid, 1=Ice'
varcvr = qout.createVariable('CCoverInd_quantile','i2',['fovrow','fovcol','probability'], fill_value = 99)
varcvr[:] = totclrqout
varcvr.long_name = 'Cloud cover indicator quantiles'
varcvr.units = 'None'
varcvr.missing_value = -99
varcvr.comment = 'Cloud cover indicators: -1=Clear, 0=Partly cloudy, 1=Overcast'
varfrc = qout.createVariable('TotCFrc_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
varfrc[:] = frctotqout
varfrc.long_name = 'Total cloud fraction quantiles'
varfrc.units = 'None'
varfrc.missing_value = -9999
#varlgt1 = qout.createVariable('CFrcLogit1_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
#varlgt1[:] = lgt1qs
#varlgt1.long_name = 'Slab 1 cloud fraction (cfrac1x) logit quantiles'
#varlgt1.units = 'None'
#varlgt1.missing_value = -9999
varlgt2 = qout.createVariable('CFrcLogit2_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
varlgt2[:] = lgt2qs
varlgt2.long_name = 'Slab 2 cloud fraction (cfrac2x) logit quantiles'
varlgt2.units = 'None'
varlgt2.missing_value = -9999
varlgt12 = qout.createVariable('CFrcLogit12_quantile','f4',['fovrow','fovcol','probability'], fill_value = -9999)
varlgt12[:] = lgt12qs
varlgt12.long_name = 'Slab 1/2 overlap fraction (cfrac12) logit quantiles'
varlgt12.units = 'None'
varlgt12.missing_value = -9999
varngwt1 = qout.createVariable('NGWater1_quantile','f4',['probability'], fill_value = -9999)
varngwt1[:] = ngwt1qs
varngwt1.long_name = 'Slab 1 cloud non-gas water quantiles'
varngwt1.units = 'g m^-2'
varngwt1.missing_value = -9999
varngwt2 = qout.createVariable('NGWater2_quantile','f4',['probability'], fill_value = -9999)
varngwt2[:] = ngwt2qs
varngwt2.long_name = 'Slab 2 cloud non-gas water quantiles'
varngwt2.units = 'g m^-2'
varngwt2.missing_value = -9999
varcttp1 = qout.createVariable('CTTemp1_quantile','f4',['probability'], fill_value = -9999)
varcttp1[:] = cttp1qs
varcttp1.long_name = 'Slab 1 cloud top temperature'
varcttp1.units = 'K'
varcttp1.missing_value = -9999
varcttp2 = qout.createVariable('CTTemp2_quantile','f4',['probability'], fill_value = -9999)
varcttp2[:] = cttp2qs
varcttp2.long_name = 'Slab 2 cloud top temperature'
varcttp2.units = 'K'
varcttp2.missing_value = -9999
# Altitude grid
varalt = qout.createVariable('Altitude_median', 'f4', ['level'], fill_value = -9999)
varalt[:] = altmed
varalt.long_name = 'Altitude median value'
varalt.units = 'm'
varalt.missing_value = -9999
vartmp = qout.createVariable('Temperature_quantile', 'f4', ['level','probability'], fill_value = -9999)
vartmp[:] = tmpqout
vartmp.long_name = 'Temperature quantiles'
vartmp.units = 'K'
vartmp.missing_value = -9999.
varrh = qout.createVariable('RH_quantile', 'f4', ['level','probability'], fill_value = -9999)
varrh[:] = rhqout
varrh.long_name = 'Relative humidity quantiles'
varrh.units = 'Unitless'
varrh.missing_value = -9999.
varstmp = qout.createVariable('SfcTemp_quantile', 'f4', ['probability'], fill_value = -9999)
varstmp[:] = sftpqs
varstmp.long_name = 'Surface temperature quantiles'
varstmp.units = 'K'
varstmp.missing_value = -9999.
varpsfc = qout.createVariable('SfcPres_quantile', 'f4', ['probability'], fill_value = -9999)
varpsfc[:] = psfcqs
varpsfc.long_name = 'Surface pressure quantiles'
varpsfc.units = 'hPa'
varpsfc.missing_value = -9999.
varsalt = qout.createVariable('SfcAlt_quantile', 'f4', ['probability'], fill_value = -9999)
varsalt[:] = sfalqs
varsalt.long_name = 'Surface altitude quantiles'
varsalt.units = 'm'
varsalt.missing_value = -9999.
qout.close()
# Set up transformations
zccvout = numpy.zeros((tsmp,3,3,)) - 9999.
zfrcout = numpy.zeros((tsmp,3,3,)) - 9999.
#zlgt1 = numpy.zeros((tsmp,3,3)) - 9999.
zlgt2 = numpy.zeros((tsmp,3,3)) - 9999.
zlgt12 = numpy.zeros((tsmp,3,3)) - 9999.
znslb = calculate_VPD.std_norm_quantile_from_obs(nslabout, nslbqs, prbs, msgval=-99)
zprsbt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(prsbot1out, prsbt1qs, prbs, msgval=-9999.)
zdpcld1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpcld1out, dpcld1qs, prbs, msgval=-9999.)
zdpslb = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpslbout, dpslbqs, prbs, msgval=-9999.)
zdpcld2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(dpcld2out, dpcld2qs, prbs, msgval=-9999.)
zctyp1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(slbtyp1out, slb1qs, prbs, msgval=-99)
zctyp2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(slbtyp2out, slb2qs, prbs, msgval=-99)
for frw in range(3):
for fcl in range(3):
ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(totclrout[:,frw,fcl], totclrqout[frw,fcl,:], \
prbs, msgval=-99)
zccvout[:,frw,fcl] = ztmp[:]
ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(frctotout[:,frw,fcl], frctotqout[frw,fcl,:], \
prbs, msgval=-9999.)
zfrcout[:,frw,fcl] = ztmp[:]
#ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt1out[:,frw,fcl], lgt1qs[frw,fcl,:], \
# prbs, msgval=-9999.)
#zlgt1[:,frw,fcl] = ztmp[:]
ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt2out[:,frw,fcl], lgt2qs[frw,fcl,:], \
prbs, msgval=-9999.)
zlgt2[:,frw,fcl] = ztmp[:]
ztmp = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cfclgt12out[:,frw,fcl], lgt12qs[frw,fcl,:], \
prbs, msgval=-9999.)
zlgt12[:,frw,fcl] = ztmp[:]
zngwt1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(ngwt1out, ngwt1qs, prbs, msgval=-9999.)
zngwt2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(ngwt2out, ngwt2qs, prbs, msgval=-9999.)
zcttp1 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cttp1out, cttp1qs, prbs, msgval=-9999.)
zcttp2 = calculate_VPD.std_norm_quantile_from_obs_fill_msg(cttp2out, cttp2qs, prbs, msgval=-9999.)
# Output transformed quantile samples
zfnm = '%s/CONUS_AIRS_%s_%04d_%02dUTC_%s_State_StdGausTrans.nc' % (dtdr,sstr,yrlst[k],hrchc,rgchc)
zout = Dataset(zfnm,'w')
dimsmp = zout.createDimension('sample',tsmp)
dimfov1 = zout.createDimension('fovrow',3)
dimfov2 = zout.createDimension('fovcol',3)
dimz = zout.createDimension('level',nzout)
varlon = zout.createVariable('Longitude','f4',['sample'])
varlon[:] = lonout
varlon.long_name = 'Longitude'
varlon.units = 'degrees_east'
varlat = zout.createVariable('Latitude','f4',['sample'])
varlat[:] = latout
varlat.long_name = 'Latitude'
varlat.units = 'degrees_north'
varlvl = zout.createVariable('level','f4',['level'], fill_value = -9999)
varlvl[:] = plev
varlvl.long_name = 'AIRS/SARTA pressure levels'
varlvl.units = 'hPa'
varlvl.missing_value = -9999
varjdy = zout.createVariable('JulianDay','i2',['sample'])
varjdy[:] = jdyout
varjdy.long_name = 'JulianDay'
varjdy.units = 'day'
varyr = zout.createVariable('Year','i2',['sample'])
varyr[:] = yrout
varyr.long_name = 'Year'
varyr.units = 'year'
varnslb = zout.createVariable('NumberSlab_StdGaus','f4',['sample'], fill_value = -9999)
varnslb[:] = znslb
varnslb.long_name = 'Quantile transformed number of cloud slabs'
varnslb.units = 'None'
varnslb.missing_value = -9999.
vdtnslb = zout.createVariable('NumberSlab_Data','i2',['sample'], fill_value = -99)
vdtnslb[:] = nslabout
vdtnslb.long_name = 'Number of cloud slabs'
vdtnslb.units = 'None'
vdtnslb.missing_value = -99
varcbprs = zout.createVariable('CloudBot1Logit_StdGaus','f4',['sample'], fill_value = -9999)
varcbprs[:] = zprsbt1
varcbprs.long_name = 'Quantile transformed slab 1 cloud bottom pressure logit'
varcbprs.units = 'None'
varcbprs.missing_value = -9999.
vdtcbprs = zout.createVariable('CloudBot1Logit_Data','f4',['sample'], fill_value = -9999)
vdtcbprs[:] = prsbot1out
vdtcbprs.long_name = 'Slab 1 cloud bottom pressure logit'
vdtcbprs.units = 'None'
vdtcbprs.missing_value = -9999.
vardpc1 = zout.createVariable('DPCloud1Logit_StdGaus','f4',['sample'], fill_value = -9999)
vardpc1[:] = zdpcld1
vardpc1.long_name = 'Quantile transformed slab 1 cloud pressure depth logit'
vardpc1.units = 'None'
vardpc1.missing_value = -9999.
vdtdpc1 = zout.createVariable('DPCloud1Logit_Data','f4',['sample'], fill_value = -9999)
vdtdpc1[:] = dpcld1out
vdtdpc1.long_name = 'Slab 1 cloud pressure depth logit'
vdtdpc1.units = 'None'
vdtdpc1.missing_value = -9999.
vardpslb = zout.createVariable('DPSlabLogit_StdGaus','f4',['sample'], fill_value = -9999)
vardpslb[:] = zdpslb
vardpslb.long_name = 'Quantile transformed two-slab vertical separation logit'
vardpslb.units = 'None'
vardpslb.missing_value = -9999.
vdtdpslb = zout.createVariable('DPSlabLogit_Data','f4',['sample'], fill_value = -9999)
vdtdpslb[:] = dpslbout
vdtdpslb.long_name = 'Two-slab vertical separation logit'
vdtdpslb.units = 'None'
vdtdpslb.missing_value = -9999.
vardpc2 = zout.createVariable('DPCloud2Logit_StdGaus','f4',['sample'], fill_value = -9999)
vardpc2[:] = zdpcld2
vardpc2.long_name = 'Quantile transformed slab 2 cloud pressure depth logit'
vardpc2.units = 'None'
vardpc2.missing_value = -9999.
vdtdpc2 = zout.createVariable('DPCloud2Logit_Data','f4',['sample'], fill_value = -9999)
vdtdpc2[:] = dpcld2out
vdtdpc2.long_name = 'Slab 2 cloud pressure depth logit'
vdtdpc2.units = 'None'
vdtdpc2.missing_value = -9999.
vartyp1 = zout.createVariable('CType1_StdGaus','f4',['sample'], fill_value = -9999)
vartyp1[:] = zctyp1
vartyp1.long_name = 'Quantile transformed slab 1 cloud type'
vartyp1.units = 'None'
vartyp1.missing_value = -9999.
vdttyp1 = zout.createVariable('CType1_Data','i2',['sample'], fill_value = -99)
vdttyp1[:] = slbtyp1out
vdttyp1.long_name = 'Slab 1 cloud type'
vdttyp1.units = 'None'
vdttyp1.missing_value = -99
vartyp2 = zout.createVariable('CType2_StdGaus','f4',['sample'], fill_value = -9999)
vartyp2[:] = zctyp2
vartyp2.long_name = 'Quantile transformed slab 2 cloud type'
vartyp2.units = 'None'
vartyp2.missing_value = -9999.
vdttyp2 = zout.createVariable('CType2_Data','i2',['sample'], fill_value = -99)
vdttyp2[:] = slbtyp2out
vdttyp2.long_name = 'Slab 2 cloud type logit'
vdttyp2.units = 'None'
vdttyp2.missing_value = -99
varcov = zout.createVariable('CCoverInd_StdGaus','f4',['sample','fovrow','fovcol'], fill_value= -9999)
varcov[:] = zccvout
varcov.long_name = 'Quantile transformed cloud cover indicator'
varcov.units = 'None'
varcov.missing_value = -9999.
vdtcov = zout.createVariable('CCoverInd_Data','i2',['sample','fovrow','fovcol'], fill_value= -99)
vdtcov[:] = totclrout
vdtcov.long_name = 'Cloud cover indicator'
vdtcov.units = 'None'
vdtcov.missing_value = -99
varfrc = zout.createVariable('TotCFrc_StdGaus','f4',['sample','fovrow','fovcol'], fill_value = -9999)
varfrc[:] = zfrcout
varfrc.long_name = 'Quantile transformed total cloud fraction'
varfrc.units = 'None'
varfrc.missing_value = -9999
vdtfrc = zout.createVariable('TotCFrc_Data','f4',['sample','fovrow','fovcol'], fill_value = -9999)
vdtfrc[:] = frctotout
vdtfrc.long_name = 'Total cloud fraction'
vdtfrc.units = 'None'
vdtfrc.missing_value = -9999
#varlgt1 = zout.createVariable('CFrcLogit1_StdGaus','f4',['fovrow','fovcol','sample'], fill_value = -9999)
#varlgt1[:] = zlgt1
#varlgt1.long_name = 'Quantile transformed slab 1 cloud fraction logit'
#varlgt1.units = 'None'
#varlgt1.missing_value = -9999.
varlgt2 = zout.createVariable('CFrcLogit2_StdGaus','f4',['sample','fovrow','fovcol'], fill_value = -9999)
varlgt2[:] = zlgt2
varlgt2.long_name = 'Quantile transformed slab 2 cloud fraction logit'
varlgt2.units = 'None'
varlgt2.missing_value = -9999.
vdtlgt2 = zout.createVariable('CFrcLogit2_Data','f4',['sample','fovrow','fovcol'], fill_value = -9999)
vdtlgt2[:] = cfclgt2out
vdtlgt2.long_name = 'Slab 2 cloud fraction logit'
vdtlgt2.units = 'None'
vdtlgt2.missing_value = -9999.
varlgt12 = zout.createVariable('CFrcLogit12_StdGaus','f4',['sample','fovrow','fovcol'], fill_value = -9999)
varlgt12[:] = zlgt12
varlgt12.long_name = 'Quantile transformed slab 1/2 overlap fraction logit'
varlgt12.units = 'None'
varlgt12.missing_value = -9999.
vdtlgt12 = zout.createVariable('CFrcLogit12_Data','f4',['sample','fovrow','fovcol'], fill_value = -9999)
vdtlgt12[:] = cfclgt12out
vdtlgt12.long_name = 'Slab 1/2 overlap fraction logit'
vdtlgt12.units = 'None'
vdtlgt12.missing_value = -9999.
varngwt1 = zout.createVariable('NGWater1_StdGaus','f4',['sample'], fill_value = -9999)
varngwt1[:] = zngwt1
varngwt1.long_name = 'Quantile transformed slab 1 non-gas water'
varngwt1.units = 'None'
varngwt1.missing_value = -9999.
vdtngwt1 = zout.createVariable('NGWater1_Data','f4',['sample'], fill_value = -9999)
vdtngwt1[:] = ngwt1out
vdtngwt1.long_name = 'Slab 1 non-gas water'
vdtngwt1.units = 'None'
vdtngwt1.missing_value = -9999.
varngwt2 = zout.createVariable('NGWater2_StdGaus','f4',['sample'], fill_value = -9999)
varngwt2[:] = zngwt2
varngwt2.long_name = 'Quantile transformed slab 2 non-gas water'
varngwt2.units = 'None'
varngwt2.missing_value = -9999.
vdtngwt2 = zout.createVariable('NGWater2_Data','f4',['sample'], fill_value = -9999)
vdtngwt2[:] = ngwt2out
vdtngwt2.long_name = 'Slab 2 non-gas water'
vdtngwt2.units = 'None'
vdtngwt2.missing_value = -9999.
varcttp1 = zout.createVariable('CTTemp1_StdGaus','f4',['sample'], fill_value = -9999)
varcttp1[:] = zcttp1
varcttp1.long_name = 'Quantile transformed slab 1 cloud top temperature'
varcttp1.units = 'None'
varcttp1.missing_value = -9999.
vdtcttp1 = zout.createVariable('CTTemp1_Data','f4',['sample'], fill_value = -9999)
vdtcttp1[:] = cttp1out
vdtcttp1.long_name = 'Slab 1 cloud top temperature'
vdtcttp1.units = 'K'
vdtcttp1.missing_value = -9999.
varcttp2 = zout.createVariable('CTTemp2_StdGaus','f4',['sample'], fill_value = -9999)
varcttp2[:] = zcttp2
varcttp2.long_name = 'Quantile transformed slab 2 cloud top temperature'
varcttp2.units = 'None'
varcttp2.missing_value = -9999.
vdtcttp2 = zout.createVariable('CTTemp2_Data','f4',['sample'], fill_value = -9999)
vdtcttp2[:] = cttp2out
vdtcttp2.long_name = 'Slab 2 cloud top temperature'
vdtcttp2.units = 'K'
vdtcttp2.missing_value = -9999.
varsrt3 = zout.createVariable('Temperature_StdGaus', 'f4', ['sample','level'], fill_value = -9999)
varsrt3[:] = ztmpout
varsrt3.long_name = 'Quantile transformed temperature'
varsrt3.units = 'None'
varsrt3.missing_value = -9999.
vdtsrt3 = zout.createVariable('Temperature_Data', 'f4', ['sample','level'], fill_value = -9999)
vdtsrt3[:] = tmpmerout
vdtsrt3.long_name = 'Temperature'
vdtsrt3.units = 'K'
vdtsrt3.missing_value = -9999.
varsrt4 = zout.createVariable('RH_StdGaus', 'f4', ['sample','level'], fill_value = -9999)
varsrt4[:] = zrhout
varsrt4.long_name = 'Quantile transformed relative humidity'
varsrt4.units = 'None'
varsrt4.missing_value = -9999.
vdtsrt4 = zout.createVariable('RH_Data', 'f4', ['sample','level'], fill_value = -9999)
vdtsrt4[:] = h2omerout
vdtsrt4.long_name = 'Relative humidity'
vdtsrt4.units = 'None'
vdtsrt4.missing_value = -9999.
varsrts1 = zout.createVariable('SfcTemp_StdGaus', 'f4', ['sample'], fill_value = -9999)
varsrts1[:] = zsftmpout
varsrts1.long_name = 'Quantile transformed surface temperature'
varsrts1.units = 'None'
varsrts1.missing_value = -9999.
vdtsrts1 = zout.createVariable('SfcTemp_Data', 'f4', ['sample'], fill_value = -9999)
vdtsrts1[:] = sftmpout
vdtsrts1.long_name = 'Surface temperature'
vdtsrts1.units = 'None'
vdtsrts1.missing_value = -9999.
varsrts2 = zout.createVariable('SfcPres_StdGaus', 'f4', ['sample'], fill_value = -9999)
varsrts2[:] = zpsfcout
varsrts2.long_name = 'Quantile transformed surface pressure'
varsrts2.units = 'None'
varsrts2.missing_value = -9999.
vdtsrts2 = zout.createVariable('SfcPres_Data', 'f4', ['sample'], fill_value = -9999)
vdtsrts2[:] = psfcout
vdtsrts2.long_name = 'Surface pressure'
vdtsrts2.units = 'hPa'
vdtsrts2.missing_value = -9999.
varsrts3 = zout.createVariable('SfcAlt_StdGaus', 'f4', ['sample'], fill_value = -9999)
varsrts3[:] = zsfaltout
varsrts3.long_name = 'Quantile transformed surface altitude'
varsrts3.units = 'None'
varsrts3.missing_value = -9999.
vdtsrts3 = zout.createVariable('SfcAlt_Data', 'f4', ['sample'], fill_value = -9999)
vdtsrts3[:] = sfaltout
vdtsrts3.long_name = 'Surface altitude'
vdtsrts3.units = 'm'
vdtsrts3.missing_value = -9999.
zout.close()
return
def zscore_update_mcem(outfile, qfile, zfile, dfile, cnffile, probs, niter = 50, sdchc = 553133):
    # Monte Carlo expectation maximization update for zscores
    # outfile: Output file with updated z-scores, MCEM results
    # qfile: Quantile file
    # zfile: Input z-score file
    # dfile: Data file
    # cnffile: Configuration file (CSV)
    # probs: Probability levels for quantile
    # niter: Maximum number of EM iterations
    # sdchc: Random seed
    #
    # Fits a multivariate normal to latent z-scores by MCEM: the E-step draws
    # truncated-normal samples via MCMC (calculate_VPD.trnc_norm_mcmc), the
    # M-step re-estimates the mean/covariance. Iterates until the complete-data
    # log-likelihood increase drops to 1e3 or niter iterations are reached,
    # writing per-iteration results into `outfile`.
    # NOTE(review): `random` is called with numpy-style signatures
    # (uniform(size=...)); presumably numpy.random or an alias -- confirm at
    # the module imports.
    df = pandas.read_csv(cnffile, dtype = {'Order':int, 'ZScore_Name':str, 'Quantile_Name':str, \
        'Data_Name':str, 'Start':int, 'Length':int, 'DType':str })
    tsz = df['Length'].sum()   # total state vector length (sum of group lengths)
    szstr = '%d Total State Vector Elements' % (tsz)
    print(szstr)
    nrw = df.shape[0]          # number of variable groups in the configuration
    nsmp = -1                  # sample count; set from the first z-score variable read
    stctr = 0                  # running state-vector column index across all groups
    # RN generator
    random.seed(sdchc)
    # Initialize MVN parameters
    mu0 = numpy.zeros((tsz,),dtype=numpy.float64)
    cv0 = numpy.zeros((tsz,tsz),dtype=numpy.float64)
    # Loop through groups to initialize
    print(qfile)
    print(zfile)
    for q in range(nrw):
        # Initial covariance: 1.0 for scalar groups; for multi-element groups an
        # exchangeable block with 0.4 off-diagonal and 0.4+0.6=1.0 diagonal.
        if (df['Length'].values[q] == 1):
            cv0[stctr,stctr] = 1.0
        else:
            cst = stctr
            cfn = stctr + df['Length'].values[q]
            diagvl = numpy.zeros( (df['Length'].values[q],), dtype=numpy.float64) + 0.6
            cvblk = numpy.zeros( (df['Length'].values[q],df['Length'].values[q]), dtype=numpy.float64) + 0.4 + \
                numpy.diagflat( diagvl)
            cv0[cst:cfn,cst:cfn] = cvblk
        # Quantiles for this group; 'CloudFrac' groups are stored as 3x3 FOV arrays
        qvrnm = df['Quantile_Name'].values[q]
        fqs = Dataset(qfile,'r')
        if (df['Group'].values[q] == 'CloudFrac'):
            qtmp = fqs.variables[qvrnm][:,:,:]
        elif (df['Length'].values[q] > 1):
            qtmp = fqs.variables[qvrnm][:,:]
        else:
            qtmp = fqs.variables[qvrnm][:]
        fqs.close()
        # Current (input) z-scores for this group
        zvrnm = df['ZScore_Name'].values[q]
        fzs = Dataset(zfile,'r')
        if (df['Group'].values[q] == 'CloudFrac'):
            ztmp = fzs.variables[zvrnm][:,:,:]
        elif (df['Length'].values[q] > 1):
            ztmp = fzs.variables[zvrnm][:,:]
        else:
            ztmp = fzs.variables[zvrnm][:]
        if nsmp < 0:
            # First variable read fixes the sample size; allocate the work arrays
            nsmp = ztmp.shape[0]
            zscrarr = numpy.zeros((nsmp,tsz),dtype=numpy.float32)
            zlwrarr = numpy.zeros((nsmp,tsz),dtype=numpy.float32)
            zuprarr = numpy.zeros((nsmp,tsz),dtype=numpy.float32)
        fzs.close()
        # Observed data for this group (used to derive truncation limits)
        yvrnm = df['Data_Name'].values[q]
        fys = Dataset(dfile,'r')
        if (df['Group'].values[q] == 'CloudFrac'):
            dttmp = fys.variables[yvrnm][:,:,:]
        elif (df['Length'].values[q] > 1):
            dttmp = fys.variables[yvrnm][:,:]
        else:
            dttmp = fys.variables[yvrnm][:]
        msgvl = fys.variables[yvrnm].missing_value
        fys.close()
        # Find z-score limits
        # Per-sample lower/upper truncation bounds from the observed data and
        # its quantiles. Input z-scores below -10 are treated as missing and
        # re-initialized with uniform draws before the MCMC.
        if (df['Group'].values[q] == 'CloudFrac'):
            cctr = 0    # FOV counter, only used in diagnostic messages
            for frw in range(3):
                for fcl in range(3):
                    ztmplwr, ztmpupr = calculate_VPD.std_norm_limits_from_obs_fill_msg(dttmp[:,frw,fcl], qtmp[frw,fcl,:], probs, msgvl)
                    cctr = cctr + 1
                    zlwrarr[:,stctr] = ztmplwr[:]
                    zuprarr[:,stctr] = ztmpupr[:]
                    zchk = ztmp[:,frw,fcl]
                    zmsg = zchk[zchk < -10.0]
                    zchk[zchk < -10.0] = random.uniform(size=zmsg.shape[0])
                    zscrarr[:,stctr] = zchk[:]
                    # Sanity check: upper limit should never fall below lower
                    zdif = ztmpupr - ztmplwr
                    zsm = numpy.sum( (zdif < 0))
                    if zsm > 0:
                        zstr = 'Lower Upper Mismatch\n %s (%d): %d' % (df['ZScore_Name'].values[q],cctr,zsm)
                        print(zstr)
                    stctr = stctr + 1
        elif (df['Length'].values[q] > 1):
            # Multi-level group: 'Start' is 1-based, hence the -1 offset
            stidx = df['Start'].values[q] - 1
            fnidx = stidx + df['Length'].values[q]
            for k in range(stidx,fnidx):
                ztmplwr, ztmpupr = calculate_VPD.std_norm_limits_from_obs_fill_msg(dttmp[:,k], qtmp[k,:], probs, msgvl)
                zlwrarr[:,stctr] = ztmplwr[:]
                zuprarr[:,stctr] = ztmpupr[:]
                zchk = ztmp[:,k]
                zmsg = zchk[zchk < -10.0]
                zchk[zchk < -10.0] = random.uniform(size=zmsg.shape[0])
                zscrarr[:,stctr] = zchk[:]
                zdif = ztmpupr - ztmplwr
                zsm = numpy.sum( (zdif < 0))
                if zsm > 0:
                    zstr = 'Lower Upper Mismatch\n %s (%d): %d' % (df['ZScore_Name'].values[q],k,zsm)
                    print(zstr)
                stctr = stctr + 1
        else:
            # Scalar group
            ztmplwr, ztmpupr = calculate_VPD.std_norm_limits_from_obs_fill_msg(dttmp, qtmp, probs, msgvl)
            zlwrarr[:,stctr] = ztmplwr[:]
            zuprarr[:,stctr] = ztmpupr[:]
            zchk = ztmp[:]
            zmsg = zchk[zchk < -10.0]
            zchk[zchk < -10.0] = random.uniform(size=zmsg.shape[0])
            zscrarr[:,stctr] = zchk[:]
            zdif = ztmpupr - ztmplwr
            zsm = numpy.sum( (zdif < 0))
            if zsm > 0:
                zstr = 'Lower Upper Mismatch\n %s: %d' % (df['ZScore_Name'].values[q],zsm)
                print(zstr)
            stctr = stctr + 1
    print(nsmp)
    # Initial complete-data log-likelihood under (mu0, cv0)
    lgdns = stats.multivariate_normal.logpdf(zscrarr, mean=mu0, cov=cv0)
    cmpllk = numpy.sum(lgdns)
    lkstr = 'Initial Log-likelihood: %.4e' % (cmpllk)
    print(lkstr)
    mucr = mu0
    cvcr = cv0
    crlk = cmpllk
    lkdf = 1e8      # log-likelihood increase; large value forces at least one iteration
    zfnl = zscrarr  # current z-score state, carried between MCMC calls
    # Setup output
    emout = Dataset(outfile,'w')
    dimiter = emout.createDimension('iteration',niter)
    dimstate = emout.createDimension('state',tsz)
    dimsmp = emout.createDimension('sample',nsmp)
    varlk = emout.createVariable('logLike','f8',['iteration'], fill_value = -9999)
    varlk.long_name = 'Complete information log likelihood'
    varlk.units = 'None'
    varlk.missing_value = -9999
    varmn = emout.createVariable('state_mean','f4',['iteration','state'], fill_value = -9999)
    varmn.long_name = 'Multivariate state mean vector'
    varmn.units = 'None'
    varmn.missing_value = -9999
    varcv = emout.createVariable('state_cov','f4',['iteration','state','state'], fill_value = -9999)
    varcv.long_name = 'Multivariate state covariance matrix'
    varcv.units = 'None'
    varcv.missing_value = -9999
    varest = emout.createVariable('state_samples','f4',['iteration','sample','state'], fill_value = -9999)
    varest.long_name = 'State variable expected values'
    varest.units = 'None'
    varest.missing_value = -9999
    emout.close()
    critr = 0
    # EM loop: stop at niter iterations or when the log-likelihood gain <= 1e3
    while ( (critr < niter) and (lkdf > 1e3)):
        # MCMC (E-step): truncated-normal draws within [zlwrarr, zuprarr]
        prccr = linalg.inv(cvcr)
        zfnl, zmn1 = calculate_VPD.trnc_norm_mcmc(zfnl, mucr, prccr, zlwrarr, zuprarr, \
            niter = 450, nburn = 50, nvec = nsmp, nstate = tsz)
        # Mean and Cov (M-step)
        cvcr = numpy.cov(zmn1.T)
        mucr = numpy.mean(zmn1,axis=0)
        # Lift tiny/negative eigenvalues to keep the covariance positive definite
        w, v = linalg.eig(cvcr)
        wsq = numpy.arange(w.shape[0])
        wsb = wsq[w < 1.5e-8]
        if wsb.shape[0] > 0:
            s1 = 'Lifting %d eigenvalues' % (wsb.shape[0])
            print(s1)
            print(w[150:174])
            w[wsb] = 1.5e-8
            wdg = numpy.diagflat(w)
            cvcr = numpy.dot(v, numpy.dot(wdg,v.T))
        lgdns = stats.multivariate_normal.logpdf(zmn1, mean=mucr, cov=cvcr)
        cmpllk = numpy.sum(lgdns)
        lkdf = cmpllk - crlk
        lkstrcr = '''At EM Iteration %d,
        Log-likelihood: %.4e
        Log-like increase: %.4e
        Minimimum Eigenvalue: %.6e''' % (critr,cmpllk,lkdf,numpy.amin(w))
        print(lkstrcr)
        crlk = cmpllk
        # Save results for this iteration (re-open in append mode)
        emout = Dataset(outfile,'r+')
        varlk = emout.variables['logLike']
        varlk[critr] = cmpllk
        varmn = emout.variables['state_mean']
        varmn[critr,:] = mucr
        varcv = emout.variables['state_cov']
        varcv[critr,:,:] = cvcr
        varest = emout.variables['state_samples']
        varest[critr,:,:] = zmn1
        emout.close()
        critr = critr + 1
    return
def airs_raw_l2_summary(expdir, outfnm, nrep=10):
    # Extract desired AIRS L2 fields directly from experiment results
    # expdir: Experiment directory prefix; replicate r lives in '<expdir>index_<r>'
    #         (note: no separator is inserted, so expdir should end with '/')
    # outfnm: Output netCDF file name
    # nrep: Number of replicates of the reference AIRS granule
    # Experiment should have one directory per replicate.
    # Each L2 support file contributes one scan row of 30 footprints; a
    # replicate holds one granule's worth of 45 rows (45*30 samples).
    nzairs = 100
    nsmpout = nrep * 45 * 30
    # Set up output (PSfc, temp profile and QC)
    qout = Dataset(outfnm,'w')
    dimsmp = qout.createDimension('sample',nsmpout)
    dimlev = qout.createDimension('level',nzairs)
    varpsfc = qout.createVariable('PSurfStd','f4',['sample'], fill_value = -9999)
    varpsfc.long_name = 'Surface pressure'
    varpsfc.units = 'hPa'
    varpsfc.missing_value = -9999
    var2m = qout.createVariable('TSurfAir','f4',['sample'], fill_value = -9999)
    var2m.long_name = 'Near-surface air temperature'
    var2m.units = 'K'
    var2m.missing_value = -9999
    vart2qc = qout.createVariable('TSurfAir_QC','i2',['sample'], fill_value = -99)
    vart2qc.long_name = 'Near-surface air temperature QC'
    vart2qc.units = 'none'
    vart2qc.missing_value = -99
    vartmp = qout.createVariable('TAirSup','f4',['sample','level'], fill_value = -9999)
    vartmp.long_name = 'Air temperature'
    vartmp.units = 'K'
    vartmp.missing_value = -9999
    vartqc = qout.createVariable('TAirSup_QC','i2',['sample','level'], fill_value = -99)
    vartqc.long_name = 'Air temperature QC'
    vartqc.units = 'none'
    vartqc.missing_value = -99
    qout.close()
    for k in range(nrep):
        simdir = '%sindex_%d' % (expdir,k+1)
        print(simdir)
        if not os.path.exists(simdir):
            continue
        # Collect the L2 support files for this replicate
        l2lst = [fnm for fnm in os.listdir(simdir) if 'L2.RetSup' in fnm]
        # Sort by L2 run index: the next-to-last dot-delimited token of the
        # file name, with its first character dropped, is the integer index
        xlst = [int(fnm.split('.')[-2][1:]) for fnm in l2lst]
        print(xlst)
        l2frm = pandas.DataFrame({'L2SupFile': l2lst, 'RunIndex': xlst})
        l2frm = l2frm.sort_values(by=['RunIndex'], ascending=[True])
        # Use only the most recent 45 runs (one granule's worth of scan rows)
        l2ln = l2frm.shape[0]
        lidxst = l2ln - 45 if l2ln > 45 else 0
        l2frm = l2frm[lidxst:l2ln]
        # Open the output once per replicate (previously re-opened per row)
        ncout = Dataset(outfnm,'r+')
        for j in range(l2frm.shape[0]):
            l2fl = '%s/%s' % (simdir,l2frm['L2SupFile'].values[j])
            ncl2 = Dataset(l2fl)
            # First scan row of each support file carries the simulated data
            psfc = ncl2.variables['PSurfStd'][0,:]
            tprf = ncl2.variables['TAirSup'][0,:,:]
            tmpqc = ncl2.variables['TAirSup_QC'][0,:,:]
            t2m = ncl2.variables['TSurfAir'][0,:]
            t2mqc = ncl2.variables['TSurfAir_QC'][0,:]
            ncl2.close()
            # Destination slice: replicate k, row j, 30 footprints
            ost = k*45*30 + j*30
            ofn = ost + 30
            ncout.variables['PSurfStd'][ost:ofn] = psfc
            ncout.variables['TAirSup'][ost:ofn,:] = tprf
            ncout.variables['TAirSup_QC'][ost:ofn,:] = tmpqc
            ncout.variables['TSurfAir'][ost:ofn] = t2m
            ncout.variables['TSurfAir_QC'][ost:ofn] = t2mqc
        ncout.close()
    return
def airscld_invtransf_stateconf_cloud9(rffl, qfl, gmmfl, outfl, stcnf, yrchc, rfmn, rfdy, rfgrn, scnrw, nrep = 10, \
    clearsky = False, l2dir = '/archive/AIRSOps/airs/gdaac/v6'):
    # Read in mixture model parameters and quantiles, draw random samples and set up SARTA input files
    # Use AIRS FOV cloud fraction information
    # Use state vector reference configuration
    # Use designated AIRS reference granule, and pull surface pressure temperature from there
    # rffl: Reference level file
    # qfl: Template quantile file
    # gmmfl: Gaussian mixture model results file
    # outfl: Output file (HDF5, SARTA input fields)
    # stcnf: State vector configuration file
    # yrchc: Template Year Choice
    # rfmn: Month for reference granule
    # rfdy: Day for reference granule
    # rfgrn: Reference granule number
    # scnrw: Scan row for experiment
    # nrep: Number of replicate granules
    # clearsky: When True, force all samples to zero cloud slabs (clear-sky only)
    # l2dir: Local AIRS Level 2 directory (to retrieve reference info)
    # RN Generator
    sdchc = 452546 + yrchc + rfmn*100
    random.seed(sdchc)
    nszout = 45 * 30 * nrep    # total samples: 45 rows x 30 footprints per replicate
    sfrps = 45 * nrep          # surface-row replicates (one scan row tiled)
    nlvsrt = 98                # SARTA vertical levels (arrays padded by 3 below)
    msgdbl = -9999.0
    # Read probs and pressure levels
    f = Dataset(rffl,'r')
    airs_sarta_levs = f.variables['level'][:]
    f.close()
    # Get reference granule info
    airsdr = '%s/%04d/%02d/%02d/airs2sup' % (l2dir,yrchc,rfmn,rfdy)
    if (os.path.exists(airsdr)):
        fllst = os.listdir(airsdr)
        l2str = 'AIRS.%04d.%02d.%02d.%03d' % (yrchc,rfmn,rfdy,rfgrn)
        rffd = -1
        j = 0
        # Scan for the first .hdf file matching the reference granule tag
        while ( (j < len(fllst)) and (rffd < 0) ):
            lncr = len(fllst[j])
            l4 = lncr - 4
            if ( (fllst[j][l4:lncr] == '.hdf') and (l2str in fllst[j])):
                l2fl = '%s/%s' % (airsdr,fllst[j])
                ncl2 = Dataset(l2fl)
                psfc = ncl2.variables['PSurfStd'][:,:]
                topg = ncl2.variables['topog'][:,:]
                ncl2.close()
                rffd = j
            j = j + 1
    else:
        # NOTE: psfc/topg are undefined past this point if the directory is absent
        print('L2 directory not found')
    # Surface replicates: tile the chosen scan row across all replicates
    psfcvc = psfc[scnrw-1,:]
    topgvc = topg[scnrw-1,:]
    spres = numpy.tile(psfcvc,(sfrps,))
    salti = numpy.tile(topgvc,(sfrps,))
    # Altitude for H2O processing
    qin = Dataset(qfl,'r')
    lvs = qin.variables['level'][:]
    alts = qin.variables['Altitude_median'][:]
    qin.close()
    altrw = numpy.zeros((30,nlvsrt+3),dtype=numpy.float64)
    for i in range(30):
        # Set lowest levels to surface topog
        altrw[i,:] = alts[:]
        altrw[i, alts < topgvc[i]] = topgvc[i]
    alth2o = numpy.tile(altrw,(sfrps,1))
    print(alth2o[15,80:100])
    print(alth2o[75,80:100])
    print(alth2o[80,80:100])
    print(alth2o.shape)
    # Variable list, from configuration
    df = pandas.read_csv(stcnf, dtype = {'Order':int, 'ZScore_Name':str, 'Quantile_Name':str, \
        'Data_Name':str, 'Start':int, 'Length':int, 'DType':str })
    tsz = df['Length'].sum()
    szstr = '%d Total State Vector Elements' % (tsz)
    print(szstr)
    nrw = df.shape[0]
    # Discrete/Continuous Indicator: cloud count and type use the discrete
    # inverse transform; everything else is continuous
    df['DiscCont'] = 'Continuous'
    typind = []
    stvrnms = []
    for q in range(nrw):
        if ( (df['Group'].values[q] == 'NumCloud') or (df['Group'].values[q] == 'CloudType') ):
            df['DiscCont'].values[q] = 'Discrete'
        for p in range(df['Length'].values[q]):
            typind.append(df['DiscCont'].values[q])
            cspt = df['Start'].values[q] + p
            vnm = '%s_%d' % (df['Data_Name'].values[q],cspt)
            stvrnms.append(vnm)
    # Quantile file: assemble the (state x probability) quantile matrix
    qin = Dataset(qfl,'r')
    prbs = qin.variables['probability'][:]
    nprb = prbs.shape[0]
    qsall = numpy.zeros((tsz,nprb))
    # Index 103 is used below as the median probability slot -- TODO confirm
    rhmd = qin.variables['RH_quantile'][:,103]
    cctr = 0
    for j in range(nrw):
        if (df['Length'].values[j] == 1):
            vr1 = df['Quantile_Name'].values[j]
            qsall[cctr,:] = qin.variables[vr1][:]
            cctr = cctr + df['Length'].values[j]
        elif (df['Group'].values[j] == 'CloudFrac'):
            # 3x3 FOV quantiles share one config row; expand to 9 state slots
            for cl0 in range(3):
                for rw0 in range(3):
                    otst = cctr
                    otfn = cctr + 1
                    vr1 = df['Quantile_Name'].values[j]
                    qsall[otst:otfn,:] = qin.variables[vr1][cl0,rw0,:]
                    cctr = cctr + 1
        else:
            inst = df['Start'].values[j] - 1
            infn = inst + df['Length'].values[j]
            otst = cctr
            otfn = cctr + df['Length'].values[j]
            vr1 = df['Quantile_Name'].values[j]
            qsall[otst:otfn,:] = qin.variables[vr1][inst:infn,:]
            cctr = cctr + df['Length'].values[j]
    qin.close()
    print('State medians')
    print(qsall[:,103])
    # Read GMM Results
    gmin = Dataset(gmmfl,'r')
    gmnms = gmin['State_Vector_Names'][:,:]
    gmmean = gmin['Mean'][:,:]
    gmpkcv = gmin['Packed_Covariance'][:,:]
    gmprps = gmin['Mixture_Proportion'][:]
    gmin.close()
    nmclps = gmnms.tolist()
    strvrs = list(map(calculate_VPD.clean_byte_list,nmclps))
    if sys.version_info[0] < 3:
        print('Version 2')
        strvrs = map(str,strvrs)
    nmix = gmmean.shape[0]
    nmxvar = gmmean.shape[1]
    mrgcv = numpy.zeros((nmix,nmxvar,nmxvar),dtype=numpy.float64)
    for j in range(nmix):
        mrgcv[j,:,:] = calculate_VPD.unpackcov(gmpkcv[j,:], nelm=nmxvar)
    # Component sizes: multinomial split of samples across mixture components
    # FIX: numpy.float (removed in NumPy 1.24) replaced with numpy.float64
    dtall = numpy.zeros((nszout,nmxvar),dtype=numpy.float64)
    cmpidx = numpy.zeros((nszout,),dtype=numpy.int16)
    csmp = random.multinomial(nszout,pvals=gmprps)
    cmsz = 0
    for j in range(nmix):
        cvfl = mrgcv[j,:,:]
        # Sample on the correlation scale, then rescale by the std-dev matrix
        crmt = calculate_VPD.cov2cor(cvfl)
        sdmt = numpy.diag(numpy.sqrt(cvfl.diagonal()))
        w, v = linalg.eig(crmt)
        print(numpy.amin(w))
        sdfn = cmsz + csmp[j]
        dtz = random.multivariate_normal(numpy.zeros((nmxvar,)),crmt,size=csmp[j])
        dttmp = numpy.tile(gmmean[j,:],(csmp[j],1)) + numpy.dot(dtz,sdmt)
        dtall[cmsz:sdfn,:] = dttmp[:,:]
        cmpidx[cmsz:sdfn] = j + 1
        cmsz = cmsz + csmp[j]
    # Re-shuffle so component membership is randomized across samples
    ssq = numpy.arange(nszout)
    sqsmp = random.choice(ssq,size=nszout,replace=False)
    csmpshf = cmpidx[sqsmp]
    dtshf = dtall[sqsmp,:]
    print(dtshf.shape)
    ### Inverse Transform: map standard-normal draws back through the quantiles
    qout = numpy.zeros(dtshf.shape)
    for j in range(tsz):
        if typind[j] == 'Discrete':
            qout[:,j] = calculate_VPD.data_quantile_from_std_norm_discrete(dtshf[:,j],qsall[j,:],prbs,minval=qsall[j,0],maxval=qsall[j,nprb-1])
        else:
            qout[:,j] = calculate_VPD.data_quantile_from_std_norm(dtshf[:,j],qsall[j,:],prbs,minval=qsall[j,0],maxval=qsall[j,nprb-1])
    ### Prepare for SARTA
    varlstout = ['cngwat','cngwat2','cprbot','cprbot2','cprtop','cprtop2', \
        'cpsize','cpsize2','cstemp','cstemp2','ctype','ctype2','salti','spres','stemp']
    # Convert to data frame
    smpfrm = pandas.DataFrame(data=qout,columns=stvrnms)
    dtout = numpy.zeros((nszout,len(varlstout)), dtype=numpy.float64)
    frmout = pandas.DataFrame(data=dtout,columns=varlstout)
    # Clear-sky?
    if clearsky:
        smpfrm['NumberSlab_Data_1'] = 0
    # Cloud Types: sampled type index mapped to SARTA codes (101/201)
    frmout['ctype'] = (smpfrm['CType1_Data_1'] + 1.0) * 100.0 + 1.0
    frmout['ctype2'] = (smpfrm['CType2_Data_1'] + 1.0) * 100.0 + 1.0
    frmout.loc[(smpfrm.NumberSlab_Data_1 == 0),'ctype'] = msgdbl
    frmout.loc[(smpfrm.NumberSlab_Data_1 < 2),'ctype2'] = msgdbl
    # Met/Sfc Components, arrays sized for SARTA and AIRS
    cctr = 0
    prhout = numpy.zeros((nszout,nlvsrt+3)) - 9999.0
    ptmpout = numpy.zeros((nszout,nlvsrt+3)) - 9999.0
    for j in range(nrw):
        if (df['Group'].values[j] == 'Temperature'):
            inst = df['Start'].values[j] - 1
            infn = inst + df['Length'].values[j]
            otst = cctr
            otfn = cctr + df['Length'].values[j]
            ptmpout[:,inst:infn] = qout[:,otst:otfn]
        elif (df['Group'].values[j] == 'RelHum'):
            inst = df['Start'].values[j] - 1
            infn = inst + df['Length'].values[j]
            otst = cctr
            otfn = cctr + df['Length'].values[j]
            prhout[:,inst:infn] = qout[:,otst:otfn]
            # Fill levels above the modeled range downward from the median RH
            bsrh = rhmd[inst]
            for k in range(inst-1,-1,-1):
                if ma.is_masked(rhmd[k]):
                    prhout[:,k] = bsrh / 2.0
                    t2 = 'RH masked: %d' % (k)
                    print(t2)
                elif rhmd[k] < 0:
                    t2 = 'RH below 0: %d' % (k)
                    print(t2)
                    prhout[:,k] = bsrh
                else:
                    prhout[:,k] = rhmd[k]
                    bsrh = rhmd[k]
        elif (df['Group'].values[j] == 'Surface'):
            frmout['stemp'] = qout[:,cctr]
        # Column counter advances for every group, matching qsall layout
        cctr = cctr + df['Length'].values[j]
    str1 = '''RH at Level 1: %.4e, %.4e ''' % (numpy.amin(prhout[:,0]),rhmd[0])
    str2 = '''RH at Level 2: %.4e, %.4e ''' % (numpy.amin(prhout[:,1]),rhmd[1])
    print(str1)
    print(str2)
    h2oout = calculate_VPD.calculate_h2odens(prhout,ptmpout,airs_sarta_levs,alth2o)
    # Surface from reference
    frmout['salti'] = salti
    # Need for clouds
    frmout['spres'] = spres
    #smpfrm['SfcPres'] = spres
    # Pressure Variables: slab bottom/top pressures recovered from logits via
    # lgttoprp, as fractions of the surface-pressure column
    for i in range(nszout):
        if smpfrm['NumberSlab_Data_1'][smpfrm.index[i]] == 0:
            frmout.at[i,'cprbot'] = msgdbl
            frmout.at[i,'cprtop'] = msgdbl
            frmout.at[i,'cprbot2'] = msgdbl
            frmout.at[i,'cprtop2'] = msgdbl
        elif smpfrm['NumberSlab_Data_1'][smpfrm.index[i]] == 1:
            tmplgts = numpy.array( [smpfrm['CloudBot1Logit_Data_1'][smpfrm.index[i]], \
                smpfrm['DPCloud1Logit_Data_1'][smpfrm.index[i]] ] )
            frctmp = calculate_VPD.lgttoprp(tmplgts)
            frmout.at[i,'cprbot'] = spres[i] * (1.0 - frctmp[0])
            frmout.at[i,'cprtop'] = spres[i] * (1.0 - frctmp[0] - frctmp[1])
            frmout.at[i,'cprbot2'] = msgdbl
            frmout.at[i,'cprtop2'] = msgdbl
        elif smpfrm['NumberSlab_Data_1'][smpfrm.index[i]] == 2:
            tmplgts = numpy.array( [smpfrm['CloudBot1Logit_Data_1'][smpfrm.index[i]], \
                smpfrm['DPCloud1Logit_Data_1'][smpfrm.index[i]], \
                smpfrm['DPSlabLogit_Data_1'][smpfrm.index[i]], \
                smpfrm['DPCloud2Logit_Data_1'][smpfrm.index[i]] ] )
            frctmp = calculate_VPD.lgttoprp(tmplgts)
            frmout.at[i,'cprbot'] = spres[i] * (1.0 - frctmp[0])
            frmout.at[i,'cprtop'] = spres[i] * (1.0 - frctmp[0] - frctmp[1])
            frmout.at[i,'cprbot2'] = spres[i] * (1.0 - frctmp[0] - frctmp[1] - frctmp[2])
            frmout.at[i,'cprtop2'] = spres[i] * (1.0 - frctmp[0] - frctmp[1] - frctmp[2] - frctmp[3])
    # Non-Gas Water
    frmout['cngwat'] = smpfrm['NGWater1_Data_1']
    frmout.loc[(smpfrm.NumberSlab_Data_1 == 0),'cngwat'] = msgdbl
    frmout['cngwat2'] = smpfrm['NGWater2_Data_1']
    frmout.loc[(smpfrm.NumberSlab_Data_1 < 2),'cngwat2'] = msgdbl
    # Temperature
    frmout['cstemp'] = smpfrm['CTTemp1_Data_1']
    frmout.loc[(smpfrm.NumberSlab_Data_1 == 0),'cstemp'] = msgdbl
    frmout['cstemp2'] = smpfrm['CTTemp2_Data_1']
    frmout.loc[(smpfrm.NumberSlab_Data_1 < 2),'cstemp2'] = msgdbl
    # Particle Size, from Sergio's paper
    # 20 for water (ctype 101), 80 for ice (ctype 201)
    frmout.loc[(frmout.ctype == 101.0),'cpsize'] = 20
    frmout.loc[(frmout.ctype == 201.0),'cpsize'] = 80
    frmout.loc[(frmout.ctype < 0.0),'cpsize'] = msgdbl
    frmout.loc[(frmout.ctype2 == 101.0),'cpsize2'] = 20
    frmout.loc[(frmout.ctype2 == 201.0),'cpsize2'] = 80
    frmout.loc[(frmout.ctype2 < 0.0),'cpsize2'] = msgdbl
    # Fractions, 3D Arrays (sample x 3x3 FOV)
    cfrc1out = numpy.zeros((nszout,3,3)) - 9999.0
    cfrc2out = numpy.zeros((nszout,3,3)) - 9999.0
    cfrc12out = numpy.zeros((nszout,3,3)) - 9999.0
    for i in range(nszout):
        cldctr = 0
        if smpfrm['NumberSlab_Data_1'][smpfrm.index[i]] == 0:
            cfrc1out[i,:,:] = 0.0
            cfrc2out[i,:,:] = 0.0
            cfrc12out[i,:,:] = 0.0
        elif smpfrm['NumberSlab_Data_1'][smpfrm.index[i]] == 1:
            for q in range(3):
                for p in range(3):
                    cfcnm = 'TotCFrc_Data_%d' % (cldctr + 1)
                    cfrc1out[i,q,p] = smpfrm[cfcnm].values[i]
                    cldctr = cldctr + 1
            cfrc2out[i,:,:] = 0.0
            cfrc12out[i,:,:] = 0.0
        elif smpfrm['NumberSlab_Data_1'][smpfrm.index[i]] == 2:
            for q in range(3):
                for p in range(3):
                    cfcnm = 'TotCFrc_Data_%d' % (cldctr + 1)
                    lg2nm = 'CFrcLogit2_Data_%d' % (cldctr + 1)
                    lg12nm = 'CFrcLogit12_Data_%d' % (cldctr + 1)
                    tcfrc = smpfrm[cfcnm].values[i]
                    # BUG FIX: zlgt2/zlgt12 were previously both read from the
                    # total-fraction column (cfcnm); lg2nm/lg12nm were built but
                    # never used. Read the slab-2 and overlap logits correctly.
                    zlgt2 = smpfrm[lg2nm].values[i]
                    zlgt12 = smpfrm[lg12nm].values[i]
                    tmplgts = numpy.array( [zlgt2, zlgt12] )
                    frctmp = calculate_VPD.lgttoprp(tmplgts)
                    cfrc1out[i,q,p] = tcfrc * (frctmp[2] + frctmp[1])
                    cfrc2out[i,q,p] = tcfrc * (frctmp[0] + frctmp[1])
                    cfrc12out[i,q,p] = tcfrc * (frctmp[1])
                    cldctr = cldctr + 1
    # Write Sample Output
    print(frmout[166:180])
    fldbl = numpy.array([-9999.],dtype=numpy.float64)
    flshrt = numpy.array([-99],dtype=numpy.int16)
    f = h5py.File(outfl,'w')
    for j in range(len(varlstout)):
        dftmp = f.create_dataset(varlstout[j],data=frmout[varlstout[j]])
        dftmp.attrs['missing_value'] = -9999.
        dftmp.attrs['_FillValue'] = -9999.
    dfpt = f.create_dataset('ptemp',data=ptmpout)
    dfpt.attrs['missing_value'] = fldbl
    dfpt.attrs['_FillValue'] = fldbl
    dfrh = f.create_dataset('relative_humidity',data=prhout)
    dfrh.attrs['missing_value'] = fldbl
    dfrh.attrs['_FillValue'] = fldbl
    dfgs = f.create_dataset('gas_1',data=h2oout)
    dfgs.attrs['missing_value'] = fldbl
    dfgs.attrs['_FillValue'] = fldbl
    dfcf1 = f.create_dataset('cfrac',data=cfrc1out)
    dfcf1.attrs['missing_value'] = fldbl
    dfcf1.attrs['_FillValue'] = fldbl
    dfcf2 = f.create_dataset('cfrac2',data=cfrc2out)
    dfcf2.attrs['missing_value'] = fldbl
    dfcf2.attrs['_FillValue'] = fldbl
    dfcf12 = f.create_dataset('cfrac12',data=cfrc12out)
    dfcf12.attrs['missing_value'] = fldbl
    dfcf12.attrs['_FillValue'] = fldbl
    dfcsmp = f.create_dataset('mixture_component',data=csmpshf)
    dfcsmp.attrs['missing_value'] = flshrt
    dfcsmp.attrs['_FillValue'] = flshrt
    dflv = f.create_dataset('level',data=airs_sarta_levs)
    f.close()
    return
def extract_airs_supp(rffl, qfl, gmmfl, outfl, stcnf, yrchc, rfmn, rfdy, rfgrn, scnrw, \
                      l2dir = '/archive/AIRSOps/airs/gdaac/v6'):
    '''Extract surface/emissivity reference information for one scan row of a
    designated AIRS Level 2 support (airs2sup) reference granule and write it
    to an HDF5 output file.

    rffl:   Reference NetCDF file providing the 'level' coordinate
    qfl:    (unused here; kept for interface compatibility)
    gmmfl:  (unused here; kept for interface compatibility)
    outfl:  HDF5 output file name
    stcnf:  (unused here; kept for interface compatibility)
    yrchc:  Template year choice
    rfmn:   Month for reference granule
    rfdy:   Day for reference granule
    rfgrn:  Reference granule number
    scnrw:  Scan row for experiment (1-based)
    l2dir:  Local AIRS Level 2 directory (to retrieve reference info)

    Raises FileNotFoundError if the Level 2 directory or the matching
    reference granule cannot be located (previously this fell through to a
    NameError on the undefined extraction arrays).
    '''
    # Read reference pressure levels (also validates that rffl is readable)
    f = Dataset(rffl,'r')
    airs_sarta_levs = f['level'][:]
    f.close()
    # Locate the reference granule under <l2dir>/YYYY/MM/DD/airs2sup
    airsdr = '%s/%04d/%02d/%02d/airs2sup' % (l2dir,yrchc,rfmn,rfdy)
    if not os.path.exists(airsdr):
        print('L2 directory not found')
        raise FileNotFoundError('AIRS Level 2 directory not found: %s' % airsdr)
    fllst = os.listdir(airsdr)
    l2str = 'AIRS.%04d.%02d.%02d.%03d' % (yrchc,rfmn,rfdy,rfgrn)
    rffd = -1
    j = 0
    while ( (j < len(fllst)) and (rffd < 0) ):
        # Only consider HDF files whose name contains the requested granule tag
        if ( fllst[j].endswith('.hdf') and (l2str in fllst[j]) ):
            l2fl = '%s/%s' % (airsdr,fllst[j])
            ncl2 = Dataset(l2fl)
            psfc = ncl2.variables['PSurfStd'][:,:]
            topg = ncl2.variables['topog'][:,:]
            freqemis = ncl2.variables['freqEmis'][:,:,:]
            emisIR = ncl2.variables['emisIRStd'][:,:,:]
            lndfrc = ncl2.variables['landFrac'][:,:]
            tsrf = ncl2.variables['TSurfStd'][:,:]
            print('Emis Freq Dims')
            print(freqemis.shape)
            print('Emis IR Dims')
            print(emisIR.shape)
            print('Land Frac Dims')
            print(lndfrc.shape)
            latfor = ncl2.variables['Latitude'][:,:]
            lonfor = ncl2.variables['Longitude'][:,:]
            ncl2.close()
            rffd = j
        j = j + 1
    if rffd < 0:
        raise FileNotFoundError('Reference granule %s not found in %s' % (l2str,airsdr))
    # Subset the requested scan row (scnrw is 1-based)
    latout = latfor[scnrw-1,:]
    lonout = lonfor[scnrw-1,:]
    pscvc = psfc[scnrw-1,:]
    topgvc = topg[scnrw-1,:]
    emsv = emisIR[scnrw-1,:,:]
    frqemsv = freqemis[scnrw-1,:,:]
    lfrcout = lndfrc[scnrw-1,:]
    tsrfout = tsrf[scnrw-1,:]
    # Write the extracted scan-row fields to HDF5
    f = h5py.File(outfl,'w')
    dflt = f.create_dataset('Latitude',data=latout)
    dfln = f.create_dataset('Longitude',data=lonout)
    dfps = f.create_dataset('PSurfStd',data=pscvc)
    dftp = f.create_dataset('topog',data=topgvc)
    dfem = f.create_dataset('emisIRStd',data=emsv)
    dffq = f.create_dataset('freqEmis',data=frqemsv)
    dflf = f.create_dataset('landFrac',data=lfrcout)
    dtsf = f.create_dataset('TSurfStd',data=tsrfout)
    f.close()
    return
def airs_post_match_l2(flnm, tmidx, tmday, lats, lons, mskarr, rgnfrm, \
gmmdir, nsmp = 0, msgvl = -9999, \
l2srch = '/archive/AIRSOps/airs/gdaac/v6'):
'''Match AIRS Level 2 granules to region masks, run the region-specific GMM
posterior analysis, optionally draw posterior samples, and write the results
to a new NetCDF file (flnm).

NOTE(review): leading indentation was lost in this copy of the file. The
comments below describe the apparent intent of each section; the actual
nesting must be confirmed against the original source before any change.
'''
# Match AIRS Level 2 to region masks and execute posterior analysis
# flnm: Name of output file (NetCDF expected)
# tms: Time index in output
# tmday: Datetime object with time information
# lats: Longitude variable array
# lons: Longitude variable array
# mskarr: Region mask array
# rgnfrm: Data frame with region indicators
# gmmdir: Directory with GMM results
# nsmp: Number of posterior samples to draw (optional)
# msgvl: Missing value
# l2srch: Level 2 search directory
# Search AIRS Level 2
airsdr = '%s/%04d/%02d/%02d/airs2ret' % (l2srch,tmday.year,tmday.month,tmday.day)
asupdr = '%s/%04d/%02d/%02d/airs2sup' % (l2srch,tmday.year,tmday.month,tmday.day)
dsclst = []
asclst = []
nlat = lats.shape[0]
nlon = lons.shape[0]
# Bounding box padded by 5 degrees on each side
lonmn = lons[0] - 5.0
lonmx = lons[nlon-1] + 5.0
latmn = lats[0] - 5.0
latmx = lats[nlat-1] + 5.0
# AIRS time is seconds since the 1993-01-01 epoch
d0 = datetime.datetime(1993,1,1,0,0,0)
ddif = tmday - d0
bsdif = ddif.total_seconds()
# Seed
sdchc = 151444 + tmday.year*10 + int(bsdif)
random.seed(sdchc)
# Set up reference frame, with region mask info
ltrp = numpy.repeat(lats,nlon)
ltidx = numpy.repeat(numpy.arange(nlat),nlon)
lnrp = numpy.tile(lons,nlat)
lnidx = numpy.tile(numpy.arange(nlon),nlat)
mskflt = mskarr.flatten()
merfrm = pandas.DataFrame({'GridLonIdx': lnidx, 'GridLatIdx': ltidx, \
'GridLon': lnrp, 'GridLat': ltrp, 'RgnMask': mskflt})
merfrm.loc[pandas.isnull(merfrm.RgnMask),'RgnMask'] = -99
merfrm['RgnMask'] = merfrm['RgnMask'].astype('int32')
if (os.path.exists(asupdr)):
# Set up a list of files/granules that might match up
fllst = os.listdir(asupdr)
#print(fllst)
for j in range(len(fllst)):
lncr = len(fllst[j])
l4 = lncr - 4
if (fllst[j][l4:lncr] == '.hdf'):
l2fl = '%s/%s' % (asupdr,fllst[j])
ncl2 = Dataset(l2fl)
slrzn = ncl2.variables['solzen'][:,:]
l2lat = ncl2.variables['Latitude'][:,:]
l2lon = ncl2.variables['Longitude'][:,:]
l2tm = ncl2.variables['Time'][:,:]
ncl2.close()
# Check lat/lon ranges and asc/dsc
l2tmdf = numpy.absolute(l2tm - bsdif)
l2mntm = numpy.min(l2tmdf)
# Within 4 hours
if l2mntm < 14400.0:
ltflt = l2lat.flatten()
lnflt = l2lon.flatten()
latsb = ltflt[(ltflt >= latmn) & (ltflt <= latmx)]
lonsb = lnflt[(lnflt >= lonmn) & (lnflt <= lonmx)]
if ( (latsb.shape[0] > 0) and (lonsb.shape[0] > 0) ):
asclst.append(fllst[j])
#sstr = '%s %.2f' % (fllst[j], l2mntm)
#print(sstr)
# Region posterior model names
hrcr = rgnfrm['Hour'].values[0]
abrvcr = rgnfrm['Abbrev'].values[0]
sstr = rgnfrm['Season'].values[0]
rgnfl = '%s/PostGMM_%s_%s_%02dUTC.nc' % (gmmdir,abrvcr,sstr,hrcr)
ncgm = Dataset(rgnfl)
rtnms = ncgm.variables['state_names_retrieved'][:]
lvs = ncgm.variables['level'][:]
ncgm.close()
nmclps = rtnms.tolist()
strvrs = list(map(calculate_VPD.clean_byte_list,nmclps))
# General AIRS level sequence
lsqair = numpy.arange(34,97)
lv850air = 90
# Count how many retrieved-state names are temperature PCs
tairpc = 0
for i in range(len(strvrs)):
if ('TempPC' in strvrs[i]):
tairpc = tairpc + 1
strpc = 'Number TAir PCs: %d' % (tairpc)
print(strpc)
# Region GMM output, grab eigenvectors
nrgn = rgnfrm.shape[0]
nlev = lvs.shape[0]
maxcmp = 0
if tairpc > 0:
taireig = numpy.zeros((nrgn,tairpc,nlev),dtype=numpy.float64)
tairmnvc = numpy.zeros((nrgn,nlev),dtype=numpy.float64)
for j in range(nrgn):
hrcr = rgnfrm['Hour'].values[j]
abrvcr = rgnfrm['Abbrev'].values[j]
sstr = rgnfrm['Season'].values[j]
rgnfl = '%s/PostGMM_%s_%s_%02dUTC.nc' % (gmmdir,abrvcr,sstr,hrcr)
ncgm = Dataset(rgnfl)
gmm_prp = ncgm.variables['mixture_proportion'][:]
ncgm.close()
nmxcmp = gmm_prp.shape[0]
# Track the largest mixture size across regions for the output array
if nmxcmp > maxcmp:
maxcmp = nmxcmp
if tairpc > 0:
ncgm = Dataset(rgnfl)
tairmnvc[j,:] = ncgm.variables['temp_prof_mean'][:]
taireig[j,:,:] = ncgm.variables['temp_eigenvector'][0:tairpc,:]
ncgm.close()
# Check temp prof: replace NaN levels in the mean profile with the profile max
tmpmx = numpy.nanmax(tairmnvc[j,:])
for q1 in range(tairmnvc.shape[1]):
if numpy.isnan(tairmnvc[j,q1]):
tairmnvc[j,q1] = tmpmx
# Level 2 processing
# Extract lat, lon, granules
# Additional processing
# 1. Cloud summaries
# 2. PCA of vertical profiles (region specific)
tmch = 0
if (len(asclst) > 0):
# Start matchups
for j in range(len(asclst)):
l2fl = '%s/%s' % (asupdr,asclst[j])
ncl2 = Dataset(l2fl)
l2lat = ncl2.variables['Latitude'][:,:]
l2lon = ncl2.variables['Longitude'][:,:]
l2tm = ncl2.variables['Time'][:,:]
cfrcair = ncl2.variables['CldFrcStd'][:,:,:,:,:]
cfrcaqc = ncl2.variables['CldFrcStd_QC'][:,:,:,:,:]
tsfcqc = ncl2.variables['TSurfAir_QC'][:,:]
tsfair = ncl2.variables['TSurfAir'][:,:]
tsferr = ncl2.variables['TSurfAirErr'][:,:]
psfc = ncl2.variables['PSurfStd'][:,:]
tairsp = ncl2.variables['TAirSup'][:,:,:]
ncldair = ncl2.variables['nCld'][:,:,:,:]
ncl2.close()
nairtrk = l2lat.shape[0]
nairxtk = l2lat.shape[1]
# Extract granule number from the file name (5th dot-separated field)
asplt = asclst[j].split('.')
grnchc = asplt[4]
# Total cloud fraction: sum of the two CldFrcStd layers
frctot = cfrcair[:,:,:,:,0] + cfrcair[:,:,:,:,1]
cldsmarr = numpy.zeros((nairtrk,nairxtk,4),frctot.dtype)
ncldmx = numpy.zeros((nairtrk,nairxtk),ncldair.dtype)
for q1 in range(nairtrk):
for p1 in range(nairxtk):
cldsmarr[q1,p1,:] = calculate_VPD.cloud_frac_summary(frctot[q1,p1,:,:])
ncldmx[q1,p1] = numpy.amax(ncldair[q1,p1,:,:])
# Data Frame
tkidx = numpy.repeat(numpy.arange(nairtrk),nairxtk)
xtidx = numpy.tile(numpy.arange(nairxtk),nairtrk)
l2lnflt = l2lon.flatten().astype(numpy.float64)
l2ltflt = l2lat.flatten().astype(numpy.float64)
l2tmflt = l2tm.flatten().astype(numpy.float64)
l2frm = pandas.DataFrame({'L2LonIdx': xtidx, 'L2LatIdx': tkidx, \
'L2Lon': l2lnflt, 'L2Lat': l2ltflt, 'L2Time': l2tmflt})
# Snap footprints to the (0.625 x 0.5 degree) analysis grid
l2frm['GridLon'] = numpy.around(l2frm['L2Lon']/0.625) * 0.625
l2frm['GridLat'] = numpy.around(l2frm['L2Lat']/0.5) * 0.5
l2frm['Granule'] = int(grnchc)
# Sfc info
sfcspt = calculate_VPD.sfclvl(psfc,lvs)
sfcspt = sfcspt + lsqair[0]
# 850 hPa minus near-surface temperature difference
tdftmp = tairsp[:,:,lv850air] - tsfair
for q1 in range(nairtrk):
for p1 in range(nairxtk):
if sfcspt[q1,p1] <= lv850air:
tdftmp[q1,p1] = tairsp[q1,p1,sfcspt[q1,p1]-2] - tsfair[q1,p1]
#str1 = 'Sfc below 850 hPa: %d, %d, %.4f' % (q1,p1,tdftmp[q1,p1])
#print(str1)
ttmp = tsfair.flatten()
tdftmp = tdftmp.flatten()
ertmp = tsferr.flatten()
# HDF arrays may be big-endian; pandas needs native byte order, so swap
if ttmp.dtype.byteorder == '>':
l2frm['NSTRtrv'] = ttmp.byteswap().newbyteorder()
else:
l2frm['NSTRtrv'] = ttmp
if ertmp.dtype.byteorder == '>':
l2frm['NSTL2Err'] = ertmp.byteswap().newbyteorder()
else:
l2frm['NSTL2Err'] = ertmp
if tdftmp.dtype.byteorder == '>':
l2frm['TDif850'] = tdftmp.byteswap().newbyteorder()
else:
l2frm['TDif850'] = tdftmp
qcmp = tsfcqc.flatten()
if qcmp.dtype.byteorder == '>':
l2frm['NSTRtrvQF'] = qcmp.byteswap().newbyteorder()
else:
l2frm['NSTRtrvQF'] = qcmp
ncldtmp = ncldmx.flatten()
if ncldtmp.dtype.byteorder == '>':
l2frm['NCloud'] = ncldtmp.byteswap().newbyteorder()
else:
l2frm['NCloud'] = ncldtmp
psftmp = psfc.flatten()
# NOTE(review): this tests ncldtmp's byteorder but swaps psftmp;
# it should almost certainly test psftmp.dtype.byteorder instead.
if ncldtmp.dtype.byteorder == '>':
l2frm['PSfc'] = psftmp.byteswap().newbyteorder()
else:
l2frm['PSfc'] = psftmp
l2frm['CFrcMean'] = cldsmarr[:,:,0].flatten()
l2frm['CFrcSD'] = cldsmarr[:,:,1].flatten()
l2frm['NClr'] = cldsmarr[:,:,2].flatten()
l2frm['NOvc'] = cldsmarr[:,:,3].flatten()
# Set up temp PCs
tpcnms = []
for t in range(tairpc):
pcnm = 'TempPC%d' % (t+1)
tpcnms.append(pcnm)
l2frm[pcnm] = numpy.zeros( (l2frm.shape[0],), dtype=cldsmarr.dtype)
# Keep only footprints that fall in a valid region mask cell
l2mrg = pandas.merge(l2frm,merfrm,on=['GridLon','GridLat'])
l2mrg = l2mrg[l2mrg['RgnMask'] >= 0]
print(l2mrg.shape)
nl2 = l2mrg.shape[0]
if nl2 > 0:
# PCA processing: project each temperature profile onto region eigenvectors
nlv = lsqair.shape[0]
lsq = numpy.arange(nlv)
for i in range(nl2):
rgidx = l2mrg['RgnMask'].values[i] - 1
atrk = l2mrg['L2LatIdx'].values[i]
ctrk = l2mrg['L2LonIdx'].values[i]
tprftmp = tairsp[atrk,ctrk,lsqair]
# Negative temperatures are fill; replace with the region mean profile
tprftmp = ma.masked_where(tprftmp < 0,tprftmp)
msq = ma.is_masked(tprftmp)
tprfscr = ma.filled(tprftmp, fill_value=tairmnvc[rgidx,:])
tpcsr = numpy.dot(taireig[rgidx,:,:],tprfscr)
for t in range(tairpc):
pcnm = 'TempPC%d' % (t+1)
# NOTE(review): assigning through .values modifies the frame in place;
# pandas does not guarantee this works on a filtered copy.
l2mrg[pcnm].values[i] = tpcsr[t]
if ( (i % 100) == 0):
rgstr = 'Region %d' % (rgidx+1)
print(rgstr)
print(tprfscr[50:nlv])
print(tpcsr)
# Append to master frame
if tmch == 0:
mrg_out = l2mrg
else:
# NOTE(review): DataFrame.append was removed in pandas 2.0;
# pandas.concat([mrg_out, l2mrg], ignore_index=True) is the replacement.
mrg_out = mrg_out.append(l2mrg,ignore_index=True)
if (nl2 > 50):
print(l2mrg[20:30])
print(l2mrg.columns)
tmch = tmch + nl2
# Loop through regions and match
# Region GMM output
nrgn = rgnfrm.shape[0]
totsdg = 0
print(strvrs)
# NOTE(review): if no granules matched above (tmch == 0), mrg_out is
# undefined here and the loop below raises NameError.
for j in range(nrgn):
hrcr = rgnfrm['Hour'].values[j]
abrvcr = rgnfrm['Abbrev'].values[j]
sstr = rgnfrm['Season'].values[j]
rgnfl = '%s/PostGMM_%s_%s_%02dUTC.nc' % (gmmdir,abrvcr,sstr,hrcr)
ncgm = Dataset(rgnfl)
#rtnms = ncgm.variables['state_names_retrieved'][:]
gmm_prp = ncgm.variables['mixture_proportion'][:]
gmm_mux = ncgm.variables['mean_true'][:,:]
gmm_muy = ncgm.variables['mean_retrieved'][:,:]
gmm_varx = ncgm.variables['varcov_true'][:,:,:]
gmm_varxy = ncgm.variables['varcov_cross'][:,:,:]
gmm_vary = ncgm.variables['varcov_retrieved'][:,:,:]
gmm_prcy = ncgm.variables['precmat_retrieved'][:,:,:]
gmm_pstvarx = ncgm.variables['varcov_post_true'][:,:,:]
ncgm.close()
# Soundings belonging to this region
frmsb = mrg_out[mrg_out['RgnMask'] == (j+1)]
print(frmsb.shape)
nsdg = frmsb.shape[0]
nmxcmp = gmm_prp.shape[0]
nrtrv = gmm_muy.shape[1]
nrtbs = nrtrv - tairpc
nxprd = gmm_mux.shape[1]
# Set up a data array: base covariates first, then temperature PCs
print(abrvcr)
ydattmp = numpy.zeros((nsdg,nrtrv),dtype=numpy.float64)
for q in range(nrtbs):
ydattmp[:,q] = frmsb[strvrs[q]]
for q in range(tairpc):
ydattmp[:,q+nrtbs] = frmsb[tpcnms[q]]
print(ydattmp[0:4,:])
## Apply GMM, from gmm_post_pred in airs_post_expt_support.R
# Densities
f_y_c = numpy.zeros((nsdg,nmxcmp),dtype=numpy.float64)
#p_c_y = numpy.zeros((nsdg,nmxcmp),dtype=numpy.float64)
print('Computing f_y_c')
for k in range(nmxcmp):
# Lift near-zero eigenvalues so the component covariance is invertible
w, v = linalg.eig(gmm_vary[k,:,:])
wsq = numpy.arange(w.shape[0])
wsb = wsq[w < 5.0e-5]
if wsb.shape[0] > 0:
s1 = 'Lifting %d eigenvalues' % (wsb.shape[0])
print(s1)
w[wsb] = 5.0e-5
wdg = numpy.diagflat(w)
gmm_vary[k,:,:] = numpy.dot(v, numpy.dot(wdg,v.T))
w, v = linalg.eig(gmm_vary[k,:,:])
print(numpy.amin(w))
if nrtrv > 1:
f_y_c[:,k] = stats.multivariate_normal.logpdf(ydattmp, mean=gmm_muy[k,:], cov=gmm_vary[k,:,:])
# NOTE(review): 'ntrv' is undefined (NameError if this branch is reached);
# presumably nrtrv was intended, and the univariate density is unimplemented.
elif ntrv == 1:
# Univariate density
ltr = 0
# Adjust for possible underflow
mxdns = numpy.amax(f_y_c,axis=1)
mxarr = numpy.transpose(numpy.tile(mxdns,reps=(nmxcmp,1)))
adjdns = f_y_c - mxarr
# Compute the conditional probabilities, p_c_y
print('computing p_c_y')
prprep = numpy.tile(gmm_prp,reps=(nsdg,1))
cmplk = prprep * numpy.exp(adjdns)
sumlk = numpy.sum(cmplk,axis=1)
sumrep = numpy.transpose(numpy.tile(sumlk,reps=(nmxcmp,1)))
cmpprb = cmplk / sumrep
print('predicting E_X_Y')
# Component-wise conditional means, then mixture-weighted combination
ex_y_c = numpy.zeros((nsdg,nxprd,nmxcmp),dtype=numpy.float64)
ex_y = numpy.zeros((nsdg,nxprd),dtype=numpy.float64)
for k in range(nmxcmp):
muxrp = numpy.tile(gmm_mux[k,:],reps=(nsdg,1))
muyrp = numpy.tile(gmm_muy[k,:],reps=(nsdg,1))
ydevcr = ydattmp - muyrp
prcdev = numpy.dot(gmm_prcy[k,:,:], numpy.transpose(ydevcr))
cvxytmp = numpy.transpose(gmm_varxy[k,:,:])
ex_y_c[:,:,k] = muxrp + numpy.transpose(numpy.dot(cvxytmp,prcdev))
print(prcdev.shape)
print(muxrp.shape)
print(muyrp.shape)
for k in range(nxprd):
cmpmns = cmpprb * ex_y_c[:,k,:]
ex_y[:,k] = numpy.sum(cmpmns,axis=1)
print(ex_y[8:12,0])
print('predicting Sigma_X_Y')
# Total posterior covariance = within-component + between-component parts
Sigma_X_Y_C_bet = numpy.zeros((nsdg,nxprd,nxprd,nmxcmp),dtype=numpy.float64)
Sigma_X_Y_C_wth = numpy.zeros((nsdg,nxprd,nxprd,nmxcmp),dtype=numpy.float64)
Sigma_X_Y = numpy.zeros((nsdg,nxprd,nxprd),dtype=numpy.float64)
for k in range(nmxcmp):
wthcv = gmm_pstvarx[k,:,:]
mndv = ex_y_c[:,:,k] - ex_y
prbrp = numpy.repeat(cmpprb[:,k],nxprd*nxprd)
prbrp = numpy.reshape(prbrp,(nsdg,nxprd,nxprd))
wthrp = numpy.tile(wthcv.flatten(),nsdg)
wthrp = numpy.reshape(wthrp,(nsdg,nxprd,nxprd))
Sigma_X_Y_C_wth[:,:,:,k] = wthrp
for i in range(nsdg):
Sigma_X_Y_C_bet[i,:,:,k] = numpy.outer(mndv[i,:],mndv[i,:])
Sigma_X_Y = Sigma_X_Y + prbrp * (Sigma_X_Y_C_wth[:,:,:,k] + Sigma_X_Y_C_bet[:,:,:,k])
print(prbrp.shape)
print(wthrp.shape)
# Optionally sample
# Posterior samples
if nsmp > 0:
# NOTE(review): numpy.float was removed in NumPy 1.24; use numpy.float64.
smpsv = numpy.zeros((nsdg,nsmp,nxprd),dtype=numpy.float)
skwsv = numpy.zeros((nsdg,nxprd),dtype=numpy.float)
kursv = numpy.zeros((nsdg,nxprd),dtype=numpy.float)
for i in range(nsdg):
tmpsmp = numpy.zeros((nsmp,nxprd),dtype=numpy.float)
cmpidx = numpy.zeros((nsmp,),dtype=numpy.int16)
# Allocate samples to mixture components, then draw from each
csmp = random.multinomial(nsmp,pvals = cmpprb[i,:])
cmsz = 0
for k in range(nmxcmp):
if csmp[k] > 0:
sdfn = cmsz + csmp[k]
dtz = random.multivariate_normal(numpy.zeros((nxprd,)), gmm_pstvarx[k,:,:], size=csmp[k])
dttmp = numpy.tile(ex_y_c[i,:,k],(csmp[k],1)) + dtz
tmpsmp[cmsz:sdfn,:] = dttmp[:,:]
cmpidx[cmsz:sdfn] = k + 1
cmsz = cmsz + csmp[k]
# Re-shuffle
ssq = numpy.arange(nsmp)
sqsmp = random.choice(ssq,size=nsmp,replace=False)
cmpshf = cmpidx[sqsmp]
smpsv[i,:,:] = tmpsmp[sqsmp,:]
# NOTE(review): this loop summarizes all soundings at once (axis=1);
# with indentation lost it is unclear whether it sits inside or after
# the per-sounding loop above — confirm against the original source.
for s1 in range(nxprd):
skwsv[:,s1] = stats.skew(smpsv[:,:,s1],axis=1)
kursv[:,s1] = stats.kurtosis(smpsv[:,:,s1],axis=1,fisher=True)
#print(skwtmp.shape)
#strskw = ' Skew %.3f \n' % (skwtmp[10])
#print(strskw)
# Create/update output arrays
# Region Indicator
# AIRS cross-track index
# AIRS along-track index
# Latitude
# Longitude
# Time
# Granule
# AIRS quality flag
# Predictor data array
# Posterior mean array
# Posterior (co)variance array
if totsdg == 0:
# First region with data: allocate the output arrays
rgout = numpy.zeros((nsdg,),dtype=numpy.int16)
rgout[:] = j + 1
qfout = numpy.zeros((nsdg,),dtype=numpy.int16)
qfout[:] = frmsb['NSTRtrvQF']
lnidxout = numpy.zeros((nsdg,),dtype=numpy.int16)
lnidxout[:] = frmsb['L2LonIdx']
ltidxout = numpy.zeros((nsdg,),dtype=numpy.int16)
ltidxout[:] = frmsb['L2LatIdx']
grnout = numpy.zeros((nsdg,),dtype=numpy.int16)
grnout[:] = frmsb['Granule']
latout = numpy.zeros((nsdg,),dtype=numpy.float32)
latout[:] = frmsb['L2Lat']
lonout = numpy.zeros((nsdg,),dtype=numpy.float32)
lonout[:] = frmsb['L2Lon']
tmout = numpy.zeros((nsdg,),dtype=numpy.float64)
tmout[:] = frmsb['L2Time']
psfout = numpy.zeros((nsdg,),dtype=numpy.float32)
psfout[:] = frmsb['PSfc']
l2errout = numpy.zeros((nsdg,),dtype=numpy.float32)
l2errout[:] = frmsb['NSTL2Err']
prdmnout = numpy.zeros((nsdg,nxprd),dtype=numpy.float32)
prdmnout[:,:] = ex_y
if nsmp > 0:
skwout = numpy.zeros((nsdg,nxprd),dtype=numpy.float32)
skwout[:,:] = skwsv
kurout = numpy.zeros((nsdg,nxprd),dtype=numpy.float32)
kurout[:,:] = kursv
smpout = numpy.zeros((nsdg,nsmp,nxprd),dtype=numpy.float32)
smpout[:,:,:] = smpsv
sigxyout = numpy.zeros((nsdg,nxprd,nxprd),dtype=numpy.float32)
sigxyout[:,:] = Sigma_X_Y
rtryout = numpy.zeros((nsdg,nrtrv),dtype=numpy.float32)
rtryout[:,:] = ydattmp
# Pad component probabilities out to the widest mixture (maxcmp)
cmpprbout = numpy.zeros((nsdg,maxcmp),dtype=numpy.float32)
cmpprbout[:,0:nmxcmp] = cmpprb
else:
# Subsequent regions: append to the existing output arrays
rgtmp = numpy.zeros((nsdg,),dtype=numpy.int16)
rgtmp[:] = j + 1
rgout = numpy.append(rgout,rgtmp)
qftmp = frmsb['NSTRtrvQF']
qfout = numpy.append(qfout,qftmp)
errtmp = frmsb['NSTL2Err']
l2errout = numpy.append(l2errout,errtmp)
lnidxtmp = frmsb['L2LonIdx']
lnidxout = numpy.append(lnidxout,lnidxtmp)
ltidxtmp = frmsb['L2LatIdx']
ltidxout = numpy.append(ltidxout,ltidxtmp)
grntmp = frmsb['Granule']
grnout = numpy.append(grnout,grntmp)
lontmp = frmsb['L2Lon']
lonout = numpy.append(lonout,lontmp)
lattmp = frmsb['L2Lat']
latout = numpy.append(latout,lattmp)
tmtmp = frmsb['L2Time']
tmout = numpy.append(tmout,tmtmp)
psftmp = frmsb['PSfc']
psfout = numpy.append(psfout,psftmp)
prdmnout = numpy.append(prdmnout,ex_y,axis=0)
if nsmp > 0:
skwout = numpy.append(skwout,skwsv,axis=0)
kurout = numpy.append(kurout,kursv,axis=0)
smpout = numpy.append(smpout,smpsv,axis=0)
rtryout = numpy.append(rtryout,ydattmp,axis=0)
sigxyout = numpy.append(sigxyout,Sigma_X_Y,axis=0)
cmpprbtmp = numpy.zeros((nsdg,maxcmp),dtype=numpy.float32)
cmpprbtmp[:,0:nmxcmp] = cmpprb
cmpprbout = numpy.append(cmpprbout,cmpprbtmp,axis=0)
totsdg = totsdg + nsdg
## Prepare output file
qout = Dataset(flnm,'w')
dimprd = qout.createDimension('state_retrieved',nrtrv)
dimxtr = qout.createDimension('state_true',nxprd)
dimsdg = qout.createDimension('sounding',totsdg)
dimchr = qout.createDimension('charnm',30)
dimmix = qout.createDimension('mixture_component',maxcmp)
if nsmp > 0:
dimsmp = qout.createDimension('posterior_sample',nsmp)
# Fixed-width (30-char) state names for the NetCDF character variable
str_out = netCDF4.stringtochar(numpy.array(strvrs,'S30'))
print(str_out)
varnms = qout.createVariable('state_names_retrieved','S1',['state_retrieved','charnm'])
varnms[:] = str_out
varrgn = qout.createVariable('region_indicator','i2',['sounding'], fill_value = -99)
varrgn[:] = rgout
varrgn.long_name = 'NCA CONUS region number'
varrgn.units = 'None'
varrgn.missing_value = -99
varxidx = qout.createVariable('airs_x_index','i2',['sounding'], fill_value = -99)
varxidx[:] = lnidxout
varxidx.long_name = 'AIRS cross-track index (0-based)'
varxidx.units = 'None'
varxidx.missing_value = -99
varyidx = qout.createVariable('airs_y_index','i2',['sounding'], fill_value = -99)
varyidx[:] = ltidxout
varyidx.long_name = 'AIRS along-track index (0-based)'
varyidx.units = 'None'
varyidx.missing_value = -99
vargrn = qout.createVariable('airs_granule','i2',['sounding'], fill_value = -99)
vargrn[:] = grnout
vargrn.long_name = 'AIRS granule number'
vargrn.units = 'None'
vargrn.missing_value = -99
varqf = qout.createVariable('airs_tsurfair_qc','i2',['sounding'], fill_value = -99)
varqf[:] = qfout
varqf.long_name = 'AIRS near-surface temperature quality flag'
varqf.units = 'None'
varqf.missing_value = -99
varlon = qout.createVariable('longitude','f4',['sounding'], fill_value = -9999)
varlon[:] = lonout
varlon.long_name = 'AIRS FOR center longitude'
varlon.units = 'degrees_east'
varlon.missing_value = -9999
varlat = qout.createVariable('latitude','f4',['sounding'], fill_value = -9999)
varlat[:] = latout
varlat.long_name = 'AIRS FOR center latitude'
varlat.units = 'degrees_north'
varlat.missing_value = -9999
vartm = qout.createVariable('time','f8',['sounding'], fill_value = -9999)
vartm[:] = tmout
vartm.long_name = 'AIRS observation time'
vartm.units = 'Seconds since 1993-01-01'
vartm.missing_value = -9999
varpsf = qout.createVariable('surface_pressure','f4',['sounding'], fill_value = -9999)
varpsf[:] = psfout
varpsf.long_name = 'AIRS FOR surface pressure'
varpsf.units = 'hPa'
varpsf.missing_value = -9999
varl2er = qout.createVariable('airs_tsurfair_err','f4',['sounding'], fill_value = -9999)
varl2er[:] = l2errout
varl2er.long_name = 'AIRS Level 2 near-surface temperature error estimate'
varl2er.units = 'K'
varl2er.missing_value = -9999
varmn = qout.createVariable('pred_post_mean','f4',['sounding','state_true'], fill_value = -9999)
varmn[:] = prdmnout
varmn.long_name = 'Posterior mean for true state'
varmn.units = ''
varmn.missing_value = -9999
if nsmp > 0:
varskw = qout.createVariable('pred_post_skew','f4',['sounding','state_true'], fill_value = -9999)
varskw[:] = skwout
varskw.long_name = 'Posterior skewness for true state'
varskw.units = ''
varskw.missing_value = -9999
varkur = qout.createVariable('pred_post_kurtosis','f4',['sounding','state_true'], fill_value = -9999)
varkur[:] = kurout
varkur.long_name = 'Posterior kurtosis for true state'
varkur.units = ''
varkur.missing_value = -9999
varsmp = qout.createVariable('pred_post_samples','f4',['sounding','posterior_sample','state_true'], fill_value = -9999)
varsmp[:] = smpout
varsmp.long_name = 'Posterior samples for true state'
varsmp.units = ''
varsmp.missing_value = -9999
# NOTE(review): varmn is reused here, shadowing the pred_post_mean handle above.
varmn = qout.createVariable('pred_post_var','f4',['sounding','state_true','state_true'], fill_value = -9999)
varmn[:] = sigxyout
varmn.long_name = 'Posterior (co)variance for true state'
varmn.units = ''
varmn.missing_value = -9999
varrtr = qout.createVariable('airs_ret_covariate','f4',['sounding','state_retrieved'], fill_value = -9999)
varrtr[:] = rtryout
varrtr.long_name = 'Retrieved covariates'
varrtr.units = ''
varrtr.missing_value = -9999
varprb = qout.createVariable('pred_post_prob','f4',['sounding','mixture_component'], fill_value = -9999)
varprb[:] = cmpprbout
varprb.long_name = 'Mixture component posterior probabilities'
varprb.units = 'None'
varprb.missing_value = -9999
qout.close()
return
def airs_add_isd_val(flnm, vlnm, tmidx, tmday, rgnfrm, gmmdir):
    '''Read ISD validation matchups from CSV and append them to an existing
    posterior-analysis NetCDF output file as an "ISD" group.

    flnm:   Name of output file (NetCDF expected, opened read/write)
    vlnm:   Location of validation data (CSV)
    tmidx:  Time index in output (unused here; kept for interface compatibility)
    tmday:  Datetime object with time information (unused here)
    rgnfrm: Data frame with region indicators (unused here)
    gmmdir: Directory with GMM results (unused here)
    '''
    # Fill values for missing data
    flflt = numpy.array([-9999.], dtype=numpy.float32)
    flshr = numpy.array([-99], dtype=numpy.int16)
    # Read validation; 'NA' entries become NaN
    vlfrm = pandas.read_csv(vlnm, na_values = 'NA')
    print(vlfrm.dtypes)
    # ISD temperature: convert degrees C to K, missing -> -9999
    isdtout = ma.array(vlfrm['isd_temperature'],dtype=numpy.float32)
    isdtout = ma.masked_invalid(isdtout)
    isdtout = isdtout + 273.15
    print(isdtout[0:10])
    isdtout = ma.filled(isdtout, fill_value = flflt)
    # Validation observation counts
    # NOTE(review): casting to int16 before masked_invalid means NaN entries
    # in 'N' are already corrupted by the cast; confirm 'N' is never missing.
    vlctout = ma.array(vlfrm['N'],dtype=numpy.int16)
    vlctout = ma.masked_invalid(vlctout)
    vlctout = ma.filled(vlctout, fill_value = flshr)
    # Merge with existing file: create or reuse the ISD group
    ncout = Dataset(flnm,'r+')
    if ('ISD' in ncout.groups):
        vlgrp = ncout.groups['ISD']
    else:
        vlgrp = ncout.createGroup('ISD')
    if ('ISD_temperature' in vlgrp.variables):
        vartisd = vlgrp.variables['ISD_temperature']
        vartisd[:] = isdtout
    else:
        vrtisd = vlgrp.createVariable('ISD_temperature','f4',['sounding'], fill_value = flflt[0])
        vrtisd[:] = isdtout
        vrtisd.long_name = 'Near-surface temperature at validation sites'
        vrtisd.units = 'K'
        vrtisd.missing_value = flflt[0]
    if ('ISD_count' in vlgrp.variables):
        varnisd = vlgrp.variables['ISD_count']
        # Bug fix: write the station counts here, not the temperatures
        # (original assigned isdtout to the count variable).
        varnisd[:] = vlctout
    else:
        vrnisd = vlgrp.createVariable('ISD_count','i2',['sounding'], fill_value = flshr[0])
        vrnisd[:] = vlctout
        vrnisd.long_name = 'Number of validation observation sites'
        vrnisd.units = 'none'
        vrnisd.missing_value = flshr[0]
    ncout.close()
    return
def qsummary(df, grpvr, vlvr):
    '''Quantile summary for a (grouped) data frame.

    df:    Input data frame
    grpvr: Grouping variable name (unused here; present so the function can
           be passed to pandas groupby-apply style callers)
    vlvr:  Name of the value column to summarize

    Returns a one-row DataFrame with the count of finite values (NSmp) and
    the 10/25/50/75/90th percentiles (Q10..Q90). When no finite values are
    present, the quantiles are NaN instead of raising (the original crashed
    in numpy.percentile on an empty array).
    '''
    tmpdt = df[vlvr]
    # Keep only finite entries (drops NaN/inf)
    dtvld = tmpdt[numpy.isfinite(tmpdt)]
    nmtch = dtvld.shape[0]
    plvs = numpy.array([10.0, 25.0, 50.0, 75.0, 90.0])
    if nmtch > 0:
        dtqs = numpy.percentile(dtvld, q=plvs)
    else:
        dtqs = numpy.full(plvs.shape[0], numpy.nan)
    dfout = pandas.DataFrame({'NSmp' : nmtch, 'Q10' : dtqs[0], 'Q25' : dtqs[1], 'Q50' : dtqs[2], \
                              'Q75' : dtqs[3], 'Q90' : dtqs[4]}, index=[0])
    return dfout
| 39.718098
| 176
| 0.57592
| 27,554
| 232,192
| 4.769435
| 0.058104
| 0.022668
| 0.020454
| 0.00745
| 0.802887
| 0.778895
| 0.753951
| 0.732858
| 0.715235
| 0.699308
| 0
| 0.056706
| 0.275823
| 232,192
| 5,845
| 177
| 39.724893
| 0.72485
| 0.062207
| 0
| 0.717425
| 0
| 0
| 0.118602
| 0.015981
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003552
| false
| 0.000888
| 0.002442
| 0
| 0.009545
| 0.046393
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e6bd491eeff13302295b6e343eec8f87ca9f334
| 30,253
|
py
|
Python
|
tests/test_topology.py
|
techlib/adminator
|
f7f6695778dc5fb741f118fe2e0358bdda632c7b
|
[
"MIT"
] | 1
|
2019-04-27T22:51:56.000Z
|
2019-04-27T22:51:56.000Z
|
tests/test_topology.py
|
techlib/adminator
|
f7f6695778dc5fb741f118fe2e0358bdda632c7b
|
[
"MIT"
] | 2
|
2016-11-25T10:02:15.000Z
|
2017-05-31T08:29:03.000Z
|
tests/test_topology.py
|
techlib/adminator
|
f7f6695778dc5fb741f118fe2e0358bdda632c7b
|
[
"MIT"
] | 1
|
2021-12-01T05:26:43.000Z
|
2021-12-01T05:26:43.000Z
|
import flexmock
from adminator.topology_agent import ConMapHTMLParser, AGUpdater
real_html = '''
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>AMPTRAC Analyzer (ID=1)</title>
<link media="all" href="/FS/analyzer1.css" type="text/css" rel="stylesheet">
<link media="all" href="/FS/analyzer2.css" type="text/css" rel="stylesheet">
<meta content="text/html; charset=UTF-8" http-equiv="Content-Type"/>
<link type="image/x-icon" href="/FS/favicon.ico" rel="SHORTCUT ICON">
<script type="text/javascript">
function reload_page()
{
window.location.reload(true);
}
function download_file(fileURL)
{
window.location.assign(fileURL);
}
</script>
<style media="all" type="text/css">
/* Styles for the ConnMap table elements */
td.connmap-na { cursor: pointer; background-color: lightGray; }
td.connmap-clr { cursor: pointer; background-color: white; }
td.connmap-con { cursor: pointer; background-color: lime; }
td.connmap-dis { cursor: pointer; background-color: red; }
td.connmap-lock { cursor: pointer; background-color: yellow; }
td.connmap-err { background-color: blue; }
td.connmap-db25 { background-color: beige; }
</style></head>
<body>
<table id="frame" cellspacing="0" cellpadding="0" border="0">
<thead>
<tr>
<td rowspan="2">
<img style="margin: 5px 0pt 5px 5px;" src="/FS/TELogo.gif"/>
</td>
<td id="naviTop" colspan="2">
<a href="http://www.te.com/industry/enterprisenetworks/">Go to Enterprise Networks Web Site</a>
<a href="/Logout.html" style="position: absolute; right: 10px;" >Log Out</a>
</td>
</tr>
<tr>
<td id="naviMain" colspan="2">
<ul>
<li>
<a href="/index.html">Home</a>
</li>
<li>
<a href="/NetworkSettings.html">Settings</a>
</li>
<li>
<a class="selected" href="/ConnMap.html">Maps</a>
</li>
<li>
<a href="/AlarmsLog.html">Logs</a>
</li>
</ul>
</td>
</tr>
<tr id="lines">
<td id="left">
<img align="middle" src="/FS/shadow-navi-top.gif"/>
</td>
<td id="centre">
</td>
<td style="vertical-align: top;">
<img align="middle" src="/FS/lines-right.gif"/>
</td>
</tr>
</thead>
<tbody>
<tr>
<td id="naviSide" style="text-align: center;">
<table cellspacing="0" cellpadding="0" style="display: inline;">
<tbody>
<tr>
<td colspan="2" class="leftHeader" height="22">
Maps
</td>
</tr>
<tr>
<td colspan="2" class="blueLine" height="2">
</td>
</tr>
<tr>
<td width="12" height="24">
<a href="/ConnMap.html">
<img border="0" src="/FS/dots.gif"/>
</a>
</td>
<td>
<a class="boldTxtGlobal" href="/ConnMap.html">
Connection Map</a>
</td>
</tr>
<tr>
<td width="12" height="24">
<a href="/ZoneMap.html">
<img border="0" src="/FS/arrow.gif"/>
</a>
</td>
<td>
<a class="txtGlobal" href="/ZoneMap.html">
Zone Map</a>
</td>
</tr>
</tbody>
</table>
</td>
<td id="content" rowspan="2" colspan="2">
<form action="/ProcessForm/ConnMap.html" method="post">
<table cellpadding="1" border="0" style="text-align: center; width: 80%;" title="Connection Map">
<tr>
<td align="center">
<table cellpadding="5" border="0" style="text-align: center; width: 70%;">
<tr style="text-align: center; width: 100%;">
<td colspan="2"> </td></tr>
<tr>
<td class="header2Bg" colspan="2">
<span class="boldText">
Connection Map</span>
</td>
</tr>
<tr>
<td colspan="2"> </td></tr>
<tr>
<td colspan="2">
<table border="1" style="border-collapse: collapse; text-align: center;" cellpadding="0px">
<tr>
<td>
<table border="1" style="border-collapse: collapse; text-align: center;" cellpadding="3px">
<tr>
<td class="connmap-db25"> DB#01 </td>
</tr>
<tr>
<td class="connmap-db25"> DB#02 </td>
</tr>
<tr>
<td class="connmap-db25"> DB#03 </td>
</tr>
<tr>
<td class="connmap-db25"> DB#04 </td>
</tr>
<tr>
<td class="connmap-db25"> DB#05 </td>
</tr>
<tr>
<td class="connmap-db25"> DB#06 </td>
</tr>
<tr>
<td class="connmap-db25"> DB#07 </td>
</tr>
</table>
</td>
<td>
<table border="1" style="border-collapse: collapse; text-align: center;" cellpadding="3px">
<tr>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 06">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-clr" title="No Connection">03</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 11">04</td>
<td class="connmap-clr" title="No Connection">05</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 05">06</td>
<td class="connmap-clr" title="No Connection">07</td>
<td class="connmap-clr" title="No Connection">08</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 20">09</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 10">10</td>
<td class="connmap-clr" title="No Connection">11</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 12">12</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 09">13</td>
<td class="connmap-clr" title="No Connection">14</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 08">15</td>
<td class="connmap-clr" title="No Connection">16</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 07">17</td>
<td class="connmap-clr" title="No Connection">18</td>
<td class="connmap-clr" title="No Connection">19</td>
<td class="connmap-clr" title="No Connection">20</td>
<td class="connmap-clr" title="No Connection">21</td>
<td class="connmap-clr" title="No Connection">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 04">24</td>
</tr>
<tr>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 14">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-clr" title="No Connection">03</td>
<td class="connmap-clr" title="No Connection">04</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 03">05</td>
<td class="connmap-clr" title="No Connection">06</td>
<td class="connmap-clr" title="No Connection">07</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 17">08</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 22">09</td>
<td class="connmap-clr" title="No Connection">10</td>
<td class="connmap-clr" title="No Connection">11</td>
<td class="connmap-clr" title="No Connection">12</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 13">13</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 01">14</td>
<td class="connmap-clr" title="No Connection">15</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 19">16</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 21">17</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 18">18</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 15">19</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#04 Port 16">20</td>
<td class="connmap-clr" title="No Connection">21</td>
<td class="connmap-clr" title="No Connection">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-clr" title="No Connection">24</td>
</tr>
<tr>
<td class="connmap-clr" title="No Connection">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-clr" title="No Connection">03</td>
<td class="connmap-clr" title="No Connection">04</td>
<td class="connmap-clr" title="No Connection">05</td>
<td class="connmap-clr" title="No Connection">06</td>
<td class="connmap-clr" title="No Connection">07</td>
<td class="connmap-clr" title="No Connection">08</td>
<td class="connmap-clr" title="No Connection">09</td>
<td class="connmap-clr" title="No Connection">10</td>
<td class="connmap-clr" title="No Connection">11</td>
<td class="connmap-clr" title="No Connection">12</td>
<td class="connmap-clr" title="No Connection">13</td>
<td class="connmap-clr" title="No Connection">14</td>
<td class="connmap-clr" title="No Connection">15</td>
<td class="connmap-clr" title="No Connection">16</td>
<td class="connmap-clr" title="No Connection">17</td>
<td class="connmap-clr" title="No Connection">18</td>
<td class="connmap-clr" title="No Connection">19</td>
<td class="connmap-clr" title="No Connection">20</td>
<td class="connmap-clr" title="No Connection">21</td>
<td class="connmap-clr" title="No Connection">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-clr" title="No Connection">24</td>
</tr>
<tr>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 14">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 05">03</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 24">04</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 06">05</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 01">06</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 17">07</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 15">08</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 13">09</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 10">10</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 04">11</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 12">12</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 13">13</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 01">14</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 19">15</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 20">16</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 08">17</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 18">18</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 16">19</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#01 Port 09">20</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 17">21</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#02 Port 09">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-clr" title="No Connection">24</td>
</tr>
<tr>
<td class="connmap-clr" title="No Connection">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-clr" title="No Connection">03</td>
<td class="connmap-clr" title="No Connection">04</td>
<td class="connmap-clr" title="No Connection">05</td>
<td class="connmap-clr" title="No Connection">06</td>
<td class="connmap-clr" title="No Connection">07</td>
<td class="connmap-clr" title="No Connection">08</td>
<td class="connmap-clr" title="No Connection">09</td>
<td class="connmap-clr" title="No Connection">10</td>
<td class="connmap-clr" title="No Connection">11</td>
<td class="connmap-clr" title="No Connection">12</td>
<td class="connmap-clr" title="No Connection">13</td>
<td class="connmap-clr" title="No Connection">14</td>
<td class="connmap-clr" title="No Connection">15</td>
<td class="connmap-clr" title="No Connection">16</td>
<td class="connmap-clr" title="No Connection">17</td>
<td class="connmap-clr" title="No Connection">18</td>
<td class="connmap-clr" title="No Connection">19</td>
<td class="connmap-clr" title="No Connection">20</td>
<td class="connmap-clr" title="No Connection">21</td>
<td class="connmap-clr" title="No Connection">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-clr" title="No Connection">24</td>
</tr>
<tr>
<td class="connmap-clr" title="No Connection">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-clr" title="No Connection">03</td>
<td class="connmap-clr" title="No Connection">04</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#06 Port 11">05</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#06 Port 12">06</td>
<td class="connmap-clr" title="No Connection">07</td>
<td class="connmap-clr" title="No Connection">08</td>
<td class="connmap-clr" title="No Connection">09</td>
<td class="connmap-clr" title="No Connection">10</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#06 Port 05">11</td>
<td class="connmap-con" title="Connected to: Analyzer 1 DB#06 Port 06">12</td>
<td class="connmap-clr" title="No Connection">13</td>
<td class="connmap-clr" title="No Connection">14</td>
<td class="connmap-clr" title="No Connection">15</td>
<td class="connmap-clr" title="No Connection">16</td>
<td class="connmap-clr" title="No Connection">17</td>
<td class="connmap-clr" title="No Connection">18</td>
<td class="connmap-clr" title="No Connection">19</td>
<td class="connmap-clr" title="No Connection">20</td>
<td class="connmap-clr" title="No Connection">21</td>
<td class="connmap-clr" title="No Connection">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-clr" title="No Connection">24</td>
</tr>
<tr>
<td class="connmap-clr" title="No Connection">01</td>
<td class="connmap-clr" title="No Connection">02</td>
<td class="connmap-clr" title="No Connection">03</td>
<td class="connmap-clr" title="No Connection">04</td>
<td class="connmap-clr" title="No Connection">05</td>
<td class="connmap-clr" title="No Connection">06</td>
<td class="connmap-clr" title="No Connection">07</td>
<td class="connmap-clr" title="No Connection">08</td>
<td class="connmap-clr" title="No Connection">09</td>
<td class="connmap-clr" title="No Connection">10</td>
<td class="connmap-clr" title="No Connection">11</td>
<td class="connmap-clr" title="No Connection">12</td>
<td class="connmap-clr" title="No Connection">13</td>
<td class="connmap-clr" title="No Connection">14</td>
<td class="connmap-clr" title="No Connection">15</td>
<td class="connmap-clr" title="No Connection">16</td>
<td class="connmap-clr" title="No Connection">17</td>
<td class="connmap-clr" title="No Connection">18</td>
<td class="connmap-clr" title="No Connection">19</td>
<td class="connmap-clr" title="No Connection">20</td>
<td class="connmap-clr" title="No Connection">21</td>
<td class="connmap-clr" title="No Connection">22</td>
<td class="connmap-clr" title="No Connection">23</td>
<td class="connmap-clr" title="No Connection">24</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td colspan="2"> </td></tr>
<tr>
<td class="errorText" colspan="2">
<input type="button" value=" Refresh " onClick="reload_page();" />
</td>
</tr>
</table>
</td>
</tr>
</table>
</form>
</td>
</tr>
<tr>
<td id="naviFooter"> </td>
</tr>
</tbody>
</table>
</body>
</html>'''
# Expected parse of real_html: one inner list per connector-map <tr>,
# one {'class', 'title'} dict per <td> cell (7 rows x 24 ports).
real_table = [[
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 06'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 11'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 05'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 20'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 10'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 12'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 09'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 08'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 07'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 04'}
], [
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 14'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 03'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 17'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 22'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 13'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 01'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 19'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 21'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 18'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 15'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#04 Port 16'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'}
], [
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'}
], [
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 14'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 05'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 24'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 06'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 01'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 17'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 15'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 13'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 10'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 04'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 12'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 13'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 01'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 19'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 20'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 08'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 18'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 16'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 09'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 17'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 09'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'}
], [
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'}
], [
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#06 Port 11'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#06 Port 12'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#06 Port 05'},
    {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#06 Port 06'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'}
], [
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'},
    {'class': 'connmap-clr', 'title': 'No Connection'}
]]
# Expected result of AGUpdater.get_connections() for real_table, keyed as
# {analyzer_id: {db_number: {port: (analyzer_id, db_number, port) or None}}}.
real_connections = {
    1: {
        1: {
            1: (1, 4, 6), 2: None, 3: None, 4: (1, 4, 11), 5: None, 6: (1, 4, 5),
            7: None, 8: None, 9: (1, 4, 20), 10: (1, 4, 10), 11: None, 12: (1, 4, 12),
            13: (1, 4, 9), 14: None, 15: (1, 4, 8), 16: None, 17: (1, 4, 7), 18: None,
            19: None, 20: None, 21: None, 22: None, 23: None, 24: (1, 4, 4)
        },
        2: {
            1: (1, 4, 14), 2: None, 3: None, 4: None, 5: (1, 4, 3), 6: None,
            7: None, 8: (1, 4, 17), 9: (1, 4, 22), 10: None, 11: None, 12: None,
            13: (1, 4, 13), 14: (1, 4, 1), 15: None, 16: (1, 4, 19), 17: (1, 4, 21), 18: (1, 4, 18),
            19: (1, 4, 15), 20: (1, 4, 16), 21: None, 22: None, 23: None, 24: None
        },
        3: {
            1: None, 2: None, 3: None, 4: None, 5: None, 6: None,
            7: None, 8: None, 9: None, 10: None, 11: None, 12: None,
            13: None, 14: None, 15: None, 16: None, 17: None, 18: None,
            19: None, 20: None, 21: None, 22: None, 23: None, 24: None
        },
        4: {
            1: (1, 2, 14), 2: None, 3: (1, 2, 5), 4: (1, 1, 24), 5: (1, 1, 6), 6: (1, 1, 1),
            7: (1, 1, 17), 8: (1, 1, 15), 9: (1, 1, 13), 10: (1, 1, 10), 11: (1, 1, 4), 12: (1, 1, 12),
            13: (1, 2, 13), 14: (1, 2, 1), 15: (1, 2, 19), 16: (1, 2, 20), 17: (1, 2, 8), 18: (1, 2, 18),
            19: (1, 2, 16), 20: (1, 1, 9), 21: (1, 2, 17), 22: (1, 2, 9), 23: None, 24: None
        },
        5: {
            1: None, 2: None, 3: None, 4: None, 5: None, 6: None,
            7: None, 8: None, 9: None, 10: None, 11: None, 12: None,
            13: None, 14: None, 15: None, 16: None, 17: None, 18: None,
            19: None, 20: None, 21: None, 22: None, 23: None, 24: None
        },
        6: {
            1: None, 2: None, 3: None, 4: None, 5: (1, 6, 11), 6: (1, 6, 12),
            7: None, 8: None, 9: None, 10: None, 11: (1, 6, 5), 12: (1, 6, 6),
            13: None, 14: None, 15: None, 16: None, 17: None, 18: None,
            19: None, 20: None, 21: None, 22: None, 23: None, 24: None
        },
        7: {
            1: None, 2: None, 3: None, 4: None, 5: None, 6: None,
            7: None, 8: None, 9: None, 10: None, 11: None, 12: None,
            13: None, 14: None, 15: None, 16: None, 17: None, 18: None,
            19: None, 20: None, 21: None, 22: None, 23: None, 24: None
        }
    }
}
# Smaller fixture for a two-analyzer group: parsed cell tables keyed by
# analyzer id (same cell format as real_table).
multi_an_tables = {
    1: [
        [
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 03'},
            {'class': 'connmap-clr', 'title': 'No Connection'},
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 01'},
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 01'},
        ],
        [
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#01 Port 04'},
            {'class': 'connmap-clr', 'title': 'No Connection'},
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 2 DB#01 Port 04'},
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 2 DB#01 Port 03'},
        ],
    ],
    2: [
        [
            {'class': 'connmap-clr', 'title': 'No Connection'},
            {'class': 'connmap-clr', 'title': 'No Connection'},
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 04'},
            {'class': 'connmap-con', 'title': 'Connected to: Analyzer 1 DB#02 Port 03'},
        ],
    ],
}
# Expected connections for multi_an_tables, keyed as
# {analyzer_id: {db_number: {port: (analyzer_id, db_number, port) or None}}}.
multi_an_connections = {
    1: {
        1: {
            1: (1, 1, 3), 2: None, 3: (1, 1, 1), 4: (1, 2, 1)
        },
        2: {
            1: (1, 1, 4), 2: None, 3: (2, 1, 4), 4: (2, 1, 3)
        }
    },
    2: {
        1: {
            1: None, 2: None, 3: (1, 2, 4), 4: (1, 2, 3)
        }
    }
}
# Fake analyzer objects: only the analyzer_id_in_group attribute is read
# by the code under test.
analyzer1 = flexmock(analyzer_id_in_group = 1)
analyzer2 = flexmock(analyzer_id_in_group = 2)
# One single-analyzer group and one two-analyzer group for the tests below.
analyzer_group1 = [analyzer1]
analyzer_group2 = [analyzer1, analyzer2]
def test_real_html_parse():
    """The parser must recover the complete cell table from the captured HTML."""
    parser = ConMapHTMLParser()
    parser.feed(real_html)
    assert parser.get_table() == real_table
def test_real_get_connections():
    """A single-analyzer group maps the parsed table onto real_connections."""
    updater = AGUpdater(None, None)
    tables_by_analyzer = {1: real_table}
    assert updater.get_connections(analyzer_group1, tables_by_analyzer) == real_connections
def test_multi_an_get_connections():
    """A two-analyzer group maps per-analyzer tables onto multi_an_connections."""
    updater = AGUpdater(None, None)
    assert updater.get_connections(analyzer_group2, multi_an_tables) == multi_an_connections
| 45.977204
| 105
| 0.603048
| 4,428
| 30,253
| 4.110659
| 0.05262
| 0.23404
| 0.204373
| 0.272498
| 0.870509
| 0.853313
| 0.843644
| 0.838479
| 0.836172
| 0.831996
| 0
| 0.057326
| 0.174297
| 30,253
| 657
| 106
| 46.047184
| 0.671337
| 0
| 0
| 0.620905
| 0
| 0.088924
| 0.724589
| 0.035401
| 0
| 0
| 0
| 0
| 0.00468
| 1
| 0.00468
| false
| 0
| 0.00312
| 0
| 0.0078
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0e7c5b8c3529cd49e9c6435e71a43f2c3f5f128c
| 3,663
|
py
|
Python
|
ads/fundamentals/disjoint_sets.py
|
Aminul-Momin/Algorithms_and_Data_Structures
|
cba73b36b73ad92fb34bc34a0e03503f7a137713
|
[
"MIT"
] | null | null | null |
ads/fundamentals/disjoint_sets.py
|
Aminul-Momin/Algorithms_and_Data_Structures
|
cba73b36b73ad92fb34bc34a0e03503f7a137713
|
[
"MIT"
] | null | null | null |
ads/fundamentals/disjoint_sets.py
|
Aminul-Momin/Algorithms_and_Data_Structures
|
cba73b36b73ad92fb34bc34a0e03503f7a137713
|
[
"MIT"
] | null | null | null |
"""
The module provides a 'union-find' data type also known as 'Disjoint Sets' data type.
"""
class DisjointSetsQF():
    """
    The class represents a 'Union-Find' data structure where the 'find'
    operation is optimized to have constant time complexity (quick-find).
    """

    def __init__(self, N):
        """Initializes an empty DisjointSetsQF with 'N' elements 0 through 'N'-1.
        Initially each element is in its own component.

        Args:
            N (int): the number of elements.
        """
        self.n = N                          # Number of elements
        self.parent = list(range(self.n))   # parent[i] = component id of i

    def find(self, i):
        """Find the canonical element of the set containing element 'i'.

        Args:
            i (int): an element.

        Returns:
            int: the canonical element of the set containing element 'i'.
        """
        # Quick-find invariant: parent[i] always holds the component id
        # directly, so find is a single O(1) lookup.
        # BUG FIX: the original stub had no body, so find() returned None
        # and is_connected() compared None == None (always True).
        return self.parent[i]

    def union(self, x, y):
        """Merges the set containing element 'x' with the set containing element 'y'.

        Args:
            x (int): one element.
            y (int): the other element.
        """
        pid = self.find(x)
        qid = self.find(y)
        if pid == qid:
            return  # already in the same set
        # Quick-find union: relabel every member of x's component, O(N).
        self.parent = [qid if p == pid else p for p in self.parent]

    def is_connected(self, x, y):
        """Check if element 'x' and 'y' are connected or not.

        Args:
            x (int): one element.
            y (int): the other element.

        Returns:
            bool: True if 'x' and 'y' are in the same set, False otherwise.
        """
        return self.find(x) == self.find(y)

    def count(self):
        """Count the number of total elements.

        Returns:
            int: the number of total elements.
        """
        return self.n
class DisjointSetsQU(DisjointSetsQF):
    """
    The class represents a 'Union-Find' data structure where the 'union'
    operation is optimized to have constant time complexity (quick-union).
    """

    def __init__(self, N):
        """Initializes an empty DisjointSetsQU with 'N' elements 0 through 'N'-1.
        Initially each element is in its own component.

        Args:
            N (int): the number of elements.
        """
        # Zero-argument super() replaces the dated super(DisjointSetsQU, self).
        super().__init__(N)

    def union(self, x, y):
        """Merges the set containing element 'x' with the set containing element 'y'.

        Args:
            x (int): one element.
            y (int): the other element.
        """
        i = self.find(x)
        j = self.find(y)
        if i == j:
            return  # already in the same set
        # i and j are roots, so linking root i under root j merges the trees.
        # (The original wrote self.parent[j], which equals j for a root.)
        self.parent[i] = j

    def find(self, i):
        """Find the canonical element of the set containing element 'i'.

        Args:
            i (int): an element.

        Returns:
            int: the canonical element of the set containing element 'i'.
        """
        # Follow parent pointers until reaching a root (its own parent).
        while i != self.parent[i]:
            i = self.parent[i]
        return i
class DisjointSetsWeightedQU(DisjointSetsQU):
    """
    The class represents a 'Union-Find' data structure where 'union' always
    links the smaller tree under the larger one (union by size), keeping
    the trees shallow so 'find' runs in O(log N).
    """

    def __init__(self, N):
        """Initializes an empty DisjointSetsWeightedQU with 'N' elements 0
        through 'N'-1. Initially each element is in its own component.

        Args:
            N (int): the number of elements.
        """
        # BUG FIX: the original called super(DisjointSetsQU, self).__init__(N),
        # which skips DisjointSetsQU in the MRO; use zero-argument super().
        super().__init__(N)
        # size[r] = number of elements in the tree rooted at r.
        self.size = [1] * N

    def union(self, x, y):
        """Merges the set containing element 'x' with the set containing element 'y'.

        Args:
            x (int): one element.
            y (int): the other element.
        """
        # The original left this as a stub; implemented as union by size.
        i = self.find(x)
        j = self.find(y)
        if i == j:
            return  # already in the same set
        # Attach the smaller tree under the root of the larger one.
        if self.size[i] < self.size[j]:
            i, j = j, i
        self.parent[j] = i
        self.size[i] += self.size[j]

    def find(self, i):
        """Find the canonical element of the set containing element 'i'.

        Args:
            i (int): an element.

        Returns:
            int: the canonical element of the set containing element 'i'.
        """
        # The original left this as a stub; walk up to the root.
        while i != self.parent[i]:
            i = self.parent[i]
        return i
| 27.133333
| 85
| 0.562654
| 464
| 3,663
| 4.396552
| 0.18319
| 0.035294
| 0.094118
| 0.135294
| 0.777451
| 0.753922
| 0.753922
| 0.753922
| 0.753922
| 0.753922
| 0
| 0.002468
| 0.336336
| 3,663
| 134
| 86
| 27.335821
| 0.836693
| 0.598144
| 0
| 0.464286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.392857
| false
| 0.071429
| 0
| 0
| 0.607143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
0e9642f2e3113c059424329e4460efe39ac72bee
| 1,713
|
py
|
Python
|
extract/tests/test_extract_invalid.py
|
SbastianGarzon/o2r-meta
|
ec5f7c6d432cbb43ab7df2985fc502e60fe7bcff
|
[
"Apache-2.0"
] | 2
|
2017-06-28T14:47:57.000Z
|
2020-11-10T01:59:11.000Z
|
extract/tests/test_extract_invalid.py
|
SbastianGarzon/o2r-meta
|
ec5f7c6d432cbb43ab7df2985fc502e60fe7bcff
|
[
"Apache-2.0"
] | 95
|
2016-07-14T13:21:52.000Z
|
2021-04-08T12:51:53.000Z
|
extract/tests/test_extract_invalid.py
|
SbastianGarzon/o2r-meta
|
ec5f7c6d432cbb43ab7df2985fc502e60fe7bcff
|
[
"Apache-2.0"
] | 4
|
2016-07-21T12:16:37.000Z
|
2021-10-04T13:31:40.000Z
|
# pylint: skip-file
import os
import json
def test_invalid_compendium(script_runner, tmpdir):
    """Extracting an invalid compendium must succeed and record the missing
    displayfile/mainfile entries as None (never as empty strings).
    """
    ret = script_runner.run('python3', 'o2rmeta.py', '-debug', 'extract',
                            '-i', 'extract/tests/compendium_invalid',
                            '-o', str(tmpdir),
                            '-xo', '-m')
    print(ret.stdout)
    print(ret.stderr)
    assert ret.success, "process should return success"
    assert ret.stderr == '', "stderr should be empty"
    # Context manager closes the handle deterministically; the original
    # json.load(open(...)) leaked the open file object.
    with open(os.path.join(str(tmpdir), 'metadata_raw.json')) as fp:
        metadata = json.load(fp)
    assert metadata['displayfile'] is None, "missing displayfile should be none"
    assert metadata['mainfile'] is None, "missing mainfile should be none"
    assert metadata['displayfile'] != "", "missing displayfile should be none not empty string"
    assert metadata['mainfile'] != "", "missing mainfile should be none not empty string"
def test_invalid_compendium_basedir(script_runner, tmpdir):
    """Same as test_invalid_compendium but with an explicit -b base dir:
    missing displayfile/mainfile must be None, never empty strings.
    """
    ret = script_runner.run('python3', 'o2rmeta.py', '-debug', 'extract',
                            '-i', 'extract/tests/compendium_invalid',
                            '-b', 'extract/tests/compendium_invalid',
                            '-o', str(tmpdir),
                            '-xo', '-m')
    print(ret.stdout)
    print(ret.stderr)
    assert ret.success, "process should return success"
    assert ret.stderr == '', "stderr should be empty"
    # Context manager closes the handle deterministically; the original
    # json.load(open(...)) leaked the open file object.
    with open(os.path.join(str(tmpdir), 'metadata_raw.json')) as fp:
        metadata = json.load(fp)
    assert metadata['displayfile'] is None, "missing displayfile should be none"
    assert metadata['mainfile'] is None, "missing mainfile should be none"
    assert metadata['displayfile'] != "", "missing displayfile should be none not empty string"
    assert metadata['mainfile'] != "", "missing mainfile should be none not empty string"
| 45.078947
| 95
| 0.675423
| 213
| 1,713
| 5.366197
| 0.239437
| 0.069991
| 0.08399
| 0.090989
| 0.92301
| 0.92301
| 0.92301
| 0.92301
| 0.92301
| 0.92301
| 0
| 0.002859
| 0.183304
| 1,713
| 38
| 96
| 45.078947
| 0.814153
| 0.009924
| 0
| 0.83871
| 0
| 0
| 0.422419
| 0.056637
| 0
| 0
| 0
| 0
| 0.387097
| 1
| 0.064516
| false
| 0
| 0.064516
| 0
| 0.129032
| 0.129032
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ec1a884b4108519cc6ea47221479b0bd91c66b2
| 23,608
|
py
|
Python
|
imagetrac_docker/b5/migrations/0001_initial.py
|
arsenalstriker14/imagetraccloud
|
04004d5eabc82e85596bc9e110c9250d5f882e17
|
[
"MIT"
] | null | null | null |
imagetrac_docker/b5/migrations/0001_initial.py
|
arsenalstriker14/imagetraccloud
|
04004d5eabc82e85596bc9e110c9250d5f882e17
|
[
"MIT"
] | null | null | null |
imagetrac_docker/b5/migrations/0001_initial.py
|
arsenalstriker14/imagetraccloud
|
04004d5eabc82e85596bc9e110c9250d5f882e17
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-20 09:04
from __future__ import unicode_literals
import datetime
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the ``b5`` app (auto-generated by Django 1.10.5).

    Creates the product-tracking tables (AdProduct, Product, RumbaProduct,
    FirstReceipt, ...), the image/webfile bookkeeping tables, lookup tables
    (Buyers, ColorCode, ColorGrid, Department) and UserProfile, then adds the
    cross-model relations that could not be declared inline because the
    target models are created later in the same operation list.
    """

    # First migration of this app — there is no prior app state to build on.
    initial = True

    dependencies = [
        # UserProfile.user points at the (swappable) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Ad-sheet product rows; almost every column is an optional CharField
        # because the data is imported from loosely formatted spreadsheets.
        migrations.CreateModel(
            name='AdProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ad_date', models.CharField(blank=True, default=None, max_length=20, null=True)),
                ('item_no', models.CharField(blank=True, max_length=30, null=True)),
                ('mfg', models.CharField(blank=True, max_length=30, null=True)),
                ('desc', models.CharField(blank=True, max_length=200, null=True)),
                ('vendor_number', models.CharField(blank=True, max_length=30, null=True)),
                ('order_date', models.CharField(blank=True, max_length=30, null=True)),
                ('received_dc', models.CharField(blank=True, max_length=30, null=True)),
                ('received_137', models.CharField(blank=True, max_length=30, null=True)),
                ('received_buyer', models.CharField(blank=True, max_length=40, null=True)),
                ('received_other', models.CharField(blank=True, default=None, max_length=50, null=True)),
                ('photo_dldate', models.CharField(blank=True, max_length=50, null=True)),
                ('whowhen', models.CharField(blank=True, max_length=100, null=True)),
                ('studio_out', models.CharField(blank=True, max_length=30, null=True)),
                ('checked_out', models.CharField(blank=True, max_length=100, null=True)),
                ('have_image', models.CharField(blank=True, max_length=30, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default='None', max_length=200, null=True)),
                ('shooting_instructions', models.CharField(blank=True, default=None, max_length=200, null=True)),
                ('studio_in', models.CharField(blank=True, max_length=50, null=True)),
                ('notes', models.CharField(blank=True, max_length=200, null=True)),
                ('item_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('short_sku', models.CharField(blank=True, max_length=30, null=True)),
                ('from_file', models.CharField(blank=True, max_length=30, null=True)),
                ('sku', models.CharField(blank=True, max_length=30, null=True)),
                ('sku_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('first', models.CharField(blank=True, max_length=5, null=True)),
                ('first_date', models.CharField(blank=True, max_length=30, null=True)),
                ('buyer', models.CharField(blank=True, max_length=50, null=True)),
                ('merch_to_137', models.CharField(blank=True, max_length=200, null=True)),
                ('product_class', models.CharField(blank=True, max_length=4, null=True)),
                ('dc_received_u', models.IntegerField(blank=True, null=True)),
                ('curr_dc_oh_u', models.IntegerField(blank=True, null=True)),
                ('dc_curr_oo_u', models.IntegerField(blank=True, null=True)),
                ('na', models.CharField(blank=True, max_length=5, null=True)),
                ('size_type', models.CharField(blank=True, max_length=10, null=True)),
                ('color_desc', models.CharField(blank=True, max_length=200, null=True)),
                ('version', models.CharField(blank=True, max_length=20, null=True)),
            ],
        ),
        # Buyer lookup; product_class is unique so Product can FK onto it.
        migrations.CreateModel(
            name='Buyers',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('buyer', models.CharField(blank=True, max_length=50, null=True)),
                ('department', models.CharField(blank=True, max_length=3, null=True)),
                ('product_class', models.CharField(blank=True, max_length=4, null=True, unique=True)),
                ('description', models.CharField(blank=True, max_length=150, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='CheckProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sku_ns', models.CharField(max_length=30)),
                ('brand', models.CharField(blank=True, max_length=20, null=True)),
                ('desc', models.CharField(blank=True, max_length=200, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default='None', max_length=200, null=True)),
                ('ad_date', models.CharField(blank=True, max_length=20, null=True)),
                ('mfg', models.CharField(blank=True, max_length=30, null=True)),
                ('already_tracked', models.CharField(blank=True, default='None', max_length=300, null=True)),
                ('na', models.NullBooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='ColorCode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=3, unique=True)),
                ('color', models.CharField(blank=True, max_length=30, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='ColorGrid',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('color', models.CharField(blank=True, max_length=30, null=True)),
                ('hexcode', models.CharField(max_length=6, unique=True)),
                ('description', models.CharField(max_length=300, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='Department',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(choices=[('Admin', 'Admin'), ('Advertising', 'Advertising'), ('Art', 'Art'), ('Layout', 'Layout'), ('Copy', 'Copy'), ('Purchasing', 'Purchasing'), ('IT', 'IT')], max_length=64)),
            ],
        ),
        migrations.CreateModel(
            name='Deployed',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_no', models.CharField(blank=True, max_length=30, null=True)),
                ('operator', models.CharField(blank=True, max_length=50, null=True)),
                # Postgres-specific JSONB column (this app requires PostgreSQL).
                ('filenames', django.contrib.postgres.fields.jsonb.JSONField()),
            ],
        ),
        migrations.CreateModel(
            name='FirstReceipt',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('buyer', models.CharField(blank=True, max_length=30, null=True)),
                ('dc_received_u', models.IntegerField(blank=True, null=True)),
                ('curr_dc_oh_u', models.IntegerField(blank=True, null=True)),
                ('dc_curr_oo_u', models.IntegerField(blank=True, null=True)),
                ('date_received', models.CharField(blank=True, max_length=20, null=True)),
                ('item_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('short_sku', models.CharField(blank=True, max_length=30, null=True)),
                ('size_type', models.CharField(blank=True, max_length=10, null=True)),
                ('item_no', models.CharField(blank=True, max_length=30, null=True)),
                ('vendor_style', models.CharField(blank=True, max_length=30, null=True)),
                ('description', models.CharField(blank=True, max_length=200, null=True)),
                ('color_desc', models.CharField(blank=True, max_length=200, null=True)),
                ('have_image', models.CharField(blank=True, max_length=30, null=True)),
                ('ad_date', models.CharField(blank=True, max_length=50, null=True)),
                ('order_date', models.CharField(blank=True, max_length=50, null=True)),
                ('received_dc', models.CharField(blank=True, max_length=30, null=True)),
                ('received_137', models.CharField(blank=True, max_length=30, null=True)),
                ('from_file', models.CharField(blank=True, max_length=30, null=True)),
                ('photo_dldate', models.DateField(blank=True, null=True)),
                ('whowhen', models.CharField(blank=True, max_length=100, null=True)),
                ('studio_out', models.CharField(blank=True, max_length=50, null=True)),
                ('checked_out', models.CharField(blank=True, max_length=100, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default='None', max_length=200, null=True)),
                ('studio_in', models.CharField(blank=True, max_length=50, null=True)),
                ('merch_to_137', models.CharField(blank=True, max_length=200, null=True)),
                ('product_class', models.CharField(blank=True, max_length=4, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='HotItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_no', models.CharField(max_length=30, unique=True)),
                ('ad_date', models.CharField(blank=True, default=None, max_length=20, null=True)),
                # Callable default: evaluated per row, not at migration time.
                ('create_date', models.DateField(default=datetime.date.today)),
                ('item_name', models.CharField(blank=True, max_length=210, null=True)),
                ('comments', models.TextField(blank=True, max_length=2000, null=True)),
                ('reply', models.TextField(blank=True, max_length=2000, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default=None, max_length=200, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='InventoryProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sku', models.CharField(blank=True, max_length=7, null=True)),
                ('item_no', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('desc', models.CharField(blank=True, max_length=200, null=True)),
                ('quantity', models.CharField(max_length=3)),
                ('source', models.CharField(blank=True, max_length=100, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='LargeWebfiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('long_sku', models.CharField(max_length=30)),
                ('filename', models.CharField(max_length=50)),
                ('mod_date', models.CharField(blank=True, max_length=200, null=True)),
                ('item_ns', models.CharField(max_length=30, null=True)),
                ('sku', models.CharField(max_length=30, null=True)),
                ('sku_ns', models.CharField(max_length=30, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='MSWebfiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('long_sku', models.CharField(max_length=30)),
                ('filename', models.CharField(max_length=50)),
                ('item_ns', models.CharField(max_length=30, null=True)),
                ('sku', models.CharField(max_length=30, null=True)),
                ('sku_ns', models.CharField(max_length=30, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='OneImageFiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_no', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='PrintFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assoc_sku', models.CharField(max_length=30)),
                ('filename', models.CharField(blank=True, max_length=500, null=True)),
                ('path', models.CharField(blank=True, max_length=200, null=True)),
                ('mod_date', models.CharField(blank=True, max_length=200, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='ProcessedFiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_no', models.CharField(max_length=30)),
                ('filename', models.CharField(blank=True, max_length=50, null=True)),
                ('processor', models.CharField(blank=True, max_length=50, null=True)),
                ('item_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('sku', models.CharField(blank=True, max_length=30, null=True)),
                ('sku_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('product_class', models.CharField(blank=True, max_length=10, null=True)),
                ('upload_date', models.CharField(blank=True, max_length=30, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ad_date', models.CharField(blank=True, default=None, max_length=20, null=True)),
                ('item_no', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('mfg', models.CharField(blank=True, max_length=30, null=True)),
                ('desc', models.CharField(blank=True, max_length=200, null=True)),
                ('vendor_number', models.CharField(blank=True, max_length=30, null=True)),
                ('order_date', models.DateField(blank=True, null=True)),
                ('received_dc', models.DateField(blank=True, null=True)),
                ('received_137', models.CharField(blank=True, max_length=30, null=True)),
                ('received_buyer', models.CharField(blank=True, max_length=40, null=True)),
                ('received_other', models.CharField(blank=True, default=None, max_length=50, null=True)),
                ('photo_dldate', models.CharField(blank=True, max_length=50, null=True)),
                ('whowhen', models.CharField(blank=True, max_length=100, null=True)),
                ('studio_out', models.DateField(blank=True, null=True)),
                ('checked_out', models.CharField(blank=True, max_length=100, null=True)),
                ('have_image', models.CharField(blank=True, max_length=30, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default='None', max_length=200, null=True)),
                ('shooting_instructions', models.CharField(blank=True, default=None, max_length=200, null=True)),
                ('studio_in', models.CharField(blank=True, max_length=50, null=True)),
                ('notes', models.CharField(blank=True, max_length=200, null=True)),
                ('item_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('short_sku', models.CharField(blank=True, max_length=30, null=True)),
                ('from_file', models.CharField(blank=True, max_length=30, null=True)),
                ('sku', models.CharField(blank=True, max_length=30, null=True)),
                ('sku_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('first', models.CharField(blank=True, max_length=5, null=True)),
                ('first_date', models.CharField(blank=True, max_length=30, null=True)),
                ('buyer', models.CharField(blank=True, max_length=50, null=True)),
                ('merch_to_137', models.CharField(blank=True, max_length=200, null=True)),
                ('dc_received_u', models.IntegerField(blank=True, null=True)),
                ('curr_dc_oh_u', models.IntegerField(blank=True, null=True)),
                ('dc_curr_oo_u', models.IntegerField(blank=True, null=True)),
                ('na', models.CharField(blank=True, max_length=5, null=True)),
                ('size_type', models.CharField(blank=True, max_length=10, null=True)),
                ('color_desc', models.CharField(blank=True, max_length=200, null=True)),
                # FK onto Buyers.product_class (a unique non-PK column), not Buyers.id.
                ('product_class', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='b5.Buyers', to_field='product_class')),
            ],
        ),
        migrations.CreateModel(
            name='RegularWebfiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('long_sku', models.CharField(max_length=30)),
                ('filename', models.CharField(max_length=50)),
                ('item_ns', models.CharField(max_length=30, null=True)),
                ('sku', models.CharField(max_length=30, null=True)),
                ('sku_ns', models.CharField(max_length=30, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='ReplacedImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sku', models.CharField(blank=True, max_length=30, null=True)),
                ('sku_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('item_no', models.CharField(blank=True, max_length=30, null=True)),
                ('item_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('old_filename', models.CharField(blank=True, max_length=50, null=True)),
                ('new_filename', models.CharField(blank=True, max_length=50, null=True)),
                ('change_date', models.DateField(blank=True, default=datetime.date.today, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='RumbaProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_no', models.CharField(blank=True, max_length=30, null=True)),
                ('mfg', models.CharField(blank=True, max_length=30, null=True)),
                ('desc', models.CharField(blank=True, max_length=200, null=True)),
                ('vendor_number', models.CharField(blank=True, max_length=30, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default='None', max_length=200, null=True)),
                ('item_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('short_sku', models.CharField(blank=True, max_length=30, null=True)),
                ('sku', models.CharField(blank=True, max_length=30, null=True)),
                ('sku_ns', models.CharField(blank=True, max_length=30, null=True)),
                ('product_class', models.CharField(blank=True, max_length=4, null=True)),
                ('merch_to_137', models.CharField(blank=True, max_length=200, null=True)),
                ('dc_received_u', models.IntegerField(blank=True, null=True)),
                ('curr_dc_oh_u', models.IntegerField(blank=True, null=True)),
                ('dc_curr_oo_u', models.IntegerField(blank=True, null=True)),
                ('size_type', models.CharField(blank=True, max_length=10, null=True)),
                ('size', models.CharField(blank=True, max_length=10, null=True)),
                ('color_desc', models.CharField(blank=True, max_length=200, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='ThumbWebfiles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('long_sku', models.CharField(max_length=30)),
                ('filename', models.CharField(max_length=50)),
                ('item_ns', models.CharField(max_length=30, null=True)),
                ('sku', models.CharField(max_length=30, null=True)),
                ('sku_ns', models.CharField(max_length=30, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fullname', models.CharField(max_length=64)),
                ('position', models.CharField(blank=True, max_length=64, null=True)),
                ('phone', models.CharField(blank=True, max_length=15, null=True)),
                ('extension', models.CharField(blank=True, max_length=15, null=True)),
                ('mobile', models.CharField(blank=True, max_length=15, null=True)),
                ('fax', models.CharField(blank=True, max_length=15, null=True)),
                ('notes', models.TextField(blank=True, max_length=2000, null=True)),
                ('email', models.EmailField(max_length=254)),
                # Profile survives user deletion (SET_NULL), hence null=True.
                ('user', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['user__last_name'],
            },
        ),
        migrations.CreateModel(
            name='WatchedItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_no', models.CharField(blank=True, max_length=30, null=True)),
                ('desc', models.CharField(blank=True, max_length=210, null=True)),
                ('comments', models.TextField(blank=True, max_length=2000, null=True)),
                ('confirmed_placed', models.CharField(blank=True, default=None, max_length=200, null=True)),
                ('watched_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='b5.UserProfile')),
            ],
        ),
        # Relations added after CreateModel because their targets are defined
        # later in the operation list than the referencing model.
        migrations.AddField(
            model_name='replacedimage',
            name='processor',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='b5.UserProfile'),
        ),
        migrations.AddField(
            model_name='oneimagefiles',
            name='filenames',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='b5.PrintFile'),
        ),
        migrations.AlterUniqueTogether(
            name='inventoryproduct',
            unique_together=set([('sku', 'source')]),
        ),
        migrations.AddField(
            model_name='department',
            name='member',
            field=models.ManyToManyField(to='b5.UserProfile'),
        ),
    ]
| 62.455026
| 220
| 0.58908
| 2,644
| 23,608
| 5.098336
| 0.077912
| 0.11951
| 0.212166
| 0.254599
| 0.875816
| 0.850742
| 0.84273
| 0.830267
| 0.820772
| 0.775742
| 0
| 0.024964
| 0.25682
| 23,608
| 377
| 221
| 62.62069
| 0.743346
| 0.00288
| 0
| 0.707317
| 1
| 0
| 0.099928
| 0.001784
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.01626
| 0
| 0.0271
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7ed7bc1694f73741d38255d44b474ff7878f3e2b
| 4,021
|
py
|
Python
|
tests/bindings/polymer/test_predicates.py
|
sizmailov/pyxmolpp2
|
9395ba1b1ddc957e0b33dc6decccdb711e720764
|
[
"MIT"
] | 4
|
2020-06-24T11:07:57.000Z
|
2022-01-15T23:00:30.000Z
|
tests/bindings/polymer/test_predicates.py
|
sizmailov/pyxmolpp2
|
9395ba1b1ddc957e0b33dc6decccdb711e720764
|
[
"MIT"
] | 84
|
2018-04-22T12:29:31.000Z
|
2020-06-17T15:03:37.000Z
|
tests/bindings/polymer/test_predicates.py
|
sizmailov/pyxmolpp2
|
9395ba1b1ddc957e0b33dc6decccdb711e720764
|
[
"MIT"
] | 6
|
2018-06-04T09:16:26.000Z
|
2022-03-12T11:05:54.000Z
|
import pytest
import os
from make_polygly import make_polyglycine
def test_atom_name():
    """Atom-name predicate: ==, is_in (set and varargs forms), ~ and |."""
    from pyxmolpp2 import aName

    frame = make_polyglycine([("A", 10)])
    # (predicate, expected atom count) pairs; 10 glycines -> 70 atoms total.
    cases = [
        (aName == "CA", 10),
        (aName.is_in({"CA", "N"}), 20),
        (aName.is_in("CA", "N"), 20),
        (~aName.is_in({"CA", "N"}), 50),
        (~aName.is_in("CA", "N"), 50),
        ((aName == "CA") | (aName == "N"), 20),
    ]
    for predicate, expected in cases:
        assert frame.atoms.filter(predicate).size == expected
def test_residue_name():
    """Residue-name predicate checked against both atom and residue selections."""
    from pyxmolpp2 import rName

    frame = make_polyglycine([("A", 10)])

    atom_cases = [
        (rName == "GLY", 70),
        (rName.is_in({"GLY", "LYS"}), 70),
        (rName.is_in("GLY", "LYS"), 70),
        (~rName.is_in({"GLY"}), 0),
        (~rName.is_in("GLY"), 0),
        ((rName == "GLY") | (rName != "GLY"), 70),
    ]
    for predicate, expected in atom_cases:
        assert frame.atoms.filter(predicate).size == expected

    residue_cases = [
        (rName == "GLY", 10),
        (rName.is_in({"GLY", "LYS"}), 10),
        (rName.is_in("GLY", "LYS"), 10),
        (~rName.is_in({"GLY"}), 0),
        (~rName.is_in("GLY"), 0),
        ((rName == "GLY") | (rName != "GLY"), 10),
    ]
    for predicate, expected in residue_cases:
        assert frame.residues.filter(predicate).size == expected
def test_chain_name():
    """Molecule-name predicate on atoms, residues and molecules of two chains."""
    from pyxmolpp2 import mName

    # Chain A: 10 residues (70 atoms); chain B: 20 residues (140 atoms).
    frame = make_polyglycine([("A", 10), ("B", 20)])

    atom_cases = [
        (mName == "A", 70),
        (mName.is_in({"A", "B"}), 210),
        (~mName.is_in({"B"}), 70),
        ((mName == "A") | (mName != "B"), 70),
    ]
    for predicate, expected in atom_cases:
        assert frame.atoms.filter(predicate).size == expected

    residue_cases = [
        (mName == "A", 10),
        (mName.is_in({"A", "B"}), 30),
        (mName.is_in("A", "B"), 30),
        (~mName.is_in({"B"}), 10),
        (~mName.is_in("B"), 10),
        ((mName == "A") | (mName != "B"), 10),
    ]
    for predicate, expected in residue_cases:
        assert frame.residues.filter(predicate).size == expected

    molecule_cases = [
        (mName == "A", 1),
        (mName.is_in({"A", "B"}), 2),
        (mName.is_in("A", "B"), 2),
        (~mName.is_in({"B"}), 1),
        (~mName.is_in("B"), 1),
        ((mName == "A") | (mName != "B"), 1),
    ]
    for predicate, expected in molecule_cases:
        assert frame.molecules.filter(predicate).size == expected
def test_atom_id():
    """Atom-id predicate: ==, is_in (set and varargs), negation and |."""
    from pyxmolpp2 import aId

    frame = make_polyglycine([("A", 10)])  # 70 atoms total
    cases = [
        (aId == 5, 1),
        (aId.is_in({1, 2, 3}), 3),
        (aId.is_in(1, 2, 3), 3),
        (~aId.is_in({1, 2, 3}), 67),
        (~aId.is_in(1, 2, 3), 67),
        ((aId == 2) | (aId == 3), 2),
    ]
    for predicate, expected in cases:
        assert frame.atoms.filter(predicate).size == expected
def test_residue_id():
    """Residue-id predicate on atoms and residues, plus full ResidueId matching."""
    from pyxmolpp2 import rId, ResidueId

    frame = make_polyglycine([("A", 10)])  # 10 residues x 7 atoms

    atom_cases = [
        (rId == 5, 7),
        (rId.is_in({1, 2, 3}), 21),
        (rId.is_in(1, 2, 3), 21),
        (~rId.is_in({1, 2, 3}), 49),
        (~rId.is_in(1, 2, 3), 49),
        ((rId == 2) | (rId == 3), 14),
    ]
    for predicate, expected in atom_cases:
        assert frame.atoms.filter(predicate).size == expected

    residue_cases = [
        (rId == 5, 1),
        (rId.is_in({1, 2, 3}), 3),
        (rId.is_in(1, 2, 3), 3),
        (~rId.is_in({1, 2, 3}), 7),
        (~rId.is_in(1, 2, 3), 7),
        ((rId == 2) | (rId == 3), 2),
        # An rId with an insertion code does not match a plain serial number.
        (rId == ResidueId(5, "A"), 0),
    ]
    for predicate, expected in residue_cases:
        assert frame.residues.filter(predicate).size == expected
| 43.236559
| 80
| 0.629445
| 639
| 4,021
| 3.881064
| 0.073552
| 0.235081
| 0.180645
| 0.248387
| 0.877016
| 0.845968
| 0.758871
| 0.739919
| 0.694758
| 0.617742
| 0
| 0.045428
| 0.156926
| 4,021
| 92
| 81
| 43.706522
| 0.686136
| 0
| 0
| 0.056338
| 0
| 0
| 0.025367
| 0
| 0
| 0
| 0
| 0
| 0.746479
| 1
| 0.070423
| false
| 0
| 0.112676
| 0
| 0.183099
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7d1c98a7a9ddad54bf9674e7f255da961ba1786a
| 197
|
py
|
Python
|
tests/basics/builtin_hex.py
|
peterson79/pycom-micropython-sigfox
|
3f93fc2c02567c96f18cff4af9125db8fd7a6fb4
|
[
"MIT"
] | 303
|
2015-07-11T17:12:55.000Z
|
2018-01-08T03:02:37.000Z
|
tests/basics/builtin_hex.py
|
peterson79/pycom-micropython-sigfox
|
3f93fc2c02567c96f18cff4af9125db8fd7a6fb4
|
[
"MIT"
] | 27
|
2015-01-02T16:17:37.000Z
|
2015-09-07T19:21:26.000Z
|
tests/basics/builtin_hex.py
|
peterson79/pycom-micropython-sigfox
|
3f93fc2c02567c96f18cff4af9125db8fd7a6fb4
|
[
"MIT"
] | 26
|
2018-01-18T09:15:33.000Z
|
2022-02-07T13:09:14.000Z
|
# test builtin hex function
# Exercise hex() on positive/negative small ints, a value given in both
# decimal and hex notation, and big integers beyond 64 bits.
for value in (1, -1, 15, -15, 12345, 0x12345,
              12345678901234567890, 0x12345678901234567890):
    print(hex(value))
| 15.153846
| 34
| 0.741117
| 28
| 197
| 5.214286
| 0.392857
| 0.438356
| 0.123288
| 0.191781
| 0.315068
| 0
| 0
| 0
| 0
| 0
| 0
| 0.320442
| 0.081218
| 197
| 12
| 35
| 16.416667
| 0.486188
| 0.126904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170588
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
7d3d2743257a8ee0b01c6779b35fadba0bfa615c
| 6,762
|
py
|
Python
|
marionette/tor_browser_tests/test_noscript.py
|
boklm/tbb-testsuite
|
308a0861a110c92a1f1378b6577e8db9a9f11c6c
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 1
|
2015-07-06T18:40:14.000Z
|
2015-07-06T18:40:14.000Z
|
marionette/tor_browser_tests/test_noscript.py
|
boklm/tbb-testsuite
|
308a0861a110c92a1f1378b6577e8db9a9f11c6c
|
[
"CC0-1.0",
"BSD-3-Clause"
] | null | null | null |
marionette/tor_browser_tests/test_noscript.py
|
boklm/tbb-testsuite
|
308a0861a110c92a1f1378b6577e8db9a9f11c6c
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 1
|
2017-10-04T22:11:52.000Z
|
2017-10-04T22:11:52.000Z
|
# https://trac.torproject.org/projects/tor/ticket/13053
from marionette_driver.errors import NoSuchElementException
from marionette_harness import MarionetteTestCase
import testsuite
class Test(MarionetteTestCase):
    """NoScript regression test for Tor Browser.

    https://trac.torproject.org/projects/tor/ticket/13053

    JavaScript sourced over plain HTTP (directly or inside an iframe) must be
    blocked, so the page's ``#test_result`` element is never created.
    JavaScript sourced over HTTPS must run and set ``#test_result`` to
    ``JavaScriptEnabled``.

    Fixes vs. the original: the twelve copy-pasted navigate/try/assert stanzas
    are folded into two helpers, and two copy-pasted failure messages that
    misidentified their case (the alternate-hostname http-iframe and
    alternate-hostname https-iframe checks) are corrected.
    """

    def setUp(self):
        MarionetteTestCase.setUp(self)
        ts = testsuite.TestSuite()
        self.ts = ts
        # Base URLs of the test pages, served over http and https respectively.
        self.http_url = "%s/noscript/" % ts.t['options']['test_data_url']
        self.https_url = "%s/noscript/" % ts.t['options']['test_data_url_https']

    def _find_result(self):
        """Return the #test_result element, or None if the script was blocked."""
        try:
            return self.marionette.find_element('id', 'test_result')
        except NoSuchElementException:
            return None

    def _check_page(self, page, base_url, expect_js, msg):
        """Navigate to *page* under *base_url* and assert whether its JS ran."""
        self.marionette.navigate("%s/%s" % (base_url, page))
        elt = self._find_result()
        if expect_js:
            self.assertTrue(elt is not None, msg=msg)
            self.assertEqual('JavaScriptEnabled', elt.text, msg=msg)
        else:
            self.assertTrue(elt is None, msg=msg)

    def _check_iframe(self, page, base_url, expect_js, msg):
        """Like _check_page, but the script runs inside the page's #iframe."""
        self.marionette.navigate("%s/%s" % (base_url, page))
        iframe = self.marionette.find_element('id', 'iframe')
        self.marionette.switch_to_frame(iframe)
        elt = self._find_result()
        if expect_js:
            self.assertTrue(elt is not None, msg=msg)
            self.assertEqual(elt.text, 'JavaScriptEnabled', msg=msg)
        else:
            self.assertTrue(elt is None, msg=msg)
        self.marionette.switch_to_default_content()

    def test_noscript(self):
        # Short implicit wait: "element absent" checks should fail fast.
        self.marionette.timeout.implicit = 1
        with self.marionette.using_context('content'):
            # Pages sourcing a <script> directly.
            self._check_page("http_src.html", self.http_url, False,
                             "http page sourcing http js")
            self._check_page("http_src.html", self.https_url, False,
                             "https page sourcing http js")
            self._check_page("alternate_http_src.html", self.https_url, False,
                             "https page sourcing http js (alternate hostname)")
            self._check_page("https_src.html", self.http_url, False,
                             "http page sourcing https js")
            self._check_page("https_src.html", self.https_url, True,
                             "https page sourcing https js")
            self._check_page("alternate_https_src.html", self.https_url, True,
                             "https page sourcing https js (alternate hostname)")
            # Pages embedding the script via an iframe.
            self._check_iframe("http_iframe.html", self.http_url, False,
                               "http page with http iframe")
            self._check_iframe("https_iframe.html", self.http_url, False,
                               "http page with https iframe")
            self._check_iframe("http_iframe.html", self.https_url, False,
                               "https page with http iframe")
            self._check_iframe("alternate_http_iframe.html", self.https_url, False,
                               "https page with http iframe (alternate hostname)")
            self._check_iframe("https_iframe.html", self.https_url, True,
                               "https page with https iframe")
            self._check_iframe("alternate_https_iframe.html", self.https_url, True,
                               "https page with https iframe (alternate hostname)")
| 43.070064
| 95
| 0.593168
| 720
| 6,762
| 5.433333
| 0.090278
| 0.157464
| 0.107362
| 0.115031
| 0.907975
| 0.90593
| 0.90593
| 0.89954
| 0.882669
| 0.841513
| 0
| 0.001291
| 0.312482
| 6,762
| 156
| 96
| 43.346154
| 0.840181
| 0.070393
| 0
| 0.704918
| 0
| 0
| 0.179876
| 0.01786
| 0
| 0
| 0
| 0
| 0.131148
| 1
| 0.016393
| false
| 0
| 0.02459
| 0
| 0.04918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d4eb1c22faaec86f73202be1c7f11c7139c9ac4
| 6,279
|
py
|
Python
|
pyaz/afd/route/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/afd/route/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/afd/route/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage routes under an AFD endpoint.
'''
from ... pyaz_utils import _call_az
def show(endpoint_name, profile_name, resource_group, route_name):
    '''
    Show route details.

    Required Parameters:
    - endpoint_name -- Name of the endpoint.
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - route_name -- Name of the route.

    Returns whatever `_call_az` produces for `az afd route show`
    (presumably the parsed CLI output -- see pyaz_utils._call_az).
    '''
    # NOTE: locals() here is exactly the argument map forwarded to the CLI
    # wrapper. Do not introduce local variables before this call -- they
    # would leak into the generated `az` command.
    return _call_az("az afd route show", locals())
def list(endpoint_name, profile_name, resource_group):
    '''
    List all the routes within the specified endpoint.

    NOTE: this function intentionally shadows the built-in `list` so the
    module mirrors the `az afd route list` subcommand name; callers use it
    as `afd.route.list(...)`.

    Required Parameters:
    - endpoint_name -- Name of the endpoint.
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is the exact argument map forwarded to the CLI wrapper; do not
    # add local variables before this call.
    return _call_az("az afd route list", locals())
def create(endpoint_name, forwarding_protocol, https_redirect, origin_group, profile_name, resource_group, route_name, supported_protocols, content_types_to_compress=None, custom_domains=None, enable_compression=None, link_to_default_domain=None, origin_path=None, patterns_to_match=None, query_string_caching_behavior=None, rule_sets=None):
    '''
    Creates a new route within the specified endpoint.

    Required Parameters:
    - endpoint_name -- Name of the endpoint.
    - forwarding_protocol -- Protocol this rule will use when forwarding traffic to backends.
    - https_redirect -- Whether to automatically redirect HTTP traffic to HTTPS traffic.
    - origin_group -- Name or ID of the origin group to be associated with.
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - route_name -- Name of the route.
    - supported_protocols -- List of supported protocols for this route.

    Optional Parameters:
    - content_types_to_compress -- List of content types on which compression applies. The value should be a valid MIME type.
    - custom_domains -- Custom domains referenced by this endpoint.
    - enable_compression -- Indicates whether content compression is enabled on AzureFrontDoor. Default value is false. If compression is enabled, content will be served as compressed if user requests for a compressed version. Content won't be compressed on AzureFrontDoor when requested content is smaller than 1 byte or larger than 1 MB.
    - link_to_default_domain -- Whether this route will be linked to the default endpoint domain.
    - origin_path -- A directory path on the origin that AFD can use to retrieve content from. E.g, "/img/*"
    - patterns_to_match -- The route patterns of the rule.
    - query_string_caching_behavior -- Defines how CDN caches requests that include query strings. You can ignore any query strings when caching, bypass caching to prevent requests that contain query strings from being cached, or cache every request with a unique URL.
    - rule_sets -- Collection of ID or name of rule set referenced by the route.
    '''
    # locals() captures every parameter (optional ones as None) and is
    # forwarded verbatim to the CLI wrapper; do not add local variables
    # before this call.
    return _call_az("az afd route create", locals())
def update(endpoint_name, profile_name, resource_group, route_name, content_types_to_compress=None, custom_domains=None, enable_compression=None, forwarding_protocol=None, https_redirect=None, link_to_default_domain=None, origin_group=None, origin_path=None, patterns_to_match=None, query_string_caching_behavior=None, rule_sets=None, supported_protocols=None):
    '''
    Update an existing route within the specified endpoint.

    Required Parameters:
    - endpoint_name -- Name of the endpoint.
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - route_name -- Name of the route.

    Optional Parameters:
    - content_types_to_compress -- List of content types on which compression applies. The value should be a valid MIME type.
    - custom_domains -- Custom domains referenced by this endpoint.
    - enable_compression -- Indicates whether content compression is enabled on AzureFrontDoor. Default value is false. If compression is enabled, content will be served as compressed if user requests for a compressed version. Content won't be compressed on AzureFrontDoor when requested content is smaller than 1 byte or larger than 1 MB.
    - forwarding_protocol -- Protocol this rule will use when forwarding traffic to backends.
    - https_redirect -- Whether to automatically redirect HTTP traffic to HTTPS traffic.
    - link_to_default_domain -- Whether this route will be linked to the default endpoint domain.
    - origin_group -- Name or ID of the origin group to be associated with.
    - origin_path -- A directory path on the origin that AFD can use to retrieve content from. E.g, "/img/*"
    - patterns_to_match -- The route patterns of the rule.
    - query_string_caching_behavior -- Defines how CDN caches requests that include query strings. You can ignore any query strings when caching, bypass caching to prevent requests that contain query strings from being cached, or cache every request with a unique URL.
    - rule_sets -- Collection of ID or name of rule set referenced by the route.
    - supported_protocols -- List of supported protocols for this route.
    '''
    # locals() captures every parameter (unset optionals as None) and is
    # forwarded verbatim to the CLI wrapper; do not add local variables
    # before this call.
    return _call_az("az afd route update", locals())
def delete(endpoint_name, profile_name, resource_group, route_name, yes=None):
    '''
    Delete an existing route within the specified endpoint.

    Required Parameters:
    - endpoint_name -- Name of the endpoint.
    - profile_name -- Name of the CDN profile which is unique within the resource group.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - route_name -- Name of the route.

    Optional Parameters:
    - yes -- Do not prompt for confirmation.
    '''
    # locals() is the exact argument map forwarded to the CLI wrapper; do not
    # add local variables before this call.
    return _call_az("az afd route delete", locals())
| 62.79
| 361
| 0.752986
| 899
| 6,279
| 5.117909
| 0.16574
| 0.027385
| 0.030428
| 0.039557
| 0.918061
| 0.918061
| 0.893501
| 0.877418
| 0.838513
| 0.838513
| 0
| 0.000779
| 0.182354
| 6,279
| 99
| 362
| 63.424242
| 0.895403
| 0.75649
| 0
| 0
| 0
| 0
| 0.073565
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
7d50f68006ec02c191e80e8c74981ff83674c568
| 120
|
py
|
Python
|
draugr/os_utilities/windows_utilities/task_scheduler/enums/__init__.py
|
pything/draugr
|
2fda662f2fa97236e4495a6af2b8237516fa428b
|
[
"Apache-2.0"
] | null | null | null |
draugr/os_utilities/windows_utilities/task_scheduler/enums/__init__.py
|
pything/draugr
|
2fda662f2fa97236e4495a6af2b8237516fa428b
|
[
"Apache-2.0"
] | 16
|
2021-02-15T07:58:01.000Z
|
2022-02-20T15:16:23.000Z
|
draugr/os_utilities/windows_utilities/task_scheduler/enums/__init__.py
|
pything/draugr
|
2fda662f2fa97236e4495a6af2b8237516fa428b
|
[
"Apache-2.0"
] | null | null | null |
from .task_action_type import *
from .task_creation import *
from .task_logon_type import *
from .task_trigger import *
| 24
| 31
| 0.8
| 18
| 120
| 5
| 0.444444
| 0.355556
| 0.466667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 120
| 4
| 32
| 30
| 0.865385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
adbe77a6568042d5ad531931eaabc8ad2496f9b7
| 27,386
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_ras_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_ras_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_ras_ext.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_ras_ext(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def show_raslog_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
input = ET.SubElement(show_raslog, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_input_number_of_latest_events(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
input = ET.SubElement(show_raslog, "input")
number_of_latest_events = ET.SubElement(input, "number-of-latest-events")
number_of_latest_events.text = kwargs.pop('number_of_latest_events')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
rbridge_id = ET.SubElement(show_all_raslog, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_number_of_entries(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
number_of_entries = ET.SubElement(show_all_raslog, "number-of-entries")
number_of_entries.text = kwargs.pop('number_of_entries')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_index(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
index = ET.SubElement(raslog_entries, "index")
index.text = kwargs.pop('index')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_message_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
message_id = ET.SubElement(raslog_entries, "message-id")
message_id.text = kwargs.pop('message_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_date_and_time_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
date_and_time_info = ET.SubElement(raslog_entries, "date-and-time-info")
date_and_time_info.text = kwargs.pop('date_and_time_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_severity(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
severity = ET.SubElement(raslog_entries, "severity")
severity.text = kwargs.pop('severity')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_repeat_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
repeat_count = ET.SubElement(raslog_entries, "repeat-count")
repeat_count.text = kwargs.pop('repeat_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_message(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
message = ET.SubElement(raslog_entries, "message")
message.text = kwargs.pop('message')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_message_flag(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
message_flag = ET.SubElement(raslog_entries, "message-flag")
message_flag.text = kwargs.pop('message_flag')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_log_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
log_type = ET.SubElement(raslog_entries, "log-type")
log_type.text = kwargs.pop('log_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_switch_or_chassis_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
switch_or_chassis_name = ET.SubElement(raslog_entries, "switch-or-chassis-name")
switch_or_chassis_name.text = kwargs.pop('switch_or_chassis_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_cmd_status_error_msg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
cmd_status_error_msg = ET.SubElement(output, "cmd-status-error-msg")
cmd_status_error_msg.text = kwargs.pop('cmd_status_error_msg')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
input = ET.SubElement(show_support_save_status, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
rbridge_id = ET.SubElement(show_support_save_status, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_status(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
status = ET.SubElement(show_support_save_status, "status")
status.text = kwargs.pop('status')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_message(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
message = ET.SubElement(show_support_save_status, "message")
message.text = kwargs.pop('message')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_percentage_of_completion(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
percentage_of_completion = ET.SubElement(show_support_save_status, "percentage-of-completion")
percentage_of_completion.text = kwargs.pop('percentage_of_completion')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_system_info_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_system_info = ET.Element("show_system_info")
config = show_system_info
input = ET.SubElement(show_system_info, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_system_info_output_show_system_info_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_system_info = ET.Element("show_system_info")
config = show_system_info
output = ET.SubElement(show_system_info, "output")
show_system_info = ET.SubElement(output, "show-system-info")
rbridge_id = ET.SubElement(show_system_info, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_system_info_output_show_system_info_stack_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_system_info = ET.Element("show_system_info")
config = show_system_info
output = ET.SubElement(show_system_info, "output")
show_system_info = ET.SubElement(output, "show-system-info")
stack_mac = ET.SubElement(show_system_info, "stack-mac")
stack_mac.text = kwargs.pop('stack_mac')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
input = ET.SubElement(show_raslog, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_input_number_of_latest_events(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
input = ET.SubElement(show_raslog, "input")
number_of_latest_events = ET.SubElement(input, "number-of-latest-events")
number_of_latest_events.text = kwargs.pop('number_of_latest_events')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
rbridge_id = ET.SubElement(show_all_raslog, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_number_of_entries(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
number_of_entries = ET.SubElement(show_all_raslog, "number-of-entries")
number_of_entries.text = kwargs.pop('number_of_entries')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_index(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
index = ET.SubElement(raslog_entries, "index")
index.text = kwargs.pop('index')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_message_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
message_id = ET.SubElement(raslog_entries, "message-id")
message_id.text = kwargs.pop('message_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_date_and_time_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
date_and_time_info = ET.SubElement(raslog_entries, "date-and-time-info")
date_and_time_info.text = kwargs.pop('date_and_time_info')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_severity(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
severity = ET.SubElement(raslog_entries, "severity")
severity.text = kwargs.pop('severity')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_repeat_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
repeat_count = ET.SubElement(raslog_entries, "repeat-count")
repeat_count.text = kwargs.pop('repeat_count')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_message(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
message = ET.SubElement(raslog_entries, "message")
message.text = kwargs.pop('message')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_message_flag(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
message_flag = ET.SubElement(raslog_entries, "message-flag")
message_flag.text = kwargs.pop('message_flag')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_log_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
log_type = ET.SubElement(raslog_entries, "log-type")
log_type.text = kwargs.pop('log_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_show_all_raslog_raslog_entries_switch_or_chassis_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
show_all_raslog = ET.SubElement(output, "show-all-raslog")
raslog_entries = ET.SubElement(show_all_raslog, "raslog-entries")
switch_or_chassis_name = ET.SubElement(raslog_entries, "switch-or-chassis-name")
switch_or_chassis_name.text = kwargs.pop('switch_or_chassis_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_raslog_output_cmd_status_error_msg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_raslog = ET.Element("show_raslog")
config = show_raslog
output = ET.SubElement(show_raslog, "output")
cmd_status_error_msg = ET.SubElement(output, "cmd-status-error-msg")
cmd_status_error_msg.text = kwargs.pop('cmd_status_error_msg')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
input = ET.SubElement(show_support_save_status, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
rbridge_id = ET.SubElement(show_support_save_status, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_status(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
status = ET.SubElement(show_support_save_status, "status")
status.text = kwargs.pop('status')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_message(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
message = ET.SubElement(show_support_save_status, "message")
message.text = kwargs.pop('message')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_support_save_status_output_show_support_save_status_percentage_of_completion(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_support_save_status = ET.Element("show_support_save_status")
config = show_support_save_status
output = ET.SubElement(show_support_save_status, "output")
show_support_save_status = ET.SubElement(output, "show-support-save-status")
percentage_of_completion = ET.SubElement(show_support_save_status, "percentage-of-completion")
percentage_of_completion.text = kwargs.pop('percentage_of_completion')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_system_info_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_system_info = ET.Element("show_system_info")
config = show_system_info
input = ET.SubElement(show_system_info, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_system_info_output_show_system_info_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_system_info = ET.Element("show_system_info")
config = show_system_info
output = ET.SubElement(show_system_info, "output")
show_system_info = ET.SubElement(output, "show-system-info")
rbridge_id = ET.SubElement(show_system_info, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def show_system_info_output_show_system_info_stack_mac(self, **kwargs):
    """Build the RPC reply tree
    <show_system_info>/<output>/<show-system-info>/<stack-mac>
    and pass its root to the callback.

    Keyword Args:
        stack_mac (str): text for the <stack-mac> leaf (required;
            KeyError if absent).
        callback (callable): consumer of the built element; defaults to
            ``self._callback``.

    Returns:
        Whatever the callback returns for the built element.
    """
    # Dropped the generated-but-unused ET.Element("config").
    config = ET.Element("show_system_info")
    output = ET.SubElement(config, "output")
    info = ET.SubElement(output, "show-system-info")
    stack_mac = ET.SubElement(info, "stack-mac")
    stack_mac.text = kwargs.pop('stack_mac')
    # self._callback is read eagerly as the pop() default.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
| 43.127559
| 106
| 0.661469
| 3,227
| 27,386
| 5.291602
| 0.021072
| 0.081986
| 0.066995
| 0.100843
| 0.99227
| 0.99227
| 0.99227
| 0.99227
| 0.99227
| 0.99227
| 0
| 0
| 0.228767
| 27,386
| 635
| 107
| 43.127559
| 0.808484
| 0.048309
| 0
| 0.99115
| 1
| 0
| 0.132905
| 0.027402
| 0
| 0
| 0
| 0
| 0
| 1
| 0.099558
| false
| 0
| 0.002212
| 0
| 0.201327
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ade803b163caa3a9a47e903eb04ba5dfbadc159e
| 109
|
py
|
Python
|
python/testData/psi/PatternMatchingRecoveryAsPatternsWithIllegalTarget.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/psi/PatternMatchingRecoveryAsPatternsWithIllegalTarget.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/psi/PatternMatchingRecoveryAsPatternsWithIllegalTarget.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
match x:
case 1 as foo.bar:
pass
case 1 as foo[0]:
pass
case 1 as _:
pass
| 15.571429
| 22
| 0.458716
| 18
| 109
| 2.722222
| 0.5
| 0.306122
| 0.428571
| 0.408163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 0.477064
| 109
| 7
| 23
| 15.571429
| 0.789474
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.428571
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
bc3ae3f744ea2c23a629d23bebc471bf396f11f3
| 70
|
py
|
Python
|
eunice012716/Week1/ch2/2.6/exercise3.py
|
coookie89/Intern-Training
|
6e3b26edfee5bdcc98dd5ac05d35cef125778ad5
|
[
"MIT"
] | 1
|
2021-08-24T12:14:46.000Z
|
2021-08-24T12:14:46.000Z
|
eunice012716/Week1/ch2/2.6/exercise3.py
|
coookie89/Intern-Training
|
6e3b26edfee5bdcc98dd5ac05d35cef125778ad5
|
[
"MIT"
] | 14
|
2021-07-09T07:48:35.000Z
|
2021-08-19T03:06:31.000Z
|
eunice012716/Week1/ch2/2.6/exercise3.py
|
coookie89/Intern-Training
|
6e3b26edfee5bdcc98dd5ac05d35cef125778ad5
|
[
"MIT"
] | 11
|
2021-07-09T07:35:24.000Z
|
2021-08-15T07:19:43.000Z
|
# Script entry point: print the chain-rule factorization of the joint
# probability P(A, B, C) into conditional probabilities.
if __name__ == "__main__":
    print("P(A,B,C)=P(A)*P(B|A)*P(C|B)")
| 23.333333
| 41
| 0.5
| 16
| 70
| 1.6875
| 0.5
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 2
| 42
| 35
| 0.457627
| 0
| 0
| 0
| 0
| 0.5
| 0.514706
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
70d92b584769b44a4d43922d45cb52a981d0afaa
| 71,895
|
py
|
Python
|
tests/unit_tests/iolibs/test_group_subprocs.py
|
jlrainbolt/MG5_v2_6_1
|
241f0c6f309342d6e8b813284467b2edd393c7d6
|
[
"NCSA"
] | 5
|
2018-10-23T14:37:18.000Z
|
2021-11-22T20:59:02.000Z
|
tests/unit_tests/iolibs/test_group_subprocs.py
|
jlrainbolt/MG5_v2_6_1
|
241f0c6f309342d6e8b813284467b2edd393c7d6
|
[
"NCSA"
] | 26
|
2018-10-08T15:49:32.000Z
|
2020-05-15T13:33:36.000Z
|
tests/unit_tests/iolibs/test_group_subprocs.py
|
jlrainbolt/MG5_v2_6_1
|
241f0c6f309342d6e8b813284467b2edd393c7d6
|
[
"NCSA"
] | 4
|
2019-02-18T11:42:18.000Z
|
2021-11-11T20:46:08.000Z
|
################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
"""Unit test library for the export v4 format routines"""
import StringIO
import copy
import fractions
import os
import tests.unit_tests as unittest
import madgraph.various.misc as misc
import madgraph.iolibs.export_v4 as export_v4
import madgraph.iolibs.file_writers as writers
import madgraph.iolibs.group_subprocs as group_subprocs
import madgraph.iolibs.helas_call_writers as helas_call_writers
import madgraph.core.base_objects as base_objects
import madgraph.core.helas_objects as helas_objects
import madgraph.core.diagram_generation as diagram_generation
import madgraph.core.color_algebra as color
import tests.unit_tests.iolibs.test_file_writers as test_file_writers
import tests.unit_tests.iolibs.test_helas_call_writers as \
test_helas_call_writers
#===============================================================================
# SubProcessGroupTest
#===============================================================================
class SubProcessGroupTest(unittest.TestCase):
"""Test class for the SubProcessGroup class"""
def setUp(self):
    """Build the fixture model shared by every test in this class.

    Defines a minimal SM-like model stored in ``self.mymodel``: particles
    g, u/u~, d/d~, a (photon), e-/e+, z, plus ten interactions (3- and
    4-gluon vertices, gluon/photon/Z couplings to quarks, and photon/Z
    couplings to electrons).
    """
    mypartlist = base_objects.ParticleList()
    myinterlist = base_objects.InteractionList()

    # A gluon
    mypartlist.append(base_objects.Particle({'name':'g',
                                             'antiname':'g',
                                             'spin':3,
                                             'color':8,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'g',
                                             'antitexname':'g',
                                             'line':'curly',
                                             'charge':0.,
                                             'pdg_code':21,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':True}))
    g = mypartlist[-1]

    # A quark U and its antiparticle
    mypartlist.append(base_objects.Particle({'name':'u',
                                             'antiname':'u~',
                                             'spin':2,
                                             'color':3,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'u',
                                             'antitexname':'\bar u',
                                             'line':'straight',
                                             'charge':2. / 3.,
                                             'pdg_code':2,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':False}))
    u = mypartlist[-1]
    antiu = copy.copy(u)
    antiu.set('is_part', False)

    # A quark D and its antiparticle
    mypartlist.append(base_objects.Particle({'name':'d',
                                             'antiname':'d~',
                                             'spin':2,
                                             'color':3,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'d',
                                             'antitexname':'\bar d',
                                             'line':'straight',
                                             'charge':-1. / 3.,
                                             'pdg_code':1,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':False}))
    d = mypartlist[-1]
    antid = copy.copy(d)
    antid.set('is_part', False)

    # A photon
    mypartlist.append(base_objects.Particle({'name':'a',
                                             'antiname':'a',
                                             'spin':3,
                                             'color':1,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'\gamma',
                                             'antitexname':'\gamma',
                                             'line':'wavy',
                                             'charge':0.,
                                             'pdg_code':22,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':True}))
    a = mypartlist[-1]

    # An electron and positron
    mypartlist.append(base_objects.Particle({'name':'e-',
                                             'antiname':'e+',
                                             'spin':2,
                                             'color':1,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'e^-',
                                             'antitexname':'e^+',
                                             'line':'straight',
                                             'charge':-1.,
                                             'pdg_code':11,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':False}))
    eminus = mypartlist[-1]
    eplus = copy.copy(eminus)
    eplus.set('is_part', False)

    # A Z
    mypartlist.append(base_objects.Particle({'name':'z',
                                             'antiname':'z',
                                             'spin':3,
                                             'color':1,
                                             'mass':'MZ',
                                             'width':'WZ',
                                             'texname':'Z',
                                             'antitexname':'Z',
                                             'line':'wavy',
                                             'charge':0.,
                                             'pdg_code':23,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':True}))
    z = mypartlist[-1]

    # 3 gluon vertex
    myinterlist.append(base_objects.Interaction({
        'id': 1,
        'particles': base_objects.ParticleList(\
            [g] * 3),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'G'},
        'orders':{'QCD':1}}))

    # 4 gluon vertex
    myinterlist.append(base_objects.Interaction({
        'id': 2,
        'particles': base_objects.ParticleList(\
            [g] * 4),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'G^2'},
        'orders':{'QCD':2}}))

    # Gluon and photon couplings to quarks
    myinterlist.append(base_objects.Interaction({
        'id': 3,
        'particles': base_objects.ParticleList(\
            [u, \
             antiu, \
             g]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQQ'},
        'orders':{'QCD':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 4,
        'particles': base_objects.ParticleList(\
            [u, \
             antiu, \
             a]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QED':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 5,
        'particles': base_objects.ParticleList(\
            [d, \
             antid, \
             g]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQQ'},
        'orders':{'QCD':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 6,
        'particles': base_objects.ParticleList(\
            [d, \
             antid, \
             a]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QED':1}}))

    # Coupling of e to gamma
    myinterlist.append(base_objects.Interaction({
        'id': 7,
        'particles': base_objects.ParticleList(\
            [eminus, \
             eplus, \
             a]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QED':1}}))

    # Coupling of Z to quarks and electrons
    myinterlist.append(base_objects.Interaction({
        'id': 8,
        'particles': base_objects.ParticleList(\
            [u, \
             antiu, \
             z]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QED':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 9,
        'particles': base_objects.ParticleList(\
            [d, \
             antid, \
             z]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QED':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 10,
        'particles': base_objects.ParticleList(\
            [eplus, \
             eminus, \
             z]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QED':1}}))

    # Assemble the model used by all test methods below.
    self.mymodel = base_objects.Model()
    self.mymodel.set('particles', mypartlist)
    self.mymodel.set('interactions', myinterlist)
    self.mymodel.set('name', 'sm')
def test_group_subprocs_and_get_diagram_maps(self):
    """Test grouping subprocs and generating HelasMatrixElements.

    Generates the multiprocess p p > p p (QED<=nfs), groups its
    amplitudes with SubProcessGroup.group_amplitudes("madevent"), and
    compares each group's 'diagram_maps' and per-config subprocess
    diagrams against the hard-coded expected data below.
    """
    max_fs = 2
    # Multiparticle content: g d d~ u u~
    p = [21, 1, -1, 2, -2]
    my_multi_leg = base_objects.MultiLeg({'ids': p, 'state': True});

    # Expected 'diagram_maps' per (nfs-2, group) — regenerate with the
    # commented-out collection code at the bottom of this method.
    diagram_maps = [[{0: [0, 1, 2, 3]},
                     {0: [1, 2, 3]},
                     {0: [1, 2, 3], 1: [1, 3, 2]},
                     {0: [1, 2, 3], 1: [1, 3, 2]},
                     {0: [1, 2, 3, 4, 5, 6], 1: [7, 8, 9, 1, 2, 3], 2: [7, 8, 9], 3: [1, 2, 3], 4: [1, 2, 3], 5: [7, 8, 9, 4, 5, 6], 6: [7, 8, 9], 7: [1, 2, 3, 4, 5, 6], 8: [1, 2, 3], 9: [1, 2, 3]},
                     {0: [1, 2, 3], 1: [1, 3, 2]}]]

    # Expected get_subproc_diagrams_for_config output per (nfs-2, group,
    # config).
    diags_for_config = [[[[2], [3], [4]],
                         [[1], [2], [3]],
                         [[1, 1], [2, 3], [3, 2]],
                         [[1, 1], [2, 3], [3, 2]],
                         [[1, 4, 0, 1, 1, 0, 0, 1, 1, 1], [2, 5, 0, 2, 2, 0, 0, 2, 2, 2], [3, 6, 0, 3, 3, 0, 0, 3, 3, 3], [4, 0, 0, 0, 0, 4, 0, 4, 0, 0], [5, 0, 0, 0, 0, 5, 0, 5, 0, 0], [6, 0, 0, 0, 0, 6, 0, 6, 0, 0], [0, 1, 1, 0, 0, 1, 1, 0, 0, 0], [0, 2, 2, 0, 0, 2, 2, 0, 0, 0], [0, 3, 3, 0, 0, 3, 3, 0, 0, 0]],
                         [[1, 1], [2, 3], [3, 2]]]]

    #new_diagram_maps = []
    #new_diags_for_config = []
    for nfs in range(2, max_fs + 1):
        # Define the multiprocess
        my_multi_leglist = base_objects.MultiLegList([copy.copy(leg) for leg in [my_multi_leg] * (2 + nfs)])
        my_multi_leglist[0].set('state', False)
        my_multi_leglist[1].set('state', False)
        my_process_definition = base_objects.ProcessDefinition({\
            'legs':my_multi_leglist,
            'model':self.mymodel,
            'orders': {'QED': nfs}})
        my_multiprocess = diagram_generation.MultiProcess(\
            {'process_definitions':\
             base_objects.ProcessDefinitionList([my_process_definition])})
        nproc = 0

        # Calculate diagrams for all processes
        amplitudes = my_multiprocess.get('amplitudes')
        subprocess_groups = group_subprocs.SubProcessGroup.\
                            group_amplitudes(amplitudes, "madevent")
        #dmaps = []
        #diags = []
        for igroup, group in enumerate(subprocess_groups):
            # Force generation of the matrix elements before reading the
            # diagram maps.
            group.get('matrix_elements')
            #dmaps.append(group.get('diagram_maps'))
            self.assertEqual(group.get('diagram_maps'),
                             diagram_maps[nfs-2][igroup])
            #diags.append([group.get_subproc_diagrams_for_config(ic) for\
            #              ic in range(len(group.get('mapping_diagrams')))])
            for iconfig, config in enumerate(group.get('mapping_diagrams')):
                self.assertEqual(group.get_subproc_diagrams_for_config(\
                    iconfig),
                                 diags_for_config[nfs-2][igroup][iconfig])
            pass
        #new_diagram_maps.append(dmaps)
        #new_diags_for_config.append(diags)
    #print 'diagram_maps = ',new_diagram_maps
    #print 'diags_for_config = ',new_diags_for_config
def test_find_process_classes_and_mapping_diagrams(self):
    """Test the find_process_classes and find_mapping_diagrams function.

    For nfs = 2 and 3 final-state particles, checks that
    SubProcessGroup.find_process_classes assigns each amplitude the
    expected class index, and that find_mapping_diagrams returns the
    expected per-amplitude diagram maps.
    """
    max_fs = 3
    # Multiparticle content: g d d~ u u~
    p = [21, 1, -1, 2, -2]
    my_multi_leg = base_objects.MultiLeg({'ids': p, 'state': True});

    # Expected amplitude-index -> class-index maps, one dict per nfs.
    proc_classes = [{0: 0, 1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2, 7: 3, 8: 4, 9: 5, 10: 4, 11: 4, 12: 4, 13: 4, 14: 3, 15: 5, 16: 4, 17: 4, 18: 4, 19: 4, 20: 4, 21: 3, 22: 4, 23: 4, 24: 4, 25: 5, 26: 4, 27: 4, 28: 3, 29: 4, 30: 4, 31: 5, 32: 4, 33: 4, 34: 4},
                    {0: 0, 1: 1, 2: 1, 3: 2, 4: 3, 5: 3, 6: 2, 7: 3, 8: 3, 9: 2, 10: 3, 11: 3, 12: 2, 13: 3, 14: 3, 15: 4, 16: 5, 17: 5, 18: 6, 19: 7, 20: 6, 21: 6, 22: 6, 23: 6, 24: 4, 25: 5, 26: 5, 27: 7, 28: 6, 29: 6, 30: 6, 31: 6, 32: 6, 33: 4, 34: 5, 35: 5, 36: 6, 37: 6, 38: 6, 39: 7, 40: 6, 41: 6, 42: 4, 43: 5, 44: 5, 45: 6, 46: 6, 47: 7, 48: 6, 49: 6, 50: 6}]

    # Expected find_mapping_diagrams() diagram maps per (nfs-2, group,
    # amplitude) — regenerate with the commented-out code below.
    all_diagram_maps = [[{0: [0, 1, 2, 3]},
                         {0: [1, 2, 3], 1: [1, 2, 3]},
                         {0: [1, 2, 3], 1: [1, 3, 2], 2: [1, 2, 3], 3: [1, 3, 2]},
                         {0: [1, 2, 3], 1: [1, 3, 2], 2: [1, 2, 3], 3: [1, 3, 2]},
                         {0: [1, 2, 3, 4, 5, 6], 1: [7, 8, 9, 1, 2, 3], 2: [7, 8, 9], 3: [1, 2, 3], 4: [1, 2, 3], 5: [7, 8, 9, 4, 5, 6], 6: [7, 8, 9], 7: [1, 2, 3, 4, 5, 6], 8: [1, 2, 3], 9: [1, 2, 3], 10: [4, 5, 6], 11: [4, 5, 6], 12: [1, 2, 3, 4, 5, 6], 13: [7, 8, 9], 14: [7, 8, 9, 1, 2, 3], 15: [4, 5, 6], 16: [4, 5, 6], 17: [7, 8, 9], 18: [7, 8, 9, 4, 5, 6], 19: [1, 2, 3, 4, 5, 6]},
                         {0: [1, 2, 3], 1: [1, 3, 2], 2: [1, 2, 3], 3: [1, 3, 2]}],
                        [{0: [1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9, 0, 10, 11, 12, 0, 13, 14, 15, 0, 0, 0, 0, 0, 0]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 1: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 1: [2, 1, 3, 5, 4, 6, 10, 11, 12, 7, 8, 9, 13, 15, 14, 0], 2: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 3: [2, 1, 3, 5, 4, 6, 10, 11, 12, 7, 8, 9, 13, 15, 14, 0]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26], 1: [4, 5, 6, 10, 11, 12, 13, 14, 15, 19, 20, 21, 25], 2: [27, 28, 29, 16, 17, 18, 30, 31, 32, 1, 2, 3, 7, 8, 9, 33, 34, 35, 22, 23, 24, 36, 37, 38, 26, 39], 3: [10, 11, 12, 4, 5, 6, 13, 14, 15, 19, 20, 21, 25], 4: [27, 28, 29, 33, 34, 35, 36, 37, 38, 30, 31, 32, 39], 5: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26], 6: [27, 28, 29, 33, 34, 35, 30, 31, 32, 36, 37, 38, 39], 7: [27, 28, 29, 16, 17, 18, 30, 31, 32, 1, 2, 3, 7, 8, 9, 33, 34, 35, 22, 23, 24, 36, 37, 38, 26, 39]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 1: [2, 1, 3, 5, 4, 6, 10, 11, 12, 7, 8, 9, 13, 15, 14, 0], 2: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 3: [2, 1, 3, 5, 4, 6, 10, 11, 12, 7, 8, 9, 13, 15, 14, 0]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26], 1: [4, 5, 6, 10, 11, 12, 13, 14, 15, 19, 20, 21, 25], 2: [27, 28, 29, 16, 17, 18, 30, 31, 32, 1, 2, 3, 7, 8, 9, 33, 34, 35, 22, 23, 24, 36, 37, 38, 26, 39], 3: [10, 11, 12, 4, 5, 6, 13, 14, 15, 19, 20, 21, 25], 4: [27, 28, 29, 33, 34, 35, 36, 37, 38, 30, 31, 32, 39], 5: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26], 6: [27, 28, 29, 33, 34, 35, 30, 31, 32, 36, 37, 38, 39], 7: [27, 28, 29, 16, 17, 18, 30, 31, 32, 1, 2, 3, 7, 8, 9, 33, 34, 35, 22, 23, 24, 36, 37, 38, 26, 39]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26], 1: [4, 5, 6, 27, 28, 29, 19, 20, 21, 30, 31, 32, 33, 34, 35, 10, 11, 12, 36, 37, 38, 13, 14, 15, 39, 25], 2: [27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39], 3: [4, 5, 6, 10, 11, 12, 13, 14, 15, 19, 20, 21, 25], 4: [4, 5, 6, 10, 11, 12, 19, 20, 21, 13, 14, 15, 25], 5: [22, 23, 24, 30, 31, 32, 1, 2, 3, 27, 28, 29, 33, 34, 35, 7, 8, 9, 36, 37, 38, 16, 17, 18, 39, 26], 6: [30, 31, 32, 27, 28, 29, 33, 34, 35, 36, 37, 38, 39], 7: [7, 8, 9, 10, 11, 12, 1, 2, 3, 4, 5, 6, 19, 20, 21, 22, 23, 24, 13, 14, 15, 16, 17, 18, 25, 26], 8: [10, 11, 12, 4, 5, 6, 13, 14, 15, 19, 20, 21, 25], 9: [10, 11, 12, 4, 5, 6, 19, 20, 21, 13, 14, 15, 25], 10: [22, 23, 24, 7, 8, 9, 16, 17, 18, 1, 2, 3, 26], 11: [7, 8, 9, 22, 23, 24, 16, 17, 18, 1, 2, 3, 26], 12: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26], 13: [36, 37, 38, 33, 34, 35, 30, 31, 32, 27, 28, 29, 39], 14: [4, 5, 6, 27, 28, 29, 19, 20, 21, 30, 31, 32, 33, 34, 35, 10, 11, 12, 36, 37, 38, 13, 14, 15, 39, 25], 15: [22, 23, 24, 7, 8, 9, 1, 2, 3, 16, 17, 18, 26], 16: [7, 8, 9, 22, 23, 24, 1, 2, 3, 16, 17, 18, 26], 17: [36, 37, 38, 33, 34, 35, 27, 28, 29, 30, 31, 32, 39], 18: [22, 23, 24, 30, 31, 32, 1, 2, 3, 27, 28, 29, 33, 34, 35, 7, 8, 9, 36, 37, 38, 16, 17, 18, 39, 26], 19: [7, 8, 9, 10, 11, 12, 1, 2, 3, 4, 5, 6, 19, 20, 21, 22, 23, 24, 13, 14, 15, 16, 17, 18, 25, 26]},
                         {0: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 1: [2, 1, 3, 5, 4, 6, 10, 11, 12, 7, 8, 9, 13, 15, 14, 0], 2: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0], 3: [2, 1, 3, 5, 4, 6, 10, 11, 12, 7, 8, 9, 13, 15, 14, 0]}]]

    #new_diagram_maps = []
    for nfs in range(2, max_fs + 1):
        # Define the multiprocess
        my_multi_leglist = base_objects.MultiLegList([copy.copy(leg) for leg in [my_multi_leg] * (2 + nfs)])
        my_multi_leglist[0].set('state', False)
        my_multi_leglist[1].set('state', False)
        my_process_definition = base_objects.ProcessDefinition({\
            'legs':my_multi_leglist,
            'model':self.mymodel,
            'orders': {'QED': nfs}})
        my_multiprocess = diagram_generation.MultiProcess(\
            {'process_definitions':\
             base_objects.ProcessDefinitionList([my_process_definition])})
        nproc = 0

        # Calculate diagrams for all processes
        amplitudes = my_multiprocess.get('amplitudes')
        process_classes = group_subprocs.SubProcessGroup.\
                          find_process_classes(amplitudes, "madevent")
        #print process_classes
        self.assertEqual(process_classes,
                         proc_classes[nfs-2])
        subproc_groups = group_subprocs.SubProcessGroup.\
                         group_amplitudes(amplitudes, "madevent")
        #dmaps = []
        for inum, group in enumerate(subproc_groups):
            mapping_diagrams, diagram_maps = group.find_mapping_diagrams()
            #print "mapping_diagrams: "
            #print "\n".join(["%d: %s" % (i+1, str(a)) for i,a in \
            #                 enumerate(mapping_diagrams)])
            #dmaps.append(diagram_maps)
            for iamp, amplitude in enumerate(group.get('amplitudes')):
                #print amplitude.nice_string()
                self.assertEqual(diagram_maps[iamp],
                                 all_diagram_maps[nfs-2][inum][iamp])
            pass
        #new_diagram_maps.append(dmaps)
    #print "all_diagram_maps = ",new_diagram_maps
def test_group_decay_chains(self):
    """Test group_amplitudes for decay chains.

    Builds four core QED<=1 processes with a final-state Z plus two Z
    decay modes, groups them with DecayChainSubProcessGroup, and checks
    the grouping summary string and the names of the generated Helas
    decay-chain subprocess groups.
    """
    max_fs = 2 # 3
    # Core processes: q q(~) > q q(~)/g g + Z
    procs = [[1,-1,2,-2,23], [2,2,2,2,23], [2,-2,21,21,23], [1,-1,21,21,23]]
    # Decay modes: z > d d~ g and z > e- e+
    decays = [[23,1,-1,21], [23,11,-11]]
    coreamplitudes = diagram_generation.AmplitudeList()
    decayamplitudes = diagram_generation.AmplitudeList()
    decayprocs = base_objects.ProcessList()

    for proc in procs:
        # Define the multiprocess
        my_leglist = base_objects.LegList([\
            base_objects.Leg({'id': id, 'state': True}) for id in proc])
        my_leglist[0].set('state', False)
        my_leglist[1].set('state', False)
        my_process = base_objects.Process({'legs':my_leglist,
                                           'model':self.mymodel,
                                           'orders':{'QED':1}})
        my_amplitude = diagram_generation.Amplitude(my_process)
        coreamplitudes.append(my_amplitude)

    for proc in decays:
        # Define the decay processes (only the first leg is initial-state)
        my_leglist = base_objects.LegList([\
            base_objects.Leg({'id': id, 'state': True}) for id in proc])
        my_leglist[0].set('state', False)
        my_process = base_objects.Process({'legs':my_leglist,
                                           'model':self.mymodel,
                                           'is_decay_chain': True})
        my_amplitude = diagram_generation.Amplitude(my_process)
        decayamplitudes.append(my_amplitude)
        decayprocs.append(my_process)

    # NOTE: `decays` is deliberately rebound from the id-list to the
    # DecayChainAmplitudeList built from those amplitudes.
    decays = diagram_generation.DecayChainAmplitudeList([\
        diagram_generation.DecayChainAmplitude({\
            'amplitudes': decayamplitudes})])
    decay_chains = diagram_generation.DecayChainAmplitude({\
        'amplitudes': coreamplitudes,
        'decay_chains': decays})

    dc_subproc_group = group_subprocs.DecayChainSubProcessGroup.\
                       group_amplitudes(\
        diagram_generation.DecayChainAmplitudeList([decay_chains]))

    #print dc_subproc_group.nice_string()

    # Compare against the full expected summary (exact string match).
    self.assertEqual(dc_subproc_group.nice_string(),
                     """Group 1:
Process: d d~ > u u~ z QED<=1
4 diagrams:
1 ((1(-1),2(1)>1(21),id:5),(3(2),5(23)>3(2),id:8),(1(21),3(2),4(-2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
2 ((1(-1),2(1)>1(21),id:5),(4(-2),5(23)>4(-2),id:8),(1(21),3(2),4(-2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
3 ((1(-1),5(23)>1(-1),id:9),(3(2),4(-2)>3(21),id:3),(1(-1),2(1),3(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
4 ((2(1),5(23)>2(1),id:9),(3(2),4(-2)>3(21),id:3),(1(-1),2(1),3(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
Process: u u > u u z QED<=1
8 diagrams:
1 ((1(-2),3(2)>1(21),id:3),(2(-2),5(23)>2(-2),id:8),(1(21),2(-2),4(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
2 ((1(-2),3(2)>1(21),id:3),(4(2),5(23)>4(2),id:8),(1(21),2(-2),4(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
3 ((1(-2),4(2)>1(21),id:3),(2(-2),5(23)>2(-2),id:8),(1(21),2(-2),3(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
4 ((1(-2),4(2)>1(21),id:3),(3(2),5(23)>3(2),id:8),(1(21),2(-2),3(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
5 ((1(-2),5(23)>1(-2),id:8),(2(-2),3(2)>2(21),id:3),(1(-2),2(21),4(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
6 ((1(-2),5(23)>1(-2),id:8),(2(-2),4(2)>2(21),id:3),(1(-2),2(21),3(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
7 ((2(-2),3(2)>2(21),id:3),(4(2),5(23)>4(2),id:8),(1(-2),2(21),4(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
8 ((2(-2),4(2)>2(21),id:3),(3(2),5(23)>3(2),id:8),(1(-2),2(21),3(2),id:3)) (QCD=2,QED=1,WEIGHTED=4)
Group 2:
Process: u u~ > g g z QED<=1
8 diagrams:
1 ((1(-2),3(21)>1(-2),id:3),(2(2),4(21)>2(2),id:3),(1(-2),2(2),5(23),id:8)) (QCD=2,QED=1,WEIGHTED=4)
2 ((1(-2),3(21)>1(-2),id:3),(2(2),5(23)>2(2),id:8),(1(-2),2(2),4(21),id:3)) (QCD=2,QED=1,WEIGHTED=4)
3 ((1(-2),4(21)>1(-2),id:3),(2(2),3(21)>2(2),id:3),(1(-2),2(2),5(23),id:8)) (QCD=2,QED=1,WEIGHTED=4)
4 ((1(-2),4(21)>1(-2),id:3),(2(2),5(23)>2(2),id:8),(1(-2),2(2),3(21),id:3)) (QCD=2,QED=1,WEIGHTED=4)
5 ((1(-2),5(23)>1(-2),id:8),(2(2),3(21)>2(2),id:3),(1(-2),2(2),4(21),id:3)) (QCD=2,QED=1,WEIGHTED=4)
6 ((1(-2),5(23)>1(-2),id:8),(2(2),4(21)>2(2),id:3),(1(-2),2(2),3(21),id:3)) (QCD=2,QED=1,WEIGHTED=4)
7 ((1(-2),5(23)>1(-2),id:8),(3(21),4(21)>3(21),id:1),(1(-2),2(2),3(21),id:3)) (QCD=2,QED=1,WEIGHTED=4)
8 ((2(2),5(23)>2(2),id:8),(3(21),4(21)>3(21),id:1),(1(-2),2(2),3(21),id:3)) (QCD=2,QED=1,WEIGHTED=4)
Process: d d~ > g g z QED<=1
8 diagrams:
1 ((1(-1),3(21)>1(-1),id:5),(2(1),4(21)>2(1),id:5),(1(-1),2(1),5(23),id:9)) (QCD=2,QED=1,WEIGHTED=4)
2 ((1(-1),3(21)>1(-1),id:5),(2(1),5(23)>2(1),id:9),(1(-1),2(1),4(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
3 ((1(-1),4(21)>1(-1),id:5),(2(1),3(21)>2(1),id:5),(1(-1),2(1),5(23),id:9)) (QCD=2,QED=1,WEIGHTED=4)
4 ((1(-1),4(21)>1(-1),id:5),(2(1),5(23)>2(1),id:9),(1(-1),2(1),3(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
5 ((1(-1),5(23)>1(-1),id:9),(2(1),3(21)>2(1),id:5),(1(-1),2(1),4(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
6 ((1(-1),5(23)>1(-1),id:9),(2(1),4(21)>2(1),id:5),(1(-1),2(1),3(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
7 ((1(-1),5(23)>1(-1),id:9),(3(21),4(21)>3(21),id:1),(1(-1),2(1),3(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
8 ((2(1),5(23)>2(1),id:9),(3(21),4(21)>3(21),id:1),(1(-1),2(1),3(21),id:5)) (QCD=2,QED=1,WEIGHTED=4)
Decay groups:
Group 1:
Process: z > d d~ g
2 diagrams:
1 ((2(1),4(21)>2(1),id:5),(2(1),3(-1)>2(23),id:9),(1(23),2(23),id:0)) (QCD=1,QED=1,WEIGHTED=3)
2 ((3(-1),4(21)>3(-1),id:5),(2(1),3(-1)>2(23),id:9),(1(23),2(23),id:0)) (QCD=1,QED=1,WEIGHTED=3)
Group 2:
Process: z > e- e+
1 diagrams:
1 ((2(11),3(-11)>2(23),id:10),(1(23),2(23),id:0)) (QCD=0,QED=1,WEIGHTED=2)""")

    subproc_groups = \
        dc_subproc_group.generate_helas_decay_chain_subproc_groups()

    # Four combined core x decay groups are expected, with these names.
    self.assertEqual(len(subproc_groups), 4)
    group_names = ['qq_qqz_z_qqg',
                   'qq_qqz_z_ll',
                   'qq_ggz_z_qqg',
                   'qq_ggz_z_ll']
    for igroup, group in enumerate(subproc_groups):
        self.assertEqual(group.get('name'),
                         group_names[igroup])
def test_special_group_decay_chain(self):
    """Test group_amplitudes for special decay chains.

    Builds a small MSSM-like model (g, gluino, d quark, d squark,
    neutralino) and checks that g g > go go with the two-step decay
    go > d dl~, dl~ > d~ n1 (applied to both gluinos) groups into a
    single subprocess group containing three matrix elements.
    """
    mypartlist = base_objects.ParticleList()
    myinterlist = base_objects.InteractionList()

    # A gluon (note: color 1 here — simplified model for this test)
    mypartlist.append(base_objects.Particle({'name':'g',
                                             'antiname':'g',
                                             'spin':3,
                                             'color':1,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'g',
                                             'antitexname':'g',
                                             'line':'curly',
                                             'charge':0.,
                                             'pdg_code':21,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':True}))
    g = mypartlist[-1]

    # A gluino
    mypartlist.append(base_objects.Particle({'name':'go',
                                             'antiname':'go',
                                             'spin':2,
                                             'color':1,
                                             'mass':'MGO',
                                             'width':'WGO',
                                             'texname':'go',
                                             'antitexname':'go',
                                             'line':'curly',
                                             'charge':0.,
                                             'pdg_code':1000021,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':True}))
    go = mypartlist[-1]

    # A quark D and its antiparticle
    mypartlist.append(base_objects.Particle({'name':'d',
                                             'antiname':'d~',
                                             'spin':2,
                                             'color':1,
                                             'mass':'zero',
                                             'width':'zero',
                                             'texname':'d',
                                             'antitexname':'\bar d',
                                             'line':'straight',
                                             'charge':-1. / 3.,
                                             'pdg_code':1,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':False}))
    d = mypartlist[-1]
    antid = copy.copy(d)
    antid.set('is_part', False)

    # A d squark and its antiparticle
    mypartlist.append(base_objects.Particle({'name':'dl',
                                             'antiname':'dl~',
                                             'spin':0,
                                             'color':1,
                                             'mass':'MDL',
                                             'width':'WDL',
                                             'texname':'dl',
                                             'antitexname':'\bar dl',
                                             'line':'straight',
                                             'charge':-1. / 3.,
                                             'pdg_code':1000001,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':False}))
    dl = mypartlist[-1]
    antidl = copy.copy(dl)
    antidl.set('is_part', False)

    # A neutralino
    mypartlist.append(base_objects.Particle({'name':'n1',
                                             'antiname':'n1',
                                             'spin':2,
                                             'color':1,
                                             'mass':'MN1',
                                             'width':'WN1',
                                             'texname':'n1',
                                             'antitexname':'n1',
                                             'line':'wavy',
                                             'charge':0.,
                                             'pdg_code':1000022,
                                             'propagating':True,
                                             'is_part':True,
                                             'self_antipart':True}))
    n1 = mypartlist[-1]

    # 3 gluon vertex
    myinterlist.append(base_objects.Interaction({
        'id': 1,
        'particles': base_objects.ParticleList(\
            [g] * 3),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'G'},
        'orders':{'QCD':1}}))

    # Gluon couplings to gluinos
    myinterlist.append(base_objects.Interaction({
        'id': 3,
        'particles': base_objects.ParticleList(\
            [go, \
             go, \
             g]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQQ'},
        'orders':{'QCD':1}}))

    # Gluino and neutralino couplings to quarks and squarks
    myinterlist.append(base_objects.Interaction({
        'id': 4,
        'particles': base_objects.ParticleList(\
            [go,
             d,
             antidl]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QCD':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 5,
        'particles': base_objects.ParticleList(\
            [antid,
             go,
             dl]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQED'},
        'orders':{'QCD':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 6,
        'particles': base_objects.ParticleList(\
            [n1, \
             d, \
             antidl]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQQ'},
        'orders':{'QED':1}}))

    myinterlist.append(base_objects.Interaction({
        'id': 7,
        'particles': base_objects.ParticleList(\
            [antid, \
             n1, \
             dl]),
        'color': [],
        'lorentz':['L1'],
        'couplings':{(0, 0):'GQQ'},
        'orders':{'QED':1}}))

    # Local model for this test only (does not touch self.mymodel).
    mymodel = base_objects.Model()
    mymodel.set('particles', mypartlist)
    mymodel.set('interactions', myinterlist)
    mymodel.set('name', 'sm')

    # Multiparticle labels
    ds = [1,-1]
    dls = [1000001,-1000001]

    # Core process g g > go go
    proc = [21,21,1000021,1000021]
    my_leglist = base_objects.MultiLegList([\
        base_objects.MultiLeg({'ids': [id]}) for id in proc])
    my_leglist[0].set('state', False)
    my_leglist[1].set('state', False)
    process = base_objects.ProcessDefinition({'legs':my_leglist,
                                              'model':mymodel})

    # First decay step: go > d/d~ dl/dl~
    decayproc1 = [[1000021],ds,dls]
    my_leglist = base_objects.MultiLegList([\
        base_objects.MultiLeg({'ids': id}) for id in decayproc1])
    my_leglist[0].set('state', False)
    decayprocess1 = base_objects.ProcessDefinition({'legs':my_leglist,
                                                    'model':mymodel})

    # Second decay step: dl/dl~ > d/d~ n1
    decayproc2 = [dls,ds,[1000022]]
    my_leglist = base_objects.MultiLegList([\
        base_objects.MultiLeg({'ids': id}) for id in decayproc2])
    my_leglist[0].set('state', False)
    decayprocess2 = base_objects.ProcessDefinition({'legs':my_leglist,
                                                    'model':mymodel})

    # Chain the decays: both gluinos decay through decayprocess1.
    decayprocess1.get('decay_chains').append(decayprocess2)
    process.get('decay_chains').append(decayprocess1)
    process.get('decay_chains').append(decayprocess1)
    my_amplitude = diagram_generation.DecayChainAmplitude(process)

    dc_subproc_group = group_subprocs.DecayChainSubProcessGroup.\
                       group_amplitudes(\
        diagram_generation.DecayChainAmplitudeList([my_amplitude]))

    subproc_groups = \
        dc_subproc_group.generate_helas_decay_chain_subproc_groups()

    # A single group with three distinct decay-combination matrix
    # elements is expected; check each process string exactly.
    self.assertEqual(len(subproc_groups), 1)
    self.assertEqual(len(subproc_groups[0].get('matrix_elements')),3)

    me_strings = ["""Process: g g > go go WEIGHTED<=2
Decay: go > d dl~ WEIGHTED<=1
Decay: dl~ > d~ n1 WEIGHTED<=2
Decay: go > d dl~ WEIGHTED<=1
Decay: dl~ > d~ n1 WEIGHTED<=2""",
"""Process: g g > go go WEIGHTED<=2
Decay: go > d dl~ WEIGHTED<=1
Decay: dl~ > d~ n1 WEIGHTED<=2
Decay: go > d~ dl WEIGHTED<=1
Decay: dl > d n1 WEIGHTED<=2""",
"""Process: g g > go go WEIGHTED<=2
Decay: go > d~ dl WEIGHTED<=1
Decay: dl > d n1 WEIGHTED<=2
Decay: go > d~ dl WEIGHTED<=1
Decay: dl > d n1 WEIGHTED<=2"""]

    for i,me in enumerate(subproc_groups[0].get('matrix_elements')):
        self.assertEqual(me.get('processes')[0].nice_string(),
                         me_strings[i])
def test_even_more_special_group_decay_chain(self):
    """Test group_amplitudes for even more special decay chain.

    Builds a toy model (d/u/s/c/b/t quarks, a Z', a self-conjugate
    ~n1 fermion, a W and an electron), then checks that decay-chain
    amplitudes are grouped into HELAS subprocess groups correctly:
    first for a single core process with two distinct decay chains,
    then for two core processes carrying different process ids.
    """
    mypartlist = base_objects.ParticleList()
    myinterlist = base_objects.InteractionList()

    # A d quark and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 'd', 'antiname': 'd~',
        'spin': 2, 'color': 1,
        'mass': 'zero', 'width': 'zero',
        'texname': 'd', 'antitexname': '\bar d',
        'line': 'straight', 'charge': -1. / 3.,
        'pdg_code': 1, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    d = mypartlist[-1]
    antid = copy.copy(d)
    antid.set('is_part', False)

    # A u quark and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 'u', 'antiname': 'u~',
        'spin': 2, 'color': 3,
        'mass': 'zero', 'width': 'zero',
        'texname': 'u', 'antitexname': '\bar u',
        'line': 'straight', 'charge': 2. / 3.,
        'pdg_code': 2, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    u = mypartlist[-1]
    antiu = copy.copy(u)
    antiu.set('is_part', False)

    # An s and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 's', 'antiname': 's~',
        'spin': 2, 'color': 1,
        'mass': 'zero', 'width': 'zero',
        'texname': 'd', 'antitexname': '\bar d',
        'line': 'straight', 'charge': -1. / 3.,
        'pdg_code': 3, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    s = mypartlist[-1]
    antis = copy.copy(s)
    antis.set('is_part', False)

    # A c quark and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 'c', 'antiname': 'c~',
        'spin': 2, 'color': 3,
        'mass': 'zero', 'width': 'zero',
        'texname': 'c', 'antitexname': '\bar c',
        'line': 'straight', 'charge': 2. / 3.,
        'pdg_code': 4, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    c = mypartlist[-1]
    antic = copy.copy(c)
    antic.set('is_part', False)

    # A b quark and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 'b', 'antiname': 'b~',
        'spin': 2, 'color': 1,
        'mass': 'MB', 'width': 'zero',
        'texname': 'b', 'antitexname': '\bar b',
        'line': 'straight', 'charge': -1. / 3.,
        'pdg_code': 5, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    b = mypartlist[-1]
    antib = copy.copy(b)
    antib.set('is_part', False)

    # A t quark and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 't', 'antiname': 't~',
        'spin': 2, 'color': 3,
        'mass': 'MT', 'width': 'zero',
        'texname': 't', 'antitexname': '\bar t',
        'line': 'straight', 'charge': 2. / 3.,
        'pdg_code': 6, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    t = mypartlist[-1]
    antit = copy.copy(t)
    antit.set('is_part', False)

    # A funny Zprime
    mypartlist.append(base_objects.Particle({
        'name': 'Zp', 'antiname': 'Zp',
        'spin': 3, 'color': 1,
        'charge': 0.00, 'mass': 'MZp',
        'width': 'WZp', 'pdg_code': 9900032,
        'texname': 'Zp', 'antitexname': 'Zp',
        'line': 'wavy', 'propagating': True,
        'is_part': True, 'self_antipart': True}))
    Zp = mypartlist[-1]

    # A funny neutralino
    mypartlist.append(base_objects.Particle({
        'name': '~n1', 'antiname': '~n1',
        'spin': 2, 'color': 1,
        'charge': 0.00, 'mass': 'MnH1',
        'width': 'WnH1', 'pdg_code': 9910012,
        'texname': '~n1', 'antitexname': '~n1',
        'line': 'swavy', 'propagating': True,
        'is_part': True, 'self_antipart': True}))
    n1 = mypartlist[-1]

    # A W
    mypartlist.append(base_objects.Particle({
        'name': 'W+', 'antiname': 'W-',
        'spin': 3, 'color': 1,
        'mass': 'MW', 'width': 'WW',
        'texname': 'W^+', 'antitexname': 'W^-',
        'line': 'wavy', 'charge': 1.,
        'pdg_code': 24, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    Wplus = mypartlist[-1]
    Wminus = copy.copy(Wplus)
    Wminus.set('is_part', False)

    # A electron and positron
    mypartlist.append(base_objects.Particle({
        'name': 'e-', 'antiname': 'e+',
        'spin': 2, 'color': 1,
        'mass': 'me', 'width': 'zero',
        'texname': 'e^-', 'antitexname': 'e^+',
        'line': 'straight', 'charge': -1.,
        'pdg_code': 11, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    eminus = mypartlist[-1]
    eplus = copy.copy(eminus)
    eplus.set('is_part', False)

    # Interactions: charged-current quark vertices (ids 1-6),
    # Z' couplings (7, 8, 11, 12) and ~n1-lepton-W vertices (9, 10).
    myinterlist.append(base_objects.Interaction({
        'id': 1,
        'particles': base_objects.ParticleList([antid, u, Wminus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_35'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 2,
        'particles': base_objects.ParticleList([antiu, d, Wplus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_112'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 3,
        'particles': base_objects.ParticleList([antis, c, Wminus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_35'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 4,
        'particles': base_objects.ParticleList([antic, s, Wplus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_112'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 5,
        'particles': base_objects.ParticleList([antib, t, Wminus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_35'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 6,
        'particles': base_objects.ParticleList([antit, b, Wplus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_112'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 7,
        'particles': base_objects.ParticleList([antid, d, Zp]),
        'color': [],
        'lorentz': ['FFV1'],
        'couplings': {(0, 0): 'GC_7'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 8,
        'particles': base_objects.ParticleList([n1, n1, Zp]),
        'color': [],
        'lorentz': ['FFV2'],
        'couplings': {(0, 0): 'GC_22'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 9,
        'particles': base_objects.ParticleList([eplus, n1, Wminus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_46'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 10,
        'particles': base_objects.ParticleList([n1, eminus, Wplus]),
        'color': [],
        'lorentz': ['FFV3'],
        'couplings': {(0, 0): 'GC_46'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 11,
        'particles': base_objects.ParticleList([antit, c, Zp]),
        'color': [],
        'lorentz': ['FFV4'],
        'couplings': {(0, 0): 'GC_50'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 12,
        'particles': base_objects.ParticleList([antic, t, Zp]),
        'color': [],
        'lorentz': ['FFV4'],
        'couplings': {(0, 0): 'GC_50'},
        'orders': {'QED': 1}}))

    mymodel = base_objects.Model()
    mymodel.set('particles', mypartlist)
    mymodel.set('interactions', myinterlist)
    mymodel.set('name', 'sm')

    # Multiparticle labels
    ds = [1, -1]
    qs = [1, 3, 2, 4, -1, -3, -2, -4]

    # Core process d d~ > ~n1 ~n1 with two distinct decay chains.
    my_leglist = base_objects.MultiLegList()
    my_leglist.append(base_objects.MultiLeg({'ids': ds, 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': ds, 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [n1.get('pdg_code')]}))
    my_leglist.append(base_objects.MultiLeg({'ids': [n1.get('pdg_code')]}))
    core_process = base_objects.ProcessDefinition({'legs': my_leglist,
                                                   'model': mymodel})
    decay1proc1 = [[n1.get('pdg_code')], [11], [24]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay1proc1])
    my_leglist[0].set('state', False)
    # Both ~n1 decay definitions deliberately share the same leg list;
    # they differ only in the W decay chain attached below.
    decay1process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay1process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay2proc1 = [[24], qs, qs]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay2proc1])
    my_leglist[0].set('state', False)
    decay2process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay2proc2 = [[24], [6], [-5]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay2proc2])
    my_leglist[0].set('state', False)
    decay2process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay1process1.get('decay_chains').append(decay2process1)
    decay1process2.get('decay_chains').append(decay2process2)
    core_process.get('decay_chains').append(decay1process1)
    core_process.get('decay_chains').append(decay1process2)
    my_amplitude = diagram_generation.DecayChainAmplitude(core_process)
    dc_subproc_group = group_subprocs.DecayChainSubProcessGroup.\
                       group_amplitudes(\
                       diagram_generation.DecayChainAmplitudeList([my_amplitude]))
    subproc_groups = \
                   dc_subproc_group.generate_helas_decay_chain_subproc_groups()
    self.assertEqual(len(subproc_groups), 1)
    self.assertEqual(len(subproc_groups[0].get('matrix_elements')), 2)
    # NOTE(review): leading indentation inside these expected strings may
    # have been lost in extraction — verify against nice_string() output.
    me_strings = ["""Process: d d~ > ~n1 ~n1 WEIGHTED<=2
Decay: ~n1 > e- W+ WEIGHTED<=1
Decay: W+ > u d~ WEIGHTED<=1
Decay: ~n1 > e- W+ WEIGHTED<=1
Decay: W+ > t b~ WEIGHTED<=1""",
                  """Process: d~ d > ~n1 ~n1 WEIGHTED<=2
Decay: ~n1 > e- W+ WEIGHTED<=1
Decay: W+ > u d~ WEIGHTED<=1
Decay: ~n1 > e- W+ WEIGHTED<=1
Decay: W+ > t b~ WEIGHTED<=1"""]
    for i, me in enumerate(subproc_groups[0].get('matrix_elements')):
        self.assertEqual(me.get('processes')[0].nice_string(),
                         me_strings[i])

    # Now test also for different process ids
    ds = [1, -1]
    qs = [1, 3, 2, 4, -1, -3, -2, -4]

    # First process
    my_leglist = base_objects.MultiLegList()
    my_leglist.append(base_objects.MultiLeg({'ids': ds, 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': ds, 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [n1.get('pdg_code')]}))
    my_leglist.append(base_objects.MultiLeg({'ids': [n1.get('pdg_code')]}))
    core_process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                    'model': mymodel,
                                                    'id': 1})
    decay1proc1 = [[n1.get('pdg_code')], [11], [24]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay1proc1])
    my_leglist[0].set('state', False)
    decay1process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay1process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay2proc1 = [[24], qs, qs]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay2proc1])
    my_leglist[0].set('state', False)
    decay2process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay2proc2 = [[24], [6], [-5]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay2proc2])
    my_leglist[0].set('state', False)
    decay2process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay1process1.get('decay_chains').append(decay2process1)
    decay1process2.get('decay_chains').append(decay2process2)
    core_process1.get('decay_chains').append(decay1process1)
    core_process1.get('decay_chains').append(decay1process2)
    my_amplitude1 = diagram_generation.DecayChainAmplitude(core_process1)

    # Second process
    my_leglist = base_objects.MultiLegList()
    my_leglist.append(base_objects.MultiLeg({'ids': ds, 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': ds, 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [n1.get('pdg_code')]}))
    my_leglist.append(base_objects.MultiLeg({'ids': [n1.get('pdg_code')]}))
    core_process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                    'model': mymodel,
                                                    'id': 2})
    decay1proc1 = [[n1.get('pdg_code')], [11], [24]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay1proc1])
    my_leglist[0].set('state', False)
    decay1process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay1process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay2proc1 = [[24], [6], [-5]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay2proc1])
    my_leglist[0].set('state', False)
    decay2process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay2proc2 = [[24], [6], [-5]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay2proc2])
    my_leglist[0].set('state', False)
    decay2process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    decay1process1.get('decay_chains').append(decay2process1)
    decay1process2.get('decay_chains').append(decay2process2)
    core_process2.get('decay_chains').append(decay1process1)
    core_process2.get('decay_chains').append(decay1process2)
    my_amplitude2 = diagram_generation.DecayChainAmplitude(core_process2)

    # Distinct process ids must end up in distinct subprocess groups.
    dc_subproc_group = group_subprocs.DecayChainSubProcessGroup.\
                       group_amplitudes(\
                       diagram_generation.DecayChainAmplitudeList([my_amplitude1,
                                                                   my_amplitude2]))
    subproc_groups = \
                   dc_subproc_group.generate_helas_decay_chain_subproc_groups()
    self.assertEqual(len(subproc_groups), 2)
    self.assertEqual(len(subproc_groups[0].get('matrix_elements')), 2)
    self.assertEqual(len(subproc_groups[1].get('matrix_elements')), 2)
def test_single_decay_combinations(self):
    """Test combination of single decay of t and t~:
    b b~ > t t~, t > c d d~ and b b~ > t t~, t~ > c~ d d~"""

    mypartlist = base_objects.ParticleList()
    myinterlist = base_objects.InteractionList()

    # A gluon
    mypartlist.append(base_objects.Particle({
        'name': 'g', 'antiname': 'g',
        'spin': 3, 'color': 8,
        'mass': 'zero', 'width': 'zero',
        'texname': 'g', 'antitexname': 'g',
        'line': 'curly', 'charge': 0.,
        'pdg_code': 21, 'propagating': True,
        'is_part': True, 'self_antipart': True}))
    g = mypartlist[-1]

    # A quark U and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 'u', 'antiname': 'u~',
        'spin': 2, 'color': 3,
        'mass': 'zero', 'width': 'zero',
        'texname': 'u', 'antitexname': '\bar u',
        'line': 'straight', 'charge': 2. / 3.,
        'pdg_code': 2, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    u = mypartlist[-1]
    antiu = copy.copy(u)
    antiu.set('is_part', False)

    # A quark D and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 'd', 'antiname': 'd~',
        'spin': 2, 'color': 3,
        'mass': 'zero', 'width': 'zero',
        'texname': 'd', 'antitexname': '\bar d',
        'line': 'straight', 'charge': -1. / 3.,
        'pdg_code': 1, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    d = mypartlist[-1]
    antid = copy.copy(d)
    antid.set('is_part', False)

    # A top quark and its antiparticle
    mypartlist.append(base_objects.Particle({
        'name': 't', 'antiname': 't~',
        'spin': 2, 'color': 3,
        'mass': 'MT', 'width': 'zero',
        'texname': 't', 'antitexname': '\bar t',
        'line': 'straight', 'charge': 2. / 3.,
        'pdg_code': 6, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    t = mypartlist[-1]
    antit = copy.copy(t)
    antit.set('is_part', False)

    # A electron and positron
    mypartlist.append(base_objects.Particle({
        'name': 'e-', 'antiname': 'e+',
        'spin': 2, 'color': 1,
        'mass': 'zero', 'width': 'zero',
        'texname': 'e^-', 'antitexname': 'e^+',
        'line': 'straight', 'charge': -1.,
        'pdg_code': 11, 'propagating': True,
        'is_part': True, 'self_antipart': False}))
    eminus = mypartlist[-1]
    eplus = copy.copy(eminus)
    eplus.set('is_part', False)

    # A photon
    mypartlist.append(base_objects.Particle({
        'name': 'a', 'antiname': 'a',
        'spin': 3, 'color': 1,
        'mass': 'zero', 'width': 'zero',
        'texname': '\gamma', 'antitexname': '\gamma',
        'line': 'wavy', 'charge': 0.,
        'pdg_code': 22, 'propagating': True,
        'is_part': True, 'self_antipart': True}))
    a = mypartlist[-1]

    # A Z
    mypartlist.append(base_objects.Particle({
        'name': 'z', 'antiname': 'z',
        'spin': 3, 'color': 1,
        'mass': 'MZ', 'width': 'WZ',
        'texname': 'Z', 'antitexname': 'Z',
        'line': 'wavy', 'charge': 0.,
        'pdg_code': 23, 'propagating': True,
        'is_part': True, 'self_antipart': True}))
    z = mypartlist[-1]

    # 3 gluon vertiex
    myinterlist.append(base_objects.Interaction({
        'id': 1,
        'particles': base_objects.ParticleList([g] * 3),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'G'},
        'orders': {'QCD': 1}}))
    # 4 gluon vertex
    myinterlist.append(base_objects.Interaction({
        'id': 2,
        'particles': base_objects.ParticleList([g] * 4),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'G^2'},
        'orders': {'QCD': 2}}))
    # Gluon and photon couplings to quarks
    myinterlist.append(base_objects.Interaction({
        'id': 3,
        'particles': base_objects.ParticleList([u, antiu, g]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQQ'},
        'orders': {'QCD': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 4,
        'particles': base_objects.ParticleList([u, antiu, a]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQED'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 5,
        'particles': base_objects.ParticleList([d, antid, g]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQQ'},
        'orders': {'QCD': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 6,
        'particles': base_objects.ParticleList([antit, t, g]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQQ'},
        'orders': {'QCD': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 7,
        'particles': base_objects.ParticleList([d, antid, a]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQED'},
        'orders': {'QED': 1}}))
    # Coupling of e to gamma
    myinterlist.append(base_objects.Interaction({
        'id': 8,
        'particles': base_objects.ParticleList([eminus, eplus, a]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQED'},
        'orders': {'QED': 1}}))
    # Coupling of Z to quarks and electrons
    myinterlist.append(base_objects.Interaction({
        'id': 9,
        'particles': base_objects.ParticleList([u, antiu, z]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQED'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 10,
        'particles': base_objects.ParticleList([d, antid, z]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQED'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 11,
        'particles': base_objects.ParticleList([eplus, eminus, z]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQED'},
        'orders': {'QED': 1}}))
    # FCNC coupling
    myinterlist.append(base_objects.Interaction({
        'id': 12,
        'particles': base_objects.ParticleList([antit, u, z]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GFCNC'},
        'orders': {'QED': 1}}))
    myinterlist.append(base_objects.Interaction({
        'id': 13,
        'particles': base_objects.ParticleList([antiu, t, z]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GFCNC'},
        'orders': {'QED': 1}}))

    mymodel = base_objects.Model()
    mymodel.set('particles', mypartlist)
    mymodel.set('interactions', myinterlist)
    mymodel.set('name', 'sm')

    # d d~ > t t~, t > u Z
    my_leglist = base_objects.MultiLegList()
    my_leglist.append(base_objects.MultiLeg({'ids': [1], 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [-1], 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [6]}))
    my_leglist.append(base_objects.MultiLeg({'ids': [-6]}))
    core_process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                    'model': mymodel})
    decay1proc1 = [[6], [2], [23]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay1proc1])
    my_leglist[0].set('state', False)
    decay1process1 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    core_process1.get('decay_chains').append(decay1process1)
    my_amplitude1 = diagram_generation.DecayChainAmplitude(core_process1)

    # d d~ > t t~, t~ > u~ Z
    my_leglist = base_objects.MultiLegList()
    my_leglist.append(base_objects.MultiLeg({'ids': [1], 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [-1], 'state': False}))
    my_leglist.append(base_objects.MultiLeg({'ids': [6]}))
    my_leglist.append(base_objects.MultiLeg({'ids': [-6]}))
    core_process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                    'model': mymodel})
    decay1proc2 = [[-6], [-2], [23]]
    my_leglist = base_objects.MultiLegList([
        base_objects.MultiLeg({'ids': multi_ids}) for multi_ids in decay1proc2])
    my_leglist[0].set('state', False)
    decay1process2 = base_objects.ProcessDefinition({'legs': my_leglist,
                                                     'model': mymodel})
    core_process2.get('decay_chains').append(decay1process2)
    my_amplitude2 = diagram_generation.DecayChainAmplitude(core_process2)

    # The t and t~ single decays must end up in separate groups.
    dc_subproc_group = group_subprocs.DecayChainSubProcessGroup.\
                       group_amplitudes(\
                       diagram_generation.DecayChainAmplitudeList([my_amplitude1,
                                                                   my_amplitude2]))
    subproc_groups = \
                   dc_subproc_group.generate_helas_decay_chain_subproc_groups()
    self.assertEqual(len(subproc_groups), 2)
    self.assertEqual(len(subproc_groups[0].get('matrix_elements')), 1)
    # NOTE(review): leading indentation inside these expected strings may
    # have been lost in extraction — verify against nice_string() output.
    me_strings = ["""Process: d d~ > t t~ WEIGHTED<=2
Decay: t > u z WEIGHTED<=2""",
                  """Process: d d~ > t t~ WEIGHTED<=2
Decay: t~ > u~ z WEIGHTED<=2"""]
    for i, group in enumerate(subproc_groups):
        self.assertEqual(group.get('matrix_elements')[0].\
                         get('processes')[0].nice_string(),
                         me_strings[i])
| 44.352252
| 1,507
| 0.411322
| 7,511
| 71,895
| 3.83158
| 0.049927
| 0.081796
| 0.052573
| 0.03989
| 0.8403
| 0.819174
| 0.787797
| 0.768199
| 0.750096
| 0.716912
| 0
| 0.096976
| 0.427721
| 71,895
| 1,620
| 1,508
| 44.37963
| 0.602494
| 0.043174
| 0
| 0.802251
| 0
| 0
| 0.110745
| 0
| 0
| 0
| 0
| 0
| 0.015273
| 1
| 0.005627
| false
| 0.001608
| 0.012862
| 0
| 0.019293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
70f645e712a2ce2a7d19f08614547b232c99b11d
| 345
|
py
|
Python
|
central/__init__.py
|
Mitsj0l/SDbranch-lab
|
cab76c9c06d63828dee791d050e5d2ef660089aa
|
[
"MIT"
] | 2
|
2021-12-10T02:53:54.000Z
|
2021-12-17T12:34:38.000Z
|
central/__init__.py
|
Mitsj0l/SDbranch-lab
|
cab76c9c06d63828dee791d050e5d2ef660089aa
|
[
"MIT"
] | null | null | null |
central/__init__.py
|
Mitsj0l/SDbranch-lab
|
cab76c9c06d63828dee791d050e5d2ef660089aa
|
[
"MIT"
] | 1
|
2020-11-26T13:01:27.000Z
|
2020-11-26T13:01:27.000Z
|
import central.authentication.central_auth
import central.authentication.central_frontend_auth
import central.authentication.database
import central.authentication.check_config
import central.configuration.VGW_Deployment
import central.configuration.configuration_device_fix
import central.configuration.sites
import central.configuration.labels
| 43.125
| 53
| 0.910145
| 39
| 345
| 7.871795
| 0.384615
| 0.338762
| 0.351792
| 0.221498
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 345
| 8
| 54
| 43.125
| 0.930303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cb093769b9429c1caee1aab2f12f43cff5265415
| 107
|
py
|
Python
|
ui/__init__.py
|
Darkblader24/material-combiner-addon
|
ff76134e904fc7543ea18cd6ca82c12a39bdafa9
|
[
"MIT"
] | 192
|
2018-08-21T21:57:45.000Z
|
2022-03-31T14:37:54.000Z
|
ui/__init__.py
|
vinhowe/material-combiner-addon
|
2087d7f8dd35e5824d56aed70ad989fd48f224cb
|
[
"MIT"
] | 34
|
2018-08-21T10:20:00.000Z
|
2022-03-22T19:54:47.000Z
|
ui/__init__.py
|
vinhowe/material-combiner-addon
|
2087d7f8dd35e5824d56aed70ad989fd48f224cb
|
[
"MIT"
] | 30
|
2018-08-21T08:39:03.000Z
|
2022-03-28T00:36:07.000Z
|
from . import credits_menu
from . import main_menu
from . import properties_menu
from . import update_menu
| 21.4
| 29
| 0.813084
| 16
| 107
| 5.1875
| 0.4375
| 0.481928
| 0.506024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149533
| 107
| 4
| 30
| 26.75
| 0.912088
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cb2cfaae6307cce721d693b337827875c2b5cc2c
| 76
|
py
|
Python
|
peach/__init__.py
|
iesl/KISP
|
2c8ffacc36abcefc86b6d19dcdd49213241c6ff8
|
[
"MIT"
] | null | null | null |
peach/__init__.py
|
iesl/KISP
|
2c8ffacc36abcefc86b6d19dcdd49213241c6ff8
|
[
"MIT"
] | null | null | null |
peach/__init__.py
|
iesl/KISP
|
2c8ffacc36abcefc86b6d19dcdd49213241c6ff8
|
[
"MIT"
] | null | null | null |
from peach import utils
from peach import tf_nn
from peach import bert
| 15.2
| 24
| 0.776316
| 13
| 76
| 4.461538
| 0.538462
| 0.465517
| 0.775862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223684
| 76
| 4
| 25
| 19
| 0.983051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cb85d33f82671ae2d2a687aa0e825c1585d4b9fe
| 134,695
|
py
|
Python
|
exportacao/views.py
|
bcunhasa/nutriodonto
|
3ff20377bc85bc4960619f63688b7732e6eebba9
|
[
"CC0-1.0"
] | null | null | null |
exportacao/views.py
|
bcunhasa/nutriodonto
|
3ff20377bc85bc4960619f63688b7732e6eebba9
|
[
"CC0-1.0"
] | null | null | null |
exportacao/views.py
|
bcunhasa/nutriodonto
|
3ff20377bc85bc4960619f63688b7732e6eebba9
|
[
"CC0-1.0"
] | null | null | null |
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.urls import reverse
from django.shortcuts import render
from django.views import View
import csv
from administracao.models import *
from administracao.const import *
class LoginRequired(LoginRequiredMixin):
    """Login configuration shared by all authenticated views.

    Unauthenticated users are redirected to ``login_url``; the page
    originally requested is carried in the ``next`` query parameter.
    """
    login_url = 'administracao/login/'
    redirect_field_name = 'next'
class ExportacaoView(LoginRequired, View):
    """Landing page for exporting data from the database."""

    def get(self, request):
        """Render the export page with the navigation flag set."""
        context = {
            'pagina_exportacao': True,
        }
        # self.request and the request argument are the same object in
        # Django class-based views; kept as self.request as originally.
        return render(self.request, 'exportacao/exportacao.html', context)
class DownloadCompletoView(LoginRequired, View):
"""Gera e envia o arquivo csv para a base completa"""
def get(self, request):
alunos = Aluno.objects.order_by('id')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="base.csv"'
writer = csv.writer(response, delimiter=';')
writer.writerow(['campanha.nome', 'acao.nome', 'escola.id', 'escola.nome', 'escola.latitude', 'escola.longitude', 'aluno.numero_identificacao', 'aluno.periodo', 'aluno.turma', 'aluno.nascimento', 'aluno.sexo', 'aluno.raca', 'diretor.data', 'diretor.questao_1', 'diretor.questao_2', 'diretor.questao_3', 'diretor.questao_4', 'diretor.questao_5', 'diretor.questao_6', 'diretor.questao_7', 'diretor.questao_8', 'diretor.questao_9', 'diretor.questao_10', 'diretor.questao_11', 'diretor.questao_12', 'diretor.questao_13', 'diretor.questao_14', 'diretor.questao_15', 'diretor.questao_16', 'diretor.questao_17', 'diretor.questao_18', 'diretor.questao_19', 'diretor.questao_20', 'diretor.questao_21', 'diretor.questao_22', 'diretor.questao_23', 'diretor.questao_24', 'diretor.questao_25', 'diretor.questao_26', 'diretor.questao_27', 'diretor.questao_28', 'diretor.questao_29', 'diretor.questao_30', 'diretor.questao_31', 'diretor.questao_32', 'diretor.questao_33', 'diretor.questao_34', 'diretor.questao_35', 'diretor.questao_36', 'diretor.questao_37', 'diretor.questao_38', 'diretor.questao_39', 'diretor.questao_40', 'diretor.questao_41', 'diretor.questao_42', 'diretor.questao_43', 'diretor.questao_44', 'diretor.questao_45', 'diretor.questao_46', 'diretor.questao_47', 'diretor.questao_48', 'diretor.questao_49', 'diretor.questao_50', 'diretor.questao_51', 'diretor.questao_52', 'diretor.questao_53', 'diretor.questao_54', 'diretor.questao_55', 'diretor.questao_56', 'diretor.questao_57', 'diretor.questao_58', 'diretor.questao_59', 'diretor.questao_60', 'diretor.questao_61', 'diretor.questao_62', 'diretor.questao_63', 'diretor.questao_64', 'diretor.questao_65', 'diretor.questao_66', 'diretor.questao_67', 'diretor.questao_68', 'diretor.questao_69', 'diretor.questao_70', 'diretor.questao_71', 'diretor.questao_72', 'diretor.questao_73', 'diretor.questao_74', 'diretor.questao_75', 'diretor.questao_76', 'diretor.questao_77', 'diretor.questao_78', 'diretor.questao_79', 'diretor.questao_80', 
'diretor.questao_81', 'diretor.questao_82', 'diretor.questao_83', 'diretor.questao_84', 'questionario.data', 'questionario.questao_1', 'questionario.questao_2', 'questionario.questao_3', 'questionario.questao_4', 'questionario.questao_5', 'questionario.questao_6', 'questionario.questao_7', 'questionario.questao_8', 'questionario.questao_9', 'questionario.questao_10', 'questionario.questao_11', 'questionario.questao_12', 'questionario.questao_13', 'questionario.questao_14', 'questionario.questao_15', 'questionario.questao_16', 'questionario.questao_17', 'questionario.questao_18', 'questionario.questao_19', 'questionario.questao_20', 'questionario.questao_21', 'questionario.questao_22', 'questionario.questao_23', 'questionario.questao_24', 'questionario.questao_25', 'questionario.questao_26', 'questionario.questao_27', 'questionario.questao_28', 'questionario.questao_29', 'questionario.questao_30', 'questionario.questao_31', 'questionario.questao_32', 'questionario.questao_33', 'questionario.questao_34', 'questionario.questao_35', 'questionario.questao_36', 'questionario.questao_37', 'questionario.questao_38', 'questionario.questao_39', 'questionario.questao_40', 'questionario.questao_41', 'questionario.questao_42', 'questionario.questao_43', 'questionario.questao_44', 'questionario.questao_45', 'questionario.questao_46', 'questionario.questao_47', 'questionario.questao_48', 'questionario.questao_49', 'questionario.questao_50', 'questionario.questao_51', 'questionario.questao_52', 'questionario.questao_53', 'questionario.questao_54', 'questionario.questao_55', 'questionario.questao_56', 'questionario.questao_57', 'questionario.questao_58', 'questionario.questao_59', 'questionario.questao_60', 'questionario.questao_61', 'questionario.questao_62', 'questionario.questao_63', 'questionario.questao_64', 'questionario.questao_65', 'questionario.questao_66', 'questionario.questao_67', 'questionario.questao_68', 'questionario.questao_69', 'questionario.questao_70', 
'questionario.questao_71', 'questionario.questao_72', 'questionario.questao_73', 'questionario.questao_74', 'questionario.questao_75', 'questionario.questao_76', 'questionario.questao_77', 'questionario.questao_78', 'questionario.questao_79', 'questionario.questao_80', 'questionario.questao_81', 'questionario.questao_82', 'questionario.questao_83', 'questionario.questao_84', 'questionario.questao_85', 'questionario.questao_86', 'questionario.questao_87', 'questionario.questao_88', 'questionario.questao_89', 'questionario.questao_90', 'questionario.questao_91', 'questionario.questao_92', 'questionario.questao_93', 'questionario.questao_94', 'questionario.questao_95', 'questionario.questao_96', 'questionario.questao_97', 'questionario.questao_98', 'questionario.questao_99', 'questionario.questao_100', 'questionario.questao_101', 'questionario.questao_102', 'questionario.questao_103', 'questionario.questao_104', 'questionario.questao_105', 'questionario.questao_106', 'questionario.questao_107', 'questionario.questao_108', 'questionario.questao_109', 'questionario.questao_110', 'questionario.questao_111', 'questionario.questao_112', 'questionario.questao_113', 'questionario.questao_114', 'questionario.questao_115', 'questionario.questao_116', 'questionario.questao_117', 'questionario.questao_118', 'questionario.questao_119', 'questionario.questao_120', 'questionario.questao_121', 'questionario.questao_122', 'questionario.questao_123', 'questionario.questao_124', 'questionario.questao_125', 'questionario.questao_126', 'questionario.questao_127', 'questionario.questao_128', 'questionario.questao_129', 'questionario.questao_130', 'questionario.questao_131', 'questionario.questao_132', 'questionario.questao_133', 'questionario.questao_134', 'questionario.questao_135', 'questionario.questao_136', 'questionario.questao_137', 'questionario.questao_138', 'questionario.questao_139', 'questionario.questao_140', 'questionario.questao_141', 'questionario.questao_142', 
'questionario.questao_143', 'questionario.questao_144', 'questionario.questao_145', 'questionario.questao_146', 'exame.data', 'exame.examinador', 'exame.anotador', 'exame.carie_coroa_18', 'exame.carie_tratamento_18', 'exame.carie_coroa_17', 'exame.carie_tratamento_17', 'exame.carie_coroa_16', 'exame.carie_tratamento_16', 'exame.carie_coroa_15', 'exame.carie_tratamento_15', 'exame.carie_coroa_14', 'exame.carie_tratamento_14', 'exame.carie_coroa_13', 'exame.carie_tratamento_13', 'exame.carie_coroa_12', 'exame.carie_tratamento_12', 'exame.carie_coroa_11', 'exame.carie_tratamento_11', 'exame.carie_coroa_21', 'exame.carie_tratamento_21', 'exame.carie_coroa_22', 'exame.carie_tratamento_22', 'exame.carie_coroa_23', 'exame.carie_tratamento_23', 'exame.carie_coroa_24', 'exame.carie_tratamento_24', 'exame.carie_coroa_25', 'exame.carie_tratamento_25', 'exame.carie_coroa_26', 'exame.carie_tratamento_26', 'exame.carie_coroa_27', 'exame.carie_tratamento_27', 'exame.carie_coroa_28', 'exame.carie_tratamento_28', 'exame.carie_coroa_38', 'exame.carie_tratamento_38', 'exame.carie_coroa_37', 'exame.carie_tratamento_37', 'exame.carie_coroa_36', 'exame.carie_tratamento_36', 'exame.carie_coroa_35', 'exame.carie_tratamento_35', 'exame.carie_coroa_34', 'exame.carie_tratamento_34', 'exame.carie_coroa_33', 'exame.carie_tratamento_33', 'exame.carie_coroa_32', 'exame.carie_tratamento_32', 'exame.carie_coroa_31', 'exame.carie_tratamento_31', 'exame.carie_coroa_41', 'exame.carie_tratamento_41', 'exame.carie_coroa_42', 'exame.carie_tratamento_42', 'exame.carie_coroa_43', 'exame.carie_tratamento_43', 'exame.carie_coroa_44', 'exame.carie_tratamento_44', 'exame.carie_coroa_45', 'exame.carie_tratamento_45', 'exame.carie_coroa_46', 'exame.carie_tratamento_46', 'exame.carie_coroa_47', 'exame.carie_tratamento_47', 'exame.carie_coroa_48', 'exame.carie_tratamento_48', 'exame.periodontal_sangramento_1716', 'exame.periodontal_calculo_1716', 'exame.periodontal_bolsa_1716', 'exame.periodontal_sangramento_11', 
'exame.periodontal_calculo_11', 'exame.periodontal_bolsa_11', 'exame.periodontal_sangramento_2627', 'exame.periodontal_calculo_2627', 'exame.periodontal_bolsa_2627', 'exame.periodontal_sangramento_3736', 'exame.periodontal_calculo_3736', 'exame.periodontal_bolsa_3736', 'exame.periodontal_sangramento_31', 'exame.periodontal_calculo_31', 'exame.periodontal_bolsa_31', 'exame.periodontal_sangramento_4647', 'exame.periodontal_calculo_4647', 'exame.periodontal_bolsa_4647', 'cpod',])
for aluno in alunos:
diretor = Diretor.objects.get(escola_id=aluno.escola_id)
resultados = {
"cpod": 0,
}
try:
exame = Exame.objects.get(aluno_id=aluno.id)
# cálculo do cpo-d
if exame.carie_coroa_18 == '1': resultados["cpod"] += 1
if exame.carie_coroa_18 == '2': resultados["cpod"] += 1
if exame.carie_coroa_18 == '3': resultados["cpod"] += 1
if exame.carie_coroa_18 == '4': resultados["cpod"] += 1
if exame.carie_coroa_18 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_18 == '6': resultados["cpod"] += 1
if exame.carie_coroa_17 == '1': resultados["cpod"] += 1
if exame.carie_coroa_17 == '2': resultados["cpod"] += 1
if exame.carie_coroa_17 == '3': resultados["cpod"] += 1
if exame.carie_coroa_17 == '4': resultados["cpod"] += 1
if exame.carie_coroa_17 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_17 == '6': resultados["cpod"] += 1
if exame.carie_coroa_16 == '1': resultados["cpod"] += 1
if exame.carie_coroa_16 == '2': resultados["cpod"] += 1
if exame.carie_coroa_16 == '3': resultados["cpod"] += 1
if exame.carie_coroa_16 == '4': resultados["cpod"] += 1
if exame.carie_coroa_16 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_16 == '6': resultados["cpod"] += 1
if exame.carie_coroa_15 == '1': resultados["cpod"] += 1
if exame.carie_coroa_15 == '2': resultados["cpod"] += 1
if exame.carie_coroa_15 == '3': resultados["cpod"] += 1
if exame.carie_coroa_15 == '4': resultados["cpod"] += 1
if exame.carie_coroa_15 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_15 == '6': resultados["cpod"] += 1
if exame.carie_coroa_14 == '1': resultados["cpod"] += 1
if exame.carie_coroa_14 == '2': resultados["cpod"] += 1
if exame.carie_coroa_14 == '3': resultados["cpod"] += 1
if exame.carie_coroa_14 == '4': resultados["cpod"] += 1
if exame.carie_coroa_14 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_14 == '6': resultados["cpod"] += 1
if exame.carie_coroa_13 == '1': resultados["cpod"] += 1
if exame.carie_coroa_13 == '2': resultados["cpod"] += 1
if exame.carie_coroa_13 == '3': resultados["cpod"] += 1
if exame.carie_coroa_13 == '4': resultados["cpod"] += 1
if exame.carie_coroa_13 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_13 == '6': resultados["cpod"] += 1
if exame.carie_coroa_12 == '1': resultados["cpod"] += 1
if exame.carie_coroa_12 == '2': resultados["cpod"] += 1
if exame.carie_coroa_12 == '3': resultados["cpod"] += 1
if exame.carie_coroa_12 == '4': resultados["cpod"] += 1
if exame.carie_coroa_12 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_12 == '6': resultados["cpod"] += 1
if exame.carie_coroa_11 == '1': resultados["cpod"] += 1
if exame.carie_coroa_11 == '2': resultados["cpod"] += 1
if exame.carie_coroa_11 == '3': resultados["cpod"] += 1
if exame.carie_coroa_11 == '4': resultados["cpod"] += 1
if exame.carie_coroa_11 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_11 == '6': resultados["cpod"] += 1
if exame.carie_coroa_21 == '1': resultados["cpod"] += 1
if exame.carie_coroa_21 == '2': resultados["cpod"] += 1
if exame.carie_coroa_21 == '3': resultados["cpod"] += 1
if exame.carie_coroa_21 == '4': resultados["cpod"] += 1
if exame.carie_coroa_21 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_21 == '6': resultados["cpod"] += 1
if exame.carie_coroa_22 == '1': resultados["cpod"] += 1
if exame.carie_coroa_22 == '2': resultados["cpod"] += 1
if exame.carie_coroa_22 == '3': resultados["cpod"] += 1
if exame.carie_coroa_22 == '4': resultados["cpod"] += 1
if exame.carie_coroa_22 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_22 == '6': resultados["cpod"] += 1
if exame.carie_coroa_23 == '1': resultados["cpod"] += 1
if exame.carie_coroa_23 == '2': resultados["cpod"] += 1
if exame.carie_coroa_23 == '3': resultados["cpod"] += 1
if exame.carie_coroa_23 == '4': resultados["cpod"] += 1
if exame.carie_coroa_23 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_23 == '6': resultados["cpod"] += 1
if exame.carie_coroa_24 == '1': resultados["cpod"] += 1
if exame.carie_coroa_24 == '2': resultados["cpod"] += 1
if exame.carie_coroa_24 == '3': resultados["cpod"] += 1
if exame.carie_coroa_24 == '4': resultados["cpod"] += 1
if exame.carie_coroa_24 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_24 == '6': resultados["cpod"] += 1
if exame.carie_coroa_25 == '1': resultados["cpod"] += 1
if exame.carie_coroa_25 == '2': resultados["cpod"] += 1
if exame.carie_coroa_25 == '3': resultados["cpod"] += 1
if exame.carie_coroa_25 == '4': resultados["cpod"] += 1
if exame.carie_coroa_25 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_25 == '6': resultados["cpod"] += 1
if exame.carie_coroa_26 == '1': resultados["cpod"] += 1
if exame.carie_coroa_26 == '2': resultados["cpod"] += 1
if exame.carie_coroa_26 == '3': resultados["cpod"] += 1
if exame.carie_coroa_26 == '4': resultados["cpod"] += 1
if exame.carie_coroa_26 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_26 == '6': resultados["cpod"] += 1
if exame.carie_coroa_27 == '1': resultados["cpod"] += 1
if exame.carie_coroa_27 == '2': resultados["cpod"] += 1
if exame.carie_coroa_27 == '3': resultados["cpod"] += 1
if exame.carie_coroa_27 == '4': resultados["cpod"] += 1
if exame.carie_coroa_27 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_27 == '6': resultados["cpod"] += 1
if exame.carie_coroa_28 == '1': resultados["cpod"] += 1
if exame.carie_coroa_28 == '2': resultados["cpod"] += 1
if exame.carie_coroa_28 == '3': resultados["cpod"] += 1
if exame.carie_coroa_28 == '4': resultados["cpod"] += 1
if exame.carie_coroa_28 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_28 == '6': resultados["cpod"] += 1
if exame.carie_coroa_38 == '1': resultados["cpod"] += 1
if exame.carie_coroa_38 == '2': resultados["cpod"] += 1
if exame.carie_coroa_38 == '3': resultados["cpod"] += 1
if exame.carie_coroa_38 == '4': resultados["cpod"] += 1
if exame.carie_coroa_38 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_38 == '6': resultados["cpod"] += 1
if exame.carie_coroa_37 == '1': resultados["cpod"] += 1
if exame.carie_coroa_37 == '2': resultados["cpod"] += 1
if exame.carie_coroa_37 == '3': resultados["cpod"] += 1
if exame.carie_coroa_37 == '4': resultados["cpod"] += 1
if exame.carie_coroa_37 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_37 == '6': resultados["cpod"] += 1
if exame.carie_coroa_36 == '1': resultados["cpod"] += 1
if exame.carie_coroa_36 == '2': resultados["cpod"] += 1
if exame.carie_coroa_36 == '3': resultados["cpod"] += 1
if exame.carie_coroa_36 == '4': resultados["cpod"] += 1
if exame.carie_coroa_36 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_36 == '6': resultados["cpod"] += 1
if exame.carie_coroa_35 == '1': resultados["cpod"] += 1
if exame.carie_coroa_35 == '2': resultados["cpod"] += 1
if exame.carie_coroa_35 == '3': resultados["cpod"] += 1
if exame.carie_coroa_35 == '4': resultados["cpod"] += 1
if exame.carie_coroa_35 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_35 == '6': resultados["cpod"] += 1
if exame.carie_coroa_34 == '1': resultados["cpod"] += 1
if exame.carie_coroa_34 == '2': resultados["cpod"] += 1
if exame.carie_coroa_34 == '3': resultados["cpod"] += 1
if exame.carie_coroa_34 == '4': resultados["cpod"] += 1
if exame.carie_coroa_34 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_34 == '6': resultados["cpod"] += 1
if exame.carie_coroa_33 == '1': resultados["cpod"] += 1
if exame.carie_coroa_33 == '2': resultados["cpod"] += 1
if exame.carie_coroa_33 == '3': resultados["cpod"] += 1
if exame.carie_coroa_33 == '4': resultados["cpod"] += 1
if exame.carie_coroa_33 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_33 == '6': resultados["cpod"] += 1
if exame.carie_coroa_32 == '1': resultados["cpod"] += 1
if exame.carie_coroa_32 == '2': resultados["cpod"] += 1
if exame.carie_coroa_32 == '3': resultados["cpod"] += 1
if exame.carie_coroa_32 == '4': resultados["cpod"] += 1
if exame.carie_coroa_32 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_32 == '6': resultados["cpod"] += 1
if exame.carie_coroa_31 == '1': resultados["cpod"] += 1
if exame.carie_coroa_31 == '2': resultados["cpod"] += 1
if exame.carie_coroa_31 == '3': resultados["cpod"] += 1
if exame.carie_coroa_31 == '4': resultados["cpod"] += 1
if exame.carie_coroa_31 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_31 == '6': resultados["cpod"] += 1
if exame.carie_coroa_41 == '1': resultados["cpod"] += 1
if exame.carie_coroa_41 == '2': resultados["cpod"] += 1
if exame.carie_coroa_41 == '3': resultados["cpod"] += 1
if exame.carie_coroa_41 == '4': resultados["cpod"] += 1
if exame.carie_coroa_41 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_41 == '6': resultados["cpod"] += 1
if exame.carie_coroa_42 == '1': resultados["cpod"] += 1
if exame.carie_coroa_42 == '2': resultados["cpod"] += 1
if exame.carie_coroa_42 == '3': resultados["cpod"] += 1
if exame.carie_coroa_42 == '4': resultados["cpod"] += 1
if exame.carie_coroa_42 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_42 == '6': resultados["cpod"] += 1
if exame.carie_coroa_43 == '1': resultados["cpod"] += 1
if exame.carie_coroa_43 == '2': resultados["cpod"] += 1
if exame.carie_coroa_43 == '3': resultados["cpod"] += 1
if exame.carie_coroa_43 == '4': resultados["cpod"] += 1
if exame.carie_coroa_43 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_43 == '6': resultados["cpod"] += 1
if exame.carie_coroa_44 == '1': resultados["cpod"] += 1
if exame.carie_coroa_44 == '2': resultados["cpod"] += 1
if exame.carie_coroa_44 == '3': resultados["cpod"] += 1
if exame.carie_coroa_44 == '4': resultados["cpod"] += 1
if exame.carie_coroa_44 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_44 == '6': resultados["cpod"] += 1
if exame.carie_coroa_45 == '1': resultados["cpod"] += 1
if exame.carie_coroa_45 == '2': resultados["cpod"] += 1
if exame.carie_coroa_45 == '3': resultados["cpod"] += 1
if exame.carie_coroa_45 == '4': resultados["cpod"] += 1
if exame.carie_coroa_45 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_45 == '6': resultados["cpod"] += 1
if exame.carie_coroa_46 == '1': resultados["cpod"] += 1
if exame.carie_coroa_46 == '2': resultados["cpod"] += 1
if exame.carie_coroa_46 == '3': resultados["cpod"] += 1
if exame.carie_coroa_46 == '4': resultados["cpod"] += 1
if exame.carie_coroa_46 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_46 == '6': resultados["cpod"] += 1
if exame.carie_coroa_47 == '1': resultados["cpod"] += 1
if exame.carie_coroa_47 == '2': resultados["cpod"] += 1
if exame.carie_coroa_47 == '3': resultados["cpod"] += 1
if exame.carie_coroa_47 == '4': resultados["cpod"] += 1
if exame.carie_coroa_47 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_47 == '6': resultados["cpod"] += 1
if exame.carie_coroa_48 == '1': resultados["cpod"] += 1
if exame.carie_coroa_48 == '2': resultados["cpod"] += 1
if exame.carie_coroa_48 == '3': resultados["cpod"] += 1
if exame.carie_coroa_48 == '4': resultados["cpod"] += 1
if exame.carie_coroa_48 == '5': resultados["cpod"] += 1
if exame.carie_tratamento_48 == '6': resultados["cpod"] += 1
except:
pass
tem_questionario = True
tem_exame = True
try:
questionario = Questionario.objects.get(aluno_id=aluno.id)
except:
tem_questionario = False
try:
exame = Exame.objects.get(aluno_id=aluno.id)
except:
tem_exame = False
if not tem_questionario and tem_exame:
writer.writerow([
aluno.escola.acao.campanha.nome,
aluno.escola.acao.nome,
aluno.escola.id,
aluno.escola.nome,
aluno.escola.latitude,
aluno.escola.longitude,
aluno.numero_identificacao,
aluno.periodo, # get__display()
aluno.turma,
aluno.nascimento,
aluno.sexo, # get__display()
aluno.raca, # get__display()
diretor.data,
diretor.questao_1,
diretor.questao_2, # get__display()
diretor.questao_3,
diretor.questao_4, # get__display()
diretor.questao_5, # get__display()
diretor.questao_6, # get__display()
diretor.questao_7, # get__display()
diretor.questao_8, # get__display()
diretor.questao_9, # get__display()
diretor.questao_10, # get__display()
diretor.questao_11, # get__display()
diretor.questao_12, # get__display()
diretor.questao_13, # get__display()
diretor.questao_14, # get__display()
diretor.questao_15, # get__display()
diretor.questao_16, # get__display()
diretor.questao_17, # get__display()
diretor.questao_18, # get__display()
diretor.questao_19, # get__display()
diretor.questao_20, # get__display()
diretor.questao_21, # get__display()
diretor.questao_22, # get__display()
diretor.questao_23, # get__display()
diretor.questao_24, # get__display()
diretor.questao_25, # get__display()
diretor.questao_26, # get__display()
diretor.questao_27, # get__display()
diretor.questao_28,
diretor.questao_29, # get__display()
diretor.questao_30, # get__display()
diretor.questao_31,
diretor.questao_32, # get__display()
diretor.questao_33,
diretor.questao_34, # get__display()
diretor.questao_35, # get__display()
diretor.questao_36, # get__display()
diretor.questao_37,
diretor.questao_38, # get__display()
diretor.questao_39,
diretor.questao_40, # get__display()
diretor.questao_41, # get__display()
diretor.questao_42, # get__display()
diretor.questao_43, # get__display()
diretor.questao_44, # get__display()
diretor.questao_45, # get__display()
diretor.questao_46, # get__display()
diretor.questao_47, # get__display()
diretor.questao_48, # get__display()
diretor.questao_49, # get__display()
diretor.questao_50, # get__display()
diretor.questao_51, # get__display()
diretor.questao_52, # get__display()
diretor.questao_53, # get__display()
diretor.questao_54, # get__display()
diretor.questao_55, # get__display()
diretor.questao_56, # get__display()
diretor.questao_57, # get__display()
diretor.questao_58, # get__display()
diretor.questao_59, # get__display()
diretor.questao_60, # get__display()
diretor.questao_61, # get__display()
diretor.questao_62, # get__display()
diretor.questao_63, # get__display()
diretor.questao_64, # get__display()
diretor.questao_65, # get__display()
diretor.questao_66, # get__display()
diretor.questao_67, # get__display()
diretor.questao_68, # get__display()
diretor.questao_69, # get__display()
diretor.questao_70, # get__display()
diretor.questao_71, # get__display()
diretor.questao_72, # get__display()
diretor.questao_73, # get__display()
diretor.questao_74, # get__display()
diretor.questao_75, # get__display()
diretor.questao_76, # get__display()
diretor.questao_77, # get__display()
diretor.questao_78, # get__display()
diretor.questao_79, # get__display()
diretor.questao_80, # get__display()
diretor.questao_81, # get__display()
diretor.questao_82, # get__display()
diretor.questao_83, # get__display()
diretor.questao_84, # get__display()
None,
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None,
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None, # get__display()
None,
exame.data,
exame.examinador,
exame.anotador,
exame.carie_coroa_18,
exame.carie_tratamento_18,
exame.carie_coroa_17,
exame.carie_tratamento_17,
exame.carie_coroa_16,
exame.carie_tratamento_16,
exame.carie_coroa_15,
exame.carie_tratamento_15,
exame.carie_coroa_14,
exame.carie_tratamento_14,
exame.carie_coroa_13,
exame.carie_tratamento_13,
exame.carie_coroa_12,
exame.carie_tratamento_12,
exame.carie_coroa_11,
exame.carie_tratamento_11,
exame.carie_coroa_21,
exame.carie_tratamento_21,
exame.carie_coroa_22,
exame.carie_tratamento_22,
exame.carie_coroa_23,
exame.carie_tratamento_23,
exame.carie_coroa_24,
exame.carie_tratamento_24,
exame.carie_coroa_25,
exame.carie_tratamento_25,
exame.carie_coroa_26,
exame.carie_tratamento_26,
exame.carie_coroa_27,
exame.carie_tratamento_27,
exame.carie_coroa_28,
exame.carie_tratamento_28,
exame.carie_coroa_38,
exame.carie_tratamento_38,
exame.carie_coroa_37,
exame.carie_tratamento_37,
exame.carie_coroa_36,
exame.carie_tratamento_36,
exame.carie_coroa_35,
exame.carie_tratamento_35,
exame.carie_coroa_34,
exame.carie_tratamento_34,
exame.carie_coroa_33,
exame.carie_tratamento_33,
exame.carie_coroa_32,
exame.carie_tratamento_32,
exame.carie_coroa_31,
exame.carie_tratamento_31,
exame.carie_coroa_41,
exame.carie_tratamento_41,
exame.carie_coroa_42,
exame.carie_tratamento_42,
exame.carie_coroa_43,
exame.carie_tratamento_43,
exame.carie_coroa_44,
exame.carie_tratamento_44,
exame.carie_coroa_45,
exame.carie_tratamento_45,
exame.carie_coroa_46,
exame.carie_tratamento_46,
exame.carie_coroa_47,
exame.carie_tratamento_47,
exame.carie_coroa_48,
exame.carie_tratamento_48,
exame.periodontal_sangramento_1716,
exame.periodontal_calculo_1716,
exame.periodontal_bolsa_1716,
exame.periodontal_sangramento_11,
exame.periodontal_calculo_11,
exame.periodontal_bolsa_11,
exame.periodontal_sangramento_2627,
exame.periodontal_calculo_2627,
exame.periodontal_bolsa_2627,
exame.periodontal_sangramento_3736,
exame.periodontal_calculo_3736,
exame.periodontal_bolsa_3736,
exame.periodontal_sangramento_31,
exame.periodontal_calculo_31,
exame.periodontal_bolsa_31,
exame.periodontal_sangramento_4647,
exame.periodontal_calculo_4647,
exame.periodontal_bolsa_4647,
resultados["cpod"],
])
continue
if tem_questionario and not tem_exame:
writer.writerow([
aluno.escola.acao.campanha.nome,
aluno.escola.acao.nome,
aluno.escola.id,
aluno.escola.nome,
aluno.escola.latitude,
aluno.escola.longitude,
aluno.numero_identificacao,
aluno.periodo, # get__display()
aluno.turma,
aluno.nascimento,
aluno.sexo, # get__display()
aluno.raca, # get__display()
diretor.data,
diretor.questao_1,
diretor.questao_2, # get__display()
diretor.questao_3,
diretor.questao_4, # get__display()
diretor.questao_5, # get__display()
diretor.questao_6, # get__display()
diretor.questao_7, # get__display()
diretor.questao_8, # get__display()
diretor.questao_9, # get__display()
diretor.questao_10, # get__display()
diretor.questao_11, # get__display()
diretor.questao_12, # get__display()
diretor.questao_13, # get__display()
diretor.questao_14, # get__display()
diretor.questao_15, # get__display()
diretor.questao_16, # get__display()
diretor.questao_17, # get__display()
diretor.questao_18, # get__display()
diretor.questao_19, # get__display()
diretor.questao_20, # get__display()
diretor.questao_21, # get__display()
diretor.questao_22, # get__display()
diretor.questao_23, # get__display()
diretor.questao_24, # get__display()
diretor.questao_25, # get__display()
diretor.questao_26, # get__display()
diretor.questao_27, # get__display()
diretor.questao_28,
diretor.questao_29, # get__display()
diretor.questao_30, # get__display()
diretor.questao_31,
diretor.questao_32, # get__display()
diretor.questao_33,
diretor.questao_34, # get__display()
diretor.questao_35, # get__display()
diretor.questao_36, # get__display()
diretor.questao_37,
diretor.questao_38, # get__display()
diretor.questao_39,
diretor.questao_40, # get__display()
diretor.questao_41, # get__display()
diretor.questao_42, # get__display()
diretor.questao_43, # get__display()
diretor.questao_44, # get__display()
diretor.questao_45, # get__display()
diretor.questao_46, # get__display()
diretor.questao_47, # get__display()
diretor.questao_48, # get__display()
diretor.questao_49, # get__display()
diretor.questao_50, # get__display()
diretor.questao_51, # get__display()
diretor.questao_52, # get__display()
diretor.questao_53, # get__display()
diretor.questao_54, # get__display()
diretor.questao_55, # get__display()
diretor.questao_56, # get__display()
diretor.questao_57, # get__display()
diretor.questao_58, # get__display()
diretor.questao_59, # get__display()
diretor.questao_60, # get__display()
diretor.questao_61, # get__display()
diretor.questao_62, # get__display()
diretor.questao_63, # get__display()
diretor.questao_64, # get__display()
diretor.questao_65, # get__display()
diretor.questao_66, # get__display()
diretor.questao_67, # get__display()
diretor.questao_68, # get__display()
diretor.questao_69, # get__display()
diretor.questao_70, # get__display()
diretor.questao_71, # get__display()
diretor.questao_72, # get__display()
diretor.questao_73, # get__display()
diretor.questao_74, # get__display()
diretor.questao_75, # get__display()
diretor.questao_76, # get__display()
diretor.questao_77, # get__display()
diretor.questao_78, # get__display()
diretor.questao_79, # get__display()
diretor.questao_80, # get__display()
diretor.questao_81, # get__display()
diretor.questao_82, # get__display()
diretor.questao_83, # get__display()
diretor.questao_84, # get__display()
questionario.data,
questionario.questao_1, # get__display()
questionario.questao_2, # get__display()
questionario.questao_3, # get__display()
questionario.questao_4, # get__display()
questionario.questao_5, # get__display()
questionario.questao_6, # get__display()
questionario.questao_7, # get__display()
questionario.questao_8, # get__display()
questionario.questao_9, # get__display()
questionario.questao_10, # get__display()
questionario.questao_11, # get__display()
questionario.questao_12, # get__display()
questionario.questao_13, # get__display()
questionario.questao_14, # get__display()
questionario.questao_15, # get__display()
questionario.questao_16, # get__display()
questionario.questao_17, # get__display()
questionario.questao_18, # get__display()
questionario.questao_19, # get__display()
questionario.questao_20, # get__display()
questionario.questao_21, # get__display()
questionario.questao_22, # get__display()
questionario.questao_23, # get__display()
questionario.questao_24, # get__display()
questionario.questao_25, # get__display()
questionario.questao_26, # get__display()
questionario.questao_27, # get__display()
questionario.questao_28, # get__display()
questionario.questao_29, # get__display()
questionario.questao_30, # get__display()
questionario.questao_31, # get__display()
questionario.questao_32, # get__display()
questionario.questao_33, # get__display()
questionario.questao_34, # get__display()
questionario.questao_35, # get__display()
questionario.questao_36, # get__display()
questionario.questao_37, # get__display()
questionario.questao_38, # get__display()
questionario.questao_39, # get__display()
questionario.questao_40, # get__display()
questionario.questao_41, # get__display()
questionario.questao_42, # get__display()
questionario.questao_43, # get__display()
questionario.questao_44, # get__display()
questionario.questao_45, # get__display()
questionario.questao_46, # get__display()
questionario.questao_47, # get__display()
questionario.questao_48, # get__display()
questionario.questao_49, # get__display()
questionario.questao_50, # get__display()
questionario.questao_51, # get__display()
questionario.questao_52, # get__display()
questionario.questao_53, # get__display()
questionario.questao_54, # get__display()
questionario.questao_55, # get__display()
questionario.questao_56, # get__display()
questionario.questao_57, # get__display()
questionario.questao_58, # get__display()
questionario.questao_59, # get__display()
questionario.questao_60,
questionario.questao_61, # get__display()
questionario.questao_62, # get__display()
questionario.questao_63, # get__display()
questionario.questao_64, # get__display()
questionario.questao_65, # get__display()
questionario.questao_66, # get__display()
questionario.questao_67, # get__display()
questionario.questao_68, # get__display()
questionario.questao_69, # get__display()
questionario.questao_70, # get__display()
questionario.questao_71, # get__display()
questionario.questao_72, # get__display()
questionario.questao_73, # get__display()
questionario.questao_74, # get__display()
questionario.questao_75, # get__display()
questionario.questao_76, # get__display()
questionario.questao_77, # get__display()
questionario.questao_78, # get__display()
questionario.questao_79, # get__display()
questionario.questao_80, # get__display()
questionario.questao_81, # get__display()
questionario.questao_82, # get__display()
questionario.questao_83, # get__display()
questionario.questao_84, # get__display()
questionario.questao_85, # get__display()
questionario.questao_86, # get__display()
questionario.questao_87, # get__display()
questionario.questao_88, # get__display()
questionario.questao_89, # get__display()
questionario.questao_90, # get__display()
questionario.questao_91, # get__display()
questionario.questao_92, # get__display()
questionario.questao_93, # get__display()
questionario.questao_94, # get__display()
questionario.questao_95, # get__display()
questionario.questao_96, # get__display()
questionario.questao_97, # get__display()
questionario.questao_98, # get__display()
questionario.questao_99, # get__display()
questionario.questao_100, # get__display()
questionario.questao_101, # get__display()
questionario.questao_102, # get__display()
questionario.questao_103, # get__display()
questionario.questao_104, # get__display()
questionario.questao_105, # get__display()
questionario.questao_106, # get__display()
questionario.questao_107, # get__display()
questionario.questao_108, # get__display()
questionario.questao_109, # get__display()
questionario.questao_110, # get__display()
questionario.questao_111, # get__display()
questionario.questao_112, # get__display()
questionario.questao_113, # get__display()
questionario.questao_114, # get__display()
questionario.questao_115, # get__display()
questionario.questao_116, # get__display()
questionario.questao_117, # get__display()
questionario.questao_118, # get__display()
questionario.questao_119, # get__display()
questionario.questao_120, # get__display()
questionario.questao_121, # get__display()
questionario.questao_122, # get__display()
questionario.questao_123, # get__display()
questionario.questao_124, # get__display()
questionario.questao_125, # get__display()
questionario.questao_126, # get__display()
questionario.questao_127, # get__display()
questionario.questao_128, # get__display()
questionario.questao_129, # get__display()
questionario.questao_130, # get__display()
questionario.questao_131, # get__display()
questionario.questao_132, # get__display()
questionario.questao_133, # get__display()
questionario.questao_134, # get__display()
questionario.questao_135, # get__display()
questionario.questao_136, # get__display()
questionario.questao_137, # get__display()
questionario.questao_138, # get__display()
questionario.questao_139, # get__display()
questionario.questao_140, # get__display()
questionario.questao_141, # get__display()
questionario.questao_142, # get__display()
questionario.questao_143, # get__display()
questionario.questao_144, # get__display()
questionario.questao_145, # get__display()
questionario.questao_146,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
])
continue
# if questionario.questao_55 == '1':
# questionario.questao_55 = 'Não'
# Case: student has BOTH the questionnaire and the dental exam — emit the full-width
# row: school/student columns, director questionnaire (84 questions), student
# questionnaire (146 questions), exam data (caries per tooth, periodontal indices)
# and the computed CPOD. Column ORDER must match the header row written elsewhere
# in this method — do not reorder.
# NOTE(review): the trailing "# get__display()" markers appear to flag choice
# fields where Django's get_<field>_display() could be used instead of the raw
# stored value — confirm against the model definitions before acting on them.
if tem_questionario and tem_exame:
writer.writerow([
aluno.escola.acao.campanha.nome,
aluno.escola.acao.nome,
aluno.escola.id,
aluno.escola.nome,
aluno.escola.latitude,
aluno.escola.longitude,
aluno.numero_identificacao,
aluno.periodo, # get__display()
aluno.turma,
aluno.nascimento,
aluno.sexo, # get__display()
aluno.raca, # get__display()
diretor.data,
diretor.questao_1,
diretor.questao_2, # get__display()
diretor.questao_3,
diretor.questao_4, # get__display()
diretor.questao_5, # get__display()
diretor.questao_6, # get__display()
diretor.questao_7, # get__display()
diretor.questao_8, # get__display()
diretor.questao_9, # get__display()
diretor.questao_10, # get__display()
diretor.questao_11, # get__display()
diretor.questao_12, # get__display()
diretor.questao_13, # get__display()
diretor.questao_14, # get__display()
diretor.questao_15, # get__display()
diretor.questao_16, # get__display()
diretor.questao_17, # get__display()
diretor.questao_18, # get__display()
diretor.questao_19, # get__display()
diretor.questao_20, # get__display()
diretor.questao_21, # get__display()
diretor.questao_22, # get__display()
diretor.questao_23, # get__display()
diretor.questao_24, # get__display()
diretor.questao_25, # get__display()
diretor.questao_26, # get__display()
diretor.questao_27, # get__display()
diretor.questao_28,
diretor.questao_29, # get__display()
diretor.questao_30, # get__display()
diretor.questao_31,
diretor.questao_32, # get__display()
diretor.questao_33,
diretor.questao_34, # get__display()
diretor.questao_35, # get__display()
diretor.questao_36, # get__display()
diretor.questao_37,
diretor.questao_38, # get__display()
diretor.questao_39,
diretor.questao_40, # get__display()
diretor.questao_41, # get__display()
diretor.questao_42, # get__display()
diretor.questao_43, # get__display()
diretor.questao_44, # get__display()
diretor.questao_45, # get__display()
diretor.questao_46, # get__display()
diretor.questao_47, # get__display()
diretor.questao_48, # get__display()
diretor.questao_49, # get__display()
diretor.questao_50, # get__display()
diretor.questao_51, # get__display()
diretor.questao_52, # get__display()
diretor.questao_53, # get__display()
diretor.questao_54, # get__display()
diretor.questao_55, # get__display()
diretor.questao_56, # get__display()
diretor.questao_57, # get__display()
diretor.questao_58, # get__display()
diretor.questao_59, # get__display()
diretor.questao_60, # get__display()
diretor.questao_61, # get__display()
diretor.questao_62, # get__display()
diretor.questao_63, # get__display()
diretor.questao_64, # get__display()
diretor.questao_65, # get__display()
diretor.questao_66, # get__display()
diretor.questao_67, # get__display()
diretor.questao_68, # get__display()
diretor.questao_69, # get__display()
diretor.questao_70, # get__display()
diretor.questao_71, # get__display()
diretor.questao_72, # get__display()
diretor.questao_73, # get__display()
diretor.questao_74, # get__display()
diretor.questao_75, # get__display()
diretor.questao_76, # get__display()
diretor.questao_77, # get__display()
diretor.questao_78, # get__display()
diretor.questao_79, # get__display()
diretor.questao_80, # get__display()
diretor.questao_81, # get__display()
diretor.questao_82, # get__display()
diretor.questao_83, # get__display()
diretor.questao_84, # get__display()
# Student questionnaire: date + 146 question columns.
questionario.data,
questionario.questao_1, # get__display()
questionario.questao_2, # get__display()
questionario.questao_3, # get__display()
questionario.questao_4, # get__display()
questionario.questao_5, # get__display()
questionario.questao_6, # get__display()
questionario.questao_7, # get__display()
questionario.questao_8, # get__display()
questionario.questao_9, # get__display()
questionario.questao_10, # get__display()
questionario.questao_11, # get__display()
questionario.questao_12, # get__display()
questionario.questao_13, # get__display()
questionario.questao_14, # get__display()
questionario.questao_15, # get__display()
questionario.questao_16, # get__display()
questionario.questao_17, # get__display()
questionario.questao_18, # get__display()
questionario.questao_19, # get__display()
questionario.questao_20, # get__display()
questionario.questao_21, # get__display()
questionario.questao_22, # get__display()
questionario.questao_23, # get__display()
questionario.questao_24, # get__display()
questionario.questao_25, # get__display()
questionario.questao_26, # get__display()
questionario.questao_27, # get__display()
questionario.questao_28, # get__display()
questionario.questao_29, # get__display()
questionario.questao_30, # get__display()
questionario.questao_31, # get__display()
questionario.questao_32, # get__display()
questionario.questao_33, # get__display()
questionario.questao_34, # get__display()
questionario.questao_35, # get__display()
questionario.questao_36, # get__display()
questionario.questao_37, # get__display()
questionario.questao_38, # get__display()
questionario.questao_39, # get__display()
questionario.questao_40, # get__display()
questionario.questao_41, # get__display()
questionario.questao_42, # get__display()
questionario.questao_43, # get__display()
questionario.questao_44, # get__display()
questionario.questao_45, # get__display()
questionario.questao_46, # get__display()
questionario.questao_47, # get__display()
questionario.questao_48, # get__display()
questionario.questao_49, # get__display()
questionario.questao_50, # get__display()
questionario.questao_51, # get__display()
questionario.questao_52, # get__display()
questionario.questao_53, # get__display()
questionario.questao_54, # get__display()
questionario.questao_55, # get__display()
questionario.questao_56, # get__display()
questionario.questao_57, # get__display()
questionario.questao_58, # get__display()
questionario.questao_59, # get__display()
questionario.questao_60,
questionario.questao_61, # get__display()
questionario.questao_62, # get__display()
questionario.questao_63, # get__display()
questionario.questao_64, # get__display()
questionario.questao_65, # get__display()
questionario.questao_66, # get__display()
questionario.questao_67, # get__display()
questionario.questao_68, # get__display()
questionario.questao_69, # get__display()
questionario.questao_70, # get__display()
questionario.questao_71, # get__display()
questionario.questao_72, # get__display()
questionario.questao_73, # get__display()
questionario.questao_74, # get__display()
questionario.questao_75, # get__display()
questionario.questao_76, # get__display()
questionario.questao_77, # get__display()
questionario.questao_78, # get__display()
questionario.questao_79, # get__display()
questionario.questao_80, # get__display()
questionario.questao_81, # get__display()
questionario.questao_82, # get__display()
questionario.questao_83, # get__display()
questionario.questao_84, # get__display()
questionario.questao_85, # get__display()
questionario.questao_86, # get__display()
questionario.questao_87, # get__display()
questionario.questao_88, # get__display()
questionario.questao_89, # get__display()
questionario.questao_90, # get__display()
questionario.questao_91, # get__display()
questionario.questao_92, # get__display()
questionario.questao_93, # get__display()
questionario.questao_94, # get__display()
questionario.questao_95, # get__display()
questionario.questao_96, # get__display()
questionario.questao_97, # get__display()
questionario.questao_98, # get__display()
questionario.questao_99, # get__display()
questionario.questao_100, # get__display()
questionario.questao_101, # get__display()
questionario.questao_102, # get__display()
questionario.questao_103, # get__display()
questionario.questao_104, # get__display()
questionario.questao_105, # get__display()
questionario.questao_106, # get__display()
questionario.questao_107, # get__display()
questionario.questao_108, # get__display()
questionario.questao_109, # get__display()
questionario.questao_110, # get__display()
questionario.questao_111, # get__display()
questionario.questao_112, # get__display()
questionario.questao_113, # get__display()
questionario.questao_114, # get__display()
questionario.questao_115, # get__display()
questionario.questao_116, # get__display()
questionario.questao_117, # get__display()
questionario.questao_118, # get__display()
questionario.questao_119, # get__display()
questionario.questao_120, # get__display()
questionario.questao_121, # get__display()
questionario.questao_122, # get__display()
questionario.questao_123, # get__display()
questionario.questao_124, # get__display()
questionario.questao_125, # get__display()
questionario.questao_126, # get__display()
questionario.questao_127, # get__display()
questionario.questao_128, # get__display()
questionario.questao_129, # get__display()
questionario.questao_130, # get__display()
questionario.questao_131, # get__display()
questionario.questao_132, # get__display()
questionario.questao_133, # get__display()
questionario.questao_134, # get__display()
questionario.questao_135, # get__display()
questionario.questao_136, # get__display()
questionario.questao_137, # get__display()
questionario.questao_138, # get__display()
questionario.questao_139, # get__display()
questionario.questao_140, # get__display()
questionario.questao_141, # get__display()
questionario.questao_142, # get__display()
questionario.questao_143, # get__display()
questionario.questao_144, # get__display()
questionario.questao_145, # get__display()
questionario.questao_146,
# Dental exam: metadata, then crown-condition/treatment pairs per tooth
# (FDI tooth numbers: upper right 18..11, upper left 21..28, lower left
# 38..31, lower right 41..48), then periodontal indices per sextant.
exame.data,
exame.examinador,
exame.anotador,
exame.carie_coroa_18,
exame.carie_tratamento_18,
exame.carie_coroa_17,
exame.carie_tratamento_17,
exame.carie_coroa_16,
exame.carie_tratamento_16,
exame.carie_coroa_15,
exame.carie_tratamento_15,
exame.carie_coroa_14,
exame.carie_tratamento_14,
exame.carie_coroa_13,
exame.carie_tratamento_13,
exame.carie_coroa_12,
exame.carie_tratamento_12,
exame.carie_coroa_11,
exame.carie_tratamento_11,
exame.carie_coroa_21,
exame.carie_tratamento_21,
exame.carie_coroa_22,
exame.carie_tratamento_22,
exame.carie_coroa_23,
exame.carie_tratamento_23,
exame.carie_coroa_24,
exame.carie_tratamento_24,
exame.carie_coroa_25,
exame.carie_tratamento_25,
exame.carie_coroa_26,
exame.carie_tratamento_26,
exame.carie_coroa_27,
exame.carie_tratamento_27,
exame.carie_coroa_28,
exame.carie_tratamento_28,
exame.carie_coroa_38,
exame.carie_tratamento_38,
exame.carie_coroa_37,
exame.carie_tratamento_37,
exame.carie_coroa_36,
exame.carie_tratamento_36,
exame.carie_coroa_35,
exame.carie_tratamento_35,
exame.carie_coroa_34,
exame.carie_tratamento_34,
exame.carie_coroa_33,
exame.carie_tratamento_33,
exame.carie_coroa_32,
exame.carie_tratamento_32,
exame.carie_coroa_31,
exame.carie_tratamento_31,
exame.carie_coroa_41,
exame.carie_tratamento_41,
exame.carie_coroa_42,
exame.carie_tratamento_42,
exame.carie_coroa_43,
exame.carie_tratamento_43,
exame.carie_coroa_44,
exame.carie_tratamento_44,
exame.carie_coroa_45,
exame.carie_tratamento_45,
exame.carie_coroa_46,
exame.carie_tratamento_46,
exame.carie_coroa_47,
exame.carie_tratamento_47,
exame.carie_coroa_48,
exame.carie_tratamento_48,
exame.periodontal_sangramento_1716,
exame.periodontal_calculo_1716,
exame.periodontal_bolsa_1716,
exame.periodontal_sangramento_11,
exame.periodontal_calculo_11,
exame.periodontal_bolsa_11,
exame.periodontal_sangramento_2627,
exame.periodontal_calculo_2627,
exame.periodontal_bolsa_2627,
exame.periodontal_sangramento_3736,
exame.periodontal_calculo_3736,
exame.periodontal_bolsa_3736,
exame.periodontal_sangramento_31,
exame.periodontal_calculo_31,
exame.periodontal_bolsa_31,
exame.periodontal_sangramento_4647,
exame.periodontal_calculo_4647,
exame.periodontal_bolsa_4647,
resultados["cpod"],
])
if not tem_questionario and not tem_exame:
writer.writerow([
aluno.escola.acao.campanha.nome,
aluno.escola.acao.nome,
aluno.escola.id,
aluno.escola.nome,
aluno.escola.latitude,
aluno.escola.longitude,
aluno.numero_identificacao,
aluno.periodo,
aluno.turma,
aluno.nascimento,
aluno.sexo,
aluno.raca,
diretor.data,
diretor.questao_1,
diretor.questao_2,
diretor.questao_3,
diretor.questao_4,
diretor.questao_5,
diretor.questao_6,
diretor.questao_7,
diretor.questao_8,
diretor.questao_9,
diretor.questao_10,
diretor.questao_11,
diretor.questao_12,
diretor.questao_13,
diretor.questao_14,
diretor.questao_15,
diretor.questao_16,
diretor.questao_17,
diretor.questao_18,
diretor.questao_19,
diretor.questao_20,
diretor.questao_21,
diretor.questao_22,
diretor.questao_23,
diretor.questao_24,
diretor.questao_25,
diretor.questao_26,
diretor.questao_27,
diretor.questao_28,
diretor.questao_29,
diretor.questao_30,
diretor.questao_31,
diretor.questao_32,
diretor.questao_33,
diretor.questao_34,
diretor.questao_35,
diretor.questao_36,
diretor.questao_37,
diretor.questao_38,
diretor.questao_39,
diretor.questao_40,
diretor.questao_41,
diretor.questao_42,
diretor.questao_43,
diretor.questao_44,
diretor.questao_45,
diretor.questao_46,
diretor.questao_47,
diretor.questao_48,
diretor.questao_49,
diretor.questao_50,
diretor.questao_51,
diretor.questao_52,
diretor.questao_53,
diretor.questao_54,
diretor.questao_55,
diretor.questao_56,
diretor.questao_57,
diretor.questao_58,
diretor.questao_59,
diretor.questao_60,
diretor.questao_61,
diretor.questao_62,
diretor.questao_63,
diretor.questao_64,
diretor.questao_65,
diretor.questao_66,
diretor.questao_67,
diretor.questao_68,
diretor.questao_69,
diretor.questao_70,
diretor.questao_71,
diretor.questao_72,
diretor.questao_73,
diretor.questao_74,
diretor.questao_75,
diretor.questao_76,
diretor.questao_77,
diretor.questao_78,
diretor.questao_79,
diretor.questao_80,
diretor.questao_81,
diretor.questao_82,
diretor.questao_83,
diretor.questao_84,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
])
return response
class DownloadAlunoQuestionarioExameView(LoginRequired, View):
    """Generate and send the CSV file for students that have both a
    questionnaire and an exam.

    Students missing either record are silently skipped (best-effort
    export, matching the original behavior).
    """

    # FDI tooth numbers in the exact column order of the original CSV
    # layout (quadrants 1, 2, 3, 4).
    DENTES = (18, 17, 16, 15, 14, 13, 12, 11,
              21, 22, 23, 24, 25, 26, 27, 28,
              38, 37, 36, 35, 34, 33, 32, 31,
              41, 42, 43, 44, 45, 46, 47, 48)
    # Periodontal sextant site labels, in original column order.
    SEXTANTES = ('1716', '11', '2627', '3736', '31', '4647')
    # The Questionario model exposes questao_1 .. questao_146.
    NUM_QUESTOES = 146

    def get(self, request):
        """Return a ';'-delimited CSV response, one row per student that
        has both a Questionario and an Exame record."""
        alunos = Aluno.objects.order_by('id')
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="aluno_questionario_exame.csv"'
        writer = csv.writer(response, delimiter=';')

        # Build the model field names once; header and data rows are
        # derived from the same lists so they can never drift apart.
        questao_fields = ['questao_%d' % i
                          for i in range(1, self.NUM_QUESTOES + 1)]
        exame_fields = []
        for dente in self.DENTES:
            exame_fields.append('carie_coroa_%d' % dente)
            exame_fields.append('carie_tratamento_%d' % dente)
        for sextante in self.SEXTANTES:
            exame_fields.append('periodontal_sangramento_%s' % sextante)
            exame_fields.append('periodontal_calculo_%s' % sextante)
            exame_fields.append('periodontal_bolsa_%s' % sextante)

        writer.writerow(
            ['escola.nome', 'aluno.numero_identificacao', 'aluno.periodo',
             'aluno.turma', 'aluno.nascimento', 'aluno.sexo', 'aluno.raca',
             'questionario.data']
            + ['questionario.%s' % campo for campo in questao_fields]
            + ['exame.data', 'exame.examinador', 'exame.anotador']
            + ['exame.%s' % campo for campo in exame_fields])

        for aluno in alunos:
            try:
                questionario = Questionario.objects.get(aluno_id=aluno.id)
                exame = Exame.objects.get(aluno_id=aluno.id)
            except (Questionario.DoesNotExist, Exame.DoesNotExist):
                # Student has no questionnaire or no exam: skip the row.
                # (Narrowed from a bare `except:` that hid real errors.)
                continue
            # NOTE(review): raw stored values are exported; for choice
            # fields, get_<campo>_display() would export labels instead.
            writer.writerow(
                [aluno.escola.nome, aluno.numero_identificacao,
                 aluno.periodo, aluno.turma, aluno.nascimento,
                 aluno.sexo, aluno.raca, questionario.data]
                + [getattr(questionario, campo) for campo in questao_fields]
                + [exame.data, exame.examinador, exame.anotador]
                + [getattr(exame, campo) for campo in exame_fields])
        return response
class DownloadListaCampanhasView(LoginRequired, View):
    """Generate and send the CSV file with the list of campaigns."""

    def get(self, request):
        """Return a ';'-delimited CSV response with one campaign per row."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_campanhas.csv"'
        writer = csv.writer(response, delimiter=';')
        writer.writerow(['campanha.nome',])
        # Stream one single-column row per campaign, ordered by id.
        writer.writerows(
            [item.nome] for item in Campanha.objects.order_by('id'))
        return response
class DownloadListaAcoesView(LoginRequired, View):
    """Generate and send the CSV file with the list of actions."""

    def get(self, request):
        """Return a ';'-delimited CSV response with one action per row."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_acoes.csv"'
        writer = csv.writer(response, delimiter=';')
        writer.writerow(['acao.nome',])
        # Stream one single-column row per action, ordered by id.
        writer.writerows(
            [item.nome] for item in Acao.objects.order_by('id'))
        return response
class DownloadListaEscolasView(LoginRequired, View):
    """Generate and send the CSV file with the list of schools."""

    def get(self, request):
        """Return a ';'-delimited CSV response with one school per row."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_escolas.csv"'
        writer = csv.writer(response, delimiter=';')
        writer.writerow(['escola.nome', 'escola.latitude', 'escola.longitude'])
        # Name plus geographic coordinates, ordered by id.
        for registro in Escola.objects.order_by('id'):
            writer.writerow(
                [registro.nome, registro.latitude, registro.longitude])
        return response
class DownloadListaAlunosView(LoginRequired, View):
    """Generate and send the CSV file with the list of students."""

    def get(self, request):
        """Return a ';'-delimited CSV response with one student per row."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_alunos.csv"'
        writer = csv.writer(response, delimiter=';')
        writer.writerow(['aluno.escola.nome', 'aluno.numero_identificacao', 'aluno.periodo', 'aluno.turma', 'aluno.nascimento', 'aluno.sexo', 'aluno.raca',])
        # Raw stored values are exported; choice fields (periodo, sexo,
        # raca) keep their codes rather than get_<field>_display() labels.
        linhas = (
            [registro.escola.nome, registro.numero_identificacao,
             registro.periodo, registro.turma, registro.nascimento,
             registro.sexo, registro.raca]
            for registro in Aluno.objects.order_by('id'))
        writer.writerows(linhas)
        return response
class DownloadListaQuestionariosView(LoginRequired, View):
    """Generate and send the CSV file with the list of questionnaires."""

    # The Questionario model exposes questao_1 .. questao_146.
    NUM_QUESTOES = 146

    def get(self, request):
        """Return a ';'-delimited CSV response, one questionnaire per row."""
        questionarios = Questionario.objects.order_by('id')
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_questionarios.csv"'
        writer = csv.writer(response, delimiter=';')
        # Generate the 146 question field names once; header and data
        # rows use the same list so they can never drift apart.
        campos = ['questao_%d' % i
                  for i in range(1, self.NUM_QUESTOES + 1)]
        writer.writerow(
            ['aluno.numero_identificacao', 'questionario.data']
            + ['questionario.%s' % campo for campo in campos])
        for questionario in questionarios:
            # NOTE(review): raw stored values are exported; for choice
            # fields, get_<campo>_display() would export labels instead.
            writer.writerow(
                [questionario.aluno.numero_identificacao, questionario.data]
                + [getattr(questionario, campo) for campo in campos])
        return response
class DownloadListaExamesView(LoginRequired, View):
    """Generate and send the CSV file with the list of exams."""

    # FDI tooth numbers in the exact column order of the original CSV
    # layout (quadrants 1, 2, 3, 4).
    DENTES = (18, 17, 16, 15, 14, 13, 12, 11,
              21, 22, 23, 24, 25, 26, 27, 28,
              38, 37, 36, 35, 34, 33, 32, 31,
              41, 42, 43, 44, 45, 46, 47, 48)
    # Periodontal sextant site labels, in original column order.
    SEXTANTES = ('1716', '11', '2627', '3736', '31', '4647')

    def get(self, request):
        """Return a ';'-delimited CSV response, one exam per row."""
        exames = Exame.objects.order_by('id')
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_exames.csv"'
        writer = csv.writer(response, delimiter=';')
        # Build the Exame field names once; header and data rows are
        # derived from the same list so they can never drift apart.
        campos = []
        for dente in self.DENTES:
            campos.append('carie_coroa_%d' % dente)
            campos.append('carie_tratamento_%d' % dente)
        for sextante in self.SEXTANTES:
            campos.append('periodontal_sangramento_%s' % sextante)
            campos.append('periodontal_calculo_%s' % sextante)
            campos.append('periodontal_bolsa_%s' % sextante)
        writer.writerow(
            ['aluno.numero_identificacao', 'exame.data',
             'exame.examinador', 'exame.anotador']
            + ['exame.%s' % campo for campo in campos])
        for exame in exames:
            writer.writerow(
                [exame.aluno.numero_identificacao, exame.data,
                 exame.examinador, exame.anotador]
                + [getattr(exame, campo) for campo in campos])
        return response
class DownloadListaDiretoresView(LoginRequired, View):
    """Gera e envia o arquivo csv da lista de questionários de diretores.

    (Generates and serves the CSV export of the director questionnaires.)
    """

    # The questionnaire has sequentially numbered fields questao_1 .. questao_84.
    # Generating the column list and row values from this constant replaces the
    # original 84 hand-copied lines and keeps header and data in lockstep.
    NUM_QUESTOES = 84

    def get(self, request):
        """Return an ``attachment`` HTTP response with one CSV row per Diretor.

        Columns: escola.nome, diretor.data, then questao_1..questao_84.
        Uses ';' as delimiter, matching the other export views in this file.
        """
        diretores = Diretor.objects.order_by('id')
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="lista_diretores.csv"'
        writer = csv.writer(response, delimiter=';')
        questao_fields = ['questao_%d' % i for i in range(1, self.NUM_QUESTOES + 1)]
        writer.writerow(
            ['escola.nome', 'diretor.data']
            + ['diretor.%s' % field for field in questao_fields]
        )
        for diretor in diretores:
            # NOTE(review): the original code carried "# get__display()" markers on
            # most questao_N fields, hinting that get_questao_N_display() output was
            # wanted for choice fields; raw values are kept here to preserve the
            # existing file format — confirm before switching to display values.
            writer.writerow(
                [diretor.escola.nome, diretor.data]
                + [getattr(diretor, field) for field in questao_fields]
            )
        return response
| 57.049979
| 8,457
| 0.547622
| 12,431
| 134,695
| 5.507924
| 0.023248
| 0.152332
| 0.18604
| 0.243964
| 0.973185
| 0.972761
| 0.972411
| 0.972411
| 0.870934
| 0.865954
| 0
| 0.063468
| 0.355121
| 134,695
| 2,360
| 8,458
| 57.074153
| 0.724782
| 0.120851
| 0
| 0.880036
| 0
| 0
| 0.183232
| 0.124493
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004476
| false
| 0.000448
| 0.003581
| 0
| 0.018353
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1dc57c0d12f238518d38818ae0945c4a1f83ee90
| 5,322
|
py
|
Python
|
tests/test_config/test_config_file_only.py
|
plato79/UnofficialDDNSnix
|
4a2a67cc06a6346f90726c33ee388374b7922f2e
|
[
"MIT"
] | 3
|
2015-10-18T09:14:59.000Z
|
2018-02-25T09:41:58.000Z
|
tests/test_config/test_config_file_only.py
|
plato79/UnofficialDDNSnix
|
4a2a67cc06a6346f90726c33ee388374b7922f2e
|
[
"MIT"
] | 3
|
2015-02-05T00:52:30.000Z
|
2020-04-17T06:21:00.000Z
|
tests/test_config/test_config_file_only.py
|
plato79/UnofficialDDNSnix
|
4a2a67cc06a6346f90726c33ee388374b7922f2e
|
[
"MIT"
] | 3
|
2016-01-09T04:36:10.000Z
|
2020-04-17T06:57:47.000Z
|
#!/usr/bin/env python2.6
import os
import pytest
from UnofficialDDNS import __doc__ as uddns_doc
from UnofficialDDNS import __version__ as uddns_ver
from docopt import docopt
import libs
def test_config_file_only_with_invalid_binary_data(config_file):
    """Random binary bytes in the config file must raise ConfigError."""
    config_file.write(os.urandom(1024))
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    expected = "Unable to read config file %s, invalid data." % config_file.name
    assert str(exc_info.value) == expected
def test_config_file_only_with_nonexistent_file():
    """A -c path that does not exist must raise ConfigError."""
    cmd_line = ['-c', '/tmp/doesNotExist.28520']
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    expected = "Config file /tmp/doesNotExist.28520 does not exist, not a file, or no permission."
    assert str(exc_info.value) == expected
def test_config_file_only_with_no_read_permissions():
    """A config file the process cannot read must raise ConfigError."""
    cmd_line = ['-c', '/etc/sudoers']
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    assert str(exc_info.value) == "Unable to read config file /etc/sudoers."
def test_config_file_only_with_directory_instead_of_file():
    """Passing a directory as the config file must raise ConfigError."""
    cmd_line = ['-c', '/etc']
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    assert str(exc_info.value) == "Config file /etc does not exist, not a file, or no permission."
def test_config_file_only_with_invalid_text_data_not_yaml(config_file):
    """Text that does not parse into a YAML dict must raise ConfigError."""
    config_file.write("daemon\n")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    expected = "Config file %s contents didn't yield dict or not YAML: daemon" % config_file.name
    assert str(exc_info.value) == expected
def test_config_file_only_with_invalid_text_data_not_yaml_big(config_file):
    """A multi-line non-YAML config must raise ConfigError naming the file."""
    config_file.write("""
domain mydomain.com # i am a comment
user thisuser#comment
#another comment
passwd abc"
""")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    # Only a prefix match here: the parser appends its own YAML error details.
    assert "Config file %s contents not YAML formatted:" % config_file.name in str(exc_info.value)
def test_config_file_only_with_invalid_text_data_unknown_option(config_file):
    """An option name the program does not recognize must raise ConfigError."""
    config_file.write("test: true\n")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    assert str(exc_info.value) == "Unknown option test in config file %s." % config_file.name
def test_config_file_only_with_invalid_text_data_unknown_value(config_file):
    """A non-boolean value for the boolean 'daemon' option must raise ConfigError."""
    config_file.write("daemon: unknown\n")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    assert str(exc_info.value) == "Config file option daemon must be True or False."
def test_config_file_only_missing_log_value(config_file):
    """A 'log:' key whose value is commented out must parse to None, not error."""
    config_file.write("domain: mydomain.com\nuser: thisuser\npasswd: abc\nlog: #True\n")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    parsed = libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    assert parsed['log'] is None
def test_config_file_only_tab_character(config_file):
    """Tab characters are rejected; the config file must use spaces only."""
    config_file.write("domain: mydomain.com\nuser:\tthisuser\npasswd: abc")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    with pytest.raises(libs.MultipleConfigSources.ConfigError) as exc_info:
        libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
    expected = "Tab character found in config file %s. Must use spaces only!" % config_file.name
    assert str(exc_info.value) == expected
def test_config_file_only_with_full_valid_data(config_file):
    """A complete, valid config file yields the fully populated config dict."""
    config_file.write("domain: mydomain.com\nuser: thisuser\npasswd: abc")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    wanted = dict(
        log=None, daemon=False, verbose=False, interval=60, pid=None, quiet=False,
        version=False, registrar='name.com', config=config_file.name, help=False,
        user='thisuser', passwd='abc', domain='mydomain.com',
    )
    assert wanted == libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
def test_config_file_only_with_full_valid_data_and_comments(config_file):
    """Inline and full-line comments are stripped before parsing the config."""
    config_file.write("""
domain: mydomain.com # i am a comment
user: thisuser #comment
#another comment
passwd: abc
""")
    config_file.flush()
    cmd_line = ['-c', config_file.name]
    wanted = dict(
        log=None, daemon=False, verbose=False, interval=60, pid=None, quiet=False,
        version=False, registrar='name.com', config=config_file.name, help=False,
        user='thisuser', passwd='abc', domain='mydomain.com',
    )
    assert wanted == libs.get_config(docopt(uddns_doc, version=uddns_ver, argv=cmd_line))
| 43.268293
| 110
| 0.72003
| 757
| 5,322
| 4.822985
| 0.153236
| 0.175294
| 0.061353
| 0.055875
| 0.845248
| 0.832649
| 0.804985
| 0.79622
| 0.788003
| 0.751849
| 0
| 0.004493
| 0.16366
| 5,322
| 122
| 111
| 43.622951
| 0.815772
| 0.004322
| 0
| 0.536082
| 0
| 0
| 0.200453
| 0.015855
| 0
| 0
| 0
| 0
| 0.123711
| 1
| 0.123711
| false
| 0.072165
| 0.061856
| 0
| 0.185567
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1ded8a15af9380e20301cb57a2fc887ac18e7d90
| 10,461
|
py
|
Python
|
DQM/Physics/python/topSingleLeptonDQM_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/Physics/python/topSingleLeptonDQM_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/Physics/python/topSingleLeptonDQM_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
# Shared object-selection cut strings (CMS cut-parser expressions) used by the
# top single-lepton DQM modules configured below.
#Primary vertex selection
PVCut = "abs(z) < 24. & position.rho < 2. & ndof > 4 & !isFake"
#Jet selection
looseJetCut = "(chargedHadronEnergyFraction()>0 && chargedMultiplicity()>0 && chargedEmEnergyFraction()<0.99 && neutralHadronEnergyFraction()<0.99 && neutralEmEnergyFraction()<0.99 && (chargedMultiplicity()+neutralMultiplicity())>1) && abs(eta)<=2.4 "
tightJetCut = "(chargedHadronEnergyFraction()>0 && chargedMultiplicity()>0 && chargedEmEnergyFraction()<0.99 && neutralHadronEnergyFraction()<0.90 && neutralEmEnergyFraction()<0.90 && (chargedMultiplicity()+neutralMultiplicity())>1) && abs(eta)<=2.4 "
#Loose muon selection
looseMuonCut = "(muonRef.isNonnull && (muonRef.isGlobalMuon || muonRef.isTrackerMuon) && muonRef.isPFMuon)"
looseIsoCut = "((muonRef.pfIsolationR04.sumChargedHadronPt + max(0., muonRef.pfIsolationR04.sumNeutralHadronEt + muonRef.pfIsolationR04.sumPhotonEt - 0.5 * muonRef.pfIsolationR04.sumPUPt) ) / muonRef.pt < 0.25)"
#Medium muon selection. Also requires either good global muon or tight segment compatibility
# NOTE(review): this concatenation produces "...) muonRef.innerTrack.validFraction > 0.8"
# with no "&&" joining the two halves; mediumMuonCut is not used anywhere in this
# file, but confirm the string is well-formed before reusing it.
mediumMuonCut = looseMuonCut + " muonRef.innerTrack.validFraction > 0.8"
#Tight muon selection. Lacks distance to primary vertex variables, dz<0.5, dxy < 0.2. Now done at .cc
tightMuonCut = "muonRef.isNonnull && muonRef.isGlobalMuon && muonRef.isPFMuon && muonRef.globalTrack.normalizedChi2 < 10. && muonRef.globalTrack.hitPattern.numberOfValidMuonHits > 0 && " + \
               "muonRef.numberOfMatchedStations > 1 && muonRef.innerTrack.hitPattern.numberOfValidPixelHits > 0 && muonRef.innerTrack.hitPattern.trackerLayersWithMeasurement > 5 "
tightIsoCut = "(muonRef.pfIsolationR04.sumChargedHadronPt + max(0., muonRef.pfIsolationR04.sumNeutralHadronEt + muonRef.pfIsolationR04.sumPhotonEt - 0.5 * muonRef.pfIsolationR04.sumPUPt) ) / muonRef.pt < 0.15"
#Electron selections
# Each string has a barrel branch (|supercluster eta| < 1.479) and an endcap
# branch (> 1.479) joined by "||".
# NOTE(review): the numeric thresholds appear to be cut-based electron ID
# working points — confirm against the egamma POG tables before editing any value.
looseEleCut = "(( gsfElectronRef.full5x5_sigmaIetaIeta() < 0.011 && gsfElectronRef.superCluster().isNonnull() && gsfElectronRef.superCluster().seed().isNonnull() && (gsfElectronRef.deltaEtaSuperClusterTrackAtVtx() - gsfElectronRef.superCluster().eta() + gsfElectronRef.superCluster().seed().eta()) < 0.00477 && abs(gsfElectronRef.deltaPhiSuperClusterTrackAtVtx()) < 0.222 && gsfElectronRef.hadronicOverEm() < 0.298 && abs(1.0 - gsfElectronRef.eSuperClusterOverP())*1.0/gsfElectronRef.ecalEnergy() < 0.241 && gsfElectronRef.gsfTrack.hitPattern().numberOfLostHits('MISSING_INNER_HITS') <= 1 && abs(gsfElectronRef.superCluster().eta()) < 1.479) || (gsfElectronRef.full5x5_sigmaIetaIeta() < 0.0314 && gsfElectronRef.superCluster().isNonnull() && gsfElectronRef.superCluster().seed().isNonnull() && (gsfElectronRef.deltaEtaSuperClusterTrackAtVtx() - gsfElectronRef.superCluster().eta() + gsfElectronRef.superCluster().seed().eta()) < 0.00868 && abs(gsfElectronRef.deltaPhiSuperClusterTrackAtVtx()) < 0.213 && gsfElectronRef.hadronicOverEm() < 0.101 && abs(1.0 - gsfElectronRef.eSuperClusterOverP())*1.0/gsfElectronRef.ecalEnergy() < 0.14 && gsfElectronRef.gsfTrack.hitPattern().numberOfLostHits('MISSING_INNER_HITS') <= 1 && abs(gsfElectronRef.superCluster().eta()) > 1.479))"
tightEleCut = "((gsfElectronRef.full5x5_sigmaIetaIeta() < 0.00998 && gsfElectronRef.superCluster().isNonnull() && gsfElectronRef.superCluster().seed().isNonnull() && (gsfElectronRef.deltaEtaSuperClusterTrackAtVtx() - gsfElectronRef.superCluster().eta() + gsfElectronRef.superCluster().seed().eta()) < 0.00308 && abs(gsfElectronRef.deltaPhiSuperClusterTrackAtVtx()) < 0.0816 && gsfElectronRef.hadronicOverEm() < 0.0414 && abs(1.0 - gsfElectronRef.eSuperClusterOverP())*1.0/gsfElectronRef.ecalEnergy() < 0.0129 && gsfElectronRef.gsfTrack.hitPattern().numberOfLostHits('MISSING_INNER_HITS') <= 1 && abs(gsfElectronRef.superCluster().eta()) < 1.479) || (gsfElectronRef.full5x5_sigmaIetaIeta() < 0.0292 && gsfElectronRef.superCluster().isNonnull() && gsfElectronRef.superCluster().seed().isNonnull() && (gsfElectronRef.deltaEtaSuperClusterTrackAtVtx() - gsfElectronRef.superCluster().eta() + gsfElectronRef.superCluster().seed().eta()) < 0.00605 && abs(gsfElectronRef.deltaPhiSuperClusterTrackAtVtx()) < 0.0394 && gsfElectronRef.hadronicOverEm() < 0.0641 && abs(1.0 - gsfElectronRef.eSuperClusterOverP())*1.0/gsfElectronRef.ecalEnergy() < 0.0129 && gsfElectronRef.gsfTrack.hitPattern().numberOfLostHits('MISSING_INNER_HITS') <= 1 && abs(gsfElectronRef.superCluster().eta()) > 1.479))"
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
# DQM module for the single-muon top channel: tight muon, >=4 jets, MET.
# Histograms are booked under Physics/Top/TopSingleMuonMediumDQM/.
topSingleMuonMediumDQM = DQMEDAnalyzer('TopSingleLeptonDQM',
    ## ------------------------------------------------------
    ## SETUP
    ##
    ## configuration of the MonitoringEnsemble(s)
    ## [mandatory] : optional PSets may be omitted
    ##
    setup = cms.PSet(
        # Output folder inside the DQM store.
        directory = cms.string("Physics/Top/TopSingleMuonMediumDQM/"),
        # Input collections the histograms are filled from.
        sources = cms.PSet(
            muons = cms.InputTag("pfIsolatedMuonsEI"),
            elecs = cms.InputTag("pfIsolatedElectronsEI"),
            jets = cms.InputTag("ak4PFJetsCHS"),
            mets = cms.VInputTag("pfMet"),
            pvs = cms.InputTag("offlinePrimaryVertices")
        ),
        monitoring = cms.PSet(
            verbosity = cms.string("DEBUG")
        ),
        pvExtras = cms.PSet(
            select = cms.string(PVCut)
        ),
        # Electron candidates: tight ID plus kinematics and the ECAL barrel/endcap gap veto.
        elecExtras = cms.PSet(
            select = cms.string(tightEleCut + "& pt>20 & abs(eta)<2.5 & (abs(gsfElectronRef.superCluster().eta()) <= 1.4442 || abs(gsfElectronRef.superCluster().eta()) >= 1.5660)"),
            rho = cms.InputTag("fixedGridRhoFastjetAll"),
        ),
        # Muon candidates: tight ID; isolation is applied as a separate (loose) cut.
        muonExtras = cms.PSet(
            select = cms.string(tightMuonCut + " && pt>20 & abs(eta)<2.4"),
            #select = cms.string(looseMuonCut + " && pt>20 & abs(eta)<2.4"),
            isolation = cms.string(looseIsoCut)
        ),
        jetExtras = cms.PSet(
            jetCorrector = cms.InputTag("dqmAk4PFCHSL1FastL2L3Corrector"), #Use pak4PFCHSL1FastL2L3Residual for data!!!
            select = cms.string("pt>30 & abs(eta)< 2.4"),
            jetBTaggers = cms.PSet(
                cvsVertex = cms.PSet(
                    label = cms.InputTag("pfCombinedInclusiveSecondaryVertexV2BJetTags"),
                    workingPoint = cms.double(0.890)
                    # CSV Medium from https://twiki.cern.ch/twiki/bin/viewauth/CMS/BtagRecommendation74X
                )
            ),
        ),
        # Mass-histogram window edges in GeV (70-110; presumably around the W/Z
        # mass region — confirm in the TopSingleLeptonDQM plugin).
        massExtras = cms.PSet(
            lowerEdge = cms.double( 70.),
            upperEdge = cms.double(110.)
        ),
    ),
    # Event preselection: require a good primary vertex before monitoring.
    preselection = cms.PSet(
        vertex = cms.PSet(
            src = cms.InputTag("offlinePrimaryVertices"),
            select = cms.string(PVCut)
        )
    ),
    # Ordered selection steps; step labels encode "<object>:<step index>".
    selection = cms.VPSet(
        cms.PSet(
            label = cms.string("muons:step0"),
            src = cms.InputTag("pfIsolatedMuonsEI"),
            select = cms.string(tightMuonCut + " && pt>20 & abs(eta)<2.4"),
            min = cms.int32(1),
        ),
        cms.PSet(
            label = cms.string("jets/pf:step1"),
            src = cms.InputTag("ak4PFJetsCHS"),
            select = cms.string("pt>30 & abs(eta)<2.4"),
            min = cms.int32(4),
        ),
        cms.PSet(
            label = cms.string("met:step2"),
            src = cms.InputTag("pfMet"),
            select = cms.string("pt>30"),
        ),
    )
)
# DQM module for the single-electron top channel: tight electron, >=4 jets, MET.
# Mirrors topSingleMuonMediumDQM except for the lepton selection step and folder.
topSingleElectronMediumDQM = DQMEDAnalyzer('TopSingleLeptonDQM',
    ## ------------------------------------------------------
    ## SETUP
    ##
    ## configuration of the MonitoringEnsemble(s)
    ## [mandatory] : optional PSets may be omitted
    ##
    setup = cms.PSet(
        # Output folder inside the DQM store.
        directory = cms.string("Physics/Top/TopSingleElectronMediumDQM/"),
        # Input collections the histograms are filled from.
        sources = cms.PSet(
            muons = cms.InputTag("pfIsolatedMuonsEI"),
            elecs = cms.InputTag("pfIsolatedElectronsEI"),
            jets = cms.InputTag("ak4PFJetsCHS"),
            mets = cms.VInputTag("pfMet"),
            pvs = cms.InputTag("offlinePrimaryVertices")
        ),
        monitoring = cms.PSet(
            verbosity = cms.string("DEBUG")
        ),
        pvExtras = cms.PSet(
            select = cms.string(PVCut)
        ),
        # Electron candidates: tight ID plus kinematics and the ECAL barrel/endcap gap veto.
        elecExtras = cms.PSet(
            select = cms.string(tightEleCut + "& pt>20 & abs(eta)<2.5 & (abs(gsfElectronRef.superCluster().eta()) <= 1.4442 || abs(gsfElectronRef.superCluster().eta()) >= 1.5660)"),
            rho = cms.InputTag("fixedGridRhoFastjetAll"),
        ),
        # Muon candidates (note: single "&" here vs "&&" in the muon module above).
        muonExtras = cms.PSet(
            select = cms.string(tightMuonCut + " & pt>20 & abs(eta)<2.4"),
            isolation = cms.string(looseIsoCut),
        ),
        jetExtras = cms.PSet(
            jetCorrector = cms.InputTag("dqmAk4PFCHSL1FastL2L3Corrector"), #Use pak4PFCHSL1FastL2L3Residual for data!!!
            select = cms.string("pt>30 & abs(eta)<2.4"),
            jetBTaggers = cms.PSet(
                cvsVertex = cms.PSet(
                    label = cms.InputTag("pfCombinedInclusiveSecondaryVertexV2BJetTags"),
                    workingPoint = cms.double(0.890)
                    # CSV Medium from https://twiki.cern.ch/twiki/bin/viewauth/CMS/BtagRecommendation74X
                )
            ),
        ),
        # Mass-histogram window edges in GeV (70-110; presumably around the W/Z
        # mass region — confirm in the TopSingleLeptonDQM plugin).
        massExtras = cms.PSet(
            lowerEdge = cms.double( 70.),
            upperEdge = cms.double(110.)
        ),
    ),
    # Event preselection: require a good primary vertex before monitoring.
    preselection = cms.PSet(
        vertex = cms.PSet(
            src = cms.InputTag("offlinePrimaryVertices"),
            select = cms.string(PVCut)
        )
    ),
    # Ordered selection steps; step labels encode "<object>:<step index>".
    selection = cms.VPSet(
        cms.PSet(
            label = cms.string("elecs:step0"),
            src = cms.InputTag("pfIsolatedElectronsEI"),
            select = cms.string("pt>20 & abs(eta)<2.5 & (abs(gsfElectronRef.superCluster().eta()) <= 1.4442 || abs(gsfElectronRef.superCluster().eta()) >= 1.5660) &&" + tightEleCut),
            # NOTE(review): the commented-out alternative below references
            # EletightIsoCut, which is not defined anywhere in this file.
            # select = cms.string("pt>30 & abs(eta)<2.5 & abs(gsfElectronRef.gsfTrack.d0)<0.02 & gsfElectronRef.gsfTrack.hitPattern().numberOfLostHits('MISSING_INNER_HITS') <= 0 & (abs(gsfElectronRef.superCluster.eta) <= 1.4442 || abs(gsfElectronRef.superCluster.eta) >= 1.5660) & " + EletightIsoCut),
            min = cms.int32(1),
        ),
        cms.PSet(
            label = cms.string("jets/pf:step1"),
            src = cms.InputTag("ak4PFJetsCHS"),
            select = cms.string("pt>30 & abs(eta)<2.4"),
            min = cms.int32(4),
        ),
        cms.PSet(
            label = cms.string("met:step2"),
            src = cms.InputTag("pfMet"),
            select = cms.string("pt>30"),
        ),
    )
)
| 56.853261
| 1,283
| 0.635121
| 977
| 10,461
| 6.78608
| 0.220061
| 0.031674
| 0.040724
| 0.057919
| 0.771192
| 0.758522
| 0.755807
| 0.731976
| 0.728054
| 0.697285
| 0
| 0.044832
| 0.204665
| 10,461
| 183
| 1,284
| 57.163934
| 0.752043
| 0.120734
| 0
| 0.769231
| 0
| 0.083916
| 0.556489
| 0.407488
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013986
| 0
| 0.013986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1df9add4ef7ba56c68b966626bfbd57cb88b1cbc
| 7,462
|
py
|
Python
|
tests/test_provider_chanzuckerberg_snowflake.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_chanzuckerberg_snowflake.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_chanzuckerberg_snowflake.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_chanzuckerberg_snowflake.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:27:18 UTC)
def test_provider_import():
    """Smoke test: the generated snowflake provider module must be importable."""
    # The import itself is the assertion — an ImportError fails the test.
    import terrascript.provider.chanzuckerberg.snowflake
def test_resource_import():
    """Smoke test: every generated snowflake resource class must be importable.

    A single parenthesized import binds exactly the same names as the
    original one-import-per-line form; an ImportError fails the test.
    """
    from terrascript.resource.chanzuckerberg.snowflake import (
        snowflake_account_grant,
        snowflake_api_integration,
        snowflake_database,
        snowflake_database_grant,
        snowflake_external_function,
        snowflake_external_table,
        snowflake_external_table_grant,
        snowflake_file_format,
        snowflake_file_format_grant,
        snowflake_function,
        snowflake_function_grant,
        snowflake_integration_grant,
        snowflake_managed_account,
        snowflake_masking_policy,
        snowflake_masking_policy_grant,
        snowflake_materialized_view,
        snowflake_materialized_view_grant,
        snowflake_network_policy,
        snowflake_network_policy_attachment,
        snowflake_notification_integration,
        snowflake_pipe,
        snowflake_pipe_grant,
        snowflake_procedure,
        snowflake_procedure_grant,
        snowflake_resource_monitor,
        snowflake_resource_monitor_grant,
        snowflake_role,
        snowflake_role_grants,
        snowflake_row_access_policy,
        snowflake_row_access_policy_grant,
        snowflake_schema,
        snowflake_schema_grant,
        snowflake_scim_integration,
        snowflake_sequence,
        snowflake_sequence_grant,
        snowflake_share,
        snowflake_stage,
        snowflake_stage_grant,
        snowflake_storage_integration,
        snowflake_stream,
        snowflake_stream_grant,
        snowflake_table,
        snowflake_table_grant,
        snowflake_task,
        snowflake_task_grant,
        snowflake_user,
        snowflake_user_public_keys,
        snowflake_view,
        snowflake_view_grant,
        snowflake_warehouse,
        snowflake_warehouse_grant,
    )
def test_datasource_import():
    """Smoke test: every generated snowflake data source must be importable.

    A single parenthesized import binds exactly the same names as the
    original one-import-per-line form; an ImportError fails the test.
    """
    from terrascript.data.chanzuckerberg.snowflake import (
        snowflake_current_account,
        snowflake_external_functions,
        snowflake_external_tables,
        snowflake_file_formats,
        snowflake_functions,
        snowflake_masking_policies,
        snowflake_materialized_views,
        snowflake_pipes,
        snowflake_procedures,
        snowflake_resource_monitors,
        snowflake_row_access_policies,
        snowflake_schemas,
        snowflake_sequences,
        snowflake_stages,
        snowflake_storage_integrations,
        snowflake_streams,
        snowflake_system_generate_scim_access_token,
        snowflake_system_get_aws_sns_iam_policy,
        snowflake_system_get_privatelink_config,
        snowflake_system_get_snowflake_platform_info,
        snowflake_tables,
        snowflake_tasks,
        snowflake_views,
        snowflake_warehouses,
    )
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.chanzuckerberg.snowflake
#
# t = terrascript.provider.chanzuckerberg.snowflake.snowflake()
# s = str(t)
#
# assert 'https://github.com/chanzuckerberg/terraform-provider-snowflake' in s
# assert '0.25.19' in s
| 35.703349
| 88
| 0.817609
| 763
| 7,462
| 7.78768
| 0.159895
| 0.305789
| 0.366038
| 0.479636
| 0.883036
| 0.859307
| 0.857792
| 0.73174
| 0.246382
| 0.025581
| 0
| 0.002637
| 0.136157
| 7,462
| 208
| 89
| 35.875
| 0.919175
| 0.071697
| 0
| 0.150442
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004808
| 0
| 1
| 0.026549
| true
| 0
| 0.699115
| 0
| 0.725664
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
382d6564470c2075fd0ec32a401d0c39033d0e9b
| 14,293
|
py
|
Python
|
product/migrations/0001_initial.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | null | null | null |
product/migrations/0001_initial.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | null | null | null |
product/migrations/0001_initial.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 2.2.1 on 2019-09-11 09:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the product app (auto-generated by Django 2.2.1).

    Creates the brand, category-goods (plus a backup copy), coupon,
    coupon-type, goods, goods-category and order-goods tables.  Field
    definitions are kept exactly as generated: editing an already-applied
    migration desynchronizes the recorded migration state from the real
    database schema.
    """

    # First migration of the app — there is no prior state to build on.
    initial = True

    # No dependencies on other apps' migrations.
    dependencies = [
    ]

    operations = [
        # Product brand table.
        # NOTE(review): 'bramd_id'/'bramd_name' look like typos of
        # 'brand_*', but renaming them now would require a follow-up
        # rename migration — left as generated.
        migrations.CreateModel(
            name='SmBrand',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bramd_id', models.IntegerField(blank=True, null=True, verbose_name='品牌ID')),
                ('bramd_name', models.CharField(blank=True, max_length=64, null=True, verbose_name='品牌')),
                ('brand_order', models.IntegerField(blank=True, null=True, verbose_name='排序')),
                ('is_status', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否启用')),
                ('hide_value', models.IntegerField(blank=True, default=0, null=True, verbose_name='隐藏的字数')),
            ],
        ),
        # Category <-> goods mapping with a per-category price/name override.
        migrations.CreateModel(
            name='SmCatGoods',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cat_id', models.IntegerField(blank=True, null=True, verbose_name='目录ID(goods_category表)')),
                ('goods_id', models.IntegerField(blank=True, null=True, verbose_name='商品ID')),
                ('goods_price', models.FloatField(blank=True, null=True, verbose_name='商品价格')),
                ('expand_name', models.CharField(blank=True, max_length=64, null=True, verbose_name='商品扩展名')),
                ('is_status', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否启用')),
                ('add_time', models.DateTimeField(blank=True, null=True, verbose_name='添加时间')),
                ('note', models.CharField(blank=True, max_length=1024, null=True, verbose_name='备注')),
            ],
        ),
        # Backup copy of SmCatGoods — columns are intentionally identical.
        migrations.CreateModel(
            name='SmCatGoodsBackup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cat_id', models.IntegerField(blank=True, null=True, verbose_name='目录ID(goods_category表)')),
                ('goods_id', models.IntegerField(blank=True, null=True, verbose_name='商品ID')),
                ('goods_price', models.FloatField(blank=True, null=True, verbose_name='商品价格')),
                ('expand_name', models.CharField(blank=True, max_length=64, null=True, verbose_name='商品扩展名')),
                ('is_status', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否启用')),
                ('add_time', models.DateTimeField(blank=True, null=True, verbose_name='添加时间')),
                ('note', models.CharField(blank=True, max_length=1024, null=True, verbose_name='备注')),
            ],
        ),
        # Individual coupon instances (one row per issued coupon code).
        # NOTE(review): 'soruce_order_sn' appears to be a typo of
        # 'source_order_sn' — left as generated for the same reason.
        migrations.CreateModel(
            name='SmCoupon',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('coupon_id_type', models.IntegerField(blank=True, null=True, verbose_name='分类ID')),
                ('coupon_sn', models.CharField(blank=True, max_length=256, null=True, verbose_name='优惠券号')),
                ('user_id', models.IntegerField(blank=True, null=True, verbose_name='使用会员ID')),
                ('used_time', models.DateTimeField(blank=True, null=True, verbose_name='使用时间')),
                ('order_sn', models.CharField(blank=True, max_length=256, null=True, verbose_name='使用订单号')),
                ('soruce_order_sn', models.CharField(blank=True, max_length=256, null=True, verbose_name='由该订单产生的优惠券')),
                ('emailed', models.CharField(blank=True, max_length=64, null=True, verbose_name='邮箱地址')),
                ('send_type', models.BooleanField(blank=True, default=False, null=True, verbose_name='是否启用')),
                ('send_phone', models.CharField(blank=True, max_length=64, null=True, verbose_name='发送的手机号码')),
                ('send_time', models.DateTimeField(blank=True, null=True, verbose_name='发送时间')),
            ],
        ),
        # Coupon categories: face value, validity windows and spend limits.
        migrations.CreateModel(
            name='SmCouponType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type_money', models.IntegerField(blank=True, null=True, verbose_name='类别金额')),
                ('type_name', models.CharField(blank=True, max_length=256, null=True, verbose_name='类别名称')),
                ('send_type', models.IntegerField(blank=True, null=True, verbose_name='发送类型')),
                ('min_amount', models.IntegerField(blank=True, null=True, verbose_name='最小使用金额')),
                ('max_amount', models.IntegerField(blank=True, null=True, verbose_name='最大使用金额')),
                ('send_start_date', models.DateTimeField(blank=True, null=True, verbose_name='发送开始时间')),
                ('send_end_date', models.DateTimeField(blank=True, null=True, verbose_name='发送最后时间')),
                ('use_start_date', models.DateTimeField(blank=True, null=True, verbose_name='使用开始时间')),
                ('use_end_date', models.DateTimeField(blank=True, null=True, verbose_name='最后使用时间')),
                ('min_goods_amount', models.IntegerField(blank=True, null=True, verbose_name='最小使用商品金额')),
                ('add_time', models.DateTimeField(blank=True, null=True, verbose_name='添加时间')),
            ],
        ),
        # Main goods catalogue table.
        # NOTE(review): 'promote_end_date' carries the verbose_name
        # '促销开始日期' (promotion *start* date) — looks like a copy-paste
        # slip in the model; cosmetic only, left as generated.
        migrations.CreateModel(
            name='SmGoods',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cat_id', models.IntegerField(blank=True, null=True, verbose_name='')),
                ('goods_sn', models.CharField(blank=True, max_length=64, null=True, verbose_name='商品货号')),
                ('goods_name', models.CharField(blank=True, max_length=256, null=True, verbose_name='商品名字')),
                ('goods_name_style', models.CharField(blank=True, max_length=256, null=True, verbose_name='商品类型')),
                ('click_count', models.IntegerField(blank=True, default=0, null=True, verbose_name='点击数量')),
                ('brand_id', models.IntegerField(blank=True, null=True, verbose_name='品牌ID')),
                ('provider_name', models.CharField(blank=True, max_length=64, null=True, verbose_name='')),
                ('goods_number', models.IntegerField(blank=True, null=True, verbose_name='品牌ID')),
                ('goods_weight', models.FloatField(blank=True, null=True, verbose_name='')),
                ('market_price', models.FloatField(blank=True, null=True, verbose_name='市场价')),
                ('shop_price', models.FloatField(blank=True, null=True, verbose_name='销售价')),
                ('promote_price', models.FloatField(blank=True, null=True, verbose_name='促销价')),
                ('promote_start_date', models.DateField(blank=True, null=True, verbose_name='促销开始日期')),
                ('promote_end_date', models.DateField(blank=True, null=True, verbose_name='促销开始日期')),
                ('warn_number', models.IntegerField(blank=True, null=True, verbose_name='')),
                ('keywords', models.CharField(blank=True, max_length=256, null=True, verbose_name='关键词')),
                ('goods_brief', models.CharField(blank=True, max_length=1024, null=True, verbose_name='货物简介')),
                ('goods_desc', models.TextField(blank=True, null=True, verbose_name='货物详细')),
                ('goods_thumb', models.CharField(blank=True, max_length=64, null=True, verbose_name='')),
                ('goods_img', models.CharField(blank=True, max_length=64, null=True, verbose_name='')),
                ('original_img', models.CharField(blank=True, max_length=64, null=True, verbose_name='')),
                ('is_real', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否审核')),
                ('extension_code', models.CharField(blank=True, max_length=64, null=True, verbose_name='')),
                ('integral', models.IntegerField(blank=True, null=True, verbose_name='')),
                ('add_time', models.DateTimeField(blank=True, null=True, verbose_name='添加时间')),
                ('sort_order', models.IntegerField(blank=True, null=True, verbose_name='排序')),
                ('is_delete', models.BooleanField(blank=True, default=False, null=True, verbose_name='是否删除')),
                ('is_best', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否最好')),
                ('is_new', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否最新')),
                ('is_hot', models.BooleanField(blank=True, default=False, null=True, verbose_name='是否最热')),
                ('is_promote', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否公开')),
                ('bonus_type_id', models.IntegerField(blank=True, null=True, verbose_name='利润类型')),
                ('last_update', models.DateTimeField(blank=True, null=True, verbose_name='更新时间')),
                ('goods_type', models.IntegerField(blank=True, null=True, verbose_name='商品类型,促销有用')),
                ('goods_info', models.CharField(blank=True, max_length=10000, null=True, verbose_name='商品属性')),
                ('seller_note', models.CharField(blank=True, max_length=64, null=True, verbose_name='票据信息')),
                ('give_integral', models.IntegerField(blank=True, default=0, null=True, verbose_name='积分')),
                ('comment_status', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否评论')),
                ('warehouse_id', models.IntegerField(blank=True, null=True, verbose_name='仓库ID')),
                ('goods_status', models.IntegerField(blank=True, default=0, null=True, verbose_name='商品上下架,-1是回收站,0是缺货,1是下架')),
                ('is_taozhuang', models.BooleanField(blank=True, default=False, null=True, verbose_name='是否套餐')),
                ('gift_id', models.IntegerField(blank=True, null=True, verbose_name='赠品ID')),
                ('series_id', models.IntegerField(blank=True, null=True, verbose_name='系列ID')),
                ('reserved_field', models.CharField(blank=True, max_length=64, null=True, verbose_name='保留字段')),
            ],
        ),
        # Hierarchical goods category tree (parent_id / g_level).
        migrations.CreateModel(
            name='SmGoodsCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cat_name', models.CharField(blank=True, max_length=256, null=True, verbose_name='目录名称')),
                ('keywords', models.CharField(blank=True, max_length=256, null=True, verbose_name='关键字')),
                ('cat_desc', models.CharField(blank=True, max_length=256, null=True, verbose_name='目录描述')),
                ('parent_id', models.IntegerField(blank=True, null=True, verbose_name='父ID')),
                ('g_level', models.IntegerField(blank=True, null=True, verbose_name='层级')),
                ('sort_order', models.IntegerField(blank=True, null=True, verbose_name='排序')),
                ('template_file', models.CharField(blank=True, max_length=256, null=True, verbose_name='')),
                ('measure_unit', models.CharField(blank=True, max_length=256, null=True, verbose_name='')),
                ('show_in_nav', models.IntegerField(blank=True, null=True, verbose_name='')),
                ('style', models.CharField(blank=True, max_length=256, null=True, verbose_name='')),
                ('is_show', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否显示')),
                ('grade', models.IntegerField(blank=True, null=True, verbose_name='')),
                ('filter_attr', models.IntegerField(blank=True, null=True, verbose_name='')),
                ('site_id', models.IntegerField(blank=True, default=0, null=True, verbose_name='')),
            ],
        ),
        # Order line items: snapshot of goods data at purchase time.
        migrations.CreateModel(
            name='SmOrderGoods',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order_id', models.IntegerField(blank=True, null=True, verbose_name='订单编号 开发用')),
                ('goods_id', models.IntegerField(blank=True, default=0, null=True, verbose_name='商品编号')),
                ('goods_name', models.CharField(blank=True, max_length=64, null=True, verbose_name='商品')),
                ('goods_sn', models.CharField(blank=True, max_length=64, null=True, verbose_name='货号')),
                ('goods_number', models.IntegerField(blank=True, default=1, null=True, verbose_name='数量')),
                ('market_price', models.FloatField(blank=True, null=True, verbose_name='市价')),
                ('goods_price', models.FloatField(blank=True, null=True, verbose_name='商品价')),
                ('goods_old_price', models.FloatField(blank=True, null=True, verbose_name='临时变量,保存因为折扣用的价格在里面,没怎么用')),
                ('goods_attr', models.CharField(blank=True, max_length=1024, null=True, verbose_name='商品口属性')),
                ('send_number', models.IntegerField(blank=True, default=0, null=True, verbose_name='')),
                ('is_real', models.IntegerField(blank=True, default=1, null=True, verbose_name='')),
                ('parent_id', models.IntegerField(blank=True, null=True, verbose_name='优惠券')),
                ('is_gift', models.BooleanField(blank=True, default=False, null=True, verbose_name='是否赠品')),
                ('add_time', models.DateTimeField(auto_now_add=True)),
                ('is_active', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否显示')),
                ('goods_type', models.IntegerField(blank=True, null=True, verbose_name='商品类型,用于打折')),
                ('admin_id', models.IntegerField(blank=True, null=True, verbose_name='操作人')),
                ('shop_price', models.FloatField(blank=True, null=True, verbose_name='商品实际单价,等于折扣后的价格*数量')),
                ('bramd_id', models.IntegerField(blank=True, null=True, verbose_name='品牌ID')),
                ('bramd_name', models.CharField(blank=True, max_length=64, null=True, verbose_name='品牌')),
                ('comment_status', models.BooleanField(blank=True, default=True, null=True, verbose_name='是否评论')),
            ],
        ),
    ]
| 78.532967
| 127
| 0.621913
| 1,637
| 14,293
| 5.244349
| 0.141723
| 0.161444
| 0.206174
| 0.261153
| 0.855329
| 0.852301
| 0.842633
| 0.840769
| 0.797321
| 0.692487
| 0
| 0.010646
| 0.217939
| 14,293
| 181
| 128
| 78.966851
| 0.757381
| 0.003148
| 0
| 0.373563
| 1
| 0
| 0.126421
| 0.006107
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005747
| 0
| 0.028736
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
698ecde81b62aeaa9bf3fe5a0d960ad9b9475ac1
| 99,441
|
py
|
Python
|
source_code/icesatIO.py
|
carlos-alberto-silva/PhoREAL
|
4e582d6c4e2ddc1830dc2692ae136f9b5c80c9d8
|
[
"BSD-4-Clause-UC"
] | 2
|
2020-02-20T16:48:17.000Z
|
2021-01-09T17:42:23.000Z
|
source_code/icesatIO.py
|
dpoursanidis/PhoREAL
|
4e582d6c4e2ddc1830dc2692ae136f9b5c80c9d8
|
[
"BSD-4-Clause-UC"
] | null | null | null |
source_code/icesatIO.py
|
dpoursanidis/PhoREAL
|
4e582d6c4e2ddc1830dc2692ae136f9b5c80c9d8
|
[
"BSD-4-Clause-UC"
] | 2
|
2020-02-14T23:12:38.000Z
|
2020-03-24T19:57:56.000Z
|
# -*- coding: utf-8 -*-
"""
Script that provides basic I/O functionality for ATL03
Copyright 2019 Applied Research Laboratories, University of Texas at Austin
This package is free software; the copyright holder gives unlimited
permission to copy and/or distribute, with or without modification, as
long as this notice is preserved.
Authors:
Mike Alonzo
Eric Guenther
Date: September 20, 2019
"""
# Import modules
import os
import sys
import warnings
import csv
import numpy as np
import h5py
import laspy
from laspy.file import File
from scipy.io import loadmat
import simplekml
# Object for readKmlBounds function
class kmlStruct:
    """Container for truth-region bounds parsed from kmlBounds.txt.

    Holds, per region: name, lon/lat extents, and the header/truth
    file paths.  Values are stored verbatim — no conversion applied.
    """

    def __init__(self, regionName, lonMin, lonMax, latMin, latMax,
                 headerFilePath, truthFilePath):
        # Single tuple assignment keeps the field list visible at a glance.
        (self.regionName,
         self.lonMin, self.lonMax,
         self.latMin, self.latMax,
         self.headerFilePath, self.truthFilePath) = (
            regionName, lonMin, lonMax, latMin, latMax,
            headerFilePath, truthFilePath)
# Object for readHeaderFile function
class headerStruct:
    """Truth-tile header info returned by readHeaderMatFile.

    Coordinate metadata is kept as given; per-tile extents and tile
    names are stored as Nx1 column arrays (np.c_).
    """

    def __init__(self, coordType, zone, hemi, ellipsoid,
                 xmin, xmax, ymin, ymax, tileName):
        # Scalar coordinate metadata, stored verbatim.
        self.coordType = coordType
        self.zone = zone
        self.hemi = hemi
        self.ellipsoid = ellipsoid
        # Per-tile vectors, reshaped into column arrays.
        for attr, values in (('xmin', xmin), ('xmax', xmax),
                             ('ymin', ymin), ('ymax', ymax),
                             ('tileName', tileName)):
            setattr(self, attr, np.c_[values])
# Object for readLas function
class lasStruct:
    """Point-cloud data read from a .las file.

    Every field is stored as an Nx1 column array via np.c_ (headerData
    included, even though it is a single header object).
    """

    def __init__(self, x, y, z, classification, intensity, headerData):
        # Uniform treatment: each input becomes a column array attribute.
        for name, values in zip(
                ('x', 'y', 'z', 'classification', 'intensity', 'headerData'),
                (x, y, z, classification, intensity, headerData)):
            setattr(self, name, np.c_[values])
# Object for readGeoidFile function
class geoidStruct:
    """Geoid model grid: latitudes, longitudes and geoidal heights.

    All three fields are stored verbatim, with no reshaping.
    """

    def __init__(self, lats, lons, geoidalHeights):
        self.lats, self.lons, self.geoidalHeights = lats, lons, geoidalHeights
##### Function to read kmlBounds.txt
def readTruthRegionsTxtFile(kmlBoundsTextFile):
    """Parse kmlBounds.txt and return truth-region bounds as a kmlStruct.

    File layout (comma-separated): line 3 holds the header base path,
    line 4 the truth base path, lines 1-6 are otherwise comments, and
    each following line is: region, lonMin, lonMax, latMin, latMax,
    headerFile, truthFile.

    Parameters
    ----------
    kmlBoundsTextFile : str
        Path to the kmlBounds text file.

    Returns
    -------
    kmlStruct
        Parallel lists of region names, bounds and file paths.
    """
    # Output accumulators (parallel lists, one entry per region).
    regionName = []
    lonMin = []
    lonMax = []
    latMin = []
    latMax = []
    headerFilePath = []
    truthFilePath = []

    # Context manager closes the file even if parsing raises
    # (bug fix: the old open()/close() pair leaked on error).
    with open(kmlBoundsTextFile, 'r') as f:
        allLines = list(f)

    # Lines 3 and 4 (0-based 2 and 3) hold the base paths after the comma.
    headerPath = allLines[2].split(',')[1].strip()
    truthPath = allLines[3].split(',')[1].strip()

    # Data lines begin after the six comment lines.
    for line in allLines[6:]:
        # Bug fix: the old `if line:` was always true because each line
        # still carries its newline, so a trailing blank line crashed
        # on the fields[1] float conversion.
        if not line.strip():
            continue
        fields = line.split(',')
        regionName.append(fields[0].strip())
        lonMin.append(float(fields[1].strip()))
        lonMax.append(float(fields[2].strip()))
        latMin.append(float(fields[3].strip()))
        latMax.append(float(fields[4].strip()))
        headerFilePath.append(headerPath + fields[5].strip())
        truthFilePath.append(truthPath + fields[6].strip())

    # Bundle the parallel lists into the kmlStruct container.
    return kmlStruct(regionName, lonMin, lonMax, latMin, latMax,
                     headerFilePath, truthFilePath)
##### Function to read header truth .mat file
def readHeaderMatFile(headerFilePath):
    """Read a truth header .mat file and return its contents as a headerStruct.

    Parameters
    ----------
    headerFilePath : str
        Path to the header .mat file.

    Returns
    -------
    headerStruct
        Coordinate metadata plus per-tile x/y extents and tile names.
    """
    # Defaults so every field passed to headerStruct is defined regardless
    # of coordinate type (bug fix: `ellipsoid` was previously undefined in
    # the Lat/Lon branch, raising NameError at the headerStruct call).
    coordType = []
    zone = []
    hemi = []
    ellipsoid = []

    matData = loadmat(headerFilePath)
    # Hoist the repeated matData['headerData'][0] lookup.
    header = matData['headerData'][0]

    # Coordinate type flag from the header struct: 1 = UTM, else Lat/Lon.
    coordNum = header[0][10][0][0]
    if coordNum == 1:
        coordType = 'UTM'
        # UTM zone, hemisphere and ellipsoid only exist for UTM headers.
        zone = str(header[0][7][0][0])
        hemi = header[0][8][0][0]
        ellipsoid = header[0][9][0][0]
    else:
        coordType = 'Lat/Lon'

    # Per-tile extents and names (one entry per truth tile).
    # NOTE(review): field positions 0-3 and 12 match the original
    # indexing — presumed xmin/xmax/ymin/ymax/tileName; confirm against
    # the .mat writer.
    numTiles = len(header)
    xmin = [header[i][0][0][0] for i in range(numTiles)]
    xmax = [header[i][1][0][0] for i in range(numTiles)]
    ymin = [header[i][2][0][0] for i in range(numTiles)]
    ymax = [header[i][3][0][0] for i in range(numTiles)]
    tileName = [header[i][12][0] for i in range(numTiles)]

    return headerStruct(coordType, zone, hemi, ellipsoid,
                        xmin, xmax, ymin, ymax, tileName)
##### Functions to read ATL03 .h5 files
def readAtl03H5(in_file03, fieldName, label):
    """Read one per-photon dataset from an ATL03 .h5 file.

    Parameters
    ----------
    in_file03 : str
        Path to the ATL03 HDF5 file.
    fieldName : str
        Dataset name under <label>/heights/ (e.g. lat_ph, lon_ph, h_ph,
        delta_time, crossing_time, signal_conf_ph).
    label : str
        Beam/ground-track group name.

    Returns
    -------
    numpy.ndarray or list
        The dataset values, or [] when the file/dataset is unavailable.
    """
    # Default so the return value is always defined (bug fix: an open
    # failure previously raised UnboundLocalError at `return dataOut`).
    dataOut = []

    if not os.path.isfile(in_file03):
        print('ATL03 file does not exist')
        return dataOut

    try:
        # Context manager guarantees the HDF5 handle is closed.
        with h5py.File(in_file03, 'r') as f:
            dsname = ''.join([label, '/heights/', fieldName])
            if dsname in f:
                dataOut = np.array(f[dsname])
                # signal_conf_ph is 2-D; keep only its first column
                # (matches the original behavior).
                if fieldName == 'signal_conf_ph':
                    dataOut = dataOut[:, 0]
    except Exception as e:
        print('Python message: %s\n' % e)

    return dataOut
##### Functions to read ATL08 .h5 files
def readAtl08H5(in_file08, fieldName, label):
    """Read one land-segment dataset from an ATL08 .h5 file.

    Parameters
    ----------
    in_file08 : str
        Path to the ATL08 HDF5 file.
    fieldName : str
        Dataset path under the beam group (e.g. /land_segments/latitude,
        /land_segments/terrain/h_te_best_fit).
    label : str
        Beam/ground-track group name.

    Returns
    -------
    numpy.ndarray or list
        The dataset values, or [] when the file/dataset is unavailable.
    """
    # Default so the return value is always defined (bug fix: an open
    # failure previously raised UnboundLocalError at `return dataOut`).
    dataOut = []

    if not os.path.isfile(in_file08):
        # Bug fix: the message previously said 'ATL03' in this ATL08 reader.
        print('ATL08 file does not exist')
        return dataOut

    try:
        # Context manager guarantees the HDF5 handle is closed.
        with h5py.File(in_file08, 'r') as f:
            dsname = ''.join([label, fieldName])
            if dsname in f:
                dataOut = np.array(f[dsname])
    except Exception as e:
        print('Python message: %s\n' % e)

    return dataOut
##### Function to read ATL03 .h5 files for mapping
def readAtl03DataMapping(in_file03, label):
    """Read the geolocation mapping arrays from an ATL03 .h5 file.

    Parameters
    ----------
    in_file03 : str
        Path to the ATL03 HDF5 file.
    label : str
        Beam/ground-track group name.

    Returns
    -------
    tuple
        (ph_index_beg, segment_id); each is a numpy array, or [] when
        the file or dataset is unavailable.
    """
    # Defaults so both outputs are always defined (bug fix: on an open
    # failure the old code fell through and used an undefined handle `f`,
    # raising NameError; the handle was also never closed on error).
    ph_index_beg = []
    segment_id = []

    if not os.path.isfile(in_file03):
        print('File does not exist')
        return ph_index_beg, segment_id

    try:
        # Context manager guarantees the HDF5 handle is closed.
        with h5py.File(in_file03, 'r') as f:
            dsname = label + '/geolocation/segment_id'
            if dsname in f:
                segment_id = np.array(f[dsname])

            dsname = label + '/geolocation/ph_index_beg'
            if dsname in f:
                ph_index_beg = np.array(f[dsname])
    except Exception as e:
        print('Python message: %s\n' % e)

    return ph_index_beg, segment_id
##### Function to read ATL08 .h5 files for mapping
def readAtl08DataMapping(in_file08, label):
    """Read the signal-photon classification arrays from an ATL08 .h5 file.

    Parameters
    ----------
    in_file08 : str
        Path to the ATL08 HDF5 file.
    label : str
        Beam/ground-track group name.

    Returns
    -------
    tuple
        (classed_pc_indx, classed_pc_flag, seg08_id); each is a numpy
        array, or [] when the file or dataset is unavailable.
    """
    # Defaults so all outputs are always defined (bug fix: on an open
    # failure the old code fell through and used an undefined handle `f`,
    # raising NameError; the handle was also never closed on error).
    classed_pc_indx = []
    classed_pc_flag = []
    seg08_id = []

    if not os.path.isfile(in_file08):
        print('File does not exist')
        return classed_pc_indx, classed_pc_flag, seg08_id

    try:
        # Context manager guarantees the HDF5 handle is closed.
        with h5py.File(in_file08, 'r') as f:
            dsname = label + '/signal_photons/classed_pc_indx'
            if dsname in f:
                classed_pc_indx = np.array(f[dsname])

            dsname = label + '/signal_photons/classed_pc_flag'
            if dsname in f:
                classed_pc_flag = np.array(f[dsname])

            dsname = label + '/signal_photons/ph_segment_id'
            if dsname in f:
                seg08_id = np.array(f[dsname])
    except Exception as e:
        print('Python message: %s\n' % e)

    return classed_pc_indx, classed_pc_flag, seg08_id
##### Function to read geoid .mat file
def readGeoidFile(geoidFile):
    """Load a geoid model .mat file and return it as a geoidStruct.

    Parameters
    ----------
    geoidFile : str
        Path to the geoid .mat file containing a 'geoid' struct.

    Returns
    -------
    geoidStruct
        Latitudes, longitudes and geoidal heights from the model grid.
    """
    # Normalize the path, then let scipy parse the MATLAB file.
    geoidData = loadmat(os.path.normpath(geoidFile))['geoid']

    # Each MATLAB struct field arrives wrapped in a 1x1 object array.
    lats = geoidData['lats'][0][0]
    lons = geoidData['lons'][0][0]
    geoidalHeights = geoidData['geoidalHeight'][0][0]

    return geoidStruct(lats, lons, geoidalHeights)
##### Function to read .las files
def readLas(lasFilePath):
    """Read a .las point cloud and return its contents as a lasStruct.

    Parameters
    ----------
    lasFilePath : str
        Path to the .las file.

    Returns
    -------
    lasStruct
        x/y/z coordinates, classification, intensity and header data.
    """
    # The with-block closes the .las file once the fields are copied out.
    with File(lasFilePath, mode = 'r') as lasFile:
        lasData = lasStruct(lasFile.x,
                            lasFile.y,
                            lasFile.z,
                            lasFile.classification,
                            lasFile.intensity,
                            lasFile.header)
    return lasData
##### Functions to write .las files
def selectwkt(proj,hemi=None,zone=None):
if proj.lower() == "utm":
if zone:
zone = str(zone)
if hemi.lower() == "n":
if zone == "1":
wkt = b'''PROJCS["WGS 84 / UTM zone 1N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32601"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "2":
wkt = b'''PROJCS["WGS 84 / UTM zone 2N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32602"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "3":
wkt == b'''ROJCS["WGS 84 / UTM zone 3N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32603"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "4":
wkt = b'''PROJCS["WGS 84 / UTM zone 4N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32604"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "5":
wkt = b'''PROJCS["WGS 84 / UTM zone 5N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32605"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "6":
wkt = b'''PROJCS["WGS 84 / UTM zone 6N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32606"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "7":
wkt = b'''PROJCS["WGS 84 / UTM zone 7N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32607"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "8":
wkt = b'''PROJCS["WGS 84 / UTM zone 8N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32608"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "9":
wkt = b'''PROJCS["WGS 84 / UTM zone 9N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32609"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "10":
wkt = b'''PROJCS["WGS 84 / UTM zone 10N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32610"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "11":
wkt = b'''PROJCS["WGS 84 / UTM zone 11N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32611"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "12":
wkt = b'''PROJCS["WGS 84 / UTM zone 12N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32612"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "13":
wkt = b'''PROJCS["WGS 84 / UTM zone 13N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32613"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "14":
wkt = b'''PROJCS["WGS 84 / UTM zone 14N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32614"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "15":
wkt = b'''PROJCS["WGS 84 / UTM zone 15N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32615"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "16":
wkt = b'''PROJCS["WGS 84 / UTM zone 16N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32616"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "17":
wkt = b'''PROJCS["WGS 84 / UTM zone 17N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32617"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "18":
wkt = b'''PROJCS["WGS 84 / UTM zone 18N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32618"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "19":
wkt = b'''PROJCS["WGS 84 / UTM zone 19N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32619"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "20":
wkt = b'''PROJCS["WGS 84 / UTM zone 20N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32620"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "21":
wkt = b'''PROJCS["WGS 84 / UTM zone 21N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32621"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "22":
wkt = b'''PROJCS["WGS 84 / UTM zone 22N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32622"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "23":
wkt = b'''PROJCS["WGS 84 / UTM zone 23N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32623"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "24":
wkt = b'''PROJCS["WGS 84 / UTM zone 24N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32624"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "25":
wkt = b'''PROJCS["WGS 84 / UTM zone 25N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32625"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "26":
wkt = b'''PROJCS["WGS 84 / UTM zone 26N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32626"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "27":
wkt = b'''PROJCS["WGS 84 / UTM zone 27N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32627"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "28":
wkt = b'''PROJCS["WGS 84 / UTM zone 28N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32628"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "29":
wkt = b'''PROJCS["WGS 84 / UTM zone 29N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32629"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "30":
wkt = b'''PROJCS["WGS 84 / UTM zone 30N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32630"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "31":
wkt = b'''PROJCS["WGS 84 / UTM zone 31N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32631"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "32":
wkt = b'''PROJCS["WGS 84 / UTM zone 32N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32632"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "33":
wkt = b'''PROJCS["WGS 84 / UTM zone 33N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32633"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "34":
wkt = b'''PROJCS["WGS 84 / UTM zone 34N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32634"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "35":
wkt = b'''PROJCS["WGS 84 / UTM zone 35N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32635"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "36":
wkt = b'''PROJCS["WGS 84 / UTM zone 36N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32636"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "37":
wkt = b'''PROJCS["WGS 84 / UTM zone 37N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32637"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "38":
wkt = b'''PROJCS["WGS 84 / UTM zone 38N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32638"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "39":
wkt = b'''PROJCS["WGS 84 / UTM zone 39N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32639"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "40":
wkt = b'''PROJCS["WGS 84 / UTM zone 40N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32640"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "41":
wkt = b'''PROJCS["WGS 84 / UTM zone 41N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32641"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "42":
wkt = b'''PROJCS["WGS 84 / UTM zone 42N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32642"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "43":
wkt = b'''PROJCS["WGS 84 / UTM zone 43N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32643"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "44":
wkt = b'''PROJCS["WGS 84 / UTM zone 44N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32644"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "45":
wkt = b'''PROJCS["WGS 84 / UTM zone 45N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32645"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "46":
wkt = b'''PROJCS["WGS 84 / UTM zone 46N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32646"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "47":
wkt = b'''PROJCS["WGS 84 / UTM zone 46N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32646"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "48":
wkt = b'''PROJCS["WGS 84 / UTM zone 48N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32648"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "49":
wkt = b'''PROJCS["WGS 84 / UTM zone 49N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32649"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "50":
wkt = b'''PROJCS["WGS 84 / UTM zone 50N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32650"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "51":
wkt = b'''PROJCS["WGS 84 / UTM zone 51N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32651"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "52":
wkt = b'''PROJCS["WGS 84 / UTM zone 52N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32652"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "53":
wkt = b'''PROJCS["WGS 84 / UTM zone 53N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32653"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "54":
wkt = b'''PROJCS["WGS 84 / UTM zone 54N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32654"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "55":
wkt = b'''PROJCS["WGS 84 / UTM zone 55N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32655"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "56":
wkt = b'''PROJCS["WGS 84 / UTM zone 56N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32656"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "57":
wkt = b'''PROJCS["WGS 84 / UTM zone 57N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32657"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "58":
wkt = b'''PROJCS["WGS 84 / UTM zone 58N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32658"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "59":
wkt = b'''PROJCS["WGS 84 / UTM zone 59N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32659"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "60":
wkt = b'''PROJCS["WGS 84 / UTM zone 60N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],AUTHORITY["EPSG","32660"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
# Southern-hemisphere branch of the same hard-coded WKT lookup (the ladder
# continues past this excerpt; zones up to 46 are visible here).
# Pattern per branch: for zone z,
#   central_meridian = 6*z - 183   (e.g. zone 1 -> -177, zone 31 -> 3)
#   EPSG code        = 32700 + z   (e.g. "32701" ... "32746")
#   false_northing   = 10000000    (southern hemisphere offset)
# All other WKT text is identical across zones.
# NOTE(review): this table could be generated from one template, but the
# full ladder (including the northern branches above) would have to be
# refactored together — left byte-identical here.
elif hemi.lower() == "s":
if zone == "1":
wkt = b'''PROJCS["WGS 84 / UTM zone 1S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32701"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "2":
wkt = b'''PROJCS["WGS 84 / UTM zone 2S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32702"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "3":
wkt = b'''PROJCS["WGS 84 / UTM zone 3S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32703"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "4":
wkt = b'''PROJCS["WGS 84 / UTM zone 4S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32704"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "5":
wkt = b'''PROJCS["WGS 84 / UTM zone 5S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32705"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "6":
wkt = b'''PROJCS["WGS 84 / UTM zone 6S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32706"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "7":
wkt = b'''PROJCS["WGS 84 / UTM zone 7S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32707"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "8":
wkt = b'''PROJCS["WGS 84 / UTM zone 8S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32708"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "9":
wkt = b'''PROJCS["WGS 84 / UTM zone 9S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32709"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "10":
wkt = b'''PROJCS["WGS 84 / UTM zone 10S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32710"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "11":
wkt = b'''PROJCS["WGS 84 / UTM zone 11S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32711"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "12":
wkt = b'''PROJCS["WGS 84 / UTM zone 12S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32712"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "13":
wkt = b'''PROJCS["WGS 84 / UTM zone 13S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32713"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "14":
wkt = b'''PROJCS["WGS 84 / UTM zone 14S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32714"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "15":
wkt = b'''PROJCS["WGS 84 / UTM zone 15S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32715"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "16":
wkt = b'''PROJCS["WGS 84 / UTM zone 16S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32716"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "17":
wkt = b'''PROJCS["WGS 84 / UTM zone 17S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32717"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "18":
wkt = b'''PROJCS["WGS 84 / UTM zone 18S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32718"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "19":
wkt = b'''PROJCS["WGS 84 / UTM zone 19S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32719"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "20":
wkt = b'''PROJCS["WGS 84 / UTM zone 20S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32720"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "21":
wkt = b'''PROJCS["WGS 84 / UTM zone 21S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32721"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "22":
wkt = b'''PROJCS["WGS 84 / UTM zone 22S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32722"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "23":
wkt = b'''PROJCS["WGS 84 / UTM zone 23S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32723"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "24":
wkt = b'''PROJCS["WGS 84 / UTM zone 24S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32724"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "25":
wkt = b'''PROJCS["WGS 84 / UTM zone 25S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32725"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "26":
wkt = b'''PROJCS["WGS 84 / UTM zone 26S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32726"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "27":
wkt = b'''PROJCS["WGS 84 / UTM zone 27S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32727"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "28":
wkt = b'''PROJCS["WGS 84 / UTM zone 28S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32728"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "29":
wkt = b'''PROJCS["WGS 84 / UTM zone 29S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32729"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "30":
wkt = b'''PROJCS["WGS 84 / UTM zone 30S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32730"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "31":
wkt = b'''PROJCS["WGS 84 / UTM zone 31S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32731"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "32":
wkt = b'''PROJCS["WGS 84 / UTM zone 32S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",9],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32732"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "33":
wkt = b'''PROJCS["WGS 84 / UTM zone 33S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32733"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "34":
wkt = b'''PROJCS["WGS 84 / UTM zone 34S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",21],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32734"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "35":
wkt = b'''PROJCS["WGS 84 / UTM zone 35S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32735"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "36":
wkt = b'''PROJCS["WGS 84 / UTM zone 36S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",33],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32736"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "37":
wkt = b'''PROJCS["WGS 84 / UTM zone 37S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",39],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32737"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "38":
wkt = b'''PROJCS["WGS 84 / UTM zone 38S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",45],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32738"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "39":
wkt = b'''PROJCS["WGS 84 / UTM zone 39S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32739"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "40":
wkt = b'''PROJCS["WGS 84 / UTM zone 40S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",57],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32740"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "41":
wkt = b'''PROJCS["WGS 84 / UTM zone 41S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",63],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32741"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "42":
wkt = b'''PROJCS["WGS 84 / UTM zone 42S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",69],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32742"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "43":
wkt = b'''PROJCS["WGS 84 / UTM zone 43S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32743"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "44":
wkt = b'''PROJCS["WGS 84 / UTM zone 44S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",81],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32744"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "45":
wkt = b'''PROJCS["WGS 84 / UTM zone 45S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32745"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "46":
wkt = b'''PROJCS["WGS 84 / UTM zone 46S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",93],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32746"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "47":
wkt = b'''PROJCS["WGS 84 / UTM zone 47S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",99],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32747"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "48":
wkt = b'''PROJCS["WGS 84 / UTM zone 48S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",105],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32748"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "49":
wkt = b'''PROJCS["WGS 84 / UTM zone 49S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",111],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32749"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "50":
wkt = b'''PROJCS["WGS 84 / UTM zone 50S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32750"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "51":
wkt = b'''PROJCS["WGS 84 / UTM zone 51S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",123],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32751"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "52":
wkt = b'''PROJCS["WGS 84 / UTM zone 52S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",129],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32752"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "53":
wkt = b'''PROJCS["WGS 84 / UTM zone 53S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",135],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32753"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "54":
wkt = b'''PROJCS["WGS 84 / UTM zone 54S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32754"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "55":
wkt = b'''PROJCS["WGS 84 / UTM zone 55S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",147],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32755"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "56":
wkt = b'''PROJCS["WGS 84 / UTM zone 56S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",153],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32756"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "57":
wkt = b'''PROJCS["WGS 84 / UTM zone 57S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",159],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32757"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "58":
wkt = b'''PROJCS["WGS 84 / UTM zone 58S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",165],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32758"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "59":
wkt = b'''PROJCS["WGS 84 / UTM zone 59S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",171],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32759"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif zone == "60":
wkt = b'''PROJCS["WGS 84 / UTM zone 60S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",177],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],AUTHORITY["EPSG","32760"],AXIS["Easting",EAST],AXIS["Northing",NORTH]]'''
elif proj.lower() == "wgs84":
wkt = b'''GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'''
else:
print("No defined Projected Coordinate System Selected")
return wkt
def writeLas(xx,yy,zz,proj,output_file,classification,intensity,hemi=None,zone=None):
    """Write x/y/z point data to a LAS file carrying a WKT spatial reference.

    Parameters
    ----------
    xx, yy, zz : array-like
        Point coordinates.
    proj : str
        Projection name understood by selectwkt (e.g. "utm" or "wgs84").
    output_file : str
        Path of the LAS file to create.
    classification : array-like or None
        Optional per-point classification codes.
    intensity : array-like or None
        Optional per-point intensity values.
    hemi, zone : str, optional
        UTM hemisphere and zone, forwarded to selectwkt.
    """
    wkt_bytes = selectwkt(proj, hemi, zone)
    # Record the coordinate system as an OGC WKT VLR (record id 2112).
    crs_vlr = laspy.header.VLR(user_id="LASF_Projection",
                               record_id=2112,
                               VLR_body=wkt_bytes,
                               description="OGC Coordinate System WKT")
    # Build a fresh header and open the output LAS file for writing.
    las_header = laspy.header.Header()
    las_header.file_sig = 'LASF'
    las_file = laspy.file.File(output_file, mode="w", header=las_header)
    las_file.header.vlrs = [crs_vlr]
    las_file.header.set_wkt = 1

    def _axis_scale(lo, hi):
        # Unit scale for a degenerate (zero-extent) axis; otherwise spread
        # the axis range over the available integer resolution.
        return 1 if hi == lo else (hi - lo) / 100000000

    x_lo, y_lo, z_lo = np.min(xx), np.min(yy), np.min(zz)
    # Offsets anchor the integer point records at the data minimum.
    las_file.header.offset = [x_lo, y_lo, z_lo]
    las_file.header.scale = [_axis_scale(x_lo, np.max(xx)),
                             _axis_scale(y_lo, np.max(yy)),
                             _axis_scale(z_lo, np.max(zz))]
    # Write coordinates, plus classification/intensity when provided.
    las_file.x = xx
    las_file.y = yy
    las_file.z = zz
    if classification is not None:
        las_file.raw_classification = classification
    if intensity is not None:
        las_file.intensity = intensity
    las_file.close()
##### Functions to write .kml files
def writeKml(lat, lon, time, kmlName):
    """Write a KML ground track: a blue line plus one marker per point.

    Parameters
    ----------
    lat, lon : array-like
        Per-point coordinates; each element is indexable (2-D column arrays).
    time : array-like
        Per-point times in seconds, used as marker labels.
    kmlName : str
        Path of the KML file to save.
    """
    # simplekml can emit warnings; silence them unless -W was given.
    if not sys.warnoptions:
        warnings.simplefilter("ignore")
    doc = simplekml.Kml()
    # Ground-track line (KML wants lon/lat ordering).
    track = doc.newlinestring(name='test',
                              coords=tuple(np.column_stack((lon, lat))))
    track.extrude = 1
    track.altitudemode = simplekml.AltitudeMode.clamptoground
    track.style.linestyle.width = 5
    track.style.linestyle.color = simplekml.Color.blue
    # One shared style for all time markers.
    marker_style = simplekml.Style()
    marker_style.iconstyle.icon.href = 'http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png'
    # Drop a labeled marker at every point along the track.
    for idx in range(len(lon)):
        label = str(np.round(time[idx], 1)) + ' sec'
        marker = doc.newpoint(name=label, coords=[(lon[idx][0], lat[idx][0])])
        marker.style = marker_style
    doc.save(kmlName)
def writeArrayToCSV(csv_out, namelist, datalist):
    """Write parallel 1-D data arrays as columns of a CSV file.

    Parameters
    ----------
    csv_out : str
        Path of the CSV file to create.
    namelist : sequence of str
        Column header names, written as the first row.
    datalist : sequence of array-like
        One array per column; all must share the same length.

    Notes
    -----
    If ``datalist`` is empty, no file is written (preserves the original
    behavior). Fixes from the original: the dangling ``csvFile.close``
    (missing parentheses, a no-op — the ``with`` block already closes the
    file) and a redundant ``len(datalist) > 0`` check were removed, and
    stacking all columns at once also yields a proper 2-D array for a
    single input column, which ``writerows`` requires.
    """
    if not datalist:
        return
    # column_stack promotes even a single 1-D array to shape (n, 1),
    # so writerows always receives rows (the pairwise stacking in the
    # original left a lone column 1-D and writerows would fail on it).
    in_data = np.column_stack(datalist)
    with open(csv_out, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(namelist)
        writer.writerows(in_data)
def writeATL08toCSV(in_file08, groundtrack, csv_out):
    """Export selected ATL08 land-segment fields for one ground track to CSV.

    Reads each dataset with readAtl08H5 and writes the columns via
    writeArrayToCSV under human-readable header names.
    """
    # (CSV column name, ATL08 dataset path) in output column order.
    fields = [
        ('Delta Time', '/land_segments/delta_time'),
        ('Latitude', '/land_segments/latitude'),
        ('Longitude', '/land_segments/longitude'),
        ('Absolute Max Canopy Height', '/land_segments/canopy/h_max_canopy_abs'),
        ('Best Fit Ground Height', '/land_segments/terrain/h_te_best_fit'),
        ('Median Ground Height', '/land_segments/terrain/h_te_median'),
    ]
    namelist = [name for name, _ in fields]
    datalist = [readAtl08H5(in_file08, path, groundtrack) for _, path in fields]
    writeArrayToCSV(csv_out, namelist, datalist)
def writeATL03toCSV(in_file03, groundtrack, csv_out):
    """Export per-photon ATL03 fields for one ground track to CSV.

    Reads each dataset with readAtl03H5 and writes the columns via
    writeArrayToCSV under human-readable header names.
    """
    # (CSV column name, ATL03 dataset name) in output column order.
    fields = [
        ('Delta Time', 'delta_time'),
        ('Latitude', 'lat_ph'),
        ('Longitude', 'lon_ph'),
        ('Height', 'h_ph'),
        ('Signal Confidence', 'signal_conf_ph'),
    ]
    namelist = [name for name, _ in fields]
    datalist = [readAtl03H5(in_file03, key, groundtrack) for _, key in fields]
    writeArrayToCSV(csv_out, namelist, datalist)
def createHTMLChart(ytrack, h_ph, classification,
                    classification_list=[1, 2, 3], output_folder="",
                    in_file03_name="ATL03", blank="Viewer_blank.html"):
    """Generate paged HTML photon viewers from a blank template.

    Photons whose classification is in ``classification_list`` are injected
    as JavaScript data records (ytrack/zheight/color) after the template's
    ``var data = [`` line, at most 20000 photons per output file.

    Parameters
    ----------
    ytrack : array-like
        Along-track distance per photon.
    h_ph : array-like
        Photon height.
    classification : numpy array
        Per-photon class codes (0=unclassified, 1=ground, 2=canopy,
        3=top of canopy — per the color table below).
    classification_list : list of int
        Classes to include. BUG FIX: this parameter was previously
        overwritten with a hard-coded ``[1, 2, 3]``, so callers could not
        change it; it is now honored (the default is never mutated).
    output_folder : str
        Present for interface compatibility; not used when building the
        output path (unchanged from the original behavior).
    in_file03_name : str
        Name fragment used in the output file names.
    blank : str
        Path to the blank HTML viewer template.
    """
    # Class code -> marker color used by the HTML viewer.
    color_map = {0: '#C2C5CC', 1: '#D2B826', 2: '#45811A', 3: '#85F334'}
    total_photons = 20000
    classification_to_list = classification.tolist()
    # Count only the photons whose class the caller asked for.
    num_returns = sum(classification_to_list.count(c)
                      for c in classification_list)
    file_returns = int(np.ceil(num_returns / total_photons))
    start = 0
    for i in range(file_returns):
        end = min(start + total_photons, num_returns)
        viewer_output = ("Viewer_" + in_file03_name + "_" + str(start) + "_to_"
                         + str(end) + ".html")
        # Read the blank template once (the original copied it to the
        # output, re-read that copy, and rewrote it — same final file).
        with open(blank) as template:
            buf = template.readlines()
        with open(viewer_output, "w") as out_file:
            for line in buf:
                if line == "var data = [\n":
                    # Append one JS record per selected photon in this page.
                    pieces = [line]
                    j = i * total_photons
                    while j < end:
                        cls = classification[j]
                        if cls in color_map and cls in classification_list:
                            pieces.append("{ytrack: " + str(ytrack[j])
                                          + ", zheight: " + str(h_ph[j])
                                          + ", color: '" + color_map[cls]
                                          + "' }, \n")
                        j += 1
                    line = "".join(pieces)
                out_file.write(line)
        start = start + total_photons
| 116.441452
| 658
| 0.662342
| 12,313
| 99,441
| 5.25136
| 0.063185
| 0.169889
| 0.03919
| 0.029941
| 0.874977
| 0.868017
| 0.858831
| 0.851361
| 0.846536
| 0.843551
| 0
| 0.143091
| 0.125874
| 99,441
| 854
| 659
| 116.441452
| 0.600778
| 0.029123
| 0
| 0.316781
| 0
| 0.207192
| 0.775245
| 0.707412
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032534
| false
| 0
| 0.017123
| 0
| 0.071918
| 0.015411
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0e0f19ffc2331f8d79b2f20a09ef4e866816f3e8
| 15,773
|
py
|
Python
|
tests/test_dask_image/test_ndmorph/test_ndmorph.py
|
jakirkham/dask-image
|
f28094d4ee705b015f3437ed60a9b0495031b156
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dask_image/test_ndmorph/test_ndmorph.py
|
jakirkham/dask-image
|
f28094d4ee705b015f3437ed60a9b0495031b156
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dask_image/test_ndmorph/test_ndmorph.py
|
jakirkham/dask-image
|
f28094d4ee705b015f3437ed60a9b0495031b156
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import scipy.ndimage as spnd
import dask.array as da
import dask.array.utils as dau
import dask_image.ndmorph as da_ndm
@pytest.mark.parametrize(
    "funcname",
    ["binary_closing", "binary_dilation", "binary_erosion", "binary_opening"]
)
@pytest.mark.parametrize(
    "err_type, input, structure, origin",
    [
        # 2-D input with a 1-D structure.
        (RuntimeError,
         da.ones([1, 2], dtype=bool, chunks=(1, 2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         0),
        # Scalar float instead of an array structure.
        (TypeError,
         da.arange(2, dtype=bool, chunks=(2,)),
         2.0,
         0),
        # Non-integer origin.
        (TypeError,
         da.ones([2], dtype=bool, chunks=(2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         0.0),
    ]
)
def test_errs_binary_ops(funcname, err_type, input, structure, origin):
    """Each binary morphology op rejects malformed structure/origin args."""
    op = getattr(da_ndm, funcname)
    with pytest.raises(err_type):
        op(input, structure=structure, origin=origin)
@pytest.mark.parametrize(
    "funcname",
    ["binary_closing", "binary_dilation", "binary_erosion", "binary_opening"]
)
@pytest.mark.parametrize(
    "err_type, input, structure, iterations, origin",
    [
        # Non-integer iterations value.
        (TypeError,
         da.ones([2], dtype=bool, chunks=(2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         1.0,
         0),
        # iterations == 0 is expected to raise NotImplementedError.
        (NotImplementedError,
         da.ones([2], dtype=bool, chunks=(2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         0,
         0),
    ]
)
def test_errs_binary_ops_iter(funcname, err_type, input, structure,
                              iterations, origin):
    """Each binary morphology op validates its iterations argument."""
    op = getattr(da_ndm, funcname)
    with pytest.raises(err_type):
        op(input, structure=structure, iterations=iterations, origin=origin)
@pytest.mark.parametrize(
    "funcname",
    ["binary_closing", "binary_dilation", "binary_erosion", "binary_opening"]
)
@pytest.mark.parametrize(
    "err_type, input, structure, iterations, mask, border_value, origin"
    ", brute_force",
    [
        # Mask with one more dimension than the input.
        (RuntimeError,
         da.ones([2], dtype=bool, chunks=(2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         1,
         da.arange(2, dtype=bool, chunks=(2,))[None],
         0,
         0,
         False),
        # Non-integer border_value.
        (TypeError,
         da.ones([2], dtype=bool, chunks=(2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         1,
         da.arange(2, dtype=bool, chunks=(2,)),
         2.0,
         0,
         False),
        # brute_force=True is expected to raise NotImplementedError.
        (NotImplementedError,
         da.ones([2], dtype=bool, chunks=(2,)),
         da.arange(2, dtype=bool, chunks=(2,)),
         1,
         da.arange(2, dtype=bool, chunks=(2,)),
         0,
         0,
         True),
    ]
)
def test_errs_binary_ops_expanded(funcname, err_type, input, structure,
                                  iterations, mask, border_value, origin,
                                  brute_force):
    """Each binary morphology op validates mask/border_value/brute_force."""
    op = getattr(da_ndm, funcname)
    with pytest.raises(err_type):
        op(input, structure=structure, iterations=iterations, mask=mask,
           border_value=border_value, origin=origin, brute_force=brute_force)
@pytest.mark.parametrize(
    "funcname",
    [
        "binary_closing",
        "binary_dilation",
        "binary_erosion",
        "binary_opening",
    ]
)
@pytest.mark.parametrize(
    "input, structure, origin",
    [
        # Default structuring element (structure=None), origin 0.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            None,
            0
        ),
        # Explicit 3x3 all-True structure, origin 0.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            0
        ),
        # 3x3 structure with origin shifted by +1.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            1
        ),
        # 3x3 structure with origin shifted by -1.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            -1
        ),
    ]
)
def test_binary_ops(funcname,
                    input,
                    structure,
                    origin):
    """The dask-image binary morphology result must equal scipy.ndimage's
    for the same input, structure, and origin."""
    da_func = getattr(da_ndm, funcname)
    sp_func = getattr(spnd, funcname)
    da_result = da_func(
        input,
        structure=structure,
        origin=origin
    )
    sp_result = sp_func(
        input,
        structure=structure,
        origin=origin
    )
    # assert_eq compares both values and array metadata.
    dau.assert_eq(sp_result, da_result)
@pytest.mark.parametrize(
    "funcname",
    [
        "binary_closing",
        "binary_dilation",
        "binary_erosion",
        "binary_opening",
    ]
)
@pytest.mark.parametrize(
    "input, structure, iterations, origin",
    [
        # Three iterations of a 3x3 all-True structure, origin 0.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            3,
            0
        ),
        # Three iterations with origin shifted by +1.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            3,
            1
        ),
    ]
)
def test_binary_ops_iter(funcname,
                         input,
                         structure,
                         iterations,
                         origin):
    """Iterated dask-image binary morphology must equal scipy.ndimage's
    result for the same iterations count."""
    da_func = getattr(da_ndm, funcname)
    sp_func = getattr(spnd, funcname)
    da_result = da_func(
        input,
        structure=structure,
        iterations=iterations,
        origin=origin
    )
    sp_result = sp_func(
        input,
        structure=structure,
        iterations=iterations,
        origin=origin
    )
    # assert_eq compares both values and array metadata.
    dau.assert_eq(sp_result, da_result)
@pytest.mark.parametrize(
    "funcname",
    [
        "binary_closing",
        "binary_dilation",
        "binary_erosion",
        "binary_opening",
    ]
)
@pytest.mark.parametrize(
    "input, structure, iterations, mask, border_value, origin, brute_force",
    [
        # No mask, border_value=1.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            1,
            None,
            1,
            0,
            False
        ),
        # Central rectangular mask, one iteration.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            1,
            da.from_array(
                np.array(
                    [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            0,
            0,
            False
        ),
        # Central rectangular mask, three iterations.
        (
            da.from_array(
                np.array(
                    [[0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0],
                     [0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
                     [1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1],
                     [1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0],
                     [0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1],
                     [0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
                     [1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            np.ones([3, 3], dtype=bool),
            3,
            da.from_array(
                np.array(
                    [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
                    dtype=bool
                ),
                chunks=(5, 6)
            ),
            0,
            0,
            False
        ),
    ]
)
def test_binary_ops_expanded(funcname,
                             input,
                             structure,
                             iterations,
                             mask,
                             border_value,
                             origin,
                             brute_force):
    """Dask-image binary morphology with the full keyword set (mask,
    border_value, origin, brute_force) must equal scipy.ndimage's result."""
    da_func = getattr(da_ndm, funcname)
    sp_func = getattr(spnd, funcname)
    da_result = da_func(
        input,
        structure=structure,
        iterations=iterations,
        mask=mask,
        border_value=border_value,
        origin=origin,
        brute_force=brute_force
    )
    sp_result = sp_func(
        input,
        structure=structure,
        iterations=iterations,
        mask=mask,
        border_value=border_value,
        origin=origin,
        brute_force=brute_force
    )
    # assert_eq compares both values and array metadata.
    dau.assert_eq(sp_result, da_result)
| 30.274472
| 76
| 0.32283
| 2,109
| 15,773
| 2.353248
| 0.036036
| 0.22567
| 0.239976
| 0.246625
| 0.933306
| 0.918396
| 0.912553
| 0.907113
| 0.907113
| 0.869837
| 0
| 0.188568
| 0.51753
| 15,773
| 520
| 77
| 30.332692
| 0.463601
| 0.002663
| 0
| 0.8
| 0
| 0
| 0.043105
| 0
| 0
| 0
| 0
| 0
| 0.006122
| 1
| 0.012245
| false
| 0
| 0.014286
| 0
| 0.026531
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
38534ca19054395ae02d1e9c3c8cd593669073ed
| 3,393
|
py
|
Python
|
ModernWarfare/XAssets/kiosks.py
|
Mario-Kart-Felix/Hyde
|
eaaae703cf04752caa009a93c3928959de8047bd
|
[
"MIT"
] | 14
|
2020-05-01T13:54:48.000Z
|
2022-02-14T21:58:35.000Z
|
ModernWarfare/XAssets/kiosks.py
|
Mario-Kart-Felix/Hyde
|
eaaae703cf04752caa009a93c3928959de8047bd
|
[
"MIT"
] | 3
|
2020-06-09T19:24:48.000Z
|
2021-03-06T11:34:04.000Z
|
ModernWarfare/XAssets/kiosks.py
|
Mario-Kart-Felix/Hyde
|
eaaae703cf04752caa009a93c3928959de8047bd
|
[
"MIT"
] | 4
|
2020-05-24T19:15:08.000Z
|
2022-02-04T21:20:29.000Z
|
import logging
from typing import Any, Dict, List, TypedDict
from utility import Utility
log: logging.Logger = logging.getLogger(__name__)
class BRKioskPurchases(TypedDict):
    """Structure of mp/brkioskpurchases.csv"""
    index: int
    type: str
    ref: str  # internal reference id (exposed downstream as "altId")
    cost: int  # price; multiplied by 100 when compiled
    title: str  # localization key for the display name
    desc: str  # localization key for the description
    icon: str
    tabNum: int
    slotLabel: str  # localization key for the slot label
    fireSaleDiscount: int
    perkDiscount: int
    overrideFileOnly: int  # bool
class BRKioskPurchasesTruckWar(TypedDict):
    """Structure of mp/brkioskpurchases_truckwar.csv"""
    index: int
    type: str
    ref: str  # internal reference id (exposed downstream as "altId")
    cost: int  # price; multiplied by 100 when compiled
    title: str  # localization key for the display name
    desc: str  # localization key for the description
    icon: str
    tabNum: int
    slotLabel: str  # localization key for the slot label
    fireSaleDiscount: int
    perkDiscount: int
    overrideFileOnly: int  # bool
class KioskBR:
    """Battle Royale Kiosk Purchases XAssets."""

    def Compile(self: Any) -> None:
        """Compile the Battle Royale Kiosk Purchases XAssets."""
        items: List[Dict[str, Any]] = []

        items = KioskBR.IDs(self, items)

        Utility.WriteFile(self, f"{self.eXAssets}/kioskBR.json", items)

        log.info(f"Compiled {len(items):,} Kiosk Items (BR)")

    def IDs(self: Any, items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Compile the mp/brkioskpurchases.csv XAsset."""
        # BUG FIX: rows were previously typed with BRKioskPurchasesTruckWar;
        # this CSV is described by BRKioskPurchases (which was otherwise
        # unused). The two schemas share identical fields, so runtime
        # behavior is unchanged — this restores the intended typing.
        ids: List[Dict[str, Any]] = Utility.ReadCSV(
            self,
            f"{self.iXAssets}/mp/brkioskpurchases.csv",
            BRKioskPurchases,
        )

        if ids is None:
            return items

        for entry in ids:
            items.append(
                {
                    "altId": entry.get("ref"),
                    "name": self.localize.get(entry.get("title")),
                    "description": self.localize.get(entry.get("desc")),
                    "label": self.localize.get(entry.get("slotLabel")),
                    "type": entry.get("type"),
                    # CSV cost is scaled by 100 for the exported price.
                    "price": entry.get("cost") * 100,
                    "image": entry.get("icon"),
                }
            )

        return items
class KioskBRTruck:
    """Battle Royale Kiosk Purchases (Truck War) XAssets."""

    def Compile(self: Any) -> None:
        """Compile the Battle Royale Kiosk Purchases (Truck War) XAssets."""
        items: List[Dict[str, Any]] = KioskBRTruck.IDs(self, [])

        Utility.WriteFile(self, f"{self.eXAssets}/kioskBRTruck.json", items)

        log.info(f"Compiled {len(items):,} Kiosk Items (BR Truck War)")

    def IDs(self: Any, items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Compile the mp/brkioskpurchases_truckwar.csv XAsset."""
        rows: List[Dict[str, Any]] = Utility.ReadCSV(
            self,
            f"{self.iXAssets}/mp/brkioskpurchases_truckwar.csv",
            BRKioskPurchasesTruckWar,
        )

        if rows is None:
            return items

        # Translate each CSV row into the exported item dictionary,
        # resolving localization keys to display strings.
        for row in rows:
            items.append(
                {
                    "altId": row.get("ref"),
                    "name": self.localize.get(row.get("title")),
                    "description": self.localize.get(row.get("desc")),
                    "label": self.localize.get(row.get("slotLabel")),
                    "type": row.get("type"),
                    "price": row.get("cost") * 100,
                    "image": row.get("icon"),
                }
            )

        return items
| 27.144
| 76
| 0.547893
| 360
| 3,393
| 5.144444
| 0.216667
| 0.060475
| 0.047516
| 0.060475
| 0.866091
| 0.811015
| 0.811015
| 0.75486
| 0.75486
| 0.706264
| 0
| 0.002606
| 0.321544
| 3,393
| 124
| 77
| 27.362903
| 0.801911
| 0.116121
| 0
| 0.714286
| 0
| 0
| 0.129185
| 0.050051
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.035714
| 0
| 0.464286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
387151169fb56903951502857173f6a5897a61dc
| 237
|
py
|
Python
|
deep_sdf/__init__.py
|
TMats/DeepSDF
|
eafda02220ff25c6e2afd16697a9c5bc526ff2d1
|
[
"MIT"
] | 889
|
2019-07-08T22:50:39.000Z
|
2022-03-29T11:50:08.000Z
|
deep_sdf/__init__.py
|
TMats/DeepSDF
|
eafda02220ff25c6e2afd16697a9c5bc526ff2d1
|
[
"MIT"
] | 90
|
2019-07-09T20:33:09.000Z
|
2022-03-29T08:59:37.000Z
|
deep_sdf/__init__.py
|
TMats/DeepSDF
|
eafda02220ff25c6e2afd16697a9c5bc526ff2d1
|
[
"MIT"
] | 214
|
2019-07-09T00:55:04.000Z
|
2022-03-19T03:10:48.000Z
|
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.

"""Top-level package namespace for deep_sdf.

Re-exports the public names of the data, mesh, chamfer-metric, utils and
workspace submodules so callers can access them as ``deep_sdf.<name>``.
"""

from deep_sdf.data import *
from deep_sdf.mesh import *
from deep_sdf.metrics.chamfer import *
from deep_sdf.utils import *
from deep_sdf.workspace import *
| 26.333333
| 55
| 0.78903
| 37
| 237
| 4.918919
| 0.567568
| 0.21978
| 0.302198
| 0.373626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024155
| 0.126582
| 237
| 8
| 56
| 29.625
| 0.855072
| 0.316456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
387627a7bf29d326bf5bdd9daa17c169f3e87a20
| 9,579
|
py
|
Python
|
bcn/solvers.py
|
a378ec99/bcn
|
51c8984d74dc5945bf6296137caffadb91c142ff
|
[
"MIT"
] | null | null | null |
bcn/solvers.py
|
a378ec99/bcn
|
51c8984d74dc5945bf6296137caffadb91c142ff
|
[
"MIT"
] | null | null | null |
bcn/solvers.py
|
a378ec99/bcn
|
51c8984d74dc5945bf6296137caffadb91c142ff
|
[
"MIT"
] | null | null | null |
"""Solvers for matrix recovery.
Note
----
Currently only supports conjugate gradient methods.
"""
from __future__ import division, absolute_import
import numpy as np
from pymanopt import Problem
from pymanopt.solvers import ConjugateGradient, SteepestDescent
from pymanopt.manifolds import FixedRankEmbedded
class ConjugateGradientSolver(object):
    """Conjugate-gradient solver for low-rank matrix recovery on a
    fixed-rank embedded manifold."""

    def __init__(self, mixed, cost_func, guess_func, rank, n_restarts=10, guess_noise_amplitude=5.0, maxiter=1000, maxtime=100, mingradnorm=1e-12, minstepsize=1e-12, n_retries_svd=10, verbosity=2):
        """Solver for matrix recovery.

        Parameters
        ----------
        mixed : numpy.ndarray, shape=(n_samples, n_features)
            Corrupted signal to be cleaned.
        cost_func : func
            Cost function based on linear operators A and targets y.
        guess_func : func
            Guess function that guesses an initial point for the solver to start optimizing at.
        rank : int
            Rank of the matrix to be recovered and of the initial guess.
        n_restarts : int
            Number of restarts of the solver with a fresh initial guess.
        guess_noise_amplitude : float
            Noise amplitude for the random low-rank initial guess.
        maxiter : int
            Maximum number of iterations of solver.
        maxtime : int
            Maximum run time of solver in seconds.
        mingradnorm : float
            Minimal gradient norm of solver (before stopping).
        minstepsize : int
            Minimal step size of solver (before stopping).
        n_retries_svd : int
            Number of retries when LinAlgError.
        verbosity : int, values=(0, 1, 2)
            Higher verbosity means more information printed.
        """
        self.mixed = mixed
        self.shape = self.mixed.shape
        self.guess_func = guess_func
        self.cost_func = cost_func
        self.rank = rank
        self.n_restarts = n_restarts
        self.guess_noise_amplitude = guess_noise_amplitude
        self.manifold = FixedRankEmbedded(
            self.shape[0], self.shape[1], self.rank)
        self.problem = Problem(manifold=self.manifold,
                               cost=self.cost_func, verbosity=verbosity)
        self.maxiter = maxiter
        self.maxtime = maxtime
        self.mingradnorm = mingradnorm
        self.minstepsize = minstepsize
        # logverbosity=2 makes the solver also return an optimization log,
        # which solve() reads for the stopping reason and final cost.
        self.solver = ConjugateGradient(logverbosity=2, maxiter=self.maxiter, maxtime=self.maxtime,
                                        mingradnorm=self.mingradnorm, minstepsize=self.minstepsize)
        self.n_retries_svd = n_retries_svd

    def solve(self, guess):
        """Solve a matrix recovery problem based on the given constraints and an initial guess.

        Parameters
        ----------
        guess : tuple, values=(u, s, vt)
            Decomposed random low-rank matrix.

        Returns
        -------
        X : numpy.ndarray, shape=(n_samples, n_features)
            Solution of the recovery problem.
        stopping_reason : str
            Why the solver finished, e.g. out of time, out of steps, etc.
        final_cost : float
            Final value of the cost function.
        """
        # Retry on LinAlgError (the SVD inside the solver can fail to
        # converge).  `range` replaces the Python-2-only `xrange`, and the
        # for/else replaces the `worked == False` flag of the original.
        for _ in range(self.n_retries_svd):
            try:
                usvt, optlog = self.solver.solve(self.problem, x=guess)
                break
            except np.linalg.LinAlgError:
                continue
        else:
            # Every retry raised: give up.
            raise Exception('Not enough SVD restarts.')
        stopping_reason = optlog['stoppingreason']
        final_cost = optlog['final_values']['f(x)']
        # Recompose the full matrix from its (u, s, vt) factors.
        X = usvt[0].dot(np.diag(usvt[1])).dot(usvt[2])
        return X, stopping_reason, final_cost

    def recover(self):
        """Run the solver n_restarts times and pick the best run.

        Returns
        -------
        results : dict
            Results of the recovery with initial guess, estimated signal, estimated bias and final cost.
        """
        estimates, errors, guesses_X, guesses_usvt = [], [], [], []
        for _ in range(self.n_restarts):
            guess = self.guess_func(
                self.shape, self.rank, noise_amplitude=self.guess_noise_amplitude)
            X, stopping_reason, final_cost = self.solve(guess['usvt'])
            estimates.append(X)
            guesses_X.append(guess['X'])
            guesses_usvt.append(guess['usvt'])
            errors.append(final_cost)
        # The best restart is the one with the lowest final cost.
        index = np.argmin(errors)
        error = errors[index]
        estimated_bias = estimates[index]
        results = {'guess_X': guesses_X[index],
                   'guess_usvt': guesses_usvt[index],
                   'estimated_bias': estimated_bias,
                   'estimated_signal': self.mixed - estimated_bias,
                   'final_cost': error}
        return results
class SteepestDescentSolver(object):
    """Steepest-descent solver for low-rank matrix recovery on a
    fixed-rank embedded manifold."""

    def __init__(self, mixed, cost_func, guess_func, rank, n_restarts=10, guess_noise_amplitude=5.0, maxiter=1000, maxtime=100, mingradnorm=1e-12, minstepsize=1e-12, n_retries_svd=10, verbosity=2):
        """Solver for matrix recovery.

        Parameters
        ----------
        mixed : numpy.ndarray, shape=(n_samples, n_features)
            Corrupted signal to be cleaned.
        cost_func : func
            Cost function based on linear operators A and targets y.
        guess_func : func
            Guess function that guesses an initial point for the solver to start optimizing at.
        rank : int
            Rank of the matrix to be recovered and of the initial guess.
        n_restarts : int
            Number of restarts of the solver with a fresh initial guess.
        guess_noise_amplitude : float
            Noise amplitude for the random low-rank initial guess.
        maxiter : int
            Maximum number of iterations of solver.
        maxtime : int
            Maximum run time of solver in seconds.
        mingradnorm : float
            Minimal gradient norm of solver (before stopping).
        minstepsize : int
            Minimal step size of solver (before stopping).
        n_retries_svd : int
            Number of retries when LinAlgError.
        verbosity : int, values=(0, 1, 2)
            Higher verbosity means more information printed.
        """
        self.mixed = mixed
        self.shape = self.mixed.shape
        self.guess_func = guess_func
        self.cost_func = cost_func
        self.rank = rank
        self.n_restarts = n_restarts
        self.guess_noise_amplitude = guess_noise_amplitude
        self.manifold = FixedRankEmbedded(
            self.shape[0], self.shape[1], self.rank)
        self.problem = Problem(manifold=self.manifold,
                               cost=self.cost_func, verbosity=verbosity)
        self.maxiter = maxiter
        self.maxtime = maxtime
        self.mingradnorm = mingradnorm
        self.minstepsize = minstepsize
        # logverbosity=2 makes the solver also return an optimization log,
        # which solve() reads for the stopping reason and final cost.
        self.solver = SteepestDescent(logverbosity=2, maxiter=self.maxiter, maxtime=self.maxtime,
                                      mingradnorm=self.mingradnorm, minstepsize=self.minstepsize)
        self.n_retries_svd = n_retries_svd

    def solve(self, guess):
        """Solve a matrix recovery problem based on the given constraints and an initial guess.

        Parameters
        ----------
        guess : tuple, values=(u, s, vt)
            Decomposed random low-rank matrix.

        Returns
        -------
        X : numpy.ndarray, shape=(n_samples, n_features)
            Solution of the recovery problem.
        stopping_reason : str
            Why the solver finished, e.g. out of time, out of steps, etc.
        final_cost : float
            Final value of the cost function.
        """
        # Retry on LinAlgError (the SVD inside the solver can fail to
        # converge).  `range` replaces the Python-2-only `xrange`, and the
        # for/else replaces the `worked == False` flag of the original.
        for _ in range(self.n_retries_svd):
            try:
                usvt, optlog = self.solver.solve(self.problem, x=guess)
                break
            except np.linalg.LinAlgError:
                continue
        else:
            # Every retry raised: give up.
            raise Exception('Not enough SVD restarts.')
        stopping_reason = optlog['stoppingreason']
        final_cost = optlog['final_values']['f(x)']
        # Recompose the full matrix from its (u, s, vt) factors.
        X = usvt[0].dot(np.diag(usvt[1])).dot(usvt[2])
        return X, stopping_reason, final_cost

    def recover(self):
        """Run the solver n_restarts times and pick the best run.

        Returns
        -------
        results : dict
            Results of the recovery with initial guess, estimated signal, estimated bias and final cost.
        """
        estimates, errors, guesses_X, guesses_usvt = [], [], [], []
        for _ in range(self.n_restarts):
            guess = self.guess_func(
                self.shape, self.rank, noise_amplitude=self.guess_noise_amplitude)
            X, stopping_reason, final_cost = self.solve(guess['usvt'])
            estimates.append(X)
            guesses_X.append(guess['X'])
            guesses_usvt.append(guess['usvt'])
            errors.append(final_cost)
        # The best restart is the one with the lowest final cost.
        index = np.argmin(errors)
        error = errors[index]
        estimated_bias = estimates[index]
        results = {'guess_X': guesses_X[index],
                   'guess_usvt': guesses_usvt[index],
                   'estimated_bias': estimated_bias,
                   'estimated_signal': self.mixed - estimated_bias,
                   'final_cost': error}
        return results
| 39.582645
| 197
| 0.60142
| 1,108
| 9,579
| 5.063177
| 0.161552
| 0.034938
| 0.033868
| 0.012834
| 0.93975
| 0.93975
| 0.93975
| 0.93975
| 0.93975
| 0.93975
| 0
| 0.008848
| 0.315691
| 9,579
| 242
| 198
| 39.582645
| 0.846987
| 0.341476
| 0
| 0.923077
| 0
| 0
| 0.043431
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051282
| false
| 0
| 0.042735
| 0
| 0.145299
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
387a2b855279bc596b39818974a170c5434bd0ed
| 18,366
|
py
|
Python
|
IndiaFacts/skill_env/ask_sdk_model/services/reminder_management/reminder_management_service_client.py
|
JeremieBou/AlexaSkills
|
5d81c2c6e7ca8068e3850e8f2f2ebb3d9cca87cd
|
[
"Apache-2.0"
] | 1
|
2019-02-04T21:07:06.000Z
|
2019-02-04T21:07:06.000Z
|
ask_sdk_model/services/reminder_management/reminder_management_service_client.py
|
GR3C0/VLC_skill
|
ae0c85058ef9f12828c7ffccb9ad65baeb81f881
|
[
"MIT"
] | 9
|
2020-03-24T16:32:57.000Z
|
2022-03-11T23:37:22.000Z
|
IndiaFacts/skill_env/ask_sdk_model/services/reminder_management/reminder_management_service_client.py
|
JeremieBou/AlexaSkills
|
5d81c2c6e7ca8068e3850e8f2f2ebb3d9cca87cd
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import sys
import os
import re
import six
import typing
from ask_sdk_model.services.base_service_client import BaseServiceClient
from ask_sdk_model.services.api_configuration import ApiConfiguration
from ask_sdk_model.services.service_client_response import ServiceClientResponse
if typing.TYPE_CHECKING:
from typing import Dict, List, Union
from datetime import datetime
from ask_sdk_model.services.reminder_management.get_reminders_response import GetRemindersResponse
from ask_sdk_model.services.reminder_management.get_reminder_response import GetReminderResponse
from ask_sdk_model.services.reminder_management.reminder_response import ReminderResponse
from ask_sdk_model.services.reminder_management.reminder_request import ReminderRequest
from ask_sdk_model.services.reminder_management.error import Error
class ReminderManagementServiceClient(BaseServiceClient):
"""ServiceClient for calling the ReminderManagementService APIs.
:param api_configuration: Instance of :py:class:`ask_sdk_model.services.api_configuration.ApiConfiguration`
:type api_configuration: ask_sdk_model.services.api_configuration.ApiConfiguration
"""
def __init__(self, api_configuration):
    # type: (ApiConfiguration) -> None
    """
    :param api_configuration: Instance of :py:class:`ask_sdk_model.services.api_configuration.ApiConfiguration`
    :type api_configuration: ask_sdk_model.services.api_configuration.ApiConfiguration
    """
    # All HTTP/serialization/auth plumbing lives in BaseServiceClient;
    # this client only builds per-operation request parameters.
    super(ReminderManagementServiceClient, self).__init__(api_configuration)
def delete_reminder(self, alert_token, **kwargs):
    # type: (str) -> Union[Error]
    """Delete the single reminder identified by ``alert_token``.

    :param alert_token: (required)
    :type alert_token: str
    :rtype: None
    """
    operation_name = "delete_reminder"
    params = locals()
    # Fold any extra keyword arguments into the parameter dict.
    params.update(params.pop('kwargs'))

    # verify the required parameter 'alert_token' is set
    if params.get('alert_token') is None:
        raise ValueError(
            "Missing the required parameter `alert_token` when calling `" + operation_name + "`")

    resource_path = '/v1/alerts/reminders/{alertToken}'.replace('{format}', 'json')
    path_params = {}
    if 'alert_token' in params:
        path_params['alertToken'] = params['alert_token']

    query_params = []
    body_params = None
    header_params = [('Content-type', 'application/json')]
    # Authentication setting
    header_params.append(("Authorization", "Bearer " + self._authorization_value))

    error_definitions = [
        ServiceClientResponse(response_type=None, status_code=200, message="Success"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=401, message="UserAuthenticationException. Request is not authorized/authenticated e.g. If customer does not have permission to create a reminder."),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=429, message="RateExceededException e.g. When the skill is throttled for exceeding the max rate"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=500, message="Internal Server Error"),
    ]

    # A successful delete carries no payload, so nothing is returned.
    self.invoke(
        method="DELETE",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type=None)
def get_reminder(self, alert_token, **kwargs):
    # type: (str) -> Union[GetReminderResponse, Error]
    """Fetch the single reminder identified by ``alert_token``.

    :param alert_token: (required)
    :type alert_token: str
    :rtype: Union[GetReminderResponse, Error]
    """
    operation_name = "get_reminder"
    params = locals()
    # Fold any extra keyword arguments into the parameter dict.
    params.update(params.pop('kwargs'))

    # verify the required parameter 'alert_token' is set
    if params.get('alert_token') is None:
        raise ValueError(
            "Missing the required parameter `alert_token` when calling `" + operation_name + "`")

    resource_path = '/v1/alerts/reminders/{alertToken}'.replace('{format}', 'json')
    path_params = {}
    if 'alert_token' in params:
        path_params['alertToken'] = params['alert_token']

    query_params = []
    body_params = None
    header_params = [('Content-type', 'application/json')]
    # Authentication setting
    header_params.append(("Authorization", "Bearer " + self._authorization_value))

    error_definitions = [
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.get_reminder_response.GetReminderResponse", status_code=200, message="Success"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=401, message="UserAuthenticationException. Request is not authorized/authenticated e.g. If customer does not have permission to create a reminder."),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=429, message="RateExceededException e.g. When the skill is throttled for exceeding the max rate"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=500, message="Internal Server Error"),
    ]

    return self.invoke(
        method="GET",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type="ask_sdk_model.services.reminder_management.get_reminder_response.GetReminderResponse")
def update_reminder(self, alert_token, reminder_request, **kwargs):
    # type: (str, ReminderRequest) -> Union[ReminderResponse, Error]
    """Update the reminder identified by ``alert_token``.

    :param alert_token: (required)
    :type alert_token: str
    :param reminder_request: (required)
    :type reminder_request: ask_sdk_model.services.reminder_management.reminder_request.ReminderRequest
    :rtype: Union[ReminderResponse, Error]
    """
    operation_name = "update_reminder"
    params = locals()
    # Fold any extra keyword arguments into the parameter dict.
    params.update(params.pop('kwargs'))

    # verify the required parameter 'alert_token' is set
    if params.get('alert_token') is None:
        raise ValueError(
            "Missing the required parameter `alert_token` when calling `" + operation_name + "`")
    # verify the required parameter 'reminder_request' is set
    if params.get('reminder_request') is None:
        raise ValueError(
            "Missing the required parameter `reminder_request` when calling `" + operation_name + "`")

    resource_path = '/v1/alerts/reminders/{alertToken}'.replace('{format}', 'json')
    path_params = {}
    if 'alert_token' in params:
        path_params['alertToken'] = params['alert_token']

    query_params = []
    body_params = None
    if 'reminder_request' in params:
        # The reminder payload is sent as the request body.
        body_params = params['reminder_request']
    header_params = [('Content-type', 'application/json')]
    # Authentication setting
    header_params.append(("Authorization", "Bearer " + self._authorization_value))

    error_definitions = [
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.reminder_response.ReminderResponse", status_code=200, message="Success"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=400, message="Bad Request"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=404, message="NotFoundException e.g. Retured when reminder is not found"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=409, message="UserAuthenticationException. Request is not authorized/authenticated e.g. If customer does not have permission to create a reminder."),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=429, message="RateExceededException e.g. When the skill is throttled for exceeding the max rate"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=500, message="Internal Server Error"),
    ]

    return self.invoke(
        method="PUT",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type="ask_sdk_model.services.reminder_management.reminder_response.ReminderResponse")
def delete_reminders(self, **kwargs):
    # type: () -> Union[Error]
    """Delete all reminders created by the calling skill.

    :rtype: None
    """
    operation_name = "delete_reminders"
    params = locals()
    # Fold any extra keyword arguments into the parameter dict.
    params.update(params.pop('kwargs'))

    resource_path = '/v1/alerts/reminders/'.replace('{format}', 'json')
    path_params = {}
    query_params = []
    body_params = None
    header_params = [('Content-type', 'application/json')]
    # Authentication setting
    header_params.append(("Authorization", "Bearer " + self._authorization_value))

    error_definitions = [
        ServiceClientResponse(response_type=None, status_code=200, message="Success"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=401, message="UserAuthenticationException. Request is not authorized/authenticated e.g. If customer does not have permission to create a reminder."),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=429, message="RateExceededException e.g. When the skill is throttled for exceeding the max rate"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=500, message="Internal Server Error"),
    ]

    # A successful delete carries no payload, so nothing is returned.
    self.invoke(
        method="DELETE",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type=None)
def get_reminders(self, **kwargs):
    # type: () -> Union[GetRemindersResponse, Error]
    """Fetch all reminders created by the calling skill.

    :rtype: Union[GetRemindersResponse, Error]
    """
    operation_name = "get_reminders"
    params = locals()
    # Fold any extra keyword arguments into the parameter dict.
    params.update(params.pop('kwargs'))

    resource_path = '/v1/alerts/reminders/'.replace('{format}', 'json')
    path_params = {}
    query_params = []
    body_params = None
    header_params = [('Content-type', 'application/json')]
    # Authentication setting
    header_params.append(("Authorization", "Bearer " + self._authorization_value))

    error_definitions = [
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.get_reminders_response.GetRemindersResponse", status_code=200, message="Success"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=401, message="UserAuthenticationException. Request is not authorized/authenticated e.g. If customer does not have permission to create a reminder."),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=429, message="RateExceededException e.g. When the skill is throttled for exceeding the max rate"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=500, message="Internal Server Error"),
    ]

    return self.invoke(
        method="GET",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type="ask_sdk_model.services.reminder_management.get_reminders_response.GetRemindersResponse")
def create_reminder(self, reminder_request, **kwargs):
    # type: (ReminderRequest) -> Union[ReminderResponse, Error]
    """Create a new reminder on behalf of the calling skill.

    Issues a POST to the reminders collection endpoint with the given
    reminder payload, authenticated with the client's bearer token.

    :param reminder_request: (required)
    :type reminder_request: ask_sdk_model.services.reminder_management.reminder_request.ReminderRequest
    :raises ValueError: if ``reminder_request`` is missing or None.
    :rtype: Union[ReminderResponse, Error]
    """
    operation_name = "create_reminder"
    params = locals()
    # Fold any extra keyword arguments into the parameter map.
    params.update(params.pop('kwargs'))

    # verify the required parameter 'reminder_request' is set
    if params.get('reminder_request') is None:
        raise ValueError(
            "Missing the required parameter `reminder_request` when calling `" + operation_name + "`")

    resource_path = '/v1/alerts/reminders/'.replace('{format}', 'json')
    path_params = {}
    query_params = []
    body_params = params.get('reminder_request')

    header_params = [('Content-type', 'application/json')]
    # Authentication setting: per-user bearer token supplied at construction.
    header_params.append(("Authorization", "Bearer " + self._authorization_value))

    # Map HTTP status codes to the response/error models to deserialize into.
    error_definitions = [
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.reminder_response.ReminderResponse", status_code=200, message="Success"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=400, message="Bad Request"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=403, message="Forbidden"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=429, message="RateExceededException e.g. When the skill is throttled for exceeding the max rate"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=500, message="Internal Server Error"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=503, message="Service Unavailable"),
        ServiceClientResponse(response_type="ask_sdk_model.services.reminder_management.error.Error", status_code=504, message="Gateway Timeout"),
    ]

    return self.invoke(
        method="POST",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type="ask_sdk_model.services.reminder_management.reminder_response.ReminderResponse")
| 51.016667
| 288
| 0.707176
| 2,029
| 18,366
| 6.163627
| 0.103499
| 0.02159
| 0.039581
| 0.068367
| 0.884775
| 0.873021
| 0.870222
| 0.860947
| 0.845274
| 0.8324
| 0
| 0.006946
| 0.200425
| 18,366
| 359
| 289
| 51.158774
| 0.844671
| 0.148971
| 0
| 0.811659
| 0
| 0.022422
| 0.302997
| 0.159513
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03139
| false
| 0
| 0.067265
| 0
| 0.121076
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a09700a53378403a34f8c196125564bb6e4c5ae
| 131,510
|
py
|
Python
|
ports/rp2/natmod/picantepy/tileset_dungeon.py
|
mrgkingcs/micropython
|
1d7f16a98edd7e505c4a230623ab2268637d0848
|
[
"MIT"
] | null | null | null |
ports/rp2/natmod/picantepy/tileset_dungeon.py
|
mrgkingcs/micropython
|
1d7f16a98edd7e505c4a230623ab2268637d0848
|
[
"MIT"
] | null | null | null |
ports/rp2/natmod/picantepy/tileset_dungeon.py
|
mrgkingcs/micropython
|
1d7f16a98edd7e505c4a230623ab2268637d0848
|
[
"MIT"
] | null | null | null |
tileset_dungeon = [
bytes(b'\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85'),
bytes(b'\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xb2\xb2\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85
\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x85\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x8a\x8a\x85\x85\x85\xb2\xb2\xb2\xb2\x85\x85\x8a\x8a\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae
\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85
\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\xa9\xa9\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xa9\x80\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x80\x85\x85\x85\x85\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x85\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa9\xa4\x80\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x80\x80\x80\x80\x80\x80\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xa4\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xcd\xa9\xa4\xa4\x80\x80\x80\xa4\xa4\xa4\xa4\x8a\xae\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xa9\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xa9\xa9\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\x80\x80\xa4\xa4\xa9\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xcd\xcd\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd
\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa9\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\x80\xa4\x80\x80\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\xcd\xa9\xa9\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa4\xa9\xa9\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa9\xa4\xa4\xa4\x80\x80\x80\xa4\xa4\xa9\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa4\xa4\x80\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\x8a\xae\x8a\xa9\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa9\xcd\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\x80\x80\x80\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85
\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x8a'),
bytes(b'\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae
\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85'),
bytes(b'\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xb2\xb2\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85
\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x85\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x8a\x8a\x85\x85\x85\xb2\xb2\xb2\xb2\x85\x85\x8a\x8a\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85
\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae'),
bytes(b'\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\xa9\xa9\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xa9\x80\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x80\x85\x85\x85\x85\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x85\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa9\xa4\x80\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x80\x80\x80\x80\x80\x80\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xa4\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xcd\xa9\xa4\xa4\x80\x80\x80\xa4\xa4\xa4\xa4\x8a\xae\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xa9\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xa9\xa9\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\x80\x80\xa4\xa4\xa9\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xcd\xcd\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd
\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa9\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\x80\xa4\x80\x80\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\xcd\xa9\xa9\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa4\xa9\xa9\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa9\xa4\xa4\xa4\x80\x80\x80\xa4\xa4\xa9\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa4\xa4\x80\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\x8a\xae\x8a\xa9\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa9\xcd\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\x80\x80\x80\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xae\xae\xae\x8a'),
bytes(b'\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae
\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a'),
bytes(b'\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xb2\xb2\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85
\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x85\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x8a\x8a\x85\x85\x85\xb2\xb2\xb2\xb2\x85\x85\x8a\x8a\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae'),
bytes(b'\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\xa9\xa9\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xa9\x80\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x80\x85\x85\x85\x85\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x85\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa9\xa4\x80\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x80\x80\x80\x80\x80\x80\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xa4\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xcd\xa9\xa4\xa4\x80\x80\x80\xa4\xa4\xa4\xa4\x8a\xae\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xa9\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xa9\xa9\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\x80\x80\xa4\xa4\xa9\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xcd\xcd\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa9\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd
\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\x80\xa4\x80\x80\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\xcd\xa9\xa9\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa4\xa9\xa9\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa9\xa4\xa4\xa4\x80\x80\x80\xa4\xa4\xa9\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa4\xa4\x80\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\x8a\xae\x8a\xa9\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa9\xcd\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\x80\x80\x80\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xae\xae\xae\x8a'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x85\x85\x85\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae
\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x85\x85\x85\x8a\xae\x8a\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x85\x85\x85\x85\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a\xae\xae\xae\xae\xae\xae
\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x85\x8a\x8a'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x85\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xb2\xb2\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x85\x8a\x8a\x85\x85\x85\x85\x8a\x85\x85\x85\x85
\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\x8a\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\x8a\x8a\x8a\x8a\xb2\xb2\xb2\xae\xae\xae\xae\xae\xae\xae\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xb2\xae\xae\xae\xae\xb2\xb2\xb2\xb2\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x85\xb2\xb2\xb2\xb2\xb2\xb2\xb2\x85\x85\x85\x8a\x8a\x85\x85\x85\xb2\xb2\xb2\xb2\x85\x85\x8a\x8a\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x8a\x8a\x85\x8a\x85\x8a\x8a\x85\x8a\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x8a\x85\x85\x85\x85\x8a\x85\x8a\x8a\xae\x8a\x8a\x8a\x85\x85\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae
\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae'),
bytes(b'\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\x8a\x8a\x8a\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x8a\x85\x85\x85\x85\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80'),
bytes(b'\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x8a\x85\x85\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\x85\x85\x85\xae\xae\xae\x85\x85\x85\x85\x85\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x85\x85\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x85\x85\x85\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\x80\x80\x80\x80\x80\x80
\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\xae\xae\xae\xae\xae'),
bytes(b'\x8a\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xae\xae\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\xae\xae\xae\xae\xae\xae\xae\xae\x8a\x8a\x8a\xae\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\xa9\xa9\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\xa9\x80\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x80\x85\x85\x85\x85\x80\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x80\x85\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa9\xa4\x80\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x80\x80\x80\x80\x80\x80\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\xae\x8a\xa4\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xcd\xa9\xa4\xa4\x80\x80\x80\xa4\xa4\xa4\xa4\x8a\xae\x8a\x8a\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa9\xa9\xa9\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa4\xa9\xcd\xcd\xa9\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xa9\xa9\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\x80\x80\xa4\xa4\xa9\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\xcd\xcd\xa9\xa4\x80\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa4\xa9\xcd\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa4\xcd\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa9\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\xa4\xa4\xa4\x80\xa4\xa9\xa9\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xa4\xa4\xa4\xa9\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa4\x80\xa4\x80\x80\xa4\xa4\xa4\xa9\xa9\xcd\xcd\xcd
\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa9\xcd\xcd\xa9\xa9\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa4\xa9\xa9\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa4\x80\xa4\xa4\xa4\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa9\xa4\xa4\xa4\x80\x80\x80\xa4\xa4\xa9\x8a\x8a\x8a\x8a\x8a\x8a\x8a\xae\xae\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x80\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\xa4\x80\xa4\xa4\x8a\xae\xae\x8a\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\x80\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa9\xa4\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\xa9\xcd\xcd\xcd\xcd\xcd\xa9\xa4\xa4\xa4\xa9\xa4\xa4\x80\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\xae\xae\x8a\x8a\xae\x8a\xae\x8a\xa9\xcd\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xcd\xcd\xcd\xa9\xa4\xa4\x8a\xae\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xcd\xcd\xcd\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa4\xa4\xa4\xa9\xa9\xa9\xcd\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\x8a\x8a\x8a\xa9\xa9\xa4\xa4\xa4\xa4\xa4\x80\x80\xa4\xa9\xa9\xa9\xa4\xa4\xa4\xa4\xa4\xa9\xa9\xa4\xa4\x8a\x8a\x8a\xae\x8a\x8a\xae\xae\x8a\x8a\xa4\xa4\x80\x80\x80\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x80\x80\x80\x80\xa4\xa4\xa4\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\x8a\x8a\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x8a\x8a\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x8a\x85\x8a\x8a\x8a\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x85\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xae\xae\xae\x8a\x8a\xae\xae\xae\x8a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xae\xae\xae\x8a\x8a\x8a\xae\xae\x8a\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8a\xae\xae\x8a\x8a'),
]
| 3,757.428571
| 4,108
| 0.749076
| 32,834
| 131,510
| 3.000244
| 0.000396
| 0.688011
| 0.928505
| 1.115582
| 0.999858
| 0.999797
| 0.999797
| 0.999797
| 0.999736
| 0.999736
| 0
| 0.236272
| 0.000517
| 131,510
| 34
| 4,109
| 3,867.941176
| 0.513185
| 0
| 0
| 0
| 0
| 0.941176
| 0.996669
| 0.996669
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 16
|
2a34aa5b6b59a101a21f349665a4b537c8474882
| 34,582
|
py
|
Python
|
metal/models/batches_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
metal/models/batches_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | 3
|
2021-09-27T05:10:36.000Z
|
2021-09-27T06:10:57.000Z
|
metal/models/batches_api.py
|
displague/metal-python
|
96e64e9ac41025d85ff6f61693165e29e1c366db
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Metal API
This is the API for Equinix Metal. The API allows you to programmatically interact with all of your Equinix Metal resources, including devices, networks, addresses, organizations, projects, and your user account. The official API docs are hosted at <https://metal.equinix.com/developers/api>. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@equinixmetal.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal.api_client import ApiClient
from metal.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class BatchesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_device_batch(self, id, batch, **kwargs):  # noqa: E501
    """Create a devices batch  # noqa: E501

    Creates new devices in batch and provisions them in the datacenter.
    The batch payload carries the per-device options (operating system,
    features, facility / facilities selection including prioritized lists
    and `facility_diversity_level`, and `ip_addresses` configuration); see
    the Equinix Metal API docs for the full payload semantics.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_device_batch(id, batch, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param batch: Batches to create (required)
    :type batch: InstancesBatchCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: BatchesList
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialized payload (no status code / headers tuple).
    options = dict(kwargs, _return_http_data_only=True)
    return self.create_device_batch_with_http_info(id, batch, **options)  # noqa: E501
def create_device_batch_with_http_info(self, id, batch, **kwargs):  # noqa: E501
    """Create a devices batch  # noqa: E501

    Creates new devices in batch and provisions them in the datacenter.
    Type-specific options (e.g. `operating_system`), optional `features`,
    `facility` / `facilities` placement (including prioritized lists and
    `facility_diversity_level`) and `ip_addresses` configuration are all
    carried inside the batch payload; see the Equinix Metal API docs for
    the full payload semantics.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_device_batch_with_http_info(id, batch, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param batch: Batches to create (required)
    :type batch: InstancesBatchCreateInput
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(BatchesList, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() must be snapshotted before any other local name is
    # bound so that it contains exactly self, id, batch and the kwargs dict.
    local_var_params = locals()

    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'id',
        'batch'
    ]
    # Generic request-control options understood by ApiClient.call_api.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into
    # local_var_params so every option is addressable by name.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_device_batch" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
            local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `create_device_batch`")  # noqa: E501
    # verify the required parameter 'batch' is set
    if self.api_client.client_side_validation and ('batch' not in local_var_params or  # noqa: E501
            local_var_params['batch'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `batch` when calling `create_device_batch`")  # noqa: E501

    collection_formats = {}

    # 'id' is interpolated into the URL path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The batch object is serialized as the JSON request body.
    body_params = None
    if 'batch' in local_var_params:
        body_params = local_var_params['batch']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Expected response payload types, keyed by HTTP status code.
    response_types_map = {
        201: "BatchesList",
        401: "Error",
        403: "Error",
        404: "Error",
        422: "Error",
    }

    return self.api_client.call_api(
        '/projects/{id}/devices/batch', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def delete_batch(self, id, **kwargs):  # noqa: E501
    """Delete the Batch  # noqa: E501

    Deletes the Batch.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_batch(id, async_req=True)
    >>> result = thread.get()

    :param id: Batch UUID (required)
    :type id: str
    :param remove_associated_instances: Delete all instances created from this batch
    :type remove_associated_instances: bool
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialized payload (no status code / headers tuple).
    options = dict(kwargs, _return_http_data_only=True)
    return self.delete_batch_with_http_info(id, **options)  # noqa: E501
def delete_batch_with_http_info(self, id, **kwargs):  # noqa: E501
    """Delete the Batch  # noqa: E501

    Deletes the Batch.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_batch_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Batch UUID (required)
    :type id: str
    :param remove_associated_instances: Delete all instances created from this batch
    :type remove_associated_instances: bool
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: None
    """
    # NOTE: locals() must be snapshotted before any other local name is
    # bound so that it contains exactly self, id and the kwargs dict.
    local_var_params = locals()

    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'id',
        'remove_associated_instances'
    ]
    # Generic request-control options understood by ApiClient.call_api.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into
    # local_var_params so every option is addressable by name.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_batch" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
            local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `delete_batch`")  # noqa: E501

    collection_formats = {}

    # 'id' is interpolated into the URL path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Optional flag forwarded as a query-string parameter.
    query_params = []
    if 'remove_associated_instances' in local_var_params and local_var_params['remove_associated_instances'] is not None:  # noqa: E501
        query_params.append(('remove_associated_instances', local_var_params['remove_associated_instances']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Empty map: no response body is deserialized for this endpoint.
    response_types_map = {}

    return self.api_client.call_api(
        '/batches/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_batch_by_id(self, id, **kwargs):  # noqa: E501
    """Retrieve a Batch  # noqa: E501

    Returns a Batch  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_batch_by_id(id, async_req=True)
    >>> result = thread.get()

    :param id: Batch UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: Batch
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialized payload (no status code / headers tuple).
    options = dict(kwargs, _return_http_data_only=True)
    return self.find_batch_by_id_with_http_info(id, **options)  # noqa: E501
def find_batch_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Retrieve a Batch  # noqa: E501

    Returns a Batch  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_batch_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param id: Batch UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Batch, status_code(int), headers(HTTPHeaderDict))
    """
    # NOTE: locals() must be snapshotted before any other local name is
    # bound so that it contains exactly self, id and the kwargs dict.
    local_var_params = locals()

    # Endpoint-specific parameters accepted by this method.
    all_params = [
        'id',
        'include',
        'exclude'
    ]
    # Generic request-control options understood by ApiClient.call_api.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )

    # Reject unknown keyword arguments, then flatten kwargs into
    # local_var_params so every option is addressable by name.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_batch_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
            local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `find_batch_by_id`")  # noqa: E501

    collection_formats = {}

    # 'id' is interpolated into the URL path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # include/exclude are list-valued query parameters, serialized as
    # comma-separated values ('csv' collection format).
    query_params = []
    if 'include' in local_var_params and local_var_params['include'] is not None:  # noqa: E501
        query_params.append(('include', local_var_params['include']))  # noqa: E501
        collection_formats['include'] = 'csv'  # noqa: E501
    if 'exclude' in local_var_params and local_var_params['exclude'] is not None:  # noqa: E501
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501
        collection_formats['exclude'] = 'csv'  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['x_auth_token']  # noqa: E501

    # Expected response payload types, keyed by HTTP status code.
    response_types_map = {
        200: "Batch",
        401: "Error",
        404: "Error",
    }

    return self.api_client.call_api(
        '/batches/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_types_map=response_types_map,
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def find_batches_by_project(self, id, **kwargs):  # noqa: E501
    """Retrieve all batches by project  # noqa: E501

    Returns all batches for the given project  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_batches_by_project(id, async_req=True)
    >>> result = thread.get()

    :param id: Project UUID (required)
    :type id: str
    :param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
    :type include: list[str]
    :param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
    :type exclude: list[str]
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: BatchesList
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialized payload (no status code / headers tuple).
    options = dict(kwargs, _return_http_data_only=True)
    return self.find_batches_by_project_with_http_info(id, **options)  # noqa: E501
def find_batches_by_project_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve all batches by project # noqa: E501
Returns all batches for the given project # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_batches_by_project_with_http_info(id, async_req=True)
>>> result = thread.get()
:param id: Project UUID (required)
:type id: str
:param include: Nested attributes to include. Included objects will return their full attributes. Attribute names can be dotted (up to 3 levels) to included deeply nested objects.
:type include: list[str]
:param exclude: Nested attributes to exclude. Excluded objects will return only the href attribute. Attribute names can be dotted (up to 3 levels) to exclude deeply nested objects.
:type exclude: list[str]
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(BatchesList, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'id',
'include',
'exclude'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method find_batches_by_project" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `find_batches_by_project`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'include' in local_var_params and local_var_params['include'] is not None: # noqa: E501
query_params.append(('include', local_var_params['include'])) # noqa: E501
collection_formats['include'] = 'csv' # noqa: E501
if 'exclude' in local_var_params and local_var_params['exclude'] is not None: # noqa: E501
query_params.append(('exclude', local_var_params['exclude'])) # noqa: E501
collection_formats['exclude'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['x_auth_token'] # noqa: E501
response_types_map = {
200: "BatchesList",
401: "Error",
403: "Error",
404: "Error",
}
return self.api_client.call_api(
'/projects/{id}/batches', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_types_map=response_types_map,
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
| 54.034375
| 3,108
| 0.619166
| 4,176
| 34,582
| 4.941331
| 0.09363
| 0.030628
| 0.048171
| 0.020935
| 0.945239
| 0.941071
| 0.935789
| 0.927986
| 0.915144
| 0.902108
| 0
| 0.014241
| 0.301515
| 34,582
| 639
| 3,109
| 54.118936
| 0.840033
| 0.576456
| 0
| 0.734982
| 0
| 0
| 0.166306
| 0.043158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031802
| false
| 0
| 0.017668
| 0
| 0.081272
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aa8a6fb84fcfabba1bf104091f77ba282184c086
| 972
|
py
|
Python
|
tests/test_autoexecute.py
|
Rayapur/pytest-play
|
c39a1a7e74d90aebeb30797a61d0e491942557e8
|
[
"Apache-2.0"
] | 51
|
2018-04-26T09:02:38.000Z
|
2021-11-21T10:57:32.000Z
|
tests/test_autoexecute.py
|
Rayapur/pytest-play
|
c39a1a7e74d90aebeb30797a61d0e491942557e8
|
[
"Apache-2.0"
] | 39
|
2017-12-20T14:27:33.000Z
|
2018-04-05T22:45:12.000Z
|
tests/test_autoexecute.py
|
tierratelematics/pytest-play
|
c39a1a7e74d90aebeb30797a61d0e491942557e8
|
[
"Apache-2.0"
] | 5
|
2018-06-30T15:51:39.000Z
|
2020-04-13T19:31:25.000Z
|
def test_autoexecute_yml_pass(testdir):
yml_file = testdir.makefile(".yml", """
---
- provider: python
type: assert
expression: "1"
""")
assert yml_file.basename.startswith('test_')
assert yml_file.basename.endswith('.yml')
result = testdir.runpytest()
result.assert_outcomes(passed=1)
def test_autoexecute_yml_fail(testdir):
yml_file = testdir.makefile(".yml", """
---
- provider: python
type: assert
expression: "0"
""")
assert yml_file.basename.startswith('test_')
assert yml_file.basename.endswith('.yml')
result = testdir.runpytest()
result.assert_outcomes(failed=1)
def test_autoexecute_yml_cli_pass(testdir):
yml_file = testdir.makefile(".yml", """
---
- provider: python
type: assert
expression: "1"
""")
assert yml_file.basename.startswith('test_')
assert yml_file.basename.endswith('.yml')
result = testdir.runpytest(yml_file.strpath)
result.assert_outcomes(passed=1)
| 22.090909
| 48
| 0.686214
| 115
| 972
| 5.573913
| 0.226087
| 0.109204
| 0.121685
| 0.196568
| 0.923557
| 0.803432
| 0.803432
| 0.803432
| 0.803432
| 0.803432
| 0
| 0.007417
| 0.167695
| 972
| 43
| 49
| 22.604651
| 0.78492
| 0
| 0
| 0.818182
| 0
| 0
| 0.228395
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 1
| 0.090909
| false
| 0.121212
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
aa8a7dfd07a3303ed33f55b36c61bd943da2e475
| 24,475
|
py
|
Python
|
test_autoastro/unit/profiles/test_light_and_mass_profiles.py
|
woodyZootopia/PyAutoAstro
|
6500b9746b3e73c3f3129fcbaa3a0419bb400915
|
[
"MIT"
] | null | null | null |
test_autoastro/unit/profiles/test_light_and_mass_profiles.py
|
woodyZootopia/PyAutoAstro
|
6500b9746b3e73c3f3129fcbaa3a0419bb400915
|
[
"MIT"
] | null | null | null |
test_autoastro/unit/profiles/test_light_and_mass_profiles.py
|
woodyZootopia/PyAutoAstro
|
6500b9746b3e73c3f3129fcbaa3a0419bb400915
|
[
"MIT"
] | null | null | null |
import numpy as np
import pytest
import autofit as af
import autoarray as aa
import autoastro as aast
grid = aa.grid_irregular.manual_1d([[1.0, 1.0], [2.0, 2.0], [3.0, 3.0], [2.0, 4.0]])
@pytest.fixture(autouse=True)
def reset_config():
"""
Use configuration from the default path. You may want to change this to set a specific path.
"""
af.conf.instance = af.conf.default
class TestGaussian(object):
def test__constructor_and_units(self):
gaussian = aast.lmp.EllipticalGaussian(
centre=(1.0, 2.0),
axis_ratio=0.5,
phi=45.0,
intensity=1.0,
sigma=4.0,
mass_to_light_ratio=10.0,
)
assert gaussian.centre == (1.0, 2.0)
assert isinstance(gaussian.centre[0], aast.dim.Length)
assert isinstance(gaussian.centre[1], aast.dim.Length)
assert gaussian.centre[0].unit == "arcsec"
assert gaussian.centre[1].unit == "arcsec"
assert gaussian.axis_ratio == 0.5
assert isinstance(gaussian.axis_ratio, float)
assert gaussian.phi == 45.0
assert isinstance(gaussian.phi, float)
assert gaussian.intensity == 1.0
assert isinstance(gaussian.intensity, aast.dim.Luminosity)
assert gaussian.intensity.unit == "eps"
assert gaussian.sigma == 4.0
assert isinstance(gaussian.sigma, aast.dim.Length)
assert gaussian.sigma.unit_length == "arcsec"
assert gaussian.mass_to_light_ratio == 10.0
assert isinstance(gaussian.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert gaussian.mass_to_light_ratio.unit == "angular / eps"
def test__grid_calculations__same_as_gaussian(self):
gaussian_lp = aast.lmp.EllipticalGaussian(
axis_ratio=0.7, phi=1.0, intensity=1.0, sigma=5.0
)
gaussian_mp = aast.lmp.EllipticalGaussian(
axis_ratio=0.7, phi=1.0, intensity=1.0, sigma=5.0, mass_to_light_ratio=2.0
)
gaussian_lmp = aast.lmp.EllipticalGaussian(
axis_ratio=0.7, phi=1.0, intensity=1.0, sigma=5.0, mass_to_light_ratio=2.0
)
assert (
gaussian_lp.profile_image_from_grid(grid=grid)
== gaussian_lmp.profile_image_from_grid(grid=grid)
).all()
assert (
gaussian_mp.convergence_from_grid(grid=grid)
== gaussian_lmp.convergence_from_grid(grid=grid)
).all()
# assert (sersic_mp.potential_from_grid(grid=grid) == sersic_lmp.potential_from_grid(grid=grid)).all()
assert (
gaussian_mp.deflections_from_grid(grid=grid)
== gaussian_lmp.deflections_from_grid(grid=grid)
).all()
class TestSersic:
def test__constructor_and_units(self):
sersic = aast.lmp.EllipticalSersic(
centre=(1.0, 2.0),
axis_ratio=0.5,
phi=45.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=4.0,
mass_to_light_ratio=10.0,
)
assert sersic.centre == (1.0, 2.0)
assert isinstance(sersic.centre[0], aast.dim.Length)
assert isinstance(sersic.centre[1], aast.dim.Length)
assert sersic.centre[0].unit == "arcsec"
assert sersic.centre[1].unit == "arcsec"
assert sersic.axis_ratio == 0.5
assert isinstance(sersic.axis_ratio, float)
assert sersic.phi == 45.0
assert isinstance(sersic.phi, float)
assert sersic.intensity == 1.0
assert isinstance(sersic.intensity, aast.dim.Luminosity)
assert sersic.intensity.unit == "eps"
assert sersic.effective_radius == 0.6
assert isinstance(sersic.effective_radius, aast.dim.Length)
assert sersic.effective_radius.unit_length == "arcsec"
assert sersic.sersic_index == 4.0
assert isinstance(sersic.sersic_index, float)
assert sersic.mass_to_light_ratio == 10.0
assert isinstance(sersic.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert sersic.mass_to_light_ratio.unit == "angular / eps"
assert sersic.sersic_constant == pytest.approx(7.66925, 1e-3)
assert sersic.elliptical_effective_radius == 0.6 / np.sqrt(0.5)
sersic = aast.lmp.SphericalSersic(
centre=(1.0, 2.0),
intensity=1.0,
effective_radius=0.6,
sersic_index=4.0,
mass_to_light_ratio=10.0,
)
assert sersic.centre == (1.0, 2.0)
assert isinstance(sersic.centre[0], aast.dim.Length)
assert isinstance(sersic.centre[1], aast.dim.Length)
assert sersic.centre[0].unit == "arcsec"
assert sersic.centre[1].unit == "arcsec"
assert sersic.axis_ratio == 1.0
assert isinstance(sersic.axis_ratio, float)
assert sersic.phi == 0.0
assert isinstance(sersic.phi, float)
assert sersic.intensity == 1.0
assert isinstance(sersic.intensity, aast.dim.Luminosity)
assert sersic.intensity.unit == "eps"
assert sersic.effective_radius == 0.6
assert isinstance(sersic.effective_radius, aast.dim.Length)
assert sersic.effective_radius.unit_length == "arcsec"
assert sersic.sersic_index == 4.0
assert isinstance(sersic.sersic_index, float)
assert sersic.mass_to_light_ratio == 10.0
assert isinstance(sersic.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert sersic.mass_to_light_ratio.unit == "angular / eps"
assert sersic.sersic_constant == pytest.approx(7.66925, 1e-3)
assert sersic.elliptical_effective_radius == 0.6
def test__grid_calculations__same_as_sersic(self):
sersic_lp = aast.lmp.EllipticalSersic(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=2.0,
)
sersic_mp = aast.lmp.EllipticalSersic(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=2.0,
mass_to_light_ratio=2.0,
)
sersic_lmp = aast.lmp.EllipticalSersic(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=2.0,
mass_to_light_ratio=2.0,
)
assert (
sersic_lp.profile_image_from_grid(grid=grid)
== sersic_lmp.profile_image_from_grid(grid=grid)
).all()
assert (
sersic_mp.convergence_from_grid(grid=grid)
== sersic_lmp.convergence_from_grid(grid=grid)
).all()
# assert (sersic_mp.potential_from_grid(grid=grid) == sersic_lmp.potential_from_grid(grid=grid)).all()
assert (
sersic_mp.deflections_from_grid(grid=grid)
== sersic_lmp.deflections_from_grid(grid=grid)
).all()
def test__spherical_and_elliptical_identical(self):
elliptical = aast.lmp.EllipticalSersic(
centre=(0.0, 0.0),
axis_ratio=1.0,
phi=0.0,
intensity=1.0,
effective_radius=1.0,
sersic_index=2.0,
mass_to_light_ratio=2.0,
)
spherical = aast.lmp.SphericalSersic(
centre=(0.0, 0.0),
intensity=1.0,
effective_radius=1.0,
sersic_index=2.0,
mass_to_light_ratio=2.0,
)
assert (
elliptical.profile_image_from_grid(grid=grid)
== spherical.profile_image_from_grid(grid=grid)
).all()
assert (
elliptical.convergence_from_grid(grid=grid)
== spherical.convergence_from_grid(grid=grid)
).all()
# assert (elliptical.potential_from_grid(grid=grid) == spherical.potential_from_grid(grid=grid)).all()
np.testing.assert_almost_equal(
elliptical.deflections_from_grid(grid=grid),
spherical.deflections_from_grid(grid=grid),
)
class TestExponential:
def test__constructor_and_units(self):
exponential = aast.lmp.EllipticalExponential(
centre=(1.0, 2.0),
axis_ratio=0.5,
phi=45.0,
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=10.0,
)
assert exponential.centre == (1.0, 2.0)
assert isinstance(exponential.centre[0], aast.dim.Length)
assert isinstance(exponential.centre[1], aast.dim.Length)
assert exponential.centre[0].unit == "arcsec"
assert exponential.centre[1].unit == "arcsec"
assert exponential.axis_ratio == 0.5
assert isinstance(exponential.axis_ratio, float)
assert exponential.phi == 45.0
assert isinstance(exponential.phi, float)
assert exponential.intensity == 1.0
assert isinstance(exponential.intensity, aast.dim.Luminosity)
assert exponential.intensity.unit == "eps"
assert exponential.effective_radius == 0.6
assert isinstance(exponential.effective_radius, aast.dim.Length)
assert exponential.effective_radius.unit_length == "arcsec"
assert exponential.sersic_index == 1.0
assert isinstance(exponential.sersic_index, float)
assert exponential.mass_to_light_ratio == 10.0
assert isinstance(exponential.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert exponential.mass_to_light_ratio.unit == "angular / eps"
assert exponential.sersic_constant == pytest.approx(1.67838, 1e-3)
assert exponential.elliptical_effective_radius == 0.6 / np.sqrt(0.5)
exponential = aast.lmp.SphericalExponential(
centre=(1.0, 2.0),
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=10.0,
)
assert exponential.centre == (1.0, 2.0)
assert isinstance(exponential.centre[0], aast.dim.Length)
assert isinstance(exponential.centre[1], aast.dim.Length)
assert exponential.centre[0].unit == "arcsec"
assert exponential.centre[1].unit == "arcsec"
assert exponential.axis_ratio == 1.0
assert isinstance(exponential.axis_ratio, float)
assert exponential.phi == 0.0
assert isinstance(exponential.phi, float)
assert exponential.intensity == 1.0
assert isinstance(exponential.intensity, aast.dim.Luminosity)
assert exponential.intensity.unit == "eps"
assert exponential.effective_radius == 0.6
assert isinstance(exponential.effective_radius, aast.dim.Length)
assert exponential.effective_radius.unit_length == "arcsec"
assert exponential.sersic_index == 1.0
assert isinstance(exponential.sersic_index, float)
assert exponential.mass_to_light_ratio == 10.0
assert isinstance(exponential.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert exponential.mass_to_light_ratio.unit == "angular / eps"
assert exponential.sersic_constant == pytest.approx(1.67838, 1e-3)
assert exponential.elliptical_effective_radius == 0.6
def test__grid_calculations__same_as_exponential(self):
sersic_lp = aast.lmp.EllipticalExponential(
axis_ratio=0.7, phi=1.0, intensity=1.0, effective_radius=0.6
)
sersic_mp = aast.lmp.EllipticalExponential(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=2.0,
)
sersic_lmp = aast.lmp.EllipticalExponential(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=2.0,
)
assert (
sersic_lp.profile_image_from_grid(grid=grid)
== sersic_lmp.profile_image_from_grid(grid=grid)
).all()
assert (
sersic_mp.convergence_from_grid(grid=grid)
== sersic_lmp.convergence_from_grid(grid=grid)
).all()
# assert (sersic_mp.potential_from_grid(grid=grid) == sersic_lmp.potential_from_grid(grid=grid)).all()
assert (
sersic_mp.deflections_from_grid(grid=grid)
== sersic_lmp.deflections_from_grid(grid=grid)
).all()
def test__spherical_and_elliptical_identical(self):
elliptical = aast.lmp.EllipticalExponential(
centre=(0.0, 0.0),
axis_ratio=1.0,
phi=0.0,
intensity=1.0,
effective_radius=1.0,
)
spherical = aast.lmp.SphericalExponential(
centre=(0.0, 0.0), intensity=1.0, effective_radius=1.0
)
assert (
elliptical.profile_image_from_grid(grid=grid)
== spherical.profile_image_from_grid(grid=grid)
).all()
assert (
elliptical.convergence_from_grid(grid=grid)
== spherical.convergence_from_grid(grid=grid)
).all()
# assert elliptical.potential_from_grid(grid=grid) == spherical.potential_from_grid(grid=grid)
np.testing.assert_almost_equal(
elliptical.deflections_from_grid(grid=grid),
spherical.deflections_from_grid(grid=grid),
)
class TestDevVaucouleurs:
def test__constructor_and_units(self):
dev_vaucouleurs = aast.lmp.EllipticalDevVaucouleurs(
centre=(1.0, 2.0),
axis_ratio=0.5,
phi=45.0,
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=10.0,
)
assert dev_vaucouleurs.centre == (1.0, 2.0)
assert isinstance(dev_vaucouleurs.centre[0], aast.dim.Length)
assert isinstance(dev_vaucouleurs.centre[1], aast.dim.Length)
assert dev_vaucouleurs.centre[0].unit == "arcsec"
assert dev_vaucouleurs.centre[1].unit == "arcsec"
assert dev_vaucouleurs.axis_ratio == 0.5
assert isinstance(dev_vaucouleurs.axis_ratio, float)
assert dev_vaucouleurs.phi == 45.0
assert isinstance(dev_vaucouleurs.phi, float)
assert dev_vaucouleurs.intensity == 1.0
assert isinstance(dev_vaucouleurs.intensity, aast.dim.Luminosity)
assert dev_vaucouleurs.intensity.unit == "eps"
assert dev_vaucouleurs.effective_radius == 0.6
assert isinstance(dev_vaucouleurs.effective_radius, aast.dim.Length)
assert dev_vaucouleurs.effective_radius.unit_length == "arcsec"
assert dev_vaucouleurs.sersic_index == 4.0
assert isinstance(dev_vaucouleurs.sersic_index, float)
assert dev_vaucouleurs.mass_to_light_ratio == 10.0
assert isinstance(
dev_vaucouleurs.mass_to_light_ratio, aast.dim.MassOverLuminosity
)
assert dev_vaucouleurs.mass_to_light_ratio.unit == "angular / eps"
assert dev_vaucouleurs.sersic_constant == pytest.approx(7.66924, 1e-3)
assert dev_vaucouleurs.elliptical_effective_radius == 0.6 / np.sqrt(0.5)
dev_vaucouleurs = aast.lmp.SphericalDevVaucouleurs(
centre=(1.0, 2.0),
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=10.0,
)
assert dev_vaucouleurs.centre == (1.0, 2.0)
assert isinstance(dev_vaucouleurs.centre[0], aast.dim.Length)
assert isinstance(dev_vaucouleurs.centre[1], aast.dim.Length)
assert dev_vaucouleurs.centre[0].unit == "arcsec"
assert dev_vaucouleurs.centre[1].unit == "arcsec"
assert dev_vaucouleurs.axis_ratio == 1.0
assert isinstance(dev_vaucouleurs.axis_ratio, float)
assert dev_vaucouleurs.phi == 0.0
assert isinstance(dev_vaucouleurs.phi, float)
assert dev_vaucouleurs.intensity == 1.0
assert isinstance(dev_vaucouleurs.intensity, aast.dim.Luminosity)
assert dev_vaucouleurs.intensity.unit == "eps"
assert dev_vaucouleurs.effective_radius == 0.6
assert isinstance(dev_vaucouleurs.effective_radius, aast.dim.Length)
assert dev_vaucouleurs.effective_radius.unit_length == "arcsec"
assert dev_vaucouleurs.sersic_index == 4.0
assert isinstance(dev_vaucouleurs.sersic_index, float)
assert dev_vaucouleurs.mass_to_light_ratio == 10.0
assert isinstance(
dev_vaucouleurs.mass_to_light_ratio, aast.dim.MassOverLuminosity
)
assert dev_vaucouleurs.mass_to_light_ratio.unit == "angular / eps"
assert dev_vaucouleurs.sersic_constant == pytest.approx(7.66924, 1e-3)
assert dev_vaucouleurs.elliptical_effective_radius == 0.6
def test__grid_calculations__same_as_dev_vaucouleurs(self):
sersic_lp = aast.lmp.EllipticalDevVaucouleurs(
axis_ratio=0.7, phi=1.0, intensity=1.0, effective_radius=0.6
)
sersic_mp = aast.lmp.EllipticalDevVaucouleurs(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=2.0,
)
sersic_lmp = aast.lmp.EllipticalDevVaucouleurs(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
mass_to_light_ratio=2.0,
)
assert (
sersic_lp.profile_image_from_grid(grid=grid)
== sersic_lmp.profile_image_from_grid(grid=grid)
).all()
assert (
sersic_mp.convergence_from_grid(grid=grid)
== sersic_lmp.convergence_from_grid(grid=grid)
).all()
# assert (sersic_mp.potential_from_grid(grid=grid) == sersic_lmp.potential_from_grid(grid=grid)).all()
assert (
sersic_mp.deflections_from_grid(grid=grid)
== sersic_lmp.deflections_from_grid(grid=grid)
).all()
def test__spherical_and_elliptical_identical(self):
elliptical = aast.lmp.EllipticalDevVaucouleurs(
centre=(0.0, 0.0),
axis_ratio=1.0,
phi=0.0,
intensity=1.0,
effective_radius=1.0,
)
spherical = aast.lmp.SphericalDevVaucouleurs(
centre=(0.0, 0.0), intensity=1.0, effective_radius=1.0
)
assert (
elliptical.profile_image_from_grid(grid=grid)
== spherical.profile_image_from_grid(grid=grid)
).all()
assert (
elliptical.convergence_from_grid(grid=grid)
== spherical.convergence_from_grid(grid=grid)
).all()
# assert elliptical.potential_from_grid(grid=grid) == spherical.potential_from_grid(grid=grid)
np.testing.assert_almost_equal(
elliptical.deflections_from_grid(grid=grid),
spherical.deflections_from_grid(grid=grid),
)
class TestSersicRadialGradient:
def test__constructor_and_units(self):
sersic = aast.lmp.EllipticalSersicRadialGradient(
centre=(1.0, 2.0),
axis_ratio=0.5,
phi=45.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=4.0,
mass_to_light_ratio=10.0,
mass_to_light_gradient=-1.0,
)
assert sersic.centre == (1.0, 2.0)
assert isinstance(sersic.centre[0], aast.dim.Length)
assert isinstance(sersic.centre[1], aast.dim.Length)
assert sersic.centre[0].unit == "arcsec"
assert sersic.centre[1].unit == "arcsec"
assert sersic.axis_ratio == 0.5
assert isinstance(sersic.axis_ratio, float)
assert sersic.phi == 45.0
assert isinstance(sersic.phi, float)
assert sersic.intensity == 1.0
assert isinstance(sersic.intensity, aast.dim.Luminosity)
assert sersic.intensity.unit == "eps"
assert sersic.effective_radius == 0.6
assert isinstance(sersic.effective_radius, aast.dim.Length)
assert sersic.effective_radius.unit_length == "arcsec"
assert sersic.sersic_index == 4.0
assert isinstance(sersic.sersic_index, float)
assert sersic.mass_to_light_ratio == 10.0
assert isinstance(sersic.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert sersic.mass_to_light_ratio.unit == "angular / eps"
assert sersic.mass_to_light_gradient == -1.0
assert isinstance(sersic.mass_to_light_gradient, float)
assert sersic.sersic_constant == pytest.approx(7.66925, 1e-3)
assert sersic.elliptical_effective_radius == 0.6 / np.sqrt(0.5)
sersic = aast.lmp.SphericalSersicRadialGradient(
centre=(1.0, 2.0),
intensity=1.0,
effective_radius=0.6,
sersic_index=4.0,
mass_to_light_ratio=10.0,
mass_to_light_gradient=-1.0,
)
assert sersic.centre == (1.0, 2.0)
assert isinstance(sersic.centre[0], aast.dim.Length)
assert isinstance(sersic.centre[1], aast.dim.Length)
assert sersic.centre[0].unit == "arcsec"
assert sersic.centre[1].unit == "arcsec"
assert sersic.axis_ratio == 1.0
assert isinstance(sersic.axis_ratio, float)
assert sersic.phi == 0.0
assert isinstance(sersic.phi, float)
assert sersic.intensity == 1.0
assert isinstance(sersic.intensity, aast.dim.Luminosity)
assert sersic.intensity.unit == "eps"
assert sersic.effective_radius == 0.6
assert isinstance(sersic.effective_radius, aast.dim.Length)
assert sersic.effective_radius.unit_length == "arcsec"
assert sersic.sersic_index == 4.0
assert isinstance(sersic.sersic_index, float)
assert sersic.mass_to_light_ratio == 10.0
assert isinstance(sersic.mass_to_light_ratio, aast.dim.MassOverLuminosity)
assert sersic.mass_to_light_ratio.unit == "angular / eps"
assert sersic.mass_to_light_gradient == -1.0
assert isinstance(sersic.mass_to_light_gradient, float)
assert sersic.sersic_constant == pytest.approx(7.66925, 1e-3)
assert sersic.elliptical_effective_radius == 0.6
def test__grid_calculations__same_as_sersic_radial_gradient(self):
sersic_lp = aast.lmp.EllipticalSersic(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=2.0,
)
sersic_mp = aast.lmp.EllipticalSersicRadialGradient(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=2.0,
mass_to_light_ratio=2.0,
mass_to_light_gradient=0.5,
)
sersic_lmp = aast.lmp.EllipticalSersicRadialGradient(
axis_ratio=0.7,
phi=1.0,
intensity=1.0,
effective_radius=0.6,
sersic_index=2.0,
mass_to_light_ratio=2.0,
mass_to_light_gradient=0.5,
)
assert (
sersic_lp.profile_image_from_grid(grid=grid)
== sersic_lmp.profile_image_from_grid(grid=grid)
).all()
assert (
sersic_mp.convergence_from_grid(grid=grid)
== sersic_lmp.convergence_from_grid(grid=grid)
).all()
# assert (sersic_mp.potential_from_grid(grid=grid) == sersic_lmp.potential_from_grid(grid=grid)).all()
assert (
sersic_mp.deflections_from_grid(grid=grid)
== sersic_lmp.deflections_from_grid(grid=grid)
).all()
def test__spherical_and_elliptical_identical(self):
elliptical = aast.lmp.EllipticalSersicRadialGradient(
centre=(0.0, 0.0),
axis_ratio=1.0,
phi=0.0,
intensity=1.0,
effective_radius=1.0,
)
spherical = aast.lmp.SphericalSersicRadialGradient(
centre=(0.0, 0.0), intensity=1.0, effective_radius=1.0
)
assert (
elliptical.profile_image_from_grid(grid=grid)
== spherical.profile_image_from_grid(grid=grid)
).all()
assert (
elliptical.convergence_from_grid(grid=grid)
== spherical.convergence_from_grid(grid=grid)
).all()
# assert elliptical.potential_from_grid(grid=grid) == spherical.potential_from_grid(grid=grid)
np.testing.assert_almost_equal(
elliptical.deflections_from_grid(grid=grid),
spherical.deflections_from_grid(grid=grid),
)
| 36.749249
| 113
| 0.62954
| 2,988
| 24,475
| 4.930723
| 0.04083
| 0.078192
| 0.058644
| 0.078192
| 0.921808
| 0.907894
| 0.888685
| 0.878165
| 0.870834
| 0.851083
| 0
| 0.038026
| 0.268274
| 24,475
| 665
| 114
| 36.804511
| 0.784633
| 0.040572
| 0
| 0.770642
| 0
| 0
| 0.013045
| 0
| 0
| 0
| 0
| 0
| 0.412844
| 1
| 0.027523
| false
| 0
| 0.009174
| 0
| 0.045872
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aaa9e12aff7dccc530ed459f70a72608562d7dce
| 702
|
py
|
Python
|
examples/python/cpu/tensors/ocean_arange_01.py
|
kant/ocean-tensor-package
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
[
"Apache-2.0"
] | 27
|
2018-08-16T21:32:49.000Z
|
2021-11-30T10:31:08.000Z
|
examples/python/cpu/tensors/ocean_arange_01.py
|
kant/ocean-tensor-package
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
[
"Apache-2.0"
] | null | null | null |
examples/python/cpu/tensors/ocean_arange_01.py
|
kant/ocean-tensor-package
|
fb3fcff8bba7f4ef6cd8b8d02f0e1be1258da02d
|
[
"Apache-2.0"
] | 13
|
2018-08-17T17:33:16.000Z
|
2021-11-30T10:31:09.000Z
|
import pyOcean_cpu as ocean
print(ocean.arange(10))
print(ocean.arange(2,10))
print(ocean.arange(2,10,3,ocean.int8))
print(ocean.arange(10,2,-3,ocean.half))
print(ocean.arange(10.))
print(ocean.arange(2.,10))
print(ocean.arange(2,10,3.,ocean.int8))
print(ocean.arange(10,2.,-3,ocean.half))
print("\n--------- Non-integer step ---------")
print(ocean.arange(2,5,0.3))
print("\n--------- Single element ---------")
print(ocean.arange(2,5,10))
print(ocean.arange(2,5,ocean.inf))
print("\n--------- Empty ---------")
print(ocean.arange(5,2))
print(ocean.arange(2,5,-10))
print(ocean.arange(2,5,-ocean.inf))
print(ocean.arange(5,2.))
print(ocean.arange(2,5,-10.))
print(ocean.arange(2,5,-ocean.inf))
| 24.206897
| 47
| 0.652422
| 123
| 702
| 3.715447
| 0.178862
| 0.371991
| 0.595186
| 0.40919
| 0.857768
| 0.818381
| 0.818381
| 0.818381
| 0.818381
| 0.818381
| 0
| 0.081448
| 0.055556
| 702
| 28
| 48
| 25.071429
| 0.607843
| 0
| 0
| 0.095238
| 0
| 0
| 0.14408
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.047619
| 0
| 0.047619
| 0.952381
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
2ad81f3a256e53fb3497681f735a0f1d7340de0c
| 27,629
|
py
|
Python
|
digsby/src/tests/mock/mockprofiles.py
|
ifwe/digsby
|
f5fe00244744aa131e07f09348d10563f3d8fa99
|
[
"Python-2.0"
] | 35
|
2015-08-15T14:32:38.000Z
|
2021-12-09T16:21:26.000Z
|
digsby/src/tests/mock/mockprofiles.py
|
niterain/digsby
|
16a62c7df1018a49eaa8151c0f8b881c7e252949
|
[
"Python-2.0"
] | 4
|
2015-09-12T10:42:57.000Z
|
2017-02-27T04:05:51.000Z
|
digsby/src/tests/mock/mockprofiles.py
|
niterain/digsby
|
16a62c7df1018a49eaa8151c0f8b881c7e252949
|
[
"Python-2.0"
] | 15
|
2015-07-10T23:58:07.000Z
|
2022-01-23T22:16:33.000Z
|
from util import Storage
# Mock buddy-info profiles: maps a display name to a pre-rendered HTML
# snippet, for exercising the profile pane without a live IM connection.
# The snippets embed Windows paths ("C:\jabber.png" etc.), so the literals
# are raw strings (r"""): the original plain strings only worked because
# "\j", "\l" and "\i" are *invalid* escapes that Python currently leaves
# untouched -- that behavior is deprecated and will become an error in a
# future release.  The rendered bytes are unchanged.
MockProfiles = Storage(
# Standard two-resource jabber profile; per-name differences are only the
# display name and the JID line.
Aaron=r"""
<TABLE WIDTH=100% cellpadding=1 border=0><TR><TD>
<TABLE WIDTH=100% cellpadding=0>
<TR><TD valign=center><IMG SRC="C:\jabber.png">
<FONT FACE=ARIAL SIZE="+0"> <B>Aaron</B></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>JID: </B><FONT COLOR="#777777">brok3nhalo@gmail.com</FONT></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Subscription: </B><FONT COLOR="#777777">Both</FONT></FONT>
<TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE>
<br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Gaim (5)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Free for Chat</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT><TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE><br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Psi (10)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Extended Away</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT></TD><TD width=64 valign=top align=center>
<table cellpadding=2 border=0><tr><td align=center><img src="C:\icon.png">
</td></tr></table>
<br><a href=#><FONT FACE=ARIAL SIZE="-1">Hide Profile<br></FONT></a>
</TD></TR></TABLE>
<TABLE WIDTH=100% cellpadding=0 border=0><TR><TD><img src="C:\line.png" width='270'><br><table cellpadding=1 cellspacing=1><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Full Name: </B></FONT><FONT SIZE="-1" COLOR="#777777">Steve Shapiro</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Birthday: </B></FONT><FONT SIZE="-1" COLOR="#777777">11/20/1982</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Phone: </B></FONT><FONT SIZE="-1" COLOR="#777777">917-757-7555</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Email: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=mailto:shaps776@gmail.com>shaps776@gmail.com</a></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.digsby.com>http://www.digsby.com</a></FONT>
<br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Additional Information:</B></FONT>
<br><FONT SIZE="-1" COLOR="#777777">Here is my extensive information section which may have a whole paragraph</FONT><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><br>
<FONT FACE=ARIAL SIZE="-1"><B>Home Address: </B></FONT><FONT SIZE="-1" COLOR="#777777">(<a href=http://maps.google.com>Map</a>)</FONT>
<br><FONT SIZE="-1" COLOR="#777777">Address Line<br>125 Tech Park Drive<br>Rochester, NY 14623</FONT><br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><FONT FACE=ARIAL SIZE="-1"><B>Company: </B></FONT><FONT SIZE="-1" COLOR="#777777">dotSyntax, LLC</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Department: </B></FONT><FONT SIZE="-1" COLOR="#777777">Software</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Position: </B></FONT><FONT SIZE="-1" COLOR="#777777">HCI Guy</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Role: </B></FONT><FONT SIZE="-1" COLOR="#777777">Do stuff</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.dotsyntax.com>http://www.dotsyntax.com</a></FONT>
</TD></TR></TABLE>
</TD></TR></TABLE>
""",
# NOTE: the displayed name really is "Chis" in the fixture data.
Chris=r"""
<TABLE WIDTH=100% cellpadding=1 border=0><TR><TD>
<TABLE WIDTH=100% cellpadding=0>
<TR><TD valign=center><IMG SRC="C:\jabber.png">
<FONT FACE=ARIAL SIZE="+0"> <B>Chis</B></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>JID: </B><FONT COLOR="#777777">stelminator@gmail.com</FONT></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Subscription: </B><FONT COLOR="#777777">Both</FONT></FONT>
<TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE>
<br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Gaim (5)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Free for Chat</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT><TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE><br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Psi (10)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Extended Away</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT></TD><TD width=64 valign=top align=center>
<table cellpadding=2 border=0><tr><td align=center><img src="C:\icon.png">
</td></tr></table>
<br><a href=#><FONT FACE=ARIAL SIZE="-1">Hide Profile<br></FONT></a>
</TD></TR></TABLE>
<TABLE WIDTH=100% cellpadding=0 border=0><TR><TD><img src="C:\line.png" width='270'><br><table cellpadding=1 cellspacing=1><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Full Name: </B></FONT><FONT SIZE="-1" COLOR="#777777">Steve Shapiro</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Birthday: </B></FONT><FONT SIZE="-1" COLOR="#777777">11/20/1982</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Phone: </B></FONT><FONT SIZE="-1" COLOR="#777777">917-757-7555</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Email: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=mailto:shaps776@gmail.com>shaps776@gmail.com</a></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.digsby.com>http://www.digsby.com</a></FONT>
<br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Additional Information:</B></FONT>
<br><FONT SIZE="-1" COLOR="#777777">Here is my extensive information section which may have a whole paragraph</FONT><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><br>
<FONT FACE=ARIAL SIZE="-1"><B>Home Address: </B></FONT><FONT SIZE="-1" COLOR="#777777">(<a href=http://maps.google.com>Map</a>)</FONT>
<br><FONT SIZE="-1" COLOR="#777777">Address Line<br>125 Tech Park Drive<br>Rochester, NY 14623</FONT><br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><FONT FACE=ARIAL SIZE="-1"><B>Company: </B></FONT><FONT SIZE="-1" COLOR="#777777">dotSyntax, LLC</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Department: </B></FONT><FONT SIZE="-1" COLOR="#777777">Software</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Position: </B></FONT><FONT SIZE="-1" COLOR="#777777">HCI Guy</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Role: </B></FONT><FONT SIZE="-1" COLOR="#777777">Do stuff</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.dotsyntax.com>http://www.dotsyntax.com</a></FONT>
</TD></TR></TABLE>
</TD></TR></TABLE>
""",
Jeff=r"""
<TABLE WIDTH=100% cellpadding=1 border=0><TR><TD>
<TABLE WIDTH=100% cellpadding=0>
<TR><TD valign=center><IMG SRC="C:\jabber.png">
<FONT FACE=ARIAL SIZE="+0"> <B>Jeff</B></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>JID: </B><FONT COLOR="#777777">Jeff@aol.com</FONT></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Subscription: </B><FONT COLOR="#777777">Both</FONT></FONT>
<TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE>
<br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Gaim (5)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Free for Chat</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT><TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE><br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Psi (10)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Extended Away</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT></TD><TD width=64 valign=top align=center>
<table cellpadding=2 border=0><tr><td align=center><img src="C:\icon.png">
</td></tr></table>
<br><a href=#><FONT FACE=ARIAL SIZE="-1">Hide Profile<br></FONT></a>
</TD></TR></TABLE>
<TABLE WIDTH=100% cellpadding=0 border=0><TR><TD><img src="C:\line.png" width='270'><br><table cellpadding=1 cellspacing=1><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Full Name: </B></FONT><FONT SIZE="-1" COLOR="#777777">Steve Shapiro</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Birthday: </B></FONT><FONT SIZE="-1" COLOR="#777777">11/20/1982</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Phone: </B></FONT><FONT SIZE="-1" COLOR="#777777">917-757-7555</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Email: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=mailto:shaps776@gmail.com>shaps776@gmail.com</a></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.digsby.com>http://www.digsby.com</a></FONT>
<br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Additional Information:</B></FONT>
<br><FONT SIZE="-1" COLOR="#777777">Here is my extensive information section which may have a whole paragraph</FONT><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><br>
<FONT FACE=ARIAL SIZE="-1"><B>Home Address: </B></FONT><FONT SIZE="-1" COLOR="#777777">(<a href=http://maps.google.com>Map</a>)</FONT>
<br><FONT SIZE="-1" COLOR="#777777">Address Line<br>125 Tech Park Drive<br>Rochester, NY 14623</FONT><br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><FONT FACE=ARIAL SIZE="-1"><B>Company: </B></FONT><FONT SIZE="-1" COLOR="#777777">dotSyntax, LLC</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Department: </B></FONT><FONT SIZE="-1" COLOR="#777777">Software</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Position: </B></FONT><FONT SIZE="-1" COLOR="#777777">HCI Guy</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Role: </B></FONT><FONT SIZE="-1" COLOR="#777777">Do stuff</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.dotsyntax.com>http://www.dotsyntax.com</a></FONT>
</TD></TR></TABLE>
</TD></TR></TABLE>
""",
# Kevin's profile additionally carries a long blog-style poem after the
# profile tables (exercises long scrolling content).
Kevin=r"""
<TABLE WIDTH=100% cellpadding=1 border=0><TR><TD>
<TABLE WIDTH=100% cellpadding=0>
<TR><TD valign=center><IMG SRC="C:\jabber.png">
<FONT FACE=ARIAL SIZE="+0"> <B>Kevin</B></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>JID: </B><FONT COLOR="#777777">Kevin@yahoo.com</FONT></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Subscription: </B><FONT COLOR="#777777">Both</FONT></FONT>
<TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE>
<br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Gaim (5)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Free for Chat</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT><TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE><br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Psi (10)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Extended Away</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT></TD><TD width=64 valign=top align=center>
<table cellpadding=2 border=0><tr><td align=center><img src="C:\icon.png">
</td></tr></table>
<br><a href=#><FONT FACE=ARIAL SIZE="-1">Hide Profile<br></FONT></a>
</TD></TR></TABLE>
<TABLE WIDTH=100% cellpadding=0 border=0><TR><TD><img src="C:\line.png" width='270'><br><table cellpadding=1 cellspacing=1><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Full Name: </B></FONT><FONT SIZE="-1" COLOR="#777777">Steve Shapiro</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Birthday: </B></FONT><FONT SIZE="-1" COLOR="#777777">11/20/1982</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Phone: </B></FONT><FONT SIZE="-1" COLOR="#777777">917-757-7555</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Email: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=mailto:shaps776@gmail.com>shaps776@gmail.com</a></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.digsby.com>http://www.digsby.com</a></FONT>
<br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Additional Information:</B></FONT>
<br><FONT SIZE="-1" COLOR="#777777">Here is my extensive information section which may have a whole paragraph</FONT><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><br>
<FONT FACE=ARIAL SIZE="-1"><B>Home Address: </B></FONT><FONT SIZE="-1" COLOR="#777777">(<a href=http://maps.google.com>Map</a>)</FONT>
<br><FONT SIZE="-1" COLOR="#777777">Address Line<br>125 Tech Park Drive<br>Rochester, NY 14623</FONT><br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><FONT FACE=ARIAL SIZE="-1"><B>Company: </B></FONT><FONT SIZE="-1" COLOR="#777777">dotSyntax, LLC</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Department: </B></FONT><FONT SIZE="-1" COLOR="#777777">Software</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Position: </B></FONT><FONT SIZE="-1" COLOR="#777777">HCI Guy</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Role: </B></FONT><FONT SIZE="-1" COLOR="#777777">Do stuff</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.dotsyntax.com>http://www.dotsyntax.com</a></FONT>
</TD></TR></TABLE>
</TD></TR></TABLE>
<p>Once upon a midnight
dreary, while my thumbs grew weak and weary<br>
My whole library of Zelda games laid out upon the floor<br>
Each I conquered, fully finished, from original to "Minish,"<br>
Yet my thirst did not diminish for a kingdom to explore<br>
Instantly I wanted more<br>
<br>
With my catalog completed, I began to feel defeated<br>
And absurdly missed the heated battles waged throughout the
war<br>
RPGs, I felt, were boring, but my friends replied, ignoring,<br>
And at once began assuring that I simply would adore<br>
The next game to hit to the store<br>
<br>
So they told me, with great vigor, that this new game would
be bigger<br>
Than the last hit, "Chrono Trigger," which they
took months to explore<br>
But the fighting style I hated. To attack, my turn I waited<br>
As my enemy invaded, and did knock me on the floor <br>
'Twas entitled, "Evermore"<br>
<br>
And they asked me, "Don't you love it?" But I said
that they could shove it<br>
I was sick and tired of it and its "Gotcha Last"
type war<br>
Though they thought that it was uncool, I retreated to the
old school<br>
And proceeded to save Hyrule like the good old days of yore<br>
There I'll stay, forevermore<br>
<b>( <a href="http://forums.progressiveboink.com/">Add
Comment</a> )</b></p>
""",
Mike=r"""
<TABLE WIDTH=100% cellpadding=1 border=0><TR><TD>
<TABLE WIDTH=100% cellpadding=0>
<TR><TD valign=center><IMG SRC="C:\jabber.png">
<FONT FACE=ARIAL SIZE="+0"> <B>Mike</B></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>JID: </B><FONT COLOR="#777777">Mike@hotmail.com</FONT></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Subscription: </B><FONT COLOR="#777777">Both</FONT></FONT>
<TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE>
<br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Gaim (5)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Free for Chat</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT><TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE><br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Psi (10)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Extended Away</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT></TD><TD width=64 valign=top align=center>
<table cellpadding=2 border=0><tr><td align=center><img src="C:\icon.png">
</td></tr></table>
<br><a href=#><FONT FACE=ARIAL SIZE="-1">Hide Profile<br></FONT></a>
</TD></TR></TABLE>
<TABLE WIDTH=100% cellpadding=0 border=0><TR><TD><img src="C:\line.png" width='270'><br><table cellpadding=1 cellspacing=1><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Full Name: </B></FONT><FONT SIZE="-1" COLOR="#777777">Steve Shapiro</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Birthday: </B></FONT><FONT SIZE="-1" COLOR="#777777">11/20/1982</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Phone: </B></FONT><FONT SIZE="-1" COLOR="#777777">917-757-7555</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Email: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=mailto:shaps776@gmail.com>shaps776@gmail.com</a></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.digsby.com>http://www.digsby.com</a></FONT>
<br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Additional Information:</B></FONT>
<br><FONT SIZE="-1" COLOR="#777777">Here is my extensive information section which may have a whole paragraph</FONT><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><br>
<FONT FACE=ARIAL SIZE="-1"><B>Home Address: </B></FONT><FONT SIZE="-1" COLOR="#777777">(<a href=http://maps.google.com>Map</a>)</FONT>
<br><FONT SIZE="-1" COLOR="#777777">Address Line<br>125 Tech Park Drive<br>Rochester, NY 14623</FONT><br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><FONT FACE=ARIAL SIZE="-1"><B>Company: </B></FONT><FONT SIZE="-1" COLOR="#777777">dotSyntax, LLC</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Department: </B></FONT><FONT SIZE="-1" COLOR="#777777">Software</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Position: </B></FONT><FONT SIZE="-1" COLOR="#777777">HCI Guy</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Role: </B></FONT><FONT SIZE="-1" COLOR="#777777">Do stuff</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.dotsyntax.com>http://www.dotsyntax.com</a></FONT>
</TD></TR></TABLE>
</TD></TR></TABLE>
""",
Steve=r"""
<TABLE WIDTH=100% cellpadding=1 border=0><TR><TD>
<TABLE WIDTH=100% cellpadding=0>
<TR><TD valign=center><IMG SRC="C:\jabber.png">
<FONT FACE=ARIAL SIZE="+0"> <B>Steve</B></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>JID: </B><FONT COLOR="#777777">shaps776@jabber.org</FONT></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Subscription: </B><FONT COLOR="#777777">Both</FONT></FONT>
<TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE>
<br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Gaim (5)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Free for Chat</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT><TABLE WIDTH=100% BORDER=0 CELLPADDING=2 CELLSPACING=0>
<TR><TD><IMG SRC="C:\line.png" width='210'></TD></TR></TABLE><br><FONT FACE=ARIAL SIZE="-1"><B>Resource: </B></FONT><FONT SIZE="-1" COLOR="#777777">Psi (10)</FONT><br><FONT FACE=ARIAL SIZE="-1"><B>Status: </B></FONT><FONT SIZE="-1" COLOR="#777777">Extended Away</FONT><br><FONT SIZE="-1" COLOR="#777777">Getting ready for class and then making a GUI for this IM client because it roxorssssssssss</FONT></TD><TD width=64 valign=top align=center>
<table cellpadding=2 border=0><tr><td align=center><img src="C:\icon.png">
</td></tr></table>
<br><a href=#><FONT FACE=ARIAL SIZE="-1">Hide Profile<br></FONT></a>
</TD></TR></TABLE>
<TABLE WIDTH=100% cellpadding=0 border=0><TR><TD><img src="C:\line.png" width='270'><br><table cellpadding=1 cellspacing=1><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Full Name: </B></FONT><FONT SIZE="-1" COLOR="#777777">Steve Shapiro</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Birthday: </B></FONT><FONT SIZE="-1" COLOR="#777777">11/20/1982</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Phone: </B></FONT><FONT SIZE="-1" COLOR="#777777">917-757-7555</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Email: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=mailto:shaps776@gmail.com>shaps776@gmail.com</a></FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.digsby.com>http://www.digsby.com</a></FONT>
<br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table>
<FONT FACE=ARIAL SIZE="-1"><B>Additional Information:</B></FONT>
<br><FONT SIZE="-1" COLOR="#777777">Here is my extensive information section which may have a whole paragraph</FONT><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><br>
<FONT FACE=ARIAL SIZE="-1"><B>Home Address: </B></FONT><FONT SIZE="-1" COLOR="#777777">(<a href=http://maps.google.com>Map</a>)</FONT>
<br><FONT SIZE="-1" COLOR="#777777">Address Line<br>125 Tech Park Drive<br>Rochester, NY 14623</FONT><br><table cellpadding=4 cellspacing=0><tr><td></td></tr></table><FONT FACE=ARIAL SIZE="-1"><B>Company: </B></FONT><FONT SIZE="-1" COLOR="#777777">dotSyntax, LLC</FONT><BR><FONT FACE=ARIAL SIZE="-1"><B>Department: </B></FONT><FONT SIZE="-1" COLOR="#777777">Software</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Position: </B></FONT><FONT SIZE="-1" COLOR="#777777">HCI Guy</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Role: </B></FONT><FONT SIZE="-1" COLOR="#777777">Do stuff</FONT>
<br><FONT FACE=ARIAL SIZE="-1"><B>Website: </B></FONT><FONT SIZE="-1" COLOR="#777777"><a href=http://www.dotsyntax.com>http://www.dotsyntax.com</a></FONT>
</TD></TR></TABLE>
</TD></TR></TABLE>
""",
# Poem-only profile (no tables) for testing plain rich-text rendering.
Dude=r"""
<p>Once upon a midnight
dreary, while my thumbs grew weak and weary<br>
My whole library of Zelda games laid out upon the floor<br>
Each I conquered, fully finished, from original to "Minish,"<br>
Yet my thirst did not diminish for a kingdom to explore<br>
Instantly I wanted more<br>
<br>
With my catalog completed, I began to feel defeated<br>
And absurdly missed the heated battles waged throughout the
war<br>
RPGs, I felt, were boring, but my friends replied, ignoring,<br>
And at once began assuring that I simply would adore<br>
The next game to hit to the store<br>
<br>
So they told me, with great vigor, that this new game would
be bigger<br>
Than the last hit, "Chrono Trigger," which they
took months to explore<br>
But the fighting style I hated. To attack, my turn I waited<br>
As my enemy invaded, and did knock me on the floor <br>
'Twas entitled, "Evermore"<br>
<br>
And they asked me, "Don't you love it?" But I said
that they could shove it<br>
I was sick and tired of it and its "Gotcha Last"
type war<br>
Though they thought that it was uncool, I retreated to the
old school<br>
And proceeded to save Hyrule like the good old days of yore<br>
There I'll stay, forevermore<br>
<b>( <a href="http://forums.progressiveboink.com/">Add
Comment</a> )</b></p>
"""
)
| 73.874332
| 475
| 0.605016
| 4,369
| 27,629
| 3.826047
| 0.062257
| 0.068198
| 0.093324
| 0.122039
| 0.990129
| 0.990129
| 0.990129
| 0.990129
| 0.990129
| 0.990129
| 0
| 0.064401
| 0.164863
| 27,629
| 374
| 476
| 73.874332
| 0.66005
| 0
| 0
| 0.909804
| 0
| 0.447059
| 0.99358
| 0.364824
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003922
| 0
| 0.003922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6301c66be5c3ce9864787b7b8b4bebdf8f82c11c
| 3,183
|
py
|
Python
|
tests/test_loss.py
|
jcreinhold/uncertaintorch
|
0cdc9f25fefad938c9f0bd3a6b40dfaa362dfca5
|
[
"Apache-2.0"
] | 1
|
2021-03-21T23:13:45.000Z
|
2021-03-21T23:13:45.000Z
|
tests/test_loss.py
|
jcreinhold/uncertaintorch
|
0cdc9f25fefad938c9f0bd3a6b40dfaa362dfca5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_loss.py
|
jcreinhold/uncertaintorch
|
0cdc9f25fefad938c9f0bd3a6b40dfaa362dfca5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
tests.test_loss
test the uncertaintorch loss functions
Author: Jacob Reinhold (jacob.reinhold@jhu.edu)
Created on: Jan 14, 2020
"""
import unittest
import torch
from uncertaintorch.learn import *
class TestLoss(unittest.TestCase):
    """Smoke tests for the uncertaintorch losses.

    Every test feeds an all-zero prediction/target pair to a loss and
    checks the loss value is exactly zero.  The two-tensor predictions
    are the network's paired outputs (presumably prediction plus an
    uncertainty map -- confirm against uncertaintorch.learn).
    """

    def setUp(self):
        pass

    # ---- shared helpers (the original repeated these 3 lines 12x) ----

    def _assert_zero_loss(self, fn, x, y):
        # Single shared check: loss(x, y) must be exactly 0.
        loss = fn(x, y)
        self.assertEqual(loss.item(), 0)

    def _regression_inputs(self, y_channels):
        # Paired zero predictions plus a zero regression target with the
        # requested channel count (1 = plain target, 2 = masked variant).
        x = (torch.zeros((2, 1, 2, 2, 2)), torch.zeros((2, 1, 2, 2, 2)))
        y = torch.zeros((2, y_channels, 2, 2, 2))
        return x, y

    def _segmentation_target(self):
        # Integer class-label target used by the classification losses.
        return torch.zeros((2, 2, 2, 2), dtype=torch.long)

    # ---- regression losses ----

    def test_mseonlyloss_nomask(self):
        x, y = self._regression_inputs(1)
        self._assert_zero_loss(MSEOnlyLoss(), x, y)

    def test_mseonlyloss_mask(self):
        x, y = self._regression_inputs(2)
        self._assert_zero_loss(MSEOnlyLoss(), x, y)

    def test_gaussiandiagloss_nomask(self):
        x, y = self._regression_inputs(1)
        self._assert_zero_loss(GaussianDiagLoss(), x, y)

    def test_gaussiandiagloss_mask(self):
        x, y = self._regression_inputs(2)
        self._assert_zero_loss(GaussianDiagLoss(), x, y)

    def test_l1onlyloss_nomask(self):
        x, y = self._regression_inputs(1)
        self._assert_zero_loss(L1OnlyLoss(), x, y)

    def test_l1onlyloss_mask(self):
        x, y = self._regression_inputs(2)
        self._assert_zero_loss(L1OnlyLoss(), x, y)

    def test_laplaciandiagloss_nomask(self):
        x, y = self._regression_inputs(1)
        self._assert_zero_loss(LaplacianDiagLoss(), x, y)

    def test_laplaciandiagloss_mask(self):
        x, y = self._regression_inputs(2)
        self._assert_zero_loss(LaplacianDiagLoss(), x, y)

    # ---- classification losses ----
    # NOTE(review): the _mask variants below were byte-identical to their
    # _nomask counterparts in the original suite; kept identical here to
    # preserve behavior, but one of each pair was probably meant to use a
    # masked target.

    def test_focalloss_nomask(self):
        self._assert_zero_loss(FocalLoss(), torch.zeros((2, 2, 2, 2, 2)),
                               self._segmentation_target())

    def test_focalloss_mask(self):
        self._assert_zero_loss(FocalLoss(), torch.zeros((2, 2, 2, 2, 2)),
                               self._segmentation_target())

    def test_extendedcrossentropy_nomask(self):
        x = (torch.zeros((2, 2, 2, 2, 2)), torch.zeros((2, 2, 2, 2, 2)))
        self._assert_zero_loss(ExtendedCrossEntropy(), x,
                               self._segmentation_target())

    def test_extendedcrossentropy_mask(self):
        x = (torch.zeros((2, 2, 2, 2, 2)), torch.zeros((2, 2, 2, 2, 2)))
        self._assert_zero_loss(ExtendedCrossEntropy(), x,
                               self._segmentation_target())

    def tearDown(self):
        pass
if __name__ == '__main__':
    # Discover and run the TestLoss suite via unittest's CLI entry point.
    unittest.main()
| 30.902913
| 108
| 0.571473
| 514
| 3,183
| 3.474708
| 0.11284
| 0.103024
| 0.097424
| 0.053751
| 0.801792
| 0.801792
| 0.801792
| 0.801792
| 0.801792
| 0.801792
| 0
| 0.07708
| 0.229658
| 3,183
| 102
| 109
| 31.205882
| 0.651305
| 0.054351
| 0
| 0.714286
| 0
| 0
| 0.002667
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 1
| 0.2
| false
| 0.028571
| 0.042857
| 0
| 0.257143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d77e1daac5983e56351de236bcde1ac2993133a
| 2,308
|
py
|
Python
|
agents/dqn.py
|
pvnieo/beating-atari
|
a38a3f106cb4e717ded205c37a95b857d8d91e4d
|
[
"Apache-2.0"
] | 1
|
2019-12-12T22:16:10.000Z
|
2019-12-12T22:16:10.000Z
|
agents/dqn.py
|
pvnieo/beating-atari
|
a38a3f106cb4e717ded205c37a95b857d8d91e4d
|
[
"Apache-2.0"
] | null | null | null |
agents/dqn.py
|
pvnieo/beating-atari
|
a38a3f106cb4e717ded205c37a95b857d8d91e4d
|
[
"Apache-2.0"
] | null | null | null |
# 3p
import torch
from torch.nn.functional import smooth_l1_loss
# project
from .base_model import DQNBasedModel
class DQNNips(DQNBasedModel):
    """DQN variant from the NIPS-2013 paper: bootstrap targets come from
    the online network itself, so there is no target network to sync."""

    def __init__(self, env, network, policy, memory, optimizer, outputs_dir, logger, discount_factor=0.99):
        super().__init__(env, network, policy, memory, optimizer, outputs_dir, logger, discount_factor)

    @property
    def name(self):
        return "dqn_nips"

    def fit_batch(self, states, actions, rewards, next_states, is_terminals):
        """Run one optimization step on a transition batch; return the loss.

        Targets: y_i = r_i for terminal transitions, otherwise
        y_i = r_i + gamma * max_a Q(s'_i, a).
        """
        # The bootstrap targets must be treated as constants: compute them
        # without tracking gradients (the original built a graph through
        # the online network's target pass, leaking gradient into it).
        with torch.no_grad():
            target_q_values = self.online_net(next_states)
            # If terminal, we use y_i = r_i instead of y_i = r_i + gamma * max Q
            target_q_values[is_terminals] = 0
            # Compute targets: y_i = r_i + gamma * max Q
            target_q_values = (torch.FloatTensor(rewards) +
                               self.discount_factor * torch.max(target_q_values, dim=1)[0]).reshape(-1, 1)
        # Q-values of the actions actually taken in each state.
        predicted_q_values = torch.gather(self.online_net(states), 1, torch.LongTensor(actions).reshape(-1, 1))
        loss = smooth_l1_loss(predicted_q_values, target_q_values)
        # optimize
        self.optimizer.zero_grad()
        loss.backward()
        # BUG FIX: the original never called optimizer.step(), so computed
        # gradients were discarded and the network never learned.
        self.optimizer.step()
        return loss.item()

    def update_target_net(self):
        # No target network in the NIPS formulation; nothing to synchronize.
        pass
class DQN(DQNBasedModel):
    """Standard DQN (Nature-2015 variant): bootstrap targets come from a
    separate target network, presumably synced by the base class via
    update_target_net -- confirm against DQNBasedModel."""

    def __init__(self, env, network, policy, memory, optimizer, outputs_dir, logger, discount_factor=0.99):
        super().__init__(env, network, policy, memory, optimizer, outputs_dir, logger, discount_factor)

    @property
    def name(self):
        return "dqn"

    def fit_batch(self, states, actions, rewards, next_states, is_terminals):
        """Run one optimization step on a transition batch; return the loss.

        Targets: y_i = r_i for terminal transitions, otherwise
        y_i = r_i + gamma * max_a Q_target(s'_i, a).
        """
        # Targets are constants w.r.t. the online parameters: compute them
        # without building an autograd graph through the target network.
        with torch.no_grad():
            target_q_values = self.target_net(next_states)
            # If terminal, we use y_i = r_i instead of y_i = r_i + gamma * max Q
            target_q_values[is_terminals] = 0
            # Compute targets: y_i = r_i + gamma * max Q
            target_q_values = (torch.FloatTensor(rewards) +
                               self.discount_factor * torch.max(target_q_values, dim=1)[0]).reshape(-1, 1)
        # Q-values of the actions actually taken in each state.
        predicted_q_values = torch.gather(self.online_net(states), 1, torch.LongTensor(actions).reshape(-1, 1))
        loss = smooth_l1_loss(predicted_q_values, target_q_values)
        # optimize
        self.optimizer.zero_grad()
        loss.backward()
        # BUG FIX: the original never called optimizer.step(), so computed
        # gradients were discarded and the network never learned.
        self.optimizer.step()
        return loss.item()
| 39.118644
| 111
| 0.663345
| 314
| 2,308
| 4.582803
| 0.242038
| 0.068103
| 0.090341
| 0.016678
| 0.893676
| 0.893676
| 0.893676
| 0.893676
| 0.893676
| 0.893676
| 0
| 0.014706
| 0.233969
| 2,308
| 58
| 112
| 39.793103
| 0.799208
| 0.119151
| 0
| 0.702703
| 0
| 0
| 0.00544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.189189
| false
| 0.027027
| 0.081081
| 0.054054
| 0.432432
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2dd6d094ccdefa5b0bafd66fb85b544a22dccf6b
| 110
|
py
|
Python
|
unitpy/units/__init__.py
|
jenders97/unitpy
|
a39fefe1c109b57c174eeba53b877f32f044de0f
|
[
"MIT"
] | null | null | null |
unitpy/units/__init__.py
|
jenders97/unitpy
|
a39fefe1c109b57c174eeba53b877f32f044de0f
|
[
"MIT"
] | null | null | null |
unitpy/units/__init__.py
|
jenders97/unitpy
|
a39fefe1c109b57c174eeba53b877f32f044de0f
|
[
"MIT"
] | null | null | null |
from unitpy.units.mass import *
from unitpy.units.current import *
from unitpy.units.light_intensity import *
| 27.5
| 42
| 0.809091
| 16
| 110
| 5.5
| 0.5
| 0.340909
| 0.511364
| 0.477273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 110
| 3
| 43
| 36.666667
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fa939b8643aed6c3d712b68340c7d3b25e6242ce
| 7,952
|
py
|
Python
|
cifar_nets.py
|
sashkarivkind/imagewalker
|
999e1ae78cfe1512e1be894d9e7891a7d0c41233
|
[
"Apache-2.0"
] | 2
|
2021-04-28T13:33:45.000Z
|
2021-11-09T14:31:09.000Z
|
cifar_nets.py
|
sashkarivkind/imagewalker
|
999e1ae78cfe1512e1be894d9e7891a7d0c41233
|
[
"Apache-2.0"
] | null | null | null |
cifar_nets.py
|
sashkarivkind/imagewalker
|
999e1ae78cfe1512e1be894d9e7891a7d0c41233
|
[
"Apache-2.0"
] | 1
|
2021-03-07T13:25:59.000Z
|
2021-03-07T13:25:59.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
cifar syclop selected nets
"""
import sys
sys.path.insert(1, '/home/labs/ahissarlab/orra/imagewalker')
import numpy as np
import tensorflow.keras as keras
import tensorflow as tf
def cnn_gru(n_timesteps=5, hidden_size=128, input_size=32,
            cnn_dropout=0.4, rnn_dropout=0.2, lr=5e-4,
            concat=True):
    '''
    CNN GRU combination that extends the CNN to a network that achieves
    ~80% accuracy on full res cifar.
    Reaches 62% on low_res syclop with hyperparameters:
        hs = 256, sample_size = 10,
        cnn_dropout = 0.4 and rnn dropout = 0.2
        lr = 5e-4
        res = 8

    Parameters
    ----------
    n_timesteps : int, number of frames per sample.
    hidden_size : int, GRU hidden units.
    input_size : int, spatial size of each (square) input frame.
    cnn_dropout, rnn_dropout : float, dropout rates for CNN / GRU parts.
    lr : float, Adam learning rate.
    concat : bool, if True the 2-d trajectory input (inputB) is concatenated
        to the flattened CNN features before the GRU.

    Returns
    -------
    model : compiled keras Model taking [frames, trajectory] inputs.
    '''
    inputA = keras.layers.Input(shape=(n_timesteps, input_size, input_size, 3))
    inputB = keras.layers.Input(shape=(n_timesteps, 2))

    def _conv_stage(x, filters):
        # Two 3x3 conv layers + 2x2 max-pool + dropout, applied per timestep.
        x = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (3, 3), activation='relu', padding='same'))(x)
        x = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (3, 3), activation='relu', padding='same'))(x)
        x = keras.layers.TimeDistributed(keras.layers.MaxPooling2D(pool_size=(2, 2)))(x)
        x = keras.layers.TimeDistributed(keras.layers.Dropout(cnn_dropout))(x)
        return x

    # define CNN model: three stages with widening filter counts
    x1 = _conv_stage(inputA, 32)
    x1 = _conv_stage(x1, 64)
    x1 = _conv_stage(x1, 128)
    print(x1.shape)  # debug
    x1 = keras.layers.TimeDistributed(keras.layers.Flatten())(x1)
    print(x1.shape)  # debug
    if concat:
        x = keras.layers.Concatenate()([x1, inputB])
    else:
        x = x1
    print(x.shape)  # debug
    # define recurrent model (GRU); note input_shape is redundant here since
    # the layer is applied functionally, but it is kept for parity.
    x = keras.layers.GRU(hidden_size, input_shape=(n_timesteps, None),
                         return_sequences=True, recurrent_dropout=rnn_dropout,
                         kernel_regularizer=keras.regularizers.l1_l2(l1=0.01, l2=0.01))(x)
    x = keras.layers.Flatten()(x)
    x = keras.layers.Dense(10, activation="softmax")(x)
    model = keras.models.Model(inputs=[inputA, inputB], outputs=x, name='cnn_gru_{}'.format(concat))
    # Fix: use `learning_rate` — the `lr` alias is deprecated and removed in
    # TensorFlow >= 2.11 optimizers.
    opt = tf.keras.optimizers.Adam(learning_rate=lr)
    model.compile(
        optimizer=opt,
        loss="sparse_categorical_crossentropy",
        metrics=["sparse_categorical_accuracy"],
    )
    return model
def parallel_gru(n_timesteps=10, hidden_size=256, input_size=8,
                 cnn_dropout=0.4, rnn_dropout=0.2, lr=5e-4,
                 concat=True):
    '''
    CNN RNN combination that extends the CNN to a network that achieves
    ~80% accuracy on full res cifar.

    A GRU "parallel channel" reads the flattened features after each CNN
    stage; each later GRU also receives the previous GRU's hidden states.

    Parameters
    ----------
    n_timesteps : TYPE, optional
        DESCRIPTION. The default is 10.
    hidden_size : TYPE, optional
        DESCRIPTION. The default is 256.
    input_size : TYPE, optional
        DESCRIPTION. The default is 8.
    cnn_dropout, rnn_dropout : float, dropout rates for CNN / GRU parts.
    lr : float, Adam learning rate.
    concat : bool, if True the 2-d trajectory input (inputB) is concatenated
        into each GRU's input.
    Returns
    -------
    model : compiled keras Model taking [frames, trajectory] inputs.
    '''
    inputA = keras.layers.Input(shape=(n_timesteps, input_size, input_size, 3))
    inputB = keras.layers.Input(shape=(n_timesteps, 2))
    ###################### CNN Channel 1 ######################################
    x1 = keras.layers.TimeDistributed(keras.layers.Conv2D(32, (3, 3), activation='relu', padding='same'))(inputA)
    x1 = keras.layers.TimeDistributed(keras.layers.Conv2D(32, (3, 3), activation='relu', padding='same'))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.MaxPooling2D(pool_size=(2, 2)))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.Dropout(cnn_dropout))(x1)
    ###################### Parallel Channel 1 #################################
    rnn_temp = keras.layers.TimeDistributed(keras.layers.Flatten())(x1)
    if concat:
        rnn_temp = keras.layers.Concatenate()([rnn_temp, inputB])
    # (no-op else branch removed: rnn_temp is already the flattened features)
    print('flat shape after cnn1', rnn_temp.shape)  # debug
    rnn_x = keras.layers.GRU(hidden_size, input_shape=(n_timesteps, None),
                             kernel_regularizer=keras.regularizers.l1_l2(l1=0.01, l2=0.01),
                             return_sequences=True, recurrent_dropout=2 * rnn_dropout,
                             )(rnn_temp)
    print('gru hidden states 1 ', rnn_x.shape)  # debug
    ###################### CNN Channel 2 ######################################
    x1 = keras.layers.TimeDistributed(keras.layers.Conv2D(64, (3, 3), activation='relu', padding='same'))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.Conv2D(64, (3, 3), activation='relu', padding='same'))(x1)
    # Fix: dropped the leftover debug names (name='test' was set on BOTH the
    # TimeDistributed wrapper and the inner MaxPooling2D, producing colliding
    # hard-coded layer names).
    x1 = keras.layers.TimeDistributed(keras.layers.MaxPooling2D(pool_size=(2, 2)))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.Dropout(cnn_dropout))(x1)
    ###################### Parallel Channel 2 #################################
    rnn_temp = keras.layers.TimeDistributed(keras.layers.Flatten())(x1)
    print('flat shape after cnn2', rnn_temp.shape)  # debug
    if concat:
        rnn_temp = keras.layers.Concatenate()([rnn_x, rnn_temp, inputB])
    else:
        rnn_temp = keras.layers.Concatenate()([rnn_x, rnn_temp])
    print(' cnn2 input combined with fst hidden state', rnn_temp.shape)  # debug
    rnn_x = keras.layers.GRU(hidden_size, input_shape=(n_timesteps, None),
                             kernel_regularizer=keras.regularizers.l1_l2(l1=0.01, l2=0.01),
                             return_sequences=True, recurrent_dropout=2 * rnn_dropout,
                             )(rnn_temp)
    print('gru hidden states 2 ', rnn_x.shape)  # debug
    ###################### CNN Channel 3 ######################################
    x1 = keras.layers.TimeDistributed(keras.layers.Conv2D(128, (3, 3), activation='relu', padding='same'))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.Conv2D(128, (3, 3), activation='relu', padding='same'))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.MaxPooling2D(pool_size=(2, 2)))(x1)
    x1 = keras.layers.TimeDistributed(keras.layers.Dropout(cnn_dropout))(x1)
    print(x1.shape)  # debug
    ###################### Parallel Channel 3 #################################
    # NOTE: a third parallel GRU channel existed here but was disabled by the
    # author; the final GRU below consumes CNN stage 3 output directly.
    x1 = keras.layers.TimeDistributed(keras.layers.Flatten())(x1)
    if concat:
        x = keras.layers.Concatenate()([x1, rnn_x, inputB])
    else:
        x = keras.layers.Concatenate()([x1, rnn_x])
    print(x.shape)  # debug
    # define final recurrent model
    x = keras.layers.GRU(hidden_size, input_shape=(n_timesteps, None), return_sequences=True, recurrent_dropout=rnn_dropout)(x)
    x = keras.layers.Flatten()(x)
    x = keras.layers.Dense(10, activation="softmax")(x)
    model = keras.models.Model(inputs=[inputA, inputB], outputs=x, name='parallel_gru_v1_{}'.format(concat))
    # Fix: use `learning_rate` — the `lr` alias is deprecated and removed in
    # TensorFlow >= 2.11 optimizers.
    opt = tf.keras.optimizers.Adam(learning_rate=lr)
    model.compile(
        optimizer=opt,
        loss="sparse_categorical_crossentropy",
        metrics=["sparse_categorical_accuracy"],
    )
    return model
| 43.933702
| 139
| 0.638707
| 1,024
| 7,952
| 4.839844
| 0.142578
| 0.175343
| 0.152139
| 0.181396
| 0.875504
| 0.867837
| 0.856538
| 0.811138
| 0.803672
| 0.76937
| 0
| 0.03871
| 0.181338
| 7,952
| 181
| 140
| 43.933702
| 0.722581
| 0.177943
| 0
| 0.712871
| 0
| 0
| 0.070199
| 0.025497
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019802
| false
| 0
| 0.039604
| 0
| 0.079208
| 0.09901
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
87b0afdec783608333a5a0082af09774d6493ad3
| 22,455
|
py
|
Python
|
v0/aia_eis_v0/goa/evolution_based/differential_evolution/de_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | 1
|
2022-03-02T12:57:19.000Z
|
2022-03-02T12:57:19.000Z
|
v0/aia_eis_v0/goa/evolution_based/differential_evolution/de_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
v0/aia_eis_v0/goa/evolution_based/differential_evolution/de_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
import copy
import math
import random
from time import perf_counter
import os
import sys
sys.path.append('../../../')
from utils.file_utils.filename_utils import get_ecm_num_str, get_Num_len
from data_processor.GOA_simulation.GOA_ECMs_simulation import load_sim_ecm_para_config_dict
from goa.GOA_criterions import goa_criterion_pack
from GA_pack.fittness_functions.eis_fitness import cal_EIS_WSE_fitness_1
class DE_0:
    """
    Differential Evolution (DE/rand/1/bin), first draft.

    Refer:
        Paper:
            paper0: Differential Evolution – A Simple and Efficient Heuristic
                for Global Optimization over Continuous Spaces
        webs:
            Differential Evolution tutorial (Chinese):
            https://blog.csdn.net/qq_37423198/article/details/77856744
    Adjustable parameters:
        casual:
            number of search agents
            number of iteration
        unique:
    Attention:
    Version:
        0
    """
    class Entity:
        # One candidate solution: a position drawn uniformly inside the
        # per-dimension limits, together with its fitness (lower is better).
        def __init__(self, limits_list, fitness_function):
            self.limits_list = limits_list
            self.fitness_function = fitness_function
            self.x_list = [random.uniform(limit[0], limit[1]) for limit in limits_list]
            self.fitness = fitness_function(self.x_list)

        def update(self):
            """Clamp the position back into its bounds, then re-evaluate fitness."""
            for i in range(len(self.limits_list)):
                if self.x_list[i] > self.limits_list[i][1]:
                    self.x_list[i] = self.limits_list[i][1]
                if self.x_list[i] < self.limits_list[i][0]:
                    self.x_list[i] = self.limits_list[i][0]
            self.fitness = self.fitness_function(self.x_list)

    def __init__(self, iter_num, entity_num, limits_list, fitness_function, F=None, CR=0.9):
        """
        Args:
            iter_num: number of generations to run.
            entity_num: population size.
                paper0: a reasonable choice for entity_num is 5 * D ~ 10 * D
                (D: dimension), and it has to be > 4.
            limits_list: [[low, high], ...] bounds, one pair per dimension.
            fitness_function: callable(x_list) -> scalar, minimized.
            F: differential weight (paper0: F = 0.5); None means "draw a
               random value in [0.5, 1.0) on the first iteration".
            CR: crossover rate (paper0: CR = 0.1).
        """
        self.iter_num = iter_num
        self.entity_num = entity_num
        self.limits_list = limits_list
        self.fitness_function = fitness_function
        self.F = F
        self.CR = CR
        self.entities_list = [self.Entity(limits_list, fitness_function) for i in range(entity_num)]
        self.global_best_entity = self.Entity(limits_list, fitness_function)

    def search(self):
        """Run the DE loop; return (per-iteration best, global best) histories."""
        current_best_entity_list = []
        global_best_entity_list = []
        for iter_index in range(self.iter_num):
            # Minimization: smallest fitness wins.
            current_best_entity = sorted(self.entities_list, key=lambda entity: entity.fitness, reverse=False)[0]
            current_best_entity_list.append(current_best_entity)
            if current_best_entity.fitness < self.global_best_entity.fitness:
                self.global_best_entity = copy.deepcopy(current_best_entity)
                # NOTE(review): the live entity (not a copy) is appended here;
                # DE_1 records deep copies instead.
                global_best_entity_list.append(current_best_entity)
            else:
                global_best_entity_list.append(self.global_best_entity)
            if self.F is None:
                # Lazily draw F once, uniformly from [0.5, 1.0).
                self.F = random.random() / 2 + 0.5
            for x_index in range(self.entity_num):
                # Pick three mutually distinct donors, all different from x_index.
                x1_index = random.randint(0, self.entity_num - 1)
                while x1_index == x_index:
                    x1_index = random.randint(0, self.entity_num - 1)
                x2_index = random.randint(0, self.entity_num - 1)
                while (x2_index == x1_index) or (x2_index == x_index):
                    x2_index = random.randint(0, self.entity_num - 1)
                x3_index = random.randint(0, self.entity_num - 1)
                while (x3_index == x2_index) or (x3_index == x1_index) or (x3_index == x_index):
                    x3_index = random.randint(0, self.entity_num - 1)
                tmp_x_list = []
                counter = 0
                for a, b, c in zip(self.entities_list[x1_index].x_list, self.entities_list[x2_index].x_list, self.entities_list[x3_index].x_list):
                    # Mutation: donor component a + F * (b - c)
                    t_x = a + self.F * (b - c)
                    # Crossover: take the donor component with probability CR,
                    # otherwise keep the current entity's component.
                    if random.random() < self.CR:
                        tmp_x_list.append(t_x)
                    else:
                        tmp_x_list.append(self.entities_list[x_index].x_list[counter])
                    counter += 1
                tmp_entity = self.Entity(self.limits_list, self.fitness_function)
                tmp_entity.x_list = tmp_x_list
                tmp_entity.update()
                # Selection (greedy): keep the trial only if it improves.
                if tmp_entity.fitness < self.entities_list[x_index].fitness:
                    self.entities_list[x_index] = copy.deepcopy(tmp_entity)
        return current_best_entity_list, global_best_entity_list
# if __name__ == '__main__':
# iter_num = 1000
# entity_num = 10
# dim = 5
#
# f1_limits_list = [[-180, 70] for i in range(dim)]
# from GA_pack.fittness_functions.f1 import f1
# f1_fitness_function = f1
#
# de = DE_0(iter_num, entity_num, f1_limits_list, f1_fitness_function)
# current_best_entity_list, global_best_entity_list = de.search()
# print('Best entity position:', de.global_best_entity.x_list)
# print('Fitness:', de.global_best_entity.fitness)
#
# # Draw the best universe in each iteration.
# iter_list = [i for i in range(iter_num)]
# cur_fitness_list = [entity.fitness for entity in current_best_entity_list]
# cur_global_fitness_list = [entity.fitness for entity in global_best_entity_list]
#
# import matplotlib.pyplot as plt
# fig, ax = plt.subplots()
# line1, = ax.plot(iter_list, cur_fitness_list, label='Current Iteration {0}\nentity number {1}\nDimension {2}'.format(iter_num, entity_num, dim))
# line1.set_dashes([2, 2, 10, 2]) # 2pt line, 2pt break, 10pt line, 2pt break
# # line2, = ax.plot(iter_list, cur_global_fitness_list, label='Current Global Iteration {0}\nentity number {1}\nDimension {2}'.format(iter_num, entity_num, dim))
# # line2.set_dashes([2, 2, 10, 2]) # 2pt line, 2pt break, 10pt line, 2pt break
# ax.legend()
# plt.xlabel('Iteration times')
# plt.ylabel('Error rate')
# plt.title('Search the minimum of f1 = sum(Xi ^ 2)')
# plt.show()
class DE_1:
    """
    Differential Evolution (DE/rand/1/bin), revised bookkeeping.

    Compared with DE_0: the population is sorted in place, the three donor
    indices are drawn with random.sample, and both history lists record deep
    copies so later generations cannot alter recorded entries.
    Refer:
        Paper:
            paper0: Differential Evolution – A Simple and Efficient Heuristic
                for Global Optimization over Continuous Spaces
        webs:
            Differential Evolution tutorial (Chinese):
            https://blog.csdn.net/qq_37423198/article/details/77856744
    Adjustable parameters:
        casual:
            number of search agents
            number of iteration
        unique:
    Attention:
    Version:
        0
    """
    class Entity:
        # One candidate solution: a position drawn uniformly inside the
        # per-dimension limits, together with its fitness (lower is better).
        def __init__(self, limits_list, fitness_function):
            self.limits_list = limits_list
            self.fitness_function = fitness_function
            self.x_list = [random.uniform(limit[0], limit[1]) for limit in limits_list]
            self.fitness = fitness_function(self.x_list)

        def update(self):
            """Clamp the position back into its bounds, then re-evaluate fitness."""
            for i in range(len(self.limits_list)):
                if self.x_list[i] > self.limits_list[i][1]:
                    self.x_list[i] = self.limits_list[i][1]
                if self.x_list[i] < self.limits_list[i][0]:
                    self.x_list[i] = self.limits_list[i][0]
            self.fitness = self.fitness_function(self.x_list)

    def __init__(self, iter_num, entity_num, limits_list, fitness_function, F=None, CR=0.9):
        """
        Args:
            iter_num: number of generations to run.
            entity_num: population size.
                paper0: a reasonable choice for entity_num is 5 * D ~ 10 * D
                (D: dimension), and it has to be > 4.
            limits_list: [[low, high], ...] bounds, one pair per dimension.
            fitness_function: callable(x_list) -> scalar, minimized.
            F: differential weight (paper0: F = 0.5); None means "draw a
               random value in [0.5, 1.0) on the first iteration".
            CR: crossover rate (paper0: CR = 0.1).
        """
        self.iter_num = iter_num
        self.entity_num = entity_num
        self.limits_list = limits_list
        self.fitness_function = fitness_function
        self.F = F
        self.CR = CR
        self.entities_list = [self.Entity(limits_list, fitness_function) for i in range(entity_num)]
        self.global_best_entity = self.Entity(limits_list, fitness_function)

    def search(self):
        """Run the DE loop; return (per-iteration best, global best) histories."""
        current_best_entity_list = []
        global_best_entity_list = []
        for iter_index in range(self.iter_num):
            self.entities_list.sort(key=lambda en: en.fitness, reverse=False)
            current_best_entity = self.entities_list[0]
            if current_best_entity.fitness < self.global_best_entity.fitness:
                self.global_best_entity = copy.deepcopy(current_best_entity)
            current_best_entity_list.append(copy.deepcopy(current_best_entity))
            global_best_entity_list.append(copy.deepcopy(self.global_best_entity))
            if self.F is None:
                # Lazily draw F once, uniformly from [0.5, 1.0).
                self.F = random.random() / 2 + 0.5
            for x_index in range(self.entity_num):
                # NOTE(review): sample gives three distinct donors, but unlike
                # DE_0 they may coincide with x_index itself.
                x1_index, x2_index, x3_index = random.sample(range(self.entity_num), 3)
                tmp_x_list = []
                for i, a, b, c in zip(range(len(self.limits_list)),
                                      self.entities_list[x1_index].x_list,
                                      self.entities_list[x2_index].x_list,
                                      self.entities_list[x3_index].x_list):
                    # Mutation: donor component a + F * (b - c)
                    t_x = a + self.F * (b - c)
                    # Crossover: take the donor component with probability CR.
                    if random.random() < self.CR:
                        tmp_x_list.append(t_x)
                    else:
                        tmp_x_list.append(self.entities_list[x_index].x_list[i])
                tmp_entity = self.Entity(self.limits_list, self.fitness_function)
                tmp_entity.x_list = tmp_x_list
                tmp_entity.update()
                # Selection (greedy): keep the trial only if it improves.
                if tmp_entity.fitness < self.entities_list[x_index].fitness:
                    self.entities_list[x_index] = copy.deepcopy(tmp_entity)
        return current_best_entity_list, global_best_entity_list
# if __name__ == '__main__':
# iter_num = 1000
# entity_num = 10
# dim = 5
#
# f1_limits_list = [[-180, 70] for i in range(dim)]
# from GA_pack.fittness_functions.f1 import f1
#
# f1_fitness_function = f1
#
# de = DE_1(iter_num, entity_num, f1_limits_list, f1_fitness_function)
# current_best_entity_list, global_best_entity_list = de.search()
# print('Best entity position:', de.global_best_entity.x_list)
# print('Fitness:', de.global_best_entity.fitness)
#
# # Draw the best universe in each iteration.
# iter_list = [i for i in range(iter_num)]
# cur_fitness_list = [entity.fitness for entity in current_best_entity_list]
# cur_global_fitness_list = [entity.fitness for entity in global_best_entity_list]
#
# import matplotlib.pyplot as plt
#
# fig, ax = plt.subplots()
# line1, = ax.plot(iter_list, cur_fitness_list,
# label='Current Iteration {0}\nentity number {1}\nDimension {2}'.format(iter_num, entity_num, dim))
# line1.set_dashes([2, 2, 10, 2]) # 2pt line, 2pt break, 10pt line, 2pt break
# # line2, = ax.plot(iter_list, cur_global_fitness_list, label='Current Global Iteration {0}\nentity number {1}\nDimension {2}'.format(iter_num, entity_num, dim))
# # line2.set_dashes([2, 2, 10, 2]) # 2pt line, 2pt break, 10pt line, 2pt break
# ax.legend()
# plt.xlabel('Iteration times')
# plt.ylabel('Error rate')
# plt.title('Search the minimum of f1 = sum(Xi ^ 2)')
# plt.show()
class DE_EIS:
    """
    Differential Evolution adapted to EIS equivalent-circuit fitting.

    Bounds come from exp_data_dict['limit'] and the run stops when
    goa_criterion_pack signals convergence (or max_iter_time is reached),
    not after a fixed iteration count.
    Refer:
        Paper:
            paper0: Differential Evolution – A Simple and Efficient Heuristic
                for Global Optimization over Continuous Spaces
        webs:
            Differential Evolution tutorial (Chinese):
            https://blog.csdn.net/qq_37423198/article/details/77856744
    Adjustable parameters:
        casual:
            number of search agents
            number of iteration
        unique:
    Attention:
    Version:
        0
    """
    class Entity:
        # One candidate parameter set for the equivalent circuit described by
        # exp_data_dict; fitness is evaluated against the experimental data.
        def __init__(self, exp_data_dict, fitness_function):
            self.exp_data_dict = exp_data_dict
            self.limits_list = exp_data_dict['limit']
            self.fitness_function = fitness_function
            self.x_list = [random.uniform(limit[0], limit[1]) for limit in self.limits_list]
            self.fitness = fitness_function(self.exp_data_dict, self.x_list)

        def update(self):
            """Clamp the position back into its bounds, then re-evaluate fitness."""
            for i in range(len(self.limits_list)):
                if self.x_list[i] > self.limits_list[i][1]:
                    self.x_list[i] = self.limits_list[i][1]
                if self.x_list[i] < self.limits_list[i][0]:
                    self.x_list[i] = self.limits_list[i][0]
            self.fitness = self.fitness_function(self.exp_data_dict, self.x_list)

    def __init__(self, exp_data_dict, iter_num, entity_num, fitness_function=cal_EIS_WSE_fitness_1, F=None, CR=0.9):
        """
        Args:
            exp_data_dict: experiment description; must contain 'limit'
                (per-parameter [low, high] bounds).
            iter_num: maximum number of generations (passed to the stop
                criterion as max_iter_time).
            entity_num: population size.
                paper0: a reasonable choice is 5 * D ~ 10 * D (D: dimension),
                and it has to be > 4.
            fitness_function: callable(exp_data_dict, x_list) -> scalar.
            F: differential weight (paper0: F = 0.5); None means "draw a
               random value in [0.5, 1.0) on the first iteration".
            CR: crossover rate (paper0: CR = 0.1).
        """
        self.exp_data_dict = exp_data_dict
        self.limits_list = exp_data_dict['limit']
        self.iter_num = iter_num
        self.entity_num = entity_num
        self.fitness_function = fitness_function
        self.F = F
        self.CR = CR
        self.entities_list = [self.Entity(self.exp_data_dict, fitness_function) for i in range(entity_num)]
        self.global_best_entity = self.Entity(self.exp_data_dict, fitness_function)

    def search(self):
        """
        Iterate until goa_criterion_pack stops the run.

        Returns:
            (current_best_entity_list, global_best_entity_list,
             iter_count, chi_squared)
        """
        current_best_entity_list = []
        global_best_entity_list = []
        continue_criterion = True
        # Renamed from `iter` to avoid shadowing the builtin.
        iter_count = 0
        chi_squared = None  # set once the stop criterion has been evaluated
        while continue_criterion:
            self.entities_list.sort(key=lambda en: en.fitness, reverse=False)
            current_best_entity = self.entities_list[0]
            if current_best_entity.fitness < self.global_best_entity.fitness:
                self.global_best_entity = copy.deepcopy(current_best_entity)
            current_best_entity_list.append(copy.deepcopy(current_best_entity))
            global_best_entity_list.append(copy.deepcopy(self.global_best_entity))
            if self.F is None:
                # Lazily draw F once, uniformly from [0.5, 1.0).
                self.F = random.random() / 2 + 0.5
            for x_index in range(self.entity_num):
                x1_index, x2_index, x3_index = random.sample(range(self.entity_num), 3)
                tmp_x_list = []
                for i, a, b, c in zip(range(len(self.limits_list)),
                                      self.entities_list[x1_index].x_list,
                                      self.entities_list[x2_index].x_list,
                                      self.entities_list[x3_index].x_list):
                    # Mutation
                    t_x = a + self.F * (b - c)
                    # Crossover
                    if random.random() < self.CR:
                        tmp_x_list.append(t_x)
                    else:
                        tmp_x_list.append(self.entities_list[x_index].x_list[i])
                tmp_entity = self.Entity(self.exp_data_dict, self.fitness_function)
                tmp_entity.x_list = tmp_x_list
                tmp_entity.update()
                # Selection (Greedy)
                if tmp_entity.fitness < self.entities_list[x_index].fitness:
                    self.entities_list[x_index] = copy.deepcopy(tmp_entity)
            # -------------------------------------- Update global settings --------------------------------------
            # The criterion compares the two latest recorded bests, so at
            # least two iterations must have run first.
            if iter_count >= 1:
                x_lists_list = [global_best_entity_list[-2].x_list, global_best_entity_list[-1].x_list]
                goa_criterion, chi_squared = goa_criterion_pack(x_lists_list=x_lists_list, iter=iter_count,
                                                               max_iter_time=self.iter_num,
                                                               data_dict=self.exp_data_dict)
                if goa_criterion:
                    continue_criterion = False
            iter_count += 1
        return current_best_entity_list, global_best_entity_list, iter_count, chi_squared
class DE_EIS_access:
    """
    DE_EIS variant that also logs each iteration's best result to a text
    file (used by access_DE_EIS for batch benchmarking).
    Refer:
        Paper:
            paper0: Differential Evolution – A Simple and Efficient Heuristic
                for Global Optimization over Continuous Spaces
        webs:
            Differential Evolution tutorial (Chinese):
            https://blog.csdn.net/qq_37423198/article/details/77856744
    Adjustable parameters:
        casual:
            number of search agents
            number of iteration
        unique:
    Attention:
    Version:
        0
    """
    class Entity:
        # One candidate parameter set for the equivalent circuit described by
        # exp_data_dict; fitness is evaluated against the experimental data.
        def __init__(self, exp_data_dict, fitness_function):
            self.exp_data_dict = exp_data_dict
            self.limits_list = exp_data_dict['limit']
            self.fitness_function = fitness_function
            self.x_list = [random.uniform(limit[0], limit[1]) for limit in self.limits_list]
            self.fitness = fitness_function(self.exp_data_dict, self.x_list)

        def update(self):
            """Clamp the position back into its bounds, then re-evaluate fitness."""
            for i in range(len(self.limits_list)):
                if self.x_list[i] > self.limits_list[i][1]:
                    self.x_list[i] = self.limits_list[i][1]
                if self.x_list[i] < self.limits_list[i][0]:
                    self.x_list[i] = self.limits_list[i][0]
            self.fitness = self.fitness_function(self.exp_data_dict, self.x_list)

    def __init__(self, exp_data_dict, iter_num, entity_num, fitness_function=cal_EIS_WSE_fitness_1, F=None, CR=0.9):
        """
        Args:
            exp_data_dict: experiment description; must contain 'limit'
                (per-parameter [low, high] bounds).
            iter_num: maximum number of generations (passed to the stop
                criterion as max_iter_time).
            entity_num: population size.
                paper0: a reasonable choice is 5 * D ~ 10 * D (D: dimension),
                and it has to be > 4.
            fitness_function: callable(exp_data_dict, x_list) -> scalar.
            F: differential weight (paper0: F = 0.5); None means "draw a
               random value in [0.5, 1.0) on the first iteration".
            CR: crossover rate (paper0: CR = 0.1).
        """
        self.exp_data_dict = exp_data_dict
        self.limits_list = exp_data_dict['limit']
        self.iter_num = iter_num
        self.entity_num = entity_num
        self.fitness_function = fitness_function
        self.F = F
        self.CR = CR
        self.entities_list = [self.Entity(self.exp_data_dict, fitness_function) for i in range(entity_num)]
        self.global_best_entity = self.Entity(self.exp_data_dict, fitness_function)

    def search(self, res_fn, start_time):
        """
        Iterate until goa_criterion_pack stops the run, appending one CSV-ish
        line per judged iteration to res_fn:
            iter,[para1,...,paraN],chi_squared,elapsed_seconds
        Returns None (all results go to the file).
        """
        current_best_entity_list = []
        global_best_entity_list = []
        continue_criterion = True
        # Renamed from `iter` to avoid shadowing the builtin.
        iter_count = 0
        while continue_criterion:
            self.entities_list.sort(key=lambda en: en.fitness, reverse=False)
            current_best_entity = self.entities_list[0]
            if current_best_entity.fitness < self.global_best_entity.fitness:
                self.global_best_entity = copy.deepcopy(current_best_entity)
            current_best_entity_list.append(copy.deepcopy(current_best_entity))
            global_best_entity_list.append(copy.deepcopy(self.global_best_entity))
            if self.F is None:
                # Lazily draw F once, uniformly from [0.5, 1.0).
                self.F = random.random() / 2 + 0.5
            for x_index in range(self.entity_num):
                x1_index, x2_index, x3_index = random.sample(range(self.entity_num), 3)
                tmp_x_list = []
                for i, a, b, c in zip(range(len(self.limits_list)),
                                      self.entities_list[x1_index].x_list,
                                      self.entities_list[x2_index].x_list,
                                      self.entities_list[x3_index].x_list):
                    # Mutation
                    t_x = a + self.F * (b - c)
                    # Crossover
                    if random.random() < self.CR:
                        tmp_x_list.append(t_x)
                    else:
                        tmp_x_list.append(self.entities_list[x_index].x_list[i])
                tmp_entity = self.Entity(self.exp_data_dict, self.fitness_function)
                tmp_entity.x_list = tmp_x_list
                tmp_entity.update()
                # Selection (Greedy)
                if tmp_entity.fitness < self.entities_list[x_index].fitness:
                    self.entities_list[x_index] = copy.deepcopy(tmp_entity)
            # -------------------------------------- Update global settings --------------------------------------
            # The criterion compares the two latest recorded bests, so at
            # least two iterations must have run first.
            if iter_count >= 1:
                x_lists_list = [global_best_entity_list[-2].x_list, global_best_entity_list[-1].x_list]
                goa_criterion, chi_squared = goa_criterion_pack(x_lists_list=x_lists_list, \
                                                               iter=iter_count, \
                                                               max_iter_time=self.iter_num, \
                                                               data_dict=self.exp_data_dict, \
                                                               CS_limit=1e-70)
                # Append: iter_time + fitted_para_list + Chi-Squared + running time
                with open(res_fn, 'a+') as file:
                    line = str(iter_count) + ',[' \
                           + ','.join([str(para) for para in global_best_entity_list[-1].x_list]) + '],' \
                           + str(chi_squared) + ',' + str(perf_counter() - start_time) + '\n'
                    file.write(line)
                if goa_criterion:
                    continue_criterion = False
            iter_count += 1
def access_DE_EIS():
    """
    Batch-run DE_EIS_access on the simulated ECM datasets 2..9, repeating
    each fit 100 times; per-repeat results are written to de_ecm<i>_<j>.txt.
    """
    counter = 0
    # Iterate on the ECMs (ECM 1 currently skipped; was range(1, 10)).
    ecm_index_range = range(2, 10)
    repeat_times = 100
    # Fix: the remaining-runs countdown was hard-coded as 900 (9 ECMs * 100),
    # which became wrong once the ECM range was narrowed; derive it instead.
    total_runs = len(ecm_index_range) * repeat_times
    for i in ecm_index_range:
        ecm_sim_folder = '../../../datasets/goa_datasets/simulated'
        ecm_num = i
        ecm_num_str = get_ecm_num_str(ecm_num)
        file_path = os.path.join(ecm_sim_folder, 'ecm_' + ecm_num_str)
        sim_ecm = load_sim_ecm_para_config_dict(ecm_num, file_path)
        para_num = len(sim_ecm['para'])
        # Iterate for 100 times
        for j in range(repeat_times):
            t_start = perf_counter()
            # ------------------------------ Change GOA name ------------------------------
            goa = DE_EIS_access(exp_data_dict=sim_ecm, iter_num=10000, entity_num=10 * para_num)
            res_fn = 'de_ecm{0}_'.format(i) + get_Num_len(num=j, length=2) + '.txt'
            # ------------------------------ Change GOA name ------------------------------
            goa.search(res_fn, start_time=t_start)
            counter += 1
            print('DE left: {0}'.format(total_runs - counter))
# access_DE_EIS()
| 44.465347
| 166
| 0.585081
| 2,904
| 22,455
| 4.229339
| 0.082989
| 0.064322
| 0.056017
| 0.037453
| 0.905064
| 0.897981
| 0.894073
| 0.884954
| 0.878277
| 0.856701
| 0
| 0.02375
| 0.309953
| 22,455
| 505
| 167
| 44.465347
| 0.768635
| 0.264752
| 0
| 0.808664
| 0
| 0
| 0.007006
| 0.00248
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061372
| false
| 0
| 0.036101
| 0
| 0.137184
| 0.00361
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87ba4103abcfd0f5321cf2ea8c53a47098bc5159
| 143
|
py
|
Python
|
rlagent/memories/__init__.py
|
YunjaeChoi/rlagent
|
41062fc1beaa2d5a0765bb782e1a55d1962ab058
|
[
"MIT"
] | null | null | null |
rlagent/memories/__init__.py
|
YunjaeChoi/rlagent
|
41062fc1beaa2d5a0765bb782e1a55d1962ab058
|
[
"MIT"
] | null | null | null |
rlagent/memories/__init__.py
|
YunjaeChoi/rlagent
|
41062fc1beaa2d5a0765bb782e1a55d1962ab058
|
[
"MIT"
] | null | null | null |
from rlagent.memories.base import Memory
from rlagent.memories.replaybuffer import ReplayBuffer
from rlagent.memories.nstep import NStepMemory
| 35.75
| 54
| 0.874126
| 18
| 143
| 6.944444
| 0.5
| 0.264
| 0.456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083916
| 143
| 3
| 55
| 47.666667
| 0.954198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
87dc7c1a3342c1a50641f800e2325fadf800f738
| 2,006
|
py
|
Python
|
malaya_speech/supervised/unet.py
|
dtx525942103/malaya-speech
|
212c4e890d0cbcbbca0037c89a698b68b05db393
|
[
"MIT"
] | null | null | null |
malaya_speech/supervised/unet.py
|
dtx525942103/malaya-speech
|
212c4e890d0cbcbbca0037c89a698b68b05db393
|
[
"MIT"
] | null | null | null |
malaya_speech/supervised/unet.py
|
dtx525942103/malaya-speech
|
212c4e890d0cbcbbca0037c89a698b68b05db393
|
[
"MIT"
] | 1
|
2021-08-19T02:34:41.000Z
|
2021-08-19T02:34:41.000Z
|
from malaya_speech.utils import (
check_file,
load_graph,
generate_session,
nodes_session,
)
from malaya_speech.model.tf import UNET, UNETSTFT, UNET1D
def load(model, module, quantized = False, **kwargs):
    """
    Load a frozen single-output UNET graph and wrap it as a UNET model.

    Presumably `model`/`module` name a downloadable checkpoint — the actual
    resolution happens inside check_file.
    """
    downloaded = check_file(
        file = model,
        module = module,
        keys = {'model': 'model.pb'},
        quantized = quantized,
        **kwargs,
    )
    graph = load_graph(downloaded['model'], **kwargs)
    # Single placeholder in, single logits tensor out.
    input_nodes, output_nodes = nodes_session(
        graph, ['Placeholder'], ['logits']
    )
    return UNET(
        input_nodes = input_nodes,
        output_nodes = output_nodes,
        sess = generate_session(graph = graph, **kwargs),
        model = model,
        name = module,
    )
def load_stft(model, module, instruments, quantized = False, **kwargs):
    """
    Load a frozen multi-output UNET graph (one logits tensor per instrument)
    and wrap it as a UNETSTFT model.
    """
    downloaded = check_file(
        file = model,
        module = module,
        keys = {'model': 'model.pb'},
        quantized = quantized,
        **kwargs,
    )
    graph = load_graph(downloaded['model'], **kwargs)
    # One output node per requested instrument: logits_0, logits_1, ...
    output_names = [f'logits_{index}' for index in range(len(instruments))]
    input_nodes, output_nodes = nodes_session(
        graph, ['Placeholder'], output_names
    )
    return UNETSTFT(
        input_nodes = input_nodes,
        output_nodes = output_nodes,
        instruments = instruments,
        sess = generate_session(graph = graph, **kwargs),
        model = model,
        name = module,
    )
def load_1d(model, module, quantized = False, **kwargs):
    """
    Load a frozen single-output UNET graph and wrap it as a UNET1D model.

    Same loading pipeline as `load`, differing only in the wrapper class.
    """
    downloaded = check_file(
        file = model,
        module = module,
        keys = {'model': 'model.pb'},
        quantized = quantized,
        **kwargs,
    )
    graph = load_graph(downloaded['model'], **kwargs)
    input_nodes, output_nodes = nodes_session(
        graph, ['Placeholder'], ['logits']
    )
    return UNET1D(
        input_nodes = input_nodes,
        output_nodes = output_nodes,
        sess = generate_session(graph = graph, **kwargs),
        model = model,
        name = module,
    )
| 26.051948
| 71
| 0.588734
| 215
| 2,006
| 5.311628
| 0.204651
| 0.078809
| 0.126095
| 0.110333
| 0.801226
| 0.801226
| 0.801226
| 0.801226
| 0.764448
| 0.764448
| 0
| 0.002104
| 0.289133
| 2,006
| 76
| 72
| 26.394737
| 0.798738
| 0
| 0
| 0.676923
| 1
| 0
| 0.054337
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046154
| false
| 0
| 0.030769
| 0
| 0.123077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87e9a1a8199362aa568853bda77b869ca855236b
| 8,901
|
py
|
Python
|
onnxruntime/test/python/contrib_ops/onnx_test_trilu.py
|
dennyac/onnxruntime
|
d5175795d2b7f2db18b0390f394a49238f814668
|
[
"MIT"
] | 6,036
|
2019-05-07T06:03:57.000Z
|
2022-03-31T17:59:54.000Z
|
onnxruntime/test/python/contrib_ops/onnx_test_trilu.py
|
dennyac/onnxruntime
|
d5175795d2b7f2db18b0390f394a49238f814668
|
[
"MIT"
] | 5,730
|
2019-05-06T23:04:55.000Z
|
2022-03-31T23:55:56.000Z
|
onnxruntime/test/python/contrib_ops/onnx_test_trilu.py
|
dennyac/onnxruntime
|
d5175795d2b7f2db18b0390f394a49238f814668
|
[
"MIT"
] | 1,566
|
2019-05-07T01:30:07.000Z
|
2022-03-31T17:06:50.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#
# Test reference implementation and model for ONNX Runtime conrtib op trilu
import onnx
import unittest
import numpy as np
from onnx_contrib_ops_helper import expect
def triu_reference_implementation(x, k=0):
    """Reference result for the Trilu op with upper=1: numpy's triu."""
    upper_part = np.triu(x, k)
    return upper_part
def tril_reference_implementation(x, k=0):
    """Reference result for the Trilu op with upper=0: numpy's tril."""
    lower_part = np.tril(x, k)
    return lower_part
class ONNXReferenceImplementationTest(unittest.TestCase):
    def test_triu(self):
        """Trilu with default attributes on a 3x4x5 batch vs np.triu(x)."""
        node = onnx.helper.make_node(
            'Trilu',
            inputs=['x'],
            outputs=['y'],
            domain="com.microsoft",
        )
        x = np.random.randn(3, 4, 5).astype(np.float32)
        y = triu_reference_implementation(x)
        expect(node, inputs=[x], outputs=[y], name='test_triu')
    def test_triu_neg(self):
        """Trilu with a negative diagonal offset k=-1 vs np.triu(x, -1)."""
        node = onnx.helper.make_node(
            'Trilu',
            inputs=['x', 'k'],
            outputs=['y'],
            domain="com.microsoft",
        )
        x = np.random.randn(3, 4, 5).astype(np.float32)
        k = np.array([-1]).astype(np.int64)
        y = triu_reference_implementation(x, k)
        expect(node, inputs=[x, k], outputs=[y], name='test_triu_neg')
    def test_triu_out_neg(self):
        """Trilu with k=-7, below the matrix extent for 4x5 matrices."""
        node = onnx.helper.make_node(
            'Trilu',
            inputs=['x', 'k'],
            outputs=['y'],
            domain="com.microsoft",
        )
        x = np.random.randn(3, 4, 5).astype(np.float32)
        k = np.array([-7]).astype(np.int64)
        y = triu_reference_implementation(x, k)
        expect(node, inputs=[x, k], outputs=[y], name='test_triu_out_neg')
    def test_triu_pos(self):
        """Trilu with a positive diagonal offset k=2 vs np.triu(x, 2)."""
        node = onnx.helper.make_node(
            'Trilu',
            inputs=['x', 'k'],
            outputs=['y'],
            domain="com.microsoft",
        )
        x = np.random.randn(3, 4, 5).astype(np.float32)
        k = np.array([2]).astype(np.int64)
        y = triu_reference_implementation(x, k)
        expect(node, inputs=[x, k], outputs=[y], name='test_triu_pos')
    def test_triu_out_pos(self):
        """Trilu with k=6, above the matrix extent for 4x5 matrices."""
        node = onnx.helper.make_node(
            'Trilu',
            inputs=['x', 'k'],
            outputs=['y'],
            domain="com.microsoft",
        )
        x = np.random.randn(3, 4, 5).astype(np.float32)
        k = np.array([6]).astype(np.int64)
        y = triu_reference_implementation(x, k)
        expect(node, inputs=[x, k], outputs=[y], name='test_triu_out_pos')
def test_triu_square(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x'],
outputs=['y'],
domain="com.microsoft",
)
x = np.random.randn(3, 5, 5).astype(np.float32)
y = triu_reference_implementation(x)
expect(node, inputs=[x], outputs=[y], name='test_triu_square')
def test_triu_square_neg(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
domain="com.microsoft",
)
x = np.random.randn(3, 5, 5).astype(np.float32)
k = np.array([-1]).astype(np.int64)
y = triu_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_triu_square_neg')
def test_triu_one_row_neg(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
domain="com.microsoft",
)
x = np.random.randn(3, 1, 5).astype(np.float32)
k = np.array([-7]).astype(np.int64)
y = triu_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_triu_one_row_neg')
def test_triu_square_pos(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
domain="com.microsoft",
)
x = np.random.randn(3, 5, 5).astype(np.float32)
k = np.array([2]).astype(np.int64)
y = triu_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_triu_square_pos')
def test_triu_zero(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
domain="com.microsoft",
)
x = np.random.randn(3, 0, 5).astype(np.float32)
k = np.array([6]).astype(np.int64)
y = triu_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_triu_zero')
def test_tril(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 4, 5).astype(np.float32)
y = tril_reference_implementation(x)
expect(node, inputs=[x], outputs=[y], name='test_tril')
def test_tril_neg(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 4, 5).astype(np.float32)
k = np.array([-1]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_neg')
def test_tril_out_neg(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 4, 5).astype(np.float32)
k = np.array([-7]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_out_neg')
def test_tril_pos(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 4, 5).astype(np.float32)
k = np.array([2]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_pos')
def test_tril_out_pos(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 4, 5).astype(np.float32)
k = np.array([6]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_out_pos')
def test_tril_square(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 5, 5).astype(np.float32)
y = tril_reference_implementation(x)
expect(node, inputs=[x], outputs=[y], name='test_tril_square')
def test_tril_square_neg(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 5, 5).astype(np.float32)
k = np.array([-1]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_square_neg')
def test_tril_one_row_neg(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 1, 5).astype(np.float32)
k = np.array([-7]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_one_row_neg')
def test_tril_square_pos(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 5, 5).astype(np.float32)
k = np.array([2]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_square_pos')
def test_tril_zero(self):
node = onnx.helper.make_node(
'Trilu',
inputs=['x', 'k'],
outputs=['y'],
upper=0,
domain="com.microsoft",
)
x = np.random.randn(3, 0, 5).astype(np.float32)
k = np.array([6]).astype(np.int64)
y = tril_reference_implementation(x, k)
expect(node, inputs=[x, k], outputs=[y], name='test_tril_zero')
if __name__ == '__main__':
    # buffer=True captures stdout/stderr during each test and shows it only
    # when the test fails.
    unittest.main(module=__name__, buffer=True)
| 30.693103
| 78
| 0.530053
| 1,135
| 8,901
| 3.999119
| 0.068722
| 0.022913
| 0.0564
| 0.10575
| 0.877286
| 0.864948
| 0.862304
| 0.847323
| 0.847323
| 0.847323
| 0
| 0.026025
| 0.309291
| 8,901
| 289
| 79
| 30.799308
| 0.712264
| 0.018313
| 0
| 0.700422
| 0
| 0
| 0.085194
| 0.004809
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092827
| false
| 0
| 0.016878
| 0.008439
| 0.122363
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87fcae1cfd2dd4c21ad48fdbc16ed252caa5898f
| 13,133
|
py
|
Python
|
scripts/populate_completed_meta_data.py
|
Cooops/ABUpower
|
ec84f8f0177b09a09195a974357d328376df7efc
|
[
"MIT"
] | 2
|
2018-10-09T01:17:21.000Z
|
2019-05-17T11:01:46.000Z
|
scripts/populate_completed_meta_data.py
|
Cooops/ABUpower
|
ec84f8f0177b09a09195a974357d328376df7efc
|
[
"MIT"
] | null | null | null |
scripts/populate_completed_meta_data.py
|
Cooops/ABUpower
|
ec84f8f0177b09a09195a974357d328376df7efc
|
[
"MIT"
] | null | null | null |
import pandas as pd
from db_queries import fetch_data, get_trace_and_log, prune_completed
from gen_utils import database_connection, get_search_words, POWER_CONFIG, DUALS_CONFIG
def generate_stat_history(setCheck, boolCheck):
    """Fetch 90-day completed-sale statistics grouped per card nickname.

    setCheck:  set name used as the nickname prefix (e.g. 'Alpha', 'Revised').
    boolCheck: 'Power' or 'Duals' — selects which card list to aggregate.

    Returns a list containing one array of result rows (nick, avg, min, max,
    count, average listing length in days, sum), or an empty list when the
    set/type combination is not recognized. The original implementation
    duplicated the same SQL template across three branches and implicitly
    returned None for unmatched combinations, which broke the callers'
    `len(...)` checks.
    """
    if boolCheck == 'Power':
        # Only Alpha Black Lotus listings omit the 'MTG' suffix.
        lotus = 'Black Lotus' if setCheck == 'Alpha' else 'Black Lotus MTG'
        cards = [lotus, 'Mox Sapphire', 'Mox Jet', 'Mox Pearl', 'Mox Ruby',
                 'Mox Emerald', 'Timetwister', 'Ancestral Recall', 'Time Walk']
    elif boolCheck == 'Duals':
        cards = ['Tundra MTG', 'Underground Sea MTG', 'Badlands MTG',
                 'Taiga MTG', 'Savannah MTG', 'Scrubland MTG',
                 'Volcanic Island MTG', 'Bayou MTG', 'Plateau MTG',
                 'Tropical Island MTG']
    else:
        return []
    nick_list = ", ".join(f"'{setCheck} {card}'" for card in cards)
    # NOTE(review): setCheck is interpolated directly into the SQL. It comes
    # from the in-repo POWER_CONFIG/DUALS_CONFIG dicts today, but this is
    # not safe for untrusted input — confirm before widening the callers.
    query = (
        f"""
        SELECT completed_product_nick, avg(completed_product_prices), min(completed_product_prices), max(completed_product_prices), count(completed_product_prices), CAST(sum(completed_product_end::date - completed_product_start::date) as double precision)/count(completed_product_end) as average_length, sum(completed_product_prices)
        FROM completed_products
        WHERE completed_product_nick IN ({nick_list})
        AND completed_product_end::date > current_timestamp - interval '90' day
        GROUP BY completed_product_nick;
        """
    )
    data = fetch_data(query)
    return [data.values]
def generate_index_history(setCheck, setId, boolCheck):
    """Fetch a single 90-day "index" row aggregating a whole set's cards.

    setCheck:  set name used as the nickname prefix.
    setId:     numeric id stored as completed_product_set_id.
    boolCheck: 'Power' or 'Duals' — selects which card list to aggregate.

    Returns a list containing one array with the aggregate row (set name,
    set id, summed avg/min/max, mean listing length, summed count, summed
    total), or an empty list when the set/type combination is not
    recognized. The original duplicated the SQL across three branches and
    implicitly returned None for unmatched combinations, which broke the
    callers' `len(...)` checks.
    """
    if boolCheck == 'Power':
        # Only Alpha Black Lotus listings omit the 'MTG' suffix.
        lotus = 'Black Lotus' if setCheck == 'Alpha' else 'Black Lotus MTG'
        cards = [lotus, 'Mox Sapphire', 'Mox Jet', 'Mox Pearl', 'Mox Ruby',
                 'Mox Emerald', 'Timetwister', 'Ancestral Recall', 'Time Walk']
    elif boolCheck == 'Duals':
        cards = ['Tundra MTG', 'Underground Sea MTG', 'Badlands MTG',
                 'Taiga MTG', 'Savannah MTG', 'Scrubland MTG',
                 'Volcanic Island MTG', 'Bayou MTG', 'Plateau MTG',
                 'Tropical Island MTG']
    else:
        return []
    nick_list = ", ".join(f"'{setCheck} {card}'" for card in cards)
    # NOTE(review): setCheck/setId are interpolated directly into the SQL;
    # both come from in-repo config today — not safe for untrusted input.
    query = (
        f"""
        SELECT '{setCheck}', '{setId}', sum(stats.avger), sum(stats.miner), sum(stats.maxer), avg(stats.lengther), sum(stats.counter) ,sum(stats.sumer)
        FROM (SELECT completed_product_nick, avg(completed_product_prices) as avger, min(completed_product_prices) as miner, max(completed_product_prices) as maxer, count(completed_product_prices) as counter, CAST(sum(completed_product_end::date - completed_product_start::date) as double precision)/count(completed_product_end) as lengther, sum(completed_product_prices) as sumer
        FROM completed_products
        WHERE completed_product_nick IN ({nick_list})
        AND completed_product_end::date > current_timestamp - interval '90' day
        GROUP BY completed_product_nick) stats;
        """
    )
    data = fetch_data(query)
    return [data.values]
def insert_stats(cursor, mtgArray):
    """Persist per-card stat rows into production_completed_products_stats.

    mtgArray is a list of row batches; each row supplies the seven stat
    columns in order (nick, avg, min, max, depth, avg length, sum). A
    failed insert is logged via get_trace_and_log and the loop continues.
    """
    for batch in mtgArray:
        for row in batch:
            try:
                cursor.execute("""INSERT INTO production_completed_products_stats(completed_product_nick, completed_product_avg, completed_product_min, completed_product_max, completed_product_depth, completed_product_avg_length, completed_product_sum)
                VALUES (%s, %s, %s, %s, %s, %s, %s)""",
                               (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))
            except Exception as exc:
                get_trace_and_log(exc)
def insert_index(cursor, mtgArray):
    """Persist per-set index rows into production_completed_products_index.

    mtgArray is a list of row batches; each row supplies the eight index
    columns in order (set name, set id, avg, min, max, avg length, count,
    sum). A failed insert is logged via get_trace_and_log and the loop
    continues.
    """
    for batch in mtgArray:
        for row in batch:
            try:
                cursor.execute("""INSERT INTO production_completed_products_index(completed_product_set_name, completed_product_set_id, completed_product_index_avg, completed_product_index_min, completed_product_index_max, completed_product_index_length_avg, completed_product_index_count_sum, completed_product_index_sum)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
                               (row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
            except Exception as exc:
                get_trace_and_log(exc)
def pipe_duals_stats():
    """Pull 90-day dual-land stats for every configured set and store them."""
    cursor = database_connection()
    for set_name, card_type in DUALS_CONFIG.items():
        print(f"Pulling {card_type} from {set_name}")
        stat_rows = generate_stat_history(setCheck=set_name, boolCheck=card_type)
        if len(stat_rows) > 0:
            print(f"Piping nested arrays")
            insert_stats(cursor=cursor, mtgArray=stat_rows)
def pipe_power_stats():
    """Pull 90-day power-nine stats for every configured set and store them."""
    cursor = database_connection()
    for set_name, card_type in POWER_CONFIG.items():
        print(f"Pulling {card_type} from {set_name}")
        stat_rows = generate_stat_history(setCheck=set_name, boolCheck=card_type)
        if len(stat_rows) > 0:
            print(f"Piping nested arrays")
            insert_stats(cursor=cursor, mtgArray=stat_rows)
def pipe_duals_index():
    """Compute and store the per-set dual-land index rows.

    Each recognized set name maps to its fixed completed_product_set_id.
    The original repeated the same branch body four times (once per set);
    this version drives the loop from a single id mapping. Sets without an
    assigned id are skipped, exactly as before.
    """
    # Fixed set-name → completed_product_set_id assignments (from the
    # original if/elif chain).
    set_ids = {'Alpha': 4, 'Beta': 5, 'Unlimited': 6, 'Revised': 7}
    cursor = database_connection()
    for each in DUALS_CONFIG:
        set_id = set_ids.get(each)
        if set_id is None:
            continue
        print(f"Forming {DUALS_CONFIG[each]} index from {each} stats")
        dualsArray = generate_index_history(setCheck=each, setId=set_id, boolCheck=DUALS_CONFIG[each])
        if len(dualsArray) > 0:
            print(f"Piping nested arrays")
            insert_index(cursor=cursor, mtgArray=dualsArray)
def pipe_power_index():
    """Compute and store the per-set power-nine index rows.

    Each recognized set name maps to its fixed completed_product_set_id.
    The original repeated the same branch body three times (once per set);
    this version drives the loop from a single id mapping. Sets without an
    assigned id are skipped, exactly as before.
    """
    # Fixed set-name → completed_product_set_id assignments (from the
    # original if/elif chain).
    set_ids = {'Alpha': 1, 'Beta': 2, 'Unlimited': 3}
    cursor = database_connection()
    for each in POWER_CONFIG:
        set_id = set_ids.get(each)
        if set_id is None:
            continue
        print(f"Pulling {POWER_CONFIG[each]} from {each} stats")
        powerArray = generate_index_history(setCheck=each, setId=set_id, boolCheck=POWER_CONFIG[each])
        if len(powerArray) > 0:
            print(f"Piping nested arrays")
            insert_index(cursor=cursor, mtgArray=powerArray)
def prune_db(cursor):
    """Prune completed listings for every tracked search term.

    Runs prune_completed once per search word so subsequent aggregate
    calculations (averages, etc.) operate on clean data.
    """
    separator = '-------------------------------------'
    for value in get_search_words():
        print(f'Pruning {value}....')
        prune_completed(value, cursor)
    print(separator)
    # NOTE(review): 'Succesfully' typo is preserved — it is a runtime string.
    print('Succesfully pruned completed_products')
    print(separator)
if __name__ == '__main__':
    # Interactive confirmation gate: anything other than Y/y/N/n falls
    # through silently and the script exits without doing anything.
    inputCheck = input('Beginning once-a-day batch calc script -- are you sure you want to proceed?: ')
    if inputCheck in ('Y', 'y'):
        # NOTE(review): 'Beggining' typo below is a runtime string, left as-is.
        print('I understand. Beggining once-a-day batch script.')
        # Prune stale completed listings first so stats/index run on clean data.
        prune_db(cursor=database_connection())
        print()
        # begin piping stats
        pipe_power_stats()
        print()
        pipe_duals_stats()
        print()
        # begin piping index
        pipe_power_index()
        print()
        pipe_duals_index()
        print()
        print('Batch process completed. Data has been successfully inserted.')
    elif inputCheck in ('N', 'n'):
        print('Exiting batch process.')
| 61.657277
| 389
| 0.645473
| 1,509
| 13,133
| 5.404241
| 0.121935
| 0.170693
| 0.080932
| 0.044145
| 0.828571
| 0.820356
| 0.809319
| 0.799877
| 0.784672
| 0.782342
| 0
| 0.004324
| 0.242747
| 13,133
| 212
| 390
| 61.948113
| 0.815686
| 0.02033
| 0
| 0.725389
| 1
| 0.088083
| 0.576266
| 0.208623
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046632
| false
| 0
| 0.015544
| 0
| 0.093264
| 0.15544
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
354c9052b6484bd2a8ee53de9c8d18d73e6ff7f9
| 5,712
|
py
|
Python
|
stats/user_activity.py
|
mchubby-3rdparty/mattermost-privileged
|
8b7eff7eb4c823686b686cf6f4cd14dc7c0f5369
|
[
"MIT"
] | null | null | null |
stats/user_activity.py
|
mchubby-3rdparty/mattermost-privileged
|
8b7eff7eb4c823686b686cf6f4cd14dc7c0f5369
|
[
"MIT"
] | null | null | null |
stats/user_activity.py
|
mchubby-3rdparty/mattermost-privileged
|
8b7eff7eb4c823686b686cf6f4cd14dc7c0f5369
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Someone's Mattermost scripts.
# Copyright (c) 2016-2021 by Someone <someone@somenet.org> (aka. Jan Vales <jan@jvales.net>)
# published under MIT-License
#
# Users online
#
import psycopg2
import psycopg2.extras
import config
def main(dbconn):
    """Build the #user_activity #mmstats markdown report for Mattermost.

    Counts distinct users active within the last day, week, month, and
    since config.cutoff_date, under five definitions of "active": any
    online activity, most recent post anywhere, most recent post in a
    public channel ('O'), in a private channel ('P'), and in any other
    channel type (direct/group messages). Returns the report as one
    markdown table string.

    The original repeated the same cursor/format stanza five times; the
    shared pieces are factored into local helpers below.

    NOTE(review): config.cutoff_date is spliced into the SQL as text; it is
    a trusted configuration value, not user input.
    """

    def fetch_counts(query):
        # One fresh DictCursor per query, matching the original flow.
        cur = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cur.execute(query)
        return cur.fetchall()[0]

    def table_row(label, record):
        # Render one markdown row: |label|day|week|month|cutoff|
        return ("|" + label + "|" + str(record["cnt_day"]) + "|" + str(record["cnt_week"]) +
                "|" + str(record["cnt_month"]) + "|" + str(record["cnt_cutoff"]) + "|\n")

    # Shared SELECT header for the four post-based counts; only the FROM
    # subquery differs between them.
    post_case = ("""
        SELECT sum(CASE WHEN createat > extract(epoch FROM (NOW() - INTERVAL '1 day'))*1000 THEN 1 ELSE 0 END) AS cnt_day,
               sum(CASE WHEN createat > extract(epoch FROM (NOW() - INTERVAL '1 week'))*1000 THEN 1 ELSE 0 END) AS cnt_week,
               sum(CASE WHEN createat > extract(epoch FROM (NOW() - INTERVAL '1 month'))*1000 THEN 1 ELSE 0 END) AS cnt_month,
               sum(CASE WHEN createat > extract(epoch FROM TIMESTAMP '"""+config.cutoff_date+"""')*1000 THEN 1 ELSE 0 END) AS cnt_cutoff
        """)

    msg = "#user_activity #mmstats distinct user activity.\n\n|users ...|day|week|month|since "+config.cutoff_date+"|\n|---|---:|---:|---:|---:|\n"

    # online: latest of session, status, and profile-update activity per user
    record = fetch_counts("""
        SELECT sum(CASE WHEN lastactivity > extract(epoch FROM (NOW() - INTERVAL '1 day'))*1000 THEN 1 ELSE 0 END) AS cnt_day,
               sum(CASE WHEN lastactivity > extract(epoch FROM (NOW() - INTERVAL '1 week'))*1000 THEN 1 ELSE 0 END) AS cnt_week,
               sum(CASE WHEN lastactivity > extract(epoch FROM (NOW() - INTERVAL '1 month'))*1000 THEN 1 ELSE 0 END) AS cnt_month,
               sum(CASE WHEN lastactivity > extract(epoch FROM TIMESTAMP '"""+config.cutoff_date+"""')*1000 THEN 1 ELSE 0 END) AS cnt_cutoff
        FROM (SELECT users.id, GREATEST(status.lastactivityat, MAX(sessions.lastactivityat), users.updateat) as lastactivity
              FROM users LEFT JOIN status ON (users.id = status.userid) LEFT JOIN sessions ON (users.id = sessions.userid)
              GROUP BY users.id, status.lastactivityat, users.updateat) AS a;
        """)
    msg += table_row("online", record)

    # posts: most recent post per user, any channel type.
    # NOTE(review): the trailing "limit 10" is a no-op on this single-row
    # aggregate — presumably leftover from debugging; kept for fidelity.
    record = fetch_counts(post_case + """
        FROM (select userid, max(createat) as createat FROM posts GROUP BY userid ORDER BY createat DESC) AS a limit 10
        """)
    msg += table_row("posted", record)

    # public-channel posts (channels.type = 'O')
    record = fetch_counts(post_case + """
        FROM (select userid, max(posts.createat) as createat FROM posts JOIN channels ON (posts.channelid = channels.id) WHERE channels.type='O' GROUP BY userid ORDER BY createat DESC) AS a
        """)
    msg += table_row("posted in pubchan", record)

    # private-channel posts (channels.type = 'P')
    record = fetch_counts(post_case + """
        FROM (select userid, max(posts.createat) as createat FROM posts JOIN channels ON (posts.channelid = channels.id) WHERE channels.type='P' GROUP BY userid ORDER BY createat DESC) AS a
        """)
    msg += table_row("posted in privchan", record)

    # posts in all other channel types (direct/group messages) — the
    # original comment mislabeled this section as "privchan posts".
    record = fetch_counts(post_case + """
        FROM (select userid, max(posts.createat) as createat FROM posts JOIN channels ON (posts.channelid = channels.id) WHERE channels.type NOT IN ('O', 'P') GROUP BY userid ORDER BY createat DESC) AS a
        """)
    return msg + table_row("posted private", record)
| 72.303797
| 211
| 0.628501
| 797
| 5,712
| 4.439147
| 0.130489
| 0.03957
| 0.062182
| 0.073488
| 0.838044
| 0.830413
| 0.822781
| 0.812606
| 0.812606
| 0.802713
| 0
| 0.035763
| 0.226541
| 5,712
| 78
| 212
| 73.230769
| 0.765052
| 0.042717
| 0
| 0.631579
| 0
| 0.385965
| 0.753849
| 0.024927
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017544
| false
| 0
| 0.052632
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ea3745240198ab0475cdc66a6b1d87cea5d43a77
| 212
|
py
|
Python
|
codewof/programming/content/en/simple-calculator/solution.py
|
uccser-admin/programming-practice-prototype
|
3af4c7d85308ac5bb35bb13be3ec18cac4eb8308
|
[
"MIT"
] | 3
|
2019-08-29T04:11:22.000Z
|
2021-06-22T16:05:51.000Z
|
codewof/programming/content/en/simple-calculator/solution.py
|
uccser-admin/programming-practice-prototype
|
3af4c7d85308ac5bb35bb13be3ec18cac4eb8308
|
[
"MIT"
] | 265
|
2019-05-30T03:51:46.000Z
|
2022-03-31T01:05:12.000Z
|
codewof/programming/content/en/simple-calculator/solution.py
|
samuelsandri/codewof
|
c9b8b378c06b15a0c42ae863b8f46581de04fdfc
|
[
"MIT"
] | 7
|
2019-06-29T12:13:37.000Z
|
2021-09-06T06:49:14.000Z
|
def calculate(x, y, operator):
    """Apply the named arithmetic operator to x and y.

    Supported operators: '+', '-', 'x' (multiply), '/' (floor division).
    Any other operator yields None, matching the original fall-through.
    """
    operations = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        'x': lambda a, b: a * b,
        '/': lambda a, b: a // b,
    }
    op = operations.get(operator)
    if op is not None:
        return op(x, y)
| 21.2
| 30
| 0.448113
| 26
| 212
| 3.653846
| 0.269231
| 0.105263
| 0.336842
| 0.536842
| 0.757895
| 0.757895
| 0.757895
| 0.547368
| 0
| 0
| 0
| 0
| 0.396226
| 212
| 9
| 31
| 23.555556
| 0.742188
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
ea775556ee3e42a1c0554dd058ed6efaf62de2ae
| 460
|
py
|
Python
|
tests/test_d.py
|
zozzz/yapic.json
|
becd15702af3f95e3d981d03d823dd859fe51ac4
|
[
"BSD-2-Clause-FreeBSD"
] | 15
|
2020-04-07T00:08:18.000Z
|
2022-03-25T11:40:49.000Z
|
tests/test_d.py
|
zozzz/yapic.json
|
becd15702af3f95e3d981d03d823dd859fe51ac4
|
[
"BSD-2-Clause-FreeBSD"
] | 14
|
2020-04-07T00:15:45.000Z
|
2021-09-27T11:27:24.000Z
|
tests/test_d.py
|
zozzz/yapic.json
|
becd15702af3f95e3d981d03d823dd859fe51ac4
|
[
"BSD-2-Clause-FreeBSD"
] | 3
|
2020-04-05T23:20:42.000Z
|
2021-04-08T07:07:51.000Z
|
import json as py_json
from yapic import json
def test_d():
    """yapic's dumpb must match the stdlib encoder for non-ASCII text when
    ensure_ascii is disabled."""
    samples = ("Á", "\xFF", "\uFFFF")
    for text in samples:
        expected = py_json.dumps(text, ensure_ascii=False).encode("utf-8")
        assert json.dumpb(text, ensure_ascii=False) == expected
| 46
| 114
| 0.68913
| 71
| 460
| 4.309859
| 0.352113
| 0.215686
| 0.313725
| 0.176471
| 0.617647
| 0.617647
| 0.267974
| 0.267974
| 0.267974
| 0
| 0
| 0.031941
| 0.115217
| 460
| 9
| 115
| 51.111111
| 0.719902
| 0.130435
| 0
| 0
| 0
| 0
| 0.092965
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
576c01844bb58de116583ed992c60d0a1f015c6d
| 1,961
|
py
|
Python
|
python/testData/inspections/PyTypeCheckerInspection/BitwiseOrUnionsAndOldStyleUnionsAreEquivalent.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyTypeCheckerInspection/BitwiseOrUnionsAndOldStyleUnionsAreEquivalent.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyTypeCheckerInspection/BitwiseOrUnionsAndOldStyleUnionsAreEquivalent.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
# PyCharm type-checker inspection fixture: verifies that PEP 604 `X | Y`
# unions and typing.Union/Optional annotations are treated as equivalent.
# The inline <warning ...> tags are expected-highlighting markers consumed
# by the test harness — this file is not directly runnable Python.
from typing import Union, Optional


def expect_old_union(u: Union[int, str]):
    expect_new_union(u)
    expect_new_union(42)
    expect_new_union("42")
    expect_new_union(<warning descr="Expected type 'int | str', got 'list[int]' instead">[42]</warning>)


def expect_new_union(u: int | str):
    expect_old_union(u)
    expect_old_union(42)
    expect_old_union("42")
    expect_old_union(<warning descr="Expected type 'int | str', got 'list[int]' instead">[42]</warning>)


def expect_old_optional(u: Optional[int]):
    expect_new_optional_none_first(u)
    expect_new_optional_none_first(42)
    expect_new_optional_none_first(None)
    expect_new_optional_none_first(<warning descr="Expected type 'int | None', got 'list[int]' instead">[42]</warning>)
    expect_new_optional_none_last(u)
    expect_new_optional_none_last(42)
    expect_new_optional_none_last(None)
    expect_new_optional_none_last(<warning descr="Expected type 'int | None', got 'list[int]' instead">[42]</warning>)


def expect_new_optional_none_first(u: None | int):
    expect_old_optional(u)
    expect_old_optional(42)
    expect_old_optional(None)
    expect_old_optional(<warning descr="Expected type 'int | None', got 'list[int]' instead">[42]</warning>)
    expect_new_optional_none_last(u)
    expect_new_optional_none_last(42)
    expect_new_optional_none_last(None)
    expect_new_optional_none_last(<warning descr="Expected type 'int | None', got 'list[int]' instead">[42]</warning>)


def expect_new_optional_none_last(u: int | None):
    expect_old_optional(u)
    expect_old_optional(42)
    expect_old_optional(None)
    expect_old_optional(<warning descr="Expected type 'int | None', got 'list[int]' instead">[42]</warning>)
    expect_new_optional_none_first(u)
    expect_new_optional_none_first(42)
    expect_new_optional_none_first(None)
    expect_new_optional_none_first(<warning descr="Expected type 'int | None', got 'list[int]' instead">[42]</warning>)
| 40.020408
| 119
| 0.744008
| 293
| 1,961
| 4.604096
| 0.078498
| 0.153447
| 0.226835
| 0.280208
| 0.898443
| 0.898443
| 0.890289
| 0.829503
| 0.829503
| 0.827279
| 0
| 0.021176
| 0.133095
| 1,961
| 48
| 120
| 40.854167
| 0.772353
| 0
| 0
| 0.631579
| 0
| 0
| 0.209077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.026316
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
57de3582b01a9d54a61fbd1e32d7b51c956b06a0
| 252
|
py
|
Python
|
Codewars/8kyu/exclamation-marks-series-number-11-replace-all-vowel-to-exclamation-mark-in-the-sentence/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/exclamation-marks-series-number-11-replace-all-vowel-to-exclamation-mark-in-the-sentence/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/exclamation-marks-series-number-11-replace-all-vowel-to-exclamation-mark-in-the-sentence/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Exercise checks: every vowel in the input must be replaced by '!'.
cases = (
    ('Hi!', 'H!!'),
    ('!Hi! Hi!', '!H!! H!!'),
    ('aeiou', '!!!!!'),
    ('ABCDE', '!BCD!'),
)
for text, expected in cases:
    Test.assert_equals(replace_exclamation(text), expected)
| 36
| 63
| 0.694444
| 33
| 252
| 5.060606
| 0.424242
| 0.239521
| 0.383234
| 0.550898
| 0.850299
| 0.646707
| 0
| 0
| 0
| 0
| 0
| 0.012658
| 0.059524
| 252
| 6
| 64
| 42
| 0.691983
| 0.055556
| 0
| 0
| 0
| 0
| 0.177966
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
17b37ef8ea3f9693d36d41b4d2faf86dcc2bce54
| 16,853
|
py
|
Python
|
bottlechest/src/template/func/median.py
|
biolab/bottlechest
|
31a04204a56aebbea1024e0dfc926186d7ee77ac
|
[
"BSD-2-Clause"
] | 2
|
2017-10-05T19:06:51.000Z
|
2020-04-27T12:23:14.000Z
|
bottlechest/src/template/func/median.py
|
biolab/bottlechest
|
31a04204a56aebbea1024e0dfc926186d7ee77ac
|
[
"BSD-2-Clause"
] | null | null | null |
bottlechest/src/template/func/median.py
|
biolab/bottlechest
|
31a04204a56aebbea1024e0dfc926186d7ee77ac
|
[
"BSD-2-Clause"
] | null | null | null |
"median template"
from copy import deepcopy
import bottlechest as bn
__all__ = ["median", "nanmedian"]
FLOAT_DTYPES = [x for x in bn.dtypes if 'float' in x]
INT_DTYPES = [x for x in bn.dtypes if 'int' in x]
# loops ---------------------------------------------------------------------
loop = {}
loop[1] = """\
if nINDEX0 == 0:
return np.FLOAT(NAN)
k = nAXIS >> 1
l = 0
r = nAXIS - 1
with nogil:
while l < r:
x = b[k]
i = l
j = r
while 1:
while b[i] < x: i += 1
while x < b[j]: j -= 1
if i <= j:
tmp = b[i]
b[i] = b[j]
b[j] = tmp
i += 1
j -= 1
if i > j: break
if j < k: l = i
if k < i: r = j
if nAXIS % 2 == 0:
amax = MINDTYPE
for i in range(k):
ai = b[i]
if ai >= amax:
amax = ai
return np.FLOAT(0.5 * (b[k] + amax))
else:
return np.FLOAT(b[k])
"""
loop[2] = """\
if nINDEX1 == 0:
PyArray_FillWithScalar(y, NAN)
return y
for iINDEX0 in range(nINDEX0):
k = nAXIS >> 1
l = 0
r = nAXIS - 1
while l < r:
x = b[INDEXREPLACE|k|]
i = l
j = r
while 1:
while b[INDEXREPLACE|i|] < x: i += 1
while x < b[INDEXREPLACE|j|]: j -= 1
if i <= j:
tmp = b[INDEXREPLACE|i|]
b[INDEXREPLACE|i|] = b[INDEXREPLACE|j|]
b[INDEXREPLACE|j|] = tmp
i += 1
j -= 1
if i > j: break
if j < k: l = i
if k < i: r = j
if nAXIS % 2 == 0:
amax = MINDTYPE
for i in range(k):
ai = b[INDEXREPLACE|i|]
if ai >= amax:
amax = ai
y[INDEXPOP] = 0.5 * (b[INDEXREPLACE|k|] + amax)
else:
y[INDEXPOP] = CASTb[INDEXREPLACE|k|]
return y
"""
# Float dtypes (not axis=None) ----------------------------------------------
# Code-generation settings for the float dtypes, built as one literal.
floats = {
    'dtypes': FLOAT_DTYPES,
    'axisNone': False,
    'force_output_dtype': False,
    'reuse_non_nan_func': False,
    'top': """
@cython.boundscheck(False)
@cython.wraparound(False)
def NAME_NDIMd_DTYPE_axisAXIS(np.ndarray[np.DTYPE_t, ndim=NDIM] a):
    "Median of NDIMd array with dtype=DTYPE along axis=AXIS."
    cdef np.npy_intp i, j = 0, l, r, k
    cdef np.DTYPE_t x, tmp, amax, ai
    cdef np.ndarray[np.DTYPE_t, ndim=NDIM] b = PyArray_Copy(a)
""",
    'loop': {
        1: loop[1].replace('FLOAT', 'DTYPE'),
        2: loop[2].replace('CAST', ''),
    },
}
# Int dtypes (not axis=None) ------------------------------------------------
# Integer inputs share the float settings but must force a float64 result.
ints = deepcopy(floats)
ints.update(
    dtypes=INT_DTYPES,
    force_output_dtype='float64',
    loop={
        1: loop[1].replace('FLOAT', 'float64'),
        2: loop[2].replace('CAST', '<np.float64_t> '),
    },
)
# Slow, unaccelerated ndim/dtype --------------------------------------------
# Fallback description used when no specialized Cython routine exists.
slow = dict(
    name="median",
    signature="arr",
    func="bn.slow.median(arr, axis=AXIS)",
)
# Template ------------------------------------------------------------------
# Top-level template record tying the pieces above together; the generator
# later also reads median['main'] (assigned further down in this file).
median = {
    'name': 'median',
    'is_reducing_function': True,
    'cdef_output': True,
    'slow': slow,
    'templates': {'float': floats, 'int': ints},
    'pyx_file': 'func/%sbit/median.pyx',
}
median['main'] = '''"median auto-generated from template"
# Select smallest k elements code used for inner loop of median method:
# http://projects.scipy.org/numpy/attachment/ticket/1213/quickselect.pyx
# (C) 2009 Sturla Molden
# SciPy license
#
# From the original C function (code in public domain) in:
# Fast median search: an ANSI C implementation
# Nicolas Devillard - ndevilla AT free DOT fr
# July 1998
# which, in turn, took the algorithm from
# Wirth, Niklaus
# Algorithms + data structures = programs, p. 366
# Englewood Cliffs: Prentice-Hall, 1976
#
# Adapted and expanded for Bottleneck:
# (C) 2010 Keith Goodman
def median(arr, axis=None):
"""
Median of array elements along given axis.
Parameters
----------
arr : array_like
Input array. If `arr` is not an array, a conversion is attempted.
axis : {int, None}, optional
Axis along which the median is computed. The default (axis=None) is to
compute the median of the flattened array.
Returns
-------
y : ndarray
An array with the same shape as `arr`, except that the specified axis
has been removed. If `arr` is a 0d array, or if axis is None, a scalar
is returned. `float64` return values are used for integer inputs.
See also
--------
bottlechest.nanmedian: Median along specified axis ignoring NaNs.
Notes
-----
This function returns the same output as NumPy's median except when the
input contains NaN.
Examples
--------
>>> a = np.array([[10, 7, 4], [3, 2, 1]])
>>> a
array([[10, 7, 4],
[ 3, 2, 1]])
>>> bn.median(a)
3.5
>>> bn.median(a, axis=0)
array([ 6.5, 4.5, 2.5])
>>> bn.median(a, axis=1)
array([ 7., 2.])
"""
func, arr = median_selector(arr, axis)
return func(arr)
def median_selector(arr, axis):
"""
Return median function and array that matches `arr` and `axis`.
Under the hood Bottleneck uses a separate Cython function for each
combination of ndim, dtype, and axis. A lot of the overhead in
bn.median() is in checking that `axis` is within range, converting `arr`
into an array (if it is not already an array), and selecting the function
to use to calculate the mean.
You can get rid of the overhead by doing all this before you, for example,
enter an inner loop, by using the this function.
Parameters
----------
arr : array_like
Input array. If `arr` is not an array, a conversion is attempted.
axis : {int, None}
Axis along which the median is to be computed.
Returns
-------
func : function
The median function that matches the number of dimensions and dtype
of the input array and the axis along which you wish to find the
median.
a : ndarray
If the input array `arr` is not a ndarray, then `a` will contain the
result of converting `arr` into a ndarray.
Examples
--------
Create a numpy array:
>>> arr = np.array([1.0, 2.0, 3.0])
Obtain the function needed to determine the median of `arr` along axis=0:
>>> func, a = bn.func.median_selector(arr, axis=0)
>>> func
<function median_1d_float64_axis0>
Use the returned function and array to determine the median:
>>> func(a)
2.0
"""
cdef np.ndarray a
if type(arr) is np.ndarray:
a = arr
else:
a = np.array(arr, copy=False)
cdef tuple key
cdef int ndim = PyArray_NDIM(a)
cdef int dtype = PyArray_TYPE(a)
if axis is not None:
if axis < 0:
axis += ndim
else:
a = PyArray_Ravel(a, NPY_CORDER)
axis = 0
ndim = 1
key = (ndim, dtype, axis)
try:
func = median_dict[key]
except KeyError:
if (axis < 0) or (axis >= ndim):
raise ValueError("axis(=%d) out of bounds" % axis)
try:
func = median_slow_dict[axis]
except KeyError:
tup = (str(ndim), str(a.dtype), str(axis))
raise TypeError("Unsupported ndim/dtype/axis (%s/%s/%s)." % tup)
return func, a
'''
"nanmedian template"
from copy import deepcopy
import bottlechest as bn
FLOAT_DTYPES = [x for x in bn.dtypes if 'float' in x]
INT_DTYPES = [x for x in bn.dtypes if 'int' in x]
# loops ---------------------------------------------------------------------
loop = {}
loop[1] = """\
if nINDEX0 == 0:
return np.FLOAT(NAN)
j = nAXIS - 1
flag = 1
for i in range(nINDEX0):
if b[i] != b[i]:
while b[j] != b[j]:
if j <= 0:
break
j -= 1
if i >= j:
flag = 0
break
tmp = b[i]
b[i] = b[j]
b[j] = tmp
n = i + flag
k = n >> 1
l = 0
r = n - 1
with nogil:
while l < r:
x = b[k]
i = l
j = r
while 1:
while b[i] < x: i += 1
while x < b[j]: j -= 1
if i <= j:
tmp = b[i]
b[i] = b[j]
b[j] = tmp
i += 1
j -= 1
if i > j: break
if j < k: l = i
if k < i: r = j
if n % 2 == 0:
amax = MINDTYPE
allnan = 1
for i in range(k):
ai = b[i]
if ai >= amax:
amax = ai
allnan = 0
if allnan == 0:
return np.FLOAT(0.5 * (b[k] + amax))
else:
return np.FLOAT(b[k])
else:
return np.FLOAT(b[k])
"""
loop[2] = """\
if nINDEX1 == 0:
PyArray_FillWithScalar(y, NAN)
return y
for iINDEX0 in range(nINDEX0):
j = nAXIS - 1
flag = 1
for i in range(nINDEX1):
if b[INDEXREPLACE|i|] != b[INDEXREPLACE|i|]:
while b[INDEXREPLACE|j|] != b[INDEXREPLACE|j|]:
if j <= 0:
break
j -= 1
if i >= j:
flag = 0
break
tmp = b[INDEXREPLACE|i|]
b[INDEXREPLACE|i|] = b[INDEXREPLACE|j|]
b[INDEXREPLACE|j|] = tmp
n = i + flag
k = n >> 1
l = 0
r = n - 1
while l < r:
x = b[INDEXREPLACE|k|]
i = l
j = r
while 1:
while b[INDEXREPLACE|i|] < x: i += 1
while x < b[INDEXREPLACE|j|]: j -= 1
if i <= j:
tmp = b[INDEXREPLACE|i|]
b[INDEXREPLACE|i|] = b[INDEXREPLACE|j|]
b[INDEXREPLACE|j|] = tmp
i += 1
j -= 1
if i > j: break
if j < k: l = i
if k < i: r = j
if n % 2 == 0:
amax = MINDTYPE
allnan = 1
for i in range(k):
ai = b[INDEXREPLACE|i|]
if ai >= amax:
amax = ai
allnan = 0
if allnan == 0:
y[INDEXPOP] = 0.5 * (b[INDEXREPLACE|k|] + amax)
else:
y[INDEXPOP] = CASTb[INDEXREPLACE|k|]
else:
y[INDEXPOP] = CASTb[INDEXREPLACE|k|]
return y
"""
# Float dtypes (not axis=None) ----------------------------------------------
# Code-generation settings for the float dtypes, built as one literal.
floats = {
    'dtypes': FLOAT_DTYPES,
    'axisNone': False,
    'force_output_dtype': False,
    'reuse_non_nan_func': False,
    'top': """
@cython.boundscheck(False)
@cython.wraparound(False)
def NAME_NDIMd_DTYPE_axisAXIS(np.ndarray[np.DTYPE_t, ndim=NDIM] a):
    "Median of NDIMd array with dtype=DTYPE along axis=AXIS."
    cdef int allnan = 1, flag = 0
    cdef np.npy_intp i = 0, j = 0, l, r, k, n
    cdef np.DTYPE_t x, tmp, amax, ai
    cdef np.ndarray[np.DTYPE_t, ndim=NDIM] b = PyArray_Copy(a)
""",
    'loop': {
        1: loop[1].replace('FLOAT', 'DTYPE'),
        2: loop[2].replace('CAST', ''),
    },
}
# Int dtypes (not axis=None) ------------------------------------------------
# Integer arrays can never hold NaN, so the plain median function is reused.
ints = deepcopy(floats)
ints.update(dtypes=INT_DTYPES, reuse_non_nan_func=True)
# Slow, unaccelerated ndim/dtype --------------------------------------------
# Fallback description used when no specialized Cython routine exists.
slow = dict(
    name="nanmedian",
    signature="arr",
    func="bn.slow.nanmedian(arr, axis=AXIS)",
)
# Template ------------------------------------------------------------------
# Top-level template record for nanmedian; nanmedian['main'] (assigned
# further down in this file) supplies the hand-written Python half.
nanmedian = {
    'name': 'nanmedian',
    'is_reducing_function': True,
    'cdef_output': True,
    'slow': slow,
    'templates': {'float': floats, 'int': ints},
    'pyx_file': 'func/%sbit/median.pyx',
}
nanmedian['main'] = '''"nanmedian auto-generated from template"
# Select smallest k elements code used for inner loop of median method:
# http://projects.scipy.org/numpy/attachment/ticket/1213/quickselect.pyx
# (C) 2009 Sturla Molden
# SciPy license
#
# From the original C function (code in public domain) in:
# Fast median search: an ANSI C implementation
# Nicolas Devillard - ndevilla AT free DOT fr
# July 1998
# which, in turn, took the algorithm from
# Wirth, Niklaus
# Algorithms + data structures = programs, p. 366
# Englewood Cliffs: Prentice-Hall, 1976
#
# Adapted and expanded for Bottleneck:
# (C) 2010 Keith Goodman
def nanmedian(arr, axis=None):
"""
Median of array elements along given axis ignoring NaNs.
Parameters
----------
arr : array_like
Input array. If `arr` is not an array, a conversion is attempted.
axis : {int, None}, optional
Axis along which the median is computed. The default (axis=None) is to
compute the median of the flattened array.
Returns
-------
y : ndarray
An array with the same shape as `arr`, except that the specified axis
has been removed. If `arr` is a 0d array, or if axis is None, a scalar
is returned. `float64` return values are used for integer inputs.
See also
--------
bottlechest.median: Median along specified axis.
Examples
--------
>>> a = np.array([[np.nan, 7, 4], [3, 2, 1]])
>>> a
array([[ nan, 7., 4.],
[ 3., 2., 1.]])
>>> bn.nanmedian(a)
3.0
>> bn.nanmedian(a, axis=0)
array([ 3. , 4.5, 2.5])
>> bn.nanmedian(a, axis=1)
array([ 5.5, 2. ])
"""
func, arr = nanmedian_selector(arr, axis)
return func(arr)
def nanmedian_selector(arr, axis):
"""
Return nanmedian function and array that matches `arr` and `axis`.
Under the hood Bottleneck uses a separate Cython function for each
combination of ndim, dtype, and axis. A lot of the overhead in
bn.nanmedian() is in checking that `axis` is within range, converting `arr`
into an array (if it is not already an array), and selecting the function
to use to calculate the mean.
You can get rid of the overhead by doing all this before you, for example,
enter an inner loop, by using the this function.
Parameters
----------
arr : array_like
Input array. If `arr` is not an array, a conversion is attempted.
axis : {int, None}
Axis along which the median is to be computed.
Returns
-------
func : function
The nanmedian function that matches the number of dimensions and dtype
of the input array and the axis along which you wish to find the
median.
a : ndarray
If the input array `arr` is not a ndarray, then `a` will contain the
result of converting `arr` into a ndarray.
Examples
--------
Create a numpy array:
>>> arr = np.array([1.0, 2.0, 3.0])
Obtain the function needed to determine the median of `arr` along axis=0:
>>> func, a = bn.func.nanmedian_selector(arr, axis=0)
>>> func
<function nanmedian_1d_float64_axis0>
Use the returned function and array to determine the median:
>>> func(a)
2.0
"""
cdef np.ndarray a
if type(arr) is np.ndarray:
a = arr
else:
a = np.array(arr, copy=False)
cdef tuple key
cdef int ndim = PyArray_NDIM(a)
cdef int dtype = PyArray_TYPE(a)
if axis is not None:
if axis < 0:
axis += ndim
else:
a = PyArray_Ravel(a, NPY_CORDER)
axis = 0
ndim = 1
key = (ndim, dtype, axis)
try:
func = nanmedian_dict[key]
except KeyError:
if (axis < 0) or (axis >= ndim):
raise ValueError("axis(=%d) out of bounds" % axis)
try:
func = nanmedian_slow_dict[axis]
except KeyError:
tup = (str(ndim), str(a.dtype), str(axis))
raise TypeError("Unsupported ndim/dtype/axis (%s/%s/%s)." % tup)
return func, a
'''
| 29.309565
| 79
| 0.514804
| 2,218
| 16,853
| 3.87376
| 0.123535
| 0.039339
| 0.019553
| 0.005819
| 0.881983
| 0.86534
| 0.840084
| 0.812849
| 0.809358
| 0.792132
| 0
| 0.021468
| 0.333887
| 16,853
| 574
| 80
| 29.360627
| 0.743898
| 0.046045
| 0
| 0.816162
| 0
| 0.006061
| 0.900516
| 0.045576
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008081
| 0
| 0.038384
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
17fd526fa96a0aac0149c64f6d95b50358a904b4
| 5,667
|
py
|
Python
|
skactiveml/stream/budget_manager/tests/test_estimated_budget.py
|
scikit-activeml/scikit-activeml
|
2191ba452ca4d0fe349678d2a86b1906d79cb96a
|
[
"BSD-3-Clause"
] | 40
|
2020-09-22T00:50:52.000Z
|
2022-03-15T14:16:42.000Z
|
skactiveml/stream/budget_manager/tests/test_estimated_budget.py
|
scikit-activeml/scikit-activeml
|
2191ba452ca4d0fe349678d2a86b1906d79cb96a
|
[
"BSD-3-Clause"
] | 161
|
2020-08-10T09:24:03.000Z
|
2022-03-29T13:39:46.000Z
|
skactiveml/stream/budget_manager/tests/test_estimated_budget.py
|
scikit-activeml/scikit-activeml
|
2191ba452ca4d0fe349678d2a86b1906d79cb96a
|
[
"BSD-3-Clause"
] | 3
|
2021-11-15T09:10:59.000Z
|
2021-12-15T11:40:47.000Z
|
import unittest
import numpy as np
from skactiveml.stream.budget_manager import (
FixedUncertaintyBudget,
VarUncertaintyBudget,
SplitBudget,
)
class TemplateTestEstimatedBudget:
    """Shared test mix-in for estimated-budget budget managers.

    Subclasses provide ``get_budget_manager`` returning the manager class
    under test; the tests here cover parameter validation common to all
    estimated-budget managers.
    """

    def setUp(self):
        # Minimal utilities input used to trigger parameter validation.
        self.utilities = np.array([True, False])

    def _assert_init_raises(self, expected_error, **kwargs):
        # Instantiate the manager with one bad constructor argument and
        # check that validation fires on the first query_by_utility call.
        manager = self.get_budget_manager()(**kwargs)
        self.assertRaises(
            expected_error, manager.query_by_utility, self.utilities
        )

    def test_init_param_budget(self):
        # budget must be defined as a float with a range of: 0 < budget <= 1
        self._assert_init_raises(TypeError, budget="string")
        self._assert_init_raises(ValueError, budget=1.1)
        self._assert_init_raises(ValueError, budget=-1.0)

    def test_init_param_w(self):
        # w must be defined as an int with a range of w > 0
        self._assert_init_raises(TypeError, w="string")
        self._assert_init_raises(TypeError, w=None)
        self._assert_init_raises(TypeError, w=1.1)
        self._assert_init_raises(ValueError, w=0)
        self._assert_init_raises(ValueError, w=-1)

    def test_query_param_utilities(self):
        # utilities must be a float ndarray
        manager = self.get_budget_manager()()
        for bad_utilities in ("string", None, [10, 10]):
            self.assertRaises(
                TypeError, manager.query_by_utility, utilities=bad_utilities
            )

    def test_update_without_query(self):
        # update() must work even if query_by_utility was never called
        manager = self.get_budget_manager()()
        manager.update(np.array([[0], [1], [2]]), np.array([0, 2]))
class TestFixedUncertaintyBudget(
    TemplateTestEstimatedBudget, unittest.TestCase
):
    """Tests for FixedUncertaintyBudget."""

    def get_budget_manager(self):
        return FixedUncertaintyBudget

    def test_init_param_num_classes(self):
        # num_classes must be defined as an int and greater than 0
        cases = [("string", TypeError), (-1, ValueError), (0, ValueError)]
        for num_classes, expected_error in cases:
            manager = self.get_budget_manager()(num_classes=num_classes)
            self.assertRaises(
                expected_error, manager.query_by_utility, self.utilities
            )
class TestVarUncertaintyBudget(TemplateTestEstimatedBudget, unittest.TestCase):
    """Tests for VarUncertaintyBudget."""

    def get_budget_manager(self):
        return VarUncertaintyBudget

    def test_init_param_theta(self):
        # theta must be defined as a float
        manager = self.get_budget_manager()(theta="string")
        self.assertRaises(
            TypeError, manager.query_by_utility, self.utilities
        )

    def test_init_param_s(self):
        # s must be defined as a float with a range of: 0 < s <= 1
        cases = [
            ("string", TypeError),
            (1.1, ValueError),
            (0.0, ValueError),
            (-1.0, ValueError),
        ]
        for s, expected_error in cases:
            manager = self.get_budget_manager()(s=s)
            self.assertRaises(
                expected_error, manager.query_by_utility, self.utilities
            )
class TestSplitBudget(TestVarUncertaintyBudget):
    """Tests for SplitBudget (inherits the VarUncertaintyBudget checks)."""

    def get_budget_manager(self):
        return SplitBudget

    def test_init_param_random_state(self):
        # an invalid random_state must raise ValueError on the first query
        manager = self.get_budget_manager()(random_state="string")
        self.assertRaises(
            ValueError, manager.query_by_utility, self.utilities
        )

    def test_init_param_v(self):
        # v must be defined as a float with a range of: 0 < v < 1
        cases = [
            ("string", TypeError),
            (1.1, ValueError),
            (0.0, ValueError),
            (-1.0, ValueError),
        ]
        for v, expected_error in cases:
            manager = self.get_budget_manager()(v=v)
            self.assertRaises(
                expected_error, manager.query_by_utility, self.utilities
            )
| 36.326923
| 79
| 0.66349
| 662
| 5,667
| 5.406344
| 0.110272
| 0.265158
| 0.123498
| 0.134116
| 0.811679
| 0.811679
| 0.768092
| 0.736798
| 0.716122
| 0.620844
| 0
| 0.009681
| 0.252691
| 5,667
| 155
| 80
| 36.56129
| 0.835419
| 0.079584
| 0
| 0.380952
| 0
| 0
| 0.00922
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.103175
| false
| 0
| 0.02381
| 0.02381
| 0.18254
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aa0838f24d216107ca7930904f920c9ce6c304b1
| 6,267
|
py
|
Python
|
flowcelltool/flowcells/emails.py
|
bihealth/flowcelltool
|
6e16190fc34c54d834ecd23888a462f3af47611d
|
[
"MIT"
] | 7
|
2016-10-12T12:56:09.000Z
|
2020-10-27T17:08:09.000Z
|
flowcelltool/flowcells/emails.py
|
iamh2o/flowcelltool
|
6e16190fc34c54d834ecd23888a462f3af47611d
|
[
"MIT"
] | 94
|
2016-10-24T06:28:31.000Z
|
2018-08-06T10:35:13.000Z
|
flowcelltool/flowcells/emails.py
|
iamh2o/flowcelltool
|
6e16190fc34c54d834ecd23888a462f3af47611d
|
[
"MIT"
] | 1
|
2022-03-23T15:57:16.000Z
|
2022-03-23T15:57:16.000Z
|
# -*- coding: utf-8 -*-
"""Module for sending out emails
"""
from django.core.mail import send_mass_mail
from django.db.models import Q
from django.conf import settings
from flowcelltool.users.models import User
from . import rules
# Templates -------------------------------------------------------------------
# Plain-text bodies for the three notification emails.  {recipient},
# {user}, {full_name} and {flowcell_url} are filled in via str.format by
# the email_flowcell_* functions below.
# NOTE(review): "you have were assigned" in the deleted template is a typo
# in the runtime text; fixing it changes the sent email, so do it in a
# deliberate change, not here.
TEMPLATE_FLOWCELL_CREATED = r"""
Dear {recipient},
The user {user} just created a new flow cell with the following id:
{full_name}
You can see the created flow cell at the following URL:
{flowcell_url}
You are receiving this email because you have the Demultiplexing Administrator
or the Demultiplexing Operator role.
""".lstrip()
TEMPLATE_FLOWCELL_UPDATED = r"""
Dear {recipient},
The user {user} just updated the flow cell with the following id:
{full_name}
You can see the created flow cell at the following URL:
{flowcell_url}
You are receiving this email because you have the Demultiplexing Administrator
or the Demultiplexing Operator role, or you have been assigned as the
demultiplexing operator for this flow cell.
""".lstrip()
TEMPLATE_FLOWCELL_DELETED = r"""
Dear {recipient},
The user {user} just deleted the flow cell with the following id:
{full_name}
You are receiving this email because you have the Demultiplexing Administrator
or the Demultiplexing Operator role, or you have were assigned as the
demultiplexing operator for this flow cell.
""".lstrip()
# Helper Functions ------------------------------------------------------------
def _or_queries(queries):
"""Helper function that concatenates lazy Django queries"""
result = queries.pop()
for item in queries:
result |= item
return result
# Signal Handlers -------------------------------------------------------------
def email_flowcell_created(user, flowcell, request=None):
    """Send email on flow cell creation"""
    # Email sending can be disabled globally via settings.
    if not settings.FLOWCELLS_SEND_EMAILS:
        return
    # Recipients: demux admins/operators, superusers, the acting user and,
    # when set, the flow cell's demux operator and owner.
    recipient_queries = [
        Q(groups__name=group_name)
        for group_name in (rules.DEMUX_ADMIN, rules.DEMUX_OPERATOR)
    ]
    recipient_queries.append(Q(is_superuser=True))
    recipient_queries.append(Q(pk=user.pk))
    if flowcell.demux_operator:
        recipient_queries.append(Q(pk=flowcell.demux_operator.pk))
    if flowcell.owner:
        recipient_queries.append(Q(pk=flowcell.owner.pk))
    recipients = User.objects.filter(_or_queries(recipient_queries))
    recipients = recipients.exclude(email__isnull=True).exclude(email__exact='')
    # Resolve the flow cell URL, absolute if we have a request to do so.
    flowcell_url = flowcell.get_absolute_url()
    if request:
        flowcell_url = request.build_absolute_uri(flowcell_url)
    format_args = {
        'EMAIL_SUBJECT_PREFIX': settings.EMAIL_SUBJECT_PREFIX,
        'full_name': flowcell.get_full_name(),
        'flowcell_url': flowcell_url,
        'user': user,
    }
    subject_template = (
        '{EMAIL_SUBJECT_PREFIX}{user} created new flow cell {full_name}')
    # One (subject, body, from, to) tuple per recipient, built lazily.
    messages = (
        (
            subject_template.format(**format_args),
            TEMPLATE_FLOWCELL_CREATED.format(recipient=u, **format_args),
            settings.EMAIL_SENDER,
            [u.email],
        )
        for u in recipients
    )
    send_mass_mail(messages, fail_silently=not settings.DEBUG)
def email_flowcell_updated(user, flowcell, request=None):
    """Send email on flow cell update"""
    # Email sending can be disabled globally via settings.
    if not settings.FLOWCELLS_SEND_EMAILS:
        return
    # Recipients: demux admins/operators, superusers, the acting user and,
    # when set, the flow cell's owner and demux operator.
    recipient_queries = [
        Q(groups__name=group_name)
        for group_name in (rules.DEMUX_ADMIN, rules.DEMUX_OPERATOR)
    ]
    recipient_queries.append(Q(is_superuser=True))
    recipient_queries.append(Q(pk=user.pk))
    if flowcell.owner:
        recipient_queries.append(Q(pk=flowcell.owner.pk))
    if flowcell.demux_operator:
        recipient_queries.append(Q(pk=flowcell.demux_operator.pk))
    recipients = User.objects.filter(_or_queries(recipient_queries))
    recipients = recipients.exclude(email__isnull=True).exclude(email__exact='')
    # Resolve the flow cell URL, absolute if we have a request to do so.
    flowcell_url = flowcell.get_absolute_url()
    if request:
        flowcell_url = request.build_absolute_uri(flowcell_url)
    format_args = {
        'EMAIL_SUBJECT_PREFIX': settings.EMAIL_SUBJECT_PREFIX,
        'full_name': flowcell.get_full_name(),
        'flowcell_url': flowcell_url,
        'user': user,
    }
    subject_template = (
        '{EMAIL_SUBJECT_PREFIX}{user} updated flow cell {full_name}')
    # One (subject, body, from, to) tuple per recipient, built lazily.
    messages = (
        (
            subject_template.format(**format_args),
            TEMPLATE_FLOWCELL_UPDATED.format(recipient=u, **format_args),
            settings.EMAIL_SENDER,
            [u.email],
        )
        for u in recipients
    )
    send_mass_mail(messages, fail_silently=not settings.DEBUG)
def email_flowcell_deleted(user, flowcell, request=None):
    """Send email on flow cell deletion"""
    # Email sending can be disabled globally via settings.
    if not settings.FLOWCELLS_SEND_EMAILS:
        return
    # Recipients: demux admins/operators, superusers, the acting user and,
    # when set, the flow cell's owner and demux operator.
    recipient_queries = [
        Q(groups__name=group_name)
        for group_name in (rules.DEMUX_ADMIN, rules.DEMUX_OPERATOR)
    ]
    recipient_queries.append(Q(is_superuser=True))
    recipient_queries.append(Q(pk=user.pk))
    if flowcell.owner:
        recipient_queries.append(Q(pk=flowcell.owner.pk))
    if flowcell.demux_operator:
        recipient_queries.append(Q(pk=flowcell.demux_operator.pk))
    recipients = User.objects.filter(_or_queries(recipient_queries))
    recipients = recipients.exclude(email__isnull=True).exclude(email__exact='')
    # No URL here: the flow cell no longer exists after deletion.
    format_args = {
        'EMAIL_SUBJECT_PREFIX': settings.EMAIL_SUBJECT_PREFIX,
        'full_name': flowcell.get_full_name(),
        'user': user,
    }
    subject_template = (
        '{EMAIL_SUBJECT_PREFIX}{user} deleted flow cell {full_name}')
    # One (subject, body, from, to) tuple per recipient, built lazily.
    messages = (
        (
            subject_template.format(**format_args),
            TEMPLATE_FLOWCELL_DELETED.format(recipient=u, **format_args),
            settings.EMAIL_SENDER,
            [u.email],
        )
        for u in recipients
    )
    send_mass_mail(messages, fail_silently=not settings.DEBUG)
| 33.335106
| 79
| 0.675124
| 802
| 6,267
| 5.088529
| 0.164589
| 0.025484
| 0.041166
| 0.035285
| 0.866209
| 0.866209
| 0.866209
| 0.844891
| 0.844891
| 0.814016
| 0
| 0.000201
| 0.206159
| 6,267
| 187
| 80
| 33.513369
| 0.820101
| 0.155896
| 0
| 0.736434
| 0
| 0
| 0.242326
| 0.016015
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031008
| false
| 0
| 0.03876
| 0
| 0.100775
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aa3ca64a8ad24a42a4da70d951f5bd9e0b34ea2f
| 5,710
|
py
|
Python
|
Python/importData.py
|
klocey/MicroMETE
|
5bac4e039be75f4528ccfd8b7ba85500a5495b96
|
[
"Unlicense",
"MIT"
] | null | null | null |
Python/importData.py
|
klocey/MicroMETE
|
5bac4e039be75f4528ccfd8b7ba85500a5495b96
|
[
"Unlicense",
"MIT"
] | null | null | null |
Python/importData.py
|
klocey/MicroMETE
|
5bac4e039be75f4528ccfd8b7ba85500a5495b96
|
[
"Unlicense",
"MIT"
] | null | null | null |
import numpy as np
def import_obs_pred_data(input_filename): # TAKEN FROM THE mete_sads.py script used for White et al. (2012)
    """Load site/observed/predicted data from a space-delimited file.

    Parameters
    ----------
    input_filename : str
        Path to the data file.  Files whose name contains '75' carry two
        prediction columns ('predPln', 'pred7525'); all other files have a
        single 'pred' column.

    Returns
    -------
    numpy structured array with float64 ('f8') fields named as above.
    """
    if '75' in input_filename:
        # Four columns: site, observed, Pln prediction, 75/25 prediction.
        names = ['site', 'obs', 'predPln', 'pred7525']
    else:
        # Three columns: site, observed, predicted.
        names = ['site', 'obs', 'pred']
    # Build the dtype string from the column count instead of duplicating
    # hard-coded "f8,f8,..." literals; dead debug code removed.
    dtype = ",".join(['f8'] * len(names))
    return np.genfromtxt(input_filename, dtype=dtype, names=names,
                         delimiter=" ")
def import_subsampled_data(input_filename):
    """Load a subsampling-results file into a numpy structured array.

    'zipf' files carry an extra 'site2' column and per-level 'gamma'
    estimates; mete/geom files do not.

    NOTE(review): the dtype strings appear to list one fewer 'f8' field
    than the corresponding `names` lists (the comments say 33/27 columns
    while the name lists have 35/28 entries) -- np.genfromtxt rejects such
    a mismatch, so verify this loader against real output files before
    relying on it.
    """
    if ('zipf' in input_filename):
        # 33 for zipf
        # this needs to be fixesd, I put the file name twice in old code
        data = np.genfromtxt(input_filename, \
            dtype = "f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8", \
            names = ['site','site2','N0','S0','Nmax', \
            'N0_05','S0_05','Nmax_05', 'r2_05', 'gamma_05', \
            'N0_025','S0_025','Nmax_025', 'r2_025', 'gamma_025', \
            'N0_0125','S0_0125','Nmax_0125', 'r2_0125', 'gamma_0125', \
            'N0_00625','S0_00625','Nmax_00625', 'r2_00625', 'gamma_00625', \
            'N0_003125','S0_003125','Nmax_003125', 'r2_003125', 'gamma_003125',
            'N0_0015625','S0_0015625','Nmax_0015625','r2_0015625', 'gamma_0015625'], \
            delimiter = " ")
    else:
        # 27 columns for mete and geom
        data = np.genfromtxt(input_filename, \
            dtype = "f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8,f8", \
            names = ['site','N0','S0','Nmax', \
            'N0_05','S0_05','Nmax_05','r2_05', \
            'N0_025','S0_025','Nmax_025','r2_025', \
            'N0_0125','S0_0125','Nmax_0125','r2_0125', \
            'N0_00625','S0_00625','Nmax_00625','r2_00625', \
            'N0_003125','S0_003125','Nmax_003125','r2_003125', \
            'N0_0015625','S0_0015625','Nmax_0015625','r2_0015625'], \
            delimiter = " ")
    return data
def import_subsampled_data_pandas(input_filename):
    """Load a subsampling-results file into a pandas DataFrame.

    Parameters
    ----------
    input_filename : str
        Path to the space-delimited results file.  'zipf' files carry an
        extra 'site2' column plus per-level 'gamma' estimates.

    Returns
    -------
    pandas.DataFrame with one named column per field.

    Fixes versus the original version:
    * pandas is now imported (the module previously referenced ``pd``
      without importing it, raising NameError on every call);
    * the 'zipf' branch now actually reads the file (the read call was
      commented out, so the function raised UnboundLocalError there).
    """
    import pandas as pd  # local import: module level only imports numpy
    if 'zipf' in input_filename:
        # zipf output: extra 'site2' column and a 'gamma' per level.
        names = ['site', 'site2', 'N0', 'S0', 'Nmax',
                 'N0_05', 'S0_05', 'Nmax_05', 'r2_05', 'gamma_05',
                 'N0_025', 'S0_025', 'Nmax_025', 'r2_025', 'gamma_025',
                 'N0_0125', 'S0_0125', 'Nmax_0125', 'r2_0125', 'gamma_0125',
                 'N0_00625', 'S0_00625', 'Nmax_00625', 'r2_00625', 'gamma_00625',
                 'N0_003125', 'S0_003125', 'Nmax_003125', 'r2_003125', 'gamma_003125',
                 'N0_0015625', 'S0_0015625', 'Nmax_0015625', 'r2_0015625', 'gamma_0015625']
    else:
        # mete and geom output: no gamma estimates.
        names = ['site', 'N0', 'S0', 'Nmax',
                 'N0_05', 'S0_05', 'Nmax_05', 'r2_05',
                 'N0_025', 'S0_025', 'Nmax_025', 'r2_025',
                 'N0_0125', 'S0_0125', 'Nmax_0125', 'r2_0125',
                 'N0_00625', 'S0_00625', 'Nmax_00625', 'r2_00625',
                 'N0_003125', 'S0_003125', 'Nmax_003125', 'r2_003125',
                 'N0_0015625', 'S0_0015625', 'Nmax_0015625', 'r2_0015625']
    data_table = pd.read_table(input_filename, names=names, header=None,
                               sep=' ')
    return data_table
def import_NSR2_data(input_filename): # TAKEN FROM THE mete_sads.py script used for White et al. (2012)
    """Load an NSR2 results file into a numpy structured array.

    The column layout depends on the file name:
    * all files share nine leading columns (site, N, S, plus observed and
      predicted Nmax/evenness/skewness);
    * 'Stratified' files and plain (non-HMP, non-zipf) files add 'R2';
    * 'HMP' files add 'R2' and 'NAP' -- the three HMP sub-cases in the old
      code built byte-identical names/dtype, so they are collapsed here;
    * non-HMP 'zipf' files add 'gamma' then 'R2', unless they are 'glm'
      fits, which add only 'R2' (unchanged from the old branch order).

    Returns
    -------
    numpy structured array with float64 ('f8') fields named as above.
    """
    input_filename_str = str(input_filename)
    base_names = ['site', 'N', 'S', 'NmaxObs', 'NmaxPred', 'evennessObs',
                  'evennessPred', 'skewnessObs', 'skewnessPred']
    # Branch precedence matches the original: Stratified, then HMP, then
    # the zipf special case, then the common default.
    if 'Stratified' in input_filename_str:
        extra = ['R2']
    elif 'HMP' in input_filename_str:
        extra = ['R2', 'NAP']
    elif 'zipf' in input_filename_str and 'glm' not in input_filename_str:
        extra = ['gamma', 'R2']
    else:
        extra = ['R2']
    names = base_names + extra
    dtype = ",".join(['f8'] * len(names))
    return np.genfromtxt(input_filename, dtype=dtype, names=names,
                         delimiter=" ")
| 53.867925
| 131
| 0.561996
| 744
| 5,710
| 4.092742
| 0.129032
| 0.172085
| 0.236453
| 0.286371
| 0.87619
| 0.870279
| 0.840723
| 0.818391
| 0.818391
| 0.818391
| 0
| 0.173569
| 0.247285
| 5,710
| 105
| 132
| 54.380952
| 0.5349
| 0.075131
| 0
| 0.744186
| 0
| 0.069767
| 0.36933
| 0.075157
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.05814
| 0
| 0.151163
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a4b427b5c53333a7f41b2cc5b3fefdd3ebb9aa6a
| 15,288
|
py
|
Python
|
test/functional/tests/lazy_writes/test_lazy_writes_clean.py
|
sirizhou/open-cas-linux
|
e623c44b3b637238a8c6041f8659d8eb3766b91c
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
test/functional/tests/lazy_writes/test_lazy_writes_clean.py
|
sirizhou/open-cas-linux
|
e623c44b3b637238a8c6041f8659d8eb3766b91c
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
test/functional/tests/lazy_writes/test_lazy_writes_clean.py
|
sirizhou/open-cas-linux
|
e623c44b3b637238a8c6041f8659d8eb3766b91c
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
#
# Copyright(c) 2020 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
import pytest
from api.cas import casadm
from api.cas.cache_config import CacheMode, CleaningPolicy, CacheModeTrait, SeqCutOffPolicy
from storage_devices.device import Device
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from core.test_run import TestRun
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_tools.fs_utils import create_random_test_file, remove
from test_tools.iostat import IOstatBasic
from test_utils.filesystem.file import File
from test_utils.os_utils import Udev, sync
from test_utils.size import Size, Unit
bs = Size(512, Unit.KibiByte)
mnt_point = "/mnt/cas/"
@pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_clean_stop_cache(cache_mode):
    """
    title: Test of the ability to stop cache in modes with lazy writes.
    description: |
      Test if OpenCAS stops cache in modes with lazy writes without data loss.
    pass_criteria:
      - Cache stopping works properly.
      - Writes to exported object and core device during OpenCAS's work are equal
      - Data on core device is correct after cache is stopped.
    """
    with TestRun.step("Prepare devices for cache and core."):
        cache_dev = TestRun.disks['cache']
        cache_dev.create_partitions([Size(256, Unit.MebiByte)])
        cache_part = cache_dev.partitions[0]
        core_dev = TestRun.disks['core']
        core_dev.create_partitions([Size(512, Unit.MebiByte)])
        core_part = core_dev.partitions[0]
        # Disable udev so the OS does not touch the devices mid-test.
        Udev.disable()

    with TestRun.step(f"Start cache in {cache_mode} mode."):
        cache = casadm.start_cache(cache_part, cache_mode)

    with TestRun.step("Add core to cache."):
        core = cache.add_core(core_part)

    with TestRun.step("Disable cleaning and sequential cutoff."):
        # NOP cleaning + no seq cutoff keeps all writes dirty on the cache
        # until the stop, which is the behaviour under test.
        cache.set_cleaning_policy(CleaningPolicy.nop)
        cache.set_seq_cutoff_policy(SeqCutOffPolicy.never)

    with TestRun.step("Read IO stats before test"):
        core_disk_writes_initial = check_device_write_stats(core_part)
        exp_obj_writes_initial = check_device_write_stats(core)

    with TestRun.step("Write data to the exported object."):
        test_file_main = create_random_test_file("/tmp/test_file_main", Size(64, Unit.MebiByte))
        dd = Dd().output(core.path) \
            .input(test_file_main.full_path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_md5sum_main = test_file_main.md5sum()

    with TestRun.step("Read IO stats after write to the exported object."):
        core_disk_writes_increase = (
            check_device_write_stats(core_part) - core_disk_writes_initial
        )
        exp_obj_writes_increase = (
            check_device_write_stats(core) - exp_obj_writes_initial
        )

    with TestRun.step("Validate IO stats after write to the exported object."):
        # Lazy-write modes must not touch the backend while cache is running.
        if core_disk_writes_increase > 0:
            TestRun.LOGGER.error("Writes should occur only on the exported object.")
        if exp_obj_writes_increase != test_file_main.size.value:
            TestRun.LOGGER.error("Not all writes reached the exported object.")

    with TestRun.step("Read data from the exported object."):
        test_file_1 = File.create_file("/tmp/test_file_1")
        dd = Dd().output(test_file_1.full_path) \
            .input(core.path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_1.refresh_item()
        sync()

    with TestRun.step("Compare md5 sum of test files."):
        if test_file_md5sum_main != test_file_1.md5sum():
            TestRun.LOGGER.error("Md5 sums should be equal.")

    with TestRun.step("Read data from the core device."):
        test_file_2 = File.create_file("/tmp/test_file_2")
        dd = Dd().output(test_file_2.full_path) \
            .input(core_part.path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_2.refresh_item()
        sync()

    with TestRun.step("Compare md5 sum of test files."):
        # Data is still dirty on cache, so raw core content must differ.
        if test_file_md5sum_main == test_file_2.md5sum():
            TestRun.LOGGER.error("Md5 sums should be different.")

    with TestRun.step("Read IO stats before stopping cache."):
        core_disk_writes_before_stop = check_device_write_stats(core_part)

    with TestRun.step("Stop cache."):
        # Stopping the cache flushes all dirty data to the core device.
        cache.stop()

    with TestRun.step("Read IO stats after stopping cache."):
        core_disk_writes_increase = (
            check_device_write_stats(core_part) - core_disk_writes_before_stop
        )

    with TestRun.step("Validate IO stats after stopping cache."):
        if core_disk_writes_increase == 0:
            TestRun.LOGGER.error("Writes should occur on the core device after stopping cache.")
        if core_disk_writes_increase != exp_obj_writes_increase:
            TestRun.LOGGER.error("Write statistics for the core device should be equal "
                                 "to those from the exported object.")

    with TestRun.step("Read data from the core device."):
        # Fixed copy-paste bug: this previously wrote to "/tmp/test_file_2",
        # clobbering test_file_2's backing file (cf. the sibling test which
        # correctly uses "/tmp/test_file_3").
        test_file_3 = File.create_file("/tmp/test_file_3")
        dd = Dd().output(test_file_3.full_path) \
            .input(core_part.path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_3.refresh_item()
        sync()

    with TestRun.step("Compare md5 sum of test files."):
        if test_file_md5sum_main != test_file_3.md5sum():
            TestRun.LOGGER.error("Md5 sums should be equal.")

    with TestRun.step("Delete test files."):
        test_file_main.remove(True)
        test_file_1.remove(True)
        test_file_2.remove(True)
        test_file_3.remove(True)
@pytest.mark.parametrizex("fs", Filesystem)
@pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_clean_remove_core_with_fs(cache_mode, fs):
    """
    title: Test of the ability to remove core from cache in lazy-write modes with filesystem.
    description: |
        Test if OpenCAS removes core from cache in modes with lazy writes and with different
        filesystems without data loss.
    pass_criteria:
      - Core removing works properly.
      - Data on core device is correct after core is removed.
    """
    with TestRun.step("Prepare devices for cache and core."):
        cache_dev = TestRun.disks['cache']
        cache_dev.create_partitions([Size(256, Unit.MebiByte)])
        cache_part = cache_dev.partitions[0]
        core_dev = TestRun.disks['core']
        core_dev.create_partitions([Size(512, Unit.MebiByte)])
        core_part = core_dev.partitions[0]
        # Disable udev so the OS does not touch the devices mid-test.
        Udev.disable()
    with TestRun.step(f"Start cache in {cache_mode} mode."):
        cache = casadm.start_cache(cache_part, cache_mode)
    with TestRun.step(f"Add core with {fs.name} filesystem to cache and mount it."):
        core_part.create_filesystem(fs)
        core = cache.add_core(core_part)
        core.mount(mnt_point)
    with TestRun.step("Disable cleaning and sequential cutoff."):
        # NOP cleaning + no seq cutoff keeps the data dirty on cache until
        # core removal, which is the behaviour this scenario verifies.
        cache.set_cleaning_policy(CleaningPolicy.nop)
        cache.set_seq_cutoff_policy(SeqCutOffPolicy.never)
    with TestRun.step("Create test file and read its md5 sum."):
        test_file_main = create_random_test_file("/tmp/test_file_main", Size(64, Unit.MebiByte))
        test_file_md5sum_main = test_file_main.md5sum()
    with TestRun.step("Copy test file to the exported object."):
        # test_file_1 lives on the mounted exported object (mnt_point).
        test_file_1 = File.create_file(mnt_point + "test_file_1")
        dd = Dd().output(test_file_1.full_path) \
            .input(test_file_main.full_path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_1.refresh_item()
        sync()
    with TestRun.step("Compare md5 sum of test files."):
        if test_file_md5sum_main != test_file_1.md5sum():
            TestRun.LOGGER.error("Md5 sums should be equal.")
    with TestRun.step("Unmount and remove core."):
        # Core removal flushes dirty data down to the core device.
        core.unmount()
        core.remove_core()
    with TestRun.step("Mount core device."):
        # Mount the raw core partition at the same mount point, so
        # test_file_1.full_path now resolves to data on the bare device.
        core_part.mount(mnt_point)
    with TestRun.step("Read data from the core device."):
        test_file_2 = File.create_file("/tmp/test_file_2")
        dd = Dd().output(test_file_2.full_path) \
            .input(test_file_1.full_path) \
            .block_size(bs) \
            .count(int(test_file_1.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_2.refresh_item()
        sync()
    with TestRun.step("Compare md5 sum of test files."):
        # After removal the data on the bare core device must match the source.
        if test_file_md5sum_main != test_file_2.md5sum():
            TestRun.LOGGER.error("Md5 sums should be equal.")
    with TestRun.step("Delete test files."):
        test_file_main.remove(True)
        test_file_1.remove(True)
        test_file_2.remove(True)
    with TestRun.step("Unmount core device."):
        core_part.unmount()
        remove(mnt_point, True, True, True)
@pytest.mark.parametrizex("cache_mode", CacheMode.with_traits(CacheModeTrait.LazyWrites))
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_clean_remove_core_without_fs(cache_mode):
    """
    title: Test of the ability to remove core from cache in lazy-write modes without filesystem.
    description: |
        Test if OpenCAS removes core without filesystem in modes with lazy writes
        without data loss.
    pass_criteria:
      - Core removing works properly.
      - Writes to exported object and core device during OpenCAS's work are equal
      - Data on core device is correct after core is removed.
    """
    with TestRun.step("Prepare devices for cache and core."):
        cache_dev = TestRun.disks['cache']
        cache_dev.create_partitions([Size(256, Unit.MebiByte)])
        cache_part = cache_dev.partitions[0]
        core_dev = TestRun.disks['core']
        core_dev.create_partitions([Size(512, Unit.MebiByte)])
        core_part = core_dev.partitions[0]
        # Disable udev so the OS does not touch the devices mid-test.
        Udev.disable()
    with TestRun.step(f"Start cache in {cache_mode} mode."):
        cache = casadm.start_cache(cache_part, cache_mode)
    with TestRun.step("Add core to cache."):
        core = cache.add_core(core_part)
    with TestRun.step("Disable cleaning and sequential cutoff."):
        # NOP cleaning + no seq cutoff keeps all writes dirty on cache until
        # core removal, which is the behaviour under test.
        cache.set_cleaning_policy(CleaningPolicy.nop)
        cache.set_seq_cutoff_policy(SeqCutOffPolicy.never)
    with TestRun.step("Read IO stats before test"):
        core_disk_writes_initial = check_device_write_stats(core_part)
        exp_obj_writes_initial = check_device_write_stats(core)
    with TestRun.step("Write data to exported object."):
        test_file_main = create_random_test_file("/tmp/test_file_main", Size(64, Unit.MebiByte))
        dd = Dd().output(core.path) \
            .input(test_file_main.full_path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_md5sum_main = test_file_main.md5sum()
    with TestRun.step("Read IO stats after write to the exported object."):
        core_disk_writes_increase = (
            check_device_write_stats(core_part) - core_disk_writes_initial
        )
        exp_obj_writes_increase = (
            check_device_write_stats(core) - exp_obj_writes_initial
        )
    with TestRun.step("Validate IO stats after write to the exported object."):
        # Lazy-write modes must not touch the backend while the core is attached.
        if core_disk_writes_increase > 0:
            TestRun.LOGGER.error("Writes should occur only on the exported object.")
        if exp_obj_writes_increase != test_file_main.size.value:
            TestRun.LOGGER.error("Not all writes reached the exported object.")
    with TestRun.step("Read data from the exported object."):
        test_file_1 = File.create_file("/tmp/test_file_1")
        dd = Dd().output(test_file_1.full_path) \
            .input(core.path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_1.refresh_item()
        sync()
    with TestRun.step("Compare md5 sum of test files."):
        if test_file_md5sum_main != test_file_1.md5sum():
            TestRun.LOGGER.error("Md5 sums should be equal.")
    with TestRun.step("Read data from the core device."):
        test_file_2 = File.create_file("/tmp/test_file_2")
        dd = Dd().output(test_file_2.full_path) \
            .input(core_part.path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_2.refresh_item()
        sync()
    with TestRun.step("Compare md5 sum of test files."):
        # Data is still dirty on cache, so raw core content must differ.
        if test_file_md5sum_main == test_file_2.md5sum():
            TestRun.LOGGER.error("Md5 sums should be different.")
    with TestRun.step("Read IO stats before removing core."):
        core_disk_writes_before_remove = check_device_write_stats(core_part)
    with TestRun.step("Remove core."):
        # Core removal flushes all dirty data to the core device.
        core.remove_core()
    with TestRun.step("Read IO stats after removing core."):
        core_disk_writes_increase = (
            check_device_write_stats(core_part) - core_disk_writes_before_remove
        )
    with TestRun.step("Validate IO stats after removing core."):
        if core_disk_writes_increase == 0:
            TestRun.LOGGER.error("Writes should occur on the core device after removing core.")
        if core_disk_writes_increase != exp_obj_writes_increase:
            TestRun.LOGGER.error("Write statistics for the core device should be equal "
                                 "to those from the exported object.")
    with TestRun.step("Read data from core device again."):
        test_file_3 = File.create_file("/tmp/test_file_3")
        dd = Dd().output(test_file_3.full_path) \
            .input(core_part.path) \
            .block_size(bs) \
            .count(int(test_file_main.size / bs)) \
            .oflag("direct")
        dd.run()
        test_file_3.refresh_item()
        sync()
    with TestRun.step("Compare md5 sum of test files."):
        if test_file_md5sum_main != test_file_3.md5sum():
            TestRun.LOGGER.error("Md5 sums should be equal.")
    with TestRun.step("Delete test files."):
        test_file_main.remove(True)
        test_file_1.remove(True)
        test_file_2.remove(True)
        test_file_3.remove(True)
        remove(mnt_point, True, True, True)
def check_device_write_stats(device: Device):
    """Return the cumulative write counter iostat reports for *device*.

    Used as a before/after probe: callers subtract two readings to measure
    how much was written during a step.  NOTE(review): `total_writes.value`
    unit (sectors vs. bytes) is defined by test_tools.iostat — confirm there.
    """
    return IOstatBasic.get_iostat_list(devices_list=[device])[0].total_writes.value
| 40.768
| 100
| 0.661892
| 2,057
| 15,288
| 4.6772
| 0.090909
| 0.077331
| 0.079514
| 0.029623
| 0.880262
| 0.867893
| 0.846378
| 0.824343
| 0.813845
| 0.786613
| 0
| 0.011285
| 0.23489
| 15,288
| 374
| 101
| 40.877005
| 0.811234
| 0.075026
| 0
| 0.780919
| 0
| 0
| 0.191269
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014134
| false
| 0
| 0.045936
| 0.003534
| 0.063604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35141e07bedec1abf1406391c6bb709f3a8cf026
| 549
|
py
|
Python
|
parsers/twitter_creds.py
|
Bassem95/Test26
|
7703ae439945ec83005881d31ff3712844c77c97
|
[
"MIT"
] | 105
|
2019-07-07T13:15:32.000Z
|
2022-03-25T09:32:43.000Z
|
parsers/twitter_creds.py
|
Bassem95/Test26
|
7703ae439945ec83005881d31ff3712844c77c97
|
[
"MIT"
] | 2
|
2020-03-11T15:40:14.000Z
|
2021-03-05T23:23:38.000Z
|
parsers/twitter_creds.py
|
Bassem95/Test26
|
7703ae439945ec83005881d31ff3712844c77c97
|
[
"MIT"
] | 9
|
2020-02-14T06:39:59.000Z
|
2022-01-15T01:47:50.000Z
|
# -*- coding: utf-8 -*-
#!/usr/bin/python
"""
"""
import twitter
def TwitterApi():
    """Return a python-twitter Api client.

    The bracketed strings below are placeholders — replace them with real
    credentials before use.
    """
    return twitter.Api(consumer_key='[consumer key]',
                       consumer_secret='[consumer secret]',
                       access_token_key='[access token]',
                       access_token_secret='[access token secret]')
def TwitterApiContext():
    """Return a python-twitter Api client for 'context' requests.

    This used to duplicate TwitterApi() verbatim (same placeholder
    credentials); it now delegates so the credentials live in one place.
    Behaviour is unchanged: a fresh twitter.Api instance per call.
    """
    return TwitterApi()
| 28.894737
| 60
| 0.601093
| 56
| 549
| 5.678571
| 0.321429
| 0.27673
| 0.238994
| 0.150943
| 0.786164
| 0.786164
| 0.786164
| 0.786164
| 0.786164
| 0.786164
| 0
| 0.002451
| 0.256831
| 549
| 18
| 61
| 30.5
| 0.776961
| 0.067395
| 0
| 0.727273
| 0
| 0
| 0.262948
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| true
| 0
| 0.090909
| 0.181818
| 0.454545
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 10
|
10295c25c39c807ebca8c37bfbddb2c39f4968c2
| 6,672
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowL2vpnBridgeDomain/cli/equal/golden_output6_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowL2vpnBridgeDomain/cli/equal/golden_output6_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowL2vpnBridgeDomain/cli/equal/golden_output6_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Expected parsed result for the golden 'show l2vpn bridge-domain' output #6.
# The 14 neighbor entries of the GPF VFI are structurally identical, so they
# are generated by a helper instead of being spelled out literally.


def _pw_entries(ips, pw_id):
    """Map each neighbor IP to an identical up/static-mac-0 pseudowire entry."""
    return {
        ip: {"pw_id": {pw_id: {"state": "up", "static_mac_address": 0}}}
        for ip in ips
    }


_GPF_CID_NEIGHBORS = [
    "172.16.74.2", "172.16.70.3", "172.16.70.6", "172.16.70.7",
    "172.16.70.8", "172.16.70.12", "172.16.71.14", "172.16.74.15",
    "172.16.70.17", "172.16.74.20", "172.16.71.26", "172.16.74.26",
    "172.16.70.160", "172.16.70.165",
]

expected_output = {
    "bridge_group": {
        "GTT_DIP": {
            "bridge_domain": {
                "GPF-CID-2723": {
                    "id": 45,
                    "state": "up",
                    "shg_id": 0,
                    "mst_i": 0,
                    "mac_aging_time": 300,
                    "mac_limit": 4000,
                    "mac_limit_action": "none",
                    "mac_limit_notification": "syslog",
                    "filter_mac_address": 0,
                    "ac": {
                        "num_ac": 2,
                        "num_ac_up": 2,
                        "interfaces": {
                            "Bundle-Ether53.2723": {
                                "state": "up",
                                "static_mac_address": 0,
                            },
                            "TenGigabitEthernet0/2/0/3.2723": {
                                "state": "up",
                                "static_mac_address": 0,
                            },
                        },
                    },
                    "vfi": {
                        "num_vfi": 1,
                        "GPF-CID01": {
                            "state": "up",
                            "neighbor": _pw_entries(_GPF_CID_NEIGHBORS, 2723),
                        },
                    },
                    "pw": {"num_pw": 14, "num_pw_up": 14},
                    "pbb": {"num_pbb": 0, "num_pbb_up": 0},
                    "vni": {"num_vni": 0, "num_vni_up": 0},
                },
                "BPI-DIP196-BD": {
                    "id": 46,
                    "state": "up",
                    "shg_id": 0,
                    "mst_i": 0,
                    "mac_aging_time": 300,
                    "mac_limit": 4000,
                    "mac_limit_action": "none",
                    "mac_limit_notification": "syslog",
                    "filter_mac_address": 0,
                    "ac": {
                        "num_ac": 1,
                        "num_ac_up": 1,
                        "interfaces": {
                            "Bundle-Ether53.196": {
                                "state": "up",
                                "static_mac_address": 0,
                            }
                        },
                    },
                    "vfi": {
                        "num_vfi": 1,
                        "BPI196-VFI01": {
                            "state": "up",
                            "neighbor": _pw_entries(["172.16.70.160"], 196),
                        },
                    },
                    "pw": {"num_pw": 1, "num_pw_up": 1},
                    "pbb": {"num_pbb": 0, "num_pbb_up": 0},
                    "vni": {"num_vni": 0, "num_vni_up": 0},
                },
            }
        }
    }
}
| 43.894737
| 86
| 0.203987
| 380
| 6,672
| 3.310526
| 0.181579
| 0.122417
| 0.174881
| 0.228935
| 0.827504
| 0.799682
| 0.786169
| 0.755962
| 0.755962
| 0.700318
| 0
| 0.136732
| 0.685402
| 6,672
| 151
| 87
| 44.18543
| 0.462601
| 0
| 0
| 0.456954
| 0
| 0
| 0.192446
| 0.011091
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
107a9fd4ae4d80fc9e13ea97df00730b571856d7
| 3,266
|
py
|
Python
|
tienda/migrations/0001_initial.py
|
FreyderUrbano/mypets
|
8dffc425979ca2b5b97eaa004afb8b057e3a2a43
|
[
"MIT"
] | null | null | null |
tienda/migrations/0001_initial.py
|
FreyderUrbano/mypets
|
8dffc425979ca2b5b97eaa004afb8b057e3a2a43
|
[
"MIT"
] | null | null | null |
tienda/migrations/0001_initial.py
|
FreyderUrbano/mypets
|
8dffc425979ca2b5b97eaa004afb8b057e3a2a43
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.6 on 2021-10-09 19:56
from django.db import migrations, models
def _pk_plus(*columns):
    """Standard BigAutoField primary key followed by the given (name, field) pairs."""
    pk = ('id', models.BigAutoField(auto_created=True, primary_key=True,
                                    serialize=False, verbose_name='ID'))
    return [pk, *columns]


class Migration(migrations.Migration):
    """Initial schema for the app (generated by Django 3.2.6 on 2021-10-09)."""

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='City',
            fields=_pk_plus(
                ('code', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=150)),
                ('abrev', models.CharField(max_length=4)),
            ),
        ),
        migrations.CreateModel(
            name='Country',
            fields=_pk_plus(
                ('code', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=150)),
                ('abrev', models.CharField(max_length=4)),
            ),
        ),
        migrations.CreateModel(
            name='Identification_type',
            fields=_pk_plus(
                ('type', models.CharField(max_length=150)),
                ('abrow', models.CharField(max_length=4)),
            ),
        ),
        migrations.CreateModel(
            name='Pet',
            fields=_pk_plus(
                ('code', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=150)),
            ),
        ),
        migrations.CreateModel(
            name='Race',
            fields=_pk_plus(
                ('code', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=150)),
                ('abrev', models.CharField(max_length=4)),
            ),
        ),
        migrations.CreateModel(
            name='Session',
            fields=_pk_plus(
                ('ip', models.CharField(max_length=200)),
            ),
        ),
        migrations.CreateModel(
            name='Type',
            fields=_pk_plus(
                ('code', models.CharField(max_length=100)),
                ('name', models.CharField(max_length=150)),
                ('abrev', models.CharField(max_length=4)),
            ),
        ),
        migrations.CreateModel(
            name='user',
            fields=_pk_plus(
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=128)),
                ('email', models.CharField(max_length=200)),
                ('password', models.CharField(max_length=200)),
            ),
        ),
    ]
| 38.880952
| 117
| 0.53368
| 307
| 3,266
| 5.521173
| 0.192182
| 0.185841
| 0.223009
| 0.297345
| 0.828319
| 0.729794
| 0.729794
| 0.729794
| 0.700295
| 0.700295
| 0
| 0.028933
| 0.322719
| 3,266
| 83
| 118
| 39.349398
| 0.737342
| 0.013778
| 0
| 0.697368
| 1
| 0
| 0.058093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.013158
| 0.013158
| 0
| 0.065789
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
108fb60a826ff84afba27a35951fe50710080c5c
| 138
|
py
|
Python
|
test/integration/samples_in/simple_str_return.py
|
Inveracity/flynt
|
b975b6f61893d5db1114d68fbb5d212c4e11aeb8
|
[
"MIT"
] | 487
|
2019-06-10T17:44:56.000Z
|
2022-03-26T01:28:19.000Z
|
test/integration/samples_in/simple_str_return.py
|
Inveracity/flynt
|
b975b6f61893d5db1114d68fbb5d212c4e11aeb8
|
[
"MIT"
] | 118
|
2019-07-03T12:26:39.000Z
|
2022-03-06T22:40:17.000Z
|
test/integration/samples_in/simple_str_return.py
|
Inveracity/flynt
|
b975b6f61893d5db1114d68fbb5d212c4e11aeb8
|
[
"MIT"
] | 25
|
2019-07-10T08:39:58.000Z
|
2022-03-03T14:44:15.000Z
|
# flynt integration-test fixture: bare string-formatting expressions the
# converter is expected to rewrite into f-strings (the escaped-\\r and
# %-formatting variants exercise edge cases).  This file is parsed, never
# executed — `self` is deliberately unresolved.
"echo '{}'\r".format(self.FLUSH_CMD)
"echo '{}'\\r".format(self.FLUSH_CMD)
"echo '%s'\r" % self.FLUSH_CMD
"echo '%s'\\r" % self.FLUSH_CMD
| 27.6
| 37
| 0.623188
| 24
| 138
| 3.416667
| 0.291667
| 0.439024
| 0.585366
| 0.585366
| 1
| 1
| 1
| 0.585366
| 0.585366
| 0
| 0
| 0
| 0.086957
| 138
| 4
| 38
| 34.5
| 0.650794
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
52a62195734b7bdeab98009d6ad57e496f5597ab
| 6,683
|
py
|
Python
|
loldib/getratings/models/NA/na_skarner/na_skarner_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_skarner/na_skarner_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_skarner/na_skarner_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# One empty Ratings subclass per opposing champion.  The 138 former literal
# `class NA_Skarner_Bot_<Champ>(Ratings): pass` statements are generated
# programmatically; type(name, (Ratings,), {}) at module level produces
# classes with the same name, qualname, module and base as the originals.
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie", "Ashe",
    "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum", "Caitlyn",
    "Camille", "Cassiopeia", "Chogath", "Corki", "Darius", "Diana", "Draven",
    "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal", "Fiddlesticks", "Fiora",
    "Fizz", "Galio", "Gangplank", "Garen", "Gnar", "Gragas", "Graves",
    "Hecarim", "Heimerdinger", "Illaoi", "Irelia", "Ivern", "Janna",
    "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx", "Kalista", "Karma", "Karthus",
    "Kassadin", "Katarina", "Kayle", "Kayn", "Kennen", "Khazix", "Kindred",
    "Kled", "KogMaw", "Leblanc", "LeeSin", "Leona", "Lissandra", "Lucian",
    "Lulu", "Lux", "Malphite", "Malzahar", "Maokai", "MasterYi",
    "MissFortune", "MonkeyKing", "Mordekaiser", "Morgana", "Nami", "Nasus",
    "Nautilus", "Nidalee", "Nocturne", "Nunu", "Olaf", "Orianna", "Ornn",
    "Pantheon", "Poppy", "Quinn", "Rakan", "Rammus", "RekSai", "Renekton",
    "Rengar", "Riven", "Rumble", "Ryze", "Sejuani", "Shaco", "Shen",
    "Shyvana", "Singed", "Sion", "Sivir", "Skarner", "Sona", "Soraka",
    "Swain", "Syndra", "TahmKench", "Taliyah", "Talon", "Taric", "Teemo",
    "Thresh", "Tristana", "Trundle", "Tryndamere", "TwistedFate", "Twitch",
    "Udyr", "Urgot", "Varus", "Vayne", "Veigar", "Velkoz", "Vi", "Viktor",
    "Vladimir", "Volibear", "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo",
    "Yorick", "Zac", "Zed", "Ziggs", "Zilean", "Zyra",
)

for _champ in _CHAMPIONS:
    _cls_name = "NA_Skarner_Bot_" + _champ
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})

del _champ, _cls_name
| 16.026379
| 46
| 0.77151
| 972
| 6,683
| 4.878601
| 0.151235
| 0.203712
| 0.407423
| 0.494728
| 0.808941
| 0.808941
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166243
| 6,683
| 416
| 47
| 16.064904
| 0.851041
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
52b4f66346b98c4762465c9924332faed815e0d7
| 62
|
py
|
Python
|
src/practice/list_example.py
|
Yasilagit/Python_Workspace
|
dc2274e685661e29e91b4f9f09cc9f228b17fe17
|
[
"MIT"
] | null | null | null |
src/practice/list_example.py
|
Yasilagit/Python_Workspace
|
dc2274e685661e29e91b4f9f09cc9f228b17fe17
|
[
"MIT"
] | null | null | null |
src/practice/list_example.py
|
Yasilagit/Python_Workspace
|
dc2274e685661e29e91b4f9f09cc9f228b17fe17
|
[
"MIT"
] | null | null | null |
# Demonstrates the two forms of `del` on lists:
#   - `del a[i]` removes a single element,
#   - `del a`    unbinds the name itself.
a = [1, 2, 3, 4, 5, 6]
print(a)

del a[0]  # drop the first element -> [2, 3, 4, 5, 6]
print(a)

del a  # the name `a` no longer exists after this point
try:
    print(a)
except NameError as err:
    # Accessing a deleted name raises NameError; report it instead of
    # letting the example script crash (the original always crashed here).
    print('a is no longer defined:', err)
| 10.333333
| 16
| 0.548387
| 18
| 62
| 1.888889
| 0.555556
| 0.529412
| 0.529412
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14
| 0.193548
| 62
| 6
| 17
| 10.333333
| 0.54
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
52f414dc20b33ccc328de0aa04642b44294ef904
| 4,868
|
py
|
Python
|
ia_programino.py
|
gabrielepessoa/programino
|
c9c775efd79fb846b9ee0117519674a2955c58ae
|
[
"MIT"
] | null | null | null |
ia_programino.py
|
gabrielepessoa/programino
|
c9c775efd79fb846b9ee0117519674a2955c58ae
|
[
"MIT"
] | null | null | null |
ia_programino.py
|
gabrielepessoa/programino
|
c9c775efd79fb846b9ee0117519674a2955c58ae
|
[
"MIT"
] | null | null | null |
import programino
from .game import Game
import math
import copy
class IAProgramino:
    """Minimax player for Programino.

    Player 0 is the human, player 1 is the machine.  Terminal positions
    score +1 (machine won), -1 (human won) or 0 (draw); the machine ply
    maximises and the human ply is assumed to minimise.
    """

    def __init__(self):
        # Number of search nodes visited (profiling/diagnostics only).
        self.run = 0

    def chose_better_move(self, game):
        """Return the best available move for the machine in *game*."""
        return self.play_max(game, True)

    @staticmethod
    def _terminal_value(game):
        """Score a finished game: -1 human win, +1 machine win, 0 draw.

        NOTE(review): the winner is inferred from ``game.turn`` once no
        valid move remains -- confirm against Game's turn semantics.
        """
        if game.turn == 0:
            return -1  # human won
        if game.turn == 1:
            return 1  # machine won
        return 0  # draw

    def play_max(self, game, is_root=False):
        """Maximising (machine) ply of the minimax search.

        Returns the best move when *is_root* is true, otherwise the best
        achievable score from this position.
        """
        self.run += 1
        if not game.valid_moves:
            return self._terminal_value(game)
        best_score = -math.inf
        best_move = None
        for move in game.valid_moves:
            # Search each candidate on a private copy so the caller's
            # game state is never mutated.  (The original also deep-copied
            # *game* itself on entry; that copy was redundant -- the state
            # is only read here, never mutated.)
            child = copy.deepcopy(game)
            child.make_move(*move)
            score = self.play_min(child)
            if score > best_score:
                best_score = score
                best_move = move
        return best_move if is_root else best_score

    def play_min(self, game):
        """Minimising (human) ply: return the lowest reachable score."""
        self.run += 1
        if not game.valid_moves:
            return self._terminal_value(game)
        best_score = math.inf
        for move in game.valid_moves:
            child = copy.deepcopy(game)
            child.make_move(*move)
            best_score = min(best_score, self.play_max(child))
        return best_score
class Worst:
    """Adversarial search player that steers the machine toward defeat.

    Structurally a minimax over Game states, but the minimising ply scores
    finished games with the signs flipped relative to the maximising ply,
    so the move picked at the root is the worst one available for the
    machine.  Human is player 0, machine is player 1.
    """

    def __init__(self):
        # Count of search nodes visited (diagnostics only).
        self.run = 0

    def chose_worst_move(self, game):
        """Return the machine's worst move for *game*."""
        return self.play_max(game, True)

    def play_max(self, game, is_root=False):
        """Maximising ply; yields the move at the root, the score below it."""
        self.run += 1
        game = copy.deepcopy(game)
        if not game.valid_moves:
            # Finished game: -1 human won, +1 machine won, 0 draw.
            if game.turn == 0:
                return -1
            return 1 if game.turn == 1 else 0
        best_score, best_move = -math.inf, None
        for candidate in game.valid_moves:
            scratch = copy.deepcopy(game)  # never mutate the shared state
            scratch.make_move(*candidate)
            score = self.play_min(scratch)
            if score > best_score:
                best_score, best_move = score, candidate
        return best_move if is_root else best_score

    def play_min(self, game):
        """Minimising ply.

        NOTE: terminal scores are intentionally inverted here (+1 when the
        human wins) -- this inversion is what makes the root choice 'worst'.
        """
        self.run += 1
        game = copy.deepcopy(game)
        if not game.valid_moves:
            if game.turn == 0:
                return 1
            return -1 if game.turn == 1 else 0
        lowest = math.inf
        for candidate in game.valid_moves:
            scratch = copy.deepcopy(game)
            scratch.make_move(*candidate)
            lowest = min(lowest, self.play_max(scratch))
        return lowest
| 34.28169
| 109
| 0.525062
| 628
| 4,868
| 3.987261
| 0.122611
| 0.014377
| 0.031949
| 0.044728
| 0.963259
| 0.963259
| 0.963259
| 0.963259
| 0.963259
| 0.963259
| 0
| 0.01563
| 0.39544
| 4,868
| 141
| 110
| 34.524823
| 0.835202
| 0.328061
| 0
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d81a1c1ca780fbe0c10704402600cdd9d60e45b6
| 12,079
|
py
|
Python
|
pygitea/resources.py
|
jo-nas/pygitea
|
0937d26dbd33523c728e2eefa80f327d952864a5
|
[
"WTFPL"
] | null | null | null |
pygitea/resources.py
|
jo-nas/pygitea
|
0937d26dbd33523c728e2eefa80f327d952864a5
|
[
"WTFPL"
] | null | null | null |
pygitea/resources.py
|
jo-nas/pygitea
|
0937d26dbd33523c728e2eefa80f327d952864a5
|
[
"WTFPL"
] | 2
|
2019-01-04T18:43:34.000Z
|
2021-10-18T17:43:25.000Z
|
#!/usr/bin/env python
'''
Gitea API resource map.

Maps each API route to its supported HTTP verbs and, per verb, a
'parameters' dict of parameter names.  The boolean value presumably
marks whether the parameter is required (True) or optional (False) --
verify against the client code that consumes this table.
'''
# NOTE(review): hand-maintained data blob (originally flagged "dirty" by
# its author); edit entries with care.  Fixed the shebang, which pointed
# at the non-existent path /usr/env/bin.
resources = {u'/user/times': {u'get': {'parameters': {}}}, u'/repos/{owner}/{repo}/issues/{index}/times': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'id': True}}, u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/statuses/{sha}': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'sha': True}}, u'get': {'parameters': {u'owner': True, u'repo': True, u'sha': True}}}, u'/orgs/{org}/members': {u'get': {'parameters': {u'org': True}}}, u'/users/search': {u'get': {'parameters': {u'q': False, u'limit': False}}}, u'/repos/{owner}/{repo}/branches/{branch}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'branch': True}}}, u'/users/{username}/repos': {u'get': {'parameters': {u'username': True}}}, u'/repos/{owner}/{repo}/issues/comments/{id}': {u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'id': True}}}, u'/admin/users/{username}/orgs': {u'post': {'parameters': {u'username': True}}}, u'/user/gpg_keys': {u'post': {'parameters': {u'Form': False}}, u'get': {'parameters': {}}}, u'/repos/{owner}/{repo}/keys/{id}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'id': True}}, u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True}}}, u'/repos/{owner}/{repo}/keys': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/orgs/{org}/public_members': {u'get': {'parameters': {u'org': True}}}, u'/teams/{id}': {u'get': {'parameters': {u'id': True}}, u'delete': {'parameters': {u'id': True}}, u'patch': {'parameters': {u'body': False, u'id': True}}}, u'/user/gpg_keys/{id}': {u'get': {'parameters': {u'id': True}}, u'delete': {'parameters': {u'id': True}}}, u'/repos/{owner}/{repo}': {u'get': {'parameters': {u'owner': True, u'repo': True}}, u'delete': {'parameters': {u'owner': True, u'repo': True}}}, 
u'/repos/{owner}/{repo}/times': {u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/users/{username}/tokens': {u'post': {'parameters': {u'name': False}}, u'get': {'parameters': {}}}, u'/user/following/{username}': {u'put': {'parameters': {u'username': True}}, u'delete': {'parameters': {u'username': True}}}, u'/teams/{id}/repos': {u'get': {'parameters': {u'id': True}}}, u'/markdown/raw': {u'post': {'parameters': {u'body': False}}}, u'/orgs/{org}': {u'get': {'parameters': {u'org': True}}, u'patch': {'parameters': {u'body': False, u'org': True}}}, u'/users/{username}/followers': {u'get': {'parameters': {u'username': True}}}, u'/repos/{owner}/{repo}/releases': {u'get': {'parameters': {u'owner': True, u'repo': True, u'body': False}}}, u'/teams/{id}/members/{username}': {u'put': {'parameters': {u'username': True, u'id': True}}, u'delete': {'parameters': {u'username': True, u'id': True}}}, u'/repos/{owner}/{repo}/milestones': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True, u'id': True}}}, u'/repos/{owner}/{repo}/collaborators': {u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/releases/{id}': {u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'id': True}}}, u'/repos/{owner}/{repo}/subscription': {u'put': {'parameters': {u'owner': True, u'repo': True}}, u'get': {'parameters': {u'owner': True, u'repo': True}}, u'delete': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/issues/comments': {u'get': {'parameters': {u'owner': True, u'repo': True, u'string': False}}}, u'/teams/{id}/members': {u'get': {'parameters': {u'id': True}}}, u'/repos/{owner}/{repo}/raw/{filepath}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'filepath': True}}}, u'/orgs/{org}/hooks': {u'get': {'parameters': {}}}, 
u'/repos/{owner}/{repo}/issue/{index}/labels/{id}': {u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True, u'index': True}}}, u'/repos/{owner}/{repo}/branches': {u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/users/{username}': {u'get': {'parameters': {u'username': True}}}, u'/user/orgs': {u'get': {'parameters': {}}}, u'/repos/{owner}/{repo}/hooks': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/subscribers': {u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/admin/users': {u'post': {'parameters': {u'body': False}}}, u'/orgs/{org}/repos': {u'get': {'parameters': {u'org': True}}}, u'/users/{username}/following': {u'get': {'parameters': {u'username': True}}}, u'/repos/{owner}/{repo}/mirror-sync': {u'post': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/editorconfig/{filepath}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'filepath': True}}}, u'/user/following': {u'get': {'parameters': {}}}, u'/admin/users/{username}/repos': {u'post': {'parameters': {u'username': True}}}, u'/org/{org}/repos': {u'post': {'parameters': {u'body': False, u'org': True}}}, u'/repos/{owner}/{repo}/issue/{index}/labels': {u'put': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'index': True}}, u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'index': True}}, u'delete': {'parameters': {u'owner': True, u'repo': True, u'index': True}}}, u'/repos/{owner}/{repo}/pulls/{index}/merge': {u'post': {'parameters': {u'owner': True, u'repo': True, u'index': True}}, u'get': {'parameters': {u'owner': True, u'repo': True, u'index': True}}}, u'/user/repos': {u'post': {'parameters': {u'body': False}}, u'get': {'parameters': {}}}, u'/users/{follower}/following/{followee}': {u'get': {'parameters': {u'follower': True, u'followee': True}}}, 
u'/repos/{owner}/{repo}/collaborators/{collaborator}': {u'put': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'collaborator': True}}, u'get': {'parameters': {u'owner': True, u'repo': True, u'collaborator': True}}, u'delete': {'parameters': {u'owner': True, u'repo': True, u'collaborator': True}}}, u'/user/keys': {u'post': {'parameters': {u'body': False}}, u'get': {'parameters': {}}}, u'/repos/{owner}/{repo}/times/{tracker}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'user': True}}}, u'/repos/search': {u'get': {'parameters': {u'exclusive': False, u'uid': False, u'q': False, u'limit': False, u'mode': False, u'page': False}}}, u'/users/{username}/gpg_keys': {u'get': {'parameters': {u'username': True}}}, u'/orgs/{org}/hooks/': {u'post': {'parameters': {}}}, u'/user/subscriptions': {u'get': {'parameters': {}}}, u'/markdown': {u'post': {'parameters': {u'body': False}}}, u'/repos/{owner}/{repo}/issue/{index}/comments': {u'get': {'parameters': {u'owner': True, u'repo': True, u'id': True, u'string': False}}}, u'/orgs/{org}/teams': {u'post': {'parameters': {u'body': False, u'org': True}}, u'get': {'parameters': {u'org': True}}}, u'/user/emails': {u'post': {'parameters': {u'body': False}}, u'get': {'parameters': {}}, u'delete': {'parameters': {u'body': False}}}, u'/repos/{owner}/{repo}/issues': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True, u'state': False, u'page': False}}}, u'/user/following/{followee}': {u'get': {'parameters': {u'followee': True}}}, u'/user/{username}/orgs': {u'get': {'parameters': {u'username': False}}}, u'/repos/migrate': {u'post': {'parameters': {u'body': False}}}, u'/users/{username}/subscriptions': {u'get': {'parameters': {u'username': False}}}, u'/user/starred/{owner}/{repo}': {u'put': {'parameters': {u'owner': True, u'repo': True}}, u'get': {'parameters': {u'owner': True, u'repo': True}}, u'delete': {'parameters': {u'owner': True, 
u'repo': True}}}, u'/orgs/{org}/hooks/{id}': {u'get': {'parameters': {}}, u'delete': {'parameters': {}}, u'patch': {'parameters': {}}}, u'/repos/{owner}/{repo}/hooks/{id}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'id': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False}}}, u'/repos/{owner}/{repo}/archive/{filepath}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'archive': True}}}, u'/repos/{owner}/{repo}/stargazers': {u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/user/keys/{id}': {u'get': {'parameters': {u'id': True}}, u'delete': {'parameters': {u'id': True}}}, u'/user': {u'get': {'parameters': {}}}, u'/admin/users/{username}': {u'delete': {'parameters': {u'username': True}}, u'patch': {'parameters': {u'username': True, u'body': False}}}, u'/repos/{owner}/{repo}/milestones/{id}': {u'get': {'parameters': {}}, u'delete': {'parameters': {u'owner': True, u'repo': True, u'body': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False}}}, u'/admin/users/{username}/keys': {u'post': {'parameters': {u'username': True}}}, u'/orgs/{org}/members/{username}': {u'get': {'parameters': {u'username': True, u'org': True}}, u'delete': {'parameters': {u'username': True, u'org': True}}}, u'/orgs/{org}/public_members/{username}': {u'put': {'parameters': {u'username': True, u'org': True}}, u'get': {'parameters': {u'username': True, u'org': True}}, u'delete': {'parameters': {u'username': True, u'org': True}}}, u'/repos/{owner}/{repo}/commits/{ref}/statuses': {u'get': {'parameters': {u'owner': True, u'repo': True, u'ref': True}}}, u'/users/{username}/keys': {u'get': {'parameters': {u'username': True}}}, u'/repos/{owner}/{repo}/labels': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/issues/{index}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'index': True}}, u'patch': 
{'parameters': {u'owner': True, u'repo': True, u'body': False, u'index': True}}}, u'/repos/{owner}/{repo}/issues/{index}/labels': {u'get': {'parameters': {u'owner': True, u'repo': True, u'index': True}}}, u'/repos/{owner}/{repo}/forks': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/repos/{owner}/{repo}/pulls': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False}}, u'get': {'parameters': {u'owner': True, u'repo': True}}}, u'/user/starred': {u'get': {'parameters': {}}}, u'/repos/{owner}/{repo}/pulls/{index}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'index': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'index': True}}}, u'/repos/{owner}/{repo}/labels/{id}': {u'get': {'parameters': {u'owner': True, u'repo': True, u'id': True}}, u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'id': True}}}, u'/repositories/{id}': {u'get': {'parameters': {u'id': True}}}, u'/repos/{user}/{repo}/hooks/{id}': {u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True}}}, u'/repos/{owner}/{repo}/issues/{index}/comments': {u'post': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'id': True}}}, u'/admin/users/{username}/keys/{id}': {u'delete': {'parameters': {u'username': True, u'id': True}}}, u'/users/{username}/starred': {u'get': {'parameters': {u'username': True}}}, u'/user/followers': {u'get': {'parameters': {}}}, u'/repos/{owner}/{repo}/issues/{index}/comments/{id}': {u'delete': {'parameters': {u'owner': True, u'repo': True, u'id': True, u'index': True}}, u'patch': {'parameters': {u'owner': True, u'repo': True, u'body': False, u'id': True, u'index': True}}}, u'/teams/{id}/repos/{org}/{repo}': {u'put': {'parameters': {u'repo': True, u'org': True, u'id': True}}, u'delete': {'parameters': {u'repo': True, 
u'org': True, u'id': True}}}, u'/version': {u'get': {'parameters': {}}}}
| 1,509.875
| 11,972
| 0.594172
| 1,817
| 12,079
| 3.947166
| 0.053385
| 0.170106
| 0.148355
| 0.15686
| 0.909928
| 0.839375
| 0.800892
| 0.716258
| 0.621584
| 0.588678
| 0
| 0
| 0.084941
| 12,079
| 7
| 11,973
| 1,725.571429
| 0.648874
| 0.007865
| 0
| 0
| 0
| 0
| 0.501253
| 0.180361
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8374f2140d12fa51924eb213eefc65678901851
| 129
|
py
|
Python
|
pccm/main.py
|
FindDefinition/PCCM
|
fa0cc4e41f886f288bbacf92cea1625d927a54ad
|
[
"MIT"
] | 3
|
2021-10-21T06:26:46.000Z
|
2022-03-10T11:14:40.000Z
|
pccm/main.py
|
FindDefinition/PCCM
|
fa0cc4e41f886f288bbacf92cea1625d927a54ad
|
[
"MIT"
] | 1
|
2021-09-13T02:25:05.000Z
|
2021-09-13T02:27:50.000Z
|
pccm/main.py
|
FindDefinition/PCCM
|
fa0cc4e41f886f288bbacf92cea1625d927a54ad
|
[
"MIT"
] | null | null | null |
"""
pccm xxx.yyy.Class -o xxx.so --shared
pccm-gen xxx.yyy.Class1 xxx.yyy.Class2 xxx.yyy.Class3 --output_dir .
"""
import fire
| 16.125
| 68
| 0.697674
| 23
| 129
| 3.869565
| 0.652174
| 0.269663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026786
| 0.131783
| 129
| 7
| 69
| 18.428571
| 0.767857
| 0.821705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dc6baf36ccaa3b18edc32cd47bff50ec3775cbce
| 83
|
py
|
Python
|
Python_Exercicios/ex097.py
|
gabrieldepaiva/Exercicios-CursoEmVideo
|
118231c24f040ca0ac3d3b6e6bf633e4eaa06858
|
[
"MIT"
] | null | null | null |
Python_Exercicios/ex097.py
|
gabrieldepaiva/Exercicios-CursoEmVideo
|
118231c24f040ca0ac3d3b6e6bf633e4eaa06858
|
[
"MIT"
] | null | null | null |
Python_Exercicios/ex097.py
|
gabrieldepaiva/Exercicios-CursoEmVideo
|
118231c24f040ca0ac3d3b6e6bf633e4eaa06858
|
[
"MIT"
] | null | null | null |
def print2(txt):
    """Print *txt* between two tilde rules matching its length."""
    rule = '~' * len(txt)
    print(rule)
    print(txt)
    print(rule)
| 20.75
| 25
| 0.506024
| 11
| 83
| 3.818182
| 0.454545
| 0.571429
| 0.52381
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.240964
| 83
| 4
| 26
| 20.75
| 0.650794
| 0
| 0
| 0.5
| 0
| 0
| 0.02381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
dc711d40e276db66e776db4b7667565a1f805b56
| 365
|
py
|
Python
|
src/model/__init__.py
|
week-with-me/quiz-server
|
35287496aba8381119cf9fc3fcbb8a06d34c7a34
|
[
"MIT"
] | null | null | null |
src/model/__init__.py
|
week-with-me/quiz-server
|
35287496aba8381119cf9fc3fcbb8a06d34c7a34
|
[
"MIT"
] | null | null | null |
src/model/__init__.py
|
week-with-me/quiz-server
|
35287496aba8381119cf9fc3fcbb8a06d34c7a34
|
[
"MIT"
] | null | null | null |
from src.model.actor import Actor
from src.model.movie import Movie
from src.model.genre import Genre
from src.model.nation import Nation
from src.model.director import Director
from src.model.movie_actor import MovieActor
from src.model.movie_genre import MovieGenre
from src.model.movie_nation import MovieNation
from src.model.movie_director import MovieDirector
| 40.555556
| 50
| 0.854795
| 58
| 365
| 5.310345
| 0.224138
| 0.204545
| 0.350649
| 0.275974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09589
| 365
| 9
| 50
| 40.555556
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f4fc2a0f59cecb0119d1e80788abb0dd52bdb761
| 12,211
|
py
|
Python
|
tests/test_ncclimo.py
|
jhkennedy/processflow
|
c404bd3ad043fd6ae18d4f24d735777574faa660
|
[
"MIT"
] | null | null | null |
tests/test_ncclimo.py
|
jhkennedy/processflow
|
c404bd3ad043fd6ae18d4f24d735777574faa660
|
[
"MIT"
] | null | null | null |
tests/test_ncclimo.py
|
jhkennedy/processflow
|
c404bd3ad043fd6ae18d4f24d735777574faa660
|
[
"MIT"
] | null | null | null |
import os
import sys
import unittest
import inspect
from configobj import ConfigObj
if sys.path[0] != '.':
sys.path.insert(0, os.path.abspath('.'))
from jobs.Ncclimo import Climo as Ncclimo
from jobs.JobStatus import JobStatus
from lib.events import EventList
from lib.util import print_message
class TestNcclimo(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestNcclimo, self).__init__(*args, **kwargs)
config_path = os.path.join(
os.getcwd(), 'tests', 'test_configs', 'test_run_no_sta.cfg')
self.config = ConfigObj(config_path)
self.project_path = os.path.join(os.getcwd(), '..', 'testproject')
def test_ncclimo_setup(self):
"""
Run ncclimo setup on valid config
"""
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
config = {
'account': '',
'year_set': 1,
'start_year': 50,
'end_year': 55,
'caseId': self.config['global']['experiment'],
'annual_mode': 'sdd',
'input_directory': os.path.join(self.project_path, 'input'),
'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'ne30', 'climo', '5yr'),
'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'fv129x256', 'climo', '5yr'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(self.project_path, 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
self.assertEqual(ncclimo.status.name, 'VALID')
def test_ncclimo_valid_prevalidate(self):
"""
Test that valid input config will be marked as valid by the job
"""
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
config = {
'account': '',
'year_set': 1,
'start_year': 50,
'end_year': 55,
'caseId': self.config['global']['experiment'],
'annual_mode': 'sdd',
'input_directory': os.path.join(self.project_path, 'input'),
'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'ne30', 'climo', '5yr'),
'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'fv129x256', 'climo', '5yr'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(self.project_path, 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
ncclimo.status = JobStatus.VALID
self.assertFalse(ncclimo.prevalidate(config))
def test_ncclimo_missing_input(self):
"""
Test that a missing input item will invalidate the job
"""
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
config = {
'account': '',
'year_set': 1,
'start_year': 50,
'end_year': 55,
'caseId': self.config['global']['experiment'],
'annual_mode': 'sdd',
#'input_directory': os.path.join(self.project_path, 'input'),
'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'ne30', 'climo', '5yr'),
'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'fv129x256', 'climo', '5yr'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(self.project_path, 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
self.assertEqual(ncclimo.status.name, 'INVALID')
def test_ncclimo_execute_not_completed(self):
"""
Test that ncclimo will do all proper setup in an incomplete run
"""
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
start_year = 51
end_year = 55
self.config['global']['project_path'] = '/p/user_pub/e3sm/baldwin32/E3SM_test_data/DECKv1b_1pctCO2_not_complete'
self.config['global']['exeriment'] = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
year_set_string = '{start:04d}-{end:04d}'.format(
start=start_year,
end=end_year)
config = {
'account': '',
'year_set': 1,
'start_year': start_year,
'end_year': end_year,
'caseId': self.config['global']['experiment'],
'annual_mode': 'sdd',
'input_directory': os.path.join(self.config['global']['project_path'], 'input', 'atm'),
'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'ne30', 'climo', '5yr'),
'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'pp', 'fv129x256', 'climo', '5yr'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(self.config['global']['project_path'], 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
self.assertEqual(ncclimo.status, JobStatus.VALID)
self.assertFalse(ncclimo.execute(dryrun=True))
self.assertEqual(ncclimo.status.name, 'COMPLETED')
def test_ncclimo_execute_completed(self):
"""
test that if ncclimo is told to run on a project thats already completed ncclimo
for the given yearset it will varify that the output is present and not run again
"""
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
start_year = 1
end_year = 10
# REAL DATA
project_path = '/p/user_pub/e3sm/baldwin32/E3SM_test_data/DECKv1b_1pctCO2_complete'
year_set_string = '{start:04d}-{end:04d}'.format(
start=start_year,
end=end_year)
config = {
'account': '',
'year_set': 1,
'start_year': start_year,
'end_year': end_year,
'caseId': '20180129.DECKv1b_piControl.ne30_oEC.edison',
'annual_mode': 'sdd',
'input_directory': os.path.join(project_path, 'input', 'atm'),
'climo_output_directory': os.path.join(project_path, 'output', 'pp', 'ne30', 'climo', '10yr'),
'regrid_output_directory': os.path.join(project_path, 'output', 'pp', 'fv129x256', 'climo', '10yr'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(project_path, 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
self.assertEqual(ncclimo.status, JobStatus.VALID)
ncclimo.execute(dryrun=True)
self.assertTrue(ncclimo.postvalidate())
def test_ncclimo_execute_bad_year(self):
"""
test that if given the wrong input year ncclimo will exit correctly
"""
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
start_year = 55
end_year = 60
self.config['global']['project_path'] = '/p/user_pub/e3sm/baldwin32/E3SM_test_data/DECKv1b_1pctCO2_complete'
self.config['global']['exeriment'] = '20180215.DECKv1b_1pctCO2.ne30_oEC.edison'
year_set_string = '{start:04d}-{end:04d}'.format(
start=start_year,
end=end_year)
config = {
'account': '',
'year_set': 1,
'start_year': start_year,
'end_year': end_year,
'caseId': self.config['global']['experiment'],
'annual_mode': 'sdd',
'input_directory': os.path.join(self.config['global']['project_path'], 'input', 'atm'),
'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'climo', '5yr'),
'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'climo_regrid'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(self.config['global']['project_path'], 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
self.assertEqual(ncclimo.status, JobStatus.VALID)
self.assertFalse(ncclimo.postvalidate())
def test_ncclimo_execute_bad_regrid_dir(self):
print '\n'; print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
start_year = 56
end_year = 60
self.config['global']['project_path'] = '/p/cscratch/acme/baldwin32/20171016/'
self.config['global']['exeriment'] = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
year_set_string = '{start:04d}-{end:04d}'.format(
start=start_year,
end=end_year)
config = {
'account': '',
'year_set': 1,
'start_year': start_year,
'end_year': end_year,
'caseId': self.config['global']['experiment'],
'annual_mode': 'sdd',
'input_directory': os.path.join(self.config['global']['project_path'], 'input', 'atm'),
'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'climo', '5yr'),
'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'XXYYZZ'),
'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
'year_set': 1,
'run_scripts_path': os.path.join(self.config['global']['project_path'], 'output', 'run_scripts')
}
ncclimo = Ncclimo(
config=config,
event_list=EventList())
self.assertEqual(ncclimo.status, JobStatus.VALID)
self.assertFalse(ncclimo.postvalidate())
def test_ncclimo_execute_bad_climo_dir(self):
    """
    Test that ncclimo will correctly exit if given a non-existent climo dir:
    the job constructs as VALID but postvalidate() must report False.
    """
    # print() form works under both Python 2 and 3.
    print('\n')
    print_message('---- Starting Test: {} ----'.format(inspect.stack()[0][3]), 'ok')
    start_year = 56
    end_year = 60
    self.config['global']['project_path'] = '/p/cscratch/acme/baldwin32/20171016/'
    # FIX: the original assigned to the misspelled key 'exeriment' while the
    # config below reads 'experiment' — write the key that is actually read.
    self.config['global']['experiment'] = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
    config = {
        'account': '',
        # FIX: 'year_set' was listed twice in the original dict literal;
        # the duplicate (identical value) has been removed.
        'year_set': 1,
        'start_year': start_year,
        'end_year': end_year,
        'caseId': self.config['global']['experiment'],
        'annual_mode': 'sdd',
        'input_directory': os.path.join(self.config['global']['project_path'], 'input', 'atm'),
        # Deliberately bogus climo directory to exercise the failure path.
        'climo_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'climo', 'XXYYZZ'),
        'regrid_output_directory': os.path.join(self.config['global']['project_path'], 'output', 'climo_regrid'),
        'regrid_map_path': self.config['ncclimo']['regrid_map_path'],
        'run_scripts_path': os.path.join(self.config['global']['project_path'], 'output', 'run_scripts')
    }
    ncclimo = Ncclimo(
        config=config,
        event_list=EventList())
    self.assertEqual(ncclimo.status, JobStatus.VALID)
    self.assertFalse(ncclimo.postvalidate())
# Allow the test suite to be run directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 46.965385
| 136
| 0.586111
| 1,387
| 12,211
| 4.930065
| 0.119683
| 0.067271
| 0.086575
| 0.057327
| 0.831237
| 0.808424
| 0.797748
| 0.784294
| 0.778883
| 0.768353
| 0
| 0.027835
| 0.249775
| 12,211
| 259
| 137
| 47.146718
| 0.71859
| 0.005733
| 0
| 0.728507
| 0
| 0
| 0.307336
| 0.089782
| 0
| 0
| 0
| 0
| 0.063348
| 0
| null | null | 0
| 0.040724
| null | null | 0.040724
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7601183f480a507e53beec4a7a2c174bc0e2743b
| 31,667
|
py
|
Python
|
tests/python/spec/test_stl_spec_discrete_time_online_evaluation.py
|
sguysc/rtamt
|
a16db77b61028f774d81457ff22e666229a5432c
|
[
"BSD-3-Clause"
] | 24
|
2019-12-04T00:20:16.000Z
|
2022-03-24T17:48:14.000Z
|
tests/python/spec/test_stl_spec_discrete_time_online_evaluation.py
|
sguysc/rtamt
|
a16db77b61028f774d81457ff22e666229a5432c
|
[
"BSD-3-Clause"
] | 142
|
2020-01-16T15:36:21.000Z
|
2022-03-28T20:40:45.000Z
|
tests/python/spec/test_stl_spec_discrete_time_online_evaluation.py
|
sguysc/rtamt
|
a16db77b61028f774d81457ff22e666229a5432c
|
[
"BSD-3-Clause"
] | 17
|
2020-07-07T20:32:08.000Z
|
2022-03-07T07:20:22.000Z
|
import unittest
import math
import rtamt
class TestSTLSpecDiscreteTimeOnlineEvaluation(unittest.TestCase):
    """Online (sample-by-sample) evaluation tests for rtamt discrete-time STL.

    Each test builds an STLDiscreteTimeSpecification, parses a small formula
    over the input variables 'req'/'gnt', feeds five samples through
    spec.update() and checks the robustness value returned at each step.

    FIX: the original file defined ``test_always_with_pastify`` twice; the
    second definition (which actually exercised the unbounded ``until``
    operator) silently shadowed the first, so the 'always' pastify test never
    ran. The duplicate has been renamed ``test_until_with_pastify``.
    Gratuitous trailing semicolons have also been removed.
    """

    def __init__(self, *args, **kwargs):
        super(TestSTLSpecDiscreteTimeOnlineEvaluation, self).__init__(*args, **kwargs)
        # Five-sample input traces shared by most tests: 'req' takes leftN
        # and 'gnt' takes rightN at time step N-1.
        self.left1 = 100
        self.right1 = 20
        self.left2 = -1
        self.right2 = -2
        self.left3 = -2
        self.right3 = 10
        self.left4 = 5
        self.right4 = 4
        self.left5 = -1
        self.right5 = -1

    def test_constant(self):
        """A constant formula yields the constant at every time step."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('out', 'float')
        spec.spec = 'out = 5'
        spec.parse()
        out1 = spec.update(0, [])
        out2 = spec.update(1, [])
        self.assertEqual(out1, 5, "input 1")
        self.assertEqual(out2, 5, "input 2")

    def test_constant_2(self):
        """A declared constant behaves exactly like a literal constant."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_const('c', 'int', 5)
        spec.declare_var('out', 'float')
        spec.spec = 'out = c'
        spec.parse()
        out1 = spec.update(0, [])
        out2 = spec.update(1, [])
        self.assertEqual(out1, 5, "input 1")
        self.assertEqual(out2, 5, "input 2")

    def test_addition(self):
        """out = req + gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req + gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 120, "input 1")
        self.assertEqual(out2, -3, "input 2")
        self.assertEqual(out3, 8, "input 3")
        self.assertEqual(out4, 9, "input 4")
        self.assertEqual(out5, -2, "input 5")

    def test_subtraction(self):
        """out = req - gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req - gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 80, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, -12, "input 3")
        self.assertEqual(out4, 1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_multiplication(self):
        """out = req * gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req * gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 2000, "input 1")
        self.assertEqual(out2, 2, "input 2")
        self.assertEqual(out3, -20, "input 3")
        self.assertEqual(out4, 20, "input 4")
        self.assertEqual(out5, 1, "input 5")

    def test_division(self):
        """out = req / gnt; expected values use the interpreter's own division."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req / gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 100 / 20, "input 1")
        self.assertEqual(out2, -1 / -2, "input 2")
        self.assertEqual(out3, -2 / 10, "input 3")
        self.assertEqual(out4, 5 / 4, "input 4")
        self.assertEqual(out5, -1 / -1, "input 5")

    def test_abs(self):
        """out = abs(req)."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = abs(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, 2, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, 1, "input 5")

    def test_sqrt(self):
        """out = sqrt(req), compared against math.sqrt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = sqrt(req)'
        spec.parse()
        out1 = spec.update(0, [('req', 2)])
        out2 = spec.update(1, [('req', 3.3)])
        out3 = spec.update(2, [('req', 4.5)])
        out4 = spec.update(3, [('req', 0.1)])
        out5 = spec.update(4, [('req', 1)])
        self.assertEqual(out1, math.sqrt(2), "input 1")
        self.assertEqual(out2, math.sqrt(3.3), "input 2")
        self.assertEqual(out3, math.sqrt(4.5), "input 3")
        self.assertEqual(out4, math.sqrt(0.1), "input 4")
        self.assertEqual(out5, math.sqrt(1), "input 5")

    def test_previous(self):
        """prev(req): first sample has no predecessor, yielding +inf."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = prev(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, float("inf"), "input 1")
        self.assertEqual(out2, 100, "input 2")
        self.assertEqual(out3, -1, "input 3")
        self.assertEqual(out4, -2, "input 4")
        self.assertEqual(out5, 5, "input 5")

    def test_next_without_pastify(self):
        """next() cannot be evaluated online without pastification."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = next(req)'
        spec.parse()
        self.assertRaises(rtamt.LTLNotImplementedException, spec.update, 0, [('req', self.left1)])

    def test_next_with_pastify(self):
        """After pastify(), next(req) shifts the trace by one step."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = next(req)'
        spec.parse()
        spec.pastify()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, -2, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_and(self):
        """Robustness of 'and' is the pointwise minimum."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req and gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 20, "input 1")
        self.assertEqual(out2, -2, "input 2")
        self.assertEqual(out3, -2, "input 3")
        self.assertEqual(out4, 4, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_or(self):
        """Robustness of 'or' is the pointwise maximum."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req or gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, 10, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_iff(self):
        """out = req iff gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req iff gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, -80, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, -12, "input 3")
        self.assertEqual(out4, -1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_xor(self):
        """out = req xor gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req xor gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 80, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, 12, "input 3")
        self.assertEqual(out4, 1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_implies(self):
        """out = req -> gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req -> gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 20, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, 10, "input 3")
        self.assertEqual(out4, 4, "input 4")
        self.assertEqual(out5, 1, "input 5")

    def test_always_without_pastify(self):
        """Unbounded 'always' cannot be evaluated online without pastification."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = always(req)'
        spec.parse()
        self.assertRaises(rtamt.LTLNotImplementedException, spec.update, 0, [('req', self.left1)])

    def test_always_with_pastify(self):
        """Unbounded 'always' cannot be pastified."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = always(req)'
        spec.parse()
        self.assertRaises(rtamt.LTLPastifyException, spec.pastify)

    def test_eventually_without_pastify(self):
        """Unbounded 'eventually' cannot be evaluated online without pastification."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = eventually(req)'
        spec.parse()
        self.assertRaises(rtamt.LTLNotImplementedException, spec.update, 0, [('req', self.left1)])

    def test_eventually_with_pastify(self):
        """Unbounded 'eventually' cannot be pastified."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = eventually(req)'
        spec.parse()
        self.assertRaises(rtamt.LTLPastifyException, spec.pastify)

    def test_historically(self):
        """historically(req): running minimum over the whole past."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = historically(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, -2, "input 3")
        self.assertEqual(out4, -2, "input 4")
        self.assertEqual(out5, -2, "input 5")

    def test_once(self):
        """once(req): running maximum over the whole past."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = once(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, 100, "input 2")
        self.assertEqual(out3, 100, "input 3")
        self.assertEqual(out4, 100, "input 4")
        self.assertEqual(out5, 100, "input 5")

    def test_since(self):
        """out = req since gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req since gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 20, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, 10, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_until_without_pastify(self):
        """Unbounded 'until' cannot be evaluated online without pastification."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req until gnt'
        spec.parse()
        self.assertRaises(rtamt.LTLNotImplementedException, spec.update, 0, [('req', self.left1), ('gnt', self.right1)])

    def test_until_with_pastify(self):
        """Unbounded 'until' cannot be pastified.

        Renamed from the original duplicate 'test_always_with_pastify',
        which shadowed the genuine 'always' test above.
        """
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req until gnt'
        spec.parse()
        self.assertRaises(rtamt.LTLPastifyException, spec.pastify)

    def test_eventually_0_1_without_pastify(self):
        """Bounded 'eventually' still needs pastification for online use."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = eventually[0,1](req)'
        spec.parse()
        self.assertRaises(rtamt.STLNotImplementedException, spec.update, 0, [('req', self.left1)])

    def test_eventually_0_1_with_pastify(self):
        """eventually[0,1] after pastify: max over a 2-sample window."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = eventually[0,1](req)'
        spec.parse()
        spec.pastify()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, 100, "input 2")
        self.assertEqual(out3, -1, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, 5, "input 5")

    def test_once_0_1(self):
        """once[0,1]: max over the last two samples."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = once[0,1](req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, 100, "input 2")
        self.assertEqual(out3, -1, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, 5, "input 5")

    def test_once_1_2(self):
        """once[1,2]: window excludes 'now', so the first output is -inf."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = once[1,2](req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, -float("inf"), "input 1")
        self.assertEqual(out2, 100, "input 2")
        self.assertEqual(out3, 100, "input 3")
        self.assertEqual(out4, -1, "input 4")
        self.assertEqual(out5, 5, "input 5")

    def test_always_0_1_without_pastify(self):
        """Bounded 'always' still needs pastification for online use."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = always[0,1](req)'
        spec.parse()
        self.assertRaises(rtamt.STLNotImplementedException, spec.update, 0, [('req', self.left1)])

    def test_always_0_1_with_pastify(self):
        """always[0,1] after pastify: min over a 2-sample window."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = always[0,1](req)'
        spec.parse()
        spec.pastify()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, -2, "input 3")
        self.assertEqual(out4, -2, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_historically_0_1(self):
        """historically[0,1]: min over the last two samples."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = historically[0,1](req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, -2, "input 3")
        self.assertEqual(out4, -2, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_historically_1_2(self):
        """historically[1,2]: window excludes 'now', so the first output is +inf."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = historically[1,2](req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, float("inf"), "input 1")
        self.assertEqual(out2, 100, "input 2")
        self.assertEqual(out3, -1, "input 3")
        self.assertEqual(out4, -2, "input 4")
        self.assertEqual(out5, -2, "input 5")

    def test_until_0_1_without_pastify(self):
        """Bounded 'until' still needs pastification for online use."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req until[0,1] gnt'
        spec.parse()
        self.assertRaises(rtamt.STLNotImplementedException, spec.update, 0, [('req', self.left1), ('gnt', self.right1)])

    def test_until_1_2_with_pastify(self):
        """req until[1,2] gnt after pastification."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req until[1,2] gnt'
        spec.parse()
        spec.pastify()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 20, "input 1")
        self.assertEqual(out2, 20, "input 2")
        self.assertEqual(out3, -1, "input 3")
        self.assertEqual(out4, -1, "input 4")
        self.assertEqual(out5, -2, "input 5")

    def test_since_0_1(self):
        """out = req since[0,1] gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req since[0,1] gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 20, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, 10, "input 3")
        self.assertEqual(out4, 5, "input 4")
        self.assertEqual(out5, -1, "input 5")

    def test_not(self):
        """not(req): robustness negation."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = not(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, -100, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, 2, "input 3")
        self.assertEqual(out4, -5, "input 4")
        self.assertEqual(out5, 1, "input 5")

    def test_rise(self):
        """rise(req): detects a rising edge of the signal."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = rise(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, 100, "input 1")
        self.assertEqual(out2, -100, "input 2")
        self.assertEqual(out3, -2, "input 3")
        self.assertEqual(out4, 2, "input 4")
        self.assertEqual(out5, -5, "input 5")

    def test_fall(self):
        """fall(req): detects a falling edge of the signal."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = fall(req)'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1)])
        out2 = spec.update(1, [('req', self.left2)])
        out3 = spec.update(2, [('req', self.left3)])
        out4 = spec.update(3, [('req', self.left4)])
        out5 = spec.update(4, [('req', self.left5)])
        self.assertEqual(out1, -100, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, -1, "input 3")
        self.assertEqual(out4, -5, "input 4")
        self.assertEqual(out5, 1, "input 5")

    def test_predicate_leq(self):
        """out = req <= gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req <= gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, -80, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, 12, "input 3")
        self.assertEqual(out4, -1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_predicate_less(self):
        """out = req < gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req < gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, -80, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, 12, "input 3")
        self.assertEqual(out4, -1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_predicate_geq(self):
        """out = req >= gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req >= gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 80, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, -12, "input 3")
        self.assertEqual(out4, 1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_predicate_greater(self):
        """out = req > gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req > gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 80, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, -12, "input 3")
        self.assertEqual(out4, 1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_predicate_eq(self):
        """out = req == gnt."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req == gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, -80, "input 1")
        self.assertEqual(out2, -1, "input 2")
        self.assertEqual(out3, -12, "input 3")
        self.assertEqual(out4, -1, "input 4")
        self.assertEqual(out5, 0, "input 5")

    def test_predicate_neq(self):
        """out = req !== gnt ('!==' is rtamt's not-equal operator)."""
        spec = rtamt.STLDiscreteTimeSpecification()
        spec.declare_var('req', 'float')
        spec.declare_var('gnt', 'float')
        spec.declare_var('out', 'float')
        spec.spec = 'out = req !== gnt'
        spec.parse()
        out1 = spec.update(0, [('req', self.left1), ('gnt', self.right1)])
        out2 = spec.update(1, [('req', self.left2), ('gnt', self.right2)])
        out3 = spec.update(2, [('req', self.left3), ('gnt', self.right3)])
        out4 = spec.update(3, [('req', self.left4), ('gnt', self.right4)])
        out5 = spec.update(4, [('req', self.left5), ('gnt', self.right5)])
        self.assertEqual(out1, 80, "input 1")
        self.assertEqual(out2, 1, "input 2")
        self.assertEqual(out3, 12, "input 3")
        self.assertEqual(out4, 1, "input 4")
        self.assertEqual(out5, 0, "input 5")
| 37.879187
| 120
| 0.563678
| 3,900
| 31,667
| 4.518462
| 0.025897
| 0.099875
| 0.086596
| 0.069005
| 0.965441
| 0.947736
| 0.943139
| 0.931563
| 0.926115
| 0.92464
| 0
| 0.05361
| 0.246029
| 31,667
| 836
| 121
| 37.879187
| 0.684453
| 0
| 0
| 0.804545
| 0
| 0
| 0.115985
| 0.001389
| 0
| 0
| 0
| 0
| 0.271212
| 1
| 0.069697
| false
| 0
| 0.004545
| 0
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7616b78ee8c643a1b358ee3c436ad160c22a48c6
| 1,565
|
py
|
Python
|
system/t06_publish/show.py
|
Yelp/aptly
|
59a0c0140ba0f0f12554d57d99110511eb3e6229
|
[
"MIT"
] | 666
|
2018-04-21T19:27:02.000Z
|
2022-03-31T22:58:06.000Z
|
system/t06_publish/show.py
|
Yelp/aptly
|
59a0c0140ba0f0f12554d57d99110511eb3e6229
|
[
"MIT"
] | 460
|
2018-04-18T18:35:24.000Z
|
2022-03-31T13:39:22.000Z
|
system/t06_publish/show.py
|
Yelp/aptly
|
59a0c0140ba0f0f12554d57d99110511eb3e6229
|
[
"MIT"
] | 141
|
2018-05-31T12:13:37.000Z
|
2022-03-31T11:07:22.000Z
|
from lib import BaseTest
class PublishShow1Test(BaseTest):
    """
    publish show: existing snapshot
    """
    # Use the pre-built package database and package pool fixtures.
    fixtureDB = True
    fixturePool = True
    # Create a snapshot from the gnuplot-maverick mirror and publish it
    # (signed with the test keyring) before running the command under test.
    fixtureCmds = [
        "aptly snapshot create snap1 from mirror gnuplot-maverick",
        "aptly publish snapshot -keyring=${files}/aptly.pub -secret-keyring=${files}/aptly.sec snap1",
    ]
    # Command whose output is compared against the stored expectation.
    runCmd = "aptly publish show maverick"
class PublishShow2Test(BaseTest):
    """
    publish show: under prefix
    """
    # Use the pre-built package database and package pool fixtures.
    fixtureDB = True
    fixturePool = True
    # Publish the snapshot under the 'ppa/smira' prefix so the show
    # command must resolve a non-default publish location.
    fixtureCmds = [
        "aptly snapshot create snap1 from mirror gnuplot-maverick",
        "aptly publish snapshot -keyring=${files}/aptly.pub -secret-keyring=${files}/aptly.sec snap1 ppa/smira",
    ]
    # Command whose output is compared against the stored expectation.
    runCmd = "aptly publish show maverick ppa/smira"
class PublishShow3Test(BaseTest):
    """
    publish show json: existing snapshot
    """
    # Use the pre-built package database and package pool fixtures.
    fixtureDB = True
    fixturePool = True
    # Create a snapshot from the gnuplot-maverick mirror and publish it
    # (signed with the test keyring) before running the command under test.
    fixtureCmds = [
        "aptly snapshot create snap1 from mirror gnuplot-maverick",
        "aptly publish snapshot -keyring=${files}/aptly.pub -secret-keyring=${files}/aptly.sec snap1",
    ]
    # Same as PublishShow1Test but requests JSON-formatted output.
    runCmd = "aptly publish show -json maverick"
class PublishShow4Test(BaseTest):
    """
    publish show json: under prefix
    """
    # Use the pre-built package database and package pool fixtures.
    fixtureDB = True
    fixturePool = True
    # Publish the snapshot under the 'ppa/smira' prefix so the show
    # command must resolve a non-default publish location.
    fixtureCmds = [
        "aptly snapshot create snap1 from mirror gnuplot-maverick",
        "aptly publish snapshot -keyring=${files}/aptly.pub -secret-keyring=${files}/aptly.sec snap1 ppa/smira",
    ]
    # Same as PublishShow2Test but requests JSON-formatted output.
    runCmd = "aptly publish show -json maverick ppa/smira"
| 28.981481
| 112
| 0.663898
| 168
| 1,565
| 6.184524
| 0.202381
| 0.084697
| 0.130895
| 0.107796
| 0.802695
| 0.787295
| 0.764196
| 0.764196
| 0.764196
| 0.764196
| 0
| 0.009959
| 0.230032
| 1,565
| 53
| 113
| 29.528302
| 0.852282
| 0.08115
| 0
| 0.606061
| 0
| 0.121212
| 0.54321
| 0.177197
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
76292cbebd84ed742fee6d813ffe16cea3ed4ab6
| 112,112
|
py
|
Python
|
tests/dhcpv6/kea_only/config_backend/test_cb_v6_cmds_api.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 22
|
2015-02-27T11:51:05.000Z
|
2022-02-28T12:39:29.000Z
|
tests/dhcpv6/kea_only/config_backend/test_cb_v6_cmds_api.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 16
|
2018-10-30T15:00:12.000Z
|
2019-01-11T17:55:13.000Z
|
tests/dhcpv6/kea_only/config_backend/test_cb_v6_cmds_api.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 11
|
2015-02-27T11:51:36.000Z
|
2021-03-30T08:33:54.000Z
|
"""Kea database config backend commands hook testing"""
import pytest
import srv_msg
from cb_model import setup_server_for_config_backend_cmds
# Marks applied to every test in this module: IPv6, Kea-only, control-channel,
# hook-based, config-backend command tests.
pytestmark = [pytest.mark.v6,
              pytest.mark.kea_only,
              pytest.mark.controlchannel,
              pytest.mark.hook,
              pytest.mark.config_backend,
              pytest.mark.cb_cmds]
@pytest.fixture(autouse=True)
def run_around_tests():
    """Before every test: start a CB-enabled server and register server tag 'abc'."""
    setup_server_for_config_backend_cmds()
    register_server = {
        "command": "remote-server6-set",
        "arguments": {
            "remote": {"type": "mysql"},
            "servers": [{"server-tag": "abc"}],
        },
    }
    srv_msg.send_ctrl_cmd(register_server, exp_result=0)
def test_availability():
    """Verify all remote-*6 config-backend commands are registered by the hook.

    Sends `list-commands` and asserts each expected command name appears in
    the response's `arguments` list.
    """
    cmd = dict(command='list-commands')
    response = srv_msg.send_ctrl_cmd(cmd)

    # Fix: the loop variable used to be named `cmd`, shadowing the request
    # dict above — harmless here but confusing; renamed for clarity.
    for expected_command in ["remote-global-parameter6-del",
                             "remote-global-parameter6-get",
                             "remote-global-parameter6-get-all",
                             "remote-global-parameter6-set",
                             "remote-network6-del",
                             "remote-network6-get",
                             "remote-network6-list",
                             "remote-network6-set",
                             "remote-option-def6-del",
                             "remote-option-def6-get",
                             "remote-option-def6-get-all",
                             "remote-option-def6-set",
                             "remote-option6-global-del",
                             "remote-option6-global-get",
                             "remote-option6-global-get-all",
                             "remote-option6-global-set",
                             "remote-subnet6-del-by-id",
                             "remote-subnet6-del-by-prefix",
                             "remote-subnet6-get-by-id",
                             "remote-subnet6-get-by-prefix",
                             "remote-subnet6-list",
                             "remote-subnet6-set"]:
        assert expected_command in response['arguments']
# subnet tests
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_subnet6_set_basic(channel):
    """Add a minimal IPv6 subnet over both control channels and check the reply."""
    set_cmd = {
        "command": "remote-subnet6-set",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "subnets": [{
                "subnet": "2001:db8:1::/64",
                "interface": "$(SERVER_IFACE)",
                "id": 1,
                "shared-network-name": "",
                "pools": [{"pool": "2001:db8:1::1-2001:db8:1::10"}],
            }],
        },
    }
    reply = srv_msg.send_ctrl_cmd(set_cmd, channel=channel)

    expected = {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                "result": 0, "text": "IPv6 subnet successfully set."}
    assert reply == expected
def test_remote_subnet6_set_empty_subnet():
    """An empty "subnet" value must be rejected with a parse error."""
    set_cmd = {
        "command": "remote-subnet6-set",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "shared-network-name": "",
            "subnets": [{
                "subnet": "",
                "id": 1,
                "interface": "$(SERVER_IFACE)",
            }],
        },
    }
    reply = srv_msg.send_ctrl_cmd(set_cmd, exp_result=1)
    assert "subnet configuration failed: Invalid subnet syntax (prefix/len expected)" in reply["text"]
def test_remote_subnet6_set_missing_subnet():
    """Omitting the mandatory "subnet" key must fail with a clear message."""
    set_cmd = {
        "command": "remote-subnet6-set",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "shared-network-name": "",
            "subnets": [{"interface": "$(SERVER_IFACE)", "id": 1}],
        },
    }
    reply = srv_msg.send_ctrl_cmd(set_cmd, exp_result=1)
    expected_fragment = ("subnet configuration failed: mandatory 'subnet' parameter "
                         "is missing for a subnet being configured")
    assert expected_fragment in reply["text"]
def test_remote_subnet6_set_stateless():
    """A subnet with no pools (stateless) is accepted by remote-subnet6-set."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64",
                                       "id": 1,
                                       "shared-network-name": "",
                                       "interface": "$(SERVER_IFACE)"}]})
    response = srv_msg.send_ctrl_cmd(cmd)

    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}
def test_remote_subnet6_set_id():
    """A subnet with an explicit non-default id (5) is stored and echoed back."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 5,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)

    assert response == {"arguments": {"subnets": [{"id": 5, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}
def test_remote_subnet6_set_duplicated_id():
    """Re-using the same id with a different prefix replaces the stored subnet.

    After setting id 5 twice (first 2001:db8:1::/64, then 2001:db8:2::/64),
    remote-subnet6-list must show only the second subnet.
    """
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 5,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 5, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    # Same id, different prefix: expected to overwrite the previous entry.
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:2::/64", "id": 5,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:2::1-2001:db8:2::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 5, "subnet": "2001:db8:2::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    # Only one subnet (the replacement) must remain.
    cmd = dict(command="remote-subnet6-list",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 1,
                                      "subnets": [{"id": 5, "metadata": {"server-tags": ["abc"]},
                                                   "shared-network-name": None, "subnet": "2001:db8:2::/64"}]},
                        "result": 0, "text": "1 IPv6 subnet(s) found."}
def test_remote_subnet6_set_duplicated_subnet():
    """Re-using the same prefix with a different id is accepted (id updated).

    Both set commands succeed; the second one stores the same prefix under
    id 1 instead of id 5.
    """
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 5,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 5, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    # Same prefix, different id.
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}
def test_remote_subnet6_set_all_values():
    """remote-subnet6-set accepts a subnet carrying every supported parameter
    (pools with options, pd-pools, reservation-mode, timers, option-data)."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"shared-network-name": "",
                                       "require-client-classes": ["XYZ"],
                                       "id": 2, "interface": "$(SERVER_IFACE)",
                                       "pools": [{"pool": "2001:db8:1::1-2001:db8:1::10",
                                                  "option-data": [{"code": 7,
                                                                   "data": "12",
                                                                   "always-send": True,
                                                                   "csv-format": True}]}],
                                       "pd-pools": [{
                                           "delegated-len": 91,
                                           "prefix": "2001:db8:2::",
                                           "prefix-len": 90}],
                                       "reservation-mode": "all",
                                       "subnet": "2001:db8:1::/64",
                                       "valid-lifetime": 1000,
                                       "rebind-timer": 500,
                                       "renew-timer": 200,
                                       "option-data": [{"code": 7,
                                                        "data": "123",
                                                        "always-send": True,
                                                        "csv-format": True}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)

    assert response == {"arguments": {"subnets": [{"id": 2, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}
def test_remote_subnet6_get_all_values():
    """Round trip: set a fully-populated subnet, then verify that
    remote-subnet6-get-by-prefix returns every value (plus server-side
    defaults such as option names/spaces and expanded reservation flags)."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"shared-network-name": "",
                                       "require-client-classes": ["XYZ"],
                                       "id": 2, "interface": "$(SERVER_IFACE)",
                                       "pools": [{"pool": "2001:db8:1::1-2001:db8:1::10",
                                                  "option-data": [{"code": 7,
                                                                   "data": "12",
                                                                   "always-send": True,
                                                                   "csv-format": True}]}],
                                       "pd-pools": [{
                                           "delegated-len": 91,
                                           "prefix": "2001:db8:2::",
                                           "prefix-len": 90}],
                                       "reservation-mode": "all",
                                       "subnet": "2001:db8:1::/64",
                                       "valid-lifetime": 1000,
                                       "rebind-timer": 500,
                                       "renew-timer": 200,
                                       "option-data": [{"code": 7,
                                                        "data": "123",
                                                        "always-send": True,
                                                        "csv-format": True}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 2, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    # Fetch the subnet back by prefix and compare the full returned config.
    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)

    # "reservation-mode": "all" is returned as the three expanded boolean flags.
    assert response == {"arguments": {
        "count": 1,
        "subnets": [{
            "metadata": {"server-tags": ["abc"]},
            "require-client-classes": ["XYZ"],
            "shared-network-name": None,
            "id": 2,
            "interface": srv_msg.get_interface(),
            "option-data": [{"always-send": True,
                             "code": 7,
                             "csv-format": True,
                             "name": "preference",
                             "space": "dhcp6",
                             "data": "123"}],
            "pools": [{
                "option-data": [{"code": 7,
                                 "data": "12",
                                 "name": "preference",
                                 "always-send": True,
                                 "csv-format": True,
                                 "space": "dhcp6"}],
                "pool": "2001:db8:1::1-2001:db8:1::10"}],
            "pd-pools": [{
                "option-data": [],
                "delegated-len": 91,
                "prefix": "2001:db8:2::",
                "prefix-len": 90}],
            "reservations-global": False,
            "reservations-in-subnet": True,
            "reservations-out-of-pool": False,
            "subnet": "2001:db8:1::/64",
            "rebind-timer": 500,
            "renew-timer": 200,
            "relay": {"ip-addresses": []},
            "valid-lifetime": 1000}]}, "result": 0, "text": "IPv6 subnet 2001:db8:1::/64 found."}
# reservation-mode is integer in db, so we need to check if it's converted correctly
def test_remote_subnet6_set_reservation_mode_all_old():
    """Legacy "reservation-mode": "all" maps to flags (global=F, in-subnet=T, out-of-pool=F)."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "reservation-mode": "all",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response["arguments"]["subnets"][0]["reservations-global"] is False
    assert response["arguments"]["subnets"][0]["reservations-in-subnet"] is True
    assert response["arguments"]["subnets"][0]["reservations-out-of-pool"] is False
def test_remote_subnet6_set_reservation_mode_all_new():
    """New-style reservation flags equivalent to mode "all" round-trip unchanged."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "reservations-global": False,
                                       "reservations-in-subnet": True,
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is False
    assert subnet["reservations-in-subnet"] is True
    # out-of-pool was not sent, so it may be absent from the reply.
    if "reservations-out-of-pool" in subnet:
        assert subnet["reservations-out-of-pool"] is False
def test_remote_subnet6_set_reservation_mode_global_old():
    """Legacy "reservation-mode": "global" maps to flags (global=T, in-subnet=F)."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "reservation-mode": "global",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is True
    assert subnet["reservations-in-subnet"] is False
    # out-of-pool may be omitted when not explicitly configured.
    if "reservations-out-of-pool" in subnet:
        assert subnet["reservations-out-of-pool"] is False
def test_remote_subnet6_set_reservation_mode_global_new():
    """New-style flags for global-only reservations round-trip unchanged."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "reservations-global": True,
                                       "reservations-in-subnet": False,
                                       "reservations-out-of-pool": False,
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is True
    assert subnet["reservations-in-subnet"] is False
    # out-of-pool may be omitted in the reply even though it was sent as False.
    if "reservations-out-of-pool" in subnet:
        assert subnet["reservations-out-of-pool"] is False
def test_remote_subnet6_set_reservation_mode_out_pool_old():
    """Legacy "reservation-mode": "out-of-pool" maps to (global=F, in-subnet=T, out-of-pool=T)."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "reservation-mode": "out-of-pool",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is False
    assert subnet["reservations-in-subnet"] is True
    assert subnet["reservations-out-of-pool"] is True
def test_remote_subnet6_set_reservation_mode_out_pool_new():
    """New-style flags for out-of-pool reservations round-trip unchanged."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "reservations-global": False,
                                       "reservations-in-subnet": True,
                                       "reservations-out-of-pool": True,
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is False
    assert subnet["reservations-in-subnet"] is True
    assert subnet["reservations-out-of-pool"] is True
def test_remote_subnet6_set_reservation_mode_disabled_old():
    """Legacy "reservation-mode": "disabled" maps to all reservation flags False."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "shared-network-name": "",
                                       "interface": "$(SERVER_IFACE)",
                                       "reservation-mode": "disabled"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is False
    assert subnet["reservations-in-subnet"] is False
    # out-of-pool may be omitted from the reply.
    if "reservations-out-of-pool" in subnet:
        assert subnet["reservations-out-of-pool"] is False
def test_remote_subnet6_set_reservation_mode_disabled_new():
    """All reservation flags explicitly False round-trip unchanged."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:1::/64", "id": 1,
                                       "shared-network-name": "",
                                       "interface": "$(SERVER_IFACE)",
                                       "reservations-global": False,
                                       "reservations-in-subnet": False,
                                       "reservations-out-of-pool": False}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    subnet = response["arguments"]["subnets"][0]
    assert subnet["reservations-global"] is False
    assert subnet["reservations-in-subnet"] is False
    # out-of-pool may be omitted from the reply.
    if "reservations-out-of-pool" in subnet:
        assert subnet["reservations-out-of-pool"] is False
def _subnet_set():
    """Helper: add the canonical test subnet (2001:db8:1::/64, id 5) and verify success."""
    set_cmd = {
        "command": "remote-subnet6-set",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "subnets": [{
                "subnet": "2001:db8:1::/64",
                "id": 5,
                "interface": "$(SERVER_IFACE)",
                "shared-network-name": "",
                "pools": [{"pool": "2001:db8:1::1-2001:db8:1::10"}],
            }],
        },
    }
    reply = srv_msg.send_ctrl_cmd(set_cmd)
    expected = {"arguments": {"subnets": [{"id": 5, "subnet": "2001:db8:1::/64"}]},
                "result": 0, "text": "IPv6 subnet successfully set."}
    assert reply == expected
def test_remote_subnet6_del_by_id():
    """Delete the canonical subnet by its id."""
    _subnet_set()
    delete_cmd = {"command": "remote-subnet6-del-by-id",
                  "arguments": {"remote": {"type": "mysql"},
                                "subnets": [{"id": 5}]}}
    reply = srv_msg.send_ctrl_cmd(delete_cmd)
    assert reply == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 subnet(s) deleted."}
def test_remote_subnet6_del_by_id_incorrect_id():
    """Deleting a non-existent id reports zero deletions (result 3)."""
    _subnet_set()
    delete_cmd = {"command": "remote-subnet6-del-by-id",
                  "arguments": {"remote": {"type": "mysql"},
                                "subnets": [{"id": 15}]}}
    reply = srv_msg.send_ctrl_cmd(delete_cmd, exp_result=3)
    assert reply == {"arguments": {"count": 0}, "result": 3, "text": "0 IPv6 subnet(s) deleted."}
def test_remote_subnet6_del_id_negative_missing_subnet():
    """del-by-id without an "id" key must fail with a clear error."""
    _subnet_set()
    delete_cmd = {"command": "remote-subnet6-del-by-id",
                  "arguments": {"remote": {"type": "mysql"},
                                "subnets": [{"subnet": "2001:db8:1::/64"}]}}
    reply = srv_msg.send_ctrl_cmd(delete_cmd, exp_result=1)
    assert reply == {"result": 1, "text": "missing 'id' parameter"}
def test_remote_subnet6_del_by_prefix():
    """Delete the canonical subnet by its prefix."""
    _subnet_set()
    delete_cmd = {"command": "remote-subnet6-del-by-prefix",
                  "arguments": {"remote": {"type": "mysql"},
                                "subnets": [{"subnet": "2001:db8:1::/64"}]}}
    reply = srv_msg.send_ctrl_cmd(delete_cmd)
    assert reply == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 subnet(s) deleted."}
def test_remote_subnet6_del_by_prefix_non_existing_subnet():
    """Deleting an unknown prefix reports zero deletions (result 3)."""
    _subnet_set()
    delete_cmd = {"command": "remote-subnet6-del-by-prefix",
                  "arguments": {"remote": {"type": "mysql"},
                                "subnets": [{"subnet": "2001:db8:2::/64"}]}}
    reply = srv_msg.send_ctrl_cmd(delete_cmd, exp_result=3)
    assert reply == {"arguments": {"count": 0}, "result": 3, "text": "0 IPv6 subnet(s) deleted."}
def test_remote_subnet6_del_by_prefix_missing_subnet_():
    """del-by-prefix without a "subnet" key must fail with a clear error."""
    _subnet_set()
    delete_cmd = {"command": "remote-subnet6-del-by-prefix",
                  "arguments": {"remote": {"type": "mysql"},
                                "subnets": [{"id": 2}]}}
    reply = srv_msg.send_ctrl_cmd(delete_cmd, exp_result=1)
    assert reply == {"result": 1, "text": "missing 'subnet' parameter"}
def test_remote_subnet6_get_by_id():
    """Set a subnet with options and timers, then fetch it via
    remote-subnet6-get-by-id and compare the full returned configuration."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"shared-network-name": "",
                                       "id": 2, "interface": "$(SERVER_IFACE)",
                                       "pools": [{"pool": "2001:db8:1::1-2001:db8:1::10",
                                                  "option-data": [{"code": 7,
                                                                   "data": "123",
                                                                   "always-send": True,
                                                                   "csv-format": True}]}],
                                       "reservation-mode": "global",
                                       "subnet": "2001:db8:1::/64",
                                       "valid-lifetime": 1000,
                                       "rebind-timer": 500,
                                       "renew-timer": 200,
                                       "option-data": [{"code": 7,
                                                        "data": "12",
                                                        "always-send": True,
                                                        "csv-format": True}]}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 2, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-id",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"id": 2}]})
    response = srv_msg.send_ctrl_cmd(cmd)

    # Options are echoed back with server-filled name/space; reservation-mode
    # "global" is returned as the expanded boolean flags.
    assert response == {"arguments": {"count": 1,
                                      "subnets": [{"metadata": {"server-tags": ["abc"]},
                                                   "shared-network-name": None,
                                                   "id": 2, "interface": srv_msg.get_interface(),
                                                   "option-data": [{"always-send": True, "code": 7, "csv-format": True,
                                                                    "data": "12", "name": "preference",
                                                                    "space": "dhcp6"}],
                                                   "pools": [{"option-data": [{"always-send": True, "code": 7,
                                                                               "csv-format": True, "data": "123",
                                                                               "name": "preference",
                                                                               "space": "dhcp6"}],
                                                              "pool": "2001:db8:1::1-2001:db8:1::10"}],
                                                   "rebind-timer": 500, "renew-timer": 200,
                                                   "reservations-global": True,
                                                   "reservations-in-subnet": False,
                                                   "pd-pools": [],
                                                   "relay": {"ip-addresses": []},
                                                   "subnet": "2001:db8:1::/64", "valid-lifetime": 1000}]},
                        "result": 0, "text": "IPv6 subnet 2 found."}
def test_remote_subnet6_get_by_id_incorrect_id():
    """Fetching an id that was never added returns an empty result (result 3)."""
    _subnet_set()
    get_cmd = {"command": "remote-subnet6-get-by-id",
               "arguments": {"remote": {"type": "mysql"},
                             "subnets": [{"id": 3}]}}
    reply = srv_msg.send_ctrl_cmd(get_cmd, exp_result=3)
    assert reply == {"arguments": {"count": 0, "subnets": []},
                     "result": 3, "text": "IPv6 subnet 3 not found."}
def test_remote_subnet6_get_by_id_missing_id():
    """get-by-id without an "id" key must fail with a clear error."""
    _subnet_set()
    get_cmd = {"command": "remote-subnet6-get-by-id",
               "arguments": {"remote": {"type": "mysql"},
                             "subnets": [{"subnet": 3}]}}
    reply = srv_msg.send_ctrl_cmd(get_cmd, exp_result=1)
    assert reply == {"result": 1,
                     "text": "missing 'id' parameter"}
def test_remote_subnet6_get_by_prefix():
    """Set a subnet and fetch it via remote-subnet6-get-by-prefix, comparing
    the full returned configuration (including server-filled defaults)."""
    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"shared-network-name": "",
                                       "interface": "$(SERVER_IFACE)",
                                       "pools": [
                                           {"pool": "2001:db8:1::1-2001:db8:1::10"}],
                                       "reservation-mode": "all",
                                       "require-client-classes": ["XYZ"],
                                       "subnet": "2001:db8:1::/64", "id": 1,
                                       "valid-lifetime": 1000}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"subnets": [{"id": 1, "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}

    cmd = dict(command="remote-subnet6-get-by-prefix",
               arguments={"remote": {"type": "mysql"},
                          "subnets": [{"subnet": "2001:db8:1::/64"}]})
    response = srv_msg.send_ctrl_cmd(cmd)

    # reservation-mode "all" is returned as the three expanded boolean flags.
    assert response == {"arguments": {
        "count": 1,
        "subnets": [{
            "metadata": {"server-tags": ["abc"]},
            "require-client-classes": ["XYZ"],
            "shared-network-name": None,
            "id": 1,
            "interface": srv_msg.get_interface(),
            "option-data": [],
            "pools": [{
                "option-data": [],
                "pool": "2001:db8:1::1-2001:db8:1::10"}],
            "reservations-global": False,
            "reservations-in-subnet": True,
            "reservations-out-of-pool": False,
            "pd-pools": [],
            "relay": {"ip-addresses": []},
            "subnet": "2001:db8:1::/64",
            "valid-lifetime": 1000}]}, "result": 0, "text": "IPv6 subnet 2001:db8:1::/64 found."}
def test_remote_subnet6_get_by_prefix_negative():
    """Fetching a prefix that was never added returns an empty result (result 3)."""
    _subnet_set()
    get_cmd = {"command": "remote-subnet6-get-by-prefix",
               "arguments": {"remote": {"type": "mysql"},
                             "subnets": [{"subnet": "2001:db8:2::/63"}]}}
    reply = srv_msg.send_ctrl_cmd(get_cmd, exp_result=3)
    assert reply == {"arguments": {"count": 0, "subnets": []},
                     "result": 3, "text": "IPv6 subnet 2001:db8:2::/63 not found."}
def test_remote_subnet6_get_by_prefix_incorrect_prefix():
    """A malformed prefix (::/64) is rejected with a parse error."""
    _subnet_set()
    get_cmd = {"command": "remote-subnet6-get-by-prefix",
               "arguments": {"remote": {"type": "mysql"},
                             "subnets": [{"subnet": "::/64"}]}}
    reply = srv_msg.send_ctrl_cmd(get_cmd, exp_result=1)
    assert reply == {"result": 1,
                     "text": "unable to parse invalid IPv6 prefix ::/64"}
def test_remote_subnet6_get_by_prefix_missing_prefix():
    """get-by-prefix without a "subnet" key must fail with a clear error."""
    _subnet_set()
    get_cmd = {"command": "remote-subnet6-get-by-prefix",
               "arguments": {"remote": {"type": "mysql"},
                             "subnets": [{"id": "2001:db8:2::/63"}]}}
    reply = srv_msg.send_ctrl_cmd(get_cmd, exp_result=1)
    assert reply == {"result": 1,
                     "text": "missing 'subnet' parameter"}
def test_remote_subnet6_list():
    """Add three subnets and verify remote-subnet6-list returns all of them
    (ordered by id in the observed reply)."""
    _subnet_set()

    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:2::/64", "id": 3,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:2::1-2001:db8:2::10"}]}]})
    srv_msg.send_ctrl_cmd(cmd)

    cmd = dict(command="remote-subnet6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "subnets": [{"subnet": "2001:db8:3::/64", "id": 1,
                                       "interface": "$(SERVER_IFACE)",
                                       "shared-network-name": "",
                                       "pools": [
                                           {"pool": "2001:db8:3::1-2001:db8:3::10"}]}]})
    srv_msg.send_ctrl_cmd(cmd)

    cmd = dict(command="remote-subnet6-list",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 3, "subnets": [{"id": 1,
                                                               "metadata": {"server-tags": ["abc"]},
                                                               "shared-network-name": None,
                                                               "subnet": "2001:db8:3::/64"},
                                                              {"id": 3,
                                                               "metadata": {"server-tags": ["abc"]},
                                                               "shared-network-name": None,
                                                               "subnet": "2001:db8:2::/64"},
                                                              {"id": 5,
                                                               "metadata": {"server-tags": ["abc"]},
                                                               "shared-network-name": None,
                                                               "subnet": "2001:db8:1::/64"}]},
                        "result": 0, "text": "3 IPv6 subnet(s) found."}
# network tests
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_network6_set_basic(channel):
    """Add a minimal shared network over both control channels."""
    set_cmd = {"command": "remote-network6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "shared-networks": [{"name": "floor13"}]}}
    reply = srv_msg.send_ctrl_cmd(set_cmd, channel=channel)
    assert reply == {"arguments": {"shared-networks": [{"name": "floor13"}]},
                     "result": 0, "text": "IPv6 shared network successfully set."}
def test_remote_network6_set_missing_name():
    """A shared network without a "name" key is rejected."""
    set_cmd = {"command": "remote-network6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "shared-networks": [{}]}}
    reply = srv_msg.send_ctrl_cmd(set_cmd, exp_result=1)
    assert "missing parameter 'name'" in reply["text"]
def test_remote_network6_set_empty_name():
    """A shared network with an empty "name" is rejected."""
    set_cmd = {"command": "remote-network6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "shared-networks": [{"name": ""}]}}
    reply = srv_msg.send_ctrl_cmd(set_cmd, exp_result=1)
    assert reply == {"result": 1, "text": "'name' parameter must not be empty"}
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_network6_get_basic(channel):
    """Set a shared network over each channel, then fetch it and compare the
    full returned configuration."""
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "interface": "$(SERVER_IFACE)"}]})
    srv_msg.send_ctrl_cmd(cmd, channel=channel)

    cmd = dict(command="remote-network6-get",
               arguments={"remote": {"type": "mysql"},
                          "shared-networks": [{
                              "name": "net1"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 1,
                                      "shared-networks": [{"interface": srv_msg.get_interface(), "name": "net1",
                                                           "metadata": {"server-tags": ["abc"]},
                                                           "option-data": [], "relay": {"ip-addresses": []}}]},
                        "result": 0, "text": "IPv6 shared network 'net1' found."}
def test_remote_network6_get_all_values():
    """Set a shared network carrying every supported parameter, then verify
    remote-network6-get returns them all (with reservation-mode expanded
    to boolean flags and option name/space filled in)."""
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "client-class": "abc",
                              "require-client-classes": ["XYZ"],
                              "rebind-timer": 200,
                              "renew-timer": 100,
                              "calculate-tee-times": True,
                              "t1-percent": 0.5,
                              "t2-percent": 0.8,
                              "rapid-commit": True,
                              "valid-lifetime": 300,
                              "reservation-mode": "global",
                              "user-context": {"some weird network": 55},
                              "interface": "$(SERVER_IFACE)",
                              "option-data": [{"code": 7,
                                               "data": "123",
                                               "always-send": True,
                                               "csv-format": True}]}]})
    srv_msg.send_ctrl_cmd(cmd)

    cmd = dict(command="remote-network6-get",
               arguments={"remote": {"type": "mysql"},
                          "shared-networks": [{
                              "name": "net1"}]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 1,
                                      "shared-networks": [{"client-class": "abc",
                                                           "rebind-timer": 200, "renew-timer": 100,
                                                           "valid-lifetime": 300,
                                                           "reservations-global": True,
                                                           "reservations-in-subnet": False,
                                                           "interface": srv_msg.get_interface(),
                                                           "metadata": {"server-tags": ["abc"]},
                                                           "require-client-classes": ["XYZ"],
                                                           "calculate-tee-times": True,
                                                           "t1-percent": 0.5,
                                                           "t2-percent": 0.8,
                                                           "rapid-commit": True,
                                                           "name": "net1",
                                                           "option-data": [{"always-send": True, "code": 7,
                                                                            "csv-format": True, "data": "123",
                                                                            "name": "preference",
                                                                            "space": "dhcp6"}],
                                                           "relay": {"ip-addresses": []},
                                                           "user-context": {"some weird network": 55}}]},
                        "result": 0, "text": "IPv6 shared network 'net1' found."}
def test_remote_network6_set_t1_t2():
    """Invalid tee-time percentages are rejected: values > 1 are not reals,
    and t1-percent must be less than t2-percent."""
    # t2-percent = 10 is out of the (0, 1) real range.
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "calculate-tee-times": True,
                              "t1-percent": 0.5,
                              "t2-percent": 10,
                              "interface": "$(SERVER_IFACE)"}]})
    response = srv_msg.send_ctrl_cmd(cmd, exp_result=1)
    assert "'t2-percent' parameter is not a real" in response["text"]

    # t1-percent = 10 is out of the (0, 1) real range.
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "calculate-tee-times": True,
                              "t1-percent": 10,
                              "t2-percent": 0.5,
                              "interface": "$(SERVER_IFACE)"}]})
    response = srv_msg.send_ctrl_cmd(cmd, exp_result=1)
    assert "'t1-percent' parameter is not a real" in response["text"]

    # Both in range, but t1 >= t2 is invalid.
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "calculate-tee-times": True,
                              "t1-percent": 0.5,
                              "t2-percent": 0.1,
                              "interface": "$(SERVER_IFACE)"}]})
    response = srv_msg.send_ctrl_cmd(cmd, exp_result=1)
    assert "t1-percent: 0.5 is invalid, it must be less than t2-percent: 0.1" in response["text"]
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_network6_list_basic(channel):
    """Add two shared networks over each channel and verify
    remote-network6-list returns both."""
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "interface": "$(SERVER_IFACE)"}]})
    srv_msg.send_ctrl_cmd(cmd, channel=channel)

    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net2",
                              "interface": "$(SERVER_IFACE)"}]})
    srv_msg.send_ctrl_cmd(cmd, channel=channel)

    cmd = dict(command="remote-network6-list",
               arguments={"remote": {"type": "mysql"}, "server-tags": ["abc"]})
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 2, "shared-networks": [{"metadata": {"server-tags": ["abc"]},
                                                                      "name": "net1"},
                                                                     {"metadata": {"server-tags": ["abc"]},
                                                                      "name": "net2"}]},
                        "result": 0,
                        "text": "2 IPv6 shared network(s) found."}
def test_remote_network6_list_no_networks():
    """Listing with nothing configured returns an empty list (result 3)."""
    list_cmd = {"command": "remote-network6-list",
                "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    reply = srv_msg.send_ctrl_cmd(list_cmd, exp_result=3)
    expected = {"arguments": {"count": 0,
                              "shared-networks": []},
                "result": 3,
                "text": "0 IPv6 shared network(s) found."}
    assert reply == expected
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_network6_del_basic(channel):
    """Add two shared networks, delete them one at a time, and verify the
    list shrinks to empty (result 3 when nothing remains)."""
    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net1",
                              "interface": "$(SERVER_IFACE)"}]})
    srv_msg.send_ctrl_cmd(cmd, channel=channel)

    cmd = dict(command="remote-network6-set",
               arguments={"remote": {"type": "mysql"},
                          "server-tags": ["abc"],
                          "shared-networks": [{
                              "name": "net2",
                              "interface": "$(SERVER_IFACE)"}]})
    srv_msg.send_ctrl_cmd(cmd, channel=channel)

    # Both networks present initially.
    cmd = dict(command="remote-network6-list",
               arguments={"remote": {"type": "mysql"}, "server-tags": ["abc"]})
    response = srv_msg.send_ctrl_cmd(cmd, channel=channel)
    assert response == {"arguments": {"count": 2,
                                      "shared-networks": [{"metadata": {"server-tags": ["abc"]}, "name": "net1"},
                                                          {"metadata": {"server-tags": ["abc"]}, "name": "net2"}]},
                        "result": 0,
                        "text": "2 IPv6 shared network(s) found."}

    # Delete net1 -> only net2 left.
    cmd = dict(command="remote-network6-del",
               arguments={"remote": {"type": "mysql"},
                          "shared-networks": [{"name": "net1"}]})
    response = srv_msg.send_ctrl_cmd(cmd, channel=channel)
    assert response == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 shared network(s) deleted."}

    cmd = dict(command="remote-network6-list",
               arguments={"remote": {"type": "mysql"}, "server-tags": ["abc"]})
    response = srv_msg.send_ctrl_cmd(cmd, channel=channel)
    assert response == {"arguments": {"count": 1,
                                      "shared-networks": [{"metadata": {"server-tags": ["abc"]}, "name": "net2"}]},
                        "result": 0, "text": "1 IPv6 shared network(s) found."}

    # Delete net2 -> list is empty, reported with result 3.
    cmd = dict(command="remote-network6-del",
               arguments={"remote": {"type": "mysql"},
                          "shared-networks": [{"name": "net2"}]})
    response = srv_msg.send_ctrl_cmd(cmd, channel=channel)
    assert response == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 shared network(s) deleted."}

    cmd = dict(command="remote-network6-list",
               arguments={"remote": {"type": "mysql"}, "server-tags": ["abc"]})
    response = srv_msg.send_ctrl_cmd(cmd, channel=channel, exp_result=3)
    assert response == {"arguments": {"count": 0,
                                      "shared-networks": []},
                        "result": 3,
                        "text": "0 IPv6 shared network(s) found."}
def test_remote_network6_del_subnet_keep():
    """remote-network6-del with subnets-action=keep must preserve the subnets.

    After each shared network is removed, its subnet stays in the backend and
    only the shared-network-name association is cleared (reported as None).
    """
    # create two shared networks
    for net_name in ("net1", "net2"):
        request = {"command": "remote-network6-set",
                   "arguments": {"remote": {"type": "mysql"},
                                 "server-tags": ["abc"],
                                 "shared-networks": [{"name": net_name,
                                                      "interface": "$(SERVER_IFACE)"}]}}
        srv_msg.send_ctrl_cmd(request)
    request = {"command": "remote-network6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "shared-networks": [{"metadata": {"server-tags": ["abc"]},
                                                       "name": "net1"},
                                                      {"metadata": {"server-tags": ["abc"]},
                                                       "name": "net2"}]},
                    "result": 0,
                    "text": "2 IPv6 shared network(s) found."}
    # attach one subnet to each network
    for subnet_id, net_name, prefix, pool in (
            (1, "net1", "2001:db8:1::/64", "2001:db8:1::1-2001:db8:1::10"),
            (2, "net2", "2001:db8:2::/64", "2001:db8:2::1-2001:db8:2::10")):
        request = {"command": "remote-subnet6-set",
                   "arguments": {"remote": {"type": "mysql"},
                                 "server-tags": ["abc"],
                                 "subnets": [{"subnet": prefix,
                                              "id": subnet_id,
                                              "interface": "$(SERVER_IFACE)",
                                              "shared-network-name": net_name,
                                              "pools": [{"pool": pool}]}]}}
        resp = srv_msg.send_ctrl_cmd(request)
        assert resp == {"arguments": {"subnets": [{"id": subnet_id, "subnet": prefix}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}
    # both subnets must be stored
    request = {"command": "remote-subnet6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "subnets": [{"id": 1, "subnet": "2001:db8:1::/64",
                                               "shared-network-name": "net1",
                                               "metadata": {"server-tags": ["abc"]}},
                                              {"id": 2, "subnet": "2001:db8:2::/64",
                                               "shared-network-name": "net2",
                                               "metadata": {"server-tags": ["abc"]}}]},
                    "result": 0, "text": "2 IPv6 subnet(s) found."}
    # delete net1 but keep its subnet
    request = {"command": "remote-network6-del",
               "arguments": {"remote": {"type": "mysql"}, "subnets-action": "keep",
                             "shared-networks": [{"name": "net1"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 shared network(s) deleted."}
    request = {"command": "remote-network6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "shared-networks": [{"metadata": {"server-tags": ["abc"]}, "name": "net2"}]},
                    "result": 0, "text": "1 IPv6 shared network(s) found."}
    # subnet 1 survives, now detached from any network
    request = {"command": "remote-subnet6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "subnets": [{"id": 1, "metadata": {"server-tags": ["abc"]},
                                               "shared-network-name": None, "subnet": "2001:db8:1::/64"},
                                              {"id": 2, "metadata": {"server-tags": ["abc"]},
                                               "shared-network-name": "net2", "subnet": "2001:db8:2::/64"}]},
                    "result": 0, "text": "2 IPv6 subnet(s) found."}
    # delete net2 the same way
    request = {"command": "remote-network6-del",
               "arguments": {"remote": {"type": "mysql"}, "subnets-action": "keep",
                             "shared-networks": [{"name": "net2"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 shared network(s) deleted."}
    request = {"command": "remote-network6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0,
                                  "shared-networks": []},
                    "result": 3,
                    "text": "0 IPv6 shared network(s) found."}
    # no networks are left, yet both subnets remain
    request = {"command": "remote-subnet6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "subnets": [{"id": 1, "metadata": {"server-tags": ["abc"]},
                                               "shared-network-name": None, "subnet": "2001:db8:1::/64"},
                                              {"id": 2, "metadata": {"server-tags": ["abc"]},
                                               "shared-network-name": None, "subnet": "2001:db8:2::/64"}]},
                    "result": 0, "text": "2 IPv6 subnet(s) found."}
def test_remote_network6_del_subnet_delete():
    """remote-network6-del with subnets-action=delete must remove the subnets too.

    After deleting net1 only subnet 2 remains; after deleting net2 no subnets
    are left. (The previous in-line comment claimed 2 subnets should remain
    after the first deletion — a copy-paste from the `keep` variant that
    contradicted the assertion below it; it has been corrected.)
    """
    # create two shared networks
    for net_name in ("net1", "net2"):
        request = {"command": "remote-network6-set",
                   "arguments": {"remote": {"type": "mysql"},
                                 "server-tags": ["abc"],
                                 "shared-networks": [{"name": net_name,
                                                      "interface": "$(SERVER_IFACE)"}]}}
        srv_msg.send_ctrl_cmd(request)
    request = {"command": "remote-network6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "shared-networks": [{"metadata": {"server-tags": ["abc"]},
                                                       "name": "net1"},
                                                      {"metadata": {"server-tags": ["abc"]},
                                                       "name": "net2"}]},
                    "result": 0,
                    "text": "2 IPv6 shared network(s) found."}
    # attach one subnet to each network
    for subnet_id, net_name, prefix, pool in (
            (1, "net1", "2001:db8:1::/64", "2001:db8:1::1-2001:db8:1::10"),
            (2, "net2", "2001:db8:2::/64", "2001:db8:2::1-2001:db8:2::10")):
        request = {"command": "remote-subnet6-set",
                   "arguments": {"remote": {"type": "mysql"},
                                 "server-tags": ["abc"],
                                 "subnets": [{"subnet": prefix,
                                              "id": subnet_id,
                                              "interface": "$(SERVER_IFACE)",
                                              "shared-network-name": net_name,
                                              "pools": [{"pool": pool}]}]}}
        resp = srv_msg.send_ctrl_cmd(request)
        assert resp == {"arguments": {"subnets": [{"id": subnet_id, "subnet": prefix}]},
                        "result": 0, "text": "IPv6 subnet successfully set."}
    # both subnets must be stored
    request = {"command": "remote-subnet6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "subnets": [{"id": 1, "subnet": "2001:db8:1::/64",
                                               "shared-network-name": "net1",
                                               "metadata": {"server-tags": ["abc"]}},
                                              {"id": 2, "subnet": "2001:db8:2::/64",
                                               "shared-network-name": "net2",
                                               "metadata": {"server-tags": ["abc"]}}]},
                    "result": 0, "text": "2 IPv6 subnet(s) found."}
    # delete net1 together with its subnet
    request = {"command": "remote-network6-del",
               "arguments": {"remote": {"type": "mysql"}, "subnets-action": "delete",
                             "shared-networks": [{"name": "net1"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 shared network(s) deleted."}
    request = {"command": "remote-network6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "shared-networks": [{"metadata": {"server-tags": ["abc"]}, "name": "net2"}]},
                    "result": 0, "text": "1 IPv6 shared network(s) found."}
    # net1's subnet went away with it: only subnet 2 should be left
    request = {"command": "remote-subnet6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "subnets": [{"id": 2, "metadata": {"server-tags": ["abc"]},
                                               "shared-network-name": "net2", "subnet": "2001:db8:2::/64"}]},
                    "result": 0, "text": "1 IPv6 subnet(s) found."}
    # delete net2 the same way
    request = {"command": "remote-network6-del",
               "arguments": {"remote": {"type": "mysql"}, "subnets-action": "delete",
                             "shared-networks": [{"name": "net2"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 IPv6 shared network(s) deleted."}
    request = {"command": "remote-network6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0,
                                  "shared-networks": []},
                    "result": 3,
                    "text": "0 IPv6 shared network(s) found."}
    # all subnets should be removed now
    request = {"command": "remote-subnet6-list",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0, "subnets": []},
                    "result": 3, "text": "0 IPv6 subnet(s) found."}
def _set_global_parameter():
    """Helper: store decline-probation-period=123456 in the backend and verify the reply."""
    request = {"command": "remote-global-parameter6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": {"decline-probation-period": 123456}}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1, "parameters": {"decline-probation-period": 123456}},
                    "result": 0,
                    "text": "1 DHCPv6 global parameter(s) successfully set."}
# global-parameter tests
def test_remote_global_parameter6_set_integer():
    """An integer-valued global parameter (valid-lifetime) can be stored."""
    request = {"command": "remote-global-parameter6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": {"valid-lifetime": 1000}}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1, "parameters": {"valid-lifetime": 1000}},
                    "result": 0,
                    "text": "1 DHCPv6 global parameter(s) successfully set."}
def test_remote_global_parameter6_set_incorrect_parameter():
    """An unknown global parameter name must be rejected with result 1."""
    request = {"command": "remote-global-parameter6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": {"decline-aaa-period": 1234556}}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "unknown parameter 'decline-aaa-period'"}
def test_remote_global_parameter6_del():
    """A previously stored global parameter can be deleted."""
    _set_global_parameter()
    request = {"command": "remote-global-parameter6-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": ["decline-probation-period"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1},
                    "result": 0, "text": "1 DHCPv6 global parameter(s) deleted."}
def test_remote_global_parameter6_del_not_existing_parameter():
    """Deleting a parameter that was never set reports an empty result (3)."""
    request = {"command": "remote-global-parameter6-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": ["decline-probation-period"]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0},
                    "result": 3, "text": "0 DHCPv6 global parameter(s) deleted."}
def test_remote_global_parameter6_get():
    """A stored global parameter is returned together with its metadata."""
    _set_global_parameter()
    request = {"command": "remote-global-parameter6-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": ["decline-probation-period"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "parameters": {"decline-probation-period": 123456,
                                                 "metadata": {"server-tags": ["abc"]}}},
                    "result": 0, "text": "'decline-probation-period' DHCPv6 global parameter found."}
def test_remote_global_parameter6_get_all_one():
    """get-all with a single stored parameter returns exactly that one."""
    _set_global_parameter()
    request = {"command": "remote-global-parameter6-get-all",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1, "parameters": [{"decline-probation-period": 123456,
                                                              "metadata": {"server-tags": ["abc"]}}]},
                    "result": 0, "text": "1 DHCPv6 global parameter(s) found."}
def test_remote_global_parameter6_get_all_multiple():
    """get-all returns every stored parameter; order is not guaranteed."""
    _set_global_parameter()
    request = {"command": "remote-global-parameter6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "parameters": {"calculate-tee-times": True}}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1, "parameters": {"calculate-tee-times": True}},
                    "result": 0,
                    "text": "1 DHCPv6 global parameter(s) successfully set."}
    request = {"command": "remote-global-parameter6-get-all",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp["result"] == 0
    assert resp["text"] == "2 DHCPv6 global parameter(s) found."
    assert resp["arguments"]["count"] == 2
    # membership checks instead of full-list equality: ordering is unspecified
    returned = resp["arguments"]["parameters"]
    assert {"calculate-tee-times": True, "metadata": {"server-tags": ["abc"]}} in returned
    assert {"decline-probation-period": 123456,
            "metadata": {"server-tags": ["abc"]}} in returned
def test_remote_global_parameter6_get_all_zero():
    """get-all with nothing stored reports an empty result (3)."""
    request = {"command": "remote-global-parameter6-get-all",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0, "parameters": []},
                    "result": 3, "text": "0 DHCPv6 global parameter(s) found."}
def _set_option_def(channel='http'):
    """Helper: define custom option 222 ('foo', uint32) in the backend and verify the reply."""
    request = {"command": "remote-option-def6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo",
                                              "code": 222,
                                              "type": "uint32"}]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"arguments": {"option-defs": [{"code": 222, "space": "dhcp6"}]},
                    "result": 0, "text": "DHCPv6 option definition successfully set."}
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_option_def6_set_basic(channel):
    """An option definition can be set over either control channel."""
    _set_option_def(channel=channel)
def test_remote_option_def6_set_using_zero_as_code():
    """Option code 0 is reserved and must be rejected."""
    request = {"command": "remote-option-def6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo",
                                              "code": 0,
                                              "type": "uint32"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "invalid option code '0': reserved value" in resp["text"]
def test_remote_option_def6_set_using_standard_code():
    """Redefining a standard option code (24) in 'dhcp6' must be rejected."""
    request = {"command": "remote-option-def6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo",
                                              "code": 24,
                                              "type": "uint32"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "an option with code 24 already exists in space 'dhcp6'"}
def test_remote_option_def6_set_missing_parameters():
    """Each mandatory option-def field (name, code, type) missing must be rejected.

    The three original copy-pasted request blocks differed only in which
    mandatory key was omitted, so they are driven from a small table; the
    request payloads sent to the server are unchanged.
    """
    # fields common to all three malformed option definitions
    base = {"array": False,
            "record-types": "",
            "space": "dhcp6",
            "encapsulate": ""}
    cases = [
        ({"code": 222, "type": "uint32"}, "missing parameter 'name'"),
        ({"name": "aa", "type": "uint32"}, "missing parameter 'code'"),
        ({"name": "aa", "code": 234}, "missing parameter 'type'"),
    ]
    for present_fields, expected_error in cases:
        option_def = dict(base, **present_fields)
        cmd = dict(command="remote-option-def6-set",
                   arguments={"remote": {"type": "mysql"},
                              "server-tags": ["abc"],
                              "option-defs": [option_def]})
        response = srv_msg.send_ctrl_cmd(cmd, exp_result=1)
        assert expected_error in response["text"]
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_option_def6_get_basic(channel):
    """A stored option definition can be fetched over either control channel."""
    _set_option_def()
    request = {"command": "remote-option-def6-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"code": 222}]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"arguments": {"count": 1,
                                  "option-defs": [{"array": False, "code": 222, "encapsulate": "",
                                                   "name": "foo", "record-types": "", "space": "dhcp6",
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "type": "uint32"}]},
                    "result": 0, "text": "DHCPv6 option definition 222 in 'dhcp6' found."}
def test_remote_option_def6_get_multiple_defs():
    """Two defs sharing code 222 in different spaces are distinguished by 'space'."""
    _set_option_def()
    # add a second definition with the same code in a custom space
    request = {"command": "remote-option-def6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo",
                                              "code": 222,
                                              "type": "uint32",
                                              "space": "abc"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"option-defs": [{"code": 222, "space": "abc"}]},
                    "result": 0, "text": "DHCPv6 option definition successfully set."}
    # fetching with the custom space returns the second definition
    request = {"command": "remote-option-def6-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"code": 222,
                                              "space": "abc"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "option-defs": [{"array": False, "code": 222, "encapsulate": "",
                                                   "name": "foo", "record-types": "", "space": "abc",
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "type": "uint32"}]},
                    "result": 0, "text": "DHCPv6 option definition 222 in 'abc' found."}
def test_remote_option_def6_get_missing_code():
    """get without the mandatory 'code' parameter must be rejected."""
    request = {"command": "remote-option-def6-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "missing 'code' parameter"}
def test_remote_option_def6_get_all_option_not_defined():
    """get-all with no stored definitions reports an empty result (3)."""
    request = {"command": "remote-option-def6-get-all",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0, "option-defs": []},
                    "result": 3, "text": "0 DHCPv6 option definition(s) found."}
def test_remote_option_def6_get_all_multiple_defs():
    """get-all returns both defs sharing code 222 ('abc' space first)."""
    _set_option_def()
    request = {"command": "remote-option-def6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo",
                                              "code": 222,
                                              "type": "uint32",
                                              "space": "abc"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"option-defs": [{"code": 222, "space": "abc"}]},
                    "result": 0, "text": "DHCPv6 option definition successfully set."}
    request = {"command": "remote-option-def6-get-all",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 2,
                                  "option-defs": [{"array": False, "code": 222,
                                                   "encapsulate": "", "name": "foo",
                                                   "record-types": "", "space": "abc",
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "type": "uint32"},
                                                  {"array": False, "code": 222,
                                                   "encapsulate": "", "name": "foo",
                                                   "record-types": "", "space": "dhcp6",
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "type": "uint32"}]},
                    "result": 0, "text": "2 DHCPv6 option definition(s) found."}
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_option_def6_get_all_basic(channel):
    """get-all returns the single stored definition over either channel."""
    _set_option_def()
    request = {"command": "remote-option-def6-get-all",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"arguments": {"count": 1,
                                  "option-defs": [{"array": False, "code": 222, "encapsulate": "",
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "name": "foo", "record-types": "", "space": "dhcp6",
                                                   "type": "uint32"}]},
                    "result": 0, "text": "1 DHCPv6 option definition(s) found."}
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_option_def6_del_basic(channel):
    """A stored option definition can be deleted over either channel."""
    _set_option_def()
    request = {"command": "remote-option-def6-del",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"],
                             "option-defs": [{"code": 222}]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 DHCPv6 option definition(s) deleted."}
def test_remote_option_def6_del_different_space():
    """del in a space where code 222 was never defined deletes nothing (3)."""
    _set_option_def()
    request = {"command": "remote-option-def6-del",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"],
                             "option-defs": [{"code": 222, "space": "abc"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0}, "result": 3, "text": "0 DHCPv6 option definition(s) deleted."}
def test_remote_option_def6_del_incorrect_code():
    """del with a missing or non-integer 'code' must be rejected.

    Three copy-pasted request/assert pairs are folded into one data-driven
    loop; the payloads and the expected exact replies are unchanged.
    """
    cases = [
        # no 'code' key at all (only an irrelevant 'name')
        ({"name": 22}, {"result": 1, "text": "missing 'code' parameter"}),
        # completely empty option-def
        ({}, {"result": 1, "text": "missing 'code' parameter"}),
        # 'code' present but not an integer
        ({"code": "abc"}, {"result": 1, "text": "'code' parameter is not an integer"}),
    ]
    for option_def, expected in cases:
        cmd = dict(command="remote-option-def6-del",
                   arguments={"remote": {"type": "mysql"},
                              "server-tags": ["abc"],
                              "option-defs": [option_def]})
        response = srv_msg.send_ctrl_cmd(cmd, exp_result=1)
        assert response == expected
def test_remote_option_def6_del_missing_option():
    """del of a code that was never defined deletes nothing (3)."""
    request = {"command": "remote-option-def6-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"code": 212}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0}, "result": 3, "text": "0 DHCPv6 option definition(s) deleted."}
def test_remote_option_def6_del_multiple_options():
    """del without 'space' removes only the 'dhcp6' def; the 'abc' twin survives."""
    _set_option_def()
    request = {"command": "remote-option-def6-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"name": "foo",
                                              "code": 222,
                                              "type": "uint32",
                                              "space": "abc"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"option-defs": [{"code": 222, "space": "abc"}]},
                    "result": 0, "text": "DHCPv6 option definition successfully set."}
    request = {"command": "remote-option-def6-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "option-defs": [{"code": 222}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 DHCPv6 option definition(s) deleted."}
    request = {"command": "remote-option-def6-get-all",
               "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "option-defs": [{"array": False, "code": 222, "encapsulate": "",
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "name": "foo", "record-types": "", "space": "abc",
                                                   "type": "uint32"}]},
                    "result": 0, "text": "1 DHCPv6 option definition(s) found."}
def _set_global_option(channel='http'):
    """Helper: store global option 7 (preference) with data '123' and verify the reply."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7,
                                          "data": "123"}]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"result": 0, "text": "DHCPv6 option successfully set.",
                    "arguments": {"options": [{"code": 7, "space": "dhcp6"}]}}
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_global_option6_global_set_basic(channel):
    """A global option can be set over either control channel."""
    _set_global_option(channel=channel)
def test_remote_global_option6_global_set_missing_data():
    """Setting an option without a 'data' value must be rejected."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "no option value specified" in resp["text"]
def test_remote_global_option6_global_set_name():
    """An option can be identified by name; the reply reports its code (21)."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"name": "sip-server-dns",
                                          "data": "isc.example.com"}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"options": [{"code": 21, "space": "dhcp6"}]},
                    "result": 0, "text": "DHCPv6 option successfully set."}
def test_remote_global_option6_global_set_incorrect_code_missing_name():
    """A non-integer 'code' with no 'name' must be rejected."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": "aaa"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "'code' parameter is not an integer" in resp["text"]
def test_remote_global_option6_global_set_incorrect_name_missing_code():
    """A non-string 'name' with no 'code' must be rejected."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"name": 123}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "'name' parameter is not a string" in resp["text"]
def test_remote_global_option6_global_set_missing_code_and_name():
    """An option with neither 'code' nor 'name' must be rejected."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "option data configuration requires one of 'code' or 'name' parameters to be specified" in resp["text"]
def test_remote_global_option6_global_set_incorrect_code():
    """A non-integer 'code' must be rejected even when a 'name' is present."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": "aa",
                                          "name": "cc"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "'code' parameter is not an integer" in resp["text"]
def test_remote_global_option6_global_set_incorrect_name():
    """A non-string 'name' must be rejected even with valid code and data."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7,
                                          "name": 7,
                                          "data": "123"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "'name' parameter is not a string" in resp["text"]
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_global_option6_global_get_basic(channel):
    """A stored global option can be fetched over either control channel."""
    _set_global_option()
    request = {"command": "remote-option6-global-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7}]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"arguments": {"count": 1,
                                  "options": [{"always-send": False, "code": 7, "csv-format": True,
                                               "data": "123",
                                               "metadata": {"server-tags": ["abc"]},
                                               "name": "preference", "space": "dhcp6"}]},
                    "result": 0, "text": "DHCPv6 option 7 in 'dhcp6' found."}
def test_remote_global_option6_global_set_different_space():
    """Setting an option in a space with no matching definition must fail."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7,
                                          "data": "123",
                                          "always-send": True,
                                          "csv-format": True,
                                          "space": "xyz"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "definition for the option 'xyz.' having code '7' does not exist" in resp["text"]
def test_remote_global_option6_global_set_csv_false_incorrect():
    """csv-format=False requires hex data; 'Z' is not a hex digit."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7,
                                          "data": "12Z3",
                                          "always-send": True,
                                          "csv-format": False}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "option data is not a valid string of hexadecimal digits: 12Z3" in resp["text"]
def test_remote_global_option6_global_set_csv_false_incorrect_hex():
    """A mostly-hex payload with one bad trailing character must be rejected."""
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7,
                                          "data": "C0000201Z",
                                          "always-send": True,
                                          "csv-format": False}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert "option data is not a valid string of hexadecimal digits: C0000201Z" in resp["text"]
@pytest.mark.parametrize('channel', ['socket', 'http'])
def test_remote_global_option6_global_del_basic(channel):
    """A stored global option can be deleted over either control channel."""
    _set_global_option()
    request = {"command": "remote-option6-global-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7}]}}
    resp = srv_msg.send_ctrl_cmd(request, channel=channel)
    assert resp == {"arguments": {"count": 1}, "result": 0, "text": "1 DHCPv6 option(s) deleted."}
def test_remote_global_option6_global_del_missing_code():
    """del without a 'code' key must be rejected."""
    request = {"command": "remote-option6-global-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"ab": 7}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "missing 'code' parameter"}
def test_remote_global_option6_global_del_incorrect_code():
    """del with a string 'code' must be rejected."""
    request = {"command": "remote-option6-global-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": "7"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "'code' parameter is not an integer"}
def test_remote_global_option6_global_del_missing_option():
    """del of an option that was never set deletes nothing (3)."""
    request = {"command": "remote-option6-global-del",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 7}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0}, "result": 3, "text": "0 DHCPv6 option(s) deleted."}
def test_remote_global_option6_global_get_missing_code():
    """get without a 'code' key must be rejected."""
    request = {"command": "remote-option6-global-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"ab": 6}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "missing 'code' parameter"}
def test_remote_global_option6_global_get_incorrect_code():
    """get with a string 'code' must be rejected."""
    request = {"command": "remote-option6-global-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": "7"}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=1)
    assert resp == {"result": 1, "text": "'code' parameter is not an integer"}
def test_remote_global_option6_global_get_missing_option():
    """get of an option that was never set reports an empty result (3)."""
    request = {"command": "remote-option6-global-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 6}]}}
    resp = srv_msg.send_ctrl_cmd(request, exp_result=3)
    assert resp == {"arguments": {"count": 0, "options": []},
                    "result": 3, "text": "DHCPv6 option 6 in 'dhcp6' not found."}
def test_remote_global_option6_global_get_csv_false():
    """An option stored with csv-format=False round-trips as raw hex (upper-cased on read)."""
    # payload is 16 octets, i.e. one IPv6 address for sip-server-addr (code 22)
    request = {"command": "remote-option6-global-set",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 22,
                                          "data": "C0000301C00003020a0b0c0d0e0f0807",
                                          "always-send": True,
                                          "csv-format": False}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"result": 0, "text": "DHCPv6 option successfully set.",
                    "arguments": {"options": [{"code": 22, "space": "dhcp6"}]}}
    request = {"command": "remote-option6-global-get",
               "arguments": {"remote": {"type": "mysql"},
                             "server-tags": ["abc"],
                             "options": [{"code": 22}]}}
    resp = srv_msg.send_ctrl_cmd(request)
    assert resp == {"arguments": {"count": 1,
                                  "options": [{"always-send": True, "code": 22, "csv-format": False,
                                               "data": "C0000301C00003020A0B0C0D0E0F0807",
                                               "metadata": {"server-tags": ["abc"]},
                                               "name": "sip-server-addr", "space": "dhcp6"}]},
                    "result": 0, "text": "DHCPv6 option 22 in 'dhcp6' found."}
def test_remote_global_option6_global_get_all():
    _set_global_option()

    # Add a second global option so that get-all returns two entries.
    cmd = {
        "command": "remote-option6-global-set",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "options": [{"code": 22, "data": "2001:db8::2"}],
        },
    }
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"result": 0, "text": "DHCPv6 option successfully set.",
                        "arguments": {"options": [{"code": 22, "space": "dhcp6"}]}}

    # Both options are reported by get-all.
    cmd = {
        "command": "remote-option6-global-get-all",
        "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]},
    }
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 2,
                                      "options": [{"always-send": False, "code": 7, "csv-format": True,
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "data": "123", "name": "preference",
                                                   "space": "dhcp6"},
                                                  {"always-send": False, "code": 22, "csv-format": True,
                                                   "metadata": {"server-tags": ["abc"]},
                                                   "data": "2001:db8::2", "name": "sip-server-addr",
                                                   "space": "dhcp6"}]},
                        "result": 0, "text": "2 DHCPv6 option(s) found."}

    # Delete option 7; only option 22 should remain.
    cmd = {
        "command": "remote-option6-global-del",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "options": [{"code": 7}],
        },
    }
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 1}, "result": 0, "text": "1 DHCPv6 option(s) deleted."}

    cmd = {
        "command": "remote-option6-global-get-all",
        "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]},
    }
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 1, "options": [{"always-send": False, "code": 22, "csv-format": True,
                                                               "data": "2001:db8::2", "name": "sip-server-addr",
                                                               "metadata": {"server-tags": ["abc"]},
                                                               "space": "dhcp6"}]},
                        "result": 0, "text": "1 DHCPv6 option(s) found."}

    # Delete the last option; get-all now reports an empty set (result 3).
    cmd = {
        "command": "remote-option6-global-del",
        "arguments": {
            "remote": {"type": "mysql"},
            "server-tags": ["abc"],
            "options": [{"code": 22}],
        },
    }
    response = srv_msg.send_ctrl_cmd(cmd)
    assert response == {"arguments": {"count": 1}, "result": 0, "text": "1 DHCPv6 option(s) deleted."}

    cmd = {
        "command": "remote-option6-global-get-all",
        "arguments": {"remote": {"type": "mysql"}, "server-tags": ["abc"]},
    }
    response = srv_msg.send_ctrl_cmd(cmd, exp_result=3)
    assert response == {"arguments": {"count": 0, "options": []}, "result": 3, "text": "0 DHCPv6 option(s) found."}
| 60.372644
| 120
| 0.388817
| 8,888
| 112,112
| 4.769914
| 0.031728
| 0.039391
| 0.051209
| 0.051185
| 0.942588
| 0.926572
| 0.908739
| 0.891638
| 0.866423
| 0.855337
| 0
| 0.039864
| 0.468139
| 112,112
| 1,856
| 121
| 60.405172
| 0.671128
| 0.005004
| 0
| 0.782096
| 0
| 0.000694
| 0.25141
| 0.039962
| 0
| 0
| 0
| 0
| 0.112422
| 1
| 0.063845
| false
| 0
| 0.002082
| 0
| 0.065926
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
525e33a8dc344dd38134bef9c47d4f50aaf4b116
| 222
|
py
|
Python
|
trakt/core/components/__init__.py
|
jmolinski/traktpy
|
e6ff22acaf273b7b45070a4f8938c210fe4d63d7
|
[
"MIT"
] | null | null | null |
trakt/core/components/__init__.py
|
jmolinski/traktpy
|
e6ff22acaf273b7b45070a4f8938c210fe4d63d7
|
[
"MIT"
] | 1
|
2019-04-13T10:15:48.000Z
|
2019-04-13T10:15:48.000Z
|
trakt/core/components/__init__.py
|
jmolinski/traktpy
|
e6ff22acaf273b7b45070a4f8938c210fe4d63d7
|
[
"MIT"
] | null | null | null |
# flake8: noqa: F403
from trakt.core.components.cache import CacheManager, FrozenRequest
from trakt.core.components.http_component import DefaultHttpComponent
from trakt.core.components.oauth import DefaultOauthComponent
| 37
| 69
| 0.86036
| 26
| 222
| 7.307692
| 0.615385
| 0.142105
| 0.205263
| 0.363158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.081081
| 222
| 5
| 70
| 44.4
| 0.911765
| 0.081081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5e9087b7af825c28d957d5ea97fd019874a1cff9
| 2,991
|
py
|
Python
|
incapsulaminer/node.py
|
bilalbox/incapsula-miner
|
7bf8ab22ae546a8ce6734702d0bbd350bd3fe1b4
|
[
"Apache-2.0"
] | 1
|
2019-05-30T18:54:28.000Z
|
2019-05-30T18:54:28.000Z
|
incapsulaminer/node.py
|
bilalbox/incapsula-miner
|
7bf8ab22ae546a8ce6734702d0bbd350bd3fe1b4
|
[
"Apache-2.0"
] | 1
|
2017-12-05T09:42:49.000Z
|
2017-12-05T09:47:01.000Z
|
incapsulaminer/node.py
|
bilalbox/incapsula-miner
|
7bf8ab22ae546a8ce6734702d0bbd350bd3fe1b4
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import logging
import requests
import json
from minemeld.ft.basepoller import BasePollerFT
LOG = logging.getLogger(__name__)
class IPv4(BasePollerFT):
    """Miner that polls the Incapsula API and emits its published IPv4 ranges.

    Fix over the original: the bare ``except:`` around ``raise_for_status``
    also caught system-exiting exceptions (KeyboardInterrupt, SystemExit);
    it is narrowed to ``except Exception:``. The error is still logged and
    re-raised unchanged.
    """

    def configure(self):
        super(IPv4, self).configure()

        # How long (seconds) to wait on the HTTP request before giving up.
        self.polling_timeout = self.config.get('polling_timeout', 20)
        # TLS verification is off by default — preserved from the original;
        # consider enabling 'verify_cert' in production configs.
        self.verify_cert = self.config.get('verify_cert', False)

        self.url = 'https://my.incapsula.com/api/integration/v1/ips'

    def _process_item(self, item):
        # called on each item returned by _build_iterator
        # it should return a list of (indicator, value) pairs
        if item is None:
            LOG.error('%s - no IP information found', self.name)
            return []

        value = {
            'type': 'IPv4',
            'confidence': 100
        }
        return [[item, value]]

    def _build_iterator(self, now):
        # called at every polling interval
        # here you should retrieve and return the list of items
        rkwargs = dict(
            stream=False,
            verify=self.verify_cert,
            timeout=self.polling_timeout,
            data=[('resp_format', 'json')]
        )

        r = requests.post(
            self.url,
            **rkwargs
        )

        try:
            r.raise_for_status()
        except Exception:
            # Narrowed from a bare except so Ctrl-C / SystemExit propagate
            # without being logged as request failures.
            LOG.debug('%s - exception in request: %s %s',
                      self.name, r.status_code, r.content)
            raise

        # parse the results into a list of IPv4 range strings
        return iter(json.loads(r.text)['ipRanges'])
class IPv6(BasePollerFT):
    """Miner that polls the Incapsula API and emits its published IPv6 ranges.

    Fix over the original: the bare ``except:`` around ``raise_for_status``
    also caught system-exiting exceptions (KeyboardInterrupt, SystemExit);
    it is narrowed to ``except Exception:``. The error is still logged and
    re-raised unchanged.
    """

    def configure(self):
        super(IPv6, self).configure()

        # How long (seconds) to wait on the HTTP request before giving up.
        self.polling_timeout = self.config.get('polling_timeout', 20)
        # TLS verification is off by default — preserved from the original;
        # consider enabling 'verify_cert' in production configs.
        self.verify_cert = self.config.get('verify_cert', False)

        self.url = 'https://my.incapsula.com/api/integration/v1/ips'

    def _process_item(self, item):
        # called on each item returned by _build_iterator
        # it should return a list of (indicator, value) pairs
        if item is None:
            LOG.error('%s - no IP information found', self.name)
            return []

        value = {
            'type': 'IPv6',
            'confidence': 100
        }
        return [[item, value]]

    def _build_iterator(self, now):
        # called at every polling interval
        # here you should retrieve and return the list of items
        rkwargs = dict(
            stream=False,
            verify=self.verify_cert,
            timeout=self.polling_timeout,
            data=[('resp_format', 'json')]
        )

        r = requests.post(
            self.url,
            **rkwargs
        )

        try:
            r.raise_for_status()
        except Exception:
            # Narrowed from a bare except so Ctrl-C / SystemExit propagate
            # without being logged as request failures.
            LOG.debug('%s - exception in request: %s %s',
                      self.name, r.status_code, r.content)
            raise

        # parse the results into a list of IPv6 range strings
        return iter(json.loads(r.text)['ipv6Ranges'])
| 28.216981
| 69
| 0.554664
| 341
| 2,991
| 4.744868
| 0.31085
| 0.051916
| 0.044499
| 0.034611
| 0.886279
| 0.845488
| 0.845488
| 0.845488
| 0.845488
| 0.845488
| 0
| 0.009669
| 0.343029
| 2,991
| 105
| 70
| 28.485714
| 0.81374
| 0.144768
| 0
| 0.72973
| 0
| 0
| 0.137417
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0
| 0.067568
| 0
| 0.256757
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5eac09f5bde32596e1ef1ac080a5e5f464714ac9
| 2,850
|
py
|
Python
|
audio_dspy/adaptive_filt.py
|
jatinchowdhury18/AudioDSPy
|
96ce0e223877cb5515f14da4f6d2ce8282d81f3c
|
[
"MIT"
] | 37
|
2019-11-06T07:47:40.000Z
|
2022-03-07T10:50:45.000Z
|
audio_dspy/adaptive_filt.py
|
jatinchowdhury18/AudioDSPy
|
96ce0e223877cb5515f14da4f6d2ce8282d81f3c
|
[
"MIT"
] | 3
|
2019-10-22T03:24:09.000Z
|
2022-01-30T00:45:21.000Z
|
audio_dspy/adaptive_filt.py
|
jatinchowdhury18/AudioDSPy
|
96ce0e223877cb5515f14da4f6d2ce8282d81f3c
|
[
"MIT"
] | 3
|
2020-09-09T20:18:37.000Z
|
2022-03-17T00:48:05.000Z
|
import numpy as np
def LMS(input, desired, mu, L):
    """
    Performs LMS adaptive filtering on input signal

    Parameters:
    input : array-like
        Input signal
    desired : array-like
        Desired signal
    mu : float
        Learning rate (step size)
    L : int
        Length of adaptive filter

    Return:
    y : array-like
        Filtered signal
    e : array-like
        Error signal
    w : array-like
        Final filter coefficients (of length L)
    """
    assert len(input) == len(
        desired), 'Desired and input signals must have equal length'

    N = len(input)
    w = np.zeros(L)
    y = np.zeros(N)
    x_win = np.zeros(L)
    e = np.zeros(N)

    for n in range(N):
        # slide the L-sample input window forward by one sample
        x_win = np.concatenate((x_win[1:L], [input[n]]))
        y[n] = np.dot(w, x_win)
        e[n] = desired[n] - y[n]
        # standard LMS coefficient update: w += mu * error * input window
        w = w + mu * e[n] * x_win

    return y, e, w
def NLMS(input, desired, mu=0.1, L=7):
    """
    Performs Normalized LMS adaptive filtering on input signal

    Parameters:
    input : array-like
        Input signal
    desired : array-like
        Desired signal
    mu : float
        Learning rate (step size)
    L : int
        Length of adaptive filter

    Return:
    y : array-like
        Filtered signal
    e : array-like
        Error signal
    w : array-like
        Final filter coefficients (of length L)
    """
    assert len(input) == len(
        desired), 'Desired and input signals must have equal length'

    N = len(input)
    w = np.zeros(L)
    y = np.zeros(N)
    x_win = np.zeros(L)
    e = np.zeros(N)

    for n in range(N):
        # slide the L-sample input window forward by one sample
        x_win = np.concatenate((x_win[1:L], [input[n]]))
        y[n] = np.dot(w, x_win)
        e[n] = desired[n] - y[n]
        # normalized LMS update. Bug fix: the original divided by
        # sqrt(sum(x_win**2)) unconditionally, producing NaNs whenever the
        # window is all zeros (e.g. a signal that starts with zeros). Skip
        # the update for a zero-energy window; behavior is unchanged
        # otherwise.
        energy = np.dot(x_win, x_win)
        if energy > 0:
            w = w + mu * e[n] * x_win / np.sqrt(energy)

    return y, e, w
def NL_LMS(input, desired, mu, L, g, g_prime):
    """
    Performs Nonlinear LMS adaptive filtering on input signal

    Parameters:
    input : array-like
        Input signal
    desired : array-like
        Desired signal
    mu : float
        Learning rate (step size)
    L : int
        Length of adaptive filter
    g : lambda (float) : float
        Nonlinear function, ex: tanh(x)
    g_prime : lambda (float) : float
        Derivative of nonlinear function, ex 1/cosh(x)^2

    Return:
    y : array-like
        Filtered signal
    e : array-like
        Error signal
    w : array-like
        Final filter coefficients (of length L)
    """
    assert len(input) == len(
        desired), 'Desired and input signals must have equal length'

    N = len(input)
    w = np.zeros(L)
    y = np.zeros(N)
    x_win = np.zeros(L)
    e = np.zeros(N)

    for n in range(N):
        # slide the L-sample input window forward by one sample
        x_win = np.concatenate((x_win[1:L], [input[n]]))
        y[n] = np.dot(w, x_win)
        # error is measured after the output nonlinearity g
        e[n] = desired[n] - g(y[n])
        # chain rule: scale the LMS update by g'(y[n])
        w = w + mu * e[n] * x_win * g_prime(y[n])

    return y, e, w
| 22.265625
| 68
| 0.549825
| 426
| 2,850
| 3.631455
| 0.161972
| 0.04137
| 0.029089
| 0.031674
| 0.850032
| 0.811248
| 0.811248
| 0.811248
| 0.811248
| 0.803491
| 0
| 0.004747
| 0.334737
| 2,850
| 127
| 69
| 22.440945
| 0.811181
| 0.414035
| 0
| 0.813953
| 0
| 0
| 0.101695
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 1
| 0.069767
| false
| 0
| 0.023256
| 0
| 0.162791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ec2030f5d2c2d6c5af8d4acf27f103073fa0d0b
| 3,846
|
py
|
Python
|
tests/unit/utils/test_platforms.py
|
cmcmarrow/pytest-salt-factories
|
12515411ea0fa11d7058a9deb61584a56c5f5108
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/utils/test_platforms.py
|
cmcmarrow/pytest-salt-factories
|
12515411ea0fa11d7058a9deb61584a56c5f5108
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/utils/test_platforms.py
|
cmcmarrow/pytest-salt-factories
|
12515411ea0fa11d7058a9deb61584a56c5f5108
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
tests.unit.utils.test_platforms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests for saltfactories.utils.platforms
"""
import mock
import saltfactories.utils.platform
def test_is_windows():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_windows", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_windows() is expected
def test_is_not_windows():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_windows. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_windows", return_value=return_value):
        assert saltfactories.utils.platform.is_windows() is return_value
def test_is_linux():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_linux", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_linux() is expected
def test_is_not_linux():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_linux. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_linux", return_value=return_value):
        assert saltfactories.utils.platform.is_linux() is return_value
def test_is_darwin():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_darwin", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_darwin() is expected
def test_is_not_darwin():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_darwin. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_darwin", return_value=return_value):
        assert saltfactories.utils.platform.is_darwin() is return_value
def test_is_sunos():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_sunos", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_sunos() is expected
def test_is_not_sunos():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_sunos. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_sunos", return_value=return_value):
        assert saltfactories.utils.platform.is_sunos() is return_value
def test_is_smartos():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_smartos", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_smartos() is expected
def test_is_not_smartos():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_smartos. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_smartos", return_value=return_value):
        assert saltfactories.utils.platform.is_smartos() is return_value
def test_is_freebsd():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_freebsd", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_freebsd() is expected
def test_is_not_freebsd():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_freebsd. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_freebsd", return_value=return_value):
        assert saltfactories.utils.platform.is_freebsd() is return_value
def test_is_netbsd():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_netbsd", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_netbsd() is expected
def test_is_not_netbsd():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_netbsd. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_netbsd", return_value=return_value):
        assert saltfactories.utils.platform.is_netbsd() is return_value
def test_is_openbsd():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_openbsd", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_openbsd() is expected
def test_is_not_openbsd():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_openbsd. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_openbsd", return_value=return_value):
        assert saltfactories.utils.platform.is_openbsd() is return_value
def test_is_aix():
    # saltfactories must delegate straight to salt's platform helper.
    expected = True
    patched = mock.patch("salt.utils.platform.is_aix", return_value=expected)
    with patched:
        assert saltfactories.utils.platform.is_aix() is expected
def test_is_not_aix():
    # Bug fix: this "not" variant previously patched the helper to return
    # True, duplicating test_is_aix. Pin the False branch instead.
    return_value = False
    with mock.patch("salt.utils.platform.is_aix", return_value=return_value):
        assert saltfactories.utils.platform.is_aix() is return_value
| 32.319328
| 81
| 0.74441
| 532
| 3,846
| 5.092105
| 0.06015
| 0.292359
| 0.199336
| 0.126246
| 0.954596
| 0.954596
| 0.954596
| 0.941307
| 0.941307
| 0.941307
| 0
| 0.000305
| 0.148206
| 3,846
| 118
| 82
| 32.59322
| 0.826618
| 0.033021
| 0
| 0.72973
| 0
| 0
| 0.140579
| 0.140579
| 0
| 0
| 0
| 0
| 0.243243
| 1
| 0.243243
| false
| 0
| 0.027027
| 0
| 0.27027
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.