hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
834d7d80737561e04b4fc7112c3661611e80b59c
| 199
|
py
|
Python
|
setup.py
|
UbuhingaVizion/ihela-pyhton-client
|
a5f808a0c138ef407416f0d8548e1ddf8957a12a
|
[
"MIT"
] | 2
|
2020-12-10T13:20:37.000Z
|
2021-11-15T02:44:16.000Z
|
setup.py
|
UbuhingaVizion/ihela-pyhton-client
|
a5f808a0c138ef407416f0d8548e1ddf8957a12a
|
[
"MIT"
] | 3
|
2020-09-19T20:05:23.000Z
|
2021-06-02T00:46:53.000Z
|
setup.py
|
UbuhingaVizion/ihela-pyhton-client
|
a5f808a0c138ef407416f0d8548e1ddf8957a12a
|
[
"MIT"
] | 4
|
2020-09-09T16:40:10.000Z
|
2021-08-03T09:48:34.000Z
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(setup_cfg=True)
| 16.583333
| 39
| 0.748744
| 27
| 199
| 5.37037
| 0.555556
| 0.193103
| 0.275862
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190955
| 199
| 11
| 40
| 18.090909
| 0.900621
| 0.100503
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.571429
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
55d197f76f27cf4fd77bdcdad5363a4e10ec53a4
| 59,235
|
py
|
Python
|
welleng/errors/tool_errors.py
|
mkamyab/welleng
|
0ab73440e5ac3ad9a67d880658f9cdde33c0e0e7
|
[
"Apache-2.0"
] | 50
|
2020-12-02T13:53:24.000Z
|
2022-03-30T15:17:47.000Z
|
welleng/errors/tool_errors.py
|
mkamyab/welleng
|
0ab73440e5ac3ad9a67d880658f9cdde33c0e0e7
|
[
"Apache-2.0"
] | 27
|
2020-11-27T13:43:47.000Z
|
2022-02-18T01:54:02.000Z
|
welleng/errors/tool_errors.py
|
mkamyab/welleng
|
0ab73440e5ac3ad9a67d880658f9cdde33c0e0e7
|
[
"Apache-2.0"
] | 20
|
2020-12-03T18:59:02.000Z
|
2022-02-16T13:20:55.000Z
|
import numpy as np
from numpy import sin, cos, tan, pi, sqrt
from numpy.core.defchararray import index
import yaml
import os
from collections import OrderedDict
# import imp
# import welleng.error
from ..utils import NEV_to_HLA
# since this is running on different OS flavors, resolve resource paths
# relative to this module's directory rather than the current working dir
PATH = os.path.dirname(__file__)
# path to the tool model index yaml (not read anywhere in this chunk --
# presumably consumed by callers; verify before removing)
TOOL_INDEX = os.path.join(
    '', *[PATH, 'tool_index.yaml']
)
# numerical floor used to clamp near-singular denominators (see ABIZ)
ACCURACY = 1e-6
class ToolError:
    """
    Applies the ISCWSA listed tool error terms of a named error model to
    a survey, accumulating each code's covariance into combined NEV and
    HLA covariance matrices.
    """

    def __init__(
        self,
        error,
        model
    ):
        """
        Class using the ISCWSA listed tool errors to determine well bore
        uncertainty.

        Parameters
        ----------
        error: an initiated welleng.error.ErrorModel object
        model: string
            Name of the tool error model; a ``<model>.yaml`` file must
            exist in the local ``tool_codes`` directory.

        Returns
        -------
        errors: welleng.error.ErrorModel object
            A populated ErrorModel object for the selected error model.
        """
        self.e = error
        self.errors = {}
        filename = os.path.join(
            '', *[PATH, 'tool_codes', f"{model}.yaml"]
        )
        with open(filename, 'r') as file:
            self.em = yaml.safe_load(file)
        # for gyro tools the continuous survey errors need to be done last,
        # so keep the codes in an explicitly ordered mapping
        self.em['codes'] = OrderedDict(self.em['codes'])
        gyro_continuous = ['GXY-GD', 'GXY-GRW']
        gyro_stationary = ['GXY-B1S', 'GXY-B2S', 'GXY-G4', 'GXY-RN']
        # bug fix: initialize once, outside the loop -- previously the list
        # was reset on every match (keeping only the last continuous tool)
        # and was left undefined when no continuous tool was present
        self.gyro_continuous = []
        for tool in gyro_continuous:
            if tool in self.em['codes']:
                self.em['codes'].move_to_end(tool)
                self.gyro_continuous.append(tool)
        self.gyro_stationary = [
            tool for tool in gyro_stationary
            if tool in self.em['codes']
        ]
        # 'Tortusity' [sic] matches the key spelling used in the yaml headers
        if 'Default Tortusity (rad/m)' in self.em['header']:
            self.tortuosity = self.em['header']['Default Tortusity (rad/m)']
        elif 'XCL Tortuosity' in self.em['header']:
            # assuming that this is always 1 deg / 100 ft but this might not
            # be the case
            # TODO use pint to handle this string inputs
            self.tortuosity = (np.radians(1.) / 100) * 3.281
        else:
            self.tortuosity = None
        if "Inclination Range Max" in self.em['header'].keys():
            # e.g. "90 deg" -> 90 -> radians; reject unsuitable well paths
            value = np.radians(float(
                self.em['header']['Inclination Range Max'].split(" ")[0]
            ))
            assert np.amax(self.e.survey.inc_rad) < value, (
                "Model not suitable for this well path inclination"
            )
        self._initiate_func_dict()
        # evaluate every error code listed in the model, in order
        for err in self.em['codes']:
            func = self.em['codes'][err]['function']
            mag = self.em['codes'][err]['magnitude']
            propagation = self.em['codes'][err]['propagation']
            self.errors[err] = (
                self.call_func(
                    code=err,
                    func=func,
                    error=self.e,
                    mag=mag,
                    propagation=propagation,
                    tortuosity=self.tortuosity,
                    header=self.em['header'],
                    errors=self
                )
            )
        # sum the per-code covariances and convert NEV -> HLA
        self.cov_NEVs = np.zeros((3, 3, len(self.e.survey_rad)))
        for _, value in self.errors.items():
            self.cov_NEVs += value.cov_NEV
        self.cov_HLAs = NEV_to_HLA(self.e.survey_rad, self.cov_NEVs)

    def _get_the_func_out(self, err):
        """
        Return the function label for ``err``, preferring an entry in
        ``self.exceptional_funcs`` when present.

        NOTE(review): ``exceptional_funcs`` is never defined in this
        module chunk and this method is currently unused (its only call
        site is commented out) -- confirm before relying on it.
        """
        if err in self.exceptional_funcs:
            func = self.exceptional_funcs[err]
        else:
            func = self.em['codes'][err]['function']
        return func

    def call_func(self, code, func, error, mag, propagation, **kwargs):
        """
        Function for calling functions by mapping function labels to their
        functions.
        """
        assert func in self.func_dict, f"no function for function {func}"
        return self.func_dict[func](code, error, mag, propagation, **kwargs)

    def _initiate_func_dict(self):
        """
        This dictionary will need to be updated if/when additional error
        functions are added to the model.
        """
        self.func_dict = {
            'ABXY_TI1': ABXY_TI1,
            'ABXY_TI2': ABXY_TI2,
            'ABZ': ABZ,
            'AMIL': AMIL,
            'ASXY_TI1': ASXY_TI1,
            'ASXY_TI2': ASXY_TI2,
            'ASXY_TI3': ASXY_TI3,
            'ASZ': ASZ,
            'DBH': DBH,
            'AZ': AZ,
            'DREF': DREF,
            'DSF': DSF,
            'DST': DST,
            'MBXY_TI1': MBXY_TI1,
            'MBXY_TI2': MBXY_TI2,
            'MBZ': MBZ,
            'MSXY_TI1': MSXY_TI1,
            'MSXY_TI2': MSXY_TI2,
            'MSXY_TI3': MSXY_TI3,
            'MSZ': MSZ,
            'SAG': SAG,
            'XYM1': XYM1,
            'XYM2': XYM2,
            'XYM3': XYM3,
            'XYM4': XYM4,
            'SAGE': SAGE,
            'XCL': XCL,  # requires an exception
            'XYM3L': XYM3L,  # looks like there's a mistake in the ISCWSA model
            'XYM4L': XYM4L,
            'XCLA': XCLA,
            'XCLH': XCLH,
            'XYM3E': XYM3E,  # Needs QAQC
            'XYM4E': XYM4E,  # Need QAQC
            'ASIXY_TI1': ASIXY_TI1,  # Needs QAQC
            'ASIXY_TI2': ASIXY_TI2,  # Needs QAQC
            'ASIXY_TI3': ASIXY_TI3,  # Needs QAQC
            'ABIXY_TI1': ABIXY_TI1,  # Needs QAQC
            'ABIXY_TI2': ABIXY_TI2,  # Needs QAQC
            'ABIZ': ABIZ,  # Needs QAQC
            'ASIZ': ASIZ,  # Needs QAQC
            'MBIXY_TI1': MBIXY_TI1,  # Needs QAQC
            'MBIXY_TI2': MBIXY_TI2,  # Needs QAQC
            'MDI': MDI,  # Needs QAQC
            'AXYZ_MIS': AXYZ_MIS,  # Needs QAQC
            'AXYZ_SF': AXYZ_SF,  # Needs QAQC
            'AXYZ_ZB': AXYZ_ZB,  # Needs QAQC
            'GXY_B1': GXY_B1,  # Needs QAQC
            'GXY_B2': GXY_B2,  # Needs QAQC
            'GXY_G1': GXY_G1,  # Needs QAQC
            'GXY_G4': GXY_G4,  # Needs QAQC
            'GXY_RN': GXY_RN,  # Needs QAQC
            'GXY_GD': GXY_GD,  # Needs QAQC
            'GXY_GRW': GXY_GRW,  # Needs QAQC
            'MFI': MFI,  # Needs QAQC
            'MSIXY_TI1': MSIXY_TI1,  # Needs QAQC
            # bug fix: TI2/TI3 previously pointed at MSIXY_TI1 (copy-paste)
            'MSIXY_TI2': MSIXY_TI2,  # Needs QAQC
            'MSIXY_TI3': MSIXY_TI3,  # Needs QAQC
            'AMID': AMID,  # Needs QAQC
            'CNA': CNA,  # Needs QAQC
            'CNI': CNI,  # Needs QAQC
        }
def _funky_denominator(error):
with np.errstate(divide='ignore', invalid='ignore'):
result = np.nan_to_num((
1 - sin(error.survey.inc_rad) ** 2
* sin(error.survey.azi_mag_rad) ** 2
),
# nan=1e-6,
# posinf=1.0,
# neginf=-1.0
)
# ACCURACY = 1e-6
# with np.errstate(divide='ignore', invalid='ignore'):
# coeff = np.nan_to_num(
# result / np.abs(result) * ACCURACY,
# nan=ACCURACY
# )
# result = np.where(np.abs(result) > ACCURACY, result, coeff)
return result
# error functions #
def DREF(code, error, mag=0.35, propagation='random', NEV=True, **kwargs):
    """Constant error applied to the depth (first) column only."""
    n_stations = len(error.survey_rad)
    # sensitivity of 1 on depth, 0 on inclination and azimuth
    e_DIA = np.tile([1., 0., 0.], (n_stations, 1)) * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def DSF(
    code, error, mag=0.00056, propagation='systematic', NEV=True, **kwargs
):
    """Depth error scaling linearly with the station's measured depth."""
    stations = np.array(error.survey_rad)
    # keep only the depth (first) column, weighted by the station values
    weights = np.tile([1., 0., 0.], (len(error.survey_rad), 1))
    e_DIA = weights * stations * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def DST(
    code, error, mag=0.00000025, propagation='systematic', NEV=True, **kwargs
):
    """Depth error growing with both TVD and measured depth."""
    stations = np.array(error.survey_rad)
    weights = np.zeros((len(error.survey_rad), 3))
    # depth column carries the station TVD as its sensitivity coefficient
    weights[:, 0] = error.survey.tvd
    e_DIA = weights * stations * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def ABIZ(
    code, error, mag=0.0040, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ABIZ: inclination and azimuth weighting functions, with
    the azimuth term divided by the shared near-singular denominator.
    """
    # denominator scaled by gravity and floored at the module-level
    # ACCURACY so near-singular stations stay finite
    denom = _funky_denominator(error) / error.survey.header.G
    denom = np.where(denom > ACCURACY, denom, ACCURACY)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = -sin(error.survey.inc_rad) / error.survey.header.G
    dpde[:, 2] = (
        sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad)
        * sin(error.survey.azi_mag_rad)
        * (
            tan(error.survey.header.dip)
            * cos(error.survey.inc_rad)
            + sin(error.survey.inc_rad) * cos(error.survey.azi_mag_rad)
        )
    ) / denom
    e_DIA = dpde * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def ABIXY_TI1(
    code, error, mag=0.0040, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ABIXY_TI1: the azimuth weighting divides by the shared
    near-singular denominator.

    NOTE(review): unlike ABIZ, the denominator is not floored at
    ACCURACY here -- confirm behavior near the singularity.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = -cos(error.survey.inc_rad) / error.survey.header.G
    dpde[:, 2] = (
        cos(error.survey.inc_rad) ** 2
        * sin(error.survey.azi_mag_rad)
        * (
            tan(error.survey.header.dip)
            * cos(error.survey.inc_rad)
            + sin(error.survey.inc_rad) * cos(error.survey.azi_mag_rad)
        )
    ) / (
        error.survey.header.G * (
            _funky_denominator(error)
        )
    )
    e_DIA = dpde * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def ABXY_TI1(
    code, error, mag=0.0040, propagation='systematic', NEV=True, **kwargs
):
    """Error term ABXY_TI1: inclination and azimuth weighting functions."""
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    grav = error.survey.header.G
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = -cos(inc) / grav
    dpde[:, 2] = (cos(inc) * tan(dip) * sin(azi)) / grav
    return error._generate_error(code, dpde * mag, propagation, NEV)
def ABIXY_TI2(
    code, error, mag=0.004, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ABIXY_TI2: azimuth weighting with special handling for
    near-vertical stations, where the formula is singular; those stations
    are replaced with finite-difference terms built from measured depths.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    # suppress divide warnings at vertical; singular stations are
    # overwritten below
    with np.errstate(divide='ignore', invalid='ignore'):
        dpde[:, 2] = np.nan_to_num(
            (
                -(
                    tan(error.survey.header.dip)
                    * cos(error.survey.azi_mag_rad)
                    - tan(
                        pi/2 - error.survey.inc_rad
                    )
                ) / (
                    error.survey.header.G
                    * (
                        _funky_denominator(error)
                    )
                )
            ),
            posinf=0.0,
            neginf=0.0
        )
    e_DIA = dpde * mag
    # indices of near-vertical (singular) stations
    sing = np.where(
        error.survey_rad[:, 1] < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # substitute NEV components at singular stations
        e_NEV = error._e_NEV(e_DIA)
        n = np.array(
            0.5 * error.drdp_sing['double_delta_md']
            * -sin(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        e = np.array(
            0.5 * error.drdp_sing['double_delta_md']
            * cos(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        v = np.zeros_like(n)
        e_NEV_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        # first interior station uses a depth-difference expression
        e_NEV_sing[1, 1] = (
            (
                error.survey.md[2]
                + error.survey.md[1]
                - 2 * error.survey.md[0]
            ) / 2
            * mag * cos(error.survey.azi_true_rad[1])
            / error.survey.header.G
        )
        e_NEV[sing] = e_NEV_sing[sing]
        # same substitution for the star (single-interval) terms
        e_NEV_star = error._e_NEV_star(e_DIA)
        n = np.array(
            0.5 * error.drdp_sing['delta_md']
            * -sin(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        e = np.array(
            0.5 * error.drdp_sing['delta_md']
            * cos(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        v = np.zeros_like(n)
        e_NEV_star_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV_star_sing[1, 1] = (
            (error.survey.md[1] - error.survey.md[0])
            * mag
            * (
                cos(error.survey.azi_true_rad[1])
                / error.survey.header.G
            )
        )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def ABXY_TI2(
    code, error, mag=0.004, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ABXY_TI2: azimuth weighting with special handling for
    near-vertical stations; unlike ABIXY_TI2 there is no singular
    denominator and the first-station substitution is skipped for
    'rev4' error models.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    with np.errstate(divide='ignore', invalid='ignore'):
        dpde[:, 2] = np.nan_to_num(
            (
                (
                    tan(-(error.survey_rad[:, 1]) + (pi/2))
                    - tan(error.survey.header.dip)
                    * cos(error.survey.azi_mag_rad)
                ) / error.survey.header.G
            ),
            posinf=0.0,
            neginf=0.0
        )
    e_DIA = dpde * mag
    # indices of near-vertical (singular) stations
    sing = np.where(
        error.survey_rad[:, 1] < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        e_NEV = error._e_NEV(e_DIA)
        n = np.array(
            0.5 * error.drdp_sing['double_delta_md']
            * -sin(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        e = np.array(
            0.5 * error.drdp_sing['double_delta_md']
            * cos(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        v = np.zeros_like(n)
        e_NEV_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        # rev4 models skip the first-station depth-difference correction
        if error.error_model.lower().split(' ')[-1] != 'rev4':
            e_NEV_sing[1, 1] = (
                (
                    error.survey.md[2]
                    + error.survey.md[1]
                    - 2 * error.survey.md[0]
                ) / 2
                * mag * cos(error.survey.azi_true_rad[1])
                / error.survey.header.G
            )
        e_NEV[sing] = e_NEV_sing[sing]
        e_NEV_star = error._e_NEV_star(e_DIA)
        n = np.array(
            0.5 * error.drdp_sing['delta_md']
            * -sin(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        e = np.array(
            0.5 * error.drdp_sing['delta_md']
            * cos(error.drdp_sing['azi2']) * mag
        ) / error.survey.header.G
        v = np.zeros_like(n)
        e_NEV_star_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        if error.error_model.lower().split(' ')[-1] != 'rev4':
            e_NEV_star_sing[1, 1] = (
                (error.survey.md[1] - error.survey.md[0])
                * mag
                * (
                    cos(error.survey.azi_true_rad[1])
                    / error.survey.header.G
                )
            )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def AMID(code, error, mag=0.04363323129985824, propagation='systematic',
    NEV=True, **kwargs
):
    """Azimuth-only error term proportional to sin(inc) * sin(azi_mag)."""
    coeffs = np.zeros((len(error.survey_rad), 3))
    coeffs[:, 2] = sin(error.survey.inc_rad) * sin(error.survey.azi_mag_rad)
    return error._generate_error(code, coeffs * mag, propagation, NEV)
def ABZ(code, error, mag=0.004, propagation='systematic', NEV=True, **kwargs):
    """Error term ABZ: inclination and azimuth weighting functions."""
    inc = np.array(error.survey_rad)[:, 1]
    grav = error.survey.header.G
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = -sin(inc) / grav
    dpde[:, 2] = (
        sin(inc)
        * tan(error.survey.header.dip) * sin(error.survey.azi_mag_rad)
    ) / grav
    return error._generate_error(code, dpde * mag, propagation, NEV)
def ASXY_TI1(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """Error term ASXY_TI1: sin*cos inclination weighting over sqrt(2)."""
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = (sin(inc) * cos(inc)) / sqrt(2)
    dpde[:, 2] = (sin(inc) * -tan(dip) * cos(inc) * sin(azi)) / sqrt(2)
    return error._generate_error(code, dpde * mag, propagation, NEV)
def ASIXY_TI1(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ASIXY_TI1: inclination term as in ASXY_TI1; the azimuth
    term divides by the shared near-singular denominator.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = (
        sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad)
        / sqrt(2)
    )
    dpde[:, 2] = -(
        sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad) ** 2
        * sin(error.survey.azi_mag_rad)
        * (
            tan(error.survey.header.dip)
            * cos(error.survey.inc_rad)
            + sin(error.survey.inc_rad)
            * cos(error.survey.azi_mag_rad)
        )
    ) / (
        sqrt(2) * _funky_denominator(error)
    )
    e_DIA = dpde * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def ASXY_TI2(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """Error term ASXY_TI2: sin*cos inclination weighting over 2."""
    inc = np.array(error.survey_rad)[:, 1]
    dip = error.survey.header.dip
    azi = error.survey.azi_mag_rad
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = sin(inc) * cos(inc) / 2
    dpde[:, 2] = (sin(inc) * -tan(dip) * cos(inc) * sin(azi)) / 2
    return error._generate_error(code, dpde * mag, propagation, NEV)
def ASIXY_TI2(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ASIXY_TI2: inclination term as in ASXY_TI2; the azimuth
    term divides by the shared near-singular denominator.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = (
        sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad)
        / 2
    )
    dpde[:, 2] = -(
        sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad) ** 2
        * sin(error.survey.azi_mag_rad)
        * (
            tan(error.survey.header.dip)
            * cos(error.survey.inc_rad)
            + sin(error.survey.inc_rad) * cos(error.survey.azi_mag_rad)
        )
    ) / (
        2 * _funky_denominator(error)
    )
    e_DIA = dpde * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def ASXY_TI3(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """Error term ASXY_TI3: azimuth-only weighting function."""
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (sin(inc) * tan(dip) * cos(azi) - cos(inc)) / 2
    return error._generate_error(code, dpde * mag, propagation, NEV)
def ASIXY_TI3(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ASIXY_TI3: azimuth-only weighting divided by the shared
    near-singular denominator.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        tan(error.survey.header.dip)
        * sin(error.survey.inc_rad)
        * cos(error.survey.azi_mag_rad)
        - cos(error.survey.inc_rad)
    ) / (
        2 * _funky_denominator(error)
    )
    e_DIA = dpde * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def ASZ(code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs):
    """Error term ASZ: inclination and azimuth weighting functions."""
    inc = np.array(error.survey_rad)[:, 1]
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = -sin(inc) * cos(inc)
    dpde[:, 2] = (
        sin(inc)
        * tan(error.survey.header.dip)
        * cos(inc)
        * sin(error.survey.azi_mag_rad)
    )
    return error._generate_error(code, dpde * mag, propagation, NEV)
def ASIZ(
    code, error, mag=0.0005, propagation='systematic', NEV=True, **kwargs
):
    """
    Error term ASIZ: inclination term as in ASZ; the azimuth term
    divides by the shared near-singular denominator.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = (
        -sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad)
    )
    dpde[:, 2] = (
        sin(error.survey.inc_rad)
        * cos(error.survey.inc_rad) ** 2
        * sin(error.survey.azi_mag_rad)
        * (
            tan(error.survey.header.dip)
            * cos(error.survey.inc_rad)
            + sin(error.survey.inc_rad)
            * cos(error.survey.azi_mag_rad)
        )
    ) / (
        _funky_denominator(error)
    )
    e_DIA = dpde * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def AXYZ_MIS(
    code, error, mag=0.0001658062789394613, propagation='systematic', NEV=True,
    **kwargs
):
    """
    SPE 90408 Table 1

    Inclination-column sensitivity equal to the station inclination.
    """
    stations = np.array(error.survey_rad)
    weights = np.tile([0., 1., 0.], (len(error.survey_rad), 1))
    result = error._generate_error(
        code, weights * stations * mag, propagation, NEV
    )
    return result
def AXYZ_SF(
    code, error, mag=0.000111, propagation='systematic', NEV=True,
    **kwargs
):
    """
    SPE 90408 Table 1

    Inclination sensitivity of 1.3 * sin(inc) * cos(inc).
    """
    inc = error.survey.inc_rad
    dpde = np.tile([0., 1., 0.], (len(error.survey_rad), 1))
    dpde[:, 1] = 1.3 * sin(inc) * cos(inc)
    result = error._generate_error(code, dpde * mag, propagation, NEV)
    return result
def AXYZ_ZB(
    code, error, mag=0.0017, propagation='systematic', NEV=True,
    **kwargs
):
    """
    SPE 90408 Table 1

    Inclination sensitivity of sin(inc) / G.
    """
    dpde = np.tile([0., 1., 0.], (len(error.survey_rad), 1))
    dpde[:, 1] = sin(error.survey.inc_rad) / error.survey.header.G
    result = error._generate_error(code, dpde * mag, propagation, NEV)
    return result
def _get_ref_init_error(dpde, error, **kwargs):
"""
Function that identifies where the continuous gyro begins, initiates and
then carries the static errors during the continuous modes.
"""
temp = [0.0]
for coeff, inc in zip(dpde[1:, 2], error.survey.inc_rad[1:]):
if inc > kwargs['header']['XY Static Gyro']['End Inc']:
temp.append(temp[-1])
else:
temp.append(coeff)
dpde[:, 2] = temp
return dpde
def CNA(
    code, error, mag=0.35, propagation='systematic', NEV=True,
    **kwargs
):
    """
    Error term CNA: azimuth error weighted by 1/sin(inc), with
    finite-difference substitution at near-vertical stations where that
    weight diverges.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 0.])
    with np.errstate(divide='ignore', invalid='ignore'):
        # 1/sin(inc) is infinite at vertical; clamp to +/-1 there
        dpde[:, 2] = np.nan_to_num(
            1 / sin(error.survey.inc_rad),
            posinf=1,
            neginf=-1
        )
    e_DIA = dpde * mag
    # indices of near-vertical (singular) stations
    sing = np.where(
        error.survey.inc_rad < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        e_NEV = error._e_NEV(e_DIA)
        n = (
            np.array(0.5 * error.drdp_sing['double_delta_md'])
            * -sin(getattr(
                error.survey, f"azi_{error.survey.header.azi_reference}_rad"
            )[1: -1])
            * mag
        )
        e = (
            np.array(0.5 * error.drdp_sing['double_delta_md'])
            * cos(getattr(
                error.survey, f"azi_{error.survey.header.azi_reference}_rad"
            )[1: -1])
            * mag
        )
        v = np.zeros_like(n)
        e_NEV_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV[sing] = e_NEV_sing[sing]
        e_NEV_star = error._e_NEV_star(e_DIA)
        n = (
            np.array(0.5 * error.drdp_sing['delta_md'])
            * -sin(getattr(
                error.survey, f"azi_{error.survey.header.azi_reference}_rad"
            )[1: -1])
            * mag
        )
        e = (
            np.array(0.5 * error.drdp_sing['delta_md'])
            * cos(getattr(
                error.survey, f"azi_{error.survey.header.azi_reference}_rad"
            )[1: -1])
            * mag
        )
        # NOTE(review): 'v' is reused from the block above (same shape);
        # the sibling ABIXY_TI2/ABXY_TI2 recompute it here -- confirm.
        e_NEV_star_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
    # result = error._generate_error(code, e_DIA, propagation, NEV)
    # return result
def CNI(
    code, error, mag=0.35, propagation='systematic', NEV=True,
    **kwargs
):
    """Constant error applied to the inclination (middle) column only."""
    n_stations = len(error.survey_rad)
    e_DIA = np.tile([0., 1., 0.], (n_stations, 1)) * mag
    return error._generate_error(code, e_DIA, propagation, NEV)
def GXY_B1(
    code, error, mag=0.002617993877991494, propagation='random',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 4

    Static XY gyro term: the weighting is applied only while inclination
    is at or below the static gyro 'End Inc'; beyond that the last static
    coefficient is carried forward by ``_get_ref_init_error``.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    dpde[:, 2] = np.where(
        error.survey.inc_rad <= kwargs['header']['XY Static Gyro']['End Inc'],
        sin(error.survey.azi_true_rad)
        / (
            error.survey.header.earth_rate
            * cos(np.radians(error.survey.header.latitude))
            * cos(error.survey.inc_rad)
        ),
        np.zeros_like(error.survey.md)
    )
    dpde = _get_ref_init_error(dpde, error, **kwargs)
    e_DIA = dpde * mag
    result = error._generate_error(code, e_DIA, propagation, NEV)
    return result
def GXY_B2(
    code, error, mag=0.002617993877991494, propagation='random',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 4

    Static XY gyro term; as GXY_B1 but weighted by cos(azi_true) and
    without the cos(inc) factor in the denominator.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    dpde[:, 2] = np.where(
        error.survey.inc_rad <= kwargs['header']['XY Static Gyro']['End Inc'],
        cos(error.survey.azi_true_rad)
        / (
            error.survey.header.earth_rate
            * cos(np.radians(error.survey.header.latitude))
        ),
        np.zeros_like(error.survey.md)
    )
    dpde = _get_ref_init_error(dpde, error, **kwargs)
    e_DIA = dpde * mag
    result = error._generate_error(code, e_DIA, propagation, NEV)
    return result
def GXY_G1(
    code, error, mag=0.006981317007977318, propagation='systematic',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 4

    Static XY gyro term weighted by cos(azi_true) * sin(inc); coefficient
    carried forward through continuous mode by ``_get_ref_init_error``.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    dpde[:, 2] = np.where(
        error.survey.inc_rad <= kwargs['header']['XY Static Gyro']['End Inc'],
        cos(error.survey.azi_true_rad) * sin(error.survey.inc_rad)
        / (
            error.survey.header.earth_rate
            * cos(np.radians(error.survey.header.latitude))
        ),
        np.zeros_like(error.survey.md)
    )
    dpde = _get_ref_init_error(dpde, error, **kwargs)
    e_DIA = dpde * mag
    result = error._generate_error(code, e_DIA, propagation, NEV)
    return result
def GXY_G4(
    code, error, mag=0.010471975511965976, propagation='systematic',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 4

    Static XY gyro term weighted by sin(azi_true) * tan(inc); coefficient
    carried forward through continuous mode by ``_get_ref_init_error``.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    dpde[:, 2] = np.where(
        error.survey.inc_rad <= kwargs['header']['XY Static Gyro']['End Inc'],
        sin(error.survey.azi_true_rad) * tan(error.survey.inc_rad)
        / (
            error.survey.header.earth_rate
            * cos(np.radians(error.survey.header.latitude))
        ),
        np.zeros_like(error.survey.md)
    )
    dpde = _get_ref_init_error(dpde, error, **kwargs)
    e_DIA = dpde * mag
    result = error._generate_error(code, e_DIA, propagation, NEV)
    return result
def GXY_RN(
    code, error, mag=0.006981317007977318, propagation='random',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 4

    Static XY gyro random-noise term. Continuous-mode (high inclination)
    stations contribute a separate systematic component scaled by the
    'Noise Reduction Factor'; the two covariances are summed.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    dpde[:, 2] = np.where(
        error.survey.inc_rad <= kwargs['header']['XY Static Gyro']['End Inc'],
        1.0
        * (
            np.sqrt(
                1 - cos(error.survey.azi_true_rad) ** 2
                * sin(error.survey.inc_rad) ** 2
            )
            / (
                error.survey.header.earth_rate
                * cos(np.radians(error.survey.header.latitude))
                * cos(error.survey.inc_rad)
            )
        ),
        np.zeros_like(error.survey.md)
    )
    dpde = _get_ref_init_error(dpde, error, **kwargs)
    # split continuous-mode stations into a systematic contribution
    dpde_systematic = np.zeros_like(dpde)
    index_systematic = np.where(
        error.survey.inc_rad > kwargs['header']['XY Static Gyro']['End Inc']
    )
    np.put(
        dpde_systematic[:, 2],
        index_systematic,
        (
            dpde[index_systematic][:, 2]
            * kwargs['header']['Noise Reduction Factor']
        )
    )
    e_DIA_systematic = dpde_systematic * mag
    result_systematic = error._generate_error(
        code, e_DIA_systematic, 'systematic', NEV
    )
    # zero those stations out of the random contribution
    # NOTE(review): len(index_systematic) is the number of index arrays
    # (i.e. 1), not the station count; np.put broadcasts the single zero
    # across all selected indices -- works, but confirm this is intended.
    np.put(
        dpde[:, 2],
        index_systematic,
        np.zeros(len(index_systematic))
    )
    # dpde[:, 2] = np.where(
    #     error.survey.inc_rad > kwargs['header']['XY Static Gyro']['End Inc'],
    #     dpde[:, 2],
    #     dpde[:, 2] * kwargs['header']['Noise Reduction Factor'],
    # )
    e_DIA = dpde * mag
    result = error._generate_error(code, e_DIA, propagation, NEV)
    # combine random and systematic covariance contributions
    result.cov_NEV += result_systematic.cov_NEV
    return result
def GXY_GD(
    code, error, mag=0.008726646259971648, propagation='systematic',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 7

    Continuous XY gyro drift term: per-interval weight proportional to
    interval depth over running speed, accumulated along continuous-mode
    stations.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    with np.errstate(divide='ignore', invalid='ignore'):
        dpde[:, 2] = np.where(
            error.survey.inc_rad > kwargs['header']['XY Static Gyro']['End Inc'],
            np.append(
                np.array([0]),
                (
                    (error.survey.md[1:] - error.survey.md[:-1])
                    / (
                        float(
                            kwargs['header']['XY Continuous Gyro']['Running Speed'].split()[0]
                        )
                        * sin(
                            (error.survey.inc_rad[1:] + error.survey.inc_rad[:-1])
                            / 2
                        )
                    )
                )
            ),
            np.zeros_like(error.survey.md)
        )
    # initialization error carried over from the stationary tools at the
    # static -> continuous transition
    init_error = []
    for i, (u, l) in enumerate(zip(
        error.survey.inc_rad[1:], error.survey.inc_rad[:-1]
    )):
        init_error.append(0.0)
        if all((
            u > kwargs['header']['XY Static Gyro']['End Inc'],
            l <= kwargs['header']['XY Static Gyro']['End Inc']
        )):
            for tool in kwargs['errors'].gyro_stationary:
                temp = kwargs['errors'].errors[tool].e_DIA[i - 1][2]
                # NOTE(review): stationary tool codes elsewhere use hyphens
                # ('GXY-RN'); this underscore spelling may never match.
                if tool in ['GXY_RN']:
                    temp *= kwargs['header']['Noise Reduction Factor']
                init_error[-1] += temp
    # cumulative drift along continuous-mode stations
    # NOTE(review): 'e' (init_error) is unpacked but never used in this
    # accumulation -- confirm whether it should seed 'temp'.
    temp = [0.0]
    for i, (u, e) in enumerate(zip(dpde[1:, 2], init_error)):
        temp.append(0.0)
        if u != 0.0:
            temp[-1] += temp[-2] + u * mag
    dpde[:, 2] = temp
    # magnitude already applied during accumulation above
    e_DIA = dpde
    result = error._generate_error(code, e_DIA, propagation, NEV)
    return result
def GXY_GRW(
    code, error, mag=0.004363323129985824, propagation='systematic',
    NEV=True, **kwargs
):
    """
    SPE 90408 Table 7

    Continuous XY gyro random-walk term: as GXY_GD but with a squared
    sine in the interval weight and a root-sum-square accumulation.
    """
    dpde = np.full((len(error.survey_rad), 3), [0., 0., 1.])
    with np.errstate(divide='ignore', invalid='ignore'):
        dpde[:, 2] = np.where(
            error.survey.inc_rad > kwargs['header']['XY Static Gyro']['End Inc'],
            np.append(
                np.array([0]),
                (error.survey.md[1:] - error.survey.md[:-1])
                / (
                    float(
                        kwargs['header']['XY Continuous Gyro']['Running Speed'].split()[0]
                    )
                    * sin(
                        (error.survey.inc_rad[1:] + error.survey.inc_rad[:-1])
                        / 2
                    ) ** 2
                )
            ),
            np.zeros_like(error.survey.md)
        )
    # initialization error carried over from the stationary tools at the
    # static -> continuous transition
    init_error = []
    for i, (u, l) in enumerate(zip(
        error.survey.inc_rad[1:], error.survey.inc_rad[:-1]
    )):
        init_error.append(0.0)
        if all((
            u > kwargs['header']['XY Static Gyro']['End Inc'],
            l <= kwargs['header']['XY Static Gyro']['End Inc']
        )):
            for tool in kwargs['errors'].gyro_stationary:
                temp = kwargs['errors'].errors[tool].e_DIA[i - 1][2]
                # NOTE(review): stationary tool codes elsewhere use hyphens
                # ('GXY-RN'); this underscore spelling may never match.
                if tool in ['GXY_RN']:
                    temp *= kwargs['header']['Noise Reduction Factor']
                init_error[-1] += temp
    # root-sum-square accumulation along continuous-mode stations
    # NOTE(review): 'e' (init_error) is unpacked but never used below --
    # confirm whether it should seed 'temp'.
    temp = [0.0]
    for i, (u, e) in enumerate(zip(dpde[1:, 2], init_error)):
        temp.append(0.0)
        if u != 0.0:
            temp[-1] += np.sqrt(temp[-2] ** 2 + u * mag)
    dpde[:, 2] = temp
    # magnitude already applied during accumulation above
    e_DIA = dpde
    result = error._generate_error(code, e_DIA, propagation, NEV)
    return result
def MBXY_TI1(
    code, error, mag=70.0, propagation='systematic', NEV=True, **kwargs
):
    """
    XY magnetometer bias (toolface-independent, term 1) weighting
    function: populates only the azimuth column of the (depth, inc, azi)
    weighting matrix.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    # Horizontal component of the total magnetic field.
    b_horizontal = error.survey.header.b_total * cos(error.survey.header.dip)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = -cos(inc) * sin(azi) / b_horizontal
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MBIXY_TI1(
    code, error, mag=70.0, propagation='systematic', NEV=True, **kwargs
):
    """
    Interference-corrected variant of MBXY_TI1: same azimuth weighting,
    additionally divided by the shared correction factor returned by
    ``_funky_denominator``.
    """
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    denominator = (
        error.survey.header.b_total
        * cos(error.survey.header.dip)
        * _funky_denominator(error)
    )
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = -cos(inc) * sin(azi) / denominator
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MBXY_TI2(
    code, error, mag=70.0, propagation='systematic', NEV=True, **kwargs
):
    """
    XY magnetometer bias (toolface-independent, term 2) weighting
    function: azimuth column only, scaled by the horizontal field.
    """
    b_horizontal = error.survey.header.b_total * cos(error.survey.header.dip)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = cos(error.survey.azi_mag_rad) / b_horizontal
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MBIXY_TI2(
    code, error, mag=70.0, propagation='systematic', NEV=True, **kwargs
):
    """
    Interference-corrected variant of MBXY_TI2: azimuth weighting divided
    by the horizontal field and the shared ``_funky_denominator`` factor.
    """
    denominator = (
        error.survey.header.b_total
        * cos(error.survey.header.dip)
        * _funky_denominator(error)
    )
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = cos(error.survey.azi_mag_rad) / denominator
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MBZ(code, error, mag=70.0, propagation='systematic', NEV=True, **kwargs):
    """
    Z-axis magnetometer bias weighting function: azimuth column only,
    scaled by the horizontal field component.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    b_horizontal = error.survey.header.b_total * cos(error.survey.header.dip)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = -(sin(inc) * sin(azi)) / b_horizontal
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MFI(
    code, error, mag=70, propagation='systematic', NEV=True, **kwargs
):
    """
    Magnetic field intensity error weighting function: azimuth column
    only, normalized by the total field and the shared
    ``_funky_denominator`` correction factor.
    """
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    numerator = -sin(inc) * sin(azi) * (
        tan(dip) * cos(inc) + sin(inc) * cos(azi)
    )
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        numerator
        / _funky_denominator(error)
        / error.survey.header.b_total
    )
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSXY_TI1(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    XY magnetometer scale-factor (toolface-independent, term 1)
    weighting function: azimuth column only.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        sin(inc) * sin(azi)
        * (tan(dip) * cos(inc) + sin(inc) * cos(azi))
        / sqrt(2)
    )
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSXY_TI2(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    XY magnetometer scale-factor (toolface-independent, term 2)
    weighting function: azimuth column only.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = sin(azi) * (
        tan(dip) * sin(inc) * cos(inc)
        - cos(inc) ** 2 * cos(azi)
        - cos(azi)
    ) / 2
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSXY_TI3(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    XY magnetometer scale-factor (toolface-independent, term 3)
    weighting function: azimuth column only.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        cos(inc) * cos(azi) ** 2
        - cos(inc) * sin(azi) ** 2
        - tan(dip) * sin(inc) * cos(azi)
    ) / 2
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSIXY_TI1(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    Interference-corrected variant of MSXY_TI1: same azimuth weighting,
    additionally divided by the ``_funky_denominator`` correction factor.
    """
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        sin(inc) * sin(azi)
        * (tan(dip) * cos(inc) + sin(inc) * cos(azi))
        / (sqrt(2) * _funky_denominator(error))
    )
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSIXY_TI2(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    Interference-corrected variant of MSXY_TI2: same azimuth weighting,
    additionally divided by the ``_funky_denominator`` correction factor.
    """
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = sin(azi) * (
        tan(dip) * sin(inc) * cos(inc)
        - cos(inc) ** 2 * cos(azi)
        - cos(azi)
    ) / (2 * _funky_denominator(error))
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSIXY_TI3(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    Interference-corrected variant of MSXY_TI3: same azimuth weighting,
    additionally divided by the ``_funky_denominator`` correction factor.
    """
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        cos(inc) * cos(azi) ** 2
        - cos(inc) * sin(azi) ** 2
        - tan(dip) * sin(inc) * cos(azi)
    ) / (2 * _funky_denominator(error))
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MSZ(
    code, error, mag=0.0016, propagation='systematic', NEV=True, **kwargs
):
    """
    Z-axis magnetometer scale-factor weighting function: azimuth column
    only.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = -(
        sin(inc) * cos(azi) + tan(dip) * cos(inc)
    ) * sin(inc) * sin(azi)
    return error._generate_error(code, dpde * mag, propagation, NEV)
def AZ(code, error, mag=0.00628, propagation='systematic', NEV=True, **kwargs):
    """Constant azimuth reference error: unit weight on the azimuth column."""
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = 1.0
    return error._generate_error(code, dpde * mag, propagation, NEV)
def DBH(
    code, error, mag=np.radians(0.09), propagation='systematic', NEV=True,
    **kwargs
):
    """
    Declination error dependent on the horizontal magnetic field:
    azimuth weight is the reciprocal of B_total * cos(dip).
    """
    b_horizontal = error.survey.header.b_total * cos(error.survey.header.dip)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = 1.0 / b_horizontal
    return error._generate_error(code, dpde * mag, propagation, NEV)
def MDI(
    code, error, mag=np.radians(5000), propagation='systematic', NEV=True,
    **kwargs
):
    """
    Magnetic dip error (interference-corrected) weighting function:
    azimuth column only, divided by the ``_funky_denominator`` factor.
    """
    inc = error.survey.inc_rad
    azi = error.survey.azi_mag_rad
    dip = error.survey.header.dip
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = (
        -sin(inc) * sin(azi)
        * (cos(inc) - tan(dip) * sin(inc) * cos(azi))
        / _funky_denominator(error)
    )
    return error._generate_error(code, dpde * mag, propagation, NEV)
def DBHR(
    code, error, mag=np.radians(3000), propagation='random', NEV=True, **kwargs
):
    """
    Random counterpart of DBH: azimuth weight is the reciprocal of the
    horizontal magnetic field, propagated randomly.
    """
    b_horizontal = error.survey.header.b_total * cos(error.survey.header.dip)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = 1.0 / b_horizontal
    return error._generate_error(code, dpde * mag, propagation, NEV)
def AMIL(code, error, mag=220.0, propagation='systematic', NEV=True, **kwargs):
    """
    Axial magnetic interference weighting function: azimuth column only,
    normalized by the horizontal magnetic field.
    """
    inc = np.array(error.survey_rad)[:, 1]
    azi = error.survey.azi_mag_rad
    b_horizontal = error.survey.header.b_total * cos(error.survey.header.dip)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = -sin(inc) * sin(azi) / b_horizontal
    return error._generate_error(code, dpde * mag, propagation, NEV)
def SAG(
    code, error, mag=0.00349, propagation='systematic', NEV=True, **kwargs
):
    """BHA sag weighting function: inclination column scales with sin(inc)."""
    inc = np.array(error.survey_rad)[:, 1]
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = sin(inc)
    return error._generate_error(code, dpde * mag, propagation, NEV)
def SAGE(
    code, error, mag=0.00175, propagation='systematic', NEV=True, **kwargs
):
    """
    Sag correction residual weighting function: inclination column scales
    with sin(inc) ** 0.25.
    """
    inc = np.array(error.survey.inc_rad)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = sin(inc) ** 0.25
    return error._generate_error(code, dpde * mag, propagation, NEV)
def XYM1(
    code, error, mag=0.00175, propagation='systematic', NEV=True, **kwargs
):
    """
    XY misalignment (term 1) weighting function: inclination column
    scales with |sin(inc)|.
    """
    inc = np.array(error.survey.inc_rad)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = np.abs(sin(inc))
    return error._generate_error(code, dpde * mag, propagation, NEV)
def XYM2(
    code, error, mag=0.00175, propagation='systematic', NEV=True, **kwargs
):
    """
    XY misalignment (term 2) weighting function: constant -1 weight on
    the azimuth column.
    """
    # The rev5 model tab lists a different propagation mode for this term;
    # it is deliberately forced to systematic regardless of the argument.
    propagation = 'systematic'
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 2] = -1.0
    return error._generate_error(code, dpde * mag, propagation, NEV)
def XYM3(
    code, error, mag=0.00175, propagation='systematic', NEV=True, **kwargs
):
    """
    XY misalignment (term 3) weighting function with special handling of
    near-vertical stations, where azimuth weighting (division by
    sin(inc)) is singular: for those stations the NEV-space errors are
    replaced with a direct north-component term built from the
    measured-depth intervals.
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = (
        np.absolute(cos(np.array(error.survey_rad)[:, 1]))
        * cos(error.survey.azi_true_rad)
    )
    with np.errstate(divide='ignore', invalid='ignore'):
        # sin(inc) -> 0 near vertical; infinities are clamped to zero and
        # those stations are patched below.
        dpde[:, 2] = np.nan_to_num(
            -(
                np.absolute(cos(np.array(error.survey_rad)[:, 1]))
                * sin(error.survey.azi_true_rad)
            ) / sin(np.array(error.survey_rad)[:, 1]),
            posinf=0.0,
            neginf=0.0
        )
    e_DIA = dpde * mag
    # Indices of stations below the vertical inclination limit.
    sing = np.where(
        error.survey_rad[:, 1] < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # Substitute a north-only error at singular stations, sized from
        # the double measured-depth interval (interior stations are padded
        # with zero rows at both ends to align with the survey).
        e_NEV = error._e_NEV(e_DIA)
        n = np.array(0.5 * error.drdp_sing['double_delta_md'] * mag)
        e = np.zeros(len(error.drdp_sing['double_delta_md']))
        v = np.zeros_like(n)
        e_NEV_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV[sing] = e_NEV_sing[sing]
        # Same substitution for the "star" (interval-end) errors, sized
        # from the single measured-depth interval.
        e_NEV_star = error._e_NEV_star(e_DIA)
        n = np.array(0.5 * error.drdp_sing['delta_md'] * mag)
        e = np.zeros(len(error.drdp_sing['delta_md']))
        v = np.zeros_like(n)
        e_NEV_star_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def XYM3E(code, error, mag=0.00524, propagation='random', NEV=True, **kwargs):
    """
    Randomly-propagated variant of XYM3. Interval weights are boosted by
    a coefficient max(1, sqrt(10 / delta_md)) so that short survey
    intervals are not under-weighted, and singular (near-vertical)
    stations have all but the north component of their NEV errors zeroed.

    Fix: removed the unreachable trailing ``return`` — both branches of
    the preceding if/else already return.
    """
    # Coefficient per station: 1, or larger for intervals shorter than 10.
    coeff = np.ones(len(error.survey.md))
    coeff[1:-1] = np.amax(np.stack((
        coeff[1:-1],
        sqrt(
            10 / error.drdp_sing['delta_md']
        )
    ), axis=-1), axis=-1)
    coeff[-1] = np.amax(np.stack((
        coeff[-1],
        sqrt(
            10 / (error.survey.md[-1] - error.survey.md[-2])
        )
    ), axis=-1), axis=-1)
    dpde = np.zeros((len(error.survey.md), 3))
    dpde[1:, 1] = np.absolute(
        cos(error.survey.inc_rad[1:])
        * cos(error.survey.azi_true_rad[1:])
        * coeff[1:]
    )
    with np.errstate(divide='ignore', invalid='ignore'):
        # Azimuth weighting is singular near vertical (division by
        # sin(inc)); those stations are overwritten with the bare
        # coefficient just below.
        dpde[1:, 2] = (
            (
                -np.absolute(cos(error.survey.inc_rad[1:]))
                * sin(error.survey.azi_true_rad[1:])
                / sin(error.survey.inc_rad[1:])
            )
            * coeff[1:]
        )
    dpde[1:, 2] = np.where(
        error.survey.inc_rad[1:] < error.survey.header.vertical_inc_limit,
        coeff[1:],
        dpde[1:, 2]
    )
    e_DIA = dpde * mag
    sing = np.where(
        error.survey.inc_rad < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # At singular stations keep only the north component of the
        # NEV-space errors; east and vertical are zeroed.
        e_NEV = error._e_NEV(e_DIA)
        e_NEV_sing = np.zeros_like(e_NEV)
        e_NEV_sing[:, 0] = e_NEV[:, 0]
        e_NEV[sing] = e_NEV_sing[sing]
        e_NEV_star = error._e_NEV_star(e_DIA)
        e_NEV_star_sing = np.zeros_like(e_NEV_star)
        e_NEV_star_sing[:, 0] = e_NEV_star[:, 0]
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def XYM4(
    code, error, mag=0.00175, propagation='systematic', NEV=True, **kwargs
):
    """
    XY misalignment (term 4) weighting function with special handling of
    near-vertical stations: where azimuth weighting (division by
    sin(inc)) is singular, the NEV-space errors are replaced with a
    direct east-component term built from the measured-depth intervals
    (the east-axis counterpart of XYM3).
    """
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[:, 1] = np.absolute(
        cos(np.array(error.survey_rad)[:, 1])
    ) * sin(error.survey.azi_true_rad)
    with np.errstate(divide='ignore', invalid='ignore'):
        # sin(inc) -> 0 near vertical; infinities are clamped to zero and
        # those stations are patched below.
        dpde[:, 2] = np.nan_to_num(
            (
                np.absolute(np.cos(np.array(error.survey_rad)[:, 1]))
                * cos(error.survey.azi_true_rad)
            )
            / sin(np.array(error.survey_rad)[:, 1]),
            posinf=0.0,
            neginf=0.0
        )
    e_DIA = dpde * mag
    # Indices of stations below the vertical inclination limit.
    sing = np.where(
        error.survey_rad[:, 1] < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # Substitute an east-only error at singular stations, sized from
        # the double measured-depth interval (zero rows pad both ends to
        # align with the survey stations).
        e_NEV = error._e_NEV(e_DIA)
        n = np.zeros(len(error.drdp_sing['double_delta_md']))
        e = np.array(0.5 * error.drdp_sing['double_delta_md'] * mag)
        v = np.zeros_like(n)
        e_NEV_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV[sing] = e_NEV_sing[sing]
        # Same substitution for the "star" (interval-end) errors, sized
        # from the single measured-depth interval.
        e_NEV_star = error._e_NEV_star(e_DIA)
        n = np.zeros(len(error.drdp_sing['delta_md']))
        e = np.array(0.5 * error.drdp_sing['delta_md'] * mag)
        v = np.zeros_like(n)
        e_NEV_star_sing = np.vstack(
            (
                np.zeros((1, 3)),
                np.stack((n, e, v), axis=-1),
                np.zeros((1, 3))
            )
        )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def XYM4E(code, error, mag=0.00524, propagation='random', NEV=True, **kwargs):
    """
    Randomly-propagated variant of XYM4. Interval weights are boosted by
    a coefficient max(1, sqrt(10 / delta_md)) so that short survey
    intervals are not under-weighted. Near-vertical (singular) stations
    borrow their substitute NEV errors from XYM3E's north component,
    placed on the east axis.
    """
    # Coefficient per station: 1, or larger for intervals shorter than 10.
    coeff = np.ones(len(error.survey.md))
    coeff[1:-1] = np.amax(np.stack((
        coeff[1:-1],
        sqrt(
            10 / error.drdp_sing['delta_md']
        )
    ), axis=-1), axis=-1)
    coeff[-1] = np.amax(np.stack((
        coeff[-1],
        sqrt(
            10 / (error.survey.md[-1] - error.survey.md[-2])
        )
    ), axis=-1), axis=-1)
    dpde = np.zeros((len(error.survey.md), 3))
    dpde[1:, 1] = (
        cos(error.survey.inc_rad[1:])
        * sin(error.survey.azi_true_rad[1:])
        * coeff[1:]
    )
    with np.errstate(divide='ignore', invalid='ignore'):
        # Azimuth weighting is singular near vertical (division by
        # sin(inc)); infinities are clamped to zero.
        dpde[1:, 2] = np.nan_to_num(
            (
                (
                    cos(error.survey.inc_rad[1:])
                    * cos(error.survey.azi_true_rad[1:])
                    / sin(error.survey.inc_rad[1:])
                )
                * coeff[1:]
            ),
            posinf=0,
            neginf=0
        )
    e_DIA = dpde * mag
    sing = np.where(
        error.survey.inc_rad < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # this is a bit of a cop out way of handling these exceptions, but it's
        # simple and it works...
        # Reuse XYM3E's singular-station handling: its north-axis values
        # become this term's east-axis values.
        xym3e = XYM3E(
            code, error, mag=mag, propagation=propagation, NEV=NEV
        )
        e_NEV = error._e_NEV(e_DIA)
        e_NEV_sing = np.zeros_like(e_NEV)
        e_NEV_sing[:, 1] = xym3e.e_NEV[:, 0]
        e_NEV[sing] = e_NEV_sing[sing]
        e_NEV_star = error._e_NEV_star(e_DIA)
        e_NEV_star_sing = np.zeros_like(e_NEV_star)
        e_NEV_star_sing[:, 1] = xym3e.e_NEV_star[:, 0]
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def XCL(code, error, mag=0.0167, propagation='random', NEV=True, **kwargs):
    """
    Dummy function to manage the ISCWSA workbook not correctly defining
    the weighting functions: dispatches to XCLA for code "XCLA" and to
    XCLH otherwise.
    """
    handler = XCLA if code == "XCLA" else XCLH
    return handler(
        code, error, mag=mag, propagation=propagation, NEV=NEV,
        tortuosity=kwargs['tortuosity']
    )
def XCLA(code, error, mag=0.167, propagation='random', NEV=True, **kwargs):
    """
    Borehole tortuosity term (azimuth variant): per-interval error is the
    larger of the observed azimuth change (scaled by sin(inc)) and an
    assumed tortuosity per unit measured depth, resolved onto the
    north/east axes. Requires ``kwargs['tortuosity']``.

    Improvement: the interval magnitude was previously recomputed once
    per dpde column; it is now computed once.
    """
    delta_md = error.survey.md[1:] - error.survey.md[:-1]

    def manage_sing(error, kwargs):
        # Smallest signed angular change between consecutive azimuths
        # (wrapped into [-pi, pi)), scaled by sin(inc); stations whose
        # lower station is below the vertical inclination limit contribute
        # zero, since azimuth is ill-defined near vertical.
        temp = np.absolute(
            sin(error.survey.inc_rad[1:])
            * (((
                error.survey.azi_true_rad[1:]
                - error.survey.azi_true_rad[:-1]
                + pi
            ) % (2 * pi)) - pi)
        )
        temp[np.where(
            error.survey.inc_rad[:-1] < error.survey.header.vertical_inc_limit
        )] = 0
        return temp

    # Hoisted common factor: delta_md * max(observed change, assumed
    # tortuosity * delta_md), elementwise per interval.
    scale = delta_md * np.amax(np.stack((
        manage_sing(error, kwargs),
        kwargs['tortuosity'] * delta_md
    ), axis=-1), axis=-1)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[1:, 0] = scale * -sin(error.survey.azi_true_rad[1:])
    dpde[1:, 1] = scale * cos(error.survey.azi_true_rad[1:])
    e_DIA = dpde * mag
    # e_DIA already represents NEV components for this term.
    return error._generate_error(
        code, e_DIA, propagation, NEV, e_NEV=e_DIA, e_NEV_star=e_DIA
    )
def XCLH(code, error, mag=0.0167, propagation='random', NEV=True, **kwargs):
    """
    Borehole tortuosity term (inclination variant): per-interval error is
    the larger of the observed inclination change and an assumed
    tortuosity per unit measured depth, resolved onto the
    north/east/vertical axes. Requires ``kwargs['tortuosity']``.

    Improvement: the interval magnitude was previously recomputed once
    per dpde column (three times); it is now computed once.
    """
    delta_md = error.survey.md[1:] - error.survey.md[:-1]
    # Hoisted common factor: delta_md * max(|delta inc|, assumed
    # tortuosity * delta_md), elementwise per interval.
    scale = delta_md * np.amax(np.stack((
        np.absolute(error.survey.inc_rad[1:] - error.survey.inc_rad[:-1]),
        kwargs['tortuosity'] * delta_md
    ), axis=-1), axis=-1)
    inc = error.survey.inc_rad[1:]
    azi = error.survey.azi_true_rad[1:]
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[1:, 0] = scale * cos(inc) * cos(azi)
    dpde[1:, 1] = scale * cos(inc) * sin(azi)
    dpde[1:, 2] = scale * -sin(inc)
    e_DIA = dpde * mag
    # e_DIA already represents NEV components for this term.
    return error._generate_error(
        code, e_DIA, propagation, NEV, e_NEV=e_DIA, e_NEV_star=e_DIA
    )
def XYM3L(code, error, mag=0.0167, propagation='random', NEV=True, **kwargs):
    """
    Long-interval variant of XYM3 with random propagation. Interval
    weights are boosted by a per-interval coefficient
    max(1, sqrt(10 / delta_md)), the first station copies the second
    station's weights, and near-vertical (singular) stations get
    substitute north-component NEV errors built from measured-depth
    intervals.
    """
    # Per-interval coefficient (length = n_stations - 1): 1, or larger
    # for intervals shorter than 10.
    coeff = np.ones(len(error.survey.md) - 1)
    coeff = np.amax(np.stack((
        coeff,
        sqrt(
            10 / (error.survey.md[1:] - error.survey.md[:-1])
        )
    ), axis=-1), axis=-1)
    dpde = np.zeros((len(error.survey_rad), 3))
    dpde[1:, 1] = np.absolute(
        cos(error.survey.inc_rad[1:])
        * cos(error.survey.azi_true_rad[1:])
        * coeff
    )
    # First station has no preceding interval; reuse the first interval's
    # weight.
    dpde[0, 1] = dpde[1, 1]
    with np.errstate(divide='ignore', invalid='ignore'):
        # Azimuth weighting is singular near vertical (division by
        # sin(inc)); infinities are clamped to zero and singular stations
        # are patched below.
        dpde[1:, 2] = np.nan_to_num(
            (
                -np.absolute(
                    cos(error.survey.inc_rad[1:])
                )
                * (
                    sin(error.survey.azi_true_rad[1:])
                    / sin(error.survey.inc_rad[1:])
                )
                * coeff
            ),
            posinf=0,
            neginf=0
        )
    dpde[0, 2] = dpde[1, 2]
    e_DIA = dpde * mag
    # Indices of stations below the vertical inclination limit.
    sing = np.where(
        error.survey_rad[:, 1] < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # Substitute north-only errors at singular stations: interior
        # stations use half the double measured-depth interval, the
        # second and last stations get dedicated boundary formulas.
        e_NEV = error._e_NEV(e_DIA)
        e_NEV_sing = np.zeros_like(e_NEV)
        e_NEV_sing[1:-1, 0] = (
            coeff[:-1]
            * (
                error.survey.md[2:]
                - error.survey.md[:-2]
            ) / 2
            * mag
        )
        e_NEV_sing[1, 0] = (
            coeff[1]
            * (
                error.survey.md[2] + error.survey.md[1]
                - 2 * error.survey.md[0]
            ) / 2
            * mag
        )
        e_NEV_sing[-1, 0] = (
            coeff[-1]
            * (
                error.survey.md[-1]
                - error.survey.md[-2]
            ) / 2
            * mag
        )
        e_NEV[sing] = e_NEV_sing[sing]
        # "Star" (interval-end) errors use half the single interval.
        e_NEV_star = error._e_NEV_star(e_DIA)
        e_NEV_star_sing = np.zeros_like(e_NEV)
        e_NEV_star_sing[1:, 0] = (
            (
                error.survey.md[1:]
                - error.survey.md[:-1]
            ) / 2
            * mag
        )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
def XYM4L(code, error, mag=0.0167, propagation='random', NEV=True, **kwargs):
    """
    Long-interval variant of XYM4 (east-axis counterpart of XYM3L) with
    random propagation. Interval weights are boosted by a per-station
    coefficient max(1, sqrt(10 / delta_md)); near-vertical (singular)
    stations get substitute east-component NEV errors built from
    measured-depth intervals.
    """
    # Propagation is deliberately forced to random regardless of the
    # argument.
    propagation = 'random'
    # Per-station coefficient: 1, or larger for intervals shorter than 10.
    coeff = np.ones(len(error.survey.md))
    coeff[1:] = np.amax(np.stack((
        coeff[1:],
        sqrt(
            10 / (error.survey.md[1:] - error.survey.md[:-1])
        )
    ), axis=-1), axis=-1)
    dpde = np.zeros((len(error.survey_rad), 3))
    with np.errstate(divide='ignore', invalid='ignore'):
        # Azimuth weighting is singular near vertical (division by
        # sin(inc)); infinities are clamped to zero and singular stations
        # are patched below.
        dpde[:, 2] = np.nan_to_num(
            np.absolute(
                cos(error.survey.inc_rad)
                * cos(error.survey.azi_true_rad)
                / sin(error.survey.inc_rad)
                * coeff
            ),
            posinf=0,
            neginf=0,
        )
    dpde[:, 1] = (
        np.absolute(
            cos(error.survey.inc_rad)
        )
        * (
            sin(error.survey.azi_true_rad)
        )
        * coeff
    )
    e_DIA = dpde * mag
    # Indices of stations below the vertical inclination limit.
    sing = np.where(
        error.survey_rad[:, 1] < error.survey.header.vertical_inc_limit
    )
    if len(sing[0]) < 1:
        return error._generate_error(code, e_DIA, propagation, NEV)
    else:
        # Substitute east-only errors at singular stations: interior
        # stations use half the double measured-depth interval, the
        # second and last stations get dedicated boundary formulas.
        e_NEV = error._e_NEV(e_DIA)
        e_NEV_sing = np.zeros_like(e_NEV)
        e_NEV_sing[1:-1, 1] = (
            coeff[1:-1]
            * (
                error.survey.md[2:]
                - error.survey.md[:-2]
            ) / 2
            * mag
        )
        e_NEV_sing[1, 1] = (
            coeff[1]
            * (
                error.survey.md[2] + error.survey.md[1]
                - 2 * error.survey.md[0]
            ) / 2
            * mag
        )
        e_NEV_sing[-1, 1] = (
            coeff[-1]
            * (
                error.survey.md[-1]
                - error.survey.md[-2]
            ) / 2
            * mag
        )
        e_NEV[sing] = e_NEV_sing[sing]
        # "Star" (interval-end) errors use half the single interval, with
        # the second station doubled.
        e_NEV_star = error._e_NEV_star(e_DIA)
        e_NEV_star_sing = np.zeros_like(e_NEV)
        e_NEV_star_sing[1:, 1] = (
            (
                error.survey.md[1:]
                - error.survey.md[:-1]
            ) / 2
            * mag
        )
        e_NEV_star_sing[1, 1] = (
            (
                error.survey.md[1]
                - error.survey.md[0]
            )
            * mag
        )
        e_NEV_star[sing] = e_NEV_star_sing[sing]
        return error._generate_error(
            code, e_DIA, propagation, NEV, e_NEV, e_NEV_star
        )
| 29.237414
| 94
| 0.526074
| 7,624
| 59,235
| 3.910677
| 0.047613
| 0.169344
| 0.044676
| 0.06272
| 0.840047
| 0.830756
| 0.819587
| 0.810599
| 0.801643
| 0.797149
| 0
| 0.033181
| 0.325872
| 59,235
| 2,025
| 95
| 29.251852
| 0.713463
| 0.04212
| 0
| 0.656881
| 0
| 0
| 0.044935
| 0.003054
| 0
| 0
| 0
| 0.000494
| 0.001223
| 1
| 0.04159
| false
| 0
| 0.004281
| 0
| 0.093578
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
55ddad3ef2d8bd73c07b4436b88d53cd4aca1f2e
| 37,737
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int12e/45.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int12e/45.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int12e/45.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 2757
passenger_arriving = (
(3, 9, 6, 2, 3, 0, 9, 5, 3, 3, 1, 0), # 0
(2, 11, 10, 1, 1, 0, 2, 4, 2, 2, 0, 0), # 1
(1, 10, 5, 3, 2, 0, 5, 12, 6, 3, 1, 0), # 2
(5, 8, 7, 2, 1, 0, 6, 7, 5, 5, 1, 0), # 3
(3, 5, 5, 3, 2, 0, 9, 8, 4, 0, 1, 0), # 4
(1, 9, 11, 4, 1, 0, 7, 7, 6, 4, 0, 0), # 5
(3, 8, 5, 3, 0, 0, 6, 9, 1, 6, 1, 0), # 6
(3, 6, 7, 2, 1, 0, 2, 2, 5, 1, 1, 0), # 7
(4, 4, 2, 2, 0, 0, 7, 8, 2, 5, 1, 0), # 8
(8, 7, 4, 4, 1, 0, 1, 9, 4, 6, 4, 0), # 9
(2, 5, 8, 5, 3, 0, 7, 8, 2, 6, 0, 0), # 10
(4, 10, 2, 2, 1, 0, 7, 7, 6, 4, 6, 0), # 11
(5, 10, 5, 4, 4, 0, 3, 10, 8, 6, 3, 0), # 12
(4, 11, 7, 5, 0, 0, 10, 9, 4, 4, 2, 0), # 13
(4, 11, 5, 4, 2, 0, 7, 7, 5, 5, 3, 0), # 14
(1, 8, 3, 3, 0, 0, 3, 7, 3, 4, 2, 0), # 15
(1, 10, 8, 1, 3, 0, 8, 7, 7, 9, 3, 0), # 16
(8, 9, 7, 3, 4, 0, 7, 6, 5, 2, 1, 0), # 17
(4, 11, 6, 1, 0, 0, 5, 4, 7, 3, 2, 0), # 18
(3, 10, 7, 5, 2, 0, 5, 4, 5, 4, 0, 0), # 19
(5, 10, 6, 4, 2, 0, 1, 13, 8, 7, 3, 0), # 20
(4, 4, 7, 5, 1, 0, 7, 8, 1, 9, 5, 0), # 21
(4, 6, 4, 3, 2, 0, 7, 10, 1, 6, 2, 0), # 22
(1, 5, 6, 7, 2, 0, 4, 5, 6, 3, 2, 0), # 23
(4, 10, 10, 0, 1, 0, 8, 4, 2, 6, 3, 0), # 24
(4, 8, 4, 3, 1, 0, 4, 6, 5, 2, 5, 0), # 25
(3, 13, 9, 5, 4, 0, 3, 10, 5, 4, 1, 0), # 26
(2, 10, 5, 6, 3, 0, 8, 6, 3, 1, 2, 0), # 27
(6, 9, 8, 5, 6, 0, 4, 10, 8, 3, 1, 0), # 28
(2, 9, 5, 3, 3, 0, 4, 11, 3, 5, 0, 0), # 29
(6, 7, 7, 2, 1, 0, 5, 8, 7, 4, 1, 0), # 30
(5, 3, 5, 3, 1, 0, 4, 5, 7, 6, 2, 0), # 31
(3, 9, 7, 4, 0, 0, 5, 5, 8, 3, 1, 0), # 32
(3, 6, 7, 1, 3, 0, 4, 5, 2, 4, 2, 0), # 33
(2, 8, 13, 5, 1, 0, 8, 4, 9, 4, 5, 0), # 34
(5, 11, 5, 3, 4, 0, 4, 6, 9, 5, 1, 0), # 35
(3, 9, 8, 3, 2, 0, 7, 9, 7, 3, 5, 0), # 36
(2, 8, 0, 2, 2, 0, 7, 10, 11, 4, 3, 0), # 37
(1, 13, 5, 4, 0, 0, 5, 6, 1, 5, 1, 0), # 38
(4, 10, 5, 3, 2, 0, 13, 7, 5, 5, 4, 0), # 39
(5, 6, 10, 4, 3, 0, 2, 10, 4, 2, 1, 0), # 40
(4, 9, 4, 1, 2, 0, 5, 10, 3, 1, 3, 0), # 41
(3, 7, 7, 5, 1, 0, 3, 9, 7, 2, 6, 0), # 42
(4, 7, 9, 2, 2, 0, 4, 8, 4, 4, 1, 0), # 43
(3, 9, 7, 3, 4, 0, 5, 9, 6, 1, 2, 0), # 44
(1, 10, 5, 2, 3, 0, 10, 7, 5, 0, 1, 0), # 45
(5, 8, 8, 2, 2, 0, 6, 4, 6, 5, 4, 0), # 46
(3, 5, 8, 1, 1, 0, 2, 7, 1, 3, 3, 0), # 47
(1, 3, 6, 3, 2, 0, 7, 9, 6, 7, 2, 0), # 48
(4, 8, 5, 6, 0, 0, 5, 5, 3, 6, 4, 0), # 49
(8, 1, 6, 9, 3, 0, 5, 5, 3, 3, 4, 0), # 50
(4, 7, 4, 4, 2, 0, 6, 5, 5, 5, 5, 0), # 51
(6, 6, 7, 1, 3, 0, 10, 3, 6, 2, 0, 0), # 52
(5, 7, 5, 4, 3, 0, 5, 5, 7, 3, 1, 0), # 53
(4, 6, 0, 3, 2, 0, 9, 11, 2, 1, 2, 0), # 54
(6, 9, 6, 1, 3, 0, 3, 6, 7, 4, 1, 0), # 55
(3, 7, 7, 4, 2, 0, 7, 8, 5, 4, 2, 0), # 56
(3, 10, 5, 4, 1, 0, 5, 7, 6, 1, 0, 0), # 57
(3, 12, 4, 3, 2, 0, 4, 6, 7, 10, 0, 0), # 58
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 59
)
station_arriving_intensity = (
(3.1795818700614573, 8.15575284090909, 9.59308322622108, 7.603532608695652, 8.571634615384614, 5.708152173913044), # 0
(3.20942641205736, 8.246449918455387, 9.644898645029993, 7.6458772644927535, 8.635879807692307, 5.706206567028985), # 1
(3.238930172666081, 8.335801683501682, 9.695484147386459, 7.687289855072463, 8.69876923076923, 5.704201449275362), # 2
(3.268068107989464, 8.42371171875, 9.744802779562981, 7.727735054347824, 8.760245192307693, 5.702137092391305), # 3
(3.296815174129353, 8.510083606902358, 9.792817587832047, 7.767177536231884, 8.82025, 5.700013768115941), # 4
(3.3251463271875914, 8.594820930660775, 9.839491618466152, 7.805581974637681, 8.87872596153846, 5.697831748188405), # 5
(3.353036523266023, 8.677827272727273, 9.88478791773779, 7.842913043478261, 8.935615384615383, 5.695591304347826), # 6
(3.380460718466491, 8.75900621580387, 9.92866953191945, 7.879135416666666, 8.990860576923078, 5.693292708333334), # 7
(3.40739386889084, 8.83826134259259, 9.971099507283634, 7.914213768115941, 9.044403846153847, 5.6909362318840575), # 8
(3.4338109306409126, 8.915496235795453, 10.012040890102828, 7.9481127717391304, 9.0961875, 5.68852214673913), # 9
(3.459686859818554, 8.990614478114479, 10.051456726649528, 7.980797101449276, 9.146153846153846, 5.68605072463768), # 10
(3.4849966125256073, 9.063519652251683, 10.089310063196228, 8.012231431159421, 9.194245192307692, 5.683522237318841), # 11
(3.509715144863916, 9.134115340909089, 10.125563946015424, 8.042380434782608, 9.240403846153844, 5.680936956521738), # 12
(3.5338174129353224, 9.20230512678872, 10.160181421379605, 8.071208786231884, 9.284572115384616, 5.678295153985506), # 13
(3.5572783728416737, 9.267992592592593, 10.193125535561265, 8.098681159420288, 9.326692307692307, 5.6755971014492745), # 14
(3.5800729806848106, 9.331081321022726, 10.224359334832902, 8.124762228260868, 9.36670673076923, 5.672843070652174), # 15
(3.6021761925665783, 9.391474894781144, 10.25384586546701, 8.149416666666665, 9.404557692307693, 5.6700333333333335), # 16
(3.6235629645888205, 9.449076896569863, 10.281548173736075, 8.172609148550725, 9.4401875, 5.667168161231884), # 17
(3.64420825285338, 9.503790909090908, 10.307429305912597, 8.194304347826087, 9.473538461538464, 5.664247826086956), # 18
(3.664087013462101, 9.555520515046295, 10.331452308269066, 8.214466938405796, 9.504552884615384, 5.661272599637681), # 19
(3.683174202516827, 9.604169297138045, 10.353580227077975, 8.2330615942029, 9.533173076923077, 5.658242753623187), # 20
(3.7014447761194034, 9.649640838068178, 10.373776108611827, 8.250052989130435, 9.559341346153845, 5.655158559782609), # 21
(3.7188736903716704, 9.69183872053872, 10.3920029991431, 8.26540579710145, 9.582999999999998, 5.652020289855073), # 22
(3.7354359013754754, 9.730666527251683, 10.408223944944302, 8.279084692028986, 9.604091346153846, 5.6488282155797105), # 23
(3.75110636523266, 9.76602784090909, 10.422401992287917, 8.291054347826087, 9.62255769230769, 5.645582608695652), # 24
(3.7658600380450684, 9.797826244212962, 10.434500187446444, 8.301279438405798, 9.638341346153844, 5.642283740942029), # 25
(3.779671875914545, 9.825965319865318, 10.444481576692374, 8.309724637681159, 9.651384615384615, 5.63893188405797), # 26
(3.792516834942932, 9.85034865056818, 10.452309206298198, 8.316354619565217, 9.661629807692309, 5.635527309782609), # 27
(3.804369871232075, 9.870879819023568, 10.457946122536418, 8.321134057971014, 9.66901923076923, 5.632070289855072), # 28
(3.815205940883816, 9.887462407933501, 10.461355371679518, 8.324027626811594, 9.673495192307692, 5.628561096014493), # 29
(3.8249999999999997, 9.9, 10.4625, 8.325, 9.674999999999999, 5.625), # 30
(3.834164434143222, 9.910414559659088, 10.461641938405796, 8.324824387254901, 9.674452393617022, 5.620051511744128), # 31
(3.843131010230179, 9.920691477272728, 10.459092028985506, 8.324300980392156, 9.672821276595744, 5.612429710144928), # 32
(3.8519037563938614, 9.930829474431818, 10.45488668478261, 8.323434926470588, 9.670124202127658, 5.6022092203898035), # 33
(3.860486700767263, 9.940827272727272, 10.449062318840578, 8.32223137254902, 9.666378723404256, 5.589464667666167), # 34
(3.8688838714833755, 9.950683593749998, 10.441655344202898, 8.320695465686274, 9.661602393617022, 5.574270677161419), # 35
(3.8770992966751923, 9.96039715909091, 10.432702173913043, 8.318832352941177, 9.655812765957448, 5.556701874062968), # 36
(3.885137004475703, 9.96996669034091, 10.422239221014491, 8.316647181372549, 9.64902739361702, 5.536832883558221), # 37
(3.893001023017902, 9.979390909090908, 10.410302898550723, 8.314145098039214, 9.641263829787233, 5.514738330834581), # 38
(3.900695380434782, 9.988668536931817, 10.396929619565215, 8.31133125, 9.632539627659574, 5.490492841079459), # 39
(3.908224104859335, 9.997798295454546, 10.382155797101449, 8.308210784313726, 9.62287234042553, 5.464171039480259), # 40
(3.915591224424552, 10.006778906249998, 10.366017844202899, 8.304788848039216, 9.612279521276594, 5.435847551224389), # 41
(3.9228007672634266, 10.015609090909093, 10.348552173913044, 8.301070588235293, 9.600778723404256, 5.40559700149925), # 42
(3.929856761508952, 10.024287571022725, 10.329795199275361, 8.297061151960785, 9.5883875, 5.373494015492254), # 43
(3.936763235294117, 10.032813068181818, 10.309783333333334, 8.292765686274508, 9.575123404255319, 5.339613218390804), # 44
(3.9435242167519178, 10.041184303977271, 10.288552989130435, 8.288189338235293, 9.561003989361701, 5.304029235382309), # 45
(3.9501437340153456, 10.0494, 10.266140579710147, 8.28333725490196, 9.546046808510638, 5.266816691654173), # 46
(3.956625815217391, 10.05745887784091, 10.24258251811594, 8.278214583333332, 9.530269414893617, 5.228050212393803), # 47
(3.962974488491049, 10.065359659090909, 10.217915217391303, 8.272826470588234, 9.513689361702127, 5.187804422788607), # 48
(3.9691937819693086, 10.073101065340907, 10.19217509057971, 8.26717806372549, 9.49632420212766, 5.146153948025987), # 49
(3.9752877237851663, 10.080681818181816, 10.165398550724637, 8.261274509803922, 9.478191489361702, 5.103173413293353), # 50
(3.9812603420716113, 10.088100639204544, 10.137622010869565, 8.255120955882353, 9.459308776595744, 5.0589374437781105), # 51
(3.987115664961637, 10.09535625, 10.10888188405797, 8.248722549019607, 9.439693617021277, 5.013520664667666), # 52
(3.992857720588235, 10.10244737215909, 10.079214583333332, 8.24208443627451, 9.419363563829787, 4.966997701149425), # 53
(3.9984905370843995, 10.109372727272726, 10.04865652173913, 8.235211764705882, 9.398336170212765, 4.919443178410794), # 54
(4.00401814258312, 10.116131036931817, 10.017244112318838, 8.22810968137255, 9.376628989361702, 4.87093172163918), # 55
(4.0094445652173905, 10.122721022727271, 9.985013768115941, 8.220783333333333, 9.354259574468085, 4.821537956021989), # 56
(4.014773833120205, 10.129141406250001, 9.952001902173912, 8.213237867647058, 9.331245478723403, 4.771336506746626), # 57
(4.0200099744245525, 10.135390909090907, 9.91824492753623, 8.20547843137255, 9.307604255319148, 4.7204019990005), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(3, 9, 6, 2, 3, 0, 9, 5, 3, 3, 1, 0), # 0
(5, 20, 16, 3, 4, 0, 11, 9, 5, 5, 1, 0), # 1
(6, 30, 21, 6, 6, 0, 16, 21, 11, 8, 2, 0), # 2
(11, 38, 28, 8, 7, 0, 22, 28, 16, 13, 3, 0), # 3
(14, 43, 33, 11, 9, 0, 31, 36, 20, 13, 4, 0), # 4
(15, 52, 44, 15, 10, 0, 38, 43, 26, 17, 4, 0), # 5
(18, 60, 49, 18, 10, 0, 44, 52, 27, 23, 5, 0), # 6
(21, 66, 56, 20, 11, 0, 46, 54, 32, 24, 6, 0), # 7
(25, 70, 58, 22, 11, 0, 53, 62, 34, 29, 7, 0), # 8
(33, 77, 62, 26, 12, 0, 54, 71, 38, 35, 11, 0), # 9
(35, 82, 70, 31, 15, 0, 61, 79, 40, 41, 11, 0), # 10
(39, 92, 72, 33, 16, 0, 68, 86, 46, 45, 17, 0), # 11
(44, 102, 77, 37, 20, 0, 71, 96, 54, 51, 20, 0), # 12
(48, 113, 84, 42, 20, 0, 81, 105, 58, 55, 22, 0), # 13
(52, 124, 89, 46, 22, 0, 88, 112, 63, 60, 25, 0), # 14
(53, 132, 92, 49, 22, 0, 91, 119, 66, 64, 27, 0), # 15
(54, 142, 100, 50, 25, 0, 99, 126, 73, 73, 30, 0), # 16
(62, 151, 107, 53, 29, 0, 106, 132, 78, 75, 31, 0), # 17
(66, 162, 113, 54, 29, 0, 111, 136, 85, 78, 33, 0), # 18
(69, 172, 120, 59, 31, 0, 116, 140, 90, 82, 33, 0), # 19
(74, 182, 126, 63, 33, 0, 117, 153, 98, 89, 36, 0), # 20
(78, 186, 133, 68, 34, 0, 124, 161, 99, 98, 41, 0), # 21
(82, 192, 137, 71, 36, 0, 131, 171, 100, 104, 43, 0), # 22
(83, 197, 143, 78, 38, 0, 135, 176, 106, 107, 45, 0), # 23
(87, 207, 153, 78, 39, 0, 143, 180, 108, 113, 48, 0), # 24
(91, 215, 157, 81, 40, 0, 147, 186, 113, 115, 53, 0), # 25
(94, 228, 166, 86, 44, 0, 150, 196, 118, 119, 54, 0), # 26
(96, 238, 171, 92, 47, 0, 158, 202, 121, 120, 56, 0), # 27
(102, 247, 179, 97, 53, 0, 162, 212, 129, 123, 57, 0), # 28
(104, 256, 184, 100, 56, 0, 166, 223, 132, 128, 57, 0), # 29
(110, 263, 191, 102, 57, 0, 171, 231, 139, 132, 58, 0), # 30
(115, 266, 196, 105, 58, 0, 175, 236, 146, 138, 60, 0), # 31
(118, 275, 203, 109, 58, 0, 180, 241, 154, 141, 61, 0), # 32
(121, 281, 210, 110, 61, 0, 184, 246, 156, 145, 63, 0), # 33
(123, 289, 223, 115, 62, 0, 192, 250, 165, 149, 68, 0), # 34
(128, 300, 228, 118, 66, 0, 196, 256, 174, 154, 69, 0), # 35
(131, 309, 236, 121, 68, 0, 203, 265, 181, 157, 74, 0), # 36
(133, 317, 236, 123, 70, 0, 210, 275, 192, 161, 77, 0), # 37
(134, 330, 241, 127, 70, 0, 215, 281, 193, 166, 78, 0), # 38
(138, 340, 246, 130, 72, 0, 228, 288, 198, 171, 82, 0), # 39
(143, 346, 256, 134, 75, 0, 230, 298, 202, 173, 83, 0), # 40
(147, 355, 260, 135, 77, 0, 235, 308, 205, 174, 86, 0), # 41
(150, 362, 267, 140, 78, 0, 238, 317, 212, 176, 92, 0), # 42
(154, 369, 276, 142, 80, 0, 242, 325, 216, 180, 93, 0), # 43
(157, 378, 283, 145, 84, 0, 247, 334, 222, 181, 95, 0), # 44
(158, 388, 288, 147, 87, 0, 257, 341, 227, 181, 96, 0), # 45
(163, 396, 296, 149, 89, 0, 263, 345, 233, 186, 100, 0), # 46
(166, 401, 304, 150, 90, 0, 265, 352, 234, 189, 103, 0), # 47
(167, 404, 310, 153, 92, 0, 272, 361, 240, 196, 105, 0), # 48
(171, 412, 315, 159, 92, 0, 277, 366, 243, 202, 109, 0), # 49
(179, 413, 321, 168, 95, 0, 282, 371, 246, 205, 113, 0), # 50
(183, 420, 325, 172, 97, 0, 288, 376, 251, 210, 118, 0), # 51
(189, 426, 332, 173, 100, 0, 298, 379, 257, 212, 118, 0), # 52
(194, 433, 337, 177, 103, 0, 303, 384, 264, 215, 119, 0), # 53
(198, 439, 337, 180, 105, 0, 312, 395, 266, 216, 121, 0), # 54
(204, 448, 343, 181, 108, 0, 315, 401, 273, 220, 122, 0), # 55
(207, 455, 350, 185, 110, 0, 322, 409, 278, 224, 124, 0), # 56
(210, 465, 355, 189, 111, 0, 327, 416, 284, 225, 124, 0), # 57
(213, 477, 359, 192, 113, 0, 331, 422, 291, 235, 124, 0), # 58
(213, 477, 359, 192, 113, 0, 331, 422, 291, 235, 124, 0), # 59
)
passenger_arriving_rate = (
(3.1795818700614573, 6.524602272727271, 5.755849935732647, 3.0414130434782605, 1.7143269230769227, 0.0, 5.708152173913044, 6.857307692307691, 4.562119565217391, 3.8372332904884314, 1.6311505681818177, 0.0), # 0
(3.20942641205736, 6.597159934764309, 5.786939187017996, 3.0583509057971012, 1.7271759615384612, 0.0, 5.706206567028985, 6.908703846153845, 4.587526358695652, 3.857959458011997, 1.6492899836910773, 0.0), # 1
(3.238930172666081, 6.668641346801345, 5.817290488431875, 3.074915942028985, 1.7397538461538458, 0.0, 5.704201449275362, 6.959015384615383, 4.612373913043478, 3.8781936589545833, 1.6671603367003363, 0.0), # 2
(3.268068107989464, 6.738969375, 5.846881667737788, 3.091094021739129, 1.7520490384615384, 0.0, 5.702137092391305, 7.0081961538461535, 4.636641032608694, 3.897921111825192, 1.68474234375, 0.0), # 3
(3.296815174129353, 6.808066885521885, 5.875690552699228, 3.106871014492753, 1.76405, 0.0, 5.700013768115941, 7.0562, 4.66030652173913, 3.9171270351328187, 1.7020167213804713, 0.0), # 4
(3.3251463271875914, 6.87585674452862, 5.903694971079691, 3.122232789855072, 1.775745192307692, 0.0, 5.697831748188405, 7.102980769230768, 4.6833491847826085, 3.9357966473864603, 1.718964186132155, 0.0), # 5
(3.353036523266023, 6.942261818181818, 5.930872750642674, 3.137165217391304, 1.7871230769230766, 0.0, 5.695591304347826, 7.148492307692306, 4.705747826086957, 3.953915167095116, 1.7355654545454544, 0.0), # 6
(3.380460718466491, 7.007204972643096, 5.95720171915167, 3.1516541666666664, 1.7981721153846155, 0.0, 5.693292708333334, 7.192688461538462, 4.727481249999999, 3.97146781276778, 1.751801243160774, 0.0), # 7
(3.40739386889084, 7.0706090740740715, 5.982659704370181, 3.165685507246376, 1.8088807692307691, 0.0, 5.6909362318840575, 7.2355230769230765, 4.7485282608695645, 3.9884398029134536, 1.7676522685185179, 0.0), # 8
(3.4338109306409126, 7.132396988636362, 6.007224534061696, 3.179245108695652, 1.8192374999999996, 0.0, 5.68852214673913, 7.2769499999999985, 4.768867663043478, 4.004816356041131, 1.7830992471590905, 0.0), # 9
(3.459686859818554, 7.1924915824915825, 6.030874035989717, 3.19231884057971, 1.829230769230769, 0.0, 5.68605072463768, 7.316923076923076, 4.7884782608695655, 4.020582690659811, 1.7981228956228956, 0.0), # 10
(3.4849966125256073, 7.250815721801346, 6.053586037917737, 3.204892572463768, 1.8388490384615384, 0.0, 5.683522237318841, 7.355396153846153, 4.807338858695652, 4.0357240252784905, 1.8127039304503365, 0.0), # 11
(3.509715144863916, 7.30729227272727, 6.0753383676092545, 3.2169521739130427, 1.8480807692307688, 0.0, 5.680936956521738, 7.392323076923075, 4.825428260869565, 4.050225578406169, 1.8268230681818176, 0.0), # 12
(3.5338174129353224, 7.361844101430976, 6.096108852827762, 3.228483514492753, 1.8569144230769232, 0.0, 5.678295153985506, 7.427657692307693, 4.84272527173913, 4.0640725685518415, 1.840461025357744, 0.0), # 13
(3.5572783728416737, 7.414394074074074, 6.115875321336759, 3.2394724637681147, 1.8653384615384612, 0.0, 5.6755971014492745, 7.461353846153845, 4.859208695652172, 4.077250214224506, 1.8535985185185184, 0.0), # 14
(3.5800729806848106, 7.46486505681818, 6.134615600899742, 3.249904891304347, 1.873341346153846, 0.0, 5.672843070652174, 7.493365384615384, 4.874857336956521, 4.089743733933161, 1.866216264204545, 0.0), # 15
(3.6021761925665783, 7.513179915824915, 6.152307519280206, 3.259766666666666, 1.8809115384615382, 0.0, 5.6700333333333335, 7.523646153846153, 4.889649999999999, 4.101538346186803, 1.8782949789562287, 0.0), # 16
(3.6235629645888205, 7.55926151725589, 6.168928904241645, 3.26904365942029, 1.8880374999999998, 0.0, 5.667168161231884, 7.552149999999999, 4.903565489130435, 4.11261926949443, 1.8898153793139725, 0.0), # 17
(3.64420825285338, 7.603032727272725, 6.184457583547558, 3.2777217391304343, 1.8947076923076926, 0.0, 5.664247826086956, 7.578830769230771, 4.916582608695652, 4.122971722365039, 1.9007581818181813, 0.0), # 18
(3.664087013462101, 7.644416412037035, 6.198871384961439, 3.285786775362318, 1.9009105769230765, 0.0, 5.661272599637681, 7.603642307692306, 4.928680163043477, 4.132580923307626, 1.9111041030092588, 0.0), # 19
(3.683174202516827, 7.683335437710435, 6.2121481362467845, 3.2932246376811594, 1.9066346153846152, 0.0, 5.658242753623187, 7.626538461538461, 4.93983695652174, 4.14143209083119, 1.9208338594276086, 0.0), # 20
(3.7014447761194034, 7.719712670454542, 6.224265665167096, 3.3000211956521737, 1.911868269230769, 0.0, 5.655158559782609, 7.647473076923076, 4.950031793478261, 4.14951044344473, 1.9299281676136355, 0.0), # 21
(3.7188736903716704, 7.753470976430976, 6.23520179948586, 3.3061623188405793, 1.9165999999999994, 0.0, 5.652020289855073, 7.666399999999998, 4.959243478260869, 4.15680119965724, 1.938367744107744, 0.0), # 22
(3.7354359013754754, 7.784533221801346, 6.244934366966581, 3.311633876811594, 1.920818269230769, 0.0, 5.6488282155797105, 7.683273076923076, 4.967450815217392, 4.163289577977721, 1.9461333054503365, 0.0), # 23
(3.75110636523266, 7.812822272727271, 6.25344119537275, 3.3164217391304347, 1.9245115384615379, 0.0, 5.645582608695652, 7.6980461538461515, 4.974632608695652, 4.168960796915166, 1.9532055681818177, 0.0), # 24
(3.7658600380450684, 7.838260995370368, 6.260700112467866, 3.320511775362319, 1.9276682692307685, 0.0, 5.642283740942029, 7.710673076923074, 4.980767663043479, 4.173800074978577, 1.959565248842592, 0.0), # 25
(3.779671875914545, 7.860772255892254, 6.266688946015424, 3.3238898550724634, 1.9302769230769228, 0.0, 5.63893188405797, 7.721107692307691, 4.985834782608695, 4.177792630676949, 1.9651930639730635, 0.0), # 26
(3.792516834942932, 7.8802789204545425, 6.2713855237789184, 3.326541847826087, 1.9323259615384616, 0.0, 5.635527309782609, 7.729303846153846, 4.98981277173913, 4.180923682519278, 1.9700697301136356, 0.0), # 27
(3.804369871232075, 7.8967038552188535, 6.2747676735218505, 3.328453623188405, 1.9338038461538458, 0.0, 5.632070289855072, 7.735215384615383, 4.992680434782608, 4.183178449014567, 1.9741759638047134, 0.0), # 28
(3.815205940883816, 7.9099699263468, 6.276813223007711, 3.3296110507246373, 1.9346990384615383, 0.0, 5.628561096014493, 7.738796153846153, 4.994416576086956, 4.184542148671807, 1.9774924815867, 0.0), # 29
(3.8249999999999997, 7.92, 6.2775, 3.3299999999999996, 1.9349999999999996, 0.0, 5.625, 7.739999999999998, 4.994999999999999, 4.185, 1.98, 0.0), # 30
(3.834164434143222, 7.92833164772727, 6.276985163043477, 3.3299297549019604, 1.9348904787234043, 0.0, 5.620051511744128, 7.739561914893617, 4.994894632352941, 4.184656775362318, 1.9820829119318175, 0.0), # 31
(3.843131010230179, 7.936553181818182, 6.275455217391303, 3.329720392156862, 1.9345642553191487, 0.0, 5.612429710144928, 7.738257021276595, 4.994580588235293, 4.1836368115942015, 1.9841382954545455, 0.0), # 32
(3.8519037563938614, 7.944663579545454, 6.272932010869566, 3.329373970588235, 1.9340248404255314, 0.0, 5.6022092203898035, 7.736099361702125, 4.994060955882353, 4.181954673913044, 1.9861658948863634, 0.0), # 33
(3.860486700767263, 7.952661818181817, 6.269437391304347, 3.3288925490196077, 1.9332757446808508, 0.0, 5.589464667666167, 7.733102978723403, 4.993338823529411, 4.179624927536231, 1.9881654545454543, 0.0), # 34
(3.8688838714833755, 7.960546874999998, 6.264993206521739, 3.328278186274509, 1.9323204787234043, 0.0, 5.574270677161419, 7.729281914893617, 4.9924172794117645, 4.176662137681159, 1.9901367187499994, 0.0), # 35
(3.8770992966751923, 7.968317727272727, 6.259621304347825, 3.3275329411764707, 1.9311625531914893, 0.0, 5.556701874062968, 7.724650212765957, 4.9912994117647065, 4.173080869565217, 1.9920794318181818, 0.0), # 36
(3.885137004475703, 7.975973352272726, 6.253343532608695, 3.3266588725490194, 1.9298054787234038, 0.0, 5.536832883558221, 7.719221914893615, 4.989988308823529, 4.168895688405796, 1.9939933380681816, 0.0), # 37
(3.893001023017902, 7.983512727272726, 6.246181739130434, 3.325658039215685, 1.9282527659574464, 0.0, 5.514738330834581, 7.713011063829786, 4.988487058823528, 4.164121159420289, 1.9958781818181814, 0.0), # 38
(3.900695380434782, 7.990934829545453, 6.238157771739129, 3.3245324999999997, 1.9265079255319146, 0.0, 5.490492841079459, 7.7060317021276585, 4.98679875, 4.1587718478260856, 1.9977337073863632, 0.0), # 39
(3.908224104859335, 7.998238636363636, 6.229293478260869, 3.32328431372549, 1.924574468085106, 0.0, 5.464171039480259, 7.698297872340424, 4.984926470588236, 4.1528623188405795, 1.999559659090909, 0.0), # 40
(3.915591224424552, 8.005423124999998, 6.219610706521739, 3.321915539215686, 1.9224559042553186, 0.0, 5.435847551224389, 7.689823617021275, 4.982873308823529, 4.146407137681159, 2.0013557812499996, 0.0), # 41
(3.9228007672634266, 8.012487272727274, 6.209131304347826, 3.320428235294117, 1.920155744680851, 0.0, 5.40559700149925, 7.680622978723404, 4.980642352941175, 4.1394208695652175, 2.0031218181818184, 0.0), # 42
(3.929856761508952, 8.01943005681818, 6.1978771195652165, 3.3188244607843136, 1.9176774999999997, 0.0, 5.373494015492254, 7.670709999999999, 4.978236691176471, 4.131918079710144, 2.004857514204545, 0.0), # 43
(3.936763235294117, 8.026250454545455, 6.18587, 3.317106274509803, 1.9150246808510636, 0.0, 5.339613218390804, 7.660098723404254, 4.975659411764705, 4.123913333333333, 2.0065626136363637, 0.0), # 44
(3.9435242167519178, 8.032947443181817, 6.1731317934782615, 3.315275735294117, 1.91220079787234, 0.0, 5.304029235382309, 7.64880319148936, 4.972913602941175, 4.115421195652174, 2.008236860795454, 0.0), # 45
(3.9501437340153456, 8.03952, 6.159684347826087, 3.313334901960784, 1.9092093617021275, 0.0, 5.266816691654173, 7.63683744680851, 4.970002352941176, 4.106456231884058, 2.00988, 0.0), # 46
(3.956625815217391, 8.045967102272726, 6.1455495108695635, 3.3112858333333324, 1.9060538829787232, 0.0, 5.228050212393803, 7.624215531914893, 4.966928749999999, 4.097033007246376, 2.0114917755681816, 0.0), # 47
(3.962974488491049, 8.052287727272727, 6.130749130434782, 3.309130588235293, 1.9027378723404254, 0.0, 5.187804422788607, 7.610951489361701, 4.96369588235294, 4.087166086956521, 2.013071931818182, 0.0), # 48
(3.9691937819693086, 8.058480852272725, 6.115305054347826, 3.306871225490196, 1.899264840425532, 0.0, 5.146153948025987, 7.597059361702128, 4.960306838235294, 4.076870036231884, 2.014620213068181, 0.0), # 49
(3.9752877237851663, 8.064545454545453, 6.099239130434782, 3.3045098039215683, 1.8956382978723403, 0.0, 5.103173413293353, 7.582553191489361, 4.956764705882353, 4.066159420289854, 2.016136363636363, 0.0), # 50
(3.9812603420716113, 8.070480511363634, 6.082573206521739, 3.302048382352941, 1.8918617553191486, 0.0, 5.0589374437781105, 7.567447021276594, 4.953072573529411, 4.055048804347826, 2.0176201278409085, 0.0), # 51
(3.987115664961637, 8.076284999999999, 6.065329130434782, 3.299489019607843, 1.8879387234042553, 0.0, 5.013520664667666, 7.551754893617021, 4.949233529411765, 4.043552753623188, 2.0190712499999997, 0.0), # 52
(3.992857720588235, 8.081957897727271, 6.047528749999999, 3.2968337745098037, 1.8838727127659571, 0.0, 4.966997701149425, 7.5354908510638285, 4.945250661764706, 4.0316858333333325, 2.020489474431818, 0.0), # 53
(3.9984905370843995, 8.08749818181818, 6.0291939130434775, 3.294084705882353, 1.8796672340425529, 0.0, 4.919443178410794, 7.5186689361702115, 4.941127058823529, 4.019462608695651, 2.021874545454545, 0.0), # 54
(4.00401814258312, 8.092904829545454, 6.010346467391303, 3.2912438725490194, 1.8753257978723403, 0.0, 4.87093172163918, 7.501303191489361, 4.936865808823529, 4.006897644927535, 2.0232262073863634, 0.0), # 55
(4.0094445652173905, 8.098176818181816, 5.991008260869564, 3.288313333333333, 1.8708519148936167, 0.0, 4.821537956021989, 7.483407659574467, 4.9324699999999995, 3.994005507246376, 2.024544204545454, 0.0), # 56
(4.014773833120205, 8.103313125, 5.971201141304347, 3.285295147058823, 1.8662490957446805, 0.0, 4.771336506746626, 7.464996382978722, 4.927942720588234, 3.980800760869564, 2.02582828125, 0.0), # 57
(4.0200099744245525, 8.108312727272725, 5.950946956521738, 3.2821913725490197, 1.8615208510638295, 0.0, 4.7204019990005, 7.446083404255318, 4.923287058823529, 3.9672979710144918, 2.0270781818181813, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_allighting_rate = (
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 258194110137029475889902652135037600173
#index for seed sequence child
child_seed_index = (
1, # 0
44, # 1
)
| 112.647761
| 213
| 0.728012
| 5,147
| 37,737
| 5.335535
| 0.221877
| 0.314617
| 0.249071
| 0.471925
| 0.332532
| 0.330639
| 0.329692
| 0.329692
| 0.329692
| 0.329692
| 0
| 0.818198
| 0.119617
| 37,737
| 334
| 214
| 112.98503
| 0.008398
| 0.032091
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
364586a9386eababb789e1d7acc8b1a28ee598c8
| 1,992
|
py
|
Python
|
template.py
|
vipul43/algorithms
|
86b74b5e618f98ee6b6a20cb7c5b9513453c3a49
|
[
"MIT"
] | null | null | null |
template.py
|
vipul43/algorithms
|
86b74b5e618f98ee6b6a20cb7c5b9513453c3a49
|
[
"MIT"
] | null | null | null |
template.py
|
vipul43/algorithms
|
86b74b5e618f98ee6b6a20cb7c5b9513453c3a49
|
[
"MIT"
] | null | null | null |
##HEADING: [PROBLEM NAME]
#PROBLEM STATEMENT:
"""
[PROBLEM STATEMENT LINE1]
[PROBLEM STATEMENT LINE1]
[PROBLEM STATEMENT LINE1]
"""
#SOLUTION-1: ([METHOD DESCRIPTION LIKE BRUTE_FORCE, DP_ALGORITHM, GREEDY_ALGORITHM, ITERATIVE_ALGORITHM, RECURSIVE_ALGORITHM]) --> [TIME COMPLEXITY LIKE O(n), O(log2(n)), O(n^2), O(sqrt(n)), O(n/2)==O(n), O(n/3)==O(n)]
#SOLUTION-2: ([METHOD DESCRIPTION LIKE BRUTE_FORCE, DP_ALGORITHM, GREEDY_ALGORITHM, ITERATIVE_ALGORITHM, RECURSIVE_ALGORITHM]) --> [TIME COMPLEXITY LIKE O(n), O(log2(n)), O(n^2), O(sqrt(n)), O(n/2)==O(n), O(n/3)==O(n)]
#SOLUTION-3: ([METHOD DESCRIPTION LIKE BRUTE_FORCE, DP_ALGORITHM, GREEDY_ALGORITHM, ITERATIVE_ALGORITHM, RECURSIVE_ALGORITHM]) --> [TIME COMPLEXITY LIKE O(n), O(log2(n)), O(n^2), O(sqrt(n)), O(n/2)==O(n), O(n/3)==O(n)]
#SOLUTION-4: ([METHOD DESCRIPTION LIKE BRUTE_FORCE, DP_ALGORITHM, GREEDY_ALGORITHM, ITERATIVE_ALGORITHM, RECURSIVE_ALGORITHM]) --> [TIME COMPLEXITY LIKE O(n), O(log2(n)), O(n^2), O(sqrt(n)), O(n/2)==O(n), O(n/3)==O(n)]
#DESCRIPTION:
"""
[##DISCLAIMER: POINT TO BE NOTED BEFORE JUMPING INTO DESCRIPTION]
[DESCRIPTION LINE1]
[DESCRIPTION LINE2]
[DESCRIPTION LINE3]
[DESCRIPTION LINE4]
[DESCRIPTION LINE5]
[DESCRIPTION LINE6]
[DESCRIPTION LINE7]
[DESCRIPTION LINE8]
[ADDITIONAL POINTS TO BE NOTED]
[FURTHER MODIFICATIONS TO ALGORITHM]
[NOTE: 1]
[NOTE: 2]
"""
#APPLICATION-1: ([APPLICATION PROBLEM STATEMENT]) --> [TIME COMPLEXITY LIKE O(n), O(log2(n)), O(n^2), O(sqrt(n)), O(n/2)==O(n), O(n/3)==O(n)]
#APPLICATION-2: ([APPLICATION PROBLEM STATEMENT]) --> [TIME COMPLEXITY LIKE O(n), O(log2(n)), O(n^2), O(sqrt(n)), O(n/2)==O(n), O(n/3)==O(n)]
#DESCRIPTION:
"""
[DESCRIPTION LINE1]
[DESCRIPTION LINE2]
[DESCRIPTION LINE3]
[DESCRIPTION LINE4]
[DESCRIPTION LINE5]
[NOTE: 1]
"""
#RELATED ALGORITHMS:
"""
-[RELATED ALGORITHM1]
-[RELATED ALGORITHM2]
-[RELATED ALGORITHM3]
"""
| 30.646154
| 218
| 0.649598
| 288
| 1,992
| 4.423611
| 0.180556
| 0.056515
| 0.042386
| 0.037677
| 0.766091
| 0.766091
| 0.716641
| 0.716641
| 0.716641
| 0.716641
| 0
| 0.031138
| 0.161647
| 1,992
| 65
| 219
| 30.646154
| 0.731737
| 0.657631
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
366227bc61d1a89a3e2394e39765a398ae255034
| 215
|
py
|
Python
|
views/systemview.py
|
KDerec/chesstournamentmanager
|
2b57d2703d654e4ffc3c44293a031bd596463ba0
|
[
"MIT"
] | null | null | null |
views/systemview.py
|
KDerec/chesstournamentmanager
|
2b57d2703d654e4ffc3c44293a031bd596463ba0
|
[
"MIT"
] | null | null | null |
views/systemview.py
|
KDerec/chesstournamentmanager
|
2b57d2703d654e4ffc3c44293a031bd596463ba0
|
[
"MIT"
] | null | null | null |
"""Display system message."""
def display_exit_message():
"""Display exit message and return a user choice."""
print("Tapez la lettre \"q\" pour confirmez l'arrêt de l'application : ")
return input()
| 23.888889
| 77
| 0.669767
| 29
| 215
| 4.896552
| 0.758621
| 0.15493
| 0.253521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190698
| 215
| 8
| 78
| 26.875
| 0.816092
| 0.325581
| 0
| 0
| 0
| 0
| 0.447761
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
369dfa16b7a73fcaadbd3384df0b610ba1be1446
| 685
|
py
|
Python
|
main.py
|
pythonyhd/pratt_project
|
5babd7769bc3bb8f7facb32076c6147746fab947
|
[
"Apache-2.0"
] | 2
|
2019-11-11T11:35:19.000Z
|
2019-11-22T08:29:05.000Z
|
main.py
|
pythonyhd/pratt_project
|
5babd7769bc3bb8f7facb32076c6147746fab947
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
pythonyhd/pratt_project
|
5babd7769bc3bb8f7facb32076c6147746fab947
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Time : 2019/11/11 11:47
# @Author : Yasaka.Yu
# @File : main.py
from scrapy import cmdline
# cmdline.execute('scrapy crawl wxapp'.split()) # 不下载列表页图片
# cmdline.execute('scrapy crawl wxapp_with_img'.split()) # 下载列表页图片
# cmdline.execute('scrapy crawl wxapp_signal'.split()) # 测试偏移量
# cmdline.execute('scrapy crawl szse_spider'.split()) # 深圳证券交易所-监管信息公开-监管措施与纪律处分
# cmdline.execute('scrapy crawl splash_jdphone'.split()) # spalsh抓取京东手机信息
# cmdline.execute('scrapy crawl splash_lua'.split()) # spalsh结合lua脚本使用
# cmdline.execute('scrapy crawl splash_csdn'.split()) # spalsh结合lua脚本滑动csdn
cmdline.execute('scrapy crawl jobole'.split()) # selenium集成
| 45.666667
| 81
| 0.724088
| 84
| 685
| 5.821429
| 0.488095
| 0.229039
| 0.327198
| 0.408998
| 0.374233
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021667
| 0.124088
| 685
| 15
| 82
| 45.666667
| 0.793333
| 0.842336
| 0
| 0
| 0
| 0
| 0.213483
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
36ca690cdebc0d40814075b2c7086c5353c4f4ea
| 196
|
py
|
Python
|
job/admin.py
|
HuwangWenjing/online-expirement-project
|
fcddbead974c79b0077e02a33ddb36c674627a1b
|
[
"MIT"
] | null | null | null |
job/admin.py
|
HuwangWenjing/online-expirement-project
|
fcddbead974c79b0077e02a33ddb36c674627a1b
|
[
"MIT"
] | null | null | null |
job/admin.py
|
HuwangWenjing/online-expirement-project
|
fcddbead974c79b0077e02a33ddb36c674627a1b
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import sign, teacher, student, course

# Expose every job-app model in the Django admin site.
for model in (sign, teacher, student, course):
    admin.site.register(model)
| 28
| 50
| 0.816327
| 28
| 196
| 5.714286
| 0.428571
| 0.225
| 0.425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076531
| 196
| 7
| 51
| 28
| 0.883978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
36ccdccada8922d1f390d11a0d01196312e75c70
| 338
|
py
|
Python
|
quickscms/routines/template/scripts/official/rcf.py
|
GabeCordo/python_node_tor
|
8d06dda51d8472c84fb1f9a8d00128bc376d59ca
|
[
"MIT"
] | 1
|
2021-01-19T02:41:54.000Z
|
2021-01-19T02:41:54.000Z
|
quickscms/routines/template/scripts/official/rcf.py
|
GabeCordo/python_node_tor
|
8d06dda51d8472c84fb1f9a8d00128bc376d59ca
|
[
"MIT"
] | 3
|
2021-05-21T00:20:55.000Z
|
2021-05-21T13:52:21.000Z
|
common/collection/template/scripts/official/rcf.py
|
GabeCordo/py-acyclic-network
|
eaef31e819fdc527927b0f10854ec6ac2bf30d26
|
[
"MIT"
] | 1
|
2021-01-14T02:44:45.000Z
|
2021-01-14T02:44:45.000Z
|
###############################
# python imports
###############################
from sys import path

###############################
# custom-script imports
###############################
# NOTE(review): this path is relative to the process working directory,
# not to this file's location - confirm the expected CWD before relying
# on the import below resolving.
path.append('../custom/')
import custom  # import all the custom scripts here

###############################
# rcf
###############################
| 21.125
| 49
| 0.304734
| 20
| 338
| 5.15
| 0.65
| 0.252427
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106509
| 338
| 16
| 50
| 21.125
| 0.34106
| 0.213018
| 0
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
36d6f22f67d7f006dffa292ba46691552258f7bc
| 260
|
py
|
Python
|
npipes/triggers/uri.py
|
praxik/nPipes
|
4edf8fa0d0467e3455941c46e960fdf3f43e2d31
|
[
"Apache-2.0"
] | null | null | null |
npipes/triggers/uri.py
|
praxik/nPipes
|
4edf8fa0d0467e3455941c46e960fdf3f43e2d31
|
[
"Apache-2.0"
] | null | null | null |
npipes/triggers/uri.py
|
praxik/nPipes
|
4edf8fa0d0467e3455941c46e960fdf3f43e2d31
|
[
"Apache-2.0"
] | null | null | null |
# -*- mode: python;-*-
from ..message.header import Message
from ..outcome import Outcome, Success, Failure
def sendMessageGet(uri, message:Message) -> Outcome[str, None]:
    """Deliver `message` to `uri` (presumably via a GET-style request -
    name-based inference, confirm when implementing).

    Stub: not yet implemented; currently falls through and returns None,
    despite the declared Outcome[str, None] return type.
    """
    pass
def sendMessagePost(uri, message:Message) -> Outcome[str, None]:
    """Deliver `message` to `uri` (presumably via a POST-style request -
    name-based inference, confirm when implementing).

    Stub: not yet implemented; currently falls through and returns None,
    despite the declared Outcome[str, None] return type.
    """
    pass
| 20
| 64
| 0.703846
| 31
| 260
| 5.903226
| 0.516129
| 0.10929
| 0.185792
| 0.262295
| 0.382514
| 0.382514
| 0.382514
| 0
| 0
| 0
| 0
| 0
| 0.157692
| 260
| 12
| 65
| 21.666667
| 0.835616
| 0.076923
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
3d47d8e580f35d8c00c882ea0de6e744941e33e1
| 88
|
py
|
Python
|
src/utils/__init__.py
|
taonguyen740/flask_based_3tier_framework
|
f02e492eff0206e661925dddcf0ba978ead38b5e
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
taonguyen740/flask_based_3tier_framework
|
f02e492eff0206e661925dddcf0ba978ead38b5e
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
taonguyen740/flask_based_3tier_framework
|
f02e492eff0206e661925dddcf0ba978ead38b5e
|
[
"MIT"
] | null | null | null |
from .decorator_all_methods import decorate_all_methods
from .exec_time import exec_time
| 44
| 55
| 0.897727
| 14
| 88
| 5.214286
| 0.571429
| 0.273973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079545
| 88
| 2
| 56
| 44
| 0.901235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3d5fa2101502e7de9ca80bf5be6d3888e879841a
| 9,869
|
py
|
Python
|
notebooks/mnist/parse.py
|
formigone/machine-learning-research
|
df33eb2ddc9442abc169dc5cc7bc2a8e09712ea4
|
[
"MIT"
] | null | null | null |
notebooks/mnist/parse.py
|
formigone/machine-learning-research
|
df33eb2ddc9442abc169dc5cc7bc2a8e09712ea4
|
[
"MIT"
] | null | null | null |
notebooks/mnist/parse.py
|
formigone/machine-learning-research
|
df33eb2ddc9442abc169dc5cc7bc2a8e09712ea4
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import sys

# Require the image path argument. Previously the script only printed the
# usage line and then crashed with an IndexError on sys.argv[1]; exit
# cleanly with a non-zero status instead.
if len(sys.argv) < 2:
    print('Usage: %s <file-to-classify>' % sys.argv[0])
    sys.exit(1)

file = sys.argv[1]
print('Parsing file %s' % file)

# Load the input image and resize it to the 28x28 MNIST input resolution.
img = Image.open(file)
img = img.resize((28, 28))
# NOTE(review): scaling by 255 assumes pixel values in [0, 1]; PIL images
# are usually uint8 in [0, 255] already - confirm the intended range.
img = np.array(img) * 255
# print(img.shape)
# print(img)
# plt.imshow(img)
# plt.show()
img2 = [0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.32941177, 0.72549021, 0.62352943, 0.59215689, 0.23529413, 0.14117648,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0.8705883, 0.99607849, 0.99607849, 0.99607849, 0.99607849,
0.9450981, 0.77647066, 0.77647066, 0.77647066, 0.77647066, 0.77647066,
0.77647066, 0.77647066, 0.77647066, 0.66666669, 0.20392159, 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.26274511, 0.44705886, 0.28235295,
0.44705886, 0.63921571, 0.89019614, 0.99607849, 0.88235301, 0.99607849,
0.99607849, 0.99607849, 0.98039222, 0.89803928, 0.99607849, 0.99607849,
0.54901963, 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0.06666667, 0.25882354,
0.05490196, 0.26274511, 0.26274511, 0.26274511, 0.23137257, 0.08235294,
0.92549026, 0.99607849, 0.41568631, 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.32549021, 0.99215692, 0.81960791, 0.07058824, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.08627451, 0.91372555, 1., 0.32549021, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0.50588238, 0.99607849, 0.9333334, 0.17254902, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.23137257, 0.97647065, 0.99607849, 0.24313727, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0.52156866, 0.99607849, 0.73333335, 0.01960784, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.03529412, 0.80392164, 0.97254908, 0.227451, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0.49411768, 0.99607849, 0.71372551, 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.29411766, 0.98431379, 0.94117653, 0.22352943, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.07450981, 0.86666673, 0.99607849, 0.65098041, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.01176471, 0.7960785, 0.99607849, 0.8588236, 0.13725491, 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0.14901961, 0.99607849, 0.99607849, 0.3019608, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.12156864, 0.87843144, 0.99607849, 0.45098042, 0.00392157, 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0.52156866, 0.99607849, 0.99607849, 0.20392159, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.2392157,
0.94901967, 0.99607849, 0.99607849, 0.20392159, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0.,
0.47450984, 0.99607849, 0.99607849, 0.8588236, 0.15686275, 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0.47450984, 0.99607849, 0.81176478, 0.07058824, 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.]
# Turn the hard-coded reference digit into a 28x28 image and display it
# side by side with the freshly parsed input image.
img2 = np.array(img2).reshape((28, 28))
fig, (left_axis, right_axis) = plt.subplots(1, 2)
left_axis.imshow(img)
right_axis.imshow(img2)
plt.show()
print(img)
| 68.062069
| 89
| 0.205695
| 988
| 9,869
| 2.054656
| 0.110324
| 0.657143
| 0.95468
| 1.231527
| 0.527094
| 0.47734
| 0.47734
| 0.44532
| 0.44532
| 0.44532
| 0
| 0.465872
| 0.625899
| 9,869
| 144
| 90
| 68.534722
| 0.083965
| 0.005472
| 0
| 0.639098
| 0
| 0
| 0.004383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030075
| 0
| 0.030075
| 0.022556
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
18710972a72793fd618de5a5c9d69bf04c134f38
| 142
|
py
|
Python
|
tests/test_ratpy.py
|
dimatura/ratpy
|
70e14eec331bf3126821904d7e90b93d1d79a196
|
[
"MIT"
] | 2
|
2019-06-19T15:15:32.000Z
|
2021-12-22T21:33:58.000Z
|
tests/test_ratpy.py
|
dimatura/ratpy
|
70e14eec331bf3126821904d7e90b93d1d79a196
|
[
"MIT"
] | null | null | null |
tests/test_ratpy.py
|
dimatura/ratpy
|
70e14eec331bf3126821904d7e90b93d1d79a196
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ratpy` package."""
import pytest
from ratpy import ratpy
from ratpy import cli
| 14.2
| 32
| 0.669014
| 21
| 142
| 4.52381
| 0.714286
| 0.189474
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.169014
| 142
| 9
| 33
| 15.777778
| 0.79661
| 0.485915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a10caf6baec75c9705f34dd01a128c617feca32a
| 16,948
|
py
|
Python
|
extra/slothclasses/db_commands.py
|
costaluu/sloth-bot
|
48727aff5859ec96c48691a638b3b8c0a90c70f9
|
[
"MIT"
] | null | null | null |
extra/slothclasses/db_commands.py
|
costaluu/sloth-bot
|
48727aff5859ec96c48691a638b3b8c0a90c70f9
|
[
"MIT"
] | null | null | null |
extra/slothclasses/db_commands.py
|
costaluu/sloth-bot
|
48727aff5859ec96c48691a638b3b8c0a90c70f9
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
from mysqldb import the_database
class SlothClassDatabaseCommands(commands.Cog):
    """ A class for organizing the bot's table creation/drop/delete/check commands. """

    # ---- Shared database helpers ------------------------------------------
    # Every command below used to open a cursor, execute, commit, and close
    # it by hand, and the six *_exists methods had three slightly different
    # implementations. The two helpers below consolidate that boilerplate.

    async def _table_exists(self, table_name: str) -> bool:
        """ Checks whether `table_name` exists in the database. """
        mycursor, _ = await the_database()
        await mycursor.execute(f"SHOW TABLE STATUS LIKE '{table_name}'")
        rows = await mycursor.fetchall()
        await mycursor.close()
        return len(rows) != 0

    async def _execute_and_commit(self, sql: str, values: tuple = None) -> None:
        """ Runs a single (optionally parameterized) statement and commits it. """
        mycursor, db = await the_database()
        if values is None:
            await mycursor.execute(sql)
        else:
            await mycursor.execute(sql, values)
        await db.commit()
        await mycursor.close()

    # ======== SlothSkills =========
    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_sloth_skills(self, ctx) -> None:
        """ (Owner) Creates the SlothSkills table. """
        if await self.table_sloth_skills_exists():
            return await ctx.send("**The `SlothSkills` table already exists!**")
        await self._execute_and_commit("""
            CREATE TABLE SlothSkills (
                user_id BIGINT NOT NULL, skill_type VARCHAR(30) NOT NULL,
                skill_timestamp BIGINT NOT NULL, target_id BIGINT DEFAULT NULL,
                message_id BIGINT DEFAULT NULL, channel_id BIGINT DEFAULT NULL,
                emoji VARCHAR(50) DEFAULT NULL, PRICE INT DEFAULT 0,
                PRIMARY KEY (target_id, skill_type)
            ) DEFAULT CHARSET=utf8mb4""")
        await ctx.send("**Created `SlothSkills` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_sloth_skills(self, ctx) -> None:
        """ (Owner) Drops the SlothSkills table. """
        if not await self.table_sloth_skills_exists():
            return await ctx.send("**The `SlothSkills` table doesn't exist!**")
        await self._execute_and_commit("DROP TABLE SlothSkills")
        await ctx.send("**Dropped `SlothSkills` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_sloth_skills(self, ctx) -> None:
        """ (Owner) Resets the SlothSkills table. """
        if not await self.table_sloth_skills_exists():
            return await ctx.send("**The `SlothSkills` table doesn't exist yet!**")
        await self._execute_and_commit("DELETE FROM SlothSkills")
        await ctx.send("**Reset `SlothSkills` table!**")

    async def table_sloth_skills_exists(self) -> bool:
        """ Checks whether the SlothSkills table exists. """
        return await self._table_exists('SlothSkills')

    # ======== SkillsCooldown =========
    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_skills_cooldown(self, ctx) -> None:
        """ Creates the SkillsCooldown table. """
        member = ctx.author
        if await self.table_skills_cooldown_exists():
            return await ctx.send(f"**Table `SkillsCooldown` already exists, {member.mention}!**")
        await self._execute_and_commit("""
            CREATE TABLE SkillsCooldown (
                user_id BIGINT NOT NULL,
                skill_one_ts BIGINT DEFAULT NULL,
                skill_two_ts BIGINT DEFAULT NULL,
                skill_three_ts BIGINT DEFAULT NULL,
                skill_four_ts BIGINT DEFAULT NULL,
                skill_five_ts BIGINT DEFAULT NULL,
                PRIMARY KEY (user_id),
                CONSTRAINT fk_skills_user_id FOREIGN KEY (user_id) REFERENCES UserCurrency (user_id) ON DELETE CASCADE ON UPDATE CASCADE
            )
            """)
        await ctx.send(f"**Table `SkillsCooldown` created, {member.mention}!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_skills_cooldown(self, ctx) -> None:
        """ Drops the SkillsCooldown table. """
        member = ctx.author
        if not await self.table_skills_cooldown_exists():
            return await ctx.send(f"**Table `SkillsCooldown` doesn't exist, {member.mention}!**")
        await self._execute_and_commit("DROP TABLE SkillsCooldown")
        await ctx.send(f"**Table `SkillsCooldown` dropped, {member.mention}!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_skills_cooldown(self, ctx) -> None:
        """ Resets the SkillsCooldown table. """
        member = ctx.author
        if not await self.table_skills_cooldown_exists():
            return await ctx.send(f"**Table `SkillsCooldown` doesn't exist yet, {member.mention}!**")
        await self._execute_and_commit("DELETE FROM SkillsCooldown")
        await ctx.send(f"**Table `SkillsCooldown` reset, {member.mention}!**")

    async def table_skills_cooldown_exists(self) -> bool:
        """ Checks whether the SkillsCooldown table exists. """
        return await self._table_exists('SkillsCooldown')

    # ======== UserTribe =========
    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_user_tribe(self, ctx) -> None:
        """ (Owner) Creates the UserTribe table. """
        if await self.table_user_tribe_exists():
            return await ctx.send("**The `UserTribe` table already exists!**")
        await self._execute_and_commit("""
            CREATE TABLE UserTribe (
                user_id BIGINT NOT NULL, tribe_name VARCHAR(50) NOT NULL,
                tribe_description VARCHAR(200) NOT NULL, two_emojis VARCHAR(2) NOT NULL,
                tribe_thumbnail VARCHAR(200) DEFAULT NULL, tribe_form VARCHAR(100) DEFAULT NULL,
                slug VARCHAR(75) NOT NULL,
                PRIMARY KEY (tribe_name),
                CONSTRAINT fk_tribe_owner_id FOREIGN KEY (user_id) REFERENCES UserCurrency (user_id) ON DELETE CASCADE ON UPDATE CASCADE
            ) DEFAULT CHARSET=utf8mb4""")
        await ctx.send("**Created `UserTribe` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_user_tribe(self, ctx) -> None:
        """ (Owner) Drops the UserTribe table. """
        if not await self.table_user_tribe_exists():
            return await ctx.send("**The `UserTribe` table doesn't exist!**")
        await self._execute_and_commit("DROP TABLE UserTribe")
        await ctx.send("**Dropped `UserTribe` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_user_tribe(self, ctx) -> None:
        """ (Owner) Resets the UserTribe table. """
        if not await self.table_user_tribe_exists():
            return await ctx.send("**The `UserTribe` table doesn't exist yet!**")
        await self._execute_and_commit("DELETE FROM UserTribe")
        await ctx.send("**Reset `UserTribe` table!**")

    async def table_user_tribe_exists(self) -> bool:
        """ Checks whether the UserTribe table exists. """
        return await self._table_exists('UserTribe')

    # ======== TribeMember =========
    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_tribe_member(self, ctx) -> None:
        """ (Owner) Creates the TribeMember table. """
        if await self.table_tribe_member_exists():
            return await ctx.send("**The `TribeMember` table already exists!**")
        await self._execute_and_commit("""
            CREATE TABLE TribeMember (
                owner_id BIGINT NOT NULL,
                tribe_name VARCHAR(50) NOT NULL,
                member_id BIGINT NOT NULL,
                tribe_role VARCHAR(30) DEFAULT NULL,
                PRIMARY KEY (member_id),
                CONSTRAINT fk_tribe_owner FOREIGN KEY (owner_id) REFERENCES UserTribe (user_id) ON DELETE CASCADE ON UPDATE CASCADE,
                CONSTRAINT fk_tribe_name FOREIGN KEY (tribe_name) REFERENCES UserTribe (tribe_name) ON DELETE CASCADE ON UPDATE CASCADE
            ) DEFAULT CHARSET=utf8mb4""")
        await ctx.send("**Created `TribeMember` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_tribe_member(self, ctx) -> None:
        """ (Owner) Drops the TribeMember table. """
        if not await self.table_tribe_member_exists():
            return await ctx.send("**The `TribeMember` table doesn't exist!**")
        await self._execute_and_commit("DROP TABLE TribeMember")
        await ctx.send("**Dropped `TribeMember` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_tribe_member(self, ctx) -> None:
        """ (Owner) Resets the TribeMember table. """
        if not await self.table_tribe_member_exists():
            return await ctx.send("**The `TribeMember` table doesn't exist yet!**")
        await self._execute_and_commit("DELETE FROM TribeMember")
        await ctx.send("**Reset `TribeMember` table!**")

    async def table_tribe_member_exists(self) -> bool:
        """ Checks whether the TribeMember table exists. """
        return await self._table_exists('TribeMember')

    # ======== TribeRole =========
    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_tribe_role(self, ctx) -> None:
        """ (Owner) Creates the TribeRole table. """
        if await self.table_tribe_role_exists():
            return await ctx.send("**The `TribeRole` table already exists!**")
        await self._execute_and_commit("""
            CREATE TABLE TribeRole (
                owner_id BIGINT NOT NULL,
                tribe_name VARCHAR(50) NOT NULL,
                role_name VARCHAR(30) NOT NULL,
                PRIMARY KEY (tribe_name, role_name),
                CONSTRAINT fk_tr_tribe_owner FOREIGN KEY (owner_id) REFERENCES UserTribe (user_id) ON DELETE CASCADE ON UPDATE CASCADE,
                CONSTRAINT fk_tr_tribe_name FOREIGN KEY (tribe_name) REFERENCES UserTribe (tribe_name) ON DELETE CASCADE ON UPDATE CASCADE
            ) DEFAULT CHARSET=utf8mb4""")  # COLLATE=utf8mb4_unicode_ci
        await ctx.send("**Created `TribeRole` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_tribe_role(self, ctx) -> None:
        """ (Owner) Drops the TribeRole table. """
        if not await self.table_tribe_role_exists():
            return await ctx.send("**The `TribeRole` table doesn't exist!**")
        await self._execute_and_commit("DROP TABLE TribeRole")
        await ctx.send("**Dropped `TribeRole` table!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_tribe_role(self, ctx) -> None:
        """ (Owner) Resets the TribeRole table. """
        if not await self.table_tribe_role_exists():
            return await ctx.send("**The `TribeRole` table doesn't exist yet!**")
        await self._execute_and_commit("DELETE FROM TribeRole")
        await ctx.send("**Reset `TribeRole` table!**")

    async def table_tribe_role_exists(self) -> bool:
        """ Checks whether the TribeRole table exists. """
        return await self._table_exists('TribeRole')

    # ======== SlothProfile =========
    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def create_table_sloth_profile(self, ctx) -> None:
        """ Creates the SlothProfile table. """
        member = ctx.author
        if await self.table_sloth_profile_exists():
            return await ctx.send(f"**Table `SlothProfile` already exists, {member.mention}!**")
        await self._execute_and_commit("""
            CREATE TABLE SlothProfile (
                user_id BIGINT NOT NULL,
                sloth_class VARCHAR(30) DEFAULT 'default',
                skills_used INT DEFAULT 0,
                tribe VARCHAR(50) DEFAULT NULL,
                change_class_ts BIGINT DEFAULT 0,
                has_potion TINYINT(1) DEFAULT 0,
                knife_sharpness_stack TINYINT(1) DEFAULT 0,
                rings TINYINT(1) DEFAULT 0,
                tribe_user_id BIGINT DEFAULT NULL,
                PRIMARY KEY (user_id),
                CONSTRAINT fk_sloth_pfl_user_id FOREIGN KEY (user_id) REFERENCES UserCurrency (user_id) ON DELETE CASCADE ON UPDATE CASCADE,
                CONSTRAINT fk_sloth_pfl_tribe_name FOREIGN KEY (tribe, tribe_user_id) REFERENCES TribeMember (tribe_name, member_id) ON DELETE SET NULL ON UPDATE CASCADE
            ) DEFAULT CHARSET=utf8mb4""")
        await ctx.send(f"**Table `SlothProfile` created, {member.mention}!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def drop_table_sloth_profile(self, ctx) -> None:
        """ Drops the SlothProfile table. """
        member = ctx.author
        if not await self.table_sloth_profile_exists():
            return await ctx.send(f"**Table `SlothProfile` doesn't exist, {member.mention}!**")
        await self._execute_and_commit("DROP TABLE SlothProfile")
        await ctx.send(f"**Table `SlothProfile` dropped, {member.mention}!**")

    @commands.command(hidden=True)
    @commands.has_permissions(administrator=True)
    async def reset_table_sloth_profile(self, ctx) -> None:
        """ Resets the SlothProfile table. """
        member = ctx.author
        if not await self.table_sloth_profile_exists():
            return await ctx.send(f"**Table `SlothProfile` doesn't exist yet, {member.mention}!**")
        await self._execute_and_commit("DELETE FROM SlothProfile")
        await ctx.send(f"**Table `SlothProfile` reset, {member.mention}!**")

    async def table_sloth_profile_exists(self) -> bool:
        """ Checks whether the SlothProfile table exists. """
        return await self._table_exists('SlothProfile')

    async def update_sloth_profile_class(self, user_id: int, sloth_class: str) -> None:
        """ Updates the user's Sloth Profile's class.
        :param user_id: The ID of the user to update.
        :param sloth_class: The sloth class to update to. """
        # Parameterized query; values are never interpolated into the SQL.
        await self._execute_and_commit(
            "UPDATE SlothProfile SET sloth_class = %s WHERE user_id = %s",
            (sloth_class, user_id))
| 40.448687
| 165
| 0.634765
| 1,962
| 16,948
| 5.33945
| 0.076453
| 0.069492
| 0.041237
| 0.050115
| 0.844311
| 0.819588
| 0.762314
| 0.733772
| 0.725181
| 0.71659
| 0
| 0.004368
| 0.256962
| 16,948
| 419
| 166
| 40.448687
| 0.827523
| 0.017052
| 0
| 0.558824
| 0
| 0.003268
| 0.357384
| 0.002855
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009804
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a15f6c0da2e1b17bde96258a612a766266e306d9
| 7,900
|
py
|
Python
|
testing/modules/sparsegpregression_test.py
|
jnkm/MXFusion
|
cfe90d22b8359dbbdac23714c06fda150eb8851e
|
[
"Apache-2.0"
] | null | null | null |
testing/modules/sparsegpregression_test.py
|
jnkm/MXFusion
|
cfe90d22b8359dbbdac23714c06fda150eb8851e
|
[
"Apache-2.0"
] | null | null | null |
testing/modules/sparsegpregression_test.py
|
jnkm/MXFusion
|
cfe90d22b8359dbbdac23714c06fda150eb8851e
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import mxnet as mx
import numpy as np
from mxfusion.models import Model
from mxfusion.modules.gp_modules import SparseGPRegression
from mxfusion.components.distributions.gp.kernels import RBF
from mxfusion.components import Variable
from mxfusion.inference import Inference, MAP, ModulePredictionAlgorithm, TransferInference
from mxfusion.components.variables.var_trans import PositiveTransformation
from mxfusion.modules.gp_modules.sparsegp_regression import SparseGPRegressionSamplingPrediction
import matplotlib
matplotlib.use('Agg')
import GPy
class TestSparseGPRegressionModule(object):
def test_log_pdf(self):
np.random.seed(0)
D = 2
X = np.random.rand(10, 3)
Y = np.random.rand(10, D)
Z = np.random.rand(3, 3)
noise_var = np.random.rand(1)
lengthscale = np.random.rand(3)
variance = np.random.rand(1)
m_gpy = GPy.models.SparseGPRegression(X=X, Y=Y, Z=Z, kernel=GPy.kern.RBF(3, ARD=True, lengthscale=lengthscale, variance=variance), num_inducing=3)
m_gpy.likelihood.variance = noise_var
l_gpy = m_gpy.log_likelihood()
dtype = 'float64'
m = Model()
m.N = Variable()
m.X = Variable(shape=(m.N, 3))
m.Z = Variable(shape=(3, 3), initial_value=mx.nd.array(Z, dtype=dtype))
m.noise_var = Variable(transformation=PositiveTransformation(), initial_value=mx.nd.array(noise_var, dtype=dtype))
kernel = RBF(input_dim=3, ARD=True, variance=mx.nd.array(variance, dtype=dtype), lengthscale=mx.nd.array(lengthscale, dtype=dtype), dtype=dtype)
m.Y = SparseGPRegression.define_variable(X=m.X, kernel=kernel, noise_var=m.noise_var, inducing_inputs=m.Z, shape=(m.N, D), dtype=dtype)
m.Y.factor.sgp_log_pdf.jitter = 1e-8
observed = [m.X, m.Y]
infr = Inference(MAP(model=m, observed=observed), dtype=dtype)
loss, _ = infr.run(X=mx.nd.array(X, dtype=dtype), Y=mx.nd.array(Y, dtype=dtype))
l_mf = -loss
assert np.allclose(l_mf.asnumpy(), l_gpy)
def test_prediction(self):
np.random.seed(0)
X = np.random.rand(10, 3)
Y = np.random.rand(10, 1)
Z = np.random.rand(3, 3)
noise_var = np.random.rand(1)
lengthscale = np.random.rand(3)/10.
variance = np.random.rand(1)
Xt = np.random.rand(20, 3)
m_gpy = GPy.models.SparseGPRegression(X=X, Y=Y, Z=Z, kernel=GPy.kern.RBF(3, ARD=True, lengthscale=lengthscale, variance=variance), num_inducing=3)
m_gpy.likelihood.variance = noise_var
dtype = 'float64'
m = Model()
m.N = Variable()
m.X = Variable(shape=(m.N, 3))
m.Z = Variable(shape=(3, 3), initial_value=mx.nd.array(Z, dtype=dtype))
m.noise_var = Variable(transformation=PositiveTransformation(), initial_value=mx.nd.array(noise_var, dtype=dtype))
kernel = RBF(input_dim=3, ARD=True, variance=mx.nd.array(variance, dtype=dtype), lengthscale=mx.nd.array(lengthscale, dtype=dtype), dtype=dtype)
m.Y = SparseGPRegression.define_variable(X=m.X, kernel=kernel, noise_var=m.noise_var, inducing_inputs=m.Z, shape=(m.N, 1), dtype=dtype)
m.Y.factor.sgp_log_pdf.jitter = 1e-8
observed = [m.X, m.Y]
infr = Inference(MAP(model=m, observed=observed), dtype=dtype)
loss, _ = infr.run(X=mx.nd.array(X, dtype=dtype), Y=mx.nd.array(Y, dtype=dtype))
# noise_free, diagonal
mu_gpy, var_gpy = m_gpy.predict_noiseless(Xt)
infr2 = TransferInference(ModulePredictionAlgorithm(m, observed=[m.X], target_variables=[m.Y]), infr_params=infr.params, dtype=np.float64)
res = infr2.run(X=mx.nd.array(Xt, dtype=dtype))[0]
mu_mf, var_mf = res[0].asnumpy()[0], res[1].asnumpy()[0]
assert np.allclose(mu_gpy, mu_mf), (mu_gpy, mu_mf)
assert np.allclose(var_gpy[:,0], var_mf), (var_gpy[:,0], var_mf)
# noisy, diagonal
mu_gpy, var_gpy = m_gpy.predict(Xt)
infr2 = TransferInference(ModulePredictionAlgorithm(m, observed=[m.X], target_variables=[m.Y]), infr_params=infr.params, dtype=np.float64)
infr2.inference_algorithm.model.Y.factor.sgp_predict.noise_free = False
res = infr2.run(X=mx.nd.array(Xt, dtype=dtype))[0]
mu_mf, var_mf = res[0].asnumpy()[0], res[1].asnumpy()[0]
assert np.allclose(mu_gpy, mu_mf), (mu_gpy, mu_mf)
assert np.allclose(var_gpy[:,0], var_mf), (var_gpy[:,0], var_mf)
# noise_free, full_cov
mu_gpy, var_gpy = m_gpy.predict_noiseless(Xt, full_cov=True)
infr2 = TransferInference(ModulePredictionAlgorithm(m, observed=[m.X], target_variables=[m.Y]), infr_params=infr.params, dtype=np.float64)
infr2.inference_algorithm.model.Y.factor.sgp_predict.diagonal_variance = False
infr2.inference_algorithm.model.Y.factor.sgp_predict.noise_free = True
res = infr2.run(X=mx.nd.array(Xt, dtype=dtype))[0]
mu_mf, var_mf = res[0].asnumpy()[0], res[1].asnumpy()[0]
assert np.allclose(mu_gpy, mu_mf), (mu_gpy, mu_mf)
assert np.allclose(var_gpy, var_mf), (var_gpy, var_mf)
# noisy, full_cov
mu_gpy, var_gpy = m_gpy.predict(Xt, full_cov=True)
infr2 = TransferInference(ModulePredictionAlgorithm(m, observed=[m.X], target_variables=[m.Y]), infr_params=infr.params, dtype=np.float64)
infr2.inference_algorithm.model.Y.factor.sgp_predict.diagonal_variance = False
infr2.inference_algorithm.model.Y.factor.sgp_predict.noise_free = False
res = infr2.run(X=mx.nd.array(Xt, dtype=dtype))[0]
mu_mf, var_mf = res[0].asnumpy()[0], res[1].asnumpy()[0]
assert np.allclose(mu_gpy, mu_mf), (mu_gpy, mu_mf)
assert np.allclose(var_gpy, var_mf), (var_gpy, var_mf)
def test_sampling_prediction(self):
    """Smoke-test posterior sampling from a SparseGPRegression module.

    Builds matching GPy and MXFusion sparse-GP models on random data,
    fits the MXFusion model via MAP, swaps the module's prediction
    algorithm for a sampling-based one, and draws samples at the test
    inputs.  The sample values themselves are not yet verified (see the
    TODO at the end).
    """
    np.random.seed(0)  # deterministic fixtures
    X = np.random.rand(10, 3)   # training inputs
    Y = np.random.rand(10, 1)   # training targets
    Z = np.random.rand(3, 3)    # inducing inputs
    noise_var = np.random.rand(1)
    lengthscale = np.random.rand(3)/10.
    variance = np.random.rand(1)
    Xt = np.random.rand(20, 3)  # test inputs
    # Reference GPy model (constructed for parity with the sibling
    # tests; not used for assertions here).
    m_gpy = GPy.models.SparseGPRegression(X=X, Y=Y, Z=Z, kernel=GPy.kern.RBF(3, ARD=True, lengthscale=lengthscale, variance=variance), num_inducing=3)
    m_gpy.likelihood.variance = noise_var
    dtype = 'float64'
    # Equivalent MXFusion model with the same kernel hyperparameters.
    m = Model()
    m.N = Variable()
    m.X = Variable(shape=(m.N, 3))
    m.Z = Variable(shape=(3, 3), initial_value=mx.nd.array(Z, dtype=dtype))
    m.noise_var = Variable(transformation=PositiveTransformation(), initial_value=mx.nd.array(noise_var, dtype=dtype))
    kernel = RBF(input_dim=3, ARD=True, variance=mx.nd.array(variance, dtype=dtype), lengthscale=mx.nd.array(lengthscale, dtype=dtype), dtype=dtype)
    m.Y = SparseGPRegression.define_variable(X=m.X, kernel=kernel, noise_var=m.noise_var, inducing_inputs=m.Z, shape=(m.N, 1), dtype=dtype)
    m.Y.factor.sgp_log_pdf.jitter = 1e-8  # numerical stabiliser for the factorisation
    observed = [m.X, m.Y]
    infr = Inference(MAP(model=m, observed=observed), dtype=dtype)
    loss, _ = infr.run(X=mx.nd.array(X, dtype=dtype), Y=mx.nd.array(Y, dtype=dtype))
    # noise_free, diagonal
    infr_pred = TransferInference(ModulePredictionAlgorithm(model=m, observed=[m.X], target_variables=[m.Y], num_samples=5),
                                  infr_params=infr.params)
    gp = m.Y.factor
    # Replace the default (analytic) prediction algorithm with the
    # sampling-based one so infr_pred.run draws samples instead of
    # returning mean/variance.
    gp.attach_prediction_algorithms(
        targets=gp.output_names, conditionals=gp.input_names,
        algorithm=SparseGPRegressionSamplingPrediction(
            gp._module_graph, gp._extra_graphs[0], [gp._module_graph.X]),
        alg_name='sgp_predict')
    gp.sgp_predict.diagonal_variance = False  # sample with the full covariance
    gp.sgp_predict.jitter = 1e-6
    y_samples = infr_pred.run(X=mx.nd.array(Xt, dtype=dtype))[0].asnumpy()
    # TODO: Check the correctness of the sampling
| 46.470588
| 154
| 0.661899
| 1,169
| 7,900
| 4.326775
| 0.112062
| 0.069197
| 0.040925
| 0.012653
| 0.813167
| 0.788454
| 0.788454
| 0.788454
| 0.776987
| 0.752471
| 0
| 0.020218
| 0.198608
| 7,900
| 169
| 155
| 46.745562
| 0.778708
| 0.017468
| 0
| 0.68254
| 0
| 0
| 0.004513
| 0
| 0
| 0
| 0
| 0.005917
| 0.071429
| 1
| 0.02381
| false
| 0
| 0.095238
| 0
| 0.126984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a18b8847929d442afe840a57650ba173dd1412b3
| 3,046
|
py
|
Python
|
TSSystem/apps/graduation_design/models.py
|
LittleBai0606/TeachingSecretarySystem
|
c9067b83f8e1edaf06974db73b7cc47a5b49e0d4
|
[
"MIT"
] | null | null | null |
TSSystem/apps/graduation_design/models.py
|
LittleBai0606/TeachingSecretarySystem
|
c9067b83f8e1edaf06974db73b7cc47a5b49e0d4
|
[
"MIT"
] | 5
|
2020-06-05T18:13:28.000Z
|
2022-02-11T03:39:14.000Z
|
TSSystem/apps/graduation_design/models.py
|
WhiteBrownBottle/TeachingSecretarySystem
|
c9067b83f8e1edaf06974db73b7cc47a5b49e0d4
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.utils import timezone
from student.models import Student
from teacher.models import Teacher
# Create your models here.
class ModelFile(models.Model):
    """Document template (文档模板) uploaded for students to download.

    ``file_name`` is derived automatically from the uploaded file on
    save, so it always reflects the stored file.
    """
    file_name = models.CharField(blank=True, null=True, max_length=100, default='暂未命名', verbose_name=u'文件名称')
    file_url = models.FileField(blank=True, null=True, unique=True, upload_to='GradModelfile/', default='', verbose_name=u'文件路径')
    file_date = models.DateField(default=timezone.now, verbose_name=u'发布日期')

    class Meta:
        verbose_name = u'文档模板'
        verbose_name_plural = verbose_name

    def __str__(self):
        return self.file_name

    def save(self, *args, **kwargs):
        import os
        file_url = str(self.file_url)
        if file_url:
            # Use only the base name: str(self.file_url) includes the
            # 'GradModelfile/' upload prefix, which is a storage detail,
            # not part of the document's display name.
            self.file_name = os.path.basename(file_url)
        # When no file is attached, keep the existing/default name
        # ('暂未命名') instead of blanking it out.
        super(ModelFile, self).save(*args, **kwargs)
class OpeningReport(models.Model):
    """Opening report (开题报告) submitted by a student.

    One-to-one with the authoring student; many reports may share the
    same supervising teacher.
    """
    # NOTE(review): unlike ModelFile.file_url (a FileField), this is a
    # plain CharField holding a stored path/URL string.
    file_name = models.CharField(blank=True, null=True, max_length=100, default='暂未命名' , verbose_name=u'文件名称')
    file_url = models.CharField(blank=True, null=True, max_length=100, verbose_name=u'文件路径')
    file_date = models.DateField(default=timezone.now, verbose_name=u'上传日期')
    student_belong = models.OneToOneField(Student, blank=True, null=True, on_delete=models.CASCADE, verbose_name=u'创作学生')
    teacher_to = models.ForeignKey(Teacher, on_delete=models.CASCADE, verbose_name=u'指导老师')
    class Meta:
        verbose_name = u'开题报告'
        verbose_name_plural = verbose_name
    def __str__(self):
        # e.g. "[student: report name]"
        return '[%s: %s]' % (self.student_belong, self.file_name)
class MidtermReport(models.Model):
    """Midterm report (中期报告) submitted by a student.

    Same structure as OpeningReport: one-to-one with the authoring
    student, foreign key to the supervising teacher.
    """
    file_name = models.CharField(blank=True, null=True, max_length=100, default='暂未命名', verbose_name=u'文档名称')
    file_url = models.CharField(blank=True, null=True, max_length=100, verbose_name=u'文件路径')
    file_date = models.DateField(default=timezone.now, verbose_name=u'上传日期')
    student_belong = models.OneToOneField(Student, blank=True, null=True, on_delete=models.CASCADE, verbose_name=u'创作学生')
    teacher_to = models.ForeignKey(Teacher, on_delete=models.CASCADE, verbose_name=u'指导老师')
    class Meta:
        verbose_name = u'中期报告'
        verbose_name_plural = verbose_name
    def __str__(self):
        # e.g. "[student: report name]"
        return '[%s: %s]' % (self.student_belong, self.file_name)
class Dissertation(models.Model):
    """Graduation dissertation (毕业论文) submitted by a student."""
    file_name = models.CharField(blank=True, null=True, max_length=100, default='暂未命名', verbose_name=u'文档名称')
    file_url = models.CharField(blank=True, null=True, max_length=100, verbose_name=u'文件路径')
    file_date = models.DateField(default=timezone.now, verbose_name=u'上传日期')
    # NOTE(review): student_belong here lacks blank=True/null=True,
    # unlike OpeningReport/MidtermReport — the student is mandatory for
    # a dissertation; confirm this asymmetry is intentional.
    student_belong = models.OneToOneField(Student, on_delete=models.CASCADE, verbose_name=u'创作学生')
    teacher_to = models.ForeignKey(Teacher, on_delete=models.CASCADE, verbose_name=u'指导老师')
    class Meta:
        verbose_name = u'毕业论文'
        verbose_name_plural = verbose_name
    def __str__(self):
        # e.g. "[student: dissertation name]"
        return '[%s: %s]' % (self.student_belong, self.file_name)
| 32.063158
| 129
| 0.700591
| 414
| 3,046
| 4.929952
| 0.164251
| 0.161685
| 0.129348
| 0.083293
| 0.784909
| 0.77462
| 0.77462
| 0.77462
| 0.77462
| 0.753062
| 0
| 0.008337
| 0.173014
| 3,046
| 94
| 130
| 32.404255
| 0.801906
| 0.014773
| 0
| 0.6
| 0
| 0
| 0.048481
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.08
| 0.08
| 0.78
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
a1affdbf6cc8e4481db9f321e2c1f41aaf894d7e
| 49
|
py
|
Python
|
tf_tensor_dumper/__init__.py
|
samikama/tf_tensor_dumper
|
c6b91612e995c7f2de2b65aca09dd5c577f42013
|
[
"Apache-2.0"
] | null | null | null |
tf_tensor_dumper/__init__.py
|
samikama/tf_tensor_dumper
|
c6b91612e995c7f2de2b65aca09dd5c577f42013
|
[
"Apache-2.0"
] | null | null | null |
tf_tensor_dumper/__init__.py
|
samikama/tf_tensor_dumper
|
c6b91612e995c7f2de2b65aca09dd5c577f42013
|
[
"Apache-2.0"
] | null | null | null |
from .tensor_dumper import add_dumper, get_dumper
| 49
| 49
| 0.877551
| 8
| 49
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a1ca161a19ccdd01d456623a562c4d8d3337c3e6
| 111
|
py
|
Python
|
temboo/core/Library/Socrata/SODA/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Socrata/SODA/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Socrata/SODA/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Socrata.SODA.Query import Query, QueryInputSet, QueryResultSet, QueryChoreographyExecution
| 55.5
| 110
| 0.873874
| 11
| 111
| 8.818182
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063063
| 111
| 1
| 111
| 111
| 0.932692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a1cb3fed643d4be0bac1c59fbe716bfd17d2f116
| 37
|
py
|
Python
|
serverless_secrets/__init__.py
|
trek10inc/serverless-secrets-python
|
503bf75a587d9b58613c9dc04df0e97d6e131391
|
[
"MIT"
] | 5
|
2017-09-24T06:21:00.000Z
|
2020-12-19T07:32:48.000Z
|
serverless_secrets/__init__.py
|
imbi7py/serverless-secrets-python
|
503bf75a587d9b58613c9dc04df0e97d6e131391
|
[
"MIT"
] | 1
|
2017-10-25T14:30:31.000Z
|
2017-10-25T14:30:31.000Z
|
serverless_secrets/__init__.py
|
imbi7py/serverless-secrets-python
|
503bf75a587d9b58613c9dc04df0e97d6e131391
|
[
"MIT"
] | 3
|
2017-10-19T13:46:14.000Z
|
2020-12-19T07:35:11.000Z
|
from serverless_secrets.lib import *
| 18.5
| 36
| 0.837838
| 5
| 37
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a1e3cf9ccb63e9b8cccb6b6d13c283146df8f691
| 1,950
|
py
|
Python
|
src/nuclipy/banners.py
|
prasant-paudel/nuclei-python
|
53174821f93e4e5a48708b6ac832cf2f74bfa63d
|
[
"MIT"
] | 2
|
2022-01-06T10:59:22.000Z
|
2022-03-11T07:16:32.000Z
|
src/nuclipy/banners.py
|
prasant-paudel/nuclei-python
|
53174821f93e4e5a48708b6ac832cf2f74bfa63d
|
[
"MIT"
] | 1
|
2021-08-18T19:15:24.000Z
|
2021-12-25T14:49:50.000Z
|
src/nuclipy/banners.py
|
prasant-paudel/nuclei-python
|
53174821f93e4e5a48708b6ac832cf2f74bfa63d
|
[
"MIT"
] | 1
|
2021-07-26T07:03:23.000Z
|
2021-07-26T07:03:23.000Z
|
# ASCII-art banners for nuclipy.  Individual banners are separated by the
# literal line '---split---'; consumers split on it and pick one.
# NOTE(review): whitespace inside this literal is significant art — do
# not re-indent or auto-format it.
BANNERS = '''
___
____ __ _______/ (_)___ __ __
/ __ \/ / / / ___/ / / __ \/ / / /
/ / / / /_/ / /__/ / / /_/ / /_/ /
/_/ /_/\__,_/\___/_/_/ .___/\__, /
/_/ /____/
---split---
__ _
[ | (_)
_ .--. __ _ .---. | | __ _ .--. _ __
[ `.-. |[ | | | / /'`\] | | [ |[ '/'`\ \[ \ [ ]
| | | | | \_/ |,| \__. | | | | | \__/ | \ '/ /
[___||__]'.__.'_/'.___.'[___][___]| ;.__/[\_: /
[__| \__.'
---split---
▄▄ ▄ ▄▄ ▄▄ ▄▄▄▄▄▄▄ ▄▄▄ ▄▄▄ ▄▄▄▄▄▄▄ ▄▄ ▄▄
█ █ █ █ █ █ █ █ █ █ █ █ █ █ █
█ █▄█ █ █ █ █ █ █ █ █ ▄ █ █▄█ █
█ █ █▄█ █ ▄▄█ █ █ █ █▄█ █ █
█ ▄ █ █ █ █ █▄▄▄█ █ ▄▄▄█▄ ▄█
█ █ █ █ █ █▄▄█ █ █ █ █ █
█▄█ █▄▄█▄▄▄▄▄▄▄█▄▄▄▄▄▄▄█▄▄▄▄▄▄▄█▄▄▄█▄▄▄█ █▄▄▄█
---split---
_ _ _ __ _ _
_ _ _ _ __ | |(_)| '_ \| || |
| ' \ | || |/ _|| || || .__/ \_. |
|_||_| \_._|\__||_||_||_| |__/
---split---
╔╗
║║
╔═╗ ╔╗╔╗╔══╗║║ ╔╗╔══╗╔╗ ╔╗
║╔╗╗║║║║║╔═╝║║ ╠╣║╔╗║║║ ║║
║║║║║╚╝║║╚═╗║╚╗║║║╚╝║║╚═╝║
╚╝╚╝╚══╝╚══╝╚═╝╚╝║╔═╝╚═╗╔╝
║║ ╔═╝║
╚╝ ╚══╝
---split---
/ \---------------,
\_,| |
| nuclipy |
| ,-------------
\_/____________/
---split---
___ ___
(o o) (o o)
( V ) nuclipy ( V )
--m-m-------------m-m--
---split---
^ ^
(O,O)
( ) nuclipy
-"-"-------------
---split---
\\ =o)
(o> /\\
_(()_nuclipy_\_V_
// \\
\\
---split---
/~_______~\
.---------.
(| nuclipy |)
'---------'
\_~~~~~~~_/
'''
| 29.104478
| 54
| 0.135897
| 119
| 1,950
| 3.151261
| 0.252101
| 0.208
| 0.248
| 0.245333
| 0.168
| 0.117333
| 0.096
| 0.064
| 0.042667
| 0.042667
| 0
| 0
| 0.510256
| 1,950
| 67
| 55
| 29.104478
| 0.106806
| 0
| 0
| 0.166667
| 0
| 0.075758
| 0.990981
| 0.077454
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a1e76027227de03e172178b7cac6899461baf120
| 115
|
py
|
Python
|
project/__init__.py
|
jlazic/glog-sms-gateway
|
03fea3715470c19c757e85adf1778fbb086bf8ba
|
[
"MIT"
] | 1
|
2016-09-02T19:35:32.000Z
|
2016-09-02T19:35:32.000Z
|
project/__init__.py
|
jlazic/glog-sms-gateway
|
03fea3715470c19c757e85adf1778fbb086bf8ba
|
[
"MIT"
] | null | null | null |
project/__init__.py
|
jlazic/glog-sms-gateway
|
03fea3715470c19c757e85adf1778fbb086bf8ba
|
[
"MIT"
] | 1
|
2018-02-28T23:39:52.000Z
|
2018-02-28T23:39:52.000Z
|
from __future__ import absolute_import
# Expose the Celery app at package import time (standard Django+Celery
# project layout).  The ImportError is swallowed deliberately: the
# project must remain importable in environments where Celery is not
# installed or not yet configured.
try:
    from .celery import app as celery_app
except ImportError:
    pass
| 19.166667
| 41
| 0.782609
| 16
| 115
| 5.25
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191304
| 115
| 6
| 42
| 19.166667
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
62e0b1b4c36b47c4cbbe7f896e51e398dc21a7f1
| 22
|
py
|
Python
|
openacademy/__init__.py
|
Erzihark/openacademy-project
|
1731af91c55d1d77b8c41ed9b17a0760d1b79e68
|
[
"Apache-2.0"
] | null | null | null |
openacademy/__init__.py
|
Erzihark/openacademy-project
|
1731af91c55d1d77b8c41ed9b17a0760d1b79e68
|
[
"Apache-2.0"
] | null | null | null |
openacademy/__init__.py
|
Erzihark/openacademy-project
|
1731af91c55d1d77b8c41ed9b17a0760d1b79e68
|
[
"Apache-2.0"
] | null | null | null |
from . import model
| 5.5
| 19
| 0.681818
| 3
| 22
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 22
| 3
| 20
| 7.333333
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
62f8542f9fb7ec5bf18dc7494046c3e37c0e91d5
| 48
|
py
|
Python
|
string_h/reverse.py
|
hanlingzhi/PackageToPypi-Demo
|
fcc645279e78d9f0b8b5186f585da2ff41f85dcd
|
[
"MIT"
] | 1
|
2020-03-10T14:50:32.000Z
|
2020-03-10T14:50:32.000Z
|
string_h/reverse.py
|
hanlingzhi/PackageToPypi-Demo
|
fcc645279e78d9f0b8b5186f585da2ff41f85dcd
|
[
"MIT"
] | null | null | null |
string_h/reverse.py
|
hanlingzhi/PackageToPypi-Demo
|
fcc645279e78d9f0b8b5186f585da2ff41f85dcd
|
[
"MIT"
] | null | null | null |
def str_reverse(a: str) -> str:
    """Return *a* with its characters in reverse order."""
    return "".join(reversed(a))
| 16
| 28
| 0.604167
| 9
| 48
| 3.111111
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.166667
| 48
| 3
| 29
| 16
| 0.675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
1a0dd924b41ad3199947af991065f5bce166b6bc
| 202
|
py
|
Python
|
flax/error.py
|
chunkybanana/flax
|
16ae5432bee26fa5241259c2b5aec9cba2b33df0
|
[
"MIT"
] | null | null | null |
flax/error.py
|
chunkybanana/flax
|
16ae5432bee26fa5241259c2b5aec9cba2b33df0
|
[
"MIT"
] | null | null | null |
flax/error.py
|
chunkybanana/flax
|
16ae5432bee26fa5241259c2b5aec9cba2b33df0
|
[
"MIT"
] | null | null | null |
import sys
from prompt_toolkit import print_formatted_text, HTML
def error(msg, exit_status=1):
    """Print *msg* to stderr in red and terminate the process.

    Parameters
    ----------
    msg : str
        Message to display.  NOTE(review): it is interpolated into
        prompt_toolkit HTML unescaped, so '<' or '&' in *msg* will break
        rendering — escape upstream if user input can reach here.
    exit_status : int
        Process exit code (default 1).
    """
    print_formatted_text(HTML("<ansired>" + msg + "</ansired>"), file=sys.stderr)
    # sys.exit instead of the builtin exit(): the bare `exit` helper is
    # injected by the site module and is not guaranteed to exist in all
    # runtime configurations (e.g. `python -S`, embedded interpreters).
    sys.exit(exit_status)
| 25.25
| 81
| 0.737624
| 29
| 202
| 4.896552
| 0.62069
| 0.197183
| 0.253521
| 0.309859
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005714
| 0.133663
| 202
| 7
| 82
| 28.857143
| 0.805714
| 0
| 0
| 0
| 0
| 0
| 0.094059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1a0eea123487cb9d26c063f74b8aede0f84ef73c
| 1,047
|
py
|
Python
|
tests/test_extract.py
|
Mark-McAdam/kondoboard-etl
|
e3d36b87693f71a02f1d295c6b6f9291eb1c5006
|
[
"MIT"
] | 2
|
2020-05-14T19:53:08.000Z
|
2020-05-18T17:22:45.000Z
|
tests/test_extract.py
|
Mark-McAdam/kondoboard-etl
|
e3d36b87693f71a02f1d295c6b6f9291eb1c5006
|
[
"MIT"
] | null | null | null |
tests/test_extract.py
|
Mark-McAdam/kondoboard-etl
|
e3d36b87693f71a02f1d295c6b6f9291eb1c5006
|
[
"MIT"
] | 3
|
2020-05-27T18:11:10.000Z
|
2020-08-31T15:45:05.000Z
|
from src.app.extract import adzuna, jobsearcher, monster_scraper
import pandas as pd
# Canonical column schema every extractor must return.  Shared by all
# three tests so the expectations cannot drift apart (the list was
# previously triplicated verbatim).
_EXPECTED_COLUMNS = [
    "id",
    "post_url",
    "title",
    "title_keyword",
    "tags",
    "description",
    "company",
    "publication_date",
    "latitude",
    "longitude",
    "city",
    "state",
]


def test_adzuna():
    """adzuna() returns a DataFrame with the canonical columns, in order."""
    df = adzuna()
    assert list(df.columns) == _EXPECTED_COLUMNS


def test_jobsearcher():
    """jobsearcher() returns a DataFrame with the canonical columns, in order."""
    df = jobsearcher()
    assert list(df.columns) == _EXPECTED_COLUMNS


def test_monster():
    """monster_scraper() returns a DataFrame with the canonical columns, in order."""
    df = monster_scraper()
    assert list(df.columns) == _EXPECTED_COLUMNS
| 18.368421
| 64
| 0.468004
| 86
| 1,047
| 5.534884
| 0.372093
| 0.044118
| 0.07563
| 0.119748
| 0.710084
| 0.710084
| 0.710084
| 0.710084
| 0.710084
| 0.710084
| 0
| 0
| 0.387775
| 1,047
| 56
| 65
| 18.696429
| 0.74259
| 0
| 0
| 0.78
| 0
| 0
| 0.26361
| 0
| 0
| 0
| 0
| 0
| 0.06
| 1
| 0.06
| false
| 0
| 0.04
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c52ed76e903daa07b7f916c7ed6177d65109206d
| 83
|
py
|
Python
|
npnlp/__init__.py
|
msparapa/npnlp
|
9158f47def6e6583e662b913ae46be49dafca4f8
|
[
"MIT"
] | null | null | null |
npnlp/__init__.py
|
msparapa/npnlp
|
9158f47def6e6583e662b913ae46be49dafca4f8
|
[
"MIT"
] | null | null | null |
npnlp/__init__.py
|
msparapa/npnlp
|
9158f47def6e6583e662b913ae46be49dafca4f8
|
[
"MIT"
] | null | null | null |
from .npnlp import minimize, kkt_multipliers
from npnlp.release import __version__
| 27.666667
| 44
| 0.855422
| 11
| 83
| 6
| 0.727273
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108434
| 83
| 2
| 45
| 41.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c53947a6856398971cf84c83418ea06a15edf9b0
| 268
|
py
|
Python
|
src/security_webcam/__init__.py
|
hsuanhauliu/security-webcam
|
0f9a972002508074d8a7397e017c7438d2c3680a
|
[
"MIT"
] | 2
|
2019-08-13T12:49:33.000Z
|
2020-10-08T12:56:59.000Z
|
src/security_webcam/__init__.py
|
hsuanhauliu/security-webcam
|
0f9a972002508074d8a7397e017c7438d2c3680a
|
[
"MIT"
] | null | null | null |
src/security_webcam/__init__.py
|
hsuanhauliu/security-webcam
|
0f9a972002508074d8a7397e017c7438d2c3680a
|
[
"MIT"
] | null | null | null |
from security_webcam.camera_control import CameraControl
from security_webcam.motion_detector import MotionDetector
from security_webcam.parser import parse_inputs
from security_webcam.video_buffer import VideoBuffer, TemporaryBuffer
from security_webcam import utils
| 44.666667
| 69
| 0.902985
| 34
| 268
| 6.852941
| 0.529412
| 0.257511
| 0.386266
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078358
| 268
| 5
| 70
| 53.6
| 0.94332
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c54876e834d5ae416f203c11447a480c93df1d6d
| 56
|
py
|
Python
|
pyia/setup_package.py
|
alexji/pyia
|
12ca2636cfccaccdea1461101c7fb3dd2d0b57d4
|
[
"MIT"
] | 15
|
2018-04-24T17:14:26.000Z
|
2021-05-14T19:28:10.000Z
|
pyia/setup_package.py
|
alexji/pyia
|
12ca2636cfccaccdea1461101c7fb3dd2d0b57d4
|
[
"MIT"
] | 4
|
2019-03-11T22:59:36.000Z
|
2021-10-17T14:37:24.000Z
|
pyia/setup_package.py
|
alexji/pyia
|
12ca2636cfccaccdea1461101c7fb3dd2d0b57d4
|
[
"MIT"
] | 7
|
2018-04-24T04:15:34.000Z
|
2021-10-15T21:14:59.000Z
|
def get_package_data():
return {'pyia': ['data/*']}
| 18.666667
| 31
| 0.589286
| 7
| 56
| 4.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 56
| 2
| 32
| 28
| 0.659574
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
c56d25ef382780458c58a73ce6bd95341666226b
| 168
|
py
|
Python
|
pages/admin.py
|
mixnix/subject_rate
|
224fdc7c17afd972596c628bda65a384274ed4a1
|
[
"MIT"
] | null | null | null |
pages/admin.py
|
mixnix/subject_rate
|
224fdc7c17afd972596c628bda65a384274ed4a1
|
[
"MIT"
] | null | null | null |
pages/admin.py
|
mixnix/subject_rate
|
224fdc7c17afd972596c628bda65a384274ed4a1
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
admin.site.register(models.Review)
admin.site.register(models.Professor)
admin.site.register(models.CourseName)
| 21
| 38
| 0.821429
| 23
| 168
| 6
| 0.478261
| 0.195652
| 0.369565
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077381
| 168
| 7
| 39
| 24
| 0.890323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3d6a47b9828d84e08534381333da11fea5de687b
| 32,019
|
py
|
Python
|
snip-tensorflow/network.py
|
isabuster/snip
|
8e7644edd1f4dcca0f833666cf54474bcacf2aea
|
[
"MIT"
] | 1
|
2020-09-13T09:18:13.000Z
|
2020-09-13T09:18:13.000Z
|
snip-tensorflow/network.py
|
isabuster/snip
|
8e7644edd1f4dcca0f833666cf54474bcacf2aea
|
[
"MIT"
] | null | null | null |
snip-tensorflow/network.py
|
isabuster/snip
|
8e7644edd1f4dcca0f833666cf54474bcacf2aea
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from functools import reduce
from helpers import static_size
def load_network(
    datasource, arch, num_classes,
    initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
):
    """Construct and return the network instance named by *arch*.

    Parameters
    ----------
    datasource : str
        Dataset identifier, forwarded to architectures that size their
        input per dataset (e.g. 'tiny-imagenet').
    arch : str
        One of 'lenet300', 'lenet5', 'alexnet-v1', 'alexnet-v2',
        'vgg-c', 'vgg-d', 'vgg-like', 'resnet'.
    num_classes : int
        Number of output classes (the LeNet variants ignore it).
    initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap : str
        Initializer names for the before-/after-pruning weight sets.

    Raises
    ------
    ValueError
        If *arch* is not a known architecture name (previously this
        surfaced as an opaque KeyError from the dict lookup).
    """
    # Construction is deferred via lambdas so that only the requested
    # architecture's graph variables are ever created.
    networks = {
        'lenet300': lambda: LeNet300(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
        ),
        'lenet5': lambda: LeNet5(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap),
        'alexnet-v1': lambda: AlexNet(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
            datasource, num_classes, k=1),
        'alexnet-v2': lambda: AlexNet(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
            datasource, num_classes, k=2),
        'vgg-c': lambda: VGG(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
            datasource, num_classes, version='C'),
        'vgg-d': lambda: VGG(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
            datasource, num_classes, version='D'),
        'vgg-like': lambda: VGG(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
            datasource, num_classes, version='like'),
        'resnet': lambda: ResNet20_V1(
            initializer_w_bp, initializer_b_bp, initializer_w_ap, initializer_b_ap,
            datasource, num_classes)
    }
    if arch not in networks:
        raise ValueError('unknown architecture {!r}; expected one of {}'.format(
            arch, sorted(networks)))
    return networks[arch]()
def get_initializer(initializer, dtype):
    """Map an initializer name to a TensorFlow initializer object.

    Parameters
    ----------
    initializer : str
        'zeros' for a zeros initializer, 'vs' for variance scaling.
    dtype : tf.DType
        Dtype passed to the variance-scaling initializer.

    Raises
    ------
    NotImplementedError
        For any other name.  The rejected name is included in the
        message (the previous bare ``raise NotImplementedError`` gave
        the caller no hint which config value was wrong).
    """
    if initializer == 'zeros':
        return tf.zeros_initializer()
    elif initializer == 'vs':
        return tf.compat.v1.variance_scaling_initializer(dtype=dtype)
    else:
        raise NotImplementedError(
            'unsupported initializer: {!r}'.format(initializer))
class LeNet300(object):
    """LeNet-300-100 MLP (784-300-100-10) for 28x28x1 inputs.

    Two weight sets are created: ``weights_bp`` (scope 'bp',
    non-trainable) and ``weights_ap`` ('ap/' prefix, trainable copies
    initialised from the bp values).  NOTE(review): bp/ap presumably
    mean "before/after pruning" (this is the SNIP codebase) — confirm;
    the ``initializer_*_ap`` arguments are accepted but unused, since
    ap weights copy the bp initial values.
    """
    def __init__(self,
                 initializer_w_bp,
                 initializer_b_bp,
                 initializer_w_ap,
                 initializer_b_ap,
                 ):
        self.name = 'lenet300'
        self.input_dims = [28, 28, 1] # height, width, channel
        self.inputs = self.construct_inputs()
        # Non-trainable reference weights under scope 'bp'.
        self.weights_bp = self.construct_weights(initializer_w_bp, initializer_b_bp, False, 'bp')
        # Trainable copies sharing the bp initial values.
        self.weights_ap = {k: tf.Variable(self.weights_bp[k].initialized_value(), trainable=True, name='ap/'+k) for k in self.weights_bp}
        self.num_params = sum([static_size(v) for v in self.weights_ap.values()])
    def construct_inputs(self):
        """Placeholders: a batch of images and integer class labels."""
        return {
            'input': tf.compat.v1.placeholder(tf.float32, [None] + self.input_dims),
            'label': tf.compat.v1.placeholder(tf.int32, [None]),
        }
    def construct_weights(self, initializer_w, initializer_b, trainable, scope):
        """Create the 784-300-100-10 weight/bias variables under *scope*."""
        dtype = tf.float32
        w_params = {
            'initializer': get_initializer(initializer_w, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        b_params = {
            'initializer': get_initializer(initializer_b, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        weights = {}
        with tf.compat.v1.variable_scope(scope):
            weights['w1'] = tf.compat.v1.get_variable('w1', [784, 300], **w_params)
            weights['w2'] = tf.compat.v1.get_variable('w2', [300, 100], **w_params)
            weights['w3'] = tf.compat.v1.get_variable('w3', [100, 10], **w_params)
            weights['b1'] = tf.compat.v1.get_variable('b1', [300], **b_params)
            weights['b2'] = tf.compat.v1.get_variable('b2', [100], **b_params)
            weights['b3'] = tf.compat.v1.get_variable('b3', [10], **b_params)
        return weights
    def forward_pass(self, weights, inputs, is_train, trainable=True):
        """Return class logits for *inputs*.

        ``is_train`` and ``trainable`` are accepted for interface parity
        with the conv networks but are unused here (no batch norm).
        """
        # Flatten NHWC input to [batch, 784].
        inputs_flat = tf.reshape(inputs, [-1, reduce(lambda x, y: x*y, inputs.shape.as_list()[1:])])
        fc1 = tf.matmul(inputs_flat, weights['w1']) + weights['b1']
        fc1 = tf.nn.relu(fc1)
        fc2 = tf.matmul(fc1, weights['w2']) + weights['b2']
        fc2 = tf.nn.relu(fc2)
        fc3 = tf.matmul(fc2, weights['w3']) + weights['b3']
        return fc3
class LeNet5(object):
    """LeNet-5-style CNN (conv20-pool-conv50-pool-fc500-fc10) for 28x28x1.

    Same bp/ap dual-weight-set scheme as LeNet300: ``weights_bp`` is
    non-trainable, ``weights_ap`` holds trainable copies of its initial
    values; the ``initializer_*_ap`` arguments are accepted but unused.
    """
    def __init__(self,
                 initializer_w_bp,
                 initializer_b_bp,
                 initializer_w_ap,
                 initializer_b_ap,
                 ):
        self.name = 'lenet5'
        self.input_dims = [28, 28, 1] # height, width, channel
        self.inputs = self.construct_inputs()
        # Non-trainable reference weights under scope 'bp'.
        self.weights_bp = self.construct_weights(initializer_w_bp, initializer_b_bp, False, 'bp')
        # Trainable copies sharing the bp initial values.
        self.weights_ap = {k: tf.Variable(self.weights_bp[k].initialized_value(), trainable=True, name='ap/'+k) for k in self.weights_bp}
        self.num_params = sum([static_size(v) for v in self.weights_ap.values()])
    def construct_inputs(self):
        """Placeholders: a batch of images and integer class labels."""
        return {
            'input': tf.compat.v1.placeholder(tf.float32, [None] + self.input_dims),
            'label': tf.compat.v1.placeholder(tf.int32, [None]),
        }
    def construct_weights(self, initializer_w, initializer_b, trainable, scope):
        """Create conv (5x5x1x20, 5x5x20x50) and fc (800-500-10) variables."""
        dtype = tf.float32
        w_params = {
            'initializer': get_initializer(initializer_w, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        b_params = {
            'initializer': get_initializer(initializer_b, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        weights = {}
        with tf.compat.v1.variable_scope(scope):
            weights['w1'] = tf.compat.v1.get_variable('w1', [5, 5, 1, 20], **w_params)
            weights['w2'] = tf.compat.v1.get_variable('w2', [5, 5, 20, 50], **w_params)
            weights['w3'] = tf.compat.v1.get_variable('w3', [800, 500], **w_params)
            weights['w4'] = tf.compat.v1.get_variable('w4', [500, 10], **w_params)
            weights['b1'] = tf.compat.v1.get_variable('b1', [20], **b_params)
            weights['b2'] = tf.compat.v1.get_variable('b2', [50], **b_params)
            weights['b3'] = tf.compat.v1.get_variable('b3', [500], **b_params)
            weights['b4'] = tf.compat.v1.get_variable('b4', [10], **b_params)
        return weights
    def forward_pass(self, weights, inputs, is_train, trainable=True):
        """Return class logits for *inputs*.

        ``is_train`` and ``trainable`` are accepted for interface parity
        but unused here (no batch norm in this architecture).
        """
        # conv -> 2x2 max-pool, twice; VALID padding throughout.
        conv1 = tf.nn.conv2d(inputs, weights['w1'], [1, 1, 1, 1], 'VALID') + weights['b1']
        pool1 = tf.nn.max_pool(conv1, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')
        conv2 = tf.nn.conv2d(pool1, weights['w2'], [1, 1, 1, 1], 'VALID') + weights['b2']
        pool2 = tf.nn.max_pool(conv2, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')
        # Flatten to [batch, 800] for the fc stack.
        flatten = tf.reshape(pool2, [-1, reduce(lambda x, y: x*y, pool2.shape.as_list()[1:])])
        fc1 = tf.matmul(flatten, weights['w3']) + weights['b3']
        fc1 = tf.nn.relu(fc1)
        fc2 = tf.matmul(fc1, weights['w4']) + weights['b4'] # logits
        return fc2
class AlexNet(object):
    ''' Similar to Alexnet in terms of the total number of conv and fc layers.
    Conv layers:
        The size of kernels and the number of conv filters are the same as the original.
        Due to the smaller input size (CIFAR rather than IMAGENET) we use different strides.
    FC layers:
        The size of fc layers are controlled by k (multiplied by 1024).
        In the original Alexnet, k=4 making the size of largest fc layers to be 4096.

    Same bp/ap dual-weight-set scheme as the LeNet classes:
    ``weights_bp`` (scope 'bp') is non-trainable and ``weights_ap``
    holds trainable copies of its initial values; the
    ``initializer_*_ap`` arguments are accepted but unused.
    '''
    def __init__(self,
                 initializer_w_bp,
                 initializer_b_bp,
                 initializer_w_ap,
                 initializer_b_ap,
                 datasource,
                 num_classes,
                 k,
                 ):
        self.datasource = datasource
        self.num_classes = num_classes
        self.k = k  # fc width multiplier: fc layers have 1024*k units
        self.name = 'alexnet'
        # 64x64 colour input for tiny-imagenet, otherwise 32x32 (CIFAR).
        self.input_dims = [64, 64, 3] if self.datasource == 'tiny-imagenet' else [32, 32, 3] # h,w,c
        self.inputs = self.construct_inputs()
        # Non-trainable reference weights under scope 'bp'.
        self.weights_bp = self.construct_weights(initializer_w_bp, initializer_b_bp, False, 'bp')
        # Trainable copies sharing the bp initial values.  (The
        # comprehension's `k` has its own scope and does not clobber the
        # width multiplier parameter.)
        self.weights_ap = {k: tf.Variable(self.weights_bp[k].initialized_value(), trainable=True, name='ap/'+k) for k in self.weights_bp}
        self.num_params = sum([static_size(v) for v in self.weights_ap.values()])
    def construct_inputs(self):
        """Placeholders: a batch of images and integer class labels."""
        return {
            'input': tf.compat.v1.placeholder(tf.float32, [None] + self.input_dims),
            'label': tf.compat.v1.placeholder(tf.int32, [None]),
        }
    def construct_weights(self, initializer_w, initializer_b, trainable, scope):
        """Create the 5 conv + 3 fc weight/bias variables under *scope*."""
        dtype = tf.float32
        w_params = {
            'initializer': get_initializer(initializer_w, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        b_params = {
            'initializer': get_initializer(initializer_b, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        k = self.k
        weights = {}
        with tf.compat.v1.variable_scope(scope):
            weights['w1'] = tf.compat.v1.get_variable('w1', [11, 11, 3, 96], **w_params)
            weights['w2'] = tf.compat.v1.get_variable('w2', [5, 5, 96, 256], **w_params)
            weights['w3'] = tf.compat.v1.get_variable('w3', [3, 3, 256, 384], **w_params)
            weights['w4'] = tf.compat.v1.get_variable('w4', [3, 3, 384, 384], **w_params)
            weights['w5'] = tf.compat.v1.get_variable('w5', [3, 3, 384, 256], **w_params)
            weights['w6'] = tf.compat.v1.get_variable('w6', [256, 1024*k], **w_params)
            weights['w7'] = tf.compat.v1.get_variable('w7', [1024*k, 1024*k], **w_params)
            weights['w8'] = tf.compat.v1.get_variable('w8', [1024*k, self.num_classes], **w_params)
            weights['b1'] = tf.compat.v1.get_variable('b1', [96], **b_params)
            weights['b2'] = tf.compat.v1.get_variable('b2', [256], **b_params)
            weights['b3'] = tf.compat.v1.get_variable('b3', [384], **b_params)
            weights['b4'] = tf.compat.v1.get_variable('b4', [384], **b_params)
            weights['b5'] = tf.compat.v1.get_variable('b5', [256], **b_params)
            weights['b6'] = tf.compat.v1.get_variable('b6', [1024*k], **b_params)
            weights['b7'] = tf.compat.v1.get_variable('b7', [1024*k], **b_params)
            weights['b8'] = tf.compat.v1.get_variable('b8', [self.num_classes], **b_params)
        return weights
    def forward_pass(self, weights, inputs, is_train, trainable=True):
        """Return class logits; each layer is conv/fc -> batch norm -> relu.

        ``is_train`` feeds batch norm's ``training`` flag; ``trainable``
        controls whether the batch-norm parameters are trainable.
        """
        bn_params = {
            'training': is_train,
            'trainable': trainable,
        }
        # Larger first stride for the larger tiny-imagenet input.
        init_st = 4 if self.datasource == 'tiny-imagenet' else 2
        inputs = tf.nn.conv2d(inputs, weights['w1'], [1,init_st,init_st,1], 'SAME') + weights['b1']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        inputs = tf.nn.conv2d(inputs, weights['w2'], [1, 2, 2, 1], 'SAME') + weights['b2']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        inputs = tf.nn.conv2d(inputs, weights['w3'], [1, 2, 2, 1], 'SAME') + weights['b3']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        inputs = tf.nn.conv2d(inputs, weights['w4'], [1, 2, 2, 1], 'SAME') + weights['b4']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        inputs = tf.nn.conv2d(inputs, weights['w5'], [1, 2, 2, 1], 'SAME') + weights['b5']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        # Flatten for the fc stack.
        inputs = tf.reshape(inputs, [-1, reduce(lambda x, y: x*y, inputs.shape.as_list()[1:])])
        inputs = tf.matmul(inputs, weights['w6']) + weights['b6']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        inputs = tf.matmul(inputs, weights['w7']) + weights['b7']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        inputs = tf.matmul(inputs, weights['w8']) + weights['b8'] # logits
        return inputs
class VGG(object):
    '''
    Similar to the original VGG.
    Available models:
    - VGG-C
    - VGG-D
    - VGG-like
    Differences:
    The number of parameters in conv layers are the same as the original.
    The number of parameters in fc layers are reduced to 512 (4096 -> 512).
    The number of total parameters are different, not just because of the size of fc layers,
    but also due to the fact that the first fc layer receives 1x1 image rather than 7x7 image
    because the input is CIFAR not IMAGENET.
    No dropout is used. Instead, batch norm is used.
    Other references.
    (1) The original paper:
    - paper: https://arxiv.org/pdf/1409.1556.pdf
    - code: http://www.robots.ox.ac.uk/~vgg/research/very_deep/
    * Dropout between fc layers.
    * There is no BatchNorm.
    (2) VGG-like by Zagoruyko, adapted for CIFAR-10.
    - project and code: http://torch.ch/blog/2015/07/30/cifar.html
    * Differences to the original VGG-16 (1):
    - # of fc layers 3 -> 2, so there are 15 (learnable) layers in total.
    - size of fc layers 4096 -> 512.
    - use BatchNorm and add more Dropout.
    '''

    def __init__(self,
                 initializer_w_bp,
                 initializer_b_bp,
                 initializer_w_ap,
                 initializer_b_ap,
                 datasource,
                 num_classes,
                 version,
                 ):
        # NOTE(review): initializer_w_ap / initializer_b_ap are accepted but never
        # used in this class -- the 'ap' weights below are cloned from the initial
        # values of the 'bp' weights. Confirm this is intended.
        self.datasource = datasource
        self.num_classes = num_classes
        self.version = version  # 'C', 'D' or 'like'; selects the layer layout below
        self.name = 'VGG-{}'.format(version)
        # 64x64 inputs for tiny-imagenet, 32x32 (CIFAR-sized) otherwise.
        self.input_dims = [64, 64, 3] if self.datasource == 'tiny-imagenet' else [32, 32, 3] # h,w,c
        self.inputs = self.construct_inputs()
        # 'bp' weights are created non-trainable; 'ap' weights are trainable copies
        # starting from the same initial values.
        self.weights_bp = self.construct_weights(initializer_w_bp, initializer_b_bp, False, 'bp')
        self.weights_ap = {k: tf.Variable(self.weights_bp[k].initialized_value(), trainable=True, name='ap/'+k) for k in self.weights_bp}
        self.num_params = sum([static_size(v) for v in self.weights_ap.values()])

    def construct_inputs(self):
        """Return feed placeholders: an image batch and its integer labels."""
        return {
            'input': tf.compat.v1.placeholder(tf.float32, [None] + self.input_dims),
            'label': tf.compat.v1.placeholder(tf.int32, [None]),
        }

    def construct_weights(self, initializer_w, initializer_b, trainable, scope):
        """Create all conv/fc weights and biases under variable scope `scope`.

        Returns a dict keyed 'w1'..'w16' / 'b1'..'b16' (exact subset depends on
        self.version). Variables are added to the `self.name` collection so the
        whole model can be retrieved by collection name.
        """
        dtype = tf.float32
        w_params = {
            'initializer': get_initializer(initializer_w, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        b_params = {
            'initializer': get_initializer(initializer_b, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        weights = {}
        with tf.compat.v1.variable_scope(scope):
            # Conv blocks 1-2 (layers 1-4) are shared by every version.
            weights['w1'] = tf.compat.v1.get_variable('w1', [3, 3, 3, 64], **w_params)
            weights['w2'] = tf.compat.v1.get_variable('w2', [3, 3, 64, 64], **w_params)
            weights['w3'] = tf.compat.v1.get_variable('w3', [3, 3, 64, 128], **w_params)
            weights['w4'] = tf.compat.v1.get_variable('w4', [3, 3, 128, 128], **w_params)
            weights['b1'] = tf.compat.v1.get_variable('b1', [64], **b_params)
            weights['b2'] = tf.compat.v1.get_variable('b2', [64], **b_params)
            weights['b3'] = tf.compat.v1.get_variable('b3', [128], **b_params)
            weights['b4'] = tf.compat.v1.get_variable('b4', [128], **b_params)
            if self.version == 'C':
                # VGG-C: the last conv of each remaining block (w7/w10/w13) is 1x1.
                weights['w5'] = tf.compat.v1.get_variable('w5', [3, 3, 128, 256], **w_params)
                weights['w6'] = tf.compat.v1.get_variable('w6', [3, 3, 256, 256], **w_params)
                weights['w7'] = tf.compat.v1.get_variable('w7', [1, 1, 256, 256], **w_params)
                weights['w8'] = tf.compat.v1.get_variable('w8', [3, 3, 256, 512], **w_params)
                weights['w9'] = tf.compat.v1.get_variable('w9', [3, 3, 512, 512], **w_params)
                weights['w10'] = tf.compat.v1.get_variable('w10', [1, 1, 512, 512], **w_params)
                weights['w11'] = tf.compat.v1.get_variable('w11', [3, 3, 512, 512], **w_params)
                weights['w12'] = tf.compat.v1.get_variable('w12', [3, 3, 512, 512], **w_params)
                weights['w13'] = tf.compat.v1.get_variable('w13', [1, 1, 512, 512], **w_params)
                weights['b5'] = tf.compat.v1.get_variable('b5', [256], **b_params)
                weights['b6'] = tf.compat.v1.get_variable('b6', [256], **b_params)
                weights['b7'] = tf.compat.v1.get_variable('b7', [256], **b_params)
                weights['b8'] = tf.compat.v1.get_variable('b8', [512], **b_params)
                weights['b9'] = tf.compat.v1.get_variable('b9', [512], **b_params)
                weights['b10'] = tf.compat.v1.get_variable('b10', [512], **b_params)
                weights['b11'] = tf.compat.v1.get_variable('b11', [512], **b_params)
                weights['b12'] = tf.compat.v1.get_variable('b12', [512], **b_params)
                weights['b13'] = tf.compat.v1.get_variable('b13', [512], **b_params)
            elif self.version == 'D' or self.version == 'like':
                # VGG-D / VGG-like: every conv is 3x3.
                weights['w5'] = tf.compat.v1.get_variable('w5', [3, 3, 128, 256], **w_params)
                weights['w6'] = tf.compat.v1.get_variable('w6', [3, 3, 256, 256], **w_params)
                weights['w7'] = tf.compat.v1.get_variable('w7', [3, 3, 256, 256], **w_params)
                weights['w8'] = tf.compat.v1.get_variable('w8', [3, 3, 256, 512], **w_params)
                weights['w9'] = tf.compat.v1.get_variable('w9', [3, 3, 512, 512], **w_params)
                weights['w10'] = tf.compat.v1.get_variable('w10', [3, 3, 512, 512], **w_params)
                weights['w11'] = tf.compat.v1.get_variable('w11', [3, 3, 512, 512], **w_params)
                weights['w12'] = tf.compat.v1.get_variable('w12', [3, 3, 512, 512], **w_params)
                weights['w13'] = tf.compat.v1.get_variable('w13', [3, 3, 512, 512], **w_params)
                weights['b5'] = tf.compat.v1.get_variable('b5', [256], **b_params)
                weights['b6'] = tf.compat.v1.get_variable('b6', [256], **b_params)
                weights['b7'] = tf.compat.v1.get_variable('b7', [256], **b_params)
                weights['b8'] = tf.compat.v1.get_variable('b8', [512], **b_params)
                weights['b9'] = tf.compat.v1.get_variable('b9', [512], **b_params)
                weights['b10'] = tf.compat.v1.get_variable('b10', [512], **b_params)
                weights['b11'] = tf.compat.v1.get_variable('b11', [512], **b_params)
                weights['b12'] = tf.compat.v1.get_variable('b12', [512], **b_params)
                weights['b13'] = tf.compat.v1.get_variable('b13', [512], **b_params)
            # Fully-connected head; 512-wide instead of 4096 (see class docstring).
            weights['w14'] = tf.compat.v1.get_variable('w14', [512, 512], **w_params)
            weights['b14'] = tf.compat.v1.get_variable('b14', [512], **b_params)
            if not self.version == 'like':
                # VGG-C/D: two more fc layers; w16 is the classifier.
                weights['w15'] = tf.compat.v1.get_variable('w15', [512, 512], **w_params)
                weights['w16'] = tf.compat.v1.get_variable('w16', [512, self.num_classes], **w_params)
                weights['b15'] = tf.compat.v1.get_variable('b15', [512], **b_params)
                weights['b16'] = tf.compat.v1.get_variable('b16', [self.num_classes], **b_params)
            else:
                # VGG-like: a single classifier layer after w14.
                weights['w15'] = tf.compat.v1.get_variable('w15', [512, self.num_classes], **w_params)
                weights['b15'] = tf.compat.v1.get_variable('b15', [self.num_classes], **b_params)
        return weights

    def forward_pass(self, weights, inputs, is_train, trainable=True):
        """Build the VGG graph on `inputs` and return class logits (no softmax)."""
        def _conv_block(inputs, bn_params, filt, st=1):
            # conv -> bias -> batch norm -> ReLU, 'SAME' padding.
            inputs = tf.nn.conv2d(inputs, filt['w'], [1, st, st, 1], 'SAME') + filt['b']
            inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
            inputs = tf.nn.relu(inputs)
            return inputs
        bn_params = {
            'training': is_train,
            'trainable': trainable,
        }
        # Extra stride on the first conv for the larger tiny-imagenet inputs.
        init_st = 2 if self.datasource == 'tiny-imagenet' else 1
        inputs = _conv_block(inputs, bn_params, {'w': weights['w1'], 'b': weights['b1']}, init_st)
        inputs = _conv_block(inputs, bn_params, {'w': weights['w2'], 'b': weights['b2']})
        inputs = tf.nn.max_pool(inputs, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
        inputs = _conv_block(inputs, bn_params, {'w': weights['w3'], 'b': weights['b3']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w4'], 'b': weights['b4']})
        inputs = tf.nn.max_pool(inputs, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
        inputs = _conv_block(inputs, bn_params, {'w': weights['w5'], 'b': weights['b5']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w6'], 'b': weights['b6']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w7'], 'b': weights['b7']})
        inputs = tf.nn.max_pool(inputs, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
        inputs = _conv_block(inputs, bn_params, {'w': weights['w8'], 'b': weights['b8']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w9'], 'b': weights['b9']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w10'], 'b': weights['b10']})
        inputs = tf.nn.max_pool(inputs, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
        inputs = _conv_block(inputs, bn_params, {'w': weights['w11'], 'b': weights['b11']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w12'], 'b': weights['b12']})
        inputs = _conv_block(inputs, bn_params, {'w': weights['w13'], 'b': weights['b13']})
        inputs = tf.nn.max_pool(inputs, [1, 2, 2, 1], [1, 2, 2, 1], 'SAME')
        # The five 2x pools (plus init_st) must have reduced the image to 1x1,
        # so the first fc layer below sees exactly 512 features.
        assert reduce(lambda x, y: x*y, inputs.shape.as_list()[1:3]) == 1
        inputs = tf.reshape(inputs, [-1, reduce(lambda x, y: x*y, inputs.shape.as_list()[1:])])
        inputs = tf.matmul(inputs, weights['w14']) + weights['b14']
        inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
        inputs = tf.nn.relu(inputs)
        if not self.version == 'like':
            # VGG-C/D head: fc + bn + relu, then the classifier.
            inputs = tf.matmul(inputs, weights['w15']) + weights['b15']
            inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
            inputs = tf.nn.relu(inputs)
            inputs = tf.matmul(inputs, weights['w16']) + weights['b16']
        else:
            # VGG-like head: single classifier layer.
            inputs = tf.matmul(inputs, weights['w15']) + weights['b15']
        return inputs
class ResNet20_V1(object):
    """ResNet-20 (v1, post-activation) with a 16/32/64-channel layer layout.

    Three stages of three residual blocks each; stages 2 and 3 downsample by
    stride-2 convs with a 1x1 projection shortcut ('wsp*'/'bsp*').
    """

    def __init__(self,
                 initializer_w_bp,
                 initializer_b_bp,
                 initializer_w_ap,
                 initializer_b_ap,
                 datasource,
                 num_classes,
                 ):
        # NOTE(review): initializer_w_ap / initializer_b_ap are accepted but never
        # used in this class -- the 'ap' weights below are cloned from the initial
        # values of the 'bp' weights. Confirm this is intended.
        self.datasource = datasource
        self.num_classes = num_classes
        self.name = 'ResNet20-V1'
        # 64x64 inputs for tiny-imagenet, 32x32 (CIFAR-sized) otherwise.
        self.input_dims = [64, 64, 3] if self.datasource == 'tiny-imagenet' else [32, 32, 3] # h,w,c
        self.inputs = self.construct_inputs()
        # 'bp' weights are created non-trainable; 'ap' weights are trainable copies
        # starting from the same initial values.
        self.weights_bp = self.construct_weights(initializer_w_bp, initializer_b_bp, False, 'bp')
        self.weights_ap = {k: tf.Variable(self.weights_bp[k].initialized_value(), trainable=True, name='ap/'+k) for k in self.weights_bp}
        self.num_params = sum([static_size(v) for v in self.weights_ap.values()])

    def construct_inputs(self):
        """Return feed placeholders: an image batch and its integer labels."""
        return {
            'input': tf.compat.v1.placeholder(tf.float32, [None] + self.input_dims),
            'label': tf.compat.v1.placeholder(tf.int32, [None]),
        }

    def construct_weights(self, initializer_w, initializer_b, trainable, scope):
        """Create all conv/fc weights and biases under variable scope `scope`.

        Returns a dict: 'w1' stem conv, 'w2'..'w19' residual-block convs,
        'wsp1'/'wsp2' 1x1 projection shortcuts, 'wfc' classifier, and matching
        biases. Variables are added to the `self.name` collection.
        """
        dtype = tf.float32
        w_params = {
            'initializer': get_initializer(initializer_w, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        b_params = {
            'initializer': get_initializer(initializer_b, dtype),
            'dtype': dtype,
            'trainable': trainable,
            'collections': [self.name, tf.compat.v1.GraphKeys.GLOBAL_VARIABLES],
        }
        weights = {}
        with tf.compat.v1.variable_scope(scope):
            # Stem conv, then stage 1 (16 channels, w2-w7).
            weights['w1'] = tf.compat.v1.get_variable('w1', [3, 3, 3, 16], **w_params)
            weights['w2'] = tf.compat.v1.get_variable('w2', [3, 3, 16, 16], **w_params)
            weights['w3'] = tf.compat.v1.get_variable('w3', [3, 3, 16, 16], **w_params)
            weights['w4'] = tf.compat.v1.get_variable('w4', [3, 3, 16, 16], **w_params)
            weights['w5'] = tf.compat.v1.get_variable('w5', [3, 3, 16, 16], **w_params)
            weights['w6'] = tf.compat.v1.get_variable('w6', [3, 3, 16, 16], **w_params)
            weights['w7'] = tf.compat.v1.get_variable('w7', [3, 3, 16, 16], **w_params)
            # Stage 2 (32 channels, w8-w13) with 1x1 projection shortcut wsp1.
            weights['wsp1'] = tf.compat.v1.get_variable('wsp1', [1, 1, 16, 32], **w_params)
            weights['w8'] = tf.compat.v1.get_variable('w8', [3, 3, 16, 32], **w_params)
            weights['w9'] = tf.compat.v1.get_variable('w9', [3, 3, 32, 32], **w_params)
            weights['w10'] = tf.compat.v1.get_variable('w10', [3, 3, 32, 32], **w_params)
            weights['w11'] = tf.compat.v1.get_variable('w11', [3, 3, 32, 32], **w_params)
            weights['w12'] = tf.compat.v1.get_variable('w12', [3, 3, 32, 32], **w_params)
            weights['w13'] = tf.compat.v1.get_variable('w13', [3, 3, 32, 32], **w_params)
            # Stage 3 (64 channels, w14-w19) with 1x1 projection shortcut wsp2.
            weights['wsp2'] = tf.compat.v1.get_variable('wsp2', [1, 1, 32, 64], **w_params)
            weights['w14'] = tf.compat.v1.get_variable('w14', [3, 3, 32, 64], **w_params)
            weights['w15'] = tf.compat.v1.get_variable('w15', [3, 3, 64, 64], **w_params)
            weights['w16'] = tf.compat.v1.get_variable('w16', [3, 3, 64, 64], **w_params)
            weights['w17'] = tf.compat.v1.get_variable('w17', [3, 3, 64, 64], **w_params)
            weights['w18'] = tf.compat.v1.get_variable('w18', [3, 3, 64, 64], **w_params)
            weights['w19'] = tf.compat.v1.get_variable('w19', [3, 3, 64, 64], **w_params)
            # Classifier expects 1024 flattened features (4x4x64 after the 2x2
            # average pool on 32x32 inputs; see NOTE in forward_pass).
            weights['wfc'] = tf.compat.v1.get_variable('wfc', [1024, self.num_classes], **w_params)
            weights['b1'] = tf.compat.v1.get_variable('b1', [16], **b_params)
            weights['b2'] = tf.compat.v1.get_variable('b2', [16], **b_params)
            weights['b3'] = tf.compat.v1.get_variable('b3', [16], **b_params)
            weights['b4'] = tf.compat.v1.get_variable('b4', [16], **b_params)
            weights['b5'] = tf.compat.v1.get_variable('b5', [16], **b_params)
            weights['b6'] = tf.compat.v1.get_variable('b6', [16], **b_params)
            weights['b7'] = tf.compat.v1.get_variable('b7', [16], **b_params)
            weights['bsp1'] = tf.compat.v1.get_variable('bsp1', [32], **b_params)
            weights['b8'] = tf.compat.v1.get_variable('b8', [32], **b_params)
            weights['b9'] = tf.compat.v1.get_variable('b9', [32], **b_params)
            weights['b10'] = tf.compat.v1.get_variable('b10', [32], **b_params)
            weights['b11'] = tf.compat.v1.get_variable('b11', [32], **b_params)
            weights['b12'] = tf.compat.v1.get_variable('b12', [32], **b_params)
            weights['b13'] = tf.compat.v1.get_variable('b13', [32], **b_params)
            weights['bsp2'] = tf.compat.v1.get_variable('bsp2', [64], **b_params)
            weights['b14'] = tf.compat.v1.get_variable('b14', [64], **b_params)
            weights['b15'] = tf.compat.v1.get_variable('b15', [64], **b_params)
            weights['b16'] = tf.compat.v1.get_variable('b16', [64], **b_params)
            weights['b17'] = tf.compat.v1.get_variable('b17', [64], **b_params)
            weights['b18'] = tf.compat.v1.get_variable('b18', [64], **b_params)
            weights['b19'] = tf.compat.v1.get_variable('b19', [64], **b_params)
            weights['bfc'] = tf.compat.v1.get_variable('bfc', [self.num_classes], **b_params)
        return weights

    def forward_pass(self, weights, inputs, is_train, trainable=True):
        """Build the ResNet-20 graph on `inputs` and return class logits."""
        def _conv_block(inputs, bn_params, filt, st1=1, st2=1, subsampling=False, subsampling_filt={}):
            # Residual block: conv-bn-relu, conv-bn, add shortcut, relu.
            # Explicit 1-pixel padding on height/width (equivalent to 'SAME' for
            # 3x3 kernels at stride 1; at stride 2 it fixes the downsampled size).
            padding = [[0, 0], [1, 1], [1, 1], [0, 0]]
            shortcut = inputs
            inputs = tf.nn.conv2d(inputs, filt['c1'], [1, st1, st1, 1], padding) + filt['b1']
            inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
            inputs = tf.nn.relu(inputs)
            inputs = tf.nn.conv2d(inputs, filt['c2'], [1, st2, st2, 1], padding) + filt['b2']
            inputs = tf.compat.v1.layers.batch_normalization(inputs, **bn_params)
            if subsampling:
                # 1x1 stride-2 projection so the shortcut matches the new shape.
                shortcut = tf.nn.conv2d(shortcut, subsampling_filt['c'], [1, 2, 2, 1], 'VALID') + subsampling_filt['b']
            inputs = inputs + shortcut
            inputs = tf.nn.relu(inputs)
            return inputs
        bn_params = {
            'training': is_train,
            'trainable': trainable,
        }
        # 3 * 3 convolution layer
        # NOTE(review): no batch norm / ReLU after the stem conv here, unlike the
        # canonical ResNet v1 -- confirm this is intentional.
        inputs = tf.nn.conv2d(inputs, weights['w1'], [1, 1, 1, 1], 'SAME') + weights['b1']
        # Layer 1
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w2'], 'b1': weights['b2'], 'c2': weights['w3'], 'b2': weights['b3']},
            st1=1, st2=1)
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w4'], 'b1': weights['b4'], 'c2': weights['w5'], 'b2': weights['b5']},
            st1=1, st2=1)
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w6'], 'b1': weights['b6'], 'c2': weights['w7'], 'b2': weights['b7']},
            st1=1, st2=1)
        # Layer 2
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w8'], 'b1': weights['b8'], 'c2': weights['w9'], 'b2': weights['b9']},
            st1=2, st2=1, subsampling=True, subsampling_filt={'c': weights['wsp1'], 'b': weights['bsp1']})
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w10'], 'b1': weights['b10'], 'c2': weights['w11'], 'b2': weights['b11']},
            st1=1, st2=1)
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w12'], 'b1': weights['b12'], 'c2': weights['w13'], 'b2': weights['b13']},
            st1=1, st2=1)
        # Layer 3
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w14'], 'b1': weights['b14'], 'c2': weights['w15'], 'b2': weights['b15']},
            st1=2, st2=1, subsampling=True, subsampling_filt={'c': weights['wsp2'], 'b': weights['bsp2']})
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w16'], 'b1': weights['b16'], 'c2': weights['w17'], 'b2': weights['b17']},
            st1=1, st2=1)
        inputs = _conv_block(inputs, bn_params,
            {'c1': weights['w18'], 'b1': weights['b18'], 'c2': weights['w19'], 'b2': weights['b19']},
            st1=1, st2=1)
        # Average pooling + fully connected layer
        # NOTE(review): this is a 2x2 average pool, not global average pooling.
        # For 32x32 inputs it leaves 4x4x64 = 1024 features, matching wfc; for
        # tiny-imagenet (64x64 inputs) it would leave 4096 and the matmul below
        # would fail -- confirm whether this model is used with tiny-imagenet.
        inputs = tf.nn.avg_pool(inputs, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')
        inputs = tf.reshape(inputs, [-1, reduce(lambda x, y: x*y, inputs.shape.as_list()[1:])])
        inputs = tf.matmul(inputs, weights['wfc']) + weights['bfc'] # logits
        return inputs
| 55.015464
| 137
| 0.571817
| 4,240
| 32,019
| 4.153066
| 0.070519
| 0.074507
| 0.093134
| 0.093021
| 0.818786
| 0.791413
| 0.778693
| 0.764268
| 0.728094
| 0.6972
| 0
| 0.067881
| 0.255567
| 32,019
| 581
| 138
| 55.110155
| 0.670876
| 0.052531
| 0
| 0.553785
| 0
| 0
| 0.05765
| 0
| 0
| 0
| 0
| 0
| 0.001992
| 1
| 0.047809
| false
| 0.00996
| 0.005976
| 0.00996
| 0.103586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3d6dae3d660b4f079b286cd4dab43a1cb7235a05
| 258,287
|
py
|
Python
|
instances/passenger_demand/pas-20210422-1717-int1/31.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210422-1717-int1/31.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210422-1717-int1/31.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 19048
passenger_arriving = (
(2, 7, 5, 2, 8, 1, 2, 1, 2, 1, 0, 1, 0, 11, 2, 3, 4, 3, 0, 0, 0, 1, 0, 0, 0, 0), # 0
(10, 5, 4, 4, 7, 4, 2, 0, 1, 3, 0, 0, 0, 6, 8, 4, 3, 2, 3, 5, 3, 1, 1, 0, 0, 0), # 1
(8, 6, 11, 4, 2, 2, 4, 3, 3, 0, 1, 1, 0, 4, 4, 5, 3, 6, 1, 1, 0, 4, 0, 2, 0, 0), # 2
(6, 4, 3, 4, 3, 1, 1, 0, 2, 6, 1, 0, 0, 7, 5, 2, 6, 2, 3, 2, 4, 0, 0, 1, 0, 0), # 3
(5, 10, 2, 8, 0, 2, 6, 1, 1, 2, 0, 0, 0, 3, 5, 4, 2, 6, 2, 0, 0, 4, 4, 0, 1, 0), # 4
(11, 7, 5, 1, 6, 1, 3, 1, 2, 2, 1, 0, 0, 11, 6, 3, 5, 4, 4, 3, 3, 2, 2, 1, 0, 0), # 5
(9, 9, 2, 7, 11, 0, 1, 2, 3, 0, 3, 0, 0, 4, 4, 9, 1, 13, 3, 2, 2, 3, 2, 1, 1, 0), # 6
(7, 5, 5, 6, 8, 2, 4, 3, 2, 0, 0, 0, 0, 7, 5, 7, 3, 6, 5, 1, 2, 4, 3, 1, 0, 0), # 7
(9, 6, 8, 10, 7, 1, 1, 1, 3, 2, 2, 3, 0, 12, 3, 10, 5, 6, 6, 4, 0, 5, 1, 0, 0, 0), # 8
(7, 9, 12, 9, 4, 1, 5, 5, 3, 3, 2, 2, 0, 6, 12, 7, 6, 5, 7, 4, 3, 2, 2, 0, 0, 0), # 9
(9, 9, 10, 14, 6, 5, 6, 2, 2, 2, 0, 2, 0, 8, 6, 10, 5, 4, 2, 4, 1, 1, 4, 1, 1, 0), # 10
(6, 10, 5, 4, 4, 1, 4, 6, 5, 1, 1, 2, 0, 10, 10, 7, 4, 7, 2, 4, 4, 5, 1, 1, 2, 0), # 11
(11, 7, 8, 8, 3, 0, 4, 4, 7, 0, 0, 1, 0, 6, 9, 4, 6, 6, 5, 4, 4, 6, 3, 1, 2, 0), # 12
(7, 9, 5, 13, 7, 7, 2, 4, 1, 1, 2, 1, 0, 11, 6, 8, 5, 8, 3, 3, 6, 3, 1, 3, 1, 0), # 13
(2, 10, 8, 13, 6, 3, 2, 3, 2, 3, 1, 0, 0, 12, 9, 3, 7, 5, 3, 1, 1, 5, 1, 1, 1, 0), # 14
(6, 16, 9, 8, 8, 4, 2, 1, 3, 0, 3, 2, 0, 5, 5, 8, 4, 5, 7, 5, 4, 4, 5, 3, 0, 0), # 15
(10, 12, 6, 9, 8, 4, 6, 4, 2, 1, 1, 2, 0, 8, 13, 6, 6, 5, 5, 4, 4, 4, 3, 0, 0, 0), # 16
(6, 9, 9, 7, 6, 3, 2, 0, 5, 0, 1, 0, 0, 10, 6, 11, 6, 9, 4, 4, 3, 7, 3, 2, 2, 0), # 17
(9, 4, 9, 8, 8, 4, 1, 2, 6, 2, 0, 1, 0, 9, 10, 7, 3, 5, 7, 2, 2, 5, 3, 1, 0, 0), # 18
(8, 13, 15, 10, 8, 3, 3, 2, 0, 1, 1, 2, 0, 7, 13, 9, 3, 8, 4, 5, 0, 3, 1, 4, 2, 0), # 19
(14, 9, 11, 8, 8, 2, 2, 3, 5, 0, 2, 2, 0, 12, 14, 4, 5, 7, 5, 7, 1, 4, 4, 1, 0, 0), # 20
(8, 7, 5, 14, 7, 3, 3, 5, 4, 2, 3, 0, 0, 14, 6, 6, 5, 9, 7, 7, 3, 3, 2, 0, 3, 0), # 21
(16, 11, 12, 15, 4, 6, 2, 3, 5, 0, 3, 1, 0, 11, 14, 5, 7, 6, 9, 6, 1, 2, 4, 0, 0, 0), # 22
(24, 8, 8, 10, 4, 5, 3, 4, 2, 3, 3, 1, 0, 16, 8, 6, 2, 2, 3, 4, 2, 4, 4, 3, 0, 0), # 23
(11, 10, 6, 7, 8, 4, 3, 5, 4, 2, 2, 0, 0, 15, 10, 5, 9, 7, 5, 5, 4, 4, 2, 0, 1, 0), # 24
(9, 8, 8, 5, 8, 3, 3, 5, 3, 1, 2, 1, 0, 7, 9, 12, 5, 2, 1, 3, 4, 3, 2, 1, 0, 0), # 25
(7, 12, 12, 11, 9, 1, 8, 5, 3, 3, 2, 0, 0, 7, 8, 7, 5, 8, 12, 2, 4, 2, 1, 2, 0, 0), # 26
(6, 7, 6, 15, 7, 4, 5, 5, 2, 1, 0, 0, 0, 10, 8, 10, 6, 9, 4, 3, 2, 5, 3, 1, 0, 0), # 27
(11, 11, 11, 7, 7, 4, 2, 9, 5, 1, 1, 0, 0, 10, 11, 4, 5, 15, 8, 2, 1, 1, 1, 1, 1, 0), # 28
(7, 10, 7, 11, 9, 5, 4, 4, 3, 3, 1, 0, 0, 9, 8, 9, 9, 5, 1, 3, 1, 3, 5, 1, 2, 0), # 29
(15, 7, 8, 9, 6, 3, 4, 9, 4, 5, 1, 1, 0, 4, 12, 6, 8, 5, 5, 3, 3, 3, 2, 2, 0, 0), # 30
(9, 12, 12, 7, 9, 5, 2, 4, 3, 3, 3, 0, 0, 10, 5, 8, 5, 6, 7, 7, 2, 4, 4, 1, 0, 0), # 31
(5, 11, 5, 11, 5, 2, 4, 2, 6, 0, 2, 1, 0, 13, 8, 10, 6, 8, 3, 7, 3, 4, 1, 2, 0, 0), # 32
(11, 11, 8, 8, 11, 4, 10, 6, 4, 1, 2, 0, 0, 8, 12, 4, 10, 9, 5, 4, 3, 4, 7, 2, 0, 0), # 33
(6, 10, 9, 10, 5, 2, 4, 3, 6, 3, 0, 2, 0, 10, 10, 6, 6, 11, 5, 6, 6, 6, 3, 0, 2, 0), # 34
(7, 7, 8, 6, 7, 2, 6, 2, 6, 2, 3, 0, 0, 9, 8, 7, 3, 5, 7, 7, 5, 4, 1, 3, 1, 0), # 35
(3, 7, 9, 10, 5, 7, 2, 5, 6, 4, 0, 1, 0, 10, 9, 6, 6, 9, 2, 2, 0, 4, 2, 2, 1, 0), # 36
(10, 7, 8, 20, 3, 4, 2, 7, 3, 1, 2, 1, 0, 10, 6, 5, 4, 9, 5, 2, 6, 5, 3, 2, 1, 0), # 37
(8, 10, 9, 6, 7, 4, 5, 3, 2, 1, 0, 0, 0, 11, 6, 4, 6, 6, 10, 3, 2, 3, 5, 0, 2, 0), # 38
(9, 6, 3, 9, 6, 4, 2, 6, 6, 2, 2, 0, 0, 10, 12, 12, 4, 4, 0, 2, 2, 8, 2, 4, 0, 0), # 39
(11, 8, 7, 8, 5, 1, 3, 5, 7, 1, 0, 0, 0, 9, 6, 11, 8, 11, 2, 4, 4, 1, 2, 0, 1, 0), # 40
(9, 5, 15, 12, 7, 2, 3, 1, 1, 0, 0, 0, 0, 9, 15, 5, 6, 8, 2, 3, 5, 2, 1, 0, 2, 0), # 41
(14, 9, 4, 8, 7, 3, 2, 5, 3, 0, 2, 0, 0, 9, 3, 1, 3, 10, 4, 4, 3, 3, 3, 4, 0, 0), # 42
(10, 3, 6, 14, 10, 3, 2, 5, 4, 1, 2, 1, 0, 14, 7, 9, 8, 7, 8, 3, 5, 5, 2, 2, 0, 0), # 43
(11, 13, 9, 5, 9, 3, 6, 7, 6, 0, 0, 0, 0, 12, 8, 4, 2, 9, 7, 5, 1, 1, 3, 0, 0, 0), # 44
(12, 12, 4, 11, 9, 1, 1, 6, 1, 0, 1, 0, 0, 3, 6, 3, 8, 10, 4, 3, 2, 4, 6, 0, 0, 0), # 45
(6, 12, 5, 6, 5, 6, 3, 4, 3, 2, 1, 1, 0, 8, 5, 4, 3, 8, 4, 3, 1, 3, 7, 1, 1, 0), # 46
(14, 7, 10, 8, 9, 2, 1, 3, 6, 1, 1, 1, 0, 8, 6, 5, 5, 7, 5, 4, 2, 8, 2, 0, 2, 0), # 47
(11, 5, 8, 10, 6, 1, 3, 4, 5, 3, 0, 0, 0, 8, 6, 9, 6, 14, 7, 11, 5, 1, 2, 2, 1, 0), # 48
(9, 3, 9, 9, 6, 2, 5, 7, 5, 2, 1, 0, 0, 7, 8, 5, 6, 10, 4, 5, 4, 5, 2, 3, 6, 0), # 49
(9, 9, 11, 7, 9, 4, 2, 6, 8, 2, 0, 0, 0, 10, 7, 9, 8, 5, 4, 6, 3, 5, 1, 0, 2, 0), # 50
(9, 8, 12, 12, 11, 5, 5, 6, 5, 4, 0, 0, 0, 16, 6, 4, 8, 10, 6, 2, 3, 4, 4, 3, 0, 0), # 51
(4, 6, 9, 9, 7, 2, 3, 4, 3, 4, 3, 1, 0, 9, 10, 6, 2, 4, 8, 2, 0, 2, 4, 1, 0, 0), # 52
(9, 11, 6, 5, 7, 3, 4, 0, 4, 0, 2, 1, 0, 9, 12, 8, 4, 11, 4, 5, 0, 0, 1, 2, 1, 0), # 53
(5, 13, 8, 8, 6, 3, 2, 2, 4, 5, 2, 2, 0, 6, 5, 5, 7, 7, 5, 5, 0, 4, 2, 2, 0, 0), # 54
(16, 7, 8, 10, 7, 5, 1, 6, 6, 4, 2, 2, 0, 7, 11, 4, 5, 7, 4, 2, 1, 5, 4, 2, 0, 0), # 55
(11, 11, 8, 6, 5, 6, 2, 2, 4, 1, 3, 1, 0, 10, 10, 4, 7, 9, 5, 4, 2, 4, 1, 1, 1, 0), # 56
(13, 14, 3, 12, 7, 6, 4, 3, 2, 0, 0, 1, 0, 6, 13, 2, 10, 8, 6, 5, 5, 2, 4, 3, 1, 0), # 57
(3, 6, 6, 8, 12, 5, 3, 3, 5, 1, 1, 1, 0, 10, 2, 9, 5, 11, 6, 1, 1, 3, 2, 1, 1, 0), # 58
(6, 10, 6, 10, 7, 3, 3, 2, 4, 1, 1, 1, 0, 17, 9, 11, 6, 8, 4, 3, 0, 0, 3, 1, 0, 0), # 59
(10, 9, 12, 13, 4, 3, 6, 5, 7, 0, 0, 1, 0, 14, 7, 6, 6, 8, 5, 8, 4, 3, 2, 1, 2, 0), # 60
(13, 10, 5, 6, 6, 2, 1, 4, 4, 4, 2, 2, 0, 8, 4, 11, 5, 11, 3, 2, 1, 4, 2, 4, 0, 0), # 61
(9, 12, 5, 8, 10, 5, 3, 4, 4, 4, 4, 0, 0, 9, 9, 9, 4, 7, 7, 5, 1, 2, 1, 2, 1, 0), # 62
(17, 9, 7, 11, 6, 2, 4, 2, 3, 2, 1, 1, 0, 7, 9, 11, 5, 10, 5, 2, 1, 5, 5, 1, 2, 0), # 63
(8, 7, 7, 9, 9, 7, 6, 8, 3, 3, 3, 1, 0, 8, 6, 4, 4, 13, 9, 3, 1, 3, 5, 1, 0, 0), # 64
(16, 8, 10, 5, 9, 6, 7, 4, 2, 3, 3, 0, 0, 10, 6, 7, 4, 8, 4, 6, 1, 2, 3, 1, 1, 0), # 65
(8, 16, 13, 8, 5, 2, 3, 4, 5, 1, 1, 0, 0, 9, 16, 3, 5, 8, 4, 2, 2, 3, 6, 0, 0, 0), # 66
(7, 9, 9, 6, 7, 3, 2, 3, 4, 1, 2, 2, 0, 13, 9, 7, 6, 7, 0, 3, 2, 3, 4, 2, 1, 0), # 67
(11, 6, 9, 5, 5, 2, 1, 4, 2, 1, 0, 1, 0, 12, 7, 8, 6, 7, 6, 0, 1, 0, 1, 1, 1, 0), # 68
(6, 7, 8, 7, 5, 3, 2, 1, 4, 1, 0, 0, 0, 13, 13, 6, 5, 6, 2, 4, 3, 0, 2, 1, 1, 0), # 69
(9, 9, 8, 3, 9, 5, 0, 0, 2, 2, 1, 1, 0, 10, 7, 5, 4, 5, 3, 4, 2, 3, 1, 1, 0, 0), # 70
(5, 11, 10, 8, 7, 1, 3, 3, 5, 1, 2, 1, 0, 9, 8, 6, 3, 7, 3, 3, 5, 3, 4, 3, 0, 0), # 71
(8, 7, 5, 9, 6, 5, 7, 7, 4, 1, 0, 0, 0, 9, 5, 8, 1, 6, 5, 2, 4, 3, 2, 1, 1, 0), # 72
(11, 3, 8, 6, 13, 7, 7, 3, 2, 1, 2, 1, 0, 17, 12, 9, 1, 11, 7, 4, 2, 3, 1, 2, 0, 0), # 73
(8, 9, 7, 6, 10, 9, 4, 2, 4, 3, 0, 0, 0, 11, 6, 4, 3, 5, 3, 5, 1, 3, 0, 2, 1, 0), # 74
(16, 12, 3, 16, 11, 3, 1, 0, 6, 0, 0, 1, 0, 11, 8, 8, 7, 6, 5, 4, 3, 5, 6, 3, 1, 0), # 75
(14, 4, 9, 4, 5, 2, 7, 4, 4, 1, 1, 0, 0, 8, 9, 2, 3, 10, 6, 5, 0, 4, 2, 2, 1, 0), # 76
(6, 4, 4, 14, 8, 5, 5, 2, 4, 3, 2, 0, 0, 12, 8, 8, 8, 14, 2, 5, 4, 6, 3, 0, 1, 0), # 77
(3, 9, 9, 6, 8, 1, 3, 6, 8, 3, 1, 0, 0, 7, 5, 7, 4, 8, 2, 1, 0, 3, 3, 3, 2, 0), # 78
(5, 6, 11, 13, 4, 4, 1, 4, 5, 4, 3, 0, 0, 12, 6, 5, 8, 9, 10, 5, 5, 6, 2, 2, 3, 0), # 79
(15, 10, 6, 9, 12, 6, 2, 6, 2, 1, 1, 3, 0, 13, 9, 5, 6, 8, 4, 3, 5, 4, 2, 1, 1, 0), # 80
(13, 5, 6, 10, 11, 5, 4, 8, 3, 1, 0, 0, 0, 9, 6, 5, 3, 3, 5, 4, 1, 6, 1, 1, 0, 0), # 81
(14, 6, 6, 6, 9, 4, 5, 4, 3, 1, 0, 1, 0, 11, 9, 5, 6, 7, 4, 4, 2, 2, 2, 1, 0, 0), # 82
(8, 10, 10, 8, 6, 1, 5, 4, 6, 2, 2, 0, 0, 12, 12, 10, 4, 9, 5, 7, 2, 2, 6, 2, 0, 0), # 83
(12, 4, 7, 7, 8, 4, 4, 4, 1, 2, 0, 1, 0, 7, 12, 7, 0, 5, 5, 1, 1, 7, 4, 0, 0, 0), # 84
(10, 10, 2, 8, 6, 1, 2, 3, 2, 1, 4, 2, 0, 13, 8, 7, 4, 11, 8, 4, 3, 3, 1, 0, 0, 0), # 85
(14, 10, 4, 5, 7, 0, 4, 4, 8, 2, 0, 2, 0, 4, 12, 5, 4, 4, 1, 1, 4, 2, 3, 4, 0, 0), # 86
(8, 12, 6, 14, 7, 3, 5, 0, 4, 1, 2, 1, 0, 7, 10, 13, 5, 7, 8, 4, 3, 3, 3, 0, 0, 0), # 87
(13, 12, 10, 10, 6, 5, 3, 0, 5, 4, 0, 2, 0, 10, 6, 9, 2, 6, 4, 3, 4, 6, 2, 3, 1, 0), # 88
(3, 7, 10, 11, 7, 5, 5, 1, 2, 1, 0, 0, 0, 12, 9, 8, 2, 9, 7, 2, 4, 3, 2, 1, 1, 0), # 89
(15, 9, 8, 6, 6, 4, 4, 4, 6, 1, 2, 3, 0, 10, 3, 4, 5, 9, 7, 5, 2, 3, 2, 4, 1, 0), # 90
(9, 11, 8, 15, 9, 2, 3, 3, 4, 7, 1, 2, 0, 9, 16, 7, 9, 7, 3, 3, 6, 3, 2, 2, 0, 0), # 91
(13, 4, 5, 7, 3, 3, 3, 3, 6, 1, 5, 0, 0, 11, 3, 9, 1, 1, 2, 3, 0, 5, 1, 1, 1, 0), # 92
(9, 6, 4, 11, 6, 3, 2, 2, 3, 2, 1, 1, 0, 3, 4, 8, 4, 9, 2, 1, 3, 3, 5, 0, 0, 0), # 93
(15, 6, 8, 10, 7, 4, 2, 3, 5, 1, 0, 1, 0, 5, 5, 4, 5, 11, 5, 3, 2, 2, 1, 1, 0, 0), # 94
(9, 5, 7, 12, 7, 1, 6, 2, 4, 1, 3, 1, 0, 12, 6, 11, 2, 3, 3, 3, 3, 4, 1, 1, 0, 0), # 95
(7, 6, 6, 7, 6, 3, 4, 3, 4, 0, 1, 0, 0, 6, 9, 8, 3, 8, 5, 2, 3, 6, 1, 2, 2, 0), # 96
(9, 7, 10, 8, 13, 4, 3, 3, 5, 0, 4, 1, 0, 8, 10, 6, 5, 5, 8, 4, 3, 6, 4, 0, 1, 0), # 97
(10, 7, 11, 7, 3, 3, 4, 4, 4, 5, 2, 0, 0, 11, 8, 11, 2, 8, 3, 6, 2, 3, 4, 1, 0, 0), # 98
(15, 3, 6, 12, 7, 4, 3, 2, 5, 2, 2, 0, 0, 7, 9, 5, 4, 11, 1, 1, 1, 7, 3, 3, 0, 0), # 99
(8, 6, 7, 7, 9, 5, 4, 3, 2, 3, 1, 0, 0, 12, 7, 6, 6, 6, 3, 1, 2, 2, 1, 0, 0, 0), # 100
(7, 15, 7, 9, 10, 1, 8, 1, 4, 0, 2, 0, 0, 8, 6, 6, 5, 6, 0, 0, 0, 4, 5, 3, 1, 0), # 101
(13, 8, 9, 8, 3, 2, 4, 4, 5, 2, 4, 1, 0, 11, 5, 5, 6, 4, 6, 2, 0, 4, 1, 1, 1, 0), # 102
(11, 5, 9, 7, 4, 9, 6, 2, 1, 3, 1, 0, 0, 10, 5, 7, 3, 7, 6, 3, 1, 3, 4, 2, 0, 0), # 103
(10, 8, 8, 4, 9, 2, 6, 7, 7, 6, 0, 2, 0, 10, 8, 7, 9, 2, 3, 2, 4, 7, 1, 0, 0, 0), # 104
(13, 3, 5, 6, 11, 3, 4, 1, 4, 0, 2, 0, 0, 11, 8, 4, 1, 8, 7, 7, 4, 5, 2, 1, 0, 0), # 105
(9, 7, 9, 11, 5, 1, 5, 4, 3, 0, 0, 1, 0, 10, 10, 6, 5, 11, 1, 4, 3, 5, 0, 0, 2, 0), # 106
(9, 5, 10, 6, 10, 6, 3, 5, 4, 2, 0, 0, 0, 6, 10, 7, 7, 6, 3, 7, 2, 6, 4, 0, 1, 0), # 107
(9, 7, 9, 8, 4, 4, 4, 3, 2, 2, 1, 1, 0, 12, 10, 5, 8, 6, 5, 2, 1, 3, 3, 3, 3, 0), # 108
(7, 10, 7, 5, 13, 2, 7, 1, 4, 2, 2, 1, 0, 6, 9, 7, 2, 8, 2, 5, 2, 4, 5, 2, 1, 0), # 109
(17, 5, 8, 8, 6, 2, 1, 2, 3, 1, 2, 2, 0, 7, 11, 2, 3, 7, 4, 5, 3, 5, 0, 3, 1, 0), # 110
(9, 8, 11, 10, 6, 7, 1, 2, 4, 2, 0, 1, 0, 10, 7, 8, 4, 4, 3, 1, 3, 3, 5, 2, 1, 0), # 111
(10, 3, 9, 8, 7, 3, 0, 2, 4, 2, 0, 3, 0, 8, 8, 7, 5, 7, 6, 3, 0, 3, 3, 2, 0, 0), # 112
(6, 9, 6, 7, 9, 3, 3, 2, 9, 0, 6, 0, 0, 9, 10, 6, 7, 9, 1, 7, 2, 2, 2, 1, 0, 0), # 113
(9, 4, 5, 7, 6, 1, 0, 4, 2, 1, 0, 0, 0, 5, 6, 3, 7, 9, 3, 6, 0, 7, 2, 4, 2, 0), # 114
(10, 8, 6, 4, 2, 3, 5, 2, 3, 2, 2, 1, 0, 12, 10, 5, 2, 6, 1, 5, 3, 0, 2, 1, 1, 0), # 115
(14, 4, 8, 3, 11, 7, 3, 3, 10, 1, 0, 0, 0, 7, 8, 8, 4, 7, 1, 2, 0, 2, 3, 1, 0, 0), # 116
(13, 5, 8, 2, 5, 7, 1, 4, 6, 1, 1, 2, 0, 12, 14, 3, 2, 10, 5, 1, 2, 7, 6, 3, 1, 0), # 117
(7, 6, 4, 14, 4, 1, 6, 3, 2, 1, 0, 2, 0, 11, 6, 6, 5, 9, 4, 3, 3, 4, 1, 0, 0, 0), # 118
(11, 5, 10, 8, 9, 2, 2, 2, 2, 0, 2, 0, 0, 7, 6, 3, 5, 6, 4, 2, 2, 1, 3, 0, 0, 0), # 119
(9, 5, 11, 8, 7, 1, 6, 8, 0, 0, 1, 0, 0, 12, 7, 6, 3, 5, 4, 2, 1, 6, 8, 3, 1, 0), # 120
(5, 10, 8, 7, 8, 6, 5, 3, 4, 0, 0, 0, 0, 18, 7, 5, 3, 4, 4, 5, 4, 1, 3, 1, 0, 0), # 121
(6, 4, 9, 5, 12, 5, 2, 2, 6, 1, 0, 0, 0, 17, 8, 7, 4, 8, 2, 3, 0, 2, 4, 1, 1, 0), # 122
(3, 11, 7, 7, 7, 7, 5, 1, 4, 2, 1, 1, 0, 13, 4, 11, 4, 10, 2, 3, 1, 3, 6, 2, 1, 0), # 123
(17, 6, 8, 13, 6, 5, 3, 3, 1, 0, 1, 0, 0, 5, 12, 7, 3, 12, 3, 4, 6, 5, 5, 1, 2, 0), # 124
(9, 6, 7, 11, 3, 3, 2, 2, 4, 1, 1, 3, 0, 5, 8, 9, 2, 11, 4, 5, 2, 3, 1, 1, 0, 0), # 125
(10, 4, 6, 8, 3, 3, 6, 3, 0, 3, 0, 1, 0, 10, 11, 2, 5, 11, 5, 5, 2, 4, 3, 1, 0, 0), # 126
(9, 9, 13, 7, 3, 1, 1, 3, 3, 1, 1, 3, 0, 10, 5, 9, 5, 12, 2, 0, 1, 3, 0, 1, 0, 0), # 127
(14, 7, 9, 13, 7, 3, 2, 5, 7, 1, 2, 2, 0, 11, 4, 6, 4, 4, 6, 6, 2, 1, 1, 0, 0, 0), # 128
(9, 5, 11, 9, 6, 3, 3, 1, 4, 0, 0, 0, 0, 9, 3, 7, 3, 2, 2, 3, 1, 3, 4, 3, 0, 0), # 129
(8, 2, 6, 8, 6, 1, 5, 1, 4, 0, 1, 1, 0, 7, 7, 3, 6, 10, 3, 4, 5, 2, 1, 1, 1, 0), # 130
(10, 8, 10, 10, 6, 6, 5, 4, 0, 3, 3, 0, 0, 8, 11, 3, 6, 4, 8, 4, 2, 3, 0, 0, 0, 0), # 131
(8, 7, 5, 13, 7, 4, 2, 7, 1, 0, 1, 0, 0, 15, 8, 10, 4, 11, 6, 0, 1, 2, 2, 1, 1, 0), # 132
(16, 7, 4, 13, 4, 2, 0, 1, 2, 1, 1, 0, 0, 10, 4, 4, 6, 6, 5, 4, 2, 4, 2, 2, 1, 0), # 133
(9, 5, 4, 7, 14, 7, 4, 5, 2, 3, 2, 2, 0, 6, 10, 7, 4, 7, 2, 3, 6, 4, 1, 0, 1, 0), # 134
(11, 10, 9, 6, 12, 4, 3, 3, 3, 0, 1, 1, 0, 11, 6, 2, 4, 9, 7, 3, 0, 4, 3, 1, 0, 0), # 135
(6, 4, 9, 9, 7, 4, 4, 3, 3, 0, 2, 1, 0, 14, 10, 5, 3, 10, 3, 4, 2, 4, 1, 3, 1, 0), # 136
(12, 11, 4, 8, 2, 3, 5, 2, 7, 0, 1, 0, 0, 10, 7, 7, 3, 7, 5, 6, 2, 5, 2, 1, 1, 0), # 137
(7, 6, 5, 9, 5, 2, 2, 3, 5, 2, 1, 0, 0, 7, 7, 6, 4, 4, 2, 3, 2, 2, 5, 1, 1, 0), # 138
(7, 4, 11, 10, 9, 3, 1, 4, 1, 2, 0, 1, 0, 7, 5, 9, 4, 5, 4, 7, 6, 2, 4, 2, 2, 0), # 139
(11, 4, 7, 11, 10, 2, 3, 2, 4, 2, 2, 0, 0, 7, 11, 4, 2, 4, 3, 1, 1, 2, 2, 2, 0, 0), # 140
(6, 1, 5, 11, 7, 1, 2, 1, 2, 4, 0, 0, 0, 3, 5, 4, 3, 6, 5, 2, 2, 1, 2, 1, 0, 0), # 141
(4, 3, 4, 3, 8, 2, 2, 4, 3, 0, 1, 1, 0, 10, 11, 4, 9, 7, 1, 1, 3, 2, 2, 2, 2, 0), # 142
(7, 7, 7, 8, 7, 4, 6, 0, 3, 0, 1, 1, 0, 13, 12, 2, 5, 7, 2, 2, 0, 6, 2, 0, 0, 0), # 143
(8, 3, 8, 7, 9, 6, 2, 0, 3, 1, 2, 0, 0, 10, 10, 6, 4, 4, 4, 2, 4, 0, 3, 1, 1, 0), # 144
(15, 6, 3, 8, 7, 4, 2, 4, 6, 3, 2, 0, 0, 8, 5, 7, 1, 11, 2, 3, 1, 3, 3, 2, 0, 0), # 145
(7, 4, 10, 4, 10, 1, 1, 0, 6, 1, 1, 1, 0, 11, 7, 3, 4, 6, 3, 2, 3, 5, 2, 2, 0, 0), # 146
(13, 8, 11, 3, 8, 7, 2, 1, 2, 0, 2, 0, 0, 12, 8, 4, 4, 6, 1, 3, 1, 3, 1, 3, 1, 0), # 147
(7, 5, 10, 3, 7, 2, 3, 6, 2, 1, 1, 1, 0, 9, 8, 5, 4, 7, 3, 1, 0, 5, 2, 2, 0, 0), # 148
(10, 4, 7, 8, 4, 3, 3, 3, 2, 1, 0, 0, 0, 8, 7, 9, 0, 7, 3, 0, 1, 1, 4, 1, 0, 0), # 149
(11, 4, 2, 6, 6, 5, 4, 1, 3, 1, 0, 0, 0, 11, 3, 5, 5, 7, 4, 2, 2, 4, 1, 3, 0, 0), # 150
(11, 12, 7, 9, 5, 4, 1, 6, 4, 0, 1, 1, 0, 6, 7, 6, 4, 8, 4, 3, 4, 3, 4, 1, 0, 0), # 151
(10, 4, 7, 6, 10, 4, 2, 2, 6, 1, 0, 0, 0, 3, 8, 9, 3, 3, 2, 4, 4, 3, 3, 3, 1, 0), # 152
(11, 7, 8, 5, 8, 4, 4, 2, 4, 1, 0, 0, 0, 4, 5, 5, 5, 8, 4, 2, 2, 4, 3, 1, 1, 0), # 153
(12, 5, 7, 7, 5, 5, 0, 2, 2, 2, 0, 0, 0, 7, 8, 4, 4, 6, 2, 1, 5, 5, 2, 1, 3, 0), # 154
(8, 3, 5, 9, 4, 2, 1, 5, 3, 0, 1, 0, 0, 12, 7, 3, 5, 9, 4, 2, 1, 2, 4, 1, 1, 0), # 155
(8, 6, 8, 8, 5, 5, 7, 1, 5, 0, 1, 0, 0, 11, 10, 2, 2, 7, 6, 3, 2, 6, 2, 2, 0, 0), # 156
(6, 7, 5, 6, 6, 2, 2, 1, 3, 1, 0, 0, 0, 7, 7, 2, 4, 8, 2, 3, 3, 4, 3, 3, 0, 0), # 157
(8, 5, 7, 3, 7, 5, 3, 5, 3, 0, 0, 0, 0, 7, 2, 6, 8, 4, 2, 6, 1, 2, 2, 1, 0, 0), # 158
(13, 4, 9, 4, 7, 2, 4, 0, 2, 0, 2, 1, 0, 3, 9, 5, 3, 7, 4, 4, 1, 2, 4, 2, 1, 0), # 159
(8, 4, 2, 6, 5, 6, 1, 5, 2, 1, 2, 0, 0, 6, 7, 4, 3, 8, 2, 4, 1, 3, 3, 2, 0, 0), # 160
(8, 2, 7, 13, 3, 6, 5, 3, 2, 1, 0, 0, 0, 8, 3, 5, 3, 7, 3, 2, 1, 1, 3, 0, 0, 0), # 161
(6, 4, 10, 9, 7, 5, 2, 1, 2, 0, 0, 0, 0, 7, 9, 4, 3, 6, 2, 2, 2, 2, 3, 0, 0, 0), # 162
(2, 8, 2, 3, 12, 3, 5, 5, 0, 1, 1, 0, 0, 14, 3, 4, 2, 9, 4, 6, 2, 1, 1, 2, 0, 0), # 163
(8, 3, 8, 8, 6, 0, 2, 1, 2, 0, 2, 1, 0, 6, 6, 8, 1, 2, 3, 3, 3, 3, 0, 1, 0, 0), # 164
(6, 9, 5, 4, 4, 3, 0, 1, 3, 1, 1, 1, 0, 6, 3, 4, 3, 7, 2, 1, 3, 4, 0, 1, 1, 0), # 165
(6, 2, 5, 7, 8, 2, 0, 0, 3, 1, 3, 2, 0, 10, 8, 7, 2, 2, 8, 2, 1, 3, 3, 0, 0, 0), # 166
(3, 3, 5, 5, 9, 3, 4, 4, 4, 2, 0, 1, 0, 6, 5, 4, 4, 5, 0, 1, 0, 4, 0, 1, 0, 0), # 167
(7, 6, 8, 8, 8, 3, 2, 4, 2, 1, 1, 1, 0, 3, 2, 2, 4, 6, 3, 7, 3, 4, 1, 1, 0, 0), # 168
(12, 3, 7, 7, 3, 2, 0, 4, 2, 2, 2, 0, 0, 5, 5, 2, 5, 6, 0, 3, 2, 3, 1, 1, 0, 0), # 169
(9, 2, 4, 8, 4, 2, 2, 1, 3, 1, 0, 0, 0, 6, 4, 9, 8, 4, 0, 1, 2, 2, 0, 1, 1, 0), # 170
(9, 7, 4, 5, 7, 4, 0, 1, 6, 2, 0, 0, 0, 4, 3, 7, 2, 5, 2, 2, 1, 5, 1, 0, 0, 0), # 171
(3, 1, 7, 9, 4, 1, 0, 1, 4, 1, 1, 1, 0, 8, 5, 2, 0, 4, 1, 5, 2, 3, 3, 1, 0, 0), # 172
(5, 1, 7, 7, 2, 4, 1, 2, 3, 0, 0, 2, 0, 11, 5, 9, 2, 6, 3, 0, 1, 5, 4, 2, 0, 0), # 173
(1, 4, 3, 3, 3, 2, 1, 2, 0, 1, 0, 0, 0, 7, 4, 3, 3, 1, 1, 0, 0, 3, 1, 0, 0, 0), # 174
(6, 2, 4, 6, 4, 1, 0, 2, 2, 0, 1, 0, 0, 4, 6, 5, 0, 4, 2, 2, 0, 1, 0, 0, 0, 0), # 175
(8, 5, 1, 6, 5, 0, 1, 1, 0, 0, 1, 1, 0, 6, 5, 4, 1, 5, 1, 1, 2, 2, 2, 1, 0, 0), # 176
(1, 2, 2, 8, 1, 1, 2, 1, 1, 1, 0, 0, 0, 6, 4, 1, 2, 1, 1, 1, 3, 0, 0, 0, 0, 0), # 177
(6, 7, 5, 3, 1, 1, 0, 2, 2, 1, 0, 0, 0, 6, 4, 5, 3, 6, 4, 1, 0, 1, 2, 0, 0, 0), # 178
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 179
)
station_arriving_intensity = (
(5.020865578371768, 5.525288559693166, 5.211283229612507, 6.214667773863432, 5.554685607609612, 3.1386549320373387, 4.146035615373915, 4.653176172979423, 6.090099062168007, 3.9580150155223697, 4.205265163885603, 4.897915078306173, 5.083880212578363), # 0
(5.354327152019974, 5.890060694144759, 5.555346591330152, 6.625144253276616, 5.922490337474237, 3.3459835840425556, 4.419468941263694, 4.959513722905708, 6.492245326332909, 4.21898069227715, 4.483096135956131, 5.221216660814354, 5.419791647439855), # 1
(5.686723008979731, 6.253385170890979, 5.8980422855474135, 7.033987704664794, 6.288962973749744, 3.5524851145124448, 4.691818507960704, 5.264625247904419, 6.892786806877549, 4.478913775020546, 4.759823148776313, 5.543232652053055, 5.75436482820969), # 2
(6.016757793146562, 6.613820501936447, 6.238010869319854, 7.439576407532074, 6.652661676001902, 3.757340622585113, 4.962003641647955, 5.567301157494507, 7.290135160921093, 4.736782698426181, 5.0343484118273825, 5.862685684930461, 6.086272806254225), # 3
(6.343136148415981, 6.9699251992857745, 6.573892899703036, 7.840288641382569, 7.012144603796492, 3.9597312073986677, 5.2289436685084585, 5.866331861194915, 7.682702045582707, 4.991555897167679, 5.305574134590575, 6.178298392354764, 6.414188632939817), # 4
(6.66456271868351, 7.320257774943588, 6.9043289337525175, 8.234502685720393, 7.36596991669928, 4.158837968091214, 5.491557914725224, 6.160507768524592, 8.068899117981559, 5.242201805918663, 5.572402526547132, 6.488793407234148, 6.736785359632827), # 5
(6.979742147844666, 7.663376740914501, 7.227959528523866, 8.620596820049652, 7.712695774276043, 4.353842003800864, 5.7487657064812625, 6.4486192890024885, 8.447138035236815, 5.487688859352758, 5.833735797178282, 6.792893362476808, 7.052736037699606), # 6
(7.2873790797949685, 7.997840609203132, 7.543425241072635, 8.996949323874462, 8.050880336092554, 4.543924413665721, 5.999486369959585, 6.729456832147552, 8.815830454467644, 5.726985492143586, 6.088476155965268, 7.089320890990929, 7.360713718506519), # 7
(7.586178158429934, 8.322207891814099, 7.849366628454396, 9.361938476698928, 8.379081761714586, 4.7282662968238895, 6.2426392313431975, 7.001810807478725, 9.173388032793206, 5.959060138964774, 6.335525812389321, 7.376798625684702, 7.659391453419917), # 8
(7.874844027645085, 8.635037100752022, 8.144424247724704, 9.713942558027169, 8.69585821070791, 4.906048752413484, 6.47714361681512, 7.264471624514963, 9.518222427332674, 6.182881234489941, 6.573786975931678, 7.654049199466313, 7.947442293806162), # 9
(8.152081331335932, 8.934886748021516, 8.427238655939124, 10.051339847363288, 8.9997678426383, 5.076452879572607, 6.701918852558355, 7.516229692775211, 9.848745295205214, 6.397417213392714, 6.802161856073574, 7.919795245243952, 8.22353929103161), # 10
(8.416594713398005, 9.220315345627206, 8.696450410153215, 10.372508624211397, 9.289368817071534, 5.238659777439368, 6.915884264755916, 7.7558754217784145, 10.163368293529993, 6.601636510346719, 7.019552662296249, 8.17275939592581, 8.486355496462611), # 11
(8.667088817726812, 9.489881405573698, 8.95070006742254, 10.675827168075612, 9.563219293573377, 5.391850545151869, 7.1179591795908115, 7.982199221043521, 10.460503079426179, 6.794507560025572, 7.224861604080934, 8.411664284420068, 8.734563961465534), # 12
(8.902268288217876, 9.74214343986562, 9.188628184802662, 10.959673758460044, 9.819877431709601, 5.5352062818482235, 7.307062923246056, 8.193991500089481, 10.738561310012932, 6.974998797102904, 7.416990890908869, 8.63523254363492, 8.966837737406735), # 13
(9.120837768766716, 9.975659960507588, 9.408875319349146, 11.222426674868792, 10.05790139104599, 5.667908086666534, 7.482114821904661, 8.390042668435246, 10.995954642409421, 7.142078656252334, 7.594842732261284, 8.84218680647856, 9.181849875652563), # 14
(9.321501903268855, 10.188989479504217, 9.610082028117542, 11.462464196805985, 10.275849331148308, 5.789137058744912, 7.642034201749626, 8.569143135599756, 11.23109473373482, 7.29471557214749, 7.757319337619419, 9.031249705859171, 9.37827342756938), # 15
(9.5029653356198, 10.380690508860132, 9.790888868163425, 11.678164603775716, 10.472279411582333, 5.898074297221459, 7.785740388963976, 8.73008331110196, 11.442393241108286, 7.431877979461996, 7.9033229164645125, 9.20114387468494, 9.554781444523545), # 16
(9.663932709715075, 10.549321560579946, 9.949936396542352, 11.867906175282112, 10.645749791913838, 5.993900901234285, 7.9121527097307105, 8.871653604460818, 11.628261821648984, 7.552534312869467, 8.031755678277799, 9.350591945864055, 9.710046977881415), # 17
(9.803108669450204, 10.693441146668274, 10.08586517030988, 12.030067190829278, 10.794818631708589, 6.075797969921503, 8.020190490232851, 8.99264442519526, 11.787112132476096, 7.6556530070435365, 8.141519832540508, 9.478316552304715, 9.842743079009345), # 18
(9.919197858720699, 10.811607779129744, 10.197315746521578, 12.163025929921314, 10.918044090532366, 6.142946602421208, 8.108773056653394, 9.091846182824245, 11.917355830708779, 7.740202496657828, 8.231517588733878, 9.583040326915096, 9.951542799273696), # 19
(10.010904921422082, 10.902379969968962, 10.282928682233003, 12.265160672062354, 11.013984327950944, 6.194527897871518, 8.176819735175362, 9.168049286866717, 12.017404573466198, 7.805151216385958, 8.30065115633915, 9.66348590260339, 10.035119190040824), # 20
(10.076934501449866, 10.964316231190558, 10.341344534499719, 12.334849696756486, 11.081197503530088, 6.229722955410535, 8.223249851981759, 9.220044146841623, 12.085670017867521, 7.849467600901555, 8.34782274483756, 9.718375912277793, 10.092145302677078), # 21
(10.115991242699579, 10.995975074799144, 10.371203860377285, 12.370471283507836, 11.118241776835575, 6.247712874176367, 8.2469827332556, 9.246621172267915, 12.120563821031915, 7.872120084878242, 8.37193456371034, 9.74643298884649, 10.121294188548827), # 22
(10.13039336334264, 10.999723593964335, 10.374923182441702, 12.374930812757203, 11.127732056032597, 6.25, 8.249804002259339, 9.249493827160494, 12.124926234567901, 7.874792272519433, 8.37495803716174, 9.749897576588934, 10.125), # 23
(10.141012413034153, 10.997537037037038, 10.374314814814815, 12.374381944444446, 11.133107613614852, 6.25, 8.248253812636166, 9.2455, 12.124341666666666, 7.87315061728395, 8.37462457912458, 9.749086419753086, 10.125), # 24
(10.15140723021158, 10.993227023319616, 10.373113854595337, 12.373296039094651, 11.138364945594503, 6.25, 8.24519890260631, 9.237654320987655, 12.123186728395062, 7.869918838591678, 8.373963399426362, 9.747485139460448, 10.125), # 25
(10.161577019048034, 10.986859396433472, 10.371336762688616, 12.37168544238683, 11.143503868421105, 6.25, 8.240686718308721, 9.226104938271606, 12.1214762345679, 7.865150708733425, 8.372980483850855, 9.745115683584821, 10.125), # 26
(10.171520983716636, 10.978499999999999, 10.369, 12.369562499999999, 11.148524198544214, 6.25, 8.234764705882354, 9.211, 12.119225, 7.858899999999999, 8.371681818181818, 9.742, 10.125), # 27
(10.181238328390501, 10.968214677640603, 10.366120027434842, 12.366939557613168, 11.153425752413401, 6.25, 8.22748031146615, 9.192487654320988, 12.116447839506172, 7.851220484682213, 8.370073388203018, 9.73816003657979, 10.125), # 28
(10.19072825724275, 10.95606927297668, 10.362713305898492, 12.36382896090535, 11.15820834647822, 6.25, 8.218880981199066, 9.170716049382715, 12.113159567901235, 7.842165935070874, 8.368161179698216, 9.733617741197987, 10.125), # 29
(10.199989974446497, 10.94212962962963, 10.358796296296296, 12.360243055555555, 11.162871797188236, 6.25, 8.209014161220043, 9.145833333333332, 12.109375, 7.83179012345679, 8.365951178451178, 9.728395061728394, 10.125), # 30
(10.209022684174858, 10.926461591220852, 10.354385459533608, 12.356194187242798, 11.167415920993008, 6.25, 8.19792729766804, 9.117987654320988, 12.105108950617284, 7.820146822130773, 8.363449370245666, 9.722513946044812, 10.125), # 31
(10.217825590600954, 10.909131001371742, 10.349497256515773, 12.35169470164609, 11.171840534342095, 6.25, 8.185667836681999, 9.087327160493828, 12.100376234567902, 7.807289803383631, 8.360661740865444, 9.715996342021034, 10.125), # 32
(10.226397897897897, 10.890203703703703, 10.344148148148149, 12.346756944444444, 11.176145453685063, 6.25, 8.172283224400871, 9.054, 12.095191666666667, 7.793272839506173, 8.357594276094275, 9.708864197530863, 10.125), # 33
(10.23473881023881, 10.869745541838133, 10.338354595336076, 12.341393261316872, 11.180330495471466, 6.25, 8.15782090696361, 9.018154320987653, 12.089570061728397, 7.778149702789209, 8.354252961715924, 9.701139460448102, 10.125), # 34
(10.242847531796807, 10.847822359396433, 10.332133058984912, 12.335615997942385, 11.18439547615087, 6.25, 8.142328330509159, 8.979938271604938, 12.083526234567902, 7.761974165523548, 8.350643783514153, 9.692844078646548, 10.125), # 35
(10.250723266745005, 10.824499999999999, 10.3255, 12.3294375, 11.188340212172836, 6.25, 8.12585294117647, 8.9395, 12.077074999999999, 7.7448, 8.346772727272727, 9.684000000000001, 10.125), # 36
(10.258365219256524, 10.799844307270233, 10.318471879286694, 12.322870113168724, 11.192164519986921, 6.25, 8.108442185104494, 8.896987654320988, 12.070231172839506, 7.726680978509374, 8.34264577877541, 9.674629172382259, 10.125), # 37
(10.265772593504476, 10.773921124828533, 10.311065157750342, 12.315926183127573, 11.19586821604269, 6.25, 8.09014350843218, 8.85254938271605, 12.063009567901235, 7.707670873342479, 8.33826892380596, 9.664753543667125, 10.125), # 38
(10.272944593661986, 10.746796296296296, 10.303296296296297, 12.308618055555556, 11.199451116789703, 6.25, 8.071004357298476, 8.806333333333333, 12.055425000000001, 7.687823456790124, 8.333648148148148, 9.654395061728394, 10.125), # 39
(10.279880423902163, 10.718535665294924, 10.295181755829903, 12.300958076131687, 11.202913038677519, 6.25, 8.05107217784233, 8.758487654320989, 12.047492283950618, 7.667192501143119, 8.328789437585733, 9.643575674439873, 10.125), # 40
(10.286579288398128, 10.689205075445816, 10.286737997256516, 12.29295859053498, 11.206253798155702, 6.25, 8.030394416202695, 8.709160493827161, 12.0392262345679, 7.645831778692272, 8.323698777902482, 9.632317329675354, 10.125), # 41
(10.293040391323, 10.658870370370371, 10.277981481481483, 12.284631944444445, 11.209473211673808, 6.25, 8.009018518518518, 8.6585, 12.030641666666668, 7.623795061728395, 8.318382154882155, 9.620641975308642, 10.125), # 42
(10.299262936849892, 10.627597393689987, 10.268928669410151, 12.275990483539095, 11.212571095681403, 6.25, 7.98699193092875, 8.606654320987655, 12.021753395061728, 7.601136122542296, 8.312845554308517, 9.608571559213535, 10.125), # 43
(10.305246129151927, 10.595451989026063, 10.259596021947875, 12.267046553497943, 11.215547266628045, 6.25, 7.964362099572339, 8.553771604938273, 12.0125762345679, 7.577908733424783, 8.307094961965332, 9.596128029263832, 10.125), # 44
(10.310989172402216, 10.5625, 10.25, 12.2578125, 11.218401540963296, 6.25, 7.9411764705882355, 8.5, 12.003124999999999, 7.554166666666667, 8.301136363636363, 9.583333333333332, 10.125), # 45
(10.31649127077388, 10.528807270233196, 10.240157064471878, 12.24830066872428, 11.221133735136716, 6.25, 7.917482490115388, 8.445487654320988, 11.993414506172838, 7.529963694558756, 8.294975745105374, 9.57020941929584, 10.125), # 46
(10.321751628440035, 10.49443964334705, 10.230083676268862, 12.238523405349794, 11.223743665597867, 6.25, 7.893327604292747, 8.390382716049382, 11.983459567901235, 7.505353589391861, 8.288619092156129, 9.55677823502515, 10.125), # 47
(10.326769449573796, 10.459462962962963, 10.219796296296296, 12.228493055555557, 11.22623114879631, 6.25, 7.868759259259259, 8.334833333333334, 11.973275000000001, 7.4803901234567896, 8.28207239057239, 9.543061728395061, 10.125), # 48
(10.331543938348286, 10.42394307270233, 10.209311385459534, 12.218221965020577, 11.228596001181607, 6.25, 7.8438249011538765, 8.278987654320987, 11.96287561728395, 7.455127069044353, 8.275341626137923, 9.529081847279379, 10.125), # 49
(10.336074298936616, 10.387945816186559, 10.198645404663925, 12.207722479423868, 11.230838039203315, 6.25, 7.81857197611555, 8.222993827160494, 11.9522762345679, 7.429618198445358, 8.268432784636488, 9.514860539551899, 10.125), # 50
(10.34035973551191, 10.351537037037037, 10.187814814814814, 12.197006944444444, 11.232957079310998, 6.25, 7.793047930283224, 8.167, 11.941491666666668, 7.403917283950617, 8.261351851851853, 9.50041975308642, 10.125), # 51
(10.344399452247279, 10.314782578875173, 10.176836076817558, 12.186087705761317, 11.234952937954214, 6.25, 7.767300209795852, 8.111154320987653, 11.930536728395062, 7.3780780978509375, 8.254104813567777, 9.485781435756746, 10.125), # 52
(10.348192653315843, 10.27774828532236, 10.165725651577505, 12.174977109053497, 11.23682543158253, 6.25, 7.741376260792383, 8.055604938271605, 11.919426234567903, 7.3521544124371285, 8.246697655568026, 9.470967535436671, 10.125), # 53
(10.351738542890716, 10.2405, 10.154499999999999, 12.1636875, 11.238574376645502, 6.25, 7.715323529411765, 8.000499999999999, 11.908175, 7.3262, 8.239136363636362, 9.456, 10.125), # 54
(10.355036325145022, 10.203103566529492, 10.143175582990398, 12.152231224279834, 11.24019958959269, 6.25, 7.689189461792948, 7.945987654320987, 11.896797839506172, 7.300268632830361, 8.231426923556553, 9.44090077732053, 10.125), # 55
(10.358085204251871, 10.165624828532236, 10.131768861454047, 12.140620627572016, 11.241700886873659, 6.25, 7.663021504074881, 7.892216049382716, 11.885309567901235, 7.274414083219022, 8.223575321112358, 9.425691815272062, 10.125), # 56
(10.360884384384383, 10.12812962962963, 10.120296296296297, 12.128868055555555, 11.243078084937967, 6.25, 7.636867102396514, 7.839333333333334, 11.873725, 7.24869012345679, 8.215587542087542, 9.410395061728394, 10.125), # 57
(10.36343306971568, 10.090683813443073, 10.108774348422497, 12.116985853909464, 11.244331000235174, 6.25, 7.610773702896797, 7.787487654320987, 11.862058950617284, 7.223150525834477, 8.20746957226587, 9.395032464563329, 10.125), # 58
(10.36573046441887, 10.053353223593964, 10.097219478737998, 12.104986368312757, 11.245459449214845, 6.25, 7.584788751714678, 7.736827160493827, 11.850326234567902, 7.197849062642891, 8.1992273974311, 9.379625971650663, 10.125), # 59
(10.367775772667077, 10.016203703703704, 10.085648148148147, 12.092881944444445, 11.246463248326537, 6.25, 7.558959694989106, 7.6875, 11.838541666666668, 7.172839506172839, 8.190867003367003, 9.364197530864198, 10.125), # 60
(10.369568198633415, 9.97930109739369, 10.0740768175583, 12.080684927983539, 11.247342214019811, 6.25, 7.533333978859033, 7.639654320987654, 11.826720061728395, 7.148175628715135, 8.182394375857339, 9.348769090077733, 10.125), # 61
(10.371106946491004, 9.942711248285322, 10.062521947873801, 12.068407664609055, 11.248096162744234, 6.25, 7.507959049463406, 7.5934382716049384, 11.814876234567901, 7.123911202560586, 8.17381550068587, 9.333362597165067, 10.125), # 62
(10.37239122041296, 9.9065, 10.051, 12.056062500000001, 11.248724910949356, 6.25, 7.482882352941176, 7.549, 11.803025, 7.100099999999999, 8.165136363636364, 9.318, 10.125), # 63
(10.373420224572397, 9.870733196159122, 10.039527434842249, 12.043661779835391, 11.249228275084748, 6.25, 7.458151335431292, 7.506487654320988, 11.791181172839506, 7.076795793324188, 8.156362950492579, 9.302703246456334, 10.125), # 64
(10.374193163142438, 9.835476680384087, 10.0281207133059, 12.031217849794238, 11.249606071599967, 6.25, 7.433813443072703, 7.466049382716049, 11.779359567901235, 7.054052354823959, 8.147501247038285, 9.287494284407863, 10.125), # 65
(10.374709240296196, 9.800796296296298, 10.016796296296297, 12.018743055555555, 11.249858116944573, 6.25, 7.409916122004357, 7.427833333333334, 11.767575, 7.031923456790123, 8.138557239057238, 9.272395061728396, 10.125), # 66
(10.374967660206792, 9.766757887517146, 10.005570644718793, 12.006249742798353, 11.24998422756813, 6.25, 7.386506818365206, 7.391987654320989, 11.755842283950617, 7.010462871513489, 8.12953691233321, 9.257427526291723, 10.125), # 67
(10.374791614480825, 9.733248639320323, 9.994405949931412, 11.993641740472357, 11.249877955297345, 6.2498840115836, 7.363515194829646, 7.358343850022862, 11.744087848651121, 6.989620441647166, 8.120285988540376, 9.242530021899743, 10.124875150034294), # 68
(10.373141706924315, 9.699245519713262, 9.982988425925925, 11.980283514492752, 11.248910675381262, 6.248967078189301, 7.340268181346613, 7.325098765432099, 11.731797839506173, 6.968806390704429, 8.10986283891547, 9.227218973359324, 10.12388599537037), # 69
(10.369885787558895, 9.664592459843355, 9.971268432784635, 11.966087124261943, 11.246999314128942, 6.247161255906112, 7.31666013456137, 7.291952446273434, 11.718902892089622, 6.947919524462734, 8.09814888652608, 9.211422761292809, 10.121932334533609), # 70
(10.365069660642929, 9.62931016859153, 9.959250085733881, 11.951073503757382, 11.244168078754136, 6.244495808565767, 7.292701659538988, 7.258915866483768, 11.705422210791038, 6.926960359342639, 8.085187370783862, 9.195152937212715, 10.119039887688615), # 71
(10.358739130434783, 9.593419354838709, 9.946937499999999, 11.935263586956522, 11.240441176470588, 6.2410000000000005, 7.268403361344538, 7.226, 11.691375, 6.905929411764705, 8.07102153110048, 9.17842105263158, 10.115234375), # 72
(10.35094000119282, 9.556940727465816, 9.934334790809327, 11.918678307836823, 11.23584281449205, 6.236703094040542, 7.243775845043092, 7.193215820759031, 11.676780464106082, 6.884827198149493, 8.055694606887588, 9.161238659061919, 10.110541516632374), # 73
(10.341718077175404, 9.519894995353777, 9.921446073388202, 11.901338600375738, 11.230397200032275, 6.231634354519128, 7.218829715699722, 7.160574302697759, 11.661657807498857, 6.863654234917561, 8.039249837556856, 9.143617308016267, 10.104987032750344), # 74
(10.331119162640901, 9.482302867383511, 9.908275462962962, 11.883265398550725, 11.224128540305012, 6.22582304526749, 7.1935755783795, 7.128086419753086, 11.6460262345679, 6.84241103848947, 8.021730462519935, 9.125568551007147, 10.098596643518519), # 75
(10.319189061847677, 9.44418505243595, 9.894827074759945, 11.864479636339238, 11.217061042524005, 6.219298430117361, 7.168024038147495, 7.095763145861912, 11.629904949702789, 6.821098125285779, 8.003179721188491, 9.107103939547082, 10.091396069101508), # 76
(10.305973579054093, 9.40556225939201, 9.881105024005485, 11.845002247718732, 11.209218913903008, 6.212089772900472, 7.142185700068779, 7.063615454961135, 11.613313157293096, 6.7997160117270505, 7.983640852974187, 9.088235025148606, 10.083411029663925), # 77
(10.291518518518519, 9.366455197132618, 9.867113425925925, 11.824854166666666, 11.200626361655774, 6.204226337448559, 7.116071169208425, 7.031654320987655, 11.596270061728394, 6.7782652142338415, 7.9631570972886765, 9.068973359324238, 10.074667245370371), # 78
(10.275869684499314, 9.326884574538697, 9.8528563957476, 11.804056327160493, 11.191307592996047, 6.195737387593354, 7.089691050631501, 6.9998907178783725, 11.578794867398262, 6.756746249226714, 7.941771693543622, 9.049330493586504, 10.065190436385459), # 79
(10.259072881254847, 9.286871100491172, 9.838338048696844, 11.782629663177671, 11.181286815137579, 6.18665218716659, 7.063055949403081, 6.968335619570188, 11.560906778692273, 6.7351596331262265, 7.919527881150688, 9.029317979447935, 10.0550063228738), # 80
(10.241173913043479, 9.246435483870968, 9.8235625, 11.760595108695654, 11.170588235294117, 6.177, 7.036176470588235, 6.937, 11.542625, 6.713505882352941, 7.8964688995215315, 9.008947368421053, 10.044140624999999), # 81
(10.222218584123576, 9.205598433559008, 9.808533864883403, 11.737973597691894, 11.159236060679415, 6.166810089925317, 7.009063219252036, 6.90589483310471, 11.52396873571102, 6.691785513327416, 7.872637988067813, 8.988230212018387, 10.03261906292867), # 82
(10.202252698753504, 9.164380658436214, 9.793256258573388, 11.714786064143853, 11.147254498507221, 6.156111720774272, 6.981726800459553, 6.875031092821216, 11.504957190214906, 6.669999042470211, 7.848078386201194, 8.967178061752461, 10.020467356824417), # 83
(10.181322061191626, 9.122802867383513, 9.777733796296296, 11.691053442028986, 11.134667755991286, 6.144934156378601, 6.954177819275858, 6.844419753086419, 11.485609567901234, 6.648146986201889, 7.822833333333333, 8.945802469135803, 10.007711226851852), # 84
(10.159472475696308, 9.080885769281826, 9.761970593278463, 11.666796665324746, 11.121500040345357, 6.133306660570035, 6.926426880766024, 6.814071787837221, 11.465945073159578, 6.626229860943005, 7.796946068875894, 8.924114985680937, 9.994376393175584), # 85
(10.136749746525913, 9.03865007301208, 9.745970764746229, 11.64203666800859, 11.107775558783183, 6.121258497180309, 6.89848458999512, 6.783998171010516, 11.445982910379517, 6.604248183114124, 7.770459832240534, 8.902127162900394, 9.98048857596022), # 86
(10.113199677938807, 8.996116487455197, 9.729738425925925, 11.61679438405797, 11.09351851851852, 6.108818930041152, 6.870361552028219, 6.75420987654321, 11.425742283950619, 6.582202469135802, 7.743417862838915, 8.879850552306692, 9.96607349537037), # 87
(10.088868074193357, 8.9533057214921, 9.713277692043896, 11.59109074745035, 11.07875312676511, 6.096017222984301, 6.842068371930391, 6.724717878372199, 11.40524239826246, 6.560093235428601, 7.715863400082698, 8.857296705412365, 9.951156871570646), # 88
(10.063800739547922, 8.910238484003717, 9.696592678326475, 11.564946692163177, 11.063503590736707, 6.082882639841488, 6.813615654766708, 6.695533150434385, 11.384502457704619, 6.537920998413083, 7.687839683383544, 8.834477173729935, 9.935764424725651), # 89
(10.03804347826087, 8.866935483870968, 9.6796875, 11.538383152173914, 11.04779411764706, 6.069444444444445, 6.785014005602241, 6.666666666666666, 11.363541666666668, 6.515686274509804, 7.65938995215311, 8.81140350877193, 9.919921875), # 90
(10.011642094590563, 8.823417429974777, 9.662566272290809, 11.511421061460013, 11.031648914709915, 6.055731900624904, 6.756274029502062, 6.638129401005944, 11.342379229538182, 6.4933895801393255, 7.63055744580306, 8.788087262050874, 9.903654942558298), # 91
(9.984642392795372, 8.779705031196071, 9.64523311042524, 11.484081353998926, 11.015092189139029, 6.041774272214601, 6.727406331531242, 6.609932327389118, 11.321034350708734, 6.471031431722209, 7.601385403745053, 8.764539985079297, 9.886989347565157), # 92
(9.957090177133654, 8.735818996415771, 9.62769212962963, 11.456384963768118, 10.998148148148148, 6.027600823045267, 6.69842151675485, 6.582086419753087, 11.299526234567901, 6.448612345679011, 7.57191706539075, 8.74077322936972, 9.869950810185184), # 93
(9.92903125186378, 8.691780034514801, 9.609947445130317, 11.428352824745035, 10.98084099895102, 6.0132408169486355, 6.669330190237961, 6.554602652034752, 11.277874085505259, 6.426132838430297, 7.54219567015181, 8.716798546434674, 9.85256505058299), # 94
(9.90051142124411, 8.647608854374088, 9.592003172153635, 11.400005870907139, 10.963194948761398, 5.9987235177564395, 6.640142957045644, 6.527491998171011, 11.25609710791038, 6.403593426396621, 7.512264457439896, 8.69262748778668, 9.834857788923182), # 95
(9.871576489533012, 8.603326164874554, 9.573863425925927, 11.371365036231884, 10.945234204793028, 5.984078189300411, 6.610870422242971, 6.500765432098766, 11.234214506172838, 6.3809946259985475, 7.482166666666667, 8.668271604938273, 9.816854745370371), # 96
(9.842272260988848, 8.558952674897121, 9.555532321673525, 11.342451254696725, 10.926982974259664, 5.969334095412284, 6.581523190895013, 6.474433927754916, 11.212245484682214, 6.358336953656634, 7.451945537243782, 8.64374244940197, 9.798581640089164), # 97
(9.812644539869984, 8.514509093322713, 9.53701397462277, 11.31328546027912, 10.908465464375052, 5.954520499923793, 6.552111868066842, 6.44850845907636, 11.190209247828074, 6.335620925791441, 7.421644308582906, 8.619051572690298, 9.78006419324417), # 98
(9.782739130434782, 8.470016129032258, 9.5183125, 11.283888586956522, 10.889705882352942, 5.939666666666667, 6.52264705882353, 6.423, 11.168125, 6.312847058823529, 7.391306220095694, 8.59421052631579, 9.761328125), # 99
(9.752601836941611, 8.425494490906676, 9.49943201303155, 11.254281568706388, 10.870728435407084, 5.924801859472641, 6.493139368230145, 6.3979195244627345, 11.146011945587563, 6.290015869173458, 7.36097451119381, 8.569230861790967, 9.742399155521262), # 100
(9.722278463648834, 8.380964887826895, 9.480376628943759, 11.224485339506174, 10.85155733075123, 5.909955342173449, 6.463599401351762, 6.3732780064014625, 11.123889288980338, 6.267127873261788, 7.330692421288912, 8.544124130628353, 9.723303004972564), # 101
(9.691814814814816, 8.336448028673836, 9.461150462962962, 11.194520833333334, 10.832216775599129, 5.895156378600824, 6.43403776325345, 6.349086419753086, 11.1017762345679, 6.244183587509078, 7.300503189792663, 8.518901884340481, 9.704065393518519), # 102
(9.661256694697919, 8.291964622328422, 9.4417576303155, 11.164408984165325, 10.812730977164529, 5.880434232586496, 6.40446505900028, 6.325355738454504, 11.079691986739826, 6.221183528335889, 7.270450056116723, 8.493575674439873, 9.68471204132373), # 103
(9.63064990755651, 8.247535377671579, 9.422202246227709, 11.134170725979603, 10.79312414266118, 5.865818167962201, 6.374891893657326, 6.302096936442616, 11.057655749885688, 6.19812821216278, 7.24057625967275, 8.468157052439054, 9.665268668552812), # 104
(9.600040257648953, 8.203181003584229, 9.402488425925926, 11.103826992753623, 10.773420479302832, 5.851337448559671, 6.345328872289658, 6.279320987654321, 11.035686728395062, 6.175018155410313, 7.210925039872408, 8.442657569850553, 9.64576099537037), # 105
(9.569473549233614, 8.158922208947299, 9.382620284636488, 11.073398718464842, 10.753644194303236, 5.837021338210638, 6.315786599962345, 6.25703886602652, 11.01380412665752, 6.151853874499045, 7.181539636127355, 8.417088778186894, 9.626214741941014), # 106
(9.538995586568856, 8.11477970264171, 9.362601937585735, 11.042906837090714, 10.733819494876139, 5.822899100746838, 6.286275681740461, 6.235261545496114, 10.992027149062643, 6.128635885849539, 7.152463287849252, 8.391462228960604, 9.606655628429355), # 107
(9.508652173913044, 8.070774193548388, 9.3424375, 11.012372282608696, 10.713970588235293, 5.809, 6.256806722689075, 6.214, 10.970375, 6.105364705882353, 7.1237392344497605, 8.365789473684211, 9.587109375), # 108
(9.478489115524543, 8.026926390548255, 9.322131087105625, 10.98181598899624, 10.69412168159445, 5.795353299801859, 6.227390327873262, 6.193265203475081, 10.948866883859168, 6.082040851018047, 7.09541071534054, 8.340082063870238, 9.567601701817559), # 109
(9.448552215661715, 7.983257002522237, 9.301686814128946, 10.951258890230811, 10.674296982167354, 5.7819882639841484, 6.198037102358089, 6.173068129858253, 10.92752200502972, 6.058664837677183, 7.06752096993325, 8.314351551031214, 9.54815832904664), # 110
(9.41888727858293, 7.9397867383512555, 9.281108796296298, 10.920721920289855, 10.654520697167756, 5.768934156378601, 6.168757651208631, 6.153419753086419, 10.906359567901236, 6.035237182280319, 7.040113237639553, 8.288609486679663, 9.528804976851852), # 111
(9.38954010854655, 7.896536306916234, 9.26040114883402, 10.890226013150832, 10.634817033809409, 5.756220240816949, 6.139562579489958, 6.134331047096479, 10.885398776863282, 6.011758401248016, 7.013230757871109, 8.26286742232811, 9.509567365397805), # 112
(9.360504223703044, 7.853598618785952, 9.239617828252069, 10.85983388249204, 10.615175680173705, 5.7438697692145135, 6.1105259636567695, 6.115852568780606, 10.86471281125862, 5.988304736612729, 6.9869239061528665, 8.237192936504428, 9.490443900843221), # 113
(9.331480897900065, 7.811397183525536, 9.219045675021619, 10.829789421277336, 10.595393354566326, 5.731854608529901, 6.082018208410579, 6.09821125950512, 10.84461903571306, 5.965315167912783, 6.961244337113197, 8.211912172112974, 9.471275414160035), # 114
(9.302384903003995, 7.769947198683046, 9.198696932707318, 10.800084505181779, 10.5754076778886, 5.7201435124987645, 6.054059650191562, 6.081402654278709, 10.82512497866879, 5.942825327988077, 6.936154511427094, 8.187037582558851, 9.452006631660376), # 115
(9.273179873237634, 7.729188281291702, 9.178532189983873, 10.770666150266404, 10.555188526383779, 5.708708877287098, 6.026604817527893, 6.065380312898993, 10.80618133922783, 5.920793358449547, 6.911605931271481, 8.162523197487346, 9.43260725975589), # 116
(9.243829442823772, 7.689060048384721, 9.158512035525986, 10.741481372592244, 10.53470577629511, 5.6975230990608905, 5.9996082389477525, 6.050097795163585, 10.787738816492203, 5.899177400908129, 6.887550098823283, 8.13832304654375, 9.413047004858225), # 117
(9.214297245985211, 7.649502116995324, 9.138597058008367, 10.712477188220333, 10.513929303865842, 5.686558573986138, 5.973024442979315, 6.0355086608700965, 10.769748109563935, 5.877935596974759, 6.863938516259424, 8.11439115937335, 9.393295573379024), # 118
(9.184546916944742, 7.610454104156729, 9.118747846105723, 10.683600613211706, 10.492828985339221, 5.675787698228833, 5.946807958150756, 6.021566469816145, 10.752159917545043, 5.857026088260372, 6.840722685756828, 8.090681565621434, 9.373322671729932), # 119
(9.154542089925162, 7.571855626902158, 9.098924988492762, 10.654798663627394, 10.471374696958497, 5.665182867954965, 5.920913312990253, 6.008224781799343, 10.734924939537558, 5.836407016375905, 6.817854109492416, 8.067148294933297, 9.353098006322597), # 120
(9.124246399149268, 7.533646302264829, 9.079089073844187, 10.626018355528434, 10.449536314966918, 5.6547164793305305, 5.89529503602598, 5.995437156617307, 10.717993874643499, 5.816036522932296, 6.795284289643116, 8.043745376954222, 9.33259128356866), # 121
(9.093623478839854, 7.495765747277961, 9.059200690834711, 10.597206704975855, 10.427283715607734, 5.644360928521519, 5.869907655786117, 5.983157154067649, 10.70131742196489, 5.795872749540477, 6.772964728385851, 8.0204268413295, 9.31177220987977), # 122
(9.062636963219719, 7.458153578974774, 9.039220428139036, 10.568310728030694, 10.40458677512419, 5.634088611693925, 5.844705700798839, 5.971338333947983, 10.684846280603754, 5.775873837811387, 6.750846927897544, 7.997146717704421, 9.290610491667572), # 123
(9.031250486511654, 7.420749414388487, 9.01910887443187, 10.539277440753986, 10.381415369759537, 5.623871925013739, 5.819643699592319, 5.959934256055926, 10.668531149662115, 5.755997929355961, 6.728882390355119, 7.973859035724275, 9.269075835343711), # 124
(8.999427682938459, 7.38349287055232, 8.998826618387923, 10.51005385920676, 10.357739375757022, 5.613683264646956, 5.794676180694739, 5.948898480189091, 10.652322728241993, 5.736203165785134, 6.707022617935501, 7.950517825034348, 9.247137947319828), # 125
(8.967132186722928, 7.346323564499494, 8.978334248681898, 10.480586999450054, 10.333528669359893, 5.603495026759568, 5.76975767263427, 5.938184566145092, 10.636171715445418, 5.7164476887098425, 6.685219112815613, 7.927077115279934, 9.224766534007578), # 126
(8.93432763208786, 7.309181113263224, 8.957592353988504, 10.450823877544899, 10.308753126811398, 5.593279607517565, 5.744842703939094, 5.927746073721545, 10.620028810374407, 5.696689639741024, 6.6634233771723785, 7.903490936106316, 9.201931301818599), # 127
(8.900977653256046, 7.272005133876735, 8.93656152298245, 10.420711509552332, 10.28338262435479, 5.583009403086944, 5.719885803137382, 5.917536562716062, 10.603844712130984, 5.6768871604896125, 6.641586913182724, 7.879713317158788, 9.178601957164537), # 128
(8.867045884450281, 7.234735243373241, 8.91520234433844, 10.390196911533382, 10.257387038233311, 5.572656809633695, 5.694841498757313, 5.90750959292626, 10.587570119817174, 5.656998392566545, 6.619661223023571, 7.855698288082636, 9.154748206457038), # 129
(8.832495959893366, 7.197311058785966, 8.893475406731179, 10.359227099549086, 10.230736244690213, 5.562194223323808, 5.669664319327063, 5.89761872414975, 10.571155732535, 5.636981477582757, 6.5975978088718445, 7.831399878523152, 9.130339756107748), # 130
(8.797291513808094, 7.159672197148127, 8.87134129883538, 10.327749089660475, 10.203400119968745, 5.55159404032328, 5.644308793374809, 5.88781751618415, 10.554552249386486, 5.616794557149185, 6.575348172904468, 7.806772118125624, 9.105346312528312), # 131
(8.76139618041726, 7.121758275492944, 8.848760609325746, 10.295709897928587, 10.175348540312154, 5.540828656798102, 5.618729449428725, 5.878059528827073, 10.537710369473654, 5.596395772876765, 6.552863817298364, 7.781769036535342, 9.079737582130376), # 132
(8.724773593943663, 7.083508910853635, 8.825693926876983, 10.263056540414452, 10.146551381963686, 5.529870468914266, 5.592880816016989, 5.868298321876132, 10.520580791898526, 5.575743266376432, 6.53009624423046, 7.756344663397592, 9.053483271325586), # 133
(8.687387388610095, 7.044863720263423, 8.802101840163804, 10.229736033179103, 10.116978521166592, 5.518691872837765, 5.566717421667779, 5.858487455128944, 10.503114215763128, 5.5547951792591235, 6.506996955877678, 7.730453028357666, 9.026553086525583), # 134
(8.649201198639354, 7.005762320755524, 8.777944937860909, 10.195695392283579, 10.08659983416412, 5.507265264734592, 5.540193794909268, 5.84858048838312, 10.48526134016948, 5.533509653135776, 6.483517454416942, 7.704048161060852, 8.99891673414202), # 135
(8.610178658254235, 6.966144329363159, 8.753183808643008, 10.160881633788906, 10.055385197199517, 5.495563040770739, 5.513264464269635, 5.838530981436277, 10.466972864219606, 5.511844829617322, 6.459609242025177, 7.677084091152441, 8.970543920586536), # 136
(8.570283401677534, 6.925949363119547, 8.72777904118481, 10.125241773756125, 10.023304486516034, 5.483557597112198, 5.485883958277055, 5.828292494086029, 10.448199487015533, 5.4897588503147015, 6.435223820879306, 7.649514848277719, 8.941404352270776), # 137
(8.529479063132047, 6.885117039057908, 8.701691224161017, 10.088722828246263, 9.990327578356919, 5.471221329924964, 5.458006805459704, 5.81781858612999, 10.428891907659281, 5.4672098568388465, 6.410312693156252, 7.621294462081978, 8.91146773560639), # 138
(8.487729276840568, 6.843586974211461, 8.67488094624634, 10.051271813320358, 9.956424348965415, 5.458526635375026, 5.429587534345759, 5.807062817365774, 10.409000825252871, 5.444155990800697, 6.38482736103294, 7.592376962210506, 8.880703777005019), # 139
(8.444997677025897, 6.801298785613425, 8.647308796115487, 10.012835745039444, 9.92156467458478, 5.445445909628379, 5.400580673463397, 5.795978747590996, 10.388476938898332, 5.420555393811186, 6.358719326686294, 7.562716378308592, 8.849082182878314), # 140
(8.40124789791083, 6.758192090297021, 8.61893536244316, 9.973361639464553, 9.885718431458253, 5.431951548851015, 5.370940751340795, 5.78451993660327, 10.36727094769768, 5.396366207481251, 6.331940092293238, 7.532266740021525, 8.816572659637913), # 141
(8.356443573718156, 6.714206505295466, 8.58972123390407, 9.93279651265672, 9.848855495829087, 5.418015949208927, 5.340622296506126, 5.772639944200211, 10.345333550752942, 5.371546573421828, 6.304441160030697, 7.500982076994594, 8.783144913695466), # 142
(8.310548338670674, 6.669281647641981, 8.559626999172925, 9.891087380676975, 9.810945743940529, 5.403611506868106, 5.3095798374875685, 5.760292330179432, 10.322615447166147, 5.3460546332438525, 6.276174032075593, 7.4688164188730894, 8.748768651462617), # 143
(8.263525826991184, 6.623357134369786, 8.528613246924428, 9.848181259586356, 9.771959052035829, 5.388710617994547, 5.277767902813299, 5.747430654338549, 10.29906733603931, 5.31984852855826, 6.247090210604851, 7.435723795302299, 8.713413579351014), # 144
(8.215339672902477, 6.576372582512099, 8.496640565833289, 9.804025165445895, 9.731865296358233, 5.3732856787542405, 5.245141021011493, 5.734008476475176, 10.274639916474454, 5.292886400975988, 6.217141197795395, 7.401658235927513, 8.6770494037723), # 145
(8.16595351062735, 6.528267609102142, 8.463669544574216, 9.758566114316626, 9.690634353150992, 5.35730908531318, 5.21165372061033, 5.719979356386927, 10.249283887573606, 5.2651263921079705, 6.186278495824149, 7.3665737703940195, 8.639645831138118), # 146
(8.1153309743886, 6.47898183117313, 8.42966077182191, 9.71175112225958, 9.648236098657351, 5.340753233837358, 5.177260530137981, 5.705296853871415, 10.22294994843879, 5.236526643565146, 6.154453606868036, 7.3304244283471105, 8.601172567860118), # 147
(8.063435698409021, 6.428454865758288, 8.394574836251083, 9.663527205335797, 9.604640409120561, 5.323590520492767, 5.1419159781226265, 5.689914528726257, 10.195588798172029, 5.207045296958447, 6.1216180331039824, 7.29316423943207, 8.561599320349941), # 148
(8.010231316911412, 6.37662632989083, 8.358372326536443, 9.613841379606303, 9.55981716078387, 5.3057933414453995, 5.105574593092441, 5.673785940749067, 10.167151135875338, 5.176640493898813, 6.08772327670891, 7.254747233294191, 8.520895795019237), # 149
(7.955681464118564, 6.323435840603979, 8.321013831352694, 9.562640661132138, 9.513736229890526, 5.287334092861249, 5.0681909035756005, 5.656864649737456, 10.137587660650752, 5.1452703759971765, 6.0527208398597425, 7.215127439578763, 8.479031698279647), # 150
(7.899749774253275, 6.268823014930954, 8.282459939374542, 9.50987206597433, 9.466367492683776, 5.268185170906305, 5.029719438100283, 5.639104215489043, 10.106849071600289, 5.112893084864478, 6.016562224733405, 7.174258887931072, 8.435976736542818), # 151
(7.842399881538343, 6.212727469904973, 8.242671239276701, 9.455482610193918, 9.417680825406869, 5.2483189717465635, 4.9901147251946645, 5.620458197801441, 10.07488606782597, 5.079466762111649, 5.979198933506821, 7.132095607996409, 8.391700616220398), # 152
(7.78359542019656, 6.155088822559256, 8.201608319733868, 9.399419309851933, 9.367646104303056, 5.2277078915480155, 4.949331293386919, 5.600880156472262, 10.041649348429823, 5.044949549349629, 5.940582468356916, 7.088591629420064, 8.346173043724027), # 153
(7.723300024450729, 6.095846689927024, 8.159231769420758, 9.34162918100941, 9.31623320561558, 5.206324326476654, 4.907323671205228, 5.580323651299123, 10.007089612513866, 5.009299588189353, 5.900664331460612, 7.043700981847325, 8.299363725465357), # 154
(7.6614773285236355, 6.034940689041495, 8.115502177012075, 9.282059239727378, 9.263412005587696, 5.184140672698471, 4.864046387177761, 5.558742242079636, 9.971157559180128, 4.972475020241754, 5.859396024994833, 6.997377694923482, 8.251242367856026), # 155
(7.598090966638081, 5.972310436935888, 8.070380131182526, 9.220656502066875, 9.209152380462648, 5.161129326379461, 4.8194539698327, 5.5360894886114185, 9.933803887530626, 4.934433987117773, 5.816729051136504, 6.949575798293822, 8.201778677307685), # 156
(7.533104573016862, 5.907895550643423, 8.023826220606818, 9.157367984088937, 9.153424206483685, 5.137262683685614, 4.773500947698219, 5.512318950692082, 9.894979296667389, 4.895134630428341, 5.772614912062549, 6.900249321603637, 8.150942360231976), # 157
(7.464680946405239, 5.840453120772258, 7.973591953902355, 9.089769581651243, 9.093681105870997, 5.11102447631711, 4.725106720927857, 5.485796952349372, 9.851662091599097, 4.8533659162911436, 5.7255957525389425, 6.847599564194339, 8.096485859415345), # 158
(7.382286766978402, 5.763065319599478, 7.906737818402988, 9.003977158788453, 9.015191309781628, 5.073689648007103, 4.668212763385716, 5.4472135327643825, 9.786427261222144, 4.802280994098745, 5.667416935618994, 6.781362523683108, 8.025427646920194), # 159
(7.284872094904309, 5.675096728540714, 7.821920957955888, 8.89857751040886, 8.916420131346795, 5.024341296047684, 4.602243748383784, 5.3955991895273465, 9.697425227228651, 4.741205651862893, 5.59725950860954, 6.700501948887847, 7.93642060889358), # 160
(7.17322205458596, 5.577120868080469, 7.720046971910309, 8.774572503756728, 8.798393124282113, 4.963577241570314, 4.527681446006876, 5.33160053310978, 9.585829766999018, 4.6706581931709374, 5.515741654599707, 6.605767468907571, 7.830374044819097), # 161
(7.048121770426357, 5.469711258703239, 7.602021459615496, 8.632964006076326, 8.662135842303204, 4.891995305706455, 4.445007626339809, 5.255864173983202, 9.452814657913637, 4.5911569216102315, 5.42348155667862, 6.497908712841293, 7.708197254180333), # 162
(6.9103563668284975, 5.353441420893524, 7.468750020420702, 8.474753884611934, 8.508673839125688, 4.810193309587572, 4.354704059467401, 5.169036722619125, 9.299553677352906, 4.503220140768125, 5.321097397935408, 6.3776753097880325, 7.570799536460879), # 163
(6.760710968195384, 5.228884875135821, 7.321138253675176, 8.300944006607818, 8.339032668465189, 4.718769074345129, 4.257252515474466, 5.071764789489069, 9.127220602697223, 4.407366154231968, 5.209207361459196, 6.245816888846803, 7.419090191144328), # 164
(6.599970698930017, 5.096615141914632, 7.160091758728169, 8.112536239308252, 8.154237884037324, 4.618320421110586, 4.153134764445822, 4.964694985064546, 8.93698921132698, 4.3041132655891134, 5.088429630339111, 6.10308307911662, 7.25397851771427), # 165
(6.428920683435397, 4.957205741714454, 6.9865161349289275, 7.910532449957501, 7.955315039557714, 4.509445171015408, 4.042832576466286, 4.848473919817077, 8.730033280622573, 4.193979778426912, 4.959382387664279, 5.950223509696501, 7.0763738156542955), # 166
(6.248346046114523, 4.811230195019787, 6.801316981626704, 7.695934505799843, 7.74328968874198, 4.392741145191058, 3.9268277216206746, 4.723748204218176, 8.5075265879644, 4.077483996332714, 4.822683816523827, 5.7879878096854585, 6.887185384447996), # 167
(6.059031911370395, 4.659262022315128, 6.605399898170748, 7.469744274079546, 7.519187385305742, 4.268806164768999, 3.805601969993804, 4.5911644487393595, 8.270642910732855, 3.955144222893872, 4.678952100006881, 5.617125608182511, 6.6873225235789615), # 168
(5.861763403606015, 4.501874744084979, 6.399670483910309, 7.232963622040883, 7.28403368296462, 4.138238050880695, 3.6796370916704917, 4.451369263852145, 8.020556026308338, 3.8274787616977366, 4.528805421202568, 5.438386534286672, 6.477694532530785), # 169
(5.657325647224384, 4.339641880813837, 6.185034338194635, 6.98659441692812, 7.038854135434233, 4.001634624657607, 3.549414856735553, 4.305009260028047, 7.7584397120712385, 3.6950059163316578, 4.372861963200016, 5.252520217096959, 6.259210710787055), # 170
(5.4465037666285, 4.173136952986201, 5.962397060372978, 6.731638525985535, 6.784674296430206, 3.8595937072311983, 3.4154170352738054, 4.152731047738583, 7.485467745401956, 3.5582439903829886, 4.211739909088348, 5.060276285712386, 6.032780357831365), # 171
(5.230082886221365, 4.002933481086569, 5.7326642497945866, 6.4690978164573965, 6.5225197196681535, 3.7127131197329337, 3.2781253973700655, 3.9951812374552707, 7.202813903680886, 3.41771128743908, 4.046057441956694, 4.862404369231971, 5.799312773147303), # 172
(5.00884813040598, 3.8296049855994423, 5.4967415058087115, 6.1999741555879755, 6.253415958863702, 3.5615906832942748, 3.1380217131091497, 3.8330064396496235, 6.911651964288422, 3.2739261110872815, 3.8764327448941778, 4.659654096754725, 5.5597172562184625), # 173
(4.783584623585344, 3.653724987009318, 5.2555344277646014, 5.9252694106215404, 5.978388567732466, 3.406824219046685, 2.9955877525758754, 3.6668532647931604, 6.613155704604964, 3.1274067649149466, 3.7034840009899277, 4.452775097379668, 5.314903106528433), # 174
(4.555077490162455, 3.4758670058006946, 5.009948615011508, 5.645985448802367, 5.698463099990069, 3.2490115481216284, 2.851305285855058, 3.497368323357396, 6.308498902010905, 2.9786715525094243, 3.5278293933330693, 4.242517000205814, 5.0657796235608075), # 175
(4.324111854540319, 3.296604562458073, 4.760889666898678, 5.363124137374725, 5.41466510935213, 3.0887504916505666, 2.705656083031515, 3.325198225813849, 5.998855333886642, 2.828238777458067, 3.35008710501273, 4.029629434332179, 4.813256106799174), # 176
(4.0914728411219325, 3.1165111774659513, 4.5092631827753635, 5.077687343582883, 5.128020149534273, 2.9266388707649633, 2.5591219141900625, 3.1509895826340326, 5.68539877761257, 2.6766267433482245, 3.1708753191180357, 3.8148620288577786, 4.5582418557271245), # 177
(3.8579455743102966, 2.9361603713088282, 4.255974761990814, 4.790676934671116, 4.8395537742521135, 2.7632745065962827, 2.4121845494155174, 2.9753890042894655, 5.3693030105690855, 2.52435375376725, 2.9908122187381125, 3.598964412881627, 4.301646169828252), # 178
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 179
)
passenger_arriving_acc = (
(2, 7, 5, 2, 8, 1, 2, 1, 2, 1, 0, 1, 0, 11, 2, 3, 4, 3, 0, 0, 0, 1, 0, 0, 0, 0), # 0
(12, 12, 9, 6, 15, 5, 4, 1, 3, 4, 0, 1, 0, 17, 10, 7, 7, 5, 3, 5, 3, 2, 1, 0, 0, 0), # 1
(20, 18, 20, 10, 17, 7, 8, 4, 6, 4, 1, 2, 0, 21, 14, 12, 10, 11, 4, 6, 3, 6, 1, 2, 0, 0), # 2
(26, 22, 23, 14, 20, 8, 9, 4, 8, 10, 2, 2, 0, 28, 19, 14, 16, 13, 7, 8, 7, 6, 1, 3, 0, 0), # 3
(31, 32, 25, 22, 20, 10, 15, 5, 9, 12, 2, 2, 0, 31, 24, 18, 18, 19, 9, 8, 7, 10, 5, 3, 1, 0), # 4
(42, 39, 30, 23, 26, 11, 18, 6, 11, 14, 3, 2, 0, 42, 30, 21, 23, 23, 13, 11, 10, 12, 7, 4, 1, 0), # 5
(51, 48, 32, 30, 37, 11, 19, 8, 14, 14, 6, 2, 0, 46, 34, 30, 24, 36, 16, 13, 12, 15, 9, 5, 2, 0), # 6
(58, 53, 37, 36, 45, 13, 23, 11, 16, 14, 6, 2, 0, 53, 39, 37, 27, 42, 21, 14, 14, 19, 12, 6, 2, 0), # 7
(67, 59, 45, 46, 52, 14, 24, 12, 19, 16, 8, 5, 0, 65, 42, 47, 32, 48, 27, 18, 14, 24, 13, 6, 2, 0), # 8
(74, 68, 57, 55, 56, 15, 29, 17, 22, 19, 10, 7, 0, 71, 54, 54, 38, 53, 34, 22, 17, 26, 15, 6, 2, 0), # 9
(83, 77, 67, 69, 62, 20, 35, 19, 24, 21, 10, 9, 0, 79, 60, 64, 43, 57, 36, 26, 18, 27, 19, 7, 3, 0), # 10
(89, 87, 72, 73, 66, 21, 39, 25, 29, 22, 11, 11, 0, 89, 70, 71, 47, 64, 38, 30, 22, 32, 20, 8, 5, 0), # 11
(100, 94, 80, 81, 69, 21, 43, 29, 36, 22, 11, 12, 0, 95, 79, 75, 53, 70, 43, 34, 26, 38, 23, 9, 7, 0), # 12
(107, 103, 85, 94, 76, 28, 45, 33, 37, 23, 13, 13, 0, 106, 85, 83, 58, 78, 46, 37, 32, 41, 24, 12, 8, 0), # 13
(109, 113, 93, 107, 82, 31, 47, 36, 39, 26, 14, 13, 0, 118, 94, 86, 65, 83, 49, 38, 33, 46, 25, 13, 9, 0), # 14
(115, 129, 102, 115, 90, 35, 49, 37, 42, 26, 17, 15, 0, 123, 99, 94, 69, 88, 56, 43, 37, 50, 30, 16, 9, 0), # 15
(125, 141, 108, 124, 98, 39, 55, 41, 44, 27, 18, 17, 0, 131, 112, 100, 75, 93, 61, 47, 41, 54, 33, 16, 9, 0), # 16
(131, 150, 117, 131, 104, 42, 57, 41, 49, 27, 19, 17, 0, 141, 118, 111, 81, 102, 65, 51, 44, 61, 36, 18, 11, 0), # 17
(140, 154, 126, 139, 112, 46, 58, 43, 55, 29, 19, 18, 0, 150, 128, 118, 84, 107, 72, 53, 46, 66, 39, 19, 11, 0), # 18
(148, 167, 141, 149, 120, 49, 61, 45, 55, 30, 20, 20, 0, 157, 141, 127, 87, 115, 76, 58, 46, 69, 40, 23, 13, 0), # 19
(162, 176, 152, 157, 128, 51, 63, 48, 60, 30, 22, 22, 0, 169, 155, 131, 92, 122, 81, 65, 47, 73, 44, 24, 13, 0), # 20
(170, 183, 157, 171, 135, 54, 66, 53, 64, 32, 25, 22, 0, 183, 161, 137, 97, 131, 88, 72, 50, 76, 46, 24, 16, 0), # 21
(186, 194, 169, 186, 139, 60, 68, 56, 69, 32, 28, 23, 0, 194, 175, 142, 104, 137, 97, 78, 51, 78, 50, 24, 16, 0), # 22
(210, 202, 177, 196, 143, 65, 71, 60, 71, 35, 31, 24, 0, 210, 183, 148, 106, 139, 100, 82, 53, 82, 54, 27, 16, 0), # 23
(221, 212, 183, 203, 151, 69, 74, 65, 75, 37, 33, 24, 0, 225, 193, 153, 115, 146, 105, 87, 57, 86, 56, 27, 17, 0), # 24
(230, 220, 191, 208, 159, 72, 77, 70, 78, 38, 35, 25, 0, 232, 202, 165, 120, 148, 106, 90, 61, 89, 58, 28, 17, 0), # 25
(237, 232, 203, 219, 168, 73, 85, 75, 81, 41, 37, 25, 0, 239, 210, 172, 125, 156, 118, 92, 65, 91, 59, 30, 17, 0), # 26
(243, 239, 209, 234, 175, 77, 90, 80, 83, 42, 37, 25, 0, 249, 218, 182, 131, 165, 122, 95, 67, 96, 62, 31, 17, 0), # 27
(254, 250, 220, 241, 182, 81, 92, 89, 88, 43, 38, 25, 0, 259, 229, 186, 136, 180, 130, 97, 68, 97, 63, 32, 18, 0), # 28
(261, 260, 227, 252, 191, 86, 96, 93, 91, 46, 39, 25, 0, 268, 237, 195, 145, 185, 131, 100, 69, 100, 68, 33, 20, 0), # 29
(276, 267, 235, 261, 197, 89, 100, 102, 95, 51, 40, 26, 0, 272, 249, 201, 153, 190, 136, 103, 72, 103, 70, 35, 20, 0), # 30
(285, 279, 247, 268, 206, 94, 102, 106, 98, 54, 43, 26, 0, 282, 254, 209, 158, 196, 143, 110, 74, 107, 74, 36, 20, 0), # 31
(290, 290, 252, 279, 211, 96, 106, 108, 104, 54, 45, 27, 0, 295, 262, 219, 164, 204, 146, 117, 77, 111, 75, 38, 20, 0), # 32
(301, 301, 260, 287, 222, 100, 116, 114, 108, 55, 47, 27, 0, 303, 274, 223, 174, 213, 151, 121, 80, 115, 82, 40, 20, 0), # 33
(307, 311, 269, 297, 227, 102, 120, 117, 114, 58, 47, 29, 0, 313, 284, 229, 180, 224, 156, 127, 86, 121, 85, 40, 22, 0), # 34
(314, 318, 277, 303, 234, 104, 126, 119, 120, 60, 50, 29, 0, 322, 292, 236, 183, 229, 163, 134, 91, 125, 86, 43, 23, 0), # 35
(317, 325, 286, 313, 239, 111, 128, 124, 126, 64, 50, 30, 0, 332, 301, 242, 189, 238, 165, 136, 91, 129, 88, 45, 24, 0), # 36
(327, 332, 294, 333, 242, 115, 130, 131, 129, 65, 52, 31, 0, 342, 307, 247, 193, 247, 170, 138, 97, 134, 91, 47, 25, 0), # 37
(335, 342, 303, 339, 249, 119, 135, 134, 131, 66, 52, 31, 0, 353, 313, 251, 199, 253, 180, 141, 99, 137, 96, 47, 27, 0), # 38
(344, 348, 306, 348, 255, 123, 137, 140, 137, 68, 54, 31, 0, 363, 325, 263, 203, 257, 180, 143, 101, 145, 98, 51, 27, 0), # 39
(355, 356, 313, 356, 260, 124, 140, 145, 144, 69, 54, 31, 0, 372, 331, 274, 211, 268, 182, 147, 105, 146, 100, 51, 28, 0), # 40
(364, 361, 328, 368, 267, 126, 143, 146, 145, 69, 54, 31, 0, 381, 346, 279, 217, 276, 184, 150, 110, 148, 101, 51, 30, 0), # 41
(378, 370, 332, 376, 274, 129, 145, 151, 148, 69, 56, 31, 0, 390, 349, 280, 220, 286, 188, 154, 113, 151, 104, 55, 30, 0), # 42
(388, 373, 338, 390, 284, 132, 147, 156, 152, 70, 58, 32, 0, 404, 356, 289, 228, 293, 196, 157, 118, 156, 106, 57, 30, 0), # 43
(399, 386, 347, 395, 293, 135, 153, 163, 158, 70, 58, 32, 0, 416, 364, 293, 230, 302, 203, 162, 119, 157, 109, 57, 30, 0), # 44
(411, 398, 351, 406, 302, 136, 154, 169, 159, 70, 59, 32, 0, 419, 370, 296, 238, 312, 207, 165, 121, 161, 115, 57, 30, 0), # 45
(417, 410, 356, 412, 307, 142, 157, 173, 162, 72, 60, 33, 0, 427, 375, 300, 241, 320, 211, 168, 122, 164, 122, 58, 31, 0), # 46
(431, 417, 366, 420, 316, 144, 158, 176, 168, 73, 61, 34, 0, 435, 381, 305, 246, 327, 216, 172, 124, 172, 124, 58, 33, 0), # 47
(442, 422, 374, 430, 322, 145, 161, 180, 173, 76, 61, 34, 0, 443, 387, 314, 252, 341, 223, 183, 129, 173, 126, 60, 34, 0), # 48
(451, 425, 383, 439, 328, 147, 166, 187, 178, 78, 62, 34, 0, 450, 395, 319, 258, 351, 227, 188, 133, 178, 128, 63, 40, 0), # 49
(460, 434, 394, 446, 337, 151, 168, 193, 186, 80, 62, 34, 0, 460, 402, 328, 266, 356, 231, 194, 136, 183, 129, 63, 42, 0), # 50
(469, 442, 406, 458, 348, 156, 173, 199, 191, 84, 62, 34, 0, 476, 408, 332, 274, 366, 237, 196, 139, 187, 133, 66, 42, 0), # 51
(473, 448, 415, 467, 355, 158, 176, 203, 194, 88, 65, 35, 0, 485, 418, 338, 276, 370, 245, 198, 139, 189, 137, 67, 42, 0), # 52
(482, 459, 421, 472, 362, 161, 180, 203, 198, 88, 67, 36, 0, 494, 430, 346, 280, 381, 249, 203, 139, 189, 138, 69, 43, 0), # 53
(487, 472, 429, 480, 368, 164, 182, 205, 202, 93, 69, 38, 0, 500, 435, 351, 287, 388, 254, 208, 139, 193, 140, 71, 43, 0), # 54
(503, 479, 437, 490, 375, 169, 183, 211, 208, 97, 71, 40, 0, 507, 446, 355, 292, 395, 258, 210, 140, 198, 144, 73, 43, 0), # 55
(514, 490, 445, 496, 380, 175, 185, 213, 212, 98, 74, 41, 0, 517, 456, 359, 299, 404, 263, 214, 142, 202, 145, 74, 44, 0), # 56
(527, 504, 448, 508, 387, 181, 189, 216, 214, 98, 74, 42, 0, 523, 469, 361, 309, 412, 269, 219, 147, 204, 149, 77, 45, 0), # 57
(530, 510, 454, 516, 399, 186, 192, 219, 219, 99, 75, 43, 0, 533, 471, 370, 314, 423, 275, 220, 148, 207, 151, 78, 46, 0), # 58
(536, 520, 460, 526, 406, 189, 195, 221, 223, 100, 76, 44, 0, 550, 480, 381, 320, 431, 279, 223, 148, 207, 154, 79, 46, 0), # 59
(546, 529, 472, 539, 410, 192, 201, 226, 230, 100, 76, 45, 0, 564, 487, 387, 326, 439, 284, 231, 152, 210, 156, 80, 48, 0), # 60
(559, 539, 477, 545, 416, 194, 202, 230, 234, 104, 78, 47, 0, 572, 491, 398, 331, 450, 287, 233, 153, 214, 158, 84, 48, 0), # 61
(568, 551, 482, 553, 426, 199, 205, 234, 238, 108, 82, 47, 0, 581, 500, 407, 335, 457, 294, 238, 154, 216, 159, 86, 49, 0), # 62
(585, 560, 489, 564, 432, 201, 209, 236, 241, 110, 83, 48, 0, 588, 509, 418, 340, 467, 299, 240, 155, 221, 164, 87, 51, 0), # 63
(593, 567, 496, 573, 441, 208, 215, 244, 244, 113, 86, 49, 0, 596, 515, 422, 344, 480, 308, 243, 156, 224, 169, 88, 51, 0), # 64
(609, 575, 506, 578, 450, 214, 222, 248, 246, 116, 89, 49, 0, 606, 521, 429, 348, 488, 312, 249, 157, 226, 172, 89, 52, 0), # 65
(617, 591, 519, 586, 455, 216, 225, 252, 251, 117, 90, 49, 0, 615, 537, 432, 353, 496, 316, 251, 159, 229, 178, 89, 52, 0), # 66
(624, 600, 528, 592, 462, 219, 227, 255, 255, 118, 92, 51, 0, 628, 546, 439, 359, 503, 316, 254, 161, 232, 182, 91, 53, 0), # 67
(635, 606, 537, 597, 467, 221, 228, 259, 257, 119, 92, 52, 0, 640, 553, 447, 365, 510, 322, 254, 162, 232, 183, 92, 54, 0), # 68
(641, 613, 545, 604, 472, 224, 230, 260, 261, 120, 92, 52, 0, 653, 566, 453, 370, 516, 324, 258, 165, 232, 185, 93, 55, 0), # 69
(650, 622, 553, 607, 481, 229, 230, 260, 263, 122, 93, 53, 0, 663, 573, 458, 374, 521, 327, 262, 167, 235, 186, 94, 55, 0), # 70
(655, 633, 563, 615, 488, 230, 233, 263, 268, 123, 95, 54, 0, 672, 581, 464, 377, 528, 330, 265, 172, 238, 190, 97, 55, 0), # 71
(663, 640, 568, 624, 494, 235, 240, 270, 272, 124, 95, 54, 0, 681, 586, 472, 378, 534, 335, 267, 176, 241, 192, 98, 56, 0), # 72
(674, 643, 576, 630, 507, 242, 247, 273, 274, 125, 97, 55, 0, 698, 598, 481, 379, 545, 342, 271, 178, 244, 193, 100, 56, 0), # 73
(682, 652, 583, 636, 517, 251, 251, 275, 278, 128, 97, 55, 0, 709, 604, 485, 382, 550, 345, 276, 179, 247, 193, 102, 57, 0), # 74
(698, 664, 586, 652, 528, 254, 252, 275, 284, 128, 97, 56, 0, 720, 612, 493, 389, 556, 350, 280, 182, 252, 199, 105, 58, 0), # 75
(712, 668, 595, 656, 533, 256, 259, 279, 288, 129, 98, 56, 0, 728, 621, 495, 392, 566, 356, 285, 182, 256, 201, 107, 59, 0), # 76
(718, 672, 599, 670, 541, 261, 264, 281, 292, 132, 100, 56, 0, 740, 629, 503, 400, 580, 358, 290, 186, 262, 204, 107, 60, 0), # 77
(721, 681, 608, 676, 549, 262, 267, 287, 300, 135, 101, 56, 0, 747, 634, 510, 404, 588, 360, 291, 186, 265, 207, 110, 62, 0), # 78
(726, 687, 619, 689, 553, 266, 268, 291, 305, 139, 104, 56, 0, 759, 640, 515, 412, 597, 370, 296, 191, 271, 209, 112, 65, 0), # 79
(741, 697, 625, 698, 565, 272, 270, 297, 307, 140, 105, 59, 0, 772, 649, 520, 418, 605, 374, 299, 196, 275, 211, 113, 66, 0), # 80
(754, 702, 631, 708, 576, 277, 274, 305, 310, 141, 105, 59, 0, 781, 655, 525, 421, 608, 379, 303, 197, 281, 212, 114, 66, 0), # 81
(768, 708, 637, 714, 585, 281, 279, 309, 313, 142, 105, 60, 0, 792, 664, 530, 427, 615, 383, 307, 199, 283, 214, 115, 66, 0), # 82
(776, 718, 647, 722, 591, 282, 284, 313, 319, 144, 107, 60, 0, 804, 676, 540, 431, 624, 388, 314, 201, 285, 220, 117, 66, 0), # 83
(788, 722, 654, 729, 599, 286, 288, 317, 320, 146, 107, 61, 0, 811, 688, 547, 431, 629, 393, 315, 202, 292, 224, 117, 66, 0), # 84
(798, 732, 656, 737, 605, 287, 290, 320, 322, 147, 111, 63, 0, 824, 696, 554, 435, 640, 401, 319, 205, 295, 225, 117, 66, 0), # 85
(812, 742, 660, 742, 612, 287, 294, 324, 330, 149, 111, 65, 0, 828, 708, 559, 439, 644, 402, 320, 209, 297, 228, 121, 66, 0), # 86
(820, 754, 666, 756, 619, 290, 299, 324, 334, 150, 113, 66, 0, 835, 718, 572, 444, 651, 410, 324, 212, 300, 231, 121, 66, 0), # 87
(833, 766, 676, 766, 625, 295, 302, 324, 339, 154, 113, 68, 0, 845, 724, 581, 446, 657, 414, 327, 216, 306, 233, 124, 67, 0), # 88
(836, 773, 686, 777, 632, 300, 307, 325, 341, 155, 113, 68, 0, 857, 733, 589, 448, 666, 421, 329, 220, 309, 235, 125, 68, 0), # 89
(851, 782, 694, 783, 638, 304, 311, 329, 347, 156, 115, 71, 0, 867, 736, 593, 453, 675, 428, 334, 222, 312, 237, 129, 69, 0), # 90
(860, 793, 702, 798, 647, 306, 314, 332, 351, 163, 116, 73, 0, 876, 752, 600, 462, 682, 431, 337, 228, 315, 239, 131, 69, 0), # 91
(873, 797, 707, 805, 650, 309, 317, 335, 357, 164, 121, 73, 0, 887, 755, 609, 463, 683, 433, 340, 228, 320, 240, 132, 70, 0), # 92
(882, 803, 711, 816, 656, 312, 319, 337, 360, 166, 122, 74, 0, 890, 759, 617, 467, 692, 435, 341, 231, 323, 245, 132, 70, 0), # 93
(897, 809, 719, 826, 663, 316, 321, 340, 365, 167, 122, 75, 0, 895, 764, 621, 472, 703, 440, 344, 233, 325, 246, 133, 70, 0), # 94
(906, 814, 726, 838, 670, 317, 327, 342, 369, 168, 125, 76, 0, 907, 770, 632, 474, 706, 443, 347, 236, 329, 247, 134, 70, 0), # 95
(913, 820, 732, 845, 676, 320, 331, 345, 373, 168, 126, 76, 0, 913, 779, 640, 477, 714, 448, 349, 239, 335, 248, 136, 72, 0), # 96
(922, 827, 742, 853, 689, 324, 334, 348, 378, 168, 130, 77, 0, 921, 789, 646, 482, 719, 456, 353, 242, 341, 252, 136, 73, 0), # 97
(932, 834, 753, 860, 692, 327, 338, 352, 382, 173, 132, 77, 0, 932, 797, 657, 484, 727, 459, 359, 244, 344, 256, 137, 73, 0), # 98
(947, 837, 759, 872, 699, 331, 341, 354, 387, 175, 134, 77, 0, 939, 806, 662, 488, 738, 460, 360, 245, 351, 259, 140, 73, 0), # 99
(955, 843, 766, 879, 708, 336, 345, 357, 389, 178, 135, 77, 0, 951, 813, 668, 494, 744, 463, 361, 247, 353, 260, 140, 73, 0), # 100
(962, 858, 773, 888, 718, 337, 353, 358, 393, 178, 137, 77, 0, 959, 819, 674, 499, 750, 463, 361, 247, 357, 265, 143, 74, 0), # 101
(975, 866, 782, 896, 721, 339, 357, 362, 398, 180, 141, 78, 0, 970, 824, 679, 505, 754, 469, 363, 247, 361, 266, 144, 75, 0), # 102
(986, 871, 791, 903, 725, 348, 363, 364, 399, 183, 142, 78, 0, 980, 829, 686, 508, 761, 475, 366, 248, 364, 270, 146, 75, 0), # 103
(996, 879, 799, 907, 734, 350, 369, 371, 406, 189, 142, 80, 0, 990, 837, 693, 517, 763, 478, 368, 252, 371, 271, 146, 75, 0), # 104
(1009, 882, 804, 913, 745, 353, 373, 372, 410, 189, 144, 80, 0, 1001, 845, 697, 518, 771, 485, 375, 256, 376, 273, 147, 75, 0), # 105
(1018, 889, 813, 924, 750, 354, 378, 376, 413, 189, 144, 81, 0, 1011, 855, 703, 523, 782, 486, 379, 259, 381, 273, 147, 77, 0), # 106
(1027, 894, 823, 930, 760, 360, 381, 381, 417, 191, 144, 81, 0, 1017, 865, 710, 530, 788, 489, 386, 261, 387, 277, 147, 78, 0), # 107
(1036, 901, 832, 938, 764, 364, 385, 384, 419, 193, 145, 82, 0, 1029, 875, 715, 538, 794, 494, 388, 262, 390, 280, 150, 81, 0), # 108
(1043, 911, 839, 943, 777, 366, 392, 385, 423, 195, 147, 83, 0, 1035, 884, 722, 540, 802, 496, 393, 264, 394, 285, 152, 82, 0), # 109
(1060, 916, 847, 951, 783, 368, 393, 387, 426, 196, 149, 85, 0, 1042, 895, 724, 543, 809, 500, 398, 267, 399, 285, 155, 83, 0), # 110
(1069, 924, 858, 961, 789, 375, 394, 389, 430, 198, 149, 86, 0, 1052, 902, 732, 547, 813, 503, 399, 270, 402, 290, 157, 84, 0), # 111
(1079, 927, 867, 969, 796, 378, 394, 391, 434, 200, 149, 89, 0, 1060, 910, 739, 552, 820, 509, 402, 270, 405, 293, 159, 84, 0), # 112
(1085, 936, 873, 976, 805, 381, 397, 393, 443, 200, 155, 89, 0, 1069, 920, 745, 559, 829, 510, 409, 272, 407, 295, 160, 84, 0), # 113
(1094, 940, 878, 983, 811, 382, 397, 397, 445, 201, 155, 89, 0, 1074, 926, 748, 566, 838, 513, 415, 272, 414, 297, 164, 86, 0), # 114
(1104, 948, 884, 987, 813, 385, 402, 399, 448, 203, 157, 90, 0, 1086, 936, 753, 568, 844, 514, 420, 275, 414, 299, 165, 87, 0), # 115
(1118, 952, 892, 990, 824, 392, 405, 402, 458, 204, 157, 90, 0, 1093, 944, 761, 572, 851, 515, 422, 275, 416, 302, 166, 87, 0), # 116
(1131, 957, 900, 992, 829, 399, 406, 406, 464, 205, 158, 92, 0, 1105, 958, 764, 574, 861, 520, 423, 277, 423, 308, 169, 88, 0), # 117
(1138, 963, 904, 1006, 833, 400, 412, 409, 466, 206, 158, 94, 0, 1116, 964, 770, 579, 870, 524, 426, 280, 427, 309, 169, 88, 0), # 118
(1149, 968, 914, 1014, 842, 402, 414, 411, 468, 206, 160, 94, 0, 1123, 970, 773, 584, 876, 528, 428, 282, 428, 312, 169, 88, 0), # 119
(1158, 973, 925, 1022, 849, 403, 420, 419, 468, 206, 161, 94, 0, 1135, 977, 779, 587, 881, 532, 430, 283, 434, 320, 172, 89, 0), # 120
(1163, 983, 933, 1029, 857, 409, 425, 422, 472, 206, 161, 94, 0, 1153, 984, 784, 590, 885, 536, 435, 287, 435, 323, 173, 89, 0), # 121
(1169, 987, 942, 1034, 869, 414, 427, 424, 478, 207, 161, 94, 0, 1170, 992, 791, 594, 893, 538, 438, 287, 437, 327, 174, 90, 0), # 122
(1172, 998, 949, 1041, 876, 421, 432, 425, 482, 209, 162, 95, 0, 1183, 996, 802, 598, 903, 540, 441, 288, 440, 333, 176, 91, 0), # 123
(1189, 1004, 957, 1054, 882, 426, 435, 428, 483, 209, 163, 95, 0, 1188, 1008, 809, 601, 915, 543, 445, 294, 445, 338, 177, 93, 0), # 124
(1198, 1010, 964, 1065, 885, 429, 437, 430, 487, 210, 164, 98, 0, 1193, 1016, 818, 603, 926, 547, 450, 296, 448, 339, 178, 93, 0), # 125
(1208, 1014, 970, 1073, 888, 432, 443, 433, 487, 213, 164, 99, 0, 1203, 1027, 820, 608, 937, 552, 455, 298, 452, 342, 179, 93, 0), # 126
(1217, 1023, 983, 1080, 891, 433, 444, 436, 490, 214, 165, 102, 0, 1213, 1032, 829, 613, 949, 554, 455, 299, 455, 342, 180, 93, 0), # 127
(1231, 1030, 992, 1093, 898, 436, 446, 441, 497, 215, 167, 104, 0, 1224, 1036, 835, 617, 953, 560, 461, 301, 456, 343, 180, 93, 0), # 128
(1240, 1035, 1003, 1102, 904, 439, 449, 442, 501, 215, 167, 104, 0, 1233, 1039, 842, 620, 955, 562, 464, 302, 459, 347, 183, 93, 0), # 129
(1248, 1037, 1009, 1110, 910, 440, 454, 443, 505, 215, 168, 105, 0, 1240, 1046, 845, 626, 965, 565, 468, 307, 461, 348, 184, 94, 0), # 130
(1258, 1045, 1019, 1120, 916, 446, 459, 447, 505, 218, 171, 105, 0, 1248, 1057, 848, 632, 969, 573, 472, 309, 464, 348, 184, 94, 0), # 131
(1266, 1052, 1024, 1133, 923, 450, 461, 454, 506, 218, 172, 105, 0, 1263, 1065, 858, 636, 980, 579, 472, 310, 466, 350, 185, 95, 0), # 132
(1282, 1059, 1028, 1146, 927, 452, 461, 455, 508, 219, 173, 105, 0, 1273, 1069, 862, 642, 986, 584, 476, 312, 470, 352, 187, 96, 0), # 133
(1291, 1064, 1032, 1153, 941, 459, 465, 460, 510, 222, 175, 107, 0, 1279, 1079, 869, 646, 993, 586, 479, 318, 474, 353, 187, 97, 0), # 134
(1302, 1074, 1041, 1159, 953, 463, 468, 463, 513, 222, 176, 108, 0, 1290, 1085, 871, 650, 1002, 593, 482, 318, 478, 356, 188, 97, 0), # 135
(1308, 1078, 1050, 1168, 960, 467, 472, 466, 516, 222, 178, 109, 0, 1304, 1095, 876, 653, 1012, 596, 486, 320, 482, 357, 191, 98, 0), # 136
(1320, 1089, 1054, 1176, 962, 470, 477, 468, 523, 222, 179, 109, 0, 1314, 1102, 883, 656, 1019, 601, 492, 322, 487, 359, 192, 99, 0), # 137
(1327, 1095, 1059, 1185, 967, 472, 479, 471, 528, 224, 180, 109, 0, 1321, 1109, 889, 660, 1023, 603, 495, 324, 489, 364, 193, 100, 0), # 138
(1334, 1099, 1070, 1195, 976, 475, 480, 475, 529, 226, 180, 110, 0, 1328, 1114, 898, 664, 1028, 607, 502, 330, 491, 368, 195, 102, 0), # 139
(1345, 1103, 1077, 1206, 986, 477, 483, 477, 533, 228, 182, 110, 0, 1335, 1125, 902, 666, 1032, 610, 503, 331, 493, 370, 197, 102, 0), # 140
(1351, 1104, 1082, 1217, 993, 478, 485, 478, 535, 232, 182, 110, 0, 1338, 1130, 906, 669, 1038, 615, 505, 333, 494, 372, 198, 102, 0), # 141
(1355, 1107, 1086, 1220, 1001, 480, 487, 482, 538, 232, 183, 111, 0, 1348, 1141, 910, 678, 1045, 616, 506, 336, 496, 374, 200, 104, 0), # 142
(1362, 1114, 1093, 1228, 1008, 484, 493, 482, 541, 232, 184, 112, 0, 1361, 1153, 912, 683, 1052, 618, 508, 336, 502, 376, 200, 104, 0), # 143
(1370, 1117, 1101, 1235, 1017, 490, 495, 482, 544, 233, 186, 112, 0, 1371, 1163, 918, 687, 1056, 622, 510, 340, 502, 379, 201, 105, 0), # 144
(1385, 1123, 1104, 1243, 1024, 494, 497, 486, 550, 236, 188, 112, 0, 1379, 1168, 925, 688, 1067, 624, 513, 341, 505, 382, 203, 105, 0), # 145
(1392, 1127, 1114, 1247, 1034, 495, 498, 486, 556, 237, 189, 113, 0, 1390, 1175, 928, 692, 1073, 627, 515, 344, 510, 384, 205, 105, 0), # 146
(1405, 1135, 1125, 1250, 1042, 502, 500, 487, 558, 237, 191, 113, 0, 1402, 1183, 932, 696, 1079, 628, 518, 345, 513, 385, 208, 106, 0), # 147
(1412, 1140, 1135, 1253, 1049, 504, 503, 493, 560, 238, 192, 114, 0, 1411, 1191, 937, 700, 1086, 631, 519, 345, 518, 387, 210, 106, 0), # 148
(1422, 1144, 1142, 1261, 1053, 507, 506, 496, 562, 239, 192, 114, 0, 1419, 1198, 946, 700, 1093, 634, 519, 346, 519, 391, 211, 106, 0), # 149
(1433, 1148, 1144, 1267, 1059, 512, 510, 497, 565, 240, 192, 114, 0, 1430, 1201, 951, 705, 1100, 638, 521, 348, 523, 392, 214, 106, 0), # 150
(1444, 1160, 1151, 1276, 1064, 516, 511, 503, 569, 240, 193, 115, 0, 1436, 1208, 957, 709, 1108, 642, 524, 352, 526, 396, 215, 106, 0), # 151
(1454, 1164, 1158, 1282, 1074, 520, 513, 505, 575, 241, 193, 115, 0, 1439, 1216, 966, 712, 1111, 644, 528, 356, 529, 399, 218, 107, 0), # 152
(1465, 1171, 1166, 1287, 1082, 524, 517, 507, 579, 242, 193, 115, 0, 1443, 1221, 971, 717, 1119, 648, 530, 358, 533, 402, 219, 108, 0), # 153
(1477, 1176, 1173, 1294, 1087, 529, 517, 509, 581, 244, 193, 115, 0, 1450, 1229, 975, 721, 1125, 650, 531, 363, 538, 404, 220, 111, 0), # 154
(1485, 1179, 1178, 1303, 1091, 531, 518, 514, 584, 244, 194, 115, 0, 1462, 1236, 978, 726, 1134, 654, 533, 364, 540, 408, 221, 112, 0), # 155
(1493, 1185, 1186, 1311, 1096, 536, 525, 515, 589, 244, 195, 115, 0, 1473, 1246, 980, 728, 1141, 660, 536, 366, 546, 410, 223, 112, 0), # 156
(1499, 1192, 1191, 1317, 1102, 538, 527, 516, 592, 245, 195, 115, 0, 1480, 1253, 982, 732, 1149, 662, 539, 369, 550, 413, 226, 112, 0), # 157
(1507, 1197, 1198, 1320, 1109, 543, 530, 521, 595, 245, 195, 115, 0, 1487, 1255, 988, 740, 1153, 664, 545, 370, 552, 415, 227, 112, 0), # 158
(1520, 1201, 1207, 1324, 1116, 545, 534, 521, 597, 245, 197, 116, 0, 1490, 1264, 993, 743, 1160, 668, 549, 371, 554, 419, 229, 113, 0), # 159
(1528, 1205, 1209, 1330, 1121, 551, 535, 526, 599, 246, 199, 116, 0, 1496, 1271, 997, 746, 1168, 670, 553, 372, 557, 422, 231, 113, 0), # 160
(1536, 1207, 1216, 1343, 1124, 557, 540, 529, 601, 247, 199, 116, 0, 1504, 1274, 1002, 749, 1175, 673, 555, 373, 558, 425, 231, 113, 0), # 161
(1542, 1211, 1226, 1352, 1131, 562, 542, 530, 603, 247, 199, 116, 0, 1511, 1283, 1006, 752, 1181, 675, 557, 375, 560, 428, 231, 113, 0), # 162
(1544, 1219, 1228, 1355, 1143, 565, 547, 535, 603, 248, 200, 116, 0, 1525, 1286, 1010, 754, 1190, 679, 563, 377, 561, 429, 233, 113, 0), # 163
(1552, 1222, 1236, 1363, 1149, 565, 549, 536, 605, 248, 202, 117, 0, 1531, 1292, 1018, 755, 1192, 682, 566, 380, 564, 429, 234, 113, 0), # 164
(1558, 1231, 1241, 1367, 1153, 568, 549, 537, 608, 249, 203, 118, 0, 1537, 1295, 1022, 758, 1199, 684, 567, 383, 568, 429, 235, 114, 0), # 165
(1564, 1233, 1246, 1374, 1161, 570, 549, 537, 611, 250, 206, 120, 0, 1547, 1303, 1029, 760, 1201, 692, 569, 384, 571, 432, 235, 114, 0), # 166
(1567, 1236, 1251, 1379, 1170, 573, 553, 541, 615, 252, 206, 121, 0, 1553, 1308, 1033, 764, 1206, 692, 570, 384, 575, 432, 236, 114, 0), # 167
(1574, 1242, 1259, 1387, 1178, 576, 555, 545, 617, 253, 207, 122, 0, 1556, 1310, 1035, 768, 1212, 695, 577, 387, 579, 433, 237, 114, 0), # 168
(1586, 1245, 1266, 1394, 1181, 578, 555, 549, 619, 255, 209, 122, 0, 1561, 1315, 1037, 773, 1218, 695, 580, 389, 582, 434, 238, 114, 0), # 169
(1595, 1247, 1270, 1402, 1185, 580, 557, 550, 622, 256, 209, 122, 0, 1567, 1319, 1046, 781, 1222, 695, 581, 391, 584, 434, 239, 115, 0), # 170
(1604, 1254, 1274, 1407, 1192, 584, 557, 551, 628, 258, 209, 122, 0, 1571, 1322, 1053, 783, 1227, 697, 583, 392, 589, 435, 239, 115, 0), # 171
(1607, 1255, 1281, 1416, 1196, 585, 557, 552, 632, 259, 210, 123, 0, 1579, 1327, 1055, 783, 1231, 698, 588, 394, 592, 438, 240, 115, 0), # 172
(1612, 1256, 1288, 1423, 1198, 589, 558, 554, 635, 259, 210, 125, 0, 1590, 1332, 1064, 785, 1237, 701, 588, 395, 597, 442, 242, 115, 0), # 173
(1613, 1260, 1291, 1426, 1201, 591, 559, 556, 635, 260, 210, 125, 0, 1597, 1336, 1067, 788, 1238, 702, 588, 395, 600, 443, 242, 115, 0), # 174
(1619, 1262, 1295, 1432, 1205, 592, 559, 558, 637, 260, 211, 125, 0, 1601, 1342, 1072, 788, 1242, 704, 590, 395, 601, 443, 242, 115, 0), # 175
(1627, 1267, 1296, 1438, 1210, 592, 560, 559, 637, 260, 212, 126, 0, 1607, 1347, 1076, 789, 1247, 705, 591, 397, 603, 445, 243, 115, 0), # 176
(1628, 1269, 1298, 1446, 1211, 593, 562, 560, 638, 261, 212, 126, 0, 1613, 1351, 1077, 791, 1248, 706, 592, 400, 603, 445, 243, 115, 0), # 177
(1634, 1276, 1303, 1449, 1212, 594, 562, 562, 640, 262, 212, 126, 0, 1619, 1355, 1082, 794, 1254, 710, 593, 400, 604, 447, 243, 115, 0), # 178
(1634, 1276, 1303, 1449, 1212, 594, 562, 562, 640, 262, 212, 126, 0, 1619, 1355, 1082, 794, 1254, 710, 593, 400, 604, 447, 243, 115, 0), # 179
)
passenger_arriving_rate = (
(5.020865578371768, 5.064847846385402, 4.342736024677089, 4.661000830397574, 3.7031237384064077, 1.8308820436884476, 2.0730178076869574, 1.938823405408093, 2.030033020722669, 0.9895037538805926, 0.7008775273142672, 0.4081595898588478, 0.0, 5.083880212578363, 4.489755488447325, 3.5043876365713356, 2.968511261641777, 4.060066041445338, 2.7143527675713304, 2.0730178076869574, 1.3077728883488913, 1.8515618692032039, 1.5536669434658585, 0.8685472049354179, 0.4604407133077639, 0.0), # 0
(5.354327152019974, 5.399222302966028, 4.629455492775127, 4.968858189957462, 3.948326891649491, 1.9518237573581576, 2.209734470631847, 2.066464051210712, 2.164081775444303, 1.0547451730692876, 0.7471826893260219, 0.4351013884011963, 0.0, 5.419791647439855, 4.786115272413158, 3.73591344663011, 3.164235519207862, 4.328163550888606, 2.8930496716949965, 2.209734470631847, 1.3941598266843982, 1.9741634458247455, 1.6562860633191545, 0.9258910985550255, 0.49083839117872996, 0.0), # 1
(5.686723008979731, 5.732269739983398, 4.915035237956178, 5.275490778498595, 4.192641982499829, 2.072282983465593, 2.345909253980352, 2.193593853293508, 2.297595602292516, 1.1197284437551367, 0.7933038581293855, 0.46193605433775464, 0.0, 5.75436482820969, 5.0812965977153, 3.9665192906469278, 3.3591853312654094, 4.595191204585032, 3.0710313946109116, 2.345909253980352, 1.480202131046852, 2.0963209912499146, 1.758496926166199, 0.9830070475912357, 0.5211154309075817, 0.0), # 2
(6.016757793146562, 6.062668793441743, 5.198342391099879, 5.579682305649055, 4.435107784001268, 2.191782029841316, 2.4810018208239777, 2.3197088156227115, 2.430045053640364, 1.1841956746065454, 0.8390580686378972, 0.4885571404108718, 0.0, 6.086272806254225, 5.374128544519589, 4.195290343189486, 3.5525870238196355, 4.860090107280728, 3.247592341871796, 2.4810018208239777, 1.5655585927437972, 2.217553892000634, 1.8598941018830188, 1.0396684782199759, 0.551151708494704, 0.0), # 3
(6.343136148415981, 6.389098099345293, 5.478244083085864, 5.880216481036927, 4.674763069197661, 2.3098432043158894, 2.6144718342542292, 2.444304942164548, 2.560900681860902, 1.24788897429192, 0.8842623557650959, 0.514858199362897, 0.0, 6.414188632939817, 5.6634401929918665, 4.42131177882548, 3.743666922875759, 5.121801363721804, 3.422026919030367, 2.6144718342542292, 1.6498880030827783, 2.3373815345988307, 1.9600721603456428, 1.095648816617173, 0.5808270999404813, 0.0), # 4
(6.66456271868351, 6.710236293698289, 5.753607444793765, 6.175877014290295, 4.910646611132853, 2.4259888147198754, 2.745778957362612, 2.566878236885247, 2.689633039327186, 1.310550451479666, 0.9287337544245222, 0.5407327839361791, 0.0, 6.736785359632827, 5.948060623297969, 4.64366877212261, 3.9316513544389973, 5.379266078654372, 3.593629531639346, 2.745778957362612, 1.7328491533713395, 2.4553233055664263, 2.058625671430099, 1.1507214889587531, 0.6100214812452991, 0.0), # 5
(6.979742147844666, 7.024762012504959, 6.023299607103222, 6.465447615037239, 5.141797182850695, 2.5397411688838374, 2.8743828532406313, 2.686924703751037, 2.8157126784122717, 1.3719222148381898, 0.9722892995297139, 0.5660744468730674, 0.0, 7.052736037699606, 6.22681891560374, 4.8614464976485685, 4.115766644514569, 5.631425356824543, 3.761694585251452, 2.8743828532406313, 1.8141008349170267, 2.5708985914253475, 2.1551492050124135, 1.2046599214206444, 0.6386147284095418, 0.0), # 6
(7.2873790797949685, 7.331353891769537, 6.286187700893863, 6.747711992905847, 5.367253557395036, 2.650622574638337, 2.9997431849797924, 2.8039403467281465, 2.9386101514892147, 1.4317463730358968, 1.0147460259942116, 0.5907767409159108, 0.0, 7.360713718506519, 6.498544150075018, 5.073730129971057, 4.2952391191076895, 5.877220302978429, 3.9255164854194056, 2.9997431849797924, 1.8933018390273837, 2.683626778697518, 2.249237330968616, 1.2572375401787725, 0.6664867174335943, 0.0), # 7
(7.586178158429934, 7.628690567496257, 6.54113885704533, 7.021453857524196, 5.586054507809724, 2.7581553398139356, 3.1213196156715988, 2.917421169782802, 3.0577960109310682, 1.4897650347411937, 1.0559209687315536, 0.6147332188070586, 0.0, 7.659391453419917, 6.762065406877643, 5.279604843657768, 4.469295104223581, 6.1155920218621365, 4.084389637695923, 3.1213196156715988, 1.970110957009954, 2.793027253904862, 2.3404846191747324, 1.3082277714090662, 0.6935173243178416, 0.0), # 8
(7.874844027645085, 7.915450675689353, 6.787020206437253, 7.285456918520376, 5.797238807138606, 2.861861772241199, 3.23857180840756, 3.0268631768812346, 3.1727408091108913, 1.5457203086224858, 1.0956311626552797, 0.6378374332888596, 0.0, 7.947442293806162, 7.016211766177453, 5.478155813276398, 4.637160925867456, 6.345481618221783, 4.237608447633728, 3.23857180840756, 2.044186980172285, 2.898619403569303, 2.4284856395067926, 1.3574040412874508, 0.7195864250626686, 0.0), # 9
(8.152081331335932, 8.190312852353056, 7.022698879949271, 7.538504885522466, 5.999845228425533, 2.961264179750688, 3.3509594262791773, 3.1317623719896712, 3.282915098401738, 1.599354303348179, 1.133693642678929, 0.6599829371036627, 0.0, 8.22353929103161, 7.259812308140289, 5.668468213394645, 4.798062910044536, 6.565830196803476, 4.384467320785539, 3.3509594262791773, 2.11518869982192, 2.9999226142127666, 2.5128349618408223, 1.4045397759898541, 0.7445738956684597, 0.0), # 10
(8.416594713398005, 8.451955733491605, 7.247042008461013, 7.779381468158547, 6.192912544714355, 3.055884870172965, 3.457942132377958, 3.2316147590743394, 3.3877894311766643, 1.6504091275866801, 1.1699254437160416, 0.6810632829938176, 0.0, 8.486355496462611, 7.491696112931993, 5.849627218580208, 4.951227382760039, 6.775578862353329, 4.524260662704076, 3.457942132377958, 2.1827749072664036, 3.0964562723571776, 2.5931271560528497, 1.4494084016922026, 0.7683596121356006, 0.0), # 11
(8.667088817726812, 8.699057955109222, 7.458916722852117, 8.006870376056709, 6.375479529048918, 3.1452461513385908, 3.5589795897954057, 3.325916342101467, 3.486834359808726, 1.6986268900063934, 1.2041436006801558, 0.7009720237016724, 0.0, 8.734563961465534, 7.710692260718395, 6.020718003400779, 5.095880670019179, 6.973668719617452, 4.656282878942054, 3.5589795897954057, 2.246604393813279, 3.187739764524459, 2.6689567920189035, 1.4917833445704234, 0.7908234504644749, 0.0), # 12
(8.902268288217876, 8.93029815321015, 7.657190154002218, 8.219755318845033, 6.546584954473067, 3.2288703310781304, 3.653531461623028, 3.414163125037284, 3.579520436670977, 1.7437496992757264, 1.2361651484848115, 0.7196027119695768, 0.0, 8.966837737406735, 7.915629831665344, 6.180825742424058, 5.2312490978271775, 7.159040873341954, 4.7798283750521975, 3.653531461623028, 2.306335950770093, 3.2732924772365335, 2.7399184396150114, 1.5314380308004438, 0.8118452866554684, 0.0), # 13
(9.120837768766716, 9.144354963798623, 7.840729432790956, 8.416820006151594, 6.705267594030659, 3.306279717222145, 3.7410574109523305, 3.4958511118480193, 3.6653182141364735, 1.785519664063084, 1.2658071220435476, 0.7368489005398801, 0.0, 9.181849875652563, 8.10533790593868, 6.329035610217737, 5.3565589921892505, 7.330636428272947, 4.894191556587227, 3.7410574109523305, 2.3616283694443894, 3.3526337970153297, 2.8056066687171985, 1.5681458865581912, 0.8313049967089657, 0.0), # 14
(9.321501903268855, 9.339907022878865, 8.008401690097953, 8.59684814760449, 6.850566220765538, 3.376996617601199, 3.821017100874813, 3.5704763064998986, 3.743698244578273, 1.823678893036873, 1.2928865562699035, 0.752604142154931, 0.0, 9.37827342756938, 8.27864556370424, 6.464432781349516, 5.471036679110618, 7.487396489156546, 4.998666829099858, 3.821017100874813, 2.4121404411437135, 3.425283110382769, 2.865616049201497, 1.6016803380195905, 0.8490824566253515, 0.0), # 15
(9.5029653356198, 9.51563296645512, 8.159074056802854, 8.758623452831788, 6.981519607721555, 3.4405433400458514, 3.892870194481988, 3.6375347129591504, 3.8141310803694286, 1.8579694948654994, 1.3172204860774188, 0.7667619895570784, 0.0, 9.554781444523545, 8.434381885127861, 6.586102430387094, 5.5739084845964975, 7.628262160738857, 5.092548598142811, 3.892870194481988, 2.4575309571756083, 3.4907598038607777, 2.9195411509439295, 1.6318148113605708, 0.8650575424050111, 0.0), # 16
(9.663932709715075, 9.670211430531618, 8.291613663785293, 8.900929631461583, 7.097166527942559, 3.4964421923866666, 3.9560763548653552, 3.6965223351920073, 3.8760872738829946, 1.8881335782173672, 1.3386259463796333, 0.7792159954886714, 0.0, 9.710046977881415, 8.571375950375383, 6.693129731898166, 5.6644007346521, 7.752174547765989, 5.17513126926881, 3.9560763548653552, 2.4974587088476192, 3.5485832639712793, 2.9669765438205284, 1.6583227327570589, 0.8791101300483289, 0.0), # 17
(9.803108669450204, 9.802321051112584, 8.404887641924901, 9.022550393121959, 7.1965457544723925, 3.5442154824542103, 4.010095245116426, 3.746935177164692, 3.929037377492032, 1.9139132517608846, 1.3569199720900849, 0.7898597126920597, 0.0, 9.842743079009345, 8.688456839612655, 6.784599860450424, 5.741739755282652, 7.858074754984064, 5.245709248030569, 4.010095245116426, 2.531582487467293, 3.5982728772361963, 3.0075167977073205, 1.6809775283849802, 0.8911200955556896, 0.0), # 18
(9.919197858720699, 9.910640464202265, 8.497763122101317, 9.122269447440985, 7.2786960603549105, 3.5833855180790386, 4.054386528326697, 3.7882692428434357, 3.9724519435695926, 1.9350506241644574, 1.3719195981223131, 0.7985866939095915, 0.0, 9.951542799273696, 8.784453633005505, 6.859597990611565, 5.80515187249337, 7.944903887139185, 5.30357693998081, 4.054386528326697, 2.55956108434217, 3.6393480301774552, 3.0407564824803295, 1.6995526244202632, 0.9009673149274788, 0.0), # 19
(10.010904921422082, 9.993848305804882, 8.569107235194169, 9.198870504046766, 7.342656218633962, 3.613474607091719, 4.088409867587681, 3.8200205361944657, 4.005801524488732, 1.95128780409649, 1.3834418593898585, 0.805290491883616, 0.0, 10.035119190040824, 8.858195410719775, 6.9172092969492915, 5.853863412289469, 8.011603048977465, 5.348028750672252, 4.088409867587681, 2.5810532907797996, 3.671328109316981, 3.0662901680155894, 1.713821447038834, 0.9085316641640803, 0.0), # 20
(10.076934501449866, 10.050623211924679, 8.6177871120831, 9.251137272567364, 7.387465002353392, 3.6340050573228124, 4.1116249259908795, 3.84168506118401, 4.028556672622507, 1.9623669002253892, 1.39130379080626, 0.8098646593564828, 0.0, 10.092145302677078, 8.90851125292131, 6.9565189540313, 5.887100700676166, 8.057113345245014, 5.378359085657614, 4.1116249259908795, 2.5957178980877234, 3.693732501176696, 3.0837124241891223, 1.72355742241662, 0.91369301926588, 0.0), # 21
(10.115991242699579, 10.079643818565883, 8.642669883647738, 9.277853462630876, 7.41216118455705, 3.644499176602881, 4.1234913666278, 3.852758821778298, 4.040187940343971, 1.968030021219561, 1.3953224272850568, 0.8122027490705409, 0.0, 10.121294188548827, 8.934230239775948, 6.976612136425284, 5.904090063658682, 8.080375880687942, 5.393862350489617, 4.1234913666278, 2.6032136975734863, 3.706080592278525, 3.09261782087696, 1.7285339767295478, 0.9163312562332622, 0.0), # 22
(10.13039336334264, 10.083079961133974, 8.645769318701419, 9.281198109567903, 7.418488037355065, 3.6458333333333335, 4.124902001129669, 3.8539557613168727, 4.0416420781893, 1.9686980681298587, 1.3958263395269568, 0.8124914647157445, 0.0, 10.125, 8.93740611187319, 6.9791316976347835, 5.906094204389575, 8.0832841563786, 5.395538065843622, 4.124902001129669, 2.604166666666667, 3.7092440186775324, 3.0937327031893016, 1.729153863740284, 0.9166436328303613, 0.0), # 23
(10.141012413034153, 10.08107561728395, 8.645262345679013, 9.280786458333335, 7.422071742409901, 3.6458333333333335, 4.124126906318083, 3.852291666666667, 4.041447222222222, 1.968287654320988, 1.39577076318743, 0.8124238683127573, 0.0, 10.125, 8.936662551440328, 6.978853815937151, 5.904862962962962, 8.082894444444443, 5.393208333333334, 4.124126906318083, 2.604166666666667, 3.7110358712049507, 3.0935954861111123, 1.7290524691358027, 0.9164614197530866, 0.0), # 24
(10.15140723021158, 10.077124771376313, 8.644261545496114, 9.279972029320987, 7.4255766303963355, 3.6458333333333335, 4.122599451303155, 3.8490226337448563, 4.041062242798354, 1.96747970964792, 1.3956605665710604, 0.8122904282883707, 0.0, 10.125, 8.935194711172077, 6.978302832855302, 5.902439128943758, 8.082124485596708, 5.388631687242799, 4.122599451303155, 2.604166666666667, 3.7127883151981678, 3.0933240097736636, 1.728852309099223, 0.9161022519433014, 0.0), # 25
(10.161577019048034, 10.071287780064015, 8.642780635573846, 9.278764081790122, 7.429002578947403, 3.6458333333333335, 4.120343359154361, 3.8442103909465026, 4.0404920781893, 1.9662876771833566, 1.3954967473084758, 0.8120929736320684, 0.0, 10.125, 8.933022709952752, 6.977483736542379, 5.898863031550069, 8.0809841563786, 5.381894547325103, 4.120343359154361, 2.604166666666667, 3.7145012894737013, 3.0929213605967085, 1.7285561271147696, 0.915571616369456, 0.0), # 26
(10.171520983716636, 10.063624999999998, 8.640833333333333, 9.277171874999999, 7.432349465696142, 3.6458333333333335, 4.117382352941177, 3.837916666666667, 4.039741666666666, 1.9647250000000003, 1.3952803030303031, 0.8118333333333335, 0.0, 10.125, 8.930166666666667, 6.976401515151515, 5.894175, 8.079483333333332, 5.373083333333334, 4.117382352941177, 2.604166666666667, 3.716174732848071, 3.0923906250000006, 1.7281666666666669, 0.914875, 0.0), # 27
(10.181238328390501, 10.054196787837219, 8.638433356195703, 9.275204668209877, 7.4356171682756, 3.6458333333333335, 4.113740155733075, 3.830203189300412, 4.038815946502057, 1.9628051211705537, 1.3950122313671698, 0.8115133363816492, 0.0, 10.125, 8.926646700198141, 6.9750611568358485, 5.88841536351166, 8.077631893004114, 5.3622844650205765, 4.113740155733075, 2.604166666666667, 3.7178085841378, 3.091734889403293, 1.7276866712391405, 0.9140178898033837, 0.0), # 28
(10.19072825724275, 10.043063500228623, 8.635594421582077, 9.272871720679012, 7.438805564318813, 3.6458333333333335, 4.109440490599533, 3.821131687242798, 4.037719855967078, 1.9605414837677189, 1.3946935299497027, 0.811134811766499, 0.0, 10.125, 8.922482929431489, 6.973467649748514, 5.881624451303155, 8.075439711934155, 5.349584362139917, 4.109440490599533, 2.604166666666667, 3.7194027821594067, 3.0909572402263383, 1.7271188843164156, 0.9130057727480568, 0.0), # 29
(10.199989974446497, 10.03028549382716, 8.63233024691358, 9.270182291666666, 7.441914531458824, 3.6458333333333335, 4.104507080610022, 3.8107638888888884, 4.036458333333333, 1.957947530864198, 1.39432519640853, 0.8106995884773662, 0.0, 10.125, 8.917695473251028, 6.9716259820426485, 5.873842592592593, 8.072916666666666, 5.335069444444444, 4.104507080610022, 2.604166666666667, 3.720957265729412, 3.0900607638888897, 1.7264660493827162, 0.9118441358024693, 0.0), # 30
(10.209022684174858, 10.01592312528578, 8.62865454961134, 9.267145640432098, 7.444943947328672, 3.6458333333333335, 4.09896364883402, 3.799161522633745, 4.035036316872428, 1.9550367055326936, 1.3939082283742779, 0.8102094955037343, 0.0, 10.125, 8.912304450541077, 6.969541141871389, 5.865110116598079, 8.070072633744855, 5.318826131687243, 4.09896364883402, 2.604166666666667, 3.722471973664336, 3.0890485468107003, 1.7257309099222682, 0.910538465935071, 0.0), # 31
(10.217825590600954, 10.00003675125743, 8.624581047096479, 9.263771026234568, 7.447893689561397, 3.6458333333333335, 4.092833918340999, 3.7863863168724285, 4.033458744855967, 1.951822450845908, 1.3934436234775742, 0.8096663618350862, 0.0, 10.125, 8.906329980185948, 6.96721811738787, 5.8554673525377225, 8.066917489711933, 5.3009408436214, 4.092833918340999, 2.604166666666667, 3.7239468447806985, 3.0879236754115236, 1.7249162094192958, 0.909094250114312, 0.0), # 32
(10.226397897897897, 9.98268672839506, 8.620123456790123, 9.260067708333333, 7.450763635790041, 3.6458333333333335, 4.086141612200436, 3.7725000000000004, 4.031730555555555, 1.9483182098765437, 1.392932379349046, 0.8090720164609053, 0.0, 10.125, 8.899792181069957, 6.96466189674523, 5.84495462962963, 8.06346111111111, 5.2815, 4.086141612200436, 2.604166666666667, 3.7253818178950207, 3.086689236111112, 1.724024691358025, 0.9075169753086421, 0.0), # 33
(10.23473881023881, 9.963933413351622, 8.615295496113397, 9.256044945987654, 7.453553663647644, 3.6458333333333335, 4.078910453481805, 3.7575643004115222, 4.029856687242798, 1.9445374256973027, 1.3923754936193207, 0.8084282883706753, 0.0, 10.125, 8.892711172077426, 6.961877468096604, 5.833612277091907, 8.059713374485597, 5.260590020576132, 4.078910453481805, 2.604166666666667, 3.726776831823822, 3.085348315329219, 1.7230590992226795, 0.9058121284865113, 0.0), # 34
(10.242847531796807, 9.943837162780063, 8.610110882487428, 9.25171199845679, 7.456263650767246, 3.6458333333333335, 4.071164165254579, 3.741640946502058, 4.0278420781893, 1.9404935413808875, 1.3917739639190256, 0.807737006553879, 0.0, 10.125, 8.88510707209267, 6.958869819595128, 5.821480624142661, 8.0556841563786, 5.238297325102881, 4.071164165254579, 2.604166666666667, 3.728131825383623, 3.0839039994855972, 1.7220221764974855, 0.9039851966163696, 0.0), # 35
(10.250723266745005, 9.922458333333331, 8.604583333333334, 9.247078125, 7.45889347478189, 3.6458333333333335, 4.062926470588235, 3.724791666666667, 4.025691666666666, 1.9362000000000004, 1.391128787878788, 0.8070000000000002, 0.0, 10.125, 8.877, 6.95564393939394, 5.8086, 8.051383333333332, 5.214708333333334, 4.062926470588235, 2.604166666666667, 3.729446737390945, 3.0823593750000007, 1.7209166666666669, 0.9020416666666666, 0.0), # 36
(10.258365219256524, 9.89985728166438, 8.598726566072246, 9.242152584876543, 7.4614430133246135, 3.6458333333333335, 4.054221092552247, 3.707078189300412, 4.023410390946502, 1.931670244627344, 1.3904409631292352, 0.8062190976985216, 0.0, 10.125, 8.868410074683737, 6.952204815646175, 5.79501073388203, 8.046820781893004, 5.189909465020577, 4.054221092552247, 2.604166666666667, 3.7307215066623067, 3.080717528292182, 1.7197453132144491, 0.8999870256058529, 0.0), # 37
(10.265772593504476, 9.876094364426155, 8.592554298125286, 9.23694463734568, 7.46391214402846, 3.6458333333333335, 4.04507175421609, 3.6885622427983544, 4.021003189300411, 1.92691771833562, 1.3897114873009937, 0.8053961286389272, 0.0, 10.125, 8.859357415028198, 6.948557436504967, 5.780753155006859, 8.042006378600822, 5.163987139917697, 4.04507175421609, 2.604166666666667, 3.73195607201423, 3.078981545781894, 1.7185108596250571, 0.8978267604023779, 0.0), # 38
(10.272944593661986, 9.851229938271604, 8.586080246913582, 9.231463541666667, 7.466300744526468, 3.6458333333333335, 4.035502178649238, 3.6693055555555554, 4.0184750000000005, 1.9219558641975314, 1.3889413580246914, 0.8045329218106996, 0.0, 10.125, 8.849862139917693, 6.944706790123457, 5.765867592592593, 8.036950000000001, 5.137027777777778, 4.035502178649238, 2.604166666666667, 3.733150372263234, 3.07715451388889, 1.7172160493827164, 0.8955663580246914, 0.0), # 39
(10.279880423902163, 9.82532435985368, 8.579318129858253, 9.225718557098766, 7.468608692451679, 3.6458333333333335, 4.025536088921165, 3.649369855967079, 4.015830761316872, 1.9167981252857802, 1.3881315729309558, 0.8036313062033228, 0.0, 10.125, 8.83994436823655, 6.940657864654778, 5.750394375857339, 8.031661522633744, 5.1091177983539104, 4.025536088921165, 2.604166666666667, 3.7343043462258394, 3.0752395190329227, 1.7158636259716507, 0.8932113054412438, 0.0), # 40
(10.286579288398128, 9.79843798582533, 8.57228166438043, 9.219718942901235, 7.4708358654371345, 3.6458333333333335, 4.015197208101347, 3.628816872427984, 4.0130754115226335, 1.9114579446730684, 1.3872831296504138, 0.8026931108062796, 0.0, 10.125, 8.829624218869075, 6.936415648252069, 5.734373834019204, 8.026150823045267, 5.0803436213991775, 4.015197208101347, 2.604166666666667, 3.7354179327185673, 3.073239647633746, 1.7144563328760862, 0.8907670896204848, 0.0), # 41
(10.293040391323, 9.770631172839506, 8.564984567901236, 9.213473958333335, 7.472982141115872, 3.6458333333333335, 4.004509259259259, 3.6077083333333335, 4.010213888888889, 1.9059487654320992, 1.3863970258136926, 0.8017201646090536, 0.0, 10.125, 8.818921810699589, 6.931985129068463, 5.717846296296297, 8.020427777777778, 5.050791666666667, 4.004509259259259, 2.604166666666667, 3.736491070557936, 3.0711579861111122, 1.7129969135802474, 0.8882391975308643, 0.0), # 42
(10.299262936849892, 9.741964277549155, 8.557440557841794, 9.206992862654321, 7.475047397120935, 3.6458333333333335, 3.993495965464375, 3.58610596707819, 4.007251131687243, 1.9002840306355744, 1.3854742590514195, 0.800714296601128, 0.0, 10.125, 8.807857262612407, 6.927371295257098, 5.700852091906722, 8.014502263374485, 5.020548353909466, 3.993495965464375, 2.604166666666667, 3.7375236985604676, 3.0689976208847747, 1.7114881115683587, 0.8856331161408324, 0.0), # 43
(10.305246129151927, 9.712497656607225, 8.549663351623229, 9.200284915123458, 7.477031511085363, 3.6458333333333335, 3.9821810497861696, 3.564071502057614, 4.0041920781893, 1.8944771833561962, 1.3845158269942222, 0.7996773357719861, 0.0, 10.125, 8.796450693491845, 6.92257913497111, 5.683431550068587, 8.0083841563786, 4.98970010288066, 3.9821810497861696, 2.604166666666667, 3.7385157555426813, 3.0667616383744867, 1.709932670324646, 0.8829543324188387, 0.0), # 44
(10.310989172402216, 9.682291666666666, 8.541666666666668, 9.193359375, 7.478934360642197, 3.6458333333333335, 3.9705882352941178, 3.541666666666667, 4.001041666666666, 1.8885416666666672, 1.3835227272727273, 0.798611111111111, 0.0, 10.125, 8.784722222222221, 6.917613636363637, 5.665625, 8.002083333333331, 4.958333333333334, 3.9705882352941178, 2.604166666666667, 3.7394671803210984, 3.064453125000001, 1.7083333333333335, 0.8802083333333335, 0.0), # 45
(10.31649127077388, 9.65140666438043, 8.533464220393233, 9.186225501543209, 7.480755823424477, 3.6458333333333335, 3.958741245057694, 3.518953189300412, 3.997804835390946, 1.8824909236396894, 1.3824959575175624, 0.7975174516079867, 0.0, 10.125, 8.772691967687852, 6.912479787587812, 5.647472770919067, 7.995609670781892, 4.926534465020577, 3.958741245057694, 2.604166666666667, 3.7403779117122387, 3.062075167181071, 1.7066928440786466, 0.8774006058527665, 0.0), # 46
(10.321751628440035, 9.619903006401461, 8.525069730224052, 9.178892554012345, 7.482495777065244, 3.6458333333333335, 3.9466638021463734, 3.4959927983539094, 3.994486522633745, 1.8763383973479657, 1.3814365153593549, 0.7963981862520958, 0.0, 10.125, 8.760380048773053, 6.9071825767967745, 5.629015192043896, 7.98897304526749, 4.894389917695474, 3.9466638021463734, 2.604166666666667, 3.741247888532622, 3.0596308513374493, 1.7050139460448106, 0.8745366369455876, 0.0), # 47
(10.326769449573796, 9.587841049382716, 8.516496913580248, 9.171369791666667, 7.48415409919754, 3.6458333333333335, 3.9343796296296296, 3.4728472222222226, 3.9910916666666667, 1.8700975308641978, 1.3803453984287317, 0.7952551440329219, 0.0, 10.125, 8.74780658436214, 6.901726992143659, 5.610292592592592, 7.982183333333333, 4.861986111111112, 3.9343796296296296, 2.604166666666667, 3.74207704959877, 3.05712326388889, 1.7032993827160496, 0.871621913580247, 0.0), # 48
(10.331543938348286, 9.555281149977136, 8.507759487882945, 9.163666473765433, 7.485730667454405, 3.6458333333333335, 3.9219124505769383, 3.4495781893004116, 3.987625205761317, 1.8637817672610888, 1.3792236043563206, 0.7940901539399483, 0.0, 10.125, 8.73499169333943, 6.896118021781603, 5.5913453017832655, 7.975250411522634, 4.829409465020577, 3.9219124505769383, 2.604166666666667, 3.7428653337272024, 3.054555491255145, 1.7015518975765893, 0.8686619227251944, 0.0), # 49
(10.336074298936616, 9.522283664837678, 8.49887117055327, 9.155791859567902, 7.4872253594688765, 3.6458333333333335, 3.909285988057775, 3.4262474279835393, 3.9840920781893, 1.85740454961134, 1.3780721307727481, 0.7929050449626583, 0.0, 10.125, 8.72195549458924, 6.89036065386374, 5.572213648834019, 7.9681841563786, 4.796746399176955, 3.909285988057775, 2.604166666666667, 3.7436126797344382, 3.051930619855968, 1.6997742341106543, 0.86566215134888, 0.0), # 50
(10.34035973551191, 9.488908950617283, 8.489845679012346, 9.147755208333333, 7.488638052873998, 3.6458333333333335, 3.896523965141612, 3.4029166666666666, 3.9804972222222226, 1.8509793209876546, 1.3768919753086422, 0.7917016460905352, 0.0, 10.125, 8.708718106995885, 6.884459876543211, 5.552937962962963, 7.960994444444445, 4.764083333333334, 3.896523965141612, 2.604166666666667, 3.744319026436999, 3.049251736111112, 1.6979691358024693, 0.8626280864197532, 0.0), # 51
(10.344399452247279, 9.455217363968908, 8.480696730681299, 9.139565779320987, 7.489968625302809, 3.6458333333333335, 3.883650104897926, 3.3796476337448556, 3.976845576131687, 1.8445195244627348, 1.3756841355946297, 0.7904817863130622, 0.0, 10.125, 8.695299649443683, 6.878420677973147, 5.533558573388203, 7.953691152263374, 4.731506687242798, 3.883650104897926, 2.604166666666667, 3.7449843126514044, 3.04652192644033, 1.69613934613626, 0.8595652149062645, 0.0), # 52
(10.348192653315843, 9.421269261545497, 8.471438042981255, 9.131232831790122, 7.491216954388353, 3.6458333333333335, 3.8706881303961915, 3.3565020576131688, 3.9731420781893005, 1.8380386031092826, 1.3744496092613379, 0.7892472946197227, 0.0, 10.125, 8.681720240816947, 6.872248046306688, 5.514115809327846, 7.946284156378601, 4.699102880658437, 3.8706881303961915, 2.604166666666667, 3.7456084771941764, 3.043744277263375, 1.694287608596251, 0.8564790237768635, 0.0), # 53
(10.351738542890716, 9.387125000000001, 8.462083333333332, 9.122765625, 7.492382917763668, 3.6458333333333335, 3.8576617647058824, 3.333541666666666, 3.9693916666666667, 1.8315500000000005, 1.3731893939393938, 0.788, 0.0, 10.125, 8.668, 6.865946969696969, 5.49465, 7.938783333333333, 4.666958333333333, 3.8576617647058824, 2.604166666666667, 3.746191458881834, 3.040921875000001, 1.6924166666666667, 0.8533750000000002, 0.0), # 54
(10.355036325145022, 9.352844935985367, 8.452646319158665, 9.114173418209877, 7.493466393061793, 3.6458333333333335, 3.844594730896474, 3.3108281893004117, 3.9655992798353905, 1.8250671582075908, 1.3719044872594257, 0.7867417314433777, 0.0, 10.125, 8.654159045877153, 6.859522436297127, 5.4752014746227715, 7.931198559670781, 4.6351594650205765, 3.844594730896474, 2.604166666666667, 3.7467331965308963, 3.0380578060699595, 1.6905292638317333, 0.8502586305441244, 0.0), # 55
(10.358085204251871, 9.31848942615455, 8.443140717878373, 9.105465470679011, 7.4944672579157725, 3.6458333333333335, 3.8315107520374405, 3.288423353909465, 3.961769855967078, 1.818603520804756, 1.3705958868520598, 0.7854743179393385, 0.0, 10.125, 8.640217497332722, 6.852979434260299, 5.455810562414267, 7.923539711934156, 4.603792695473251, 3.8315107520374405, 2.604166666666667, 3.7472336289578863, 3.035155156893005, 1.6886281435756747, 0.8471354023776865, 0.0), # 56
(10.360884384384383, 9.284118827160494, 8.433580246913582, 9.096651041666666, 7.495385389958644, 3.6458333333333335, 3.818433551198257, 3.2663888888888892, 3.957908333333333, 1.812172530864198, 1.369264590347924, 0.7841995884773663, 0.0, 10.125, 8.626195473251027, 6.8463229517396185, 5.436517592592593, 7.915816666666666, 4.572944444444445, 3.818433551198257, 2.604166666666667, 3.747692694979322, 3.0322170138888898, 1.6867160493827165, 0.844010802469136, 0.0), # 57
(10.36343306971568, 9.24979349565615, 8.423978623685414, 9.087739390432098, 7.496220666823449, 3.6458333333333335, 3.8053868514483984, 3.2447865226337447, 3.954019650205761, 1.8057876314586196, 1.367911595377645, 0.7829193720469442, 0.0, 10.125, 8.612113092516385, 6.8395579768882255, 5.417362894375858, 7.908039300411522, 4.5427011316872425, 3.8053868514483984, 2.604166666666667, 3.7481103334117245, 3.029246463477367, 1.684795724737083, 0.8408903177869229, 0.0), # 58
(10.36573046441887, 9.215573788294467, 8.414349565614998, 9.078739776234567, 7.49697296614323, 3.6458333333333335, 3.792394375857339, 3.2236779835390945, 3.9501087448559673, 1.799462265660723, 1.3665378995718502, 0.7816354976375554, 0.0, 10.125, 8.597990474013107, 6.83268949785925, 5.398386796982168, 7.900217489711935, 4.513149176954733, 3.792394375857339, 2.604166666666667, 3.748486483071615, 3.02624659207819, 1.6828699131229998, 0.8377794352994972, 0.0), # 59
(10.367775772667077, 9.181520061728396, 8.404706790123456, 9.069661458333334, 7.497642165551024, 3.6458333333333335, 3.779479847494553, 3.203125, 3.946180555555556, 1.7932098765432103, 1.3651445005611673, 0.7803497942386832, 0.0, 10.125, 8.583847736625515, 6.825722502805837, 5.37962962962963, 7.892361111111112, 4.484375, 3.779479847494553, 2.604166666666667, 3.748821082775512, 3.023220486111112, 1.6809413580246915, 0.8346836419753088, 0.0), # 60
(10.369568198633415, 9.147692672610884, 8.395064014631917, 9.060513695987654, 7.498228142679874, 3.6458333333333335, 3.7666669894295164, 3.183189300411523, 3.9422400205761314, 1.7870439071787843, 1.3637323959762233, 0.7790640908398111, 0.0, 10.125, 8.56970499923792, 6.818661979881115, 5.361131721536351, 7.884480041152263, 4.456465020576132, 3.7666669894295164, 2.604166666666667, 3.749114071339937, 3.0201712319958856, 1.6790128029263836, 0.8316084247828076, 0.0), # 61
(10.371106946491004, 9.114151977594878, 8.385434956561502, 9.051305748456791, 7.498730775162823, 3.6458333333333335, 3.753979524731703, 3.1639326131687247, 3.9382920781893, 1.7809778006401469, 1.3623025834476452, 0.7777802164304223, 0.0, 10.125, 8.555582380734645, 6.811512917238226, 5.3429334019204395, 7.8765841563786, 4.429505658436215, 3.753979524731703, 2.604166666666667, 3.7493653875814115, 3.0171019161522645, 1.6770869913123003, 0.8285592706904436, 0.0), # 62
(10.37239122041296, 9.080958333333333, 8.375833333333334, 9.042046875, 7.499149940632904, 3.6458333333333335, 3.741441176470588, 3.1454166666666667, 3.9343416666666666, 1.7750250000000003, 1.360856060606061, 0.7765000000000001, 0.0, 10.125, 8.5415, 6.804280303030303, 5.325075, 7.868683333333333, 4.403583333333334, 3.741441176470588, 2.604166666666667, 3.749574970316452, 3.014015625000001, 1.675166666666667, 0.8255416666666667, 0.0), # 63
(10.373420224572397, 9.048172096479195, 8.366272862368541, 9.032746334876544, 7.4994855167231655, 3.6458333333333335, 3.729075667715646, 3.127703189300412, 3.9303937242798352, 1.7691989483310475, 1.3593938250820965, 0.7752252705380279, 0.0, 10.125, 8.527477975918305, 6.796969125410483, 5.307596844993141, 7.8607874485596705, 4.378784465020577, 3.729075667715646, 2.604166666666667, 3.7497427583615828, 3.0109154449588487, 1.6732545724737085, 0.822561099679927, 0.0), # 64
(10.374193163142438, 9.015853623685413, 8.35676726108825, 9.023413387345679, 7.499737381066645, 3.6458333333333335, 3.7169067215363514, 3.1108539094650207, 3.9264531893004113, 1.7635130887059902, 1.357916874506381, 0.7739578570339887, 0.0, 10.125, 8.513536427373873, 6.7895843725319045, 5.290539266117969, 7.852906378600823, 4.355195473251029, 3.7169067215363514, 2.604166666666667, 3.7498686905333223, 3.0078044624485605, 1.67135345221765, 0.819623056698674, 0.0), # 65
(10.374709240296196, 8.984063271604938, 8.34733024691358, 9.014057291666667, 7.499905411296382, 3.6458333333333335, 3.7049580610021784, 3.094930555555556, 3.9225250000000003, 1.7579808641975312, 1.3564262065095398, 0.7726995884773664, 0.0, 10.125, 8.499695473251029, 6.782131032547699, 5.273942592592592, 7.8450500000000005, 4.332902777777778, 3.7049580610021784, 2.604166666666667, 3.749952705648191, 3.0046857638888897, 1.6694660493827165, 0.8167330246913582, 0.0), # 66
(10.374967660206792, 8.952861396890716, 8.337975537265661, 9.004687307098765, 7.499989485045419, 3.6458333333333335, 3.693253409182603, 3.0799948559670787, 3.9186140946502057, 1.7526157178783728, 1.3549228187222018, 0.7714522938576437, 0.0, 10.125, 8.485975232434079, 6.774614093611008, 5.257847153635117, 7.837228189300411, 4.31199279835391, 3.693253409182603, 2.604166666666667, 3.7499947425227096, 3.001562435699589, 1.6675951074531323, 0.8138964906264289, 0.0), # 67
(10.374791614480825, 8.922144586043629, 8.328671624942844, 8.995231305354269, 7.499918636864896, 3.645765673423767, 3.681757597414823, 3.0659766041761927, 3.9146959495503735, 1.747405110411792, 1.3533809980900628, 0.770210835158312, 0.0, 10.124875150034294, 8.47231918674143, 6.766904990450313, 5.242215331235375, 7.829391899100747, 4.29236724584667, 3.681757597414823, 2.604118338159833, 3.749959318432448, 2.99841043511809, 1.6657343249885688, 0.8111040532766937, 0.0), # 68
(10.373141706924315, 8.890975059737157, 8.319157021604937, 8.985212635869564, 7.499273783587508, 3.6452307956104257, 3.6701340906733066, 3.052124485596708, 3.910599279835391, 1.7422015976761076, 1.3516438064859118, 0.7689349144466104, 0.0, 10.12388599537037, 8.458284058912714, 6.758219032429559, 5.226604793028321, 7.821198559670782, 4.272974279835391, 3.6701340906733066, 2.6037362825788755, 3.749636891793754, 2.9950708786231885, 1.6638314043209876, 0.8082704599761052, 0.0), # 69
(10.369885787558895, 8.859209754856408, 8.309390360653863, 8.974565343196456, 7.497999542752628, 3.6441773992785653, 3.658330067280685, 3.0383135192805977, 3.9063009640298736, 1.736979881115684, 1.3496914810876801, 0.7676185634410675, 0.0, 10.121932334533609, 8.44380419785174, 6.7484574054383994, 5.210939643347051, 7.812601928059747, 4.253638926992837, 3.658330067280685, 2.6029838566275467, 3.748999771376314, 2.991521781065486, 1.6618780721307727, 0.8053827049869463, 0.0), # 70
(10.365069660642929, 8.826867654542236, 8.299375071444901, 8.963305127818035, 7.496112052502757, 3.6426225549966977, 3.646350829769494, 3.0245482777015704, 3.9018074035970125, 1.7317400898356603, 1.347531228463977, 0.7662627447677263, 0.0, 10.119039887688615, 8.428890192444989, 6.737656142319885, 5.195220269506979, 7.803614807194025, 4.234367588782199, 3.646350829769494, 2.6018732535690696, 3.7480560262513785, 2.987768375939346, 1.6598750142889804, 0.8024425140492942, 0.0), # 71
(10.358739130434783, 8.793967741935482, 8.289114583333333, 8.95144769021739, 7.493627450980392, 3.6405833333333337, 3.634201680672269, 3.0108333333333333, 3.897125, 1.7264823529411768, 1.3451702551834133, 0.7648684210526316, 0.0, 10.115234375, 8.413552631578947, 6.7258512759170666, 5.179447058823529, 7.79425, 4.215166666666667, 3.634201680672269, 2.600416666666667, 3.746813725490196, 2.983815896739131, 1.6578229166666667, 0.7994516129032258, 0.0), # 72
(10.35094000119282, 8.760529000176998, 8.27861232567444, 8.939008730877617, 7.490561876328034, 3.638076804856983, 3.621887922521546, 2.9971732586495965, 3.8922601547020275, 1.7212067995373737, 1.3426157678145982, 0.7634365549218266, 0.0, 10.110541516632374, 8.397802104140093, 6.71307883907299, 5.163620398612119, 7.784520309404055, 4.196042562109435, 3.621887922521546, 2.598626289183559, 3.745280938164017, 2.979669576959206, 1.655722465134888, 0.7964117272888181, 0.0), # 73
(10.341718077175404, 8.726570412407629, 8.267871727823502, 8.926003950281803, 7.486931466688183, 3.6351200401361585, 3.609414857849861, 2.9835726261240665, 3.8872192691662857, 1.7159135587293908, 1.3398749729261428, 0.7619681090013557, 0.0, 10.104987032750344, 8.38164919901491, 6.699374864630713, 5.147740676188171, 7.774438538332571, 4.177001676573693, 3.609414857849861, 2.5965143143829703, 3.7434657333440917, 2.975334650093935, 1.6535743455647005, 0.7933245829461482, 0.0), # 74
(10.331119162640901, 8.692110961768218, 8.256896219135802, 8.912449048913043, 7.482752360203341, 3.6317301097393697, 3.59678778918975, 2.9700360082304527, 3.8820087448559666, 1.7106027596223679, 1.336955077086656, 0.7604640459172624, 0.0, 10.098596643518519, 8.365104505089885, 6.684775385433279, 5.131808278867102, 7.764017489711933, 4.158050411522634, 3.59678778918975, 2.594092935528121, 3.7413761801016703, 2.9708163496376816, 1.6513792438271604, 0.7901919056152927, 0.0), # 75
(10.319189061847677, 8.65716963139962, 8.245689228966622, 8.898359727254428, 7.478040695016003, 3.6279240842351275, 3.5840120190737474, 2.956567977442463, 3.876634983234263, 1.7052745313214452, 1.3338632868647486, 0.7589253282955902, 0.0, 10.091396069101508, 8.348178611251491, 6.669316434323743, 5.115823593964334, 7.753269966468526, 4.139195168419449, 3.5840120190737474, 2.5913743458822336, 3.7390203475080015, 2.96611990908481, 1.6491378457933243, 0.7870154210363293, 0.0), # 76
(10.305973579054093, 8.621765404442675, 8.234254186671238, 8.883751685789049, 7.472812609268672, 3.6237190341919425, 3.5710928500343897, 2.9431731062338065, 3.871104385764365, 1.699929002931763, 1.3306068088290313, 0.7573529187623839, 0.0, 10.083411029663925, 8.330882106386222, 6.653034044145156, 5.099787008795288, 7.74220877152873, 4.120442348727329, 3.5710928500343897, 2.58837073870853, 3.736406304634336, 2.9612505619296834, 1.6468508373342476, 0.7837968549493343, 0.0), # 77
(10.291518518518519, 8.585917264038233, 8.222594521604938, 8.868640625, 7.467084241103849, 3.6191320301783265, 3.5580355846042124, 2.9298559670781894, 3.8654233539094642, 1.6945663035584608, 1.327192849548113, 0.7557477799436866, 0.0, 10.074667245370371, 8.313225579380552, 6.635964247740564, 5.083698910675381, 7.7308467078189285, 4.101798353909466, 3.5580355846042124, 2.585094307270233, 3.7335421205519244, 2.956213541666667, 1.6445189043209878, 0.7805379330943849, 0.0), # 78
(10.275869684499314, 8.549644193327138, 8.210713663123, 8.85304224537037, 7.460871728664031, 3.61418014276279, 3.5448455253157505, 2.916621132449322, 3.859598289132754, 1.6891865623066789, 1.3236286155906039, 0.7541108744655421, 0.0, 10.065190436385459, 8.295219619120962, 6.618143077953018, 5.067559686920035, 7.719196578265508, 4.083269585429051, 3.5448455253157505, 2.5815572448305644, 3.7304358643320157, 2.951014081790124, 1.6421427326246, 0.7772403812115581, 0.0), # 79
(10.259072881254847, 8.51296517545024, 8.198615040580703, 8.836972247383253, 7.454191210091719, 3.6088804425138448, 3.5315279747015405, 2.9034731748209115, 3.853635592897424, 1.683789908281557, 1.3199213135251149, 0.7524431649539947, 0.0, 10.0550063228738, 8.27687481449394, 6.599606567625574, 5.05136972484467, 7.707271185794848, 4.064862444749276, 3.5315279747015405, 2.577771744652746, 3.7270956050458595, 2.945657415794418, 1.639723008116141, 0.7739059250409311, 0.0), # 80
(10.241173913043479, 8.475899193548386, 8.186302083333333, 8.82044633152174, 7.447058823529411, 3.60325, 3.5180882352941176, 2.890416666666667, 3.8475416666666664, 1.6783764705882358, 1.3160781499202554, 0.7507456140350878, 0.0, 10.044140624999999, 8.258201754385965, 6.580390749601277, 5.035129411764706, 7.695083333333333, 4.046583333333333, 3.5180882352941176, 2.57375, 3.7235294117647055, 2.940148777173914, 1.6372604166666667, 0.7705362903225808, 0.0), # 81
(10.222218584123576, 8.438465230762423, 8.17377822073617, 8.803480198268922, 7.43949070711961, 3.5973058857897686, 3.504531609626018, 2.8774561804602956, 3.841322911903673, 1.6729463783318543, 1.3121063313446355, 0.7490191843348656, 0.0, 10.03261906292867, 8.23921102768352, 6.560531656723177, 5.018839134995561, 7.682645823807346, 4.0284386526444145, 3.504531609626018, 2.5695042041355487, 3.719745353559805, 2.934493399422974, 1.634755644147234, 0.767133202796584, 0.0), # 82
(10.202252698753504, 8.400682270233196, 8.16104688214449, 8.78608954810789, 7.431502999004814, 3.591065170451659, 3.4908634002297765, 2.8645962886755068, 3.8349857300716352, 1.6674997606175532, 1.3080130643668657, 0.7472648384793719, 0.0, 10.020467356824417, 8.219913223273089, 6.540065321834328, 5.002499281852659, 7.6699714601432705, 4.01043480414571, 3.4908634002297765, 2.5650465503226134, 3.715751499502407, 2.9286965160359637, 1.632209376428898, 0.7636983882030178, 0.0), # 83
(10.181322061191626, 8.362569295101553, 8.14811149691358, 8.768290081521739, 7.423111837327523, 3.584544924554184, 3.477088909637929, 2.851841563786008, 3.8285365226337444, 1.6620367465504726, 1.3038055555555557, 0.7454835390946503, 0.0, 10.007711226851852, 8.200318930041153, 6.519027777777778, 4.986110239651417, 7.657073045267489, 3.9925781893004113, 3.477088909637929, 2.5603892318244172, 3.7115559186637617, 2.922763360507247, 1.629622299382716, 0.7602335722819594, 0.0), # 84
(10.159472475696308, 8.32414528850834, 8.13497549439872, 8.75009749899356, 7.414333360230238, 3.577762218665854, 3.463213440383012, 2.8391965782655086, 3.8219816910531925, 1.6565574652357518, 1.2994910114793157, 0.7436762488067449, 0.0, 9.994376393175584, 8.180438736874192, 6.497455057396579, 4.969672395707254, 7.643963382106385, 3.9748752095717124, 3.463213440383012, 2.5555444419041815, 3.707166680115119, 2.916699166331187, 1.626995098879744, 0.7567404807734855, 0.0), # 85
(10.136749746525913, 8.285429233594407, 8.121642303955191, 8.731527501006443, 7.405183705855455, 3.57073412335518, 3.44924229499756, 2.826665904587715, 3.815327636793172, 1.6510620457785314, 1.2950766387067558, 0.7418439302416996, 0.0, 9.98048857596022, 8.160283232658694, 6.475383193533778, 4.953186137335593, 7.630655273586344, 3.9573322664228017, 3.44924229499756, 2.550524373825129, 3.7025918529277275, 2.910509167002148, 1.6243284607910382, 0.7532208394176735, 0.0), # 86
(10.113199677938807, 8.246440113500597, 8.10811535493827, 8.712595788043478, 7.3956790123456795, 3.563477709190672, 3.4351807760141093, 2.8142541152263374, 3.8085807613168727, 1.645550617283951, 1.290569643806486, 0.7399875460255577, 0.0, 9.96607349537037, 8.139863006281134, 6.452848219032429, 4.936651851851852, 7.6171615226337455, 3.9399557613168725, 3.4351807760141093, 2.54534122085048, 3.6978395061728397, 2.904198596014493, 1.6216230709876542, 0.7496763739545999, 0.0), # 87
(10.088868074193357, 8.207196911367758, 8.094398076703246, 8.693318060587762, 7.385835417843406, 3.5560100467408424, 3.4210341859651954, 2.801965782655083, 3.8017474660874866, 1.6400233088571508, 1.2859772333471164, 0.7381080587843638, 0.0, 9.951156871570646, 8.119188646628, 6.429886166735582, 4.9200699265714505, 7.603494932174973, 3.9227520957171165, 3.4210341859651954, 2.540007176243459, 3.692917708921703, 2.897772686862588, 1.6188796153406495, 0.7461088101243417, 0.0), # 88
(10.063800739547922, 8.16771861033674, 8.080493898605397, 8.673710019122383, 7.375669060491138, 3.5483482065742016, 3.406807827383354, 2.7898054793476605, 3.794834152568206, 1.634480249603271, 1.2813066138972575, 0.7362064311441613, 0.0, 9.935764424725651, 8.098270742585774, 6.4065330694862865, 4.903440748809812, 7.589668305136412, 3.905727671086725, 3.406807827383354, 2.534534433267287, 3.687834530245569, 2.891236673040795, 1.6160987797210793, 0.7425198736669765, 0.0), # 89
(10.03804347826087, 8.128024193548386, 8.06640625, 8.653787364130435, 7.365196078431373, 3.5405092592592595, 3.3925070028011204, 2.7777777777777777, 3.7878472222222226, 1.6289215686274514, 1.2765649920255184, 0.7342836257309943, 0.0, 9.919921875, 8.077119883040936, 6.382824960127592, 4.886764705882353, 7.575694444444445, 3.888888888888889, 3.3925070028011204, 2.5289351851851856, 3.6825980392156863, 2.884595788043479, 1.6132812500000002, 0.7389112903225807, 0.0), # 90
(10.011642094590563, 8.088132644143545, 8.05213856024234, 8.63356579609501, 7.35443260980661, 3.532510275364528, 3.378137014751031, 2.7658872504191434, 3.780793076512727, 1.6233473950348318, 1.2717595743005101, 0.7323406051709063, 0.0, 9.903654942558298, 8.055746656879968, 6.35879787150255, 4.870042185104494, 7.561586153025454, 3.872242150586801, 3.378137014751031, 2.5232216252603767, 3.677216304903305, 2.8778552653650036, 1.6104277120484682, 0.7352847858312315, 0.0), # 91
(9.984642392795372, 8.048062945263066, 8.0376942586877, 8.613061015499195, 7.343394792759352, 3.524368325458518, 3.363703165765621, 2.754138469745466, 3.773678116902911, 1.6177578579305527, 1.2668975672908422, 0.7303783320899415, 0.0, 9.886989347565157, 8.034161652989356, 6.334487836454211, 4.853273573791657, 7.547356233805822, 3.8557938576436523, 3.363703165765621, 2.517405946756084, 3.671697396379676, 2.871020338499732, 1.6075388517375402, 0.7316420859330061, 0.0), # 92
(9.957090177133654, 8.00783408004779, 8.023076774691358, 8.592288722826089, 7.332098765432098, 3.5161004801097393, 3.349210758377425, 2.742536008230453, 3.766508744855967, 1.6121530864197533, 1.261986177565125, 0.7283977691141434, 0.0, 9.869950810185184, 8.012375460255576, 6.309930887825625, 4.836459259259259, 7.533017489711934, 3.839550411522634, 3.349210758377425, 2.5115003429355283, 3.666049382716049, 2.86409624094203, 1.6046153549382718, 0.727984916367981, 0.0), # 93
(9.92903125186378, 7.967465031638567, 8.008289537608597, 8.571264618558777, 7.320560665967347, 3.5077238098867043, 3.3346650951189805, 2.7310844383478132, 3.759291361835086, 1.6065332096075746, 1.2570326116919686, 0.7263998788695563, 0.0, 9.85256505058299, 7.990398667565118, 6.285163058459842, 4.819599628822722, 7.518582723670172, 3.823518213686939, 3.3346650951189805, 2.5055170070619317, 3.6602803329836733, 2.8570882061862592, 1.6016579075217197, 0.7243150028762335, 0.0), # 94
(9.90051142124411, 7.926974783176247, 7.993335976794697, 8.550004403180354, 7.308796632507598, 3.499255385357923, 3.320071478522822, 2.719788332571255, 3.7520323693034596, 1.6008983565991557, 1.2520440762399827, 0.7243856239822234, 0.0, 9.834857788923182, 7.968241863804456, 6.260220381199914, 4.8026950697974655, 7.504064738606919, 3.8077036655997567, 3.320071478522822, 2.4994681323985164, 3.654398316253799, 2.850001467726785, 1.5986671953589393, 0.7206340711978407, 0.0), # 95
(9.871576489533012, 7.886382317801674, 7.978219521604939, 8.528523777173913, 7.296822803195352, 3.4907122770919066, 3.3054352111214853, 2.708652263374486, 3.7447381687242793, 1.5952486564996373, 1.247027777777778, 0.7223559670781895, 0.0, 9.816854745370371, 7.945915637860083, 6.23513888888889, 4.785745969498911, 7.489476337448559, 3.7921131687242804, 3.3054352111214853, 2.4933659122085046, 3.648411401597676, 2.8428412590579715, 1.595643904320988, 0.7169438470728796, 0.0), # 96
(9.842272260988848, 7.845706618655694, 7.962943601394604, 8.506838441022543, 7.284655316173109, 3.482111555657166, 3.2907615954475067, 2.697680803231215, 3.7374151615607376, 1.589584238414159, 1.2419909228739638, 0.7203118707834976, 0.0, 9.798581640089164, 7.923430578618472, 6.209954614369819, 4.768752715242476, 7.474830323121475, 3.7767531245237014, 3.2907615954475067, 2.4872225397551184, 3.6423276580865545, 2.8356128136741816, 1.5925887202789208, 0.7132460562414268, 0.0), # 97
(9.812644539869984, 7.804966668879153, 7.947511645518976, 8.48496409520934, 7.272310309583368, 3.4734702916222124, 3.276055934033421, 2.68687852461515, 3.7300697492760246, 1.5839052314478608, 1.236940718097151, 0.7182542977241916, 0.0, 9.78006419324417, 7.900797274966106, 6.184703590485755, 4.751715694343581, 7.460139498552049, 3.7616299344612103, 3.276055934033421, 2.48105020830158, 3.636155154791684, 2.8283213650697805, 1.589502329103795, 0.7095424244435595, 0.0), # 98
(9.782739130434782, 7.764181451612902, 7.931927083333334, 8.462916440217391, 7.259803921568627, 3.464805555555556, 3.261323529411765, 2.67625, 3.7227083333333333, 1.5782117647058826, 1.2318843700159492, 0.7161842105263159, 0.0, 9.761328125, 7.878026315789473, 6.159421850079745, 4.734635294117647, 7.445416666666667, 3.7467500000000005, 3.261323529411765, 2.474861111111111, 3.6299019607843137, 2.820972146739131, 1.5863854166666669, 0.7058346774193549, 0.0), # 99
(9.752601836941611, 7.723369949997786, 7.916193344192958, 8.44071117652979, 7.247152290271389, 3.4561344180257074, 3.2465696841150726, 2.665799801859473, 3.715337315195854, 1.572503967293365, 1.2268290851989685, 0.714102571815914, 0.0, 9.742399155521262, 7.8551282899750525, 6.134145425994841, 4.717511901880093, 7.430674630391708, 3.732119722603262, 3.2465696841150726, 2.468667441446934, 3.6235761451356945, 2.8135703921765973, 1.5832386688385918, 0.7021245409088898, 0.0), # 100
(9.722278463648834, 7.682551147174654, 7.900313857453133, 8.41836400462963, 7.234371553834153, 3.4474739496011786, 3.231799700675881, 2.6555325026672763, 3.7079630963267793, 1.5667819683154474, 1.2217820702148188, 0.7120103442190294, 0.0, 9.723303004972564, 7.832113786409323, 6.108910351074094, 4.7003459049463405, 7.415926192653559, 3.7177455037341867, 3.231799700675881, 2.4624813925722706, 3.6171857769170765, 2.806121334876544, 1.5800627714906266, 0.6984137406522414, 0.0), # 101
(9.691814814814816, 7.641744026284349, 7.884292052469135, 8.395890625, 7.221477850399419, 3.4388412208504806, 3.217018881626725, 2.645452674897119, 3.7005920781893, 1.56104589687727, 1.2167505316321108, 0.7099084903617069, 0.0, 9.704065393518519, 7.808993393978774, 6.083752658160553, 4.683137690631809, 7.4011841563786, 3.703633744855967, 3.217018881626725, 2.4563151577503435, 3.6107389251997093, 2.798630208333334, 1.5768584104938272, 0.6947040023894864, 0.0), # 102
(9.661256694697919, 7.60096757046772, 7.8681313585962505, 8.373306738123993, 7.208487318109686, 3.430253302342123, 3.20223252950014, 2.63556489102271, 3.6932306622466085, 1.5552958820839726, 1.211741676019454, 0.7077979728699895, 0.0, 9.68471204132373, 7.785777701569883, 6.058708380097269, 4.6658876462519165, 7.386461324493217, 3.689790847431794, 3.20223252950014, 2.4501809302443736, 3.604243659054843, 2.7911022460413317, 1.5736262717192502, 0.6909970518607019, 0.0), # 103
(9.63064990755651, 7.560240762865614, 7.851835205189758, 8.350628044484703, 7.195416095107452, 3.421727264644617, 3.187445946828663, 2.6258737235177567, 3.685885249961896, 1.5495320530406955, 1.2067627099454585, 0.7056797543699213, 0.0, 9.665268668552812, 7.762477298069133, 6.033813549727292, 4.648596159122086, 7.371770499923792, 3.6762232129248593, 3.187445946828663, 2.4440909033175835, 3.597708047553726, 2.783542681494901, 1.5703670410379515, 0.687294614805965, 0.0), # 104
(9.600040257648953, 7.519582586618876, 7.835407021604938, 8.327870244565217, 7.182280319535221, 3.4132801783264752, 3.172664436144829, 2.6163837448559675, 3.6785622427983538, 1.5437545388525786, 1.201820839978735, 0.7035547974875461, 0.0, 9.64576099537037, 7.739102772363006, 6.009104199893674, 4.631263616557734, 7.3571244855967075, 3.662937242798354, 3.172664436144829, 2.4380572702331964, 3.5911401597676105, 2.775956748188406, 1.5670814043209877, 0.6835984169653525, 0.0), # 105
(9.569473549233614, 7.479012024868357, 7.818850237197074, 8.305049038848631, 7.1690961295354905, 3.404929113956206, 3.1578932999811724, 2.6070995275110502, 3.6712680422191735, 1.5379634686247616, 1.1969232726878927, 0.701424064848908, 0.0, 9.626214741941014, 7.715664713337986, 5.9846163634394625, 4.613890405874283, 7.342536084438347, 3.6499393385154706, 3.1578932999811724, 2.4320922242544327, 3.5845480647677452, 2.768349679616211, 1.5637700474394147, 0.6799101840789417, 0.0), # 106
(9.538995586568856, 7.438548060754901, 7.802168281321446, 8.282180127818036, 7.155879663250759, 3.3966911421023225, 3.1431378408702306, 2.5980256439567144, 3.6640090496875475, 1.532158971462385, 1.1920772146415421, 0.6992885190800504, 0.0, 9.606655628429355, 7.692173709880553, 5.96038607320771, 4.596476914387154, 7.328018099375095, 3.6372359015394005, 3.1431378408702306, 2.426207958644516, 3.5779398316253794, 2.760726709272679, 1.5604336562642893, 0.6762316418868093, 0.0), # 107
(9.508652173913044, 7.398209677419356, 7.785364583333334, 8.259279211956523, 7.1426470588235285, 3.3885833333333335, 3.1284033613445374, 2.589166666666667, 3.656791666666667, 1.5263411764705888, 1.1872898724082936, 0.6971491228070177, 0.0, 9.587109375, 7.668640350877193, 5.936449362041468, 4.579023529411765, 7.313583333333334, 3.624833333333334, 3.1284033613445374, 2.4204166666666667, 3.5713235294117642, 2.7530930706521746, 1.557072916666667, 0.6725645161290325, 0.0), # 108
(9.478489115524543, 7.358015858002567, 7.768442572588021, 8.23636199174718, 7.129414454396299, 3.3806227582177515, 3.113695163936631, 2.580527168114617, 3.6496222946197223, 1.5205102127545123, 1.1825684525567568, 0.6950068386558532, 0.0, 9.567601701817559, 7.645075225214384, 5.9128422627837836, 4.561530638263536, 7.299244589239445, 3.612738035360464, 3.113695163936631, 2.4147305415841083, 3.5647072271981495, 2.7454539972490606, 1.5536885145176043, 0.668910532545688, 0.0), # 109
(9.448552215661715, 7.317985585645383, 7.751405678440788, 8.213444167673108, 7.116197988111569, 3.3728264873240867, 3.0990185511790447, 2.5721117207742723, 3.6425073350099066, 1.5146662094192962, 1.177920161655542, 0.6928626292526012, 0.0, 9.54815832904664, 7.621488921778612, 5.8896008082777085, 4.543998628257887, 7.285014670019813, 3.600956409083981, 3.0990185511790447, 2.409161776660062, 3.5580989940557846, 2.737814722557703, 1.5502811356881578, 0.6652714168768531, 0.0), # 110
(9.41888727858293, 7.278137843488651, 7.7342573302469155, 8.190541440217391, 7.103013798111837, 3.365211591220851, 3.0843788256043156, 2.5639248971193416, 3.635453189300412, 1.5088092955700803, 1.173352206273259, 0.6907174572233054, 0.0, 9.528804976851852, 7.597892029456357, 5.866761031366295, 4.526427886710239, 7.270906378600824, 3.5894948559670783, 3.0843788256043156, 2.4037225651577505, 3.5515068990559184, 2.7301804800724643, 1.546851466049383, 0.6616488948626047, 0.0), # 111
(9.38954010854655, 7.238491614673214, 7.717000957361684, 8.167669509863124, 7.089878022539605, 3.357795140476554, 3.069781289744979, 2.5559712696235333, 3.628466258954427, 1.5029396003120044, 1.1688717929785184, 0.6885722851940093, 0.0, 9.509567365397805, 7.574295137134101, 5.844358964892591, 4.5088188009360115, 7.256932517908854, 3.5783597774729463, 3.069781289744979, 2.3984251003403956, 3.5449390112698027, 2.7225565032877084, 1.543400191472337, 0.6580446922430195, 0.0), # 112
(9.360504223703044, 7.1991320672204555, 7.699681523543391, 8.14487541186903, 7.076783786782469, 3.3505906987084666, 3.0552629818283847, 2.548271903658586, 3.6215709370862066, 1.4970761841531826, 1.1644873176921446, 0.6864327447087024, 0.0, 9.490443900843221, 7.550760191795725, 5.8224365884607225, 4.491228552459547, 7.243141874172413, 3.5675806651220205, 3.0552629818283847, 2.3932790705060474, 3.5383918933912346, 2.7149584706230105, 1.5399363047086783, 0.654466551565496, 0.0), # 113
(9.331480897900065, 7.16044741823174, 7.682538062518016, 8.122342065958001, 7.063595569710884, 3.343581854975776, 3.0410091042052896, 2.5409213581271333, 3.6148730119043533, 1.491328791978196, 1.1602073895188663, 0.684326014342748, 0.0, 9.471275414160035, 7.5275861577702265, 5.801036947594331, 4.473986375934587, 7.229746023808707, 3.557289901377987, 3.0410091042052896, 2.3882727535541255, 3.531797784855442, 2.7074473553193346, 1.5365076125036032, 0.6509497652937947, 0.0), # 114
(9.302384903003995, 7.122451598792792, 7.665580777256098, 8.100063378886334, 7.050271785259067, 3.3367503822909463, 3.027029825095781, 2.533917772616129, 3.6083749928895963, 1.4857063319970194, 1.1560257519045158, 0.6822531318799043, 0.0, 9.452006631660376, 7.5047844506789465, 5.7801287595225785, 4.457118995991058, 7.216749985779193, 3.5474848816625806, 3.027029825095781, 2.3833931302078186, 3.5251358926295335, 2.700021126295445, 1.5331161554512198, 0.647495599890254, 0.0), # 115
(9.273179873237634, 7.0850892578507265, 7.648776824986561, 8.077999612699802, 7.036792350922519, 3.330080178417474, 3.0133024087639466, 2.5272417970412473, 3.6020604464092765, 1.480198339612387, 1.1519343218785802, 0.6802102664572789, 0.0, 9.43260725975589, 7.482312931030067, 5.7596716093929015, 4.44059501883716, 7.204120892818553, 3.5381385158577463, 3.0133024087639466, 2.3786286988696244, 3.5183961754612594, 2.6926665375666015, 1.5297553649973124, 0.6440990234409752, 0.0), # 116
(9.243829442823772, 7.04830504435266, 7.632093362938321, 8.056111029444182, 7.02313718419674, 3.323555141118853, 2.9998041194738763, 2.5208740813181603, 3.5959129388307343, 1.4747943502270324, 1.1479250164705472, 0.6781935872119792, 0.0, 9.413047004858225, 7.46012945933177, 5.739625082352736, 4.424383050681096, 7.1918258776614685, 3.5292237138454245, 2.9998041194738763, 2.3739679579420376, 3.51156859209837, 2.6853703431480613, 1.5264186725876645, 0.6407550040320601, 0.0), # 117
(9.214297245985211, 7.0120436072457135, 7.615497548340306, 8.03435789116525, 7.009286202577227, 3.317159168158581, 2.9865122214896576, 2.51479527536254, 3.5899160365213114, 1.46948389924369, 1.143989752709904, 0.6761992632811126, 0.0, 9.393295573379024, 7.438191896092237, 5.71994876354952, 4.40845169773107, 7.179832073042623, 3.5207133855075567, 2.9865122214896576, 2.369399405827558, 3.5046431012886137, 2.678119297055084, 1.5230995096680613, 0.6374585097496104, 0.0), # 118
(9.184546916944742, 6.976249595477001, 7.598956538421437, 8.012700459908778, 6.99521932355948, 3.3108761573001524, 2.973403979075378, 2.5089860290900607, 3.5840533058483475, 1.4642565220650932, 1.1401204476261382, 0.6742234638017862, 0.0, 9.373322671729932, 7.416458101819647, 5.70060223813069, 4.392769566195279, 7.168106611696695, 3.5125804407260848, 2.973403979075378, 2.3649115409286803, 3.49760966177974, 2.670900153302927, 1.5197913076842873, 0.6342045086797276, 0.0), # 119
(9.154542089925162, 6.940867657993644, 7.582437490410635, 7.991098997720545, 6.980916464638998, 3.304690006307063, 2.9604566564951265, 2.5034269924163928, 3.578308313179186, 1.4591017540939766, 1.136309018248736, 0.6722623579111081, 0.0, 9.353098006322597, 7.394885937022188, 5.68154509124368, 4.377305262281929, 7.156616626358372, 3.50479778938295, 2.9604566564951265, 2.360492861647902, 3.490458232319499, 2.663699665906849, 1.516487498082127, 0.6309879689085133, 0.0), # 120
(9.124246399149268, 6.90584244374276, 7.565907561536823, 7.969513766646325, 6.966357543311279, 3.29858461294281, 2.94764751801299, 2.4980988152572112, 3.572664624881166, 1.4540091307330743, 1.1325473816071863, 0.6703121147461852, 0.0, 9.33259128356866, 7.373433262208036, 5.662736908035931, 4.362027392199222, 7.145329249762332, 3.497338341360096, 2.94764751801299, 2.356131866387721, 3.4831787716556395, 2.656504588882109, 1.5131815123073646, 0.6278038585220692, 0.0), # 121
(9.093623478839854, 6.871118601671464, 7.549333909028926, 7.947905028731892, 6.951522477071823, 3.292543874970886, 2.9349538278930587, 2.492982147528187, 3.5671058073216297, 1.4489681873851195, 1.1288274547309753, 0.6683689034441251, 0.0, 9.31177220987977, 7.352057937885375, 5.644137273654876, 4.346904562155357, 7.1342116146432595, 3.490175006539462, 2.9349538278930587, 2.351817053550633, 3.4757612385359113, 2.6493016762439643, 1.5098667818057854, 0.6246471456064968, 0.0), # 122
(9.062636963219719, 6.836640780726876, 7.532683690115864, 7.92623304602302, 6.936391183416127, 3.28655169015479, 2.9223528503994194, 2.4880576391449933, 3.5616154268679177, 1.443968459452847, 1.1251411546495909, 0.6664288931420351, 0.0, 9.290610491667572, 7.330717824562385, 5.625705773247954, 4.33190537835854, 7.123230853735835, 3.4832806948029904, 2.9223528503994194, 2.3475369215391355, 3.4681955917080636, 2.642077682007674, 1.5065367380231727, 0.621512798247898, 0.0), # 123
(9.031250486511654, 6.802353629856113, 7.515924062026559, 7.90445808056549, 6.920943579839691, 3.2805919562580144, 2.9098218497961597, 2.483305940023303, 3.5561770498873715, 1.4389994823389904, 1.1214803983925201, 0.664488252977023, 0.0, 9.269075835343711, 7.309370782747252, 5.6074019919625995, 4.316998447016971, 7.112354099774743, 3.476628316032624, 2.9098218497961597, 2.3432799687557244, 3.4604717899198456, 2.634819360188497, 1.5031848124053118, 0.618395784532374, 0.0), # 124
(8.999427682938459, 6.768201798006293, 7.499022181989936, 7.88254039440507, 6.905159583838015, 3.274648571044058, 2.8973380903473696, 2.478707700078788, 3.5507742427473308, 1.4340507914462837, 1.1178371029892504, 0.6625431520861957, 0.0, 9.247137947319828, 7.2879746729481525, 5.5891855149462515, 4.30215237433885, 7.1015484854946616, 3.470190780110303, 2.8973380903473696, 2.3390346936028985, 3.4525797919190073, 2.6275134648016905, 1.4998044363979874, 0.6152910725460268, 0.0), # 125
(8.967132186722928, 6.734129934124536, 7.481945207234916, 7.8604402495875405, 6.889019112906595, 3.2687054322764144, 2.884878836317135, 2.474243569227122, 3.545390571815139, 1.4291119221774609, 1.1142031854692689, 0.6605897596066612, 0.0, 9.224766534007578, 7.266487355673273, 5.571015927346345, 4.287335766532382, 7.090781143630278, 3.463940996917971, 2.884878836317135, 2.334789594483153, 3.4445095564532977, 2.620146749862514, 1.4963890414469831, 0.6121936303749579, 0.0), # 126
(8.93432763208786, 6.7000826871579555, 7.464660294990421, 7.838117908158674, 6.8725020845409315, 3.26274643771858, 2.872421351969547, 2.469894197383977, 3.5400096034581354, 1.4241724099352562, 1.1105705628620632, 0.6586242446755264, 0.0, 9.201931301818599, 7.244866691430789, 5.552852814310316, 4.272517229805768, 7.080019206916271, 3.457851876337568, 2.872421351969547, 2.3305331697989855, 3.4362510422704657, 2.612705969386225, 1.4929320589980841, 0.6090984261052688, 0.0), # 127
(8.900977653256046, 6.666004706053673, 7.447134602485375, 7.815533632164248, 6.855588416236526, 3.2567554851340508, 2.859942901568691, 2.465640234465026, 3.534614904043661, 1.4192217901224033, 1.1069311521971208, 0.6566427764298991, 0.0, 9.178601957164537, 7.223070540728888, 5.534655760985604, 4.257665370367209, 7.069229808087322, 3.4518963282510366, 2.859942901568691, 2.3262539179528936, 3.427794208118263, 2.6051778773880834, 1.4894269204970751, 0.6060004278230613, 0.0), # 128
(8.867045884450281, 6.631840639758805, 7.4293352869486995, 7.792647683650037, 6.838258025488874, 3.250716472286322, 2.8474207493786565, 2.4614623303859418, 3.529190039939058, 1.4142495981416365, 1.1032768705039286, 0.6546415240068865, 0.0, 9.154748206457038, 7.20105676407575, 5.516384352519642, 4.242748794424909, 7.058380079878116, 3.4460472625403185, 2.8474207493786565, 2.321940337347373, 3.419129012744437, 2.597549227883346, 1.4858670573897401, 0.6028946036144368, 0.0), # 129
(8.832495959893366, 6.5975351372204685, 7.411229505609316, 7.769420324661814, 6.820490829793475, 3.2446132969388883, 2.8348321596635313, 2.457341135062396, 3.5237185775116666, 1.4092453693956895, 1.0995996348119743, 0.6526166565435961, 0.0, 9.130339756107748, 7.178783221979556, 5.4979981740598705, 4.2277361081870675, 7.047437155023333, 3.4402775890873545, 2.8348321596635313, 2.3175809263849203, 3.4102454148967376, 2.589806774887272, 1.4822459011218634, 0.5997759215654973, 0.0), # 130
(8.797291513808094, 6.563032847385783, 7.392784415696151, 7.7458118172453565, 6.802266746645829, 3.238429856855247, 2.8221543966874045, 2.4532572984100627, 3.5181840831288285, 1.4041986392872965, 1.0958913621507447, 0.6505643431771354, 0.0, 9.105346312528312, 7.156207774948489, 5.479456810753724, 4.212595917861889, 7.036368166257657, 3.4345602177740875, 2.8221543966874045, 2.3131641834680337, 3.4011333733229145, 2.5819372724151193, 1.4785568831392302, 0.596639349762344, 0.0), # 131
(8.76139618041726, 6.528278419201865, 7.373967174438122, 7.72178242344644, 6.783565693541435, 3.2321500497988933, 2.8093647247143627, 2.449191470344614, 3.5125701231578845, 1.3990989432191914, 1.0921439695497275, 0.6484807530446118, 0.0, 9.079737582130376, 7.13328828349073, 5.460719847748638, 4.1972968296575734, 7.025140246315769, 3.4288680584824593, 2.8093647247143627, 2.3086786069992096, 3.3917828467707176, 2.573927474482147, 1.4747934348876244, 0.5934798562910787, 0.0), # 132
(8.724773593943663, 6.493216501615832, 7.354744939064153, 7.697292405310838, 6.764367587975791, 3.225757773533322, 2.7964404080084946, 2.445124300781722, 3.5068602639661752, 1.3939358165941083, 1.0883493740384103, 0.6463620552831327, 0.0, 9.053483271325586, 7.10998260811446, 5.44174687019205, 4.181807449782324, 7.0137205279323505, 3.4231740210944106, 2.7964404080084946, 2.3041126953809443, 3.3821837939878954, 2.5657641351036133, 1.4709489878128308, 0.590292409237803, 0.0), # 133
(8.687387388610095, 6.457791743574804, 7.33508486680317, 7.672302024884328, 6.7446523474443945, 3.2192369258220297, 2.7833587108338893, 2.44103643963706, 3.5010380719210428, 1.388698794814781, 1.0844994926462799, 0.6442044190298056, 0.0, 9.026553086525583, 7.0862486093278605, 5.422497463231399, 4.166096384444343, 7.0020761438420855, 3.417451015491884, 2.7833587108338893, 2.2994549470157355, 3.3723261737221972, 2.557434008294776, 1.4670169733606342, 0.5870719766886187, 0.0), # 134
(8.649201198639354, 6.421948794025897, 7.314954114884091, 7.646771544212684, 6.724399889442747, 3.212571404428512, 2.770096897454634, 2.4369085368263, 3.4950871133898262, 1.3833774132839443, 1.0805862424028239, 0.6420040134217377, 0.0, 8.99891673414202, 7.0620441476391145, 5.402931212014119, 4.150132239851832, 6.9901742267796525, 3.41167195155682, 2.770096897454634, 2.2946938603060802, 3.3621999447213735, 2.548923848070895, 1.4629908229768183, 0.583813526729627, 0.0), # 135
(8.610178658254235, 6.385632301916229, 7.294319840535841, 7.62066122534168, 6.703590131466344, 3.205745107116265, 2.7566322321348173, 2.4327212422651154, 3.4889909547398688, 1.3779612074043308, 1.0766015403375297, 0.6397570075960368, 0.0, 8.970543920586536, 7.037327083556404, 5.383007701687648, 4.133883622212991, 6.9779819094797375, 3.4058097391711617, 2.7566322321348173, 2.289817933654475, 3.351795065733172, 2.540220408447227, 1.4588639681071682, 0.58051202744693, 0.0), # 136
(8.570283401677534, 6.348786916192918, 7.273149200987342, 7.593931330317094, 6.682202991010689, 3.1987419316487826, 2.7429419791385277, 2.428455205869179, 3.4827331623385107, 1.3724397125786756, 1.0725373034798844, 0.63745957068981, 0.0, 8.941404352270776, 7.012055277587909, 5.362686517399421, 4.117319137736026, 6.965466324677021, 3.3998372882168506, 2.7429419791385277, 2.284815665463416, 3.3411014955053444, 2.5313104434390317, 1.4546298401974684, 0.577162446926629, 0.0), # 137
(8.529479063132047, 6.311357285803083, 7.251409353467515, 7.566542121184698, 6.660218385571278, 3.1915457757895624, 2.729003402729852, 2.4240910775541624, 3.4762973025530934, 1.3668024642097119, 1.0683854488593754, 0.6351078718401649, 0.0, 8.91146773560639, 6.986186590241813, 5.341927244296877, 4.100407392629135, 6.952594605106187, 3.3937275085758274, 2.729003402729852, 2.2796755541354017, 3.330109192785639, 2.5221807070615663, 1.450281870693503, 0.5737597532548258, 0.0), # 138
(8.487729276840568, 6.273288059693839, 7.229067455205284, 7.538453859990269, 6.63761623264361, 3.184140537302099, 2.7147937671728797, 2.4196095072357395, 3.469666941750957, 1.3610389977001744, 1.0641378935054902, 0.6326980801842089, 0.0, 8.880703777005019, 6.959678882026297, 5.32068946752745, 4.083116993100523, 6.939333883501914, 3.3874533101300353, 2.7147937671728797, 2.274386098072928, 3.318808116321805, 2.51281795333009, 1.4458134910410567, 0.5702989145176218, 0.0), # 139
(8.444997677025897, 6.234523886812306, 7.206090663429573, 7.509626808779583, 6.614376449723186, 3.176510113949888, 2.7002903367316984, 2.4149911448295818, 3.462825646299444, 1.3551388484527966, 1.0597865544477159, 0.6302263648590494, 0.0, 8.849082182878314, 6.932490013449542, 5.298932772238579, 4.0654165453583895, 6.925651292598888, 3.3809876027614147, 2.7002903367316984, 2.2689357956784915, 3.307188224861593, 2.5032089362598615, 1.4412181326859146, 0.5667748988011189, 0.0), # 140
(8.40124789791083, 6.195009416105602, 7.1824461353693, 7.480021229598415, 6.590478954305501, 3.1686384034964257, 2.6854703756703975, 2.4102166402513627, 3.455756982565893, 1.349091551870313, 1.0553233487155398, 0.6276888950017938, 0.0, 8.816572659637913, 6.904577845019731, 5.276616743577699, 4.047274655610939, 6.911513965131786, 3.3743032963519077, 2.6854703756703975, 2.26331314535459, 3.2952394771527507, 2.4933404098661387, 1.4364892270738603, 0.5631826741914184, 0.0), # 141
(8.356443573718156, 6.154689296520844, 7.158101028253392, 7.44959738449254, 6.565903663886058, 3.1605093037052074, 2.670311148253063, 2.4052666434167547, 3.448444516917647, 1.3428866433554572, 1.0507401933384497, 0.6250818397495496, 0.0, 8.783144913695466, 6.875900237245045, 5.253700966692247, 4.028659930066371, 6.896889033835294, 3.3673733007834565, 2.670311148253063, 2.2575066455037196, 3.282951831943029, 2.4831991281641805, 1.4316202056506786, 0.5595172087746222, 0.0), # 142
(8.310548338670674, 6.113508177005149, 7.133022499310772, 7.418315535507731, 6.540630495960352, 3.152106712339729, 2.6547899187437842, 2.4001218042414303, 3.4408718157220486, 1.3365136583109634, 1.0460290053459322, 0.6224013682394242, 0.0, 8.748768651462617, 6.846415050633665, 5.230145026729661, 4.009540974932889, 6.881743631444097, 3.360170525938002, 2.6547899187437842, 2.251504794528378, 3.270315247980176, 2.472771845169244, 1.4266044998621543, 0.5557734706368318, 0.0), # 143
(8.263525826991184, 6.071410706505636, 7.107177705770357, 7.386135944689768, 6.514639368023886, 3.1434145271634857, 2.6388839514066493, 2.3947627726410623, 3.4330224453464364, 1.3299621321395652, 1.0411817017674754, 0.619643649608525, 0.0, 8.713413579351014, 6.816080145693774, 5.205908508837376, 3.9898863964186946, 6.866044890692873, 3.3526678816974873, 2.6388839514066493, 2.245296090831061, 3.257319684011943, 2.4620453148965895, 1.4214355411540713, 0.5519464278641489, 0.0), # 144
(8.215339672902477, 6.0283415339694235, 7.080533804861075, 7.353018874084421, 6.487910197572155, 3.134416645939974, 2.6225705105057466, 2.3891701985313234, 3.424879972158151, 1.3232216002439972, 1.036190199632566, 0.6168048529939595, 0.0, 8.6770494037723, 6.784853382933553, 5.180950998162829, 3.969664800731991, 6.849759944316302, 3.344838277943853, 2.6225705105057466, 2.238869032814267, 3.2439550987860777, 2.451006291361474, 1.4161067609722149, 0.548031048542675, 0.0), # 145
(8.16595351062735, 5.984245308343629, 7.053057953811847, 7.318924585737469, 6.460422902100661, 3.1250969664326886, 2.605826860305165, 2.3833247318278863, 3.4164279625245353, 1.3162815980269928, 1.0310464159706916, 0.6138811475328351, 0.0, 8.639645831138118, 6.7526926228611845, 5.155232079853457, 3.948844794080978, 6.832855925049071, 3.3366546245590407, 2.605826860305165, 2.2322121188804918, 3.2302114510503306, 2.439641528579157, 1.4106115907623695, 0.5440223007585119, 0.0), # 146
(8.1153309743886, 5.93906667857537, 7.024717309851591, 7.283813341694685, 6.4321573991049, 3.1154393864051255, 2.5886302650689905, 2.3772070224464232, 3.40764998281293, 1.3091316608912866, 1.0257422678113395, 0.6108687023622593, 0.0, 8.601172567860118, 6.719555725984851, 5.1287113390566965, 3.9273949826738592, 6.81529996562586, 3.3280898314249923, 2.5886302650689905, 2.2253138474322327, 3.21607869955245, 2.4279377805648954, 1.4049434619703185, 0.5399151525977609, 0.0), # 147
(8.063435698409021, 5.892750293611764, 6.9954790302092364, 7.247645404001847, 6.403093606080374, 3.105427803620781, 2.5709579890613132, 2.3707977203026074, 3.398529599390676, 1.301761324239612, 1.0202696721839972, 0.6077636866193392, 0.0, 8.561599320349941, 6.68540055281273, 5.101348360919985, 3.905283972718835, 6.797059198781352, 3.3191168084236504, 2.5709579890613132, 2.2181627168719866, 3.201546803040187, 2.4158818013339496, 1.3990958060418472, 0.535704572146524, 0.0), # 148
(8.010231316911412, 5.845240802399927, 6.965310272113703, 7.210381034704727, 6.37321144052258, 3.0950461158431497, 2.5527872965462204, 2.3640774753121114, 3.3890503786251127, 1.2941601234747035, 1.0146205461181517, 0.6045622694411826, 0.0, 8.520895795019237, 6.650184963853008, 5.073102730590758, 3.88248037042411, 6.778100757250225, 3.3097084654369557, 2.5527872965462204, 2.21074722560225, 3.18660572026129, 2.403460344901576, 1.3930620544227408, 0.5313855274909026, 0.0), # 149
(7.955681464118564, 5.796482853886981, 6.934178192793912, 7.171980495849104, 6.342490819927017, 3.0842782208357287, 2.5340954517878003, 2.3570269373906068, 3.3791958868835836, 1.2863175939992944, 1.0087868066432906, 0.601260619964897, 0.0, 8.479031698279647, 6.6138668196138655, 5.043934033216452, 3.8589527819978824, 6.758391773767167, 3.2998377123468496, 2.5340954517878003, 2.2030558720255207, 3.1712454099635083, 2.390660165283035, 1.3868356385587826, 0.5269529867169983, 0.0), # 150
(7.899749774253275, 5.746421097020041, 6.902049949478785, 7.132404049480748, 6.310911661789184, 3.0731080163620113, 2.5148597190501416, 2.3496267564537683, 3.3689496905334293, 1.2782232712161197, 1.002760370788901, 0.5978549073275894, 0.0, 8.435976736542818, 6.576403980603482, 5.013801853944504, 3.8346698136483583, 6.737899381066859, 3.2894774590352753, 2.5148597190501416, 2.1950771545442938, 3.155455830894592, 2.377468016493583, 1.3804099898957571, 0.5224019179109128, 0.0), # 151
(7.842399881538343, 5.6950001807462245, 6.868892699397251, 7.091611957645439, 6.278453883604579, 3.0615194001854955, 2.4950573625973322, 2.3418575824172674, 3.3582953559419897, 1.2698666905279126, 0.9965331555844703, 0.5943413006663675, 0.0, 8.391700616220398, 6.537754307330042, 4.982665777922351, 3.809600071583737, 6.716590711883979, 3.2786006153841742, 2.4950573625973322, 2.1867995715610684, 3.1392269418022893, 2.36387065254848, 1.3737785398794504, 0.5177272891587478, 0.0), # 152
(7.78359542019656, 5.642164754012652, 6.834673599778224, 7.049564482388949, 6.245097402868703, 3.049496270069676, 2.4746656466934596, 2.333700065196776, 3.3472164494766075, 1.2612373873374074, 0.9900970780594861, 0.5907159691183387, 0.0, 8.346173043724027, 6.497875660301725, 4.95048539029743, 3.783712162012222, 6.694432898953215, 3.2671800912754865, 2.4746656466934596, 2.17821162147834, 3.1225487014343516, 2.3498548274629836, 1.3669347199556448, 0.5129240685466048, 0.0), # 153
(7.723300024450729, 5.587859465766439, 6.7993598078506325, 7.006221885757057, 6.210822137077053, 3.0370225237780484, 2.453661835602614, 2.325134854707968, 3.3356965375046217, 1.2523248970473384, 0.9834440552434354, 0.5869750818206104, 0.0, 8.299363725465357, 6.456725900026714, 4.917220276217177, 3.7569746911420143, 6.671393075009243, 3.2551887965911552, 2.453661835602614, 2.169301802698606, 3.1054110685385266, 2.335407295252353, 1.3598719615701265, 0.5079872241605854, 0.0), # 154
(7.6614773285236355, 5.532028964954703, 6.762918480843396, 6.961544429795533, 6.175608003725131, 3.0240820590741087, 2.4320231935888805, 2.316142600866515, 3.323719186393376, 1.2431187550604388, 0.9765660041658056, 0.5831148079102902, 0.0, 8.251242367856026, 6.414262887013191, 4.882830020829028, 3.7293562651813157, 6.647438372786752, 3.242599641213121, 2.4320231935888805, 2.160058613624363, 3.0878040018625654, 2.320514809931845, 1.3525836961686795, 0.5029117240867913, 0.0), # 155
(7.598090966638081, 5.474617900524564, 6.725316775985439, 6.915492376550157, 6.139434920308432, 3.0106587737213526, 2.40972698491635, 2.3067039535880913, 3.3112679625102084, 1.2336084967794434, 0.9694548418560842, 0.5791313165244852, 0.0, 8.201778677307685, 6.370444481769337, 4.84727420928042, 3.7008254903383295, 6.622535925020417, 3.2293855350233276, 2.40972698491635, 2.150470552658109, 3.069717460154216, 2.3051641255167192, 1.3450633551970879, 0.49769253641132405, 0.0), # 156
(7.533104573016862, 5.415570921423138, 6.686521850505682, 6.868025988066703, 6.102282804322456, 2.9967365654832747, 2.3867504738491094, 2.2967995627883675, 3.2983264322224626, 1.2237836576070855, 0.9621024853437583, 0.5750207768003032, 0.0, 8.150942360231976, 6.325228544803333, 4.810512426718791, 3.671350972821256, 6.596652864444925, 3.2155193879037145, 2.3867504738491094, 2.140526118202339, 3.051141402161228, 2.2893419960222348, 1.3373043701011365, 0.4923246292202853, 0.0), # 157
(7.464680946405239, 5.353748694041236, 6.644659961585297, 6.817327186238432, 6.062454070580665, 2.9814309445183143, 2.3625533604639286, 2.285748730145572, 3.2838873638663655, 1.213341479072786, 0.9542659587564906, 0.570633297016195, 0.0, 8.096485859415345, 6.276966267178143, 4.771329793782452, 3.640024437218358, 6.567774727732731, 3.200048222203801, 2.3625533604639286, 2.129593531798796, 3.0312270352903323, 2.2724423954128112, 1.3289319923170593, 0.48670442673102154, 0.0), # 158
(7.382286766978402, 5.282809876299521, 6.58894818200249, 6.7529828690913405, 6.010127539854418, 2.95965229467081, 2.334106381692858, 2.2696723053184926, 3.2621424204073812, 1.2005702485246865, 0.9445694892698324, 0.5651135436402591, 0.0, 8.025427646920194, 6.216248980042849, 4.722847446349162, 3.601710745574059, 6.5242848408147625, 3.17754122744589, 2.334106381692858, 2.114037353336293, 3.005063769927209, 2.250994289697114, 1.3177896364004982, 0.4802554432999565, 0.0), # 159
(7.284872094904309, 5.202172001162321, 6.51826746496324, 6.673933132806645, 5.94428008756453, 2.9308657560278157, 2.301121874191892, 2.248166328969728, 3.2324750757428835, 1.1853014129657236, 0.9328765847682567, 0.5583751624073207, 0.0, 7.93642060889358, 6.142126786480525, 4.664382923841283, 3.55590423889717, 6.464950151485767, 3.147432860557619, 2.301121874191892, 2.0934755400198686, 2.972140043782265, 2.2246443776022153, 1.3036534929926482, 0.47292472737839286, 0.0), # 160
(7.17322205458596, 5.11236079574043, 6.4333724765919245, 6.5809293778175455, 5.865595416188075, 2.895420057582683, 2.263840723003438, 2.2215002221290754, 3.1952765889996724, 1.1676645482927346, 0.9192902757666179, 0.5504806224089643, 0.0, 7.830374044819097, 6.055286846498606, 4.596451378833089, 3.5029936448782033, 6.390553177999345, 3.1101003109807053, 2.263840723003438, 2.0681571839876307, 2.9327977080940375, 2.1936431259391824, 1.2866744953183848, 0.46476007234003913, 0.0), # 161
(7.048121770426357, 5.013901987144635, 6.335017883012913, 6.474723004557244, 5.7747572282021356, 2.853663928328766, 2.2225038131699044, 2.1899434058263343, 3.150938219304545, 1.147789230402558, 0.9039135927797701, 0.5414923927367745, 0.0, 7.708197254180333, 5.956416320104519, 4.519567963898851, 3.4433676912076736, 6.30187643860909, 3.065920768156868, 2.2225038131699044, 2.03833137737769, 2.8873786141010678, 2.158241001519082, 1.2670035766025827, 0.4558092715586033, 0.0), # 162
(6.9103563668284975, 4.90732130248573, 6.223958350350585, 6.35606541345895, 5.672449226083792, 2.8059460972594175, 2.1773520297337003, 2.153765301091302, 3.0998512257843016, 1.1258050351920315, 0.8868495663225682, 0.5314729424823361, 0.0, 7.570799536460879, 5.846202367305696, 4.43424783161284, 3.3774151055760937, 6.199702451568603, 3.015271421527823, 2.1773520297337003, 2.0042472123281554, 2.836224613041896, 2.118688471152984, 1.2447916700701172, 0.4461201184077937, 0.0), # 163
(6.760710968195384, 4.793144468874502, 6.100948544729314, 6.225708004955863, 5.559355112310126, 2.752615293367992, 2.128626257737233, 2.113235328953779, 3.0424068675657407, 1.1018415385579923, 0.8682012269098661, 0.5204847407372336, 0.0, 7.419090191144328, 5.725332148109569, 4.34100613454933, 3.305524615673976, 6.0848137351314815, 2.9585294605352903, 2.128626257737233, 1.9661537809771372, 2.779677556155063, 2.075236001651955, 1.2201897089458629, 0.43574040626131844, 0.0), # 164
(6.599970698930017, 4.671897213421746, 5.966743132273474, 6.084402179481189, 5.436158589358215, 2.694020245647842, 2.076567382222911, 2.068622910443561, 2.9789964037756596, 1.0760283163972786, 0.8480716050565187, 0.5085902565930517, 0.0, 7.25397851771427, 5.594492822523568, 4.2403580252825925, 3.2280849491918353, 5.957992807551319, 2.8960720746209856, 2.076567382222911, 1.9243001754627442, 2.7180792946791077, 2.0281340598270634, 1.1933486264546949, 0.42471792849288603, 0.0), # 165
(6.428920683435397, 4.54410526323825, 5.82209677910744, 5.932899337468126, 5.3035433597051425, 2.630509683092322, 2.021416288233143, 2.020197466590449, 2.9100110935408576, 1.0484949446067282, 0.8265637312773799, 0.49585195914137514, 0.0, 7.0763738156542955, 5.454371550555126, 4.1328186563869, 3.145484833820184, 5.820022187081715, 2.8282764532266285, 2.021416288233143, 1.8789354879230868, 2.6517716798525712, 1.9776331124893758, 1.1644193558214881, 0.41310047847620457, 0.0), # 166
(6.248346046114523, 4.410294345434805, 5.667764151355587, 5.771950879349882, 5.1621931258279865, 2.562432334694784, 1.9634138608103373, 1.9682284184242402, 2.835842195988133, 1.0193709990831787, 0.8037806360873045, 0.48233231747378824, 0.0, 6.887185384447996, 5.30565549221167, 4.0189031804365225, 3.058112997249536, 5.671684391976266, 2.755519785793936, 1.9634138608103373, 1.8303088104962744, 2.5810965629139933, 1.9239836264499612, 1.1335528302711175, 0.4009358495849823, 0.0), # 167
(6.059031911370395, 4.270990187122201, 5.50449991514229, 5.60230820555966, 5.012791590203827, 2.490136929448583, 1.902800984996902, 1.9129851869747332, 2.7568809702442847, 0.9887860557234682, 0.7798253500011468, 0.468093800681876, 0.0, 6.6873225235789615, 5.149031807500635, 3.8991267500057343, 2.9663581671704042, 5.513761940488569, 2.6781792617646265, 1.902800984996902, 1.7786692353204163, 2.5063957951019136, 1.867436068519887, 1.100899983028458, 0.3882718351929274, 0.0), # 168
(5.861763403606015, 4.1267185154112305, 5.333058736591924, 5.4247227165306615, 4.856022455309747, 2.413972196347072, 1.8398185458352458, 1.8547371932717271, 2.6735186754361124, 0.9568696904244344, 0.7548009035337614, 0.45319887785722274, 0.0, 6.477694532530785, 4.985187656429449, 3.774004517668807, 2.8706090712733023, 5.347037350872225, 2.596632070580418, 1.8398185458352458, 1.724265854533623, 2.4280112276548733, 1.808240905510221, 1.066611747318385, 0.3751562286737483, 0.0), # 169
(5.657325647224384, 3.978005057412684, 5.154195281828863, 5.23994581269609, 4.692569423622822, 2.334286864383604, 1.7747074283677764, 1.7937538583450197, 2.5861465706904125, 0.9237514790829147, 0.7288103272000027, 0.4377100180914133, 0.0, 6.259210710787055, 4.814810199005545, 3.6440516360000137, 2.7712544372487433, 5.172293141380825, 2.5112554016830275, 1.7747074283677764, 1.6673477602740028, 2.346284711811411, 1.7466486042320304, 1.0308390563657726, 0.36163682340115316, 0.0), # 170
(5.4465037666285, 3.82537554023735, 4.968664216977482, 5.048728894489152, 4.523116197620137, 2.2514296625515327, 1.7077085176369027, 1.7303046032244096, 2.495155915133985, 0.8895609975957474, 0.7019566515147247, 0.4216896904760322, 0.0, 6.032780357831365, 4.638586595236354, 3.509783257573624, 2.6686829927872413, 4.99031183026797, 2.4224264445141737, 1.7077085176369027, 1.6081640446796661, 2.2615580988100685, 1.6829096314963843, 0.9937328433954964, 0.3477614127488501, 0.0), # 171
(5.230082886221365, 3.6693556909960217, 4.777220208162156, 4.851823362343048, 4.348346479778769, 2.1657493198442115, 1.6390626986850327, 1.664658848939696, 2.4009379678936282, 0.8544278218597702, 0.6743429069927823, 0.4052003641026643, 0.0, 5.799312773147303, 4.457204005129307, 3.3717145349639117, 2.56328346557931, 4.8018759357872565, 2.3305223885155746, 1.6390626986850327, 1.5469637998887225, 2.1741732398893845, 1.6172744541143496, 0.9554440416324312, 0.3335777900905475, 0.0), # 172
(5.00884813040598, 3.510471236799489, 4.58061792150726, 4.649980616690982, 4.168943972575801, 2.077594565254994, 1.5690108565545748, 1.5970860165206766, 2.303883988096141, 0.8184815277718206, 0.6460721241490297, 0.3883045080628938, 0.0, 5.5597172562184625, 4.271349588691831, 3.2303606207451483, 2.4554445833154612, 4.607767976192282, 2.235920423128947, 1.5690108565545748, 1.483996118039281, 2.0844719862879004, 1.5499935388969943, 0.916123584301452, 0.31913374879995354, 0.0), # 173
(4.783584623585344, 3.349247904758541, 4.3796120231371685, 4.443952057966156, 3.9855923784883105, 1.987314127777233, 1.4977938762879377, 1.5278555269971503, 2.204385234868321, 0.7818516912287369, 0.6172473334983214, 0.37106459144830567, 0.0, 5.314903106528433, 4.081710505931362, 3.0862366674916064, 2.34555507368621, 4.408770469736642, 2.1389977377960103, 1.4977938762879377, 1.4195100912694523, 1.9927961892441552, 1.4813173526553853, 0.8759224046274336, 0.3044770822507765, 0.0), # 174
(4.555077490162455, 3.18621142198397, 4.174957179176257, 4.2344890866017755, 3.7989753999933793, 1.8952567364042834, 1.425652642927529, 1.457236801398915, 2.102832967336968, 0.7446678881273562, 0.5879715655555117, 0.35354308335048457, 0.0, 5.0657796235608075, 3.8889739168553294, 2.939857827777558, 2.234003664382068, 4.205665934673936, 2.040131521958481, 1.425652642927529, 1.3537548117173452, 1.8994876999966896, 1.411496362200592, 0.8349914358352515, 0.28965558381672457, 0.0), # 175
(4.324111854540319, 3.0218875155865668, 3.9674080557488987, 4.0223431030310435, 3.609776739568087, 1.8017711201294973, 1.3528280415157574, 1.3854992607557703, 1.9996184446288805, 0.7070596943645169, 0.558347850835455, 0.33580245286101496, 0.0, 4.813256106799174, 3.693826981471164, 2.791739254177275, 2.1211790830935504, 3.999236889257761, 1.9396989650580787, 1.3528280415157574, 1.2869793715210696, 1.8048883697840434, 1.3407810343436815, 0.7934816111497798, 0.2747170468715061, 0.0), # 176
(4.0914728411219325, 2.856801912677122, 3.7577193189794698, 3.808265507687162, 3.4186800996895155, 1.7072060079462288, 1.2795609570950313, 1.3129123260975137, 1.8951329258708567, 0.6691566858370562, 0.528479219853006, 0.3179051690714816, 0.0, 4.5582418557271245, 3.496956859786297, 2.6423960992650297, 2.0074700575111684, 3.7902658517417134, 1.838077256536519, 1.2795609570950313, 1.2194328628187348, 1.7093400498447577, 1.269421835895721, 0.751543863795894, 0.25970926478882933, 0.0), # 177
(3.8579455743102966, 2.6914803403664256, 3.5466456349923448, 3.593007701003337, 3.226369182834742, 1.6119101288478317, 1.2060922747077587, 1.239745418453944, 1.7897676701896952, 0.6310884384418126, 0.49846870312301883, 0.299913701073469, 0.0, 4.301646169828252, 3.299050711808158, 2.4923435156150937, 1.8932653153254375, 3.5795353403793904, 1.7356435858355217, 1.2060922747077587, 1.1513643777484512, 1.613184591417371, 1.1976692336677792, 0.7093291269984691, 0.24468003094240237, 0.0), # 178
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 179
)
passenger_allighting_rate = (
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 0
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 1
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 2
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 3
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 4
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 5
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 6
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 7
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 8
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 9
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 10
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 11
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 12
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 13
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 14
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 15
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 16
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 17
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 18
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 19
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 20
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 21
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 22
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 23
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 24
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 25
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 26
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 27
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 28
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 29
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 30
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 31
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 32
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 33
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 34
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 35
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 36
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 37
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 38
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 39
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 40
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 41
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 42
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 43
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 44
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 45
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 46
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 47
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 48
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 49
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 50
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 51
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 52
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 53
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 54
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 55
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 56
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 57
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 58
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 59
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 60
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 61
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 62
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 63
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 64
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 65
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 66
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 67
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 68
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 69
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 70
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 71
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 72
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 73
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 74
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 75
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 76
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 77
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 78
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 79
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 80
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 81
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 82
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 83
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 84
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 85
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 86
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 87
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 88
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 89
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 90
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 91
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 92
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 93
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 94
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 95
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 96
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 97
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 98
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 99
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 100
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 101
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 102
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 103
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 104
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 105
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 106
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 107
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 108
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 109
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 110
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 111
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 112
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 113
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 114
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 115
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 116
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 117
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 118
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 119
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 120
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 121
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 122
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 123
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 124
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 125
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 126
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 127
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 128
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 129
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 130
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 131
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 132
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 133
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 134
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 135
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 136
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 137
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 138
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 139
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 140
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 141
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 142
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 143
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 144
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 145
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 146
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 147
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 148
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 149
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 150
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 151
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 152
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 153
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 154
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 155
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 156
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 157
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 158
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 159
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 160
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 161
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 162
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 163
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 164
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 165
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 166
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 167
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 168
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 169
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 170
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 171
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 172
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 173
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 174
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 175
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 176
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 177
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 178
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 179
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 8991598675325360468762009371570610170
#index for seed sequence child
child_seed_index = (
1, # 0
30, # 1
)
| 276.242781
| 494
| 0.769543
| 32,987
| 258,287
| 6.025161
| 0.216904
| 0.358638
| 0.344147
| 0.652069
| 0.376444
| 0.367911
| 0.365008
| 0.364273
| 0.364112
| 0.364112
| 0
| 0.849799
| 0.095762
| 258,287
| 934
| 495
| 276.538544
| 0.001195
| 0.015529
| 0
| 0.200873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.005459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3dc9d67e179aef854c1b01040ce903f476ccf04c
| 192
|
py
|
Python
|
0301-0400/0383-Ransom Note/0383-Ransom Note.py
|
jiadaizhao/LeetCode
|
4ddea0a532fe7c5d053ffbd6870174ec99fc2d60
|
[
"MIT"
] | 49
|
2018-05-05T02:53:10.000Z
|
2022-03-30T12:08:09.000Z
|
0301-0400/0383-Ransom Note/0383-Ransom Note.py
|
jolly-fellow/LeetCode
|
ab20b3ec137ed05fad1edda1c30db04ab355486f
|
[
"MIT"
] | 11
|
2017-12-15T22:31:44.000Z
|
2020-10-02T12:42:49.000Z
|
0301-0400/0383-Ransom Note/0383-Ransom Note.py
|
jolly-fellow/LeetCode
|
ab20b3ec137ed05fad1edda1c30db04ab355486f
|
[
"MIT"
] | 28
|
2017-12-05T10:56:51.000Z
|
2022-01-26T18:18:27.000Z
|
import collections
class Solution:
def canConstruct(self, ransomNote: 'str', magazine: 'str') -> 'bool':
return not collections.Counter(ransomNote) - collections.Counter(magazine)
| 38.4
| 82
| 0.729167
| 20
| 192
| 7
| 0.7
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151042
| 192
| 4
| 83
| 48
| 0.858896
| 0
| 0
| 0
| 0
| 0
| 0.052083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3dd953d7cbf961786ed0a3c8fb1ebf70972afdb9
| 138
|
py
|
Python
|
katas/kyu_7/random_case.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/kyu_7/random_case.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/kyu_7/random_case.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
from random import choice
UP_LOW = (str.upper, str.lower)
def random_case(strng):
return ''.join(choice(UP_LOW)(a) for a in strng)
| 17.25
| 52
| 0.702899
| 24
| 138
| 3.916667
| 0.708333
| 0.170213
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 138
| 7
| 53
| 19.714286
| 0.817391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
9ac59074cc96c2f9c915fb7c81432eaa598edb01
| 37
|
py
|
Python
|
pyavro/field/__init__.py
|
mitchelllisle/pavro
|
b60368d21c1bbc5216d3aa8e6cbe4f873111ef0f
|
[
"MIT"
] | null | null | null |
pyavro/field/__init__.py
|
mitchelllisle/pavro
|
b60368d21c1bbc5216d3aa8e6cbe4f873111ef0f
|
[
"MIT"
] | null | null | null |
pyavro/field/__init__.py
|
mitchelllisle/pavro
|
b60368d21c1bbc5216d3aa8e6cbe4f873111ef0f
|
[
"MIT"
] | 1
|
2020-03-15T19:59:02.000Z
|
2020-03-15T19:59:02.000Z
|
from pyavro.field.field import Field
| 18.5
| 36
| 0.837838
| 6
| 37
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9ac69ca0b8530c2b9af8e029aedd7c6d9aea7a34
| 20
|
py
|
Python
|
keras/wrappers/__init__.py
|
NPoe/keras
|
298553d6018d3644d0e865015499b9405e3d6a2c
|
[
"MIT"
] | 1
|
2018-07-22T03:59:02.000Z
|
2018-07-22T03:59:02.000Z
|
keras/wrappers/__init__.py
|
NPoe/keras
|
298553d6018d3644d0e865015499b9405e3d6a2c
|
[
"MIT"
] | null | null | null |
keras/wrappers/__init__.py
|
NPoe/keras
|
298553d6018d3644d0e865015499b9405e3d6a2c
|
[
"MIT"
] | 1
|
2022-03-18T03:19:36.000Z
|
2022-03-18T03:19:36.000Z
|
from .lime import *
| 10
| 19
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9ad7fef74069e39d59d1676ff27c054f9532bbfa
| 49
|
py
|
Python
|
src/python/bd/common/__init__.py
|
BombDash/BombDash-server
|
2403c5396145ea8a3a63bd2089dd7276ef723085
|
[
"MIT"
] | 8
|
2020-06-12T19:29:32.000Z
|
2021-11-10T14:06:46.000Z
|
src/python/bd/common/__init__.py
|
BombDash/BombDash-server
|
2403c5396145ea8a3a63bd2089dd7276ef723085
|
[
"MIT"
] | 2
|
2021-01-20T05:15:13.000Z
|
2021-12-21T08:33:01.000Z
|
src/python/bd/common/__init__.py
|
BombDash/BombDash-server
|
2403c5396145ea8a3a63bd2089dd7276ef723085
|
[
"MIT"
] | 2
|
2021-02-05T22:30:16.000Z
|
2021-03-16T05:49:45.000Z
|
from . import glowing_profiles, fatality, prefix
| 24.5
| 48
| 0.816327
| 6
| 49
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 49
| 1
| 49
| 49
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9aed292b16d87ffe08e71c855fa80effa9c5c425
| 27
|
py
|
Python
|
py/exts/assetimport_maya/__init__.py
|
ddesmond/assetexchange
|
0f8133b449b41595e22f27f3970bec7ebeee19c1
|
[
"MIT"
] | null | null | null |
py/exts/assetimport_maya/__init__.py
|
ddesmond/assetexchange
|
0f8133b449b41595e22f27f3970bec7ebeee19c1
|
[
"MIT"
] | null | null | null |
py/exts/assetimport_maya/__init__.py
|
ddesmond/assetexchange
|
0f8133b449b41595e22f27f3970bec7ebeee19c1
|
[
"MIT"
] | null | null | null |
from .pushservice import *
| 13.5
| 26
| 0.777778
| 3
| 27
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b108db7055698904073f51b0221b982a66fb0b88
| 376
|
py
|
Python
|
tests/snippets/hash.py
|
ypyf/RustPython
|
86103bfd0187a6073cab91142f698cb6b0a0de51
|
[
"MIT"
] | 1
|
2021-09-03T15:59:36.000Z
|
2021-09-03T15:59:36.000Z
|
tests/snippets/hash.py
|
ypyf/RustPython
|
86103bfd0187a6073cab91142f698cb6b0a0de51
|
[
"MIT"
] | null | null | null |
tests/snippets/hash.py
|
ypyf/RustPython
|
86103bfd0187a6073cab91142f698cb6b0a0de51
|
[
"MIT"
] | null | null | null |
from testutils import assertRaises
class A:
pass
assert type(hash(None)) is int
assert type(hash(object())) is int
assert type(hash(A())) is int
assert type(hash(1)) is int
assert type(hash(1.1)) is int
assert type(hash("")) is int
with assertRaises(TypeError):
hash({})
with assertRaises(TypeError):
hash(set())
with assertRaises(TypeError):
hash([])
| 15.666667
| 34
| 0.68883
| 56
| 376
| 4.625
| 0.321429
| 0.23166
| 0.324324
| 0.289575
| 0.378378
| 0.23166
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.170213
| 376
| 23
| 35
| 16.347826
| 0.820513
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0
| true
| 0.066667
| 0.066667
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
b11b881476430e3b5ca078a0c8415b80c727919a
| 9,280
|
py
|
Python
|
tests/unit/plugins/modules/test_managed_serviceaccount_rbac.py
|
hanqiuzh/ocmplus.cm
|
98079701c86ea0d3aa4085642eb978caca1e6203
|
[
"Apache-2.0"
] | 12
|
2021-11-01T19:15:56.000Z
|
2021-12-14T16:05:37.000Z
|
tests/unit/plugins/modules/test_managed_serviceaccount_rbac.py
|
dtrieu80/ocmplus.cm
|
972831dc85fd09757ad6e1cb165371948da41ce9
|
[
"Apache-2.0"
] | 92
|
2022-01-05T16:47:27.000Z
|
2022-03-31T17:43:02.000Z
|
tests/unit/plugins/modules/test_managed_serviceaccount_rbac.py
|
dtrieu80/ocmplus.cm
|
972831dc85fd09757ad6e1cb165371948da41ce9
|
[
"Apache-2.0"
] | 16
|
2022-01-04T18:49:36.000Z
|
2022-03-24T17:07:36.000Z
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import unittest
import string
import random
from unittest.mock import MagicMock
from pathlib import Path
import ansible_collections.ocmplus.cm.plugins.modules.managed_serviceaccount_rbac as msa_rbac
class TestGetRBACTemplateFilepaths(unittest.TestCase):
def setUp(self):
self.test_fixture_dir = f"{Path(__file__).resolve().parent}/fixtures/rbac_template"
def test_empty_input(self):
module = MagicMock()
msa_rbac.get_rbac_template_filepaths(module, None)
module.fail_json.assert_called()
def test_file_not_exist(self):
module = MagicMock()
random_name = ''.join(random.choice(string.ascii_lowercase) for i in range(10))
msa_rbac.get_rbac_template_filepaths(module, random_name)
module.fail_json.assert_called()
def test_empty_file(self):
module = MagicMock()
rbac_template = f"{self.test_fixture_dir}/empty_file.yml"
result = msa_rbac.get_rbac_template_filepaths(module, rbac_template)
module.fail_json.assert_not_called()
assert result == [rbac_template]
def test_empty_dir(self):
module = MagicMock()
rbac_template = f"{self.test_fixture_dir}/empty_dir"
msa_rbac.get_rbac_template_filepaths(module, rbac_template)
module.fail_json.assert_called()
def test_non_empty_dir(self):
module = MagicMock()
rbac_template = f"{self.test_fixture_dir}"
result = msa_rbac.get_rbac_template_filepaths(module, rbac_template)
module.fail_json.assert_not_called()
assert len(result) == 6
class TestGetYamlResourceFromFiles(unittest.TestCase):
def setUp(self):
self.test_fixture_dir = f"{Path(__file__).resolve().parent}/fixtures/rbac_template"
def test_empty_input(self):
module = MagicMock()
msa_rbac.get_yaml_resource_from_files(module, None)
module.fail_json.assert_called()
def test_empty_list(self):
module = MagicMock()
files = []
msa_rbac.get_yaml_resource_from_files(module, files)
module.fail_json.assert_called()
def test_empty_file(self):
module = MagicMock()
files = [f"{self.test_fixture_dir}/empty_file.yml"]
msa_rbac.get_yaml_resource_from_files(module, files)
module.fail_json.assert_called()
def test_single_object_file(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/single_object_file.yml"]
result = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
module.fail_json.assert_not_called()
assert len(result) == 1
def test_multi_object_file(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/five_object_file.yml"]
result = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
module.fail_json.assert_not_called()
assert len(result) == 5
def test_multi_files(self):
module = MagicMock()
rbac_template = [
f"{self.test_fixture_dir}/single_object_file.yml",
f"{self.test_fixture_dir}/five_object_file.yml",
]
result = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
module.fail_json.assert_not_called()
assert len(result) == 6
def test_non_kube_resource_file(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/lorem_ipsum.txt"]
result = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
module.fail_json.assert_not_called()
assert len(result) == 1
def test_mixed_resource_files(self):
module = MagicMock()
rbac_template = [
f"{self.test_fixture_dir}/lorem_ipsum.txt",
f"{self.test_fixture_dir}/single_object_file.yml",
f"{self.test_fixture_dir}/five_object_file.yml",
]
result = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
module.fail_json.assert_not_called()
assert len(result) == 7
class TestGetRbacResourceFromYaml(unittest.TestCase):
def setUp(self) -> None:
self.test_fixture_dir = f"{Path(__file__).resolve().parent}/fixtures/rbac_template"
def test_non_kube_yaml(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/lorem_ipsum.txt"]
yaml = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
msa_rbac.get_rbac_resource_from_yaml(module, yaml)
module.warn.assert_called()
module.fail_json.assert_called()
def test_non_rbac_yaml(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/non_rbac_resource.yml"]
yaml = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
msa_rbac.get_rbac_resource_from_yaml(module, yaml)
module.warn.assert_called()
module.fail_json.assert_called()
def test_single_role_yaml(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/single_object_file.yml"]
yaml = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
result = msa_rbac.get_rbac_resource_from_yaml(module, yaml)
module.fail_json.assert_not_called()
assert len(result.get('Role')) == 1
assert len(result.get('RoleBinding')) == 0
assert len(result.get('ClusterRoleBinding')) == 0
assert len(result.get('ClusterRole')) == 0
def test_multi_object_yaml(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/five_object_file.yml"]
yaml = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
result = msa_rbac.get_rbac_resource_from_yaml(module, yaml)
module.fail_json.assert_not_called()
assert len(result.get('Role')) == 2
assert len(result.get('RoleBinding')) == 2
assert len(result.get('ClusterRoleBinding')) == 1
assert len(result.get('ClusterRole')) == 0
def test_bad_rbac_yaml(self):
module = MagicMock()
rbac_template = [f"{self.test_fixture_dir}/bad_rbac.yml"]
yaml = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
msa_rbac.get_rbac_resource_from_yaml(module, yaml)
module.warn.assert_called()
module.fail_json.assert_called()
def test_good_and_bad_yaml(self):
module = MagicMock()
rbac_template = [
f"{self.test_fixture_dir}/bad_rbac.yml",
f"{self.test_fixture_dir}/single_object_file.yml"
]
yaml = msa_rbac.get_yaml_resource_from_files(module, rbac_template)
result = msa_rbac.get_rbac_resource_from_yaml(module, yaml)
module.warn.assert_called()
module.fail_json.assert_not_called()
assert len(result.get('Role')) == 1
assert len(result.get('RoleBinding')) == 0
assert len(result.get('ClusterRoleBinding')) == 0
assert len(result.get('ClusterRole')) == 0
class TestGenerateRbacManifest(unittest.TestCase):
    """Tests for msa_rbac.generate_rbac_manifest."""

    def setUp(self) -> None:
        # Fixtures live in a directory next to this test module.
        self.test_fixture_dir = f"{Path(__file__).resolve().parent}/fixtures/rbac_template"
        self.role_subject = {
            'kind': 'ServiceAccount',
            'name': 'foo',
            'namespace': 'bar',
        }

    def _manifest_from_fixtures(self, module, filenames):
        # Parse the named fixture files and run them through manifest generation.
        paths = [f"{self.test_fixture_dir}/{name}" for name in filenames]
        parsed = msa_rbac.get_yaml_resource_from_files(module, paths)
        resources = msa_rbac.get_rbac_resource_from_yaml(module, parsed)
        return msa_rbac.generate_rbac_manifest(module, resources, 'postfix', self.role_subject)

    def test_no_resource(self):
        # An entirely empty resource map cannot produce a manifest.
        module = MagicMock()
        empty = {kind: {} for kind in ('Role', 'ClusterRole', 'RoleBinding', 'ClusterRoleBinding')}
        msa_rbac.generate_rbac_manifest(module, empty, 'postfix', self.role_subject)
        module.fail_json.assert_called()

    def test_single_unused_role(self):
        # A lone Role with no binding still generates a manifest, but warns.
        module = MagicMock()
        manifest = self._manifest_from_fixtures(module, ['single_object_file.yml'])
        module.warn.assert_called()
        module.fail_json.assert_not_called()
        assert len(manifest) == 1

    def test_no_unused_role(self):
        # Every object is referenced, so no warning is expected.
        module = MagicMock()
        manifest = self._manifest_from_fixtures(module, ['five_object_file.yml'])
        module.warn.assert_not_called()
        module.fail_json.assert_not_called()
        assert len(manifest) == 5

    def test_unused_role(self):
        # Mixing a fully-bound set with an extra unused Role warns but succeeds.
        module = MagicMock()
        manifest = self._manifest_from_fixtures(
            module, ['five_object_file.yml', 'single_object_file.yml'])
        module.warn.assert_called()
        module.fail_json.assert_not_called()
        assert len(manifest) == 6
| 40.881057
| 102
| 0.687716
| 1,188
| 9,280
| 4.979798
| 0.091751
| 0.089249
| 0.0524
| 0.08215
| 0.873225
| 0.858519
| 0.858519
| 0.848715
| 0.819642
| 0.805105
| 0
| 0.00313
| 0.208082
| 9,280
| 226
| 103
| 41.061947
| 0.801878
| 0
| 0
| 0.671875
| 0
| 0
| 0.152694
| 0.126724
| 0
| 0
| 0
| 0
| 0.270833
| 1
| 0.140625
| false
| 0
| 0.036458
| 0
| 0.197917
| 0.005208
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b152100d1670c9c7cd1bc43739595460d03127f3
| 122
|
py
|
Python
|
TVpy/Layers/all.py
|
Jitrixis/2ARC-Network-stack
|
f0f7f68b989c5c6eaca3be46554dd5c7010e1551
|
[
"MIT"
] | 1
|
2017-08-22T20:44:12.000Z
|
2017-08-22T20:44:12.000Z
|
TVpy/Layers/all.py
|
Jitrixis/2ARC-Network-stack
|
f0f7f68b989c5c6eaca3be46554dd5c7010e1551
|
[
"MIT"
] | null | null | null |
TVpy/Layers/all.py
|
Jitrixis/2ARC-Network-stack
|
f0f7f68b989c5c6eaca3be46554dd5c7010e1551
|
[
"MIT"
] | null | null | null |
__author__ = 'jitrixis'
# Aggregator module: re-export everything from each layer sub-package so
# callers can import all layer classes from this single namespace.
# NOTE(review): star imports — the exported names depend on the sub-modules
# (and their `__all__`, if any); verify against the sub-packages.
from Data.all import *
from Frame.all import *
from Packet.all import *
from Segment.all import *
| 20.333333
| 25
| 0.754098
| 18
| 122
| 4.888889
| 0.5
| 0.409091
| 0.443182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155738
| 122
| 6
| 25
| 20.333333
| 0.854369
| 0
| 0
| 0
| 0
| 0
| 0.065041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b15ce1ab6f2c6345a514b30cb8c5d14e4506aabd
| 35
|
py
|
Python
|
beluga/numeric/data_classes/__init__.py
|
doublefloyd/beluga
|
740bda376634945ef51bf1cf946fcbe002e9bc7f
|
[
"MIT"
] | 20
|
2017-10-02T13:09:58.000Z
|
2022-03-28T20:50:35.000Z
|
beluga/numeric/data_classes/__init__.py
|
doublefloyd/beluga
|
740bda376634945ef51bf1cf946fcbe002e9bc7f
|
[
"MIT"
] | 187
|
2018-02-04T20:35:03.000Z
|
2021-01-27T15:04:18.000Z
|
beluga/numeric/data_classes/__init__.py
|
doublefloyd/beluga
|
740bda376634945ef51bf1cf946fcbe002e9bc7f
|
[
"MIT"
] | 12
|
2018-01-19T04:00:09.000Z
|
2022-03-28T16:44:17.000Z
|
from .Trajectory import Trajectory
| 17.5
| 34
| 0.857143
| 4
| 35
| 7.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b162a2edfb255c6f79bada717cd082369aa6c2eb
| 11,827
|
py
|
Python
|
gr-gsm/python/qa_burst_timeslot_splitter.py
|
ossiemarks/hackrf-gsm
|
fc3d690354e3bed8b7f8b2f70c3eaf0ecb74d88c
|
[
"MIT"
] | 6
|
2021-12-19T07:16:38.000Z
|
2022-03-19T17:50:51.000Z
|
gr-gsm/python/qa_burst_timeslot_splitter.py
|
mapennell/hackrf-gsm
|
fc3d690354e3bed8b7f8b2f70c3eaf0ecb74d88c
|
[
"MIT"
] | null | null | null |
gr-gsm/python/qa_burst_timeslot_splitter.py
|
mapennell/hackrf-gsm
|
fc3d690354e3bed8b7f8b2f70c3eaf0ecb74d88c
|
[
"MIT"
] | 5
|
2019-09-05T05:49:35.000Z
|
2021-07-10T20:42:11.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @file
# @author Roman Khassraf <rkhassraf@gmail.com>
# @section LICENSE
#
# Gr-gsm is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# Gr-gsm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gr-gsm; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
#
from gnuradio import gr, gr_unittest, blocks
import grgsm
import pmt
class qa_burst_timeslot_splitter (gr_unittest.TestCase):
    """QA for grgsm.burst_timeslot_splitter.

    The splitter takes bursts on one message input and, judging by the
    wiring below, exposes eight outputs ("out0".."out7"); each burst is
    expected on the sink matching its timeslot.
    """

    def setUp (self):
        # Fresh flowgraph for each test case.
        self.tb = gr.top_block ()

    def tearDown (self):
        self.tb = None

    def test_001 (self):
        """
        24 random framenumbers, timeslots and bursts as input
        """
        framenumbers_input = [1259192, 1076346, 1076242, 235879, 1259218, 2194302, 2714322, 1588, 1259244, 1563637, 1435624, 1928543, 503726, 1571144, 2658397, 1807445, 869789, 624070, 2005511, 1306953, 2284894, 1600339, 551375, 1259270]
        # Timeslot of each burst above, in the same order; this drives the
        # expected routing below.
        timeslots_input = [6, 3, 4, 3, 5, 3, 2, 7, 1, 6, 0, 7, 2, 3, 2, 0, 7, 1, 0, 6, 0, 6, 5, 7]
        bursts_input = [
            "0001100001000111100111101111100101000100101011000010011110011101001111101100010100111111100000110100011111101011101100100111110011000100010001010000",
            "0001000101000000001001111110000110010110110111110111101000001101001111101100010100111111001110001001110101110001010001000111011010010001011011000000",
            "0001001101101101000111001000101011001101001110110001001100111101001111101100010100111111111001001010011010011111010010010101011001001011011100110000",
            "0000010010100000001001101010100001011100010001101100111111101101001111101100010100111111101101001110100010101110010110101111100010010000110010110000",
            "0000010101010110010011110101010101101100000000001000100100101010000111011101001000011101011101110000101011001111000100001000000000001110010001111000",
            "0001000000000010111010100000010101000010001010111010000000011010000111011101001000011101000000100010111110101000000001000000000010111010100000000000",
            "0001010101111111111010000001010101011111111111101000000001001010000111011101001000011101010111111111111010101000000001010101011011101010000001000000",
            "0000000000111110101010100001000000100010101110101010000101001010000111011101001000011101001010001111101010001000010000000000101110101010100000010000",
            "0000010000000010000001001000011001010010000011000101000000001010000111011101001000011101010100100000000001001000001000000100100011000101001000111000",
            "0001010100110111100000110111100110010100011100011000110110001010000111011101001000011101011111111001111001101010010100000000011111001101000111110000",
            "0001100110000001011110001000001100101001010100111111000100111010000111011101001000011101000011010010001010111101000100110011111010100010010101000000",
            "0000010101100101010110000011010000000000000010111001110110101010000111011101001000011101000001000100100001111001100011000101010001110001010100111000",
            "0001000100000011001010111001111100011010000000000000001001001010000111011101001000011101010110000101111010011001110110001001011010101000011110110000",
            "0001100001000111111111100001011000000011010110111010110000111010000111011101001000011101100010111100100101110001101000110100110000001010101110011000",
            "0000000100111011000000000010100100001100101010000000010010101010000111011101001000011101000110110001110110000100110100110110011001100100000101100000",
            "0000100101111010011110111010100111010100011011011101100111001010000111011101001000011101010000111010000110100000001000010011101011001001110100011000",
            "0001111101101110110000010100100111000001001000100000001111100011100010111000101110001010111010010100011001100111001111010011111000100101111101010000",
            "0000110101000011011010110000110011010000000001001010110010001010000111011101001000011101010000011000111001101110000000110010100001101110101000100000",
            "0000001000010001011111111111101010100000010101011101101010101010000111011101001000011101100010010101010101011110101010101000010001011101111010101000",
            "0000101110101111011001011001000011110010100010011100110010001010000111011101001000011101100000001110000100010100110111001001100010101101100010101000",
            "0001100010000001000111011100101101101010100001111101001000101010000111011101001000011101111010000011010110010111011111010010001000001101100011111000",
            "0001011101101101011100001111001100010001000011011001101110011010000111011101001000011101010010111011100111000001011100100001111010100101111000100000",
            "0000001000100011000000000000110100000000010000001010100100001010000111011101001000011101000010010000000000001001000001011000000001010000000100010000",
            "0000100000110001000000000100000110001011100001001000000000001010000111011101001000011101001010010001010000000111010000000011000001000000000101010000"
        ]
        # Expected routing: bursts_expected_N holds, in input order, exactly
        # the bursts whose timeslot (in timeslots_input) is N.
        bursts_expected_0 = [
            "0001100110000001011110001000001100101001010100111111000100111010000111011101001000011101000011010010001010111101000100110011111010100010010101000000",
            "0000100101111010011110111010100111010100011011011101100111001010000111011101001000011101010000111010000110100000001000010011101011001001110100011000",
            "0000001000010001011111111111101010100000010101011101101010101010000111011101001000011101100010010101010101011110101010101000010001011101111010101000",
            "0001100010000001000111011100101101101010100001111101001000101010000111011101001000011101111010000011010110010111011111010010001000001101100011111000"
        ]
        bursts_expected_1 = [
            "0000010000000010000001001000011001010010000011000101000000001010000111011101001000011101010100100000000001001000001000000100100011000101001000111000",
            "0000110101000011011010110000110011010000000001001010110010001010000111011101001000011101010000011000111001101110000000110010100001101110101000100000"
        ]
        bursts_expected_2 = [
            "0001010101111111111010000001010101011111111111101000000001001010000111011101001000011101010111111111111010101000000001010101011011101010000001000000",
            "0001000100000011001010111001111100011010000000000000001001001010000111011101001000011101010110000101111010011001110110001001011010101000011110110000",
            "0000000100111011000000000010100100001100101010000000010010101010000111011101001000011101000110110001110110000100110100110110011001100100000101100000"
        ]
        bursts_expected_3 = [
            "0001000101000000001001111110000110010110110111110111101000001101001111101100010100111111001110001001110101110001010001000111011010010001011011000000",
            "0000010010100000001001101010100001011100010001101100111111101101001111101100010100111111101101001110100010101110010110101111100010010000110010110000",
            "0001000000000010111010100000010101000010001010111010000000011010000111011101001000011101000000100010111110101000000001000000000010111010100000000000",
            "0001100001000111111111100001011000000011010110111010110000111010000111011101001000011101100010111100100101110001101000110100110000001010101110011000"
        ]
        bursts_expected_4 = [
            "0001001101101101000111001000101011001101001110110001001100111101001111101100010100111111111001001010011010011111010010010101011001001011011100110000"
        ]
        bursts_expected_5 = [
            "0000010101010110010011110101010101101100000000001000100100101010000111011101001000011101011101110000101011001111000100001000000000001110010001111000",
            "0000001000100011000000000000110100000000010000001010100100001010000111011101001000011101000010010000000000001001000001011000000001010000000100010000"
        ]
        bursts_expected_6 = [
            "0001100001000111100111101111100101000100101011000010011110011101001111101100010100111111100000110100011111101011101100100111110011000100010001010000",
            "0001010100110111100000110111100110010100011100011000110110001010000111011101001000011101011111111001111001101010010100000000011111001101000111110000",
            "0000101110101111011001011001000011110010100010011100110010001010000111011101001000011101100000001110000100010100110111001001100010101101100010101000",
            "0001011101101101011100001111001100010001000011011001101110011010000111011101001000011101010010111011100111000001011100100001111010100101111000100000"
        ]
        bursts_expected_7 = [
            "0000000000111110101010100001000000100010101110101010000101001010000111011101001000011101001010001111101010001000010000000000101110101010100000010000",
            "0000010101100101010110000011010000000000000010111001110110101010000111011101001000011101000001000100100001111001100011000101010001110001010100111000",
            "0001111101101110110000010100100111000001001000100000001111100011100010111000101110001010111010010100011001100111001111010011111000100101111101010000",
            "0000100000110001000000000100000110001011100001001000000000001010000111011101001000011101001010010001010000000111010000000011000001000000000101010000"
        ]
        # Flowgraph: burst source -> splitter -> one burst sink per output.
        src = grgsm.burst_source(framenumbers_input, timeslots_input, bursts_input)
        splitter = grgsm.burst_timeslot_splitter()
        sink_0 = grgsm.burst_sink()
        sink_1 = grgsm.burst_sink()
        sink_2 = grgsm.burst_sink()
        sink_3 = grgsm.burst_sink()
        sink_4 = grgsm.burst_sink()
        sink_5 = grgsm.burst_sink()
        sink_6 = grgsm.burst_sink()
        sink_7 = grgsm.burst_sink()
        self.tb.msg_connect(src, "out", splitter, "in")
        self.tb.msg_connect(splitter, "out0", sink_0, "in")
        self.tb.msg_connect(splitter, "out1", sink_1, "in")
        self.tb.msg_connect(splitter, "out2", sink_2, "in")
        self.tb.msg_connect(splitter, "out3", sink_3, "in")
        self.tb.msg_connect(splitter, "out4", sink_4, "in")
        self.tb.msg_connect(splitter, "out5", sink_5, "in")
        self.tb.msg_connect(splitter, "out6", sink_6, "in")
        self.tb.msg_connect(splitter, "out7", sink_7, "in")
        self.tb.run ()
        # Collect each sink's received bursts and compare with the expected
        # per-timeslot lists.
        bursts_result_0 = list(sink_0.get_burst_data())
        bursts_result_1 = list(sink_1.get_burst_data())
        bursts_result_2 = list(sink_2.get_burst_data())
        bursts_result_3 = list(sink_3.get_burst_data())
        bursts_result_4 = list(sink_4.get_burst_data())
        bursts_result_5 = list(sink_5.get_burst_data())
        bursts_result_6 = list(sink_6.get_burst_data())
        bursts_result_7 = list(sink_7.get_burst_data())
        self.assertEqual(bursts_expected_0, bursts_result_0)
        self.assertEqual(bursts_expected_1, bursts_result_1)
        self.assertEqual(bursts_expected_2, bursts_result_2)
        self.assertEqual(bursts_expected_3, bursts_result_3)
        self.assertEqual(bursts_expected_4, bursts_result_4)
        self.assertEqual(bursts_expected_5, bursts_result_5)
        self.assertEqual(bursts_expected_6, bursts_result_6)
        self.assertEqual(bursts_expected_7, bursts_result_7)
if __name__ == '__main__':
    # Run the QA suite via GNU Radio's unittest wrapper, emitting an XML report.
    gr_unittest.run(qa_burst_timeslot_splitter, "qa_burst_timeslot_splitter.xml")
| 73.006173
| 237
| 0.817705
| 590
| 11,827
| 16.133898
| 0.332203
| 0.023532
| 0.008509
| 0.015128
| 0.048114
| 0.027734
| 0
| 0
| 0
| 0
| 0
| 0.721347
| 0.136129
| 11,827
| 161
| 238
| 73.459627
| 0.210336
| 0.069587
| 0
| 0.295652
| 0
| 0
| 0.656898
| 0.651328
| 0
| 1
| 0
| 0
| 0.069565
| 1
| 0.026087
| false
| 0
| 0.026087
| 0
| 0.06087
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b16ace4eeb4180d51795b5167c78ac6994100d59
| 9,375
|
py
|
Python
|
pbj/electrostatics/pb_formulation/formulations.py
|
kstylesc/PBJ
|
0a4440b684c1d028341762a275fb3d51956b8301
|
[
"MIT"
] | null | null | null |
pbj/electrostatics/pb_formulation/formulations.py
|
kstylesc/PBJ
|
0a4440b684c1d028341762a275fb3d51956b8301
|
[
"MIT"
] | null | null | null |
pbj/electrostatics/pb_formulation/formulations.py
|
kstylesc/PBJ
|
0a4440b684c1d028341762a275fb3d51956b8301
|
[
"MIT"
] | null | null | null |
import numpy as np
import bempp.api
def direct(dirichl_space, neumann_space, q, x_q, ep_in, ep_out, kappa, operator_assembler):
    """Assemble the direct boundary-integral formulation of the (linearized)
    Poisson-Boltzmann problem.

    Interior operators use the Laplace kernel; exterior operators use the
    modified-Helmholtz (screened) kernel with parameter ``kappa``.

    Args:
        dirichl_space: Function space for the Dirichlet (potential) trace.
        neumann_space: Function space for the Neumann (normal-derivative) trace.
        q: Point-charge magnitudes.
        x_q: Point-charge positions, one row per charge.
        ep_in: Interior dielectric constant.
        ep_out: Exterior dielectric constant.
        kappa: Screening parameter for the exterior kernel.
        operator_assembler: Assembler mode handed to every bempp operator.

    Returns:
        (A, rhs_1, rhs_2): 2x2 blocked boundary operator and the two
        right-hand-side GridFunctions (charge potential, zero).
    """
    from bempp.api.operators.boundary import sparse, laplace, modified_helmholtz
    identity = sparse.identity(dirichl_space, dirichl_space, dirichl_space)
    slp_in = laplace.single_layer(neumann_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    dlp_in = laplace.double_layer(dirichl_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    slp_out = modified_helmholtz.single_layer(neumann_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    dlp_out = modified_helmholtz.double_layer(dirichl_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    # Matrix Assembly
    A = bempp.api.BlockedOperator(2, 2)
    A[0, 0] = 0.5*identity + dlp_in
    A[0, 1] = -slp_in
    A[1, 0] = 0.5*identity - dlp_out
    A[1, 1] = (ep_in/ep_out)*slp_out

    @bempp.api.real_callable
    def charges_fun(x, n, domain_index, result):
        # Coulomb potential at x induced by all point charges: sum(q / |x - x_q|) / (4*pi*ep_in).
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        aux = np.sum(q/nrm)
        result[0] = aux/(4*np.pi*ep_in)

    @bempp.api.real_callable
    def zero(x, n, domain_index, result):
        # Homogeneous right-hand side for the second equation.
        result[0] = 0

    rhs_1 = bempp.api.GridFunction(dirichl_space, fun=charges_fun)
    rhs_2 = bempp.api.GridFunction(neumann_space, fun=zero)
    return A, rhs_1, rhs_2
def juffer(dirichl_space, neumann_space, q, x_q, ep_in, ep_ex, kappa, operator_assembler):
    """Assemble the Juffer (derivative) formulation of the (linearized)
    Poisson-Boltzmann problem.

    Args:
        dirichl_space: Function space for the Dirichlet (potential) trace.
        neumann_space: Function space for the Neumann (normal-derivative) trace.
        q: Point-charge magnitudes.
        x_q: Point-charge positions, one row per charge.
        ep_in: Interior dielectric constant.
        ep_ex: Exterior dielectric constant.
        kappa: Screening parameter for the exterior (modified-Helmholtz) kernel.
        operator_assembler: Assembler mode handed to every bempp operator.

    Returns:
        (A, rhs_1, rhs_2): 2x2 blocked boundary operator and the two
        right-hand-side GridFunctions (Green's function and its normal
        derivative evaluated from the point charges).
    """
    from bempp.api.operators.boundary import sparse, laplace, modified_helmholtz
    phi_id = sparse.identity(dirichl_space, dirichl_space, dirichl_space)
    dph_id = sparse.identity(neumann_space, neumann_space, neumann_space)
    ep = ep_ex/ep_in
    # L1: scaled exterior minus interior double-layer operators.
    dF = laplace.double_layer(dirichl_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    dP = modified_helmholtz.double_layer(dirichl_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    L1 = (ep*dP) - dF
    # L2: interior minus exterior single-layer operators.
    F = laplace.single_layer(neumann_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    P = modified_helmholtz.single_layer(neumann_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    L2 = F - P
    # L3: difference of hypersingular operators.
    ddF = laplace.hypersingular(dirichl_space, neumann_space, neumann_space, assembler=operator_assembler)
    ddP = modified_helmholtz.hypersingular(dirichl_space, neumann_space, neumann_space, kappa, assembler=operator_assembler)
    L3 = ddP - ddF
    # L4: interior minus scaled exterior adjoint double-layer operators.
    dF0 = laplace.adjoint_double_layer(neumann_space, neumann_space, neumann_space, assembler=operator_assembler)
    dP0 = modified_helmholtz.adjoint_double_layer(neumann_space, neumann_space, neumann_space, kappa, assembler=operator_assembler)
    L4 = dF0 - ((1.0/ep)*dP0)
    A = bempp.api.BlockedOperator(2, 2)
    A[0, 0] = (0.5*(1.0 + ep)*phi_id) - L1
    A[0, 1] = (-1.0)*L2
    A[1, 0] = L3 # Sign change due to bempp's definition
    A[1, 1] = (0.5*(1.0 + (1.0/ep))*dph_id) - L4

    @bempp.api.real_callable
    def d_green_func(x, n, domain_index, result):
        # Normal derivative of the free-space Green's function summed over charges.
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        const = -1./(4.*np.pi*ep_in)
        result[:] = const*np.sum(q*np.dot(x-x_q, n)/(nrm**3))

    @bempp.api.real_callable
    def green_func(x, n, domain_index, result):
        # Free-space Green's function (Coulomb potential) summed over charges.
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        result[:] = np.sum(q/nrm)/(4.*np.pi*ep_in)

    rhs_1 = bempp.api.GridFunction(dirichl_space, fun=green_func)
    rhs_2 = bempp.api.GridFunction(dirichl_space, fun=d_green_func)
    return A, rhs_1, rhs_2
def laplaceMultitrace(dirichl_space, neumann_space, operator_assembler):
    """Assemble the 2x2 multitrace (Calderon-type) blocked operator for the
    Laplace kernel: [[-K, V], [W, K']] in single/double/hypersingular/adjoint
    double-layer terms."""
    from bempp.api.operators.boundary import laplace
    blocked = bempp.api.BlockedOperator(2, 2)
    blocked[0, 0] = (-1.0)*laplace.double_layer(dirichl_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    blocked[0, 1] = laplace.single_layer(neumann_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    blocked[1, 0] = laplace.hypersingular(dirichl_space, neumann_space, neumann_space, assembler=operator_assembler)
    blocked[1, 1] = laplace.adjoint_double_layer(neumann_space, neumann_space, neumann_space, assembler=operator_assembler)
    return blocked
def modHelmMultitrace(dirichl_space, neumann_space, kappa, operator_assembler):
    """Assemble the 2x2 multitrace (Calderon-type) blocked operator for the
    modified-Helmholtz kernel with screening parameter ``kappa``."""
    from bempp.api.operators.boundary import modified_helmholtz
    blocked = bempp.api.BlockedOperator(2, 2)
    blocked[0, 0] = (-1.0)*modified_helmholtz.double_layer(dirichl_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    blocked[0, 1] = modified_helmholtz.single_layer(neumann_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    blocked[1, 0] = modified_helmholtz.hypersingular(dirichl_space, neumann_space, neumann_space, kappa, assembler=operator_assembler)
    blocked[1, 1] = modified_helmholtz.adjoint_double_layer(neumann_space, neumann_space, neumann_space, kappa, assembler=operator_assembler)
    return blocked
def alpha_beta(dirichl_space, neumann_space, q, x_q, ep_in, ep_ex, kappa, alpha, beta, operator_assembler):
    """Assemble the alpha/beta-scaled interior-exterior formulation built from
    the Laplace and modified-Helmholtz multitrace operators.

    Returns:
        (A, rhs_1, rhs_2, A_in, A_ex, interior_projector,
        scaled_exterior_projector): the combined blocked operator, the two
        right-hand-side GridFunctions, the two multitrace operators, and
        the two projector terms A is built from.
    """
    from bempp.api.operators.boundary import sparse
    phi_id = sparse.identity(dirichl_space, dirichl_space, dirichl_space)
    dph_id = sparse.identity(neumann_space, neumann_space, neumann_space)
    ep = ep_ex/ep_in
    A_in = laplaceMultitrace(dirichl_space, neumann_space, operator_assembler)
    A_ex = modHelmMultitrace(dirichl_space, neumann_space, kappa, operator_assembler)

    def _diagonal(top, bottom):
        # 2x2 blocked operator with the given diagonal and zero off-diagonals.
        blocked = bempp.api.BlockedOperator(2, 2)
        blocked[0, 0] = top
        blocked[0, 1] = 0.0*phi_id
        blocked[1, 0] = 0.0*phi_id
        blocked[1, 1] = bottom
        return blocked

    D = _diagonal(alpha*phi_id, beta*dph_id)
    E = _diagonal(phi_id, dph_id*(1.0/ep))
    F = _diagonal(alpha*phi_id, dph_id*(beta/ep))
    Id = _diagonal(phi_id, dph_id)

    interior_projector = (0.5*Id) + A_in
    scaled_exterior_projector = D*((0.5*Id) - A_ex)*E
    A = interior_projector + scaled_exterior_projector - (Id + F)

    @bempp.api.real_callable
    def d_green_func(x, n, domain_index, result):
        # Negated normal derivative of the summed free-space Green's function.
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        const = -1./(4.*np.pi*ep_in)
        result[:] = (-1.0)*const*np.sum(q*np.dot(x-x_q, n)/(nrm**3))

    @bempp.api.real_callable
    def green_func(x, n, domain_index, result):
        # Negated summed free-space Green's function (Coulomb potential).
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        result[:] = (-1.0)*np.sum(q/nrm)/(4.*np.pi*ep_in)

    rhs_1 = bempp.api.GridFunction(dirichl_space, fun=green_func)
    rhs_2 = bempp.api.GridFunction(dirichl_space, fun=d_green_func)
    return A, rhs_1, rhs_2, A_in, A_ex, interior_projector, scaled_exterior_projector
def alpha_beta_single_blocked_operator(dirichl_space, neumann_space, q, x_q, ep_in, ep_ex, kappa, alpha, beta, operator_assembler):
    """Assemble the alpha/beta formulation directly as a single 2x2 blocked
    operator (instead of combining multitrace operators as ``alpha_beta``
    does).

    Returns:
        (A, rhs_1, rhs_2): blocked operator and the two right-hand-side
        GridFunctions.
    """
    from bempp.api.operators.boundary import sparse, laplace, modified_helmholtz
    # Interior (Laplace) boundary operators.
    dlp_in = laplace.double_layer(dirichl_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    slp_in = laplace.single_layer(neumann_space, dirichl_space, dirichl_space, assembler=operator_assembler)
    hlp_in = laplace.hypersingular(dirichl_space, neumann_space, neumann_space, assembler=operator_assembler)
    adlp_in = laplace.adjoint_double_layer(neumann_space, neumann_space, neumann_space, assembler=operator_assembler)
    # Exterior (modified-Helmholtz, screened) boundary operators.
    dlp_out = modified_helmholtz.double_layer(dirichl_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    slp_out = modified_helmholtz.single_layer(neumann_space, dirichl_space, dirichl_space, kappa, assembler=operator_assembler)
    hlp_out = modified_helmholtz.hypersingular(dirichl_space, neumann_space, neumann_space, kappa, assembler=operator_assembler)
    adlp_out = modified_helmholtz.adjoint_double_layer(neumann_space, neumann_space, neumann_space, kappa, assembler=operator_assembler)
    phi_identity = sparse.identity(dirichl_space, dirichl_space, dirichl_space)
    dph_identity = sparse.identity(neumann_space, neumann_space, neumann_space)
    ep = ep_ex/ep_in
    A = bempp.api.BlockedOperator(2, 2)
    A[0, 0] = (-0.5*(1+alpha))*phi_identity + (alpha*dlp_out) - dlp_in
    A[0, 1] = slp_in - ((alpha/ep)*slp_out)
    A[1, 0] = hlp_in - (beta*hlp_out)
    A[1, 1] = (-0.5*(1+(beta/ep)))*dph_identity + adlp_in - ((beta/ep)*adlp_out)

    @bempp.api.real_callable
    def d_green_func(x, n, domain_index, result):
        # Negated normal derivative of the summed free-space Green's function.
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        const = -1./(4.*np.pi*ep_in)
        result[:] = (-1.0)*const*np.sum(q*np.dot(x-x_q, n)/(nrm**3))

    @bempp.api.real_callable
    def green_func(x, n, domain_index, result):
        # Negated summed free-space Green's function (Coulomb potential).
        nrm = np.sqrt((x[0]-x_q[:,0])**2 + (x[1]-x_q[:,1])**2 + (x[2]-x_q[:,2])**2)
        result[:] = (-1.0)*np.sum(q/nrm)/(4.*np.pi*ep_in)

    rhs_1 = bempp.api.GridFunction(dirichl_space, fun=green_func)
    rhs_2 = bempp.api.GridFunction(dirichl_space, fun=d_green_func)
    return A, rhs_1, rhs_2
| 45.731707
| 136
| 0.693013
| 1,480
| 9,375
| 4.131081
| 0.067568
| 0.143278
| 0.11122
| 0.125613
| 0.900556
| 0.859339
| 0.833497
| 0.806183
| 0.774289
| 0.745175
| 0
| 0.033065
| 0.16448
| 9,375
| 205
| 137
| 45.731707
| 0.747479
| 0.005867
| 0
| 0.421769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.054422
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
492c4742de47fef56ab716bf9df548bbb54cdf64
| 62
|
py
|
Python
|
what/models/detection/ssd/ssd/__init__.py
|
wuhanstudio/whitebox-adversarial-toolbox
|
3c6eaecc130fa987bc470225e259d0b4b58017ce
|
[
"MIT"
] | 2
|
2022-02-06T17:25:31.000Z
|
2022-03-25T13:39:48.000Z
|
what/models/detection/ssd/ssd/__init__.py
|
wuhanstudio/whitebox-adversarial-toolbox
|
3c6eaecc130fa987bc470225e259d0b4b58017ce
|
[
"MIT"
] | null | null | null |
what/models/detection/ssd/ssd/__init__.py
|
wuhanstudio/whitebox-adversarial-toolbox
|
3c6eaecc130fa987bc470225e259d0b4b58017ce
|
[
"MIT"
] | null | null | null |
from what.models.detection.ssd.ssd.ssd import SSD, GraphPath
| 20.666667
| 60
| 0.806452
| 10
| 62
| 5
| 0.7
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 62
| 2
| 61
| 31
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
496e706ce2281410ec0594641f157d57b584ded4
| 45
|
py
|
Python
|
__init__.py
|
projectgus/solar_usage
|
d283ee8a6ef91b6169219a5978e7e7dc180c8214
|
[
"MIT"
] | 1
|
2022-02-01T01:15:26.000Z
|
2022-02-01T01:15:26.000Z
|
__init__.py
|
projectgus/solar_usage
|
d283ee8a6ef91b6169219a5978e7e7dc180c8214
|
[
"MIT"
] | null | null | null |
__init__.py
|
projectgus/solar_usage
|
d283ee8a6ef91b6169219a5978e7e7dc180c8214
|
[
"MIT"
] | null | null | null |
from . import solar_usage
# NOTE(review): importing this package immediately runs the application —
# package import appears to be the intended entry point; confirm callers
# expect this side effect.
solar_usage.main()
| 15
| 25
| 0.8
| 7
| 45
| 4.857143
| 0.714286
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 2
| 26
| 22.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
4970f299e77842368cbe5730084d4662a3265355
| 42
|
py
|
Python
|
bumpfontversion/__init__.py
|
m4rc1e/bumpfontversion
|
2cfb5b779875d84edc80154a27d6a28f5907ebb1
|
[
"Apache-2.0"
] | 1
|
2021-07-16T14:41:23.000Z
|
2021-07-16T14:41:23.000Z
|
bumpfontversion/__init__.py
|
m4rc1e/bumpfontversion
|
2cfb5b779875d84edc80154a27d6a28f5907ebb1
|
[
"Apache-2.0"
] | 5
|
2021-08-05T10:57:21.000Z
|
2022-03-30T11:26:40.000Z
|
bumpfontversion/__init__.py
|
m4rc1e/bumpfontversion
|
2cfb5b779875d84edc80154a27d6a28f5907ebb1
|
[
"Apache-2.0"
] | null | null | null |
from bumpfontversion.__main__ import main
| 21
| 41
| 0.880952
| 5
| 42
| 6.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
498530866a5b5c6de0552eaf419abf3ca8e45b13
| 14,588
|
py
|
Python
|
src/onevision/cv/imgproc/color/yuv.py
|
phlong3105/onevision
|
90552b64df7213e7fbe23c80ffd8a89583289433
|
[
"MIT"
] | 2
|
2022-03-28T09:46:38.000Z
|
2022-03-28T14:12:32.000Z
|
src/onevision/cv/imgproc/color/yuv.py
|
phlong3105/onevision
|
90552b64df7213e7fbe23c80ffd8a89583289433
|
[
"MIT"
] | null | null | null |
src/onevision/cv/imgproc/color/yuv.py
|
phlong3105/onevision
|
90552b64df7213e7fbe23c80ffd8a89583289433
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""YUV color space.
"""
from __future__ import annotations
import cv2
import numpy as np
import torch
from multipledispatch import dispatch
from torch import nn
from torch import Tensor
from torch.nn import functional as F
from onevision.cv.imgproc.color.rgb import bgr_to_rgb
from onevision.cv.imgproc.color.rgb import rgb_to_bgr
from onevision.cv.utils import batch_image_processing
from onevision.cv.utils import channel_last_processing
from onevision.factory import TRANSFORMS
from onevision.type import ListOrTuple2T
from onevision.type import TensorOrArray
# Names exported by `from ... import *`: the functional converters plus
# their class-based counterparts defined later in this module.
__all__ = [
    "bgr_to_yuv",
    "rgb_to_yuv",
    "rgb_to_yuv420",
    "rgb_to_yuv422",
    "yuv420_to_rgb",
    "yuv422_to_rgb",
    "yuv_to_rgb",
    "BgrToYuv",
    "RgbToYuv",
    "RgbToYuv420",
    "RgbToYuv422",
    "Yuv420ToRgb",
    "Yuv422ToRgb",
    "YuvToBgr",
    "YuvToRgb",
]
# MARK: - Functional
@dispatch(Tensor)
def bgr_to_yuv(image: Tensor) -> Tensor:
    """Convert a BGR tensor image to YUV. Image data is assumed to be in
    the range of [0.0, 1.0].

    Args:
        image (Tensor[B, 3, H, W]):
            BGR image to be converted to YUV.

    Returns:
        Tensor[B, 3, H, W]:
            YUV version of the image.
    """
    # Reuse the RGB path: reorder channels first, then convert.
    return rgb_to_yuv(bgr_to_rgb(image))
@batch_image_processing
@channel_last_processing
@dispatch(np.ndarray)
def bgr_to_yuv(image: np.ndarray) -> np.ndarray:
    """Convert a BGR numpy image to YUV. Image data is assumed to be in
    the range of [0.0, 1.0].

    Args:
        image (np.ndarray[B, 3, H, W]):
            BGR image to be converted to YUV.

    Returns:
        np.ndarray[B, 3, H, W]:
            YUV version of the image.
    """
    # Delegate the conversion to OpenCV.
    converted = cv2.cvtColor(image, cv2.COLOR_BGR2YUV)
    return converted
@dispatch(Tensor)
def rgb_to_yuv(image: Tensor) -> Tensor:
    """Convert an RGB tensor image to YUV using fixed per-channel weights.
    Image data is assumed to be in the range of [0.0, 1.0].

    Args:
        image (Tensor[B, 3, H, W]):
            RGB image to be converted to YUV.

    Returns:
        Tensor[B, 3, H, W]:
            YUV version of the image.
    """
    red = image[..., 0, :, :]
    green = image[..., 1, :, :]
    blue = image[..., 2, :, :]
    # Weighted sums of the RGB channels (classic analog-YUV coefficients).
    luma = 0.299 * red + 0.587 * green + 0.114 * blue
    chroma_u = -0.147 * red - 0.289 * green + 0.436 * blue
    chroma_v = 0.615 * red - 0.515 * green - 0.100 * blue
    return torch.stack([luma, chroma_u, chroma_v], -3)
@batch_image_processing
@channel_last_processing
@dispatch(np.ndarray)
def rgb_to_yuv(image: np.ndarray) -> np.ndarray:
    """Convert an RGB numpy image to YUV. Image data is assumed to be in
    the range of [0.0, 1.0].

    Args:
        image (np.ndarray[B, 3, H, W]):
            RGB image to be converted to YUV.

    Returns:
        np.ndarray[B, 3, H, W]:
            YUV version of the image.
    """
    # Delegate the conversion to OpenCV.
    converted = cv2.cvtColor(image, cv2.COLOR_RGB2YUV)
    return converted
def rgb_to_yuv420(image: Tensor) -> ListOrTuple2T[Tensor]:
    """Convert an RGB image to YUV 420 (chroma subsampled 2x2). Image data
    is assumed to be in the range of [0.0, 1.0]. Both H and W must be even,
    since the UV planes are average-pooled with a (2, 2) window. This
    function will output chroma siting [0.5, 0.5].

    Args:
        image (Tensor[B, 3, H, W]):
            RGB image to be converted to YUV.

    Returns:
        A Tensor containing the Y plane with shape [*, 1, H, W].
        A Tensor containing the UV planes with shape [*, 2, H/2, W/2].

    Raises:
        TypeError: If ``image`` is not a Tensor.
        ValueError: If the shape is not [*, 3, H, W] with even H and W.
    """
    if not isinstance(image, Tensor):
        raise TypeError(f"`image` must be a `Tensor`. But got: {type(image)}.")
    shape = image.shape
    if len(shape) < 3 or shape[-3] != 3:
        raise ValueError(f"`image` must have a shape of [*, 3, H, W]. "
                         f"But got: {shape}.")
    if len(shape) < 2 or shape[-2] % 2 == 1 or shape[-1] % 2 == 1:
        raise ValueError(f"`image` H, W must be evenly divisible by 2. "
                         f"But got: {shape}.")
    yuv = rgb_to_yuv(image)
    luma = yuv[..., :1, :, :]
    chroma = F.avg_pool2d(yuv[..., 1:3, :, :], (2, 2))
    return luma, chroma
def rgb_to_yuv422(image: Tensor) -> ListOrTuple2T[Tensor]:
    """Convert an RGB image to YUV 422 (subsampled).

    Image data is assumed to be in the range of [0.0, 1.0]. Input needs to
    be padded to be evenly divisible by 2 vertically. This function will
    output chroma siting (0.5).

    Args:
        image (Tensor[B, 3, H, W]):
            RGB image to be converted to YUV.

    Returns:
        A Tensor containing the Y plane with shape [*, 1, H, W].
        A Tensor containing the UV planes with shape [*, 2, H, W/2].
    """
    if not isinstance(image, Tensor):
        raise TypeError(f"`image` must be a Tensor. But got: {type(image)}.")
    if len(image.shape) < 3 or image.shape[-3] != 3:
        raise ValueError(f"`image` must have a shape of [*, 3, H, W]. "
                         f"But got: {image.shape}.")
    # NOTE(review): 422 only halves the width, yet both H and W are
    # validated for divisibility by 2 here — confirm this is intended.
    odd_height = image.shape[-2] % 2 != 0
    odd_width  = image.shape[-1] % 2 != 0
    if len(image.shape) < 2 or odd_height or odd_width:
        raise ValueError(f"`image` H, W must be evenly divisible by 2. "
                         f"But got: {image.shape}.")
    yuvimage = rgb_to_yuv(image)
    luma   = yuvimage[..., :1, :, :]
    # Average-pool the chroma planes 1x2 to halve only the width.
    chroma = F.avg_pool2d(yuvimage[..., 1:3, :, :], (1, 2))
    return luma, chroma
def yuv420_to_rgb(image_y: Tensor, image_uv: Tensor) -> Tensor:
    """Convert a YUV420 image to RGB.

    Image data is assumed to be in the range of [0.0, 1.0] for luma and
    [-0.5, 0.5] for chroma. Input needs to be padded to be evenly divisible
    by 2 horizontally and vertically. This function assumes chroma siting
    is [0.5, 0.5].

    Args:
        image_y (Tensor[B, 1, H, W]):
            Y (luma) image plane to be converted to RGB.
        image_uv (Tensor[B, 2, H/2, W/2]):
            UV (chroma) image planes to be converted to RGB.

    Returns:
        rgb (Tensor[B, 3, H, W]):
            RGB version of the image.

    Raises:
        TypeError: If either input is not a Tensor.
        ValueError: If the plane shapes are inconsistent.
    """
    # Fix: the error messages previously said "`image`" for both inputs;
    # they now name the actual offending parameter.
    if not isinstance(image_y, Tensor):
        raise TypeError(f"`image_y` must be a `Tensor`. But got: {type(image_y)}.")
    if not isinstance(image_uv, Tensor):
        raise TypeError(f"`image_uv` must be a `Tensor`. But got: {type(image_uv)}.")
    if len(image_y.shape) < 3 or image_y.shape[-3] != 1:
        raise ValueError(f"`image_y` must have a shape of [*, 1, H, W]. "
                         f"But got: {image_y.shape}.")
    if len(image_uv.shape) < 3 or image_uv.shape[-3] != 2:
        raise ValueError(f"`image_uv` must have a shape of [*, 2, H/2, W/2]. "
                         f"But got: {image_uv.shape}.")
    if (len(image_y.shape) < 2 or
            image_y.shape[-2] % 2 == 1 or
            image_y.shape[-1] % 2 == 1):
        raise ValueError(f"`image_y` H, W must be evenly divisible by 2. "
                         f"But got: {image_y.shape}.")
    if (len(image_uv.shape) < 2 or
            len(image_y.shape) < 2 or
            image_y.shape[-2] / image_uv.shape[-2] != 2 or
            image_y.shape[-1] / image_uv.shape[-1] != 2):
        raise ValueError(f"`image_uv` H, W must be half the size of the luma "
                         f"plane. But got: {image_y.shape} and {image_uv.shape}.")
    # First upsample the chroma planes to full resolution (nearest
    # neighbour via repeat_interleave on both spatial dims).
    yuv444image = torch.cat([
        image_y, image_uv.repeat_interleave(2, dim=-1).repeat_interleave(2, dim=-2)
    ], dim=-3)
    # Then convert the yuv444 image.
    return yuv_to_rgb(yuv444image)
def yuv422_to_rgb(image_y: Tensor, image_uv: Tensor) -> Tensor:
    """Convert a YUV422 image to RGB.

    Image data is assumed to be in the range of [0.0, 1.0] for luma and
    [-0.5, 0.5] for chroma. Input needs to be padded to be evenly divisible
    by 2 vertically. This function assumes chroma siting is (0.5).

    Args:
        image_y (Tensor[B, 1, H, W]):
            Y (luma) image plane to be converted to RGB.
        image_uv (Tensor[B, 2, H, W/2]):
            UV (chroma) image planes to be converted to RGB.

    Returns:
        rgb (Tensor[B, 3, H, W]):
            RGB version of the image.

    Raises:
        TypeError: If either input is not a Tensor.
        ValueError: If the plane shapes are inconsistent.
    """
    if not isinstance(image_y, Tensor):
        raise TypeError(f"`image_y` must be a `Tensor`. But got: {type(image_y)}.")
    # Fix: this message previously said "`image_y`" while checking image_uv.
    if not isinstance(image_uv, Tensor):
        raise TypeError(f"`image_uv` must be a `Tensor`. But got: {type(image_uv)}.")
    if len(image_y.shape) < 3 or image_y.shape[-3] != 1:
        raise ValueError(f"`image_y` must have a shape of [*, 1, H, W]. "
                         f"But got: {image_y.shape}.")
    if len(image_uv.shape) < 3 or image_uv.shape[-3] != 2:
        raise ValueError(f"`image_uv` must have a shape of [*, 2, H, W/2]. "
                         f"But got: {image_uv.shape}.")
    if (len(image_y.shape) < 2 or
            image_y.shape[-2] % 2 == 1 or
            image_y.shape[-1] % 2 == 1):
        raise ValueError(f"`image_y` H, W must be evenly divisible by 2. "
                         f"But got: {image_y.shape}.")
    if (len(image_uv.shape) < 2 or
            len(image_y.shape) < 2 or
            image_y.shape[-1] / image_uv.shape[-1] != 2):
        raise ValueError(f"`image_uv` W must be half the size of the luma "
                         f"plane. But got: {image_y.shape} and {image_uv.shape}")
    # First upsample the chroma planes along the width only (422 keeps
    # full vertical chroma resolution).
    yuv444image = torch.cat([
        image_y, image_uv.repeat_interleave(2, dim=-1)
    ], dim=-3)
    # Then convert the yuv444 image.
    return yuv_to_rgb(yuv444image)
@dispatch(Tensor)
def yuv_to_bgr(image: Tensor) -> Tensor:
    """Convert an YUV image to BGR. Image data is assumed to be in the range
    of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma.

    Args:
        image (Tensor[B, 3, H, W]):
            YUV Image to be converted to BGR.

    Returns:
        bgr (Tensor[B, 3, H, W]):
            BGR version of the image.
    """
    # Convert via RGB, then swap the channel order.
    rgb = yuv_to_rgb(image)
    return rgb_to_bgr(rgb)
@batch_image_processing
@channel_last_processing
@dispatch(np.ndarray)
def yuv_to_bgr(image: np.ndarray) -> np.ndarray:
    """Convert an YUV image to BGR. Image data is assumed to be in the range
    of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma.

    Args:
        image (np.ndarray[B, 3, H, W]):
            YUV Image to be converted to BGR.

    Returns:
        bgr (np.ndarray[B, 3, H, W]):
            BGR version of the image.
    """
    # The decorators adapt the batched channel-first input to the single
    # channel-last image layout that OpenCV expects.
    return cv2.cvtColor(image, cv2.COLOR_YUV2BGR)
@dispatch(Tensor)
def yuv_to_rgb(image: Tensor) -> Tensor:
    """Convert an YUV image to RGB. Image data is assumed to be in the range
    of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma.

    Args:
        image (Tensor[B, 3, H, W]):
            YUV Image to be converted to RGB.

    Returns:
        rgb (Tensor[B, 3, H, W]):
            RGB version of the image.
    """
    y = image[..., 0, :, :]
    u = image[..., 1, :, :]
    v = image[..., 2, :, :]
    r = y + 1.14 * v  # coefficient for u is 0
    g = y + -0.396 * u - 0.581 * v
    b = y + 2.029 * u  # coefficient for v is 0
    rgb = torch.stack([r, g, b], -3)
    return rgb
@batch_image_processing
@channel_last_processing
@dispatch(np.ndarray)
def yuv_to_rgb(image: np.ndarray) -> np.ndarray:
    """Convert an YUV image to RGB. Image data is assumed to be in the range
    of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma.

    Args:
        image (np.ndarray[B, 3, H, W]):
            YUV Image to be converted to RGB.

    Returns:
        rgb (np.ndarray[B, 3, H, W]):
            RGB version of the image.
    """
    # The decorators adapt the batched channel-first input to the single
    # channel-last image layout that OpenCV expects.
    return cv2.cvtColor(image, cv2.COLOR_YUV2RGB)
# MARK: - Modules
@TRANSFORMS.register(name="bgr_to_yuv")
class BgrToYuv(nn.Module):
    """Module wrapper around :func:`bgr_to_yuv`.

    Converts an image from BGR to YUV. Image data is assumed to be in the
    range of [0.0, 1.0].

    Reference:
        [1] https://es.wikipedia.org/wiki/YUV#RGB_a_Y'UV
    """

    # MARK: Forward Pass

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        yuv = bgr_to_yuv(image)
        return yuv
@TRANSFORMS.register(name="rgb_to_yuv")
class RgbToYuv(nn.Module):
    """Module wrapper around :func:`rgb_to_yuv`.

    Converts an image from RGB to YUV. Image data is assumed to be in the
    range of [0.0, 1.0].

    Reference:
        [1] https://es.wikipedia.org/wiki/YUV#RGB_a_Y'UV
    """

    # MARK: Forward Pass

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        yuv = rgb_to_yuv(image)
        return yuv
@TRANSFORMS.register(name="rgb_to_yuv420")
class RgbToYuv420(nn.Module):
    """Module wrapper around :func:`rgb_to_yuv420`.

    Converts an image from RGB to YUV420. Image data is assumed to be in
    the range of [0.0, 1.0]. Width and Height must be evenly divisible
    by 2.

    Reference:
        [1] https://es.wikipedia.org/wiki/YUV#RGB_a_Y'UV
    """

    # MARK: Forward Pass

    def forward(self, yuv_input: Tensor) -> ListOrTuple2T[Tensor]:
        planes = rgb_to_yuv420(yuv_input)
        return planes
@TRANSFORMS.register(name="rgb_to_yuv422")
class RgbToYuv422(nn.Module):
    """Convert an image from RGB to YUV422. Image data is assumed to be in
    the range of [0.0, 1.0]. Width evenly divisible by 2.

    Reference:
        [1] https://es.wikipedia.org/wiki/YUV#RGB_a_Y'UV
    """

    # MARK: Forward Pass

    def forward(self, yuv_input: Tensor) -> ListOrTuple2T[Tensor]:
        return rgb_to_yuv422(yuv_input)
@TRANSFORMS.register(name="yuv420_to_rgb")
class Yuv420ToRgb(nn.Module):
    """Module wrapper around :func:`yuv420_to_rgb`.

    Converts an image from YUV420 to RGB. Image data is assumed to be in
    the range of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma. Width and
    Height must be evenly divisible by 2.
    """

    # MARK: Forward Pass

    def forward(self, input_y: Tensor, input_uv: Tensor) -> Tensor:  # skipcq: PYL-R0201
        rgb = yuv420_to_rgb(input_y, input_uv)
        return rgb
@TRANSFORMS.register(name="yuv422_to_rgb")
class Yuv422ToRgb(nn.Module):
    """Module wrapper around :func:`yuv422_to_rgb`.

    Converts an image from YUV422 to RGB. Image data is assumed to be in
    the range of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma. Width must
    be evenly divisible by 2.
    """

    # MARK: Forward Pass

    def forward(self, input_y: Tensor, input_uv: Tensor) -> Tensor:
        rgb = yuv422_to_rgb(input_y, input_uv)
        return rgb
@TRANSFORMS.register(name="yuv_to_bgr")
class YuvToBgr(nn.Module):
    """Convert an image from YUV to BGR. Image data is assumed to be in the
    range of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma.
    """

    # MARK: Forward Pass

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        return yuv_to_bgr(image)
@TRANSFORMS.register(name="yuv_to_rgb")
class YuvToRgb(nn.Module):
    """Module wrapper around :func:`yuv_to_rgb`.

    Converts an image from YUV to RGB. Image data is assumed to be in the
    range of [0.0, 1.0] for luma and [-0.5, 0.5] for chroma.
    """

    # MARK: Forward Pass

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        rgb = yuv_to_rgb(image)
        return rgb
| 31.304721
| 88
| 0.596312
| 2,310
| 14,588
| 3.674459
| 0.076623
| 0.019793
| 0.007776
| 0.042413
| 0.85721
| 0.829995
| 0.821277
| 0.801602
| 0.777804
| 0.746701
| 0
| 0.043679
| 0.273375
| 14,588
| 465
| 89
| 31.372043
| 0.757075
| 0.38703
| 0
| 0.430769
| 0
| 0
| 0.177396
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102564
| false
| 0
| 0.076923
| 0.041026
| 0.323077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b8e9de306e49f06b80af3d03b16cbcef69839dd7
| 166
|
py
|
Python
|
app/meda_sync_search/transformers/str_transformer.py
|
DEV3L/meda-sync-search
|
c67feb2f2b54ba153dc50e9aba5058d4e7948c92
|
[
"Beerware"
] | null | null | null |
app/meda_sync_search/transformers/str_transformer.py
|
DEV3L/meda-sync-search
|
c67feb2f2b54ba153dc50e9aba5058d4e7948c92
|
[
"Beerware"
] | null | null | null |
app/meda_sync_search/transformers/str_transformer.py
|
DEV3L/meda-sync-search
|
c67feb2f2b54ba153dc50e9aba5058d4e7948c92
|
[
"Beerware"
] | null | null | null |
import fuzzy
class StrTransformer:
    """Wrap a string and expose phonetic transformations of it."""

    def __init__(self, _str):
        # The raw string to transform.
        self._str = _str

    @property
    def fuzzy(self):
        # NYSIIS phonetic encoding of the wrapped string. The property name
        # shadows the `fuzzy` module on instances, but inside this body the
        # name resolves to the module via the global scope.
        return fuzzy.nysiis(self._str)
| 16.6
| 38
| 0.638554
| 20
| 166
| 4.9
| 0.55
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.271084
| 166
| 9
| 39
| 18.444444
| 0.809917
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
b8eebc26344e767d2761ad6061ca3cf443574d9c
| 253
|
py
|
Python
|
mmseg/models/segmentors/__init__.py
|
XDong18/mmsegmentation
|
9a14288a654b66babfdfe4f6fa77edc4cd127d41
|
[
"Apache-2.0"
] | null | null | null |
mmseg/models/segmentors/__init__.py
|
XDong18/mmsegmentation
|
9a14288a654b66babfdfe4f6fa77edc4cd127d41
|
[
"Apache-2.0"
] | null | null | null |
mmseg/models/segmentors/__init__.py
|
XDong18/mmsegmentation
|
9a14288a654b66babfdfe4f6fa77edc4cd127d41
|
[
"Apache-2.0"
] | null | null | null |
from .cascade_encoder_decoder import CascadeEncoderDecoder
from .encoder_decoder import EncoderDecoder
from .multi_head_encoder_decoder import Multi_head_EncoderDecoder
__all__ = ['EncoderDecoder', 'CascadeEncoderDecoder', 'Multi_head_EncoderDecoder']
| 42.166667
| 82
| 0.873518
| 26
| 253
| 7.961538
| 0.384615
| 0.202899
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071146
| 253
| 5
| 83
| 50.6
| 0.880851
| 0
| 0
| 0
| 0
| 0
| 0.237154
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b8ffd2ff85c47bef29fcdada2125d014ad9faa81
| 4,973
|
py
|
Python
|
src/parsing/arg-kp.py
|
UKPLab/acl2022-structure-batches
|
d7e116c1254ad00d8b59da3116043424a30f6f64
|
[
"Apache-2.0"
] | null | null | null |
src/parsing/arg-kp.py
|
UKPLab/acl2022-structure-batches
|
d7e116c1254ad00d8b59da3116043424a30f6f64
|
[
"Apache-2.0"
] | null | null | null |
src/parsing/arg-kp.py
|
UKPLab/acl2022-structure-batches
|
d7e116c1254ad00d8b59da3116043424a30f6f64
|
[
"Apache-2.0"
] | null | null | null |
import glob
import pandas
from parsing.parsing_util import save_topic_folds
fold_topics = [
[
['we should ban the use of child actors', 'we should ban private military companies', 'we should subsidize space exploration', 'we should end affirmative action', 'we should adopt an austerity regime', 'we should ban human cloning', 'we should abandon marriage', 'we should fight for the abolition of nuclear weapons', 'we should close guantanamo bay detention camp', 'we should abolish capital punishment', 'we should abandon the use of school uniform', 'we should fight urbanization', 'we should legalize sex selection', 'we should prohibit women in combat', 'we should adopt libertarianism', 'homeschooling should be banned', 'we should legalize prostitution'],
['we should prohibit flag burning', 'the vow of celibacy should be abandoned', 'we should legalize cannabis', 'we should abolish intellectual property rights'],
['we should adopt atheism', 'assisted suicide should be a criminal offence', 'we should subsidize vocational education', 'we should subsidize journalism', 'we should end mandatory retirement', 'we should introduce compulsory voting', 'we should abolish the right to keep and bear arms']
],
[
['we should abandon the use of school uniform', 'we should legalize cannabis', 'we should legalize prostitution', 'we should subsidize space exploration', 'we should adopt libertarianism', 'we should subsidize vocational education', 'we should adopt an austerity regime', 'we should abolish the right to keep and bear arms', 'we should close guantanamo bay detention camp', 'we should subsidize journalism', 'the vow of celibacy should be abandoned', 'we should end affirmative action', 'we should adopt atheism', 'we should ban human cloning', 'we should abandon marriage', 'we should fight for the abolition of nuclear weapons', 'homeschooling should be banned'],
['we should introduce compulsory voting', 'we should prohibit flag burning', 'we should ban private military companies', 'we should abolish intellectual property rights'],
['we should ban the use of child actors', 'assisted suicide should be a criminal offence', 'we should prohibit women in combat', 'we should end mandatory retirement', 'we should fight urbanization', 'we should abolish capital punishment', 'we should legalize sex selection']
],
[
['we should ban the use of child actors', 'we should prohibit flag burning', 'we should end affirmative action', 'we should abolish intellectual property rights', 'we should ban human cloning', 'we should end mandatory retirement', 'we should legalize sex selection', 'we should abandon marriage', 'we should ban private military companies', 'we should adopt atheism', 'we should close guantanamo bay detention camp', 'we should abandon the use of school uniform', 'we should introduce compulsory voting', 'we should abolish capital punishment', 'we should subsidize space exploration', 'we should subsidize journalism', 'we should adopt libertarianism'],
['we should legalize cannabis', 'homeschooling should be banned', 'assisted suicide should be a criminal offence', 'the vow of celibacy should be abandoned'],
['we should fight for the abolition of nuclear weapons', 'we should adopt an austerity regime', 'we should legalize prostitution', 'we should fight urbanization', 'we should prohibit women in combat', 'we should subsidize vocational education', 'we should abolish the right to keep and bear arms']
],
[
['homeschooling should be banned', 'we should fight urbanization', 'we should adopt libertarianism', 'we should end mandatory retirement', 'we should subsidize vocational education', 'we should adopt an austerity regime', 'we should introduce compulsory voting', 'we should subsidize space exploration', 'the vow of celibacy should be abandoned', 'we should ban private military companies', 'we should abolish the right to keep and bear arms', 'we should abandon the use of school uniform', 'we should subsidize journalism', 'we should ban the use of child actors', 'we should adopt atheism', 'assisted suicide should be a criminal offence', 'we should fight for the abolition of nuclear weapons'],
['we should ban human cloning', 'we should prohibit women in combat', 'we should legalize cannabis', 'we should prohibit flag burning'],
['we should abolish intellectual property rights', 'we should close guantanamo bay detention camp', 'we should legalize prostitution', 'we should legalize sex selection', 'we should end affirmative action', 'we should abolish capital punishment', 'we should abandon marriage']
]
]
# Output location and task identifier for the generated folds.
data_path = "../../data/"
task = "arg-kp"

# Load the ArgKP key-point matching dataset.
samples = pandas.read_csv(
    "../../data/IBM_Arg_KP/ArgKP_dataset.csv"
)
# Normalize column names and add a row-index id column before saving
# the per-topic train/dev/test folds defined in `fold_topics`.
samples.columns = ["topic","sentence1","sentence2","stance","label"]
samples["id"] = samples.index
save_topic_folds(samples, fold_topics, data_path, task)
| 118.404762
| 706
| 0.751056
| 679
| 4,973
| 5.481591
| 0.163476
| 0.214938
| 0.035465
| 0.015046
| 0.907845
| 0.896024
| 0.764643
| 0.585975
| 0.414025
| 0.270822
| 0
| 0.00048
| 0.162678
| 4,973
| 41
| 707
| 121.292683
| 0.893372
| 0
| 0
| 0.090909
| 0
| 0
| 0.819626
| 0.007842
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
77028038be2afbb9f871b101ee70e5bab977f5fd
| 28
|
py
|
Python
|
__init__.py
|
FarmCodeGary/pelican-yaml-metadata
|
b26634656ef33197e27ff1eb4e13733cc97647c8
|
[
"MIT"
] | null | null | null |
__init__.py
|
FarmCodeGary/pelican-yaml-metadata
|
b26634656ef33197e27ff1eb4e13733cc97647c8
|
[
"MIT"
] | null | null | null |
__init__.py
|
FarmCodeGary/pelican-yaml-metadata
|
b26634656ef33197e27ff1eb4e13733cc97647c8
|
[
"MIT"
] | null | null | null |
from .yamlmetadata import *
| 14
| 27
| 0.785714
| 3
| 28
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7743f60c6077222d7cbbde52915a7d7598bbb39f
| 149
|
py
|
Python
|
docs/conf.py
|
parth-choudhary/drum
|
fd62cfc1d7ca36fe2767c7eda9a65f15c74afb34
|
[
"BSD-2-Clause"
] | 265
|
2015-01-01T10:51:39.000Z
|
2022-01-29T22:42:52.000Z
|
docs/conf.py
|
parth-choudhary/drum
|
fd62cfc1d7ca36fe2767c7eda9a65f15c74afb34
|
[
"BSD-2-Clause"
] | 39
|
2015-01-20T01:23:42.000Z
|
2018-05-28T04:11:08.000Z
|
docs/conf.py
|
parth-choudhary/drum
|
fd62cfc1d7ca36fe2767c7eda9a65f15c74afb34
|
[
"BSD-2-Clause"
] | 104
|
2015-01-20T01:13:20.000Z
|
2022-03-26T20:54:10.000Z
|
from __future__ import unicode_literals
# This file is automatically generated via sphinx-me
from sphinx_me import setup_conf; setup_conf(globals())
| 37.25
| 55
| 0.838926
| 22
| 149
| 5.318182
| 0.727273
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114094
| 149
| 3
| 56
| 49.666667
| 0.886364
| 0.33557
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7746f99f85eee310d406451a5b733a129379c77c
| 48
|
py
|
Python
|
src/ralph/urls/__init__.py
|
DoNnMyTh/ralph
|
97b91639fa68965ad3fd9d0d2652a6545a2a5b72
|
[
"Apache-2.0"
] | 1
|
2021-09-14T01:52:23.000Z
|
2021-09-14T01:52:23.000Z
|
src/ralph/urls/__init__.py
|
hq-git/ralph
|
e2448caf02d6e5abfd81da2cff92aefe0a534883
|
[
"Apache-2.0"
] | 1
|
2019-05-27T11:57:15.000Z
|
2019-05-27T11:57:15.000Z
|
src/ralph/urls/__init__.py
|
hq-git/ralph
|
e2448caf02d6e5abfd81da2cff92aefe0a534883
|
[
"Apache-2.0"
] | null | null | null |
from ralph.urls.base import urlpatterns # noqa
| 24
| 47
| 0.791667
| 7
| 48
| 5.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 1
| 48
| 48
| 0.926829
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
77565c10408044c9941d17b23c0cd1ff3ae92b91
| 3,396
|
py
|
Python
|
common/utilities/tests/test_certs.py
|
tomzo/integration-adaptors
|
d4f296d3e44475df6f69a78a27fac6ed5b67513b
|
[
"Apache-2.0"
] | 15
|
2019-08-06T16:08:12.000Z
|
2021-05-24T13:14:39.000Z
|
common/utilities/tests/test_certs.py
|
tomzo/integration-adaptors
|
d4f296d3e44475df6f69a78a27fac6ed5b67513b
|
[
"Apache-2.0"
] | 179
|
2020-07-01T08:53:50.000Z
|
2022-03-11T14:18:39.000Z
|
common/utilities/tests/test_certs.py
|
tomzo/integration-adaptors
|
d4f296d3e44475df6f69a78a27fac6ed5b67513b
|
[
"Apache-2.0"
] | 7
|
2019-11-12T15:26:34.000Z
|
2021-04-11T07:23:56.000Z
|
import pathlib
import tempfile
import unittest
from utilities import certs
_TEST_FILE_CONTENTS = 'test-file-contents'
class TestCerts(unittest.TestCase):
    """Tests for ``certs.Certs.create_certs_files``.

    Each test runs inside a fresh temporary directory so no files leak
    between cases.
    """

    def test_create_certs_files_creates_folders(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            certs.Certs.create_certs_files(temp_dir)
            self.assertTrue((pathlib.Path(temp_dir) / 'data' / 'certs').exists(), msg='data/certs folders not created')

    def test_create_certs_files_creates_private_key(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            returned_certs = certs.Certs.create_certs_files(temp_dir, private_key=_TEST_FILE_CONTENTS)
            expected_private_key_filepath = pathlib.Path(temp_dir) / 'data' / 'certs' / 'client.key'
            self.assertEqual(str(expected_private_key_filepath), returned_certs.private_key_path)
            # Fix: was assertTrue(read_text(), expected) — the expected value
            # was silently treated as the failure message, so the content was
            # never actually compared.
            self.assertEqual(expected_private_key_filepath.read_text(), _TEST_FILE_CONTENTS)

    def test_create_certs_files_creates_local_cert(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            returned_certs = certs.Certs.create_certs_files(temp_dir, local_cert=_TEST_FILE_CONTENTS)
            expected_local_cert_filepath = pathlib.Path(temp_dir) / 'data' / 'certs' / 'client.pem'
            self.assertEqual(str(expected_local_cert_filepath), returned_certs.local_cert_path)
            self.assertEqual(expected_local_cert_filepath.read_text(), _TEST_FILE_CONTENTS)

    def test_create_certs_files_creates_ca_certs(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            returned_certs = certs.Certs.create_certs_files(temp_dir, ca_certs=_TEST_FILE_CONTENTS)
            expected_ca_certs_filepath = pathlib.Path(temp_dir) / 'data' / 'certs' / 'ca_certs.pem'
            self.assertEqual(str(expected_ca_certs_filepath), returned_certs.ca_certs_path)
            self.assertEqual(expected_ca_certs_filepath.read_text(), _TEST_FILE_CONTENTS)

    def test_create_certs_files_creates_multiple_file(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            test_file_contents_1 = 'test-file-contents1'
            test_file_contents_2 = 'test-file-contents2'
            test_file_contents_3 = 'test-file-contents3'
            returned_certs = certs.Certs.create_certs_files(temp_dir, private_key=test_file_contents_1,
                                                            local_cert=test_file_contents_2,
                                                            ca_certs=test_file_contents_3)
            expected_private_key_filepath = pathlib.Path(temp_dir) / 'data' / 'certs' / 'client.key'
            self.assertEqual(str(expected_private_key_filepath), returned_certs.private_key_path)
            self.assertEqual(expected_private_key_filepath.read_text(), test_file_contents_1)
            expected_local_cert_filepath = pathlib.Path(temp_dir) / 'data' / 'certs' / 'client.pem'
            self.assertEqual(str(expected_local_cert_filepath), returned_certs.local_cert_path)
            self.assertEqual(expected_local_cert_filepath.read_text(), test_file_contents_2)
            expected_ca_certs_filepath = pathlib.Path(temp_dir) / 'data' / 'certs' / 'ca_certs.pem'
            self.assertEqual(str(expected_ca_certs_filepath), returned_certs.ca_certs_path)
            self.assertEqual(expected_ca_certs_filepath.read_text(), test_file_contents_3)
| 55.672131
| 119
| 0.716137
| 421
| 3,396
| 5.315914
| 0.123515
| 0.071492
| 0.121537
| 0.0563
| 0.852994
| 0.810098
| 0.771224
| 0.718052
| 0.718052
| 0.718052
| 0
| 0.004417
| 0.199941
| 3,396
| 60
| 120
| 56.6
| 0.819286
| 0
| 0
| 0.377778
| 0
| 0
| 0.068316
| 0
| 0
| 0
| 0
| 0
| 0.288889
| 1
| 0.111111
| false
| 0
| 0.088889
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
620c292634ed75d83bd9cecaf7e5c207135220fb
| 160
|
py
|
Python
|
frontend/views.py
|
dark0ghost/tp_django_react_test
|
86df391c272e17b45a1595c7c3536c535055119f
|
[
"MIT"
] | 2
|
2021-06-13T17:29:02.000Z
|
2021-11-29T08:18:20.000Z
|
frontend/views.py
|
dark0ghost/rgram
|
e459fe7ba1542993473bc0eb2c610d7f7d433d6d
|
[
"MIT"
] | 1
|
2021-04-15T20:14:36.000Z
|
2021-04-15T20:14:36.000Z
|
frontend/views.py
|
dark0ghost/rgram
|
e459fe7ba1542993473bc0eb2c610d7f7d433d6d
|
[
"MIT"
] | null | null | null |
from django.http import HttpRequest
from django.shortcuts import render
def get_react(request: HttpRequest):
    """Serve the compiled React frontend's index page for any request."""
    return render(request, "./build/index.html")
| 22.857143
| 48
| 0.78125
| 21
| 160
| 5.904762
| 0.714286
| 0.16129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 160
| 6
| 49
| 26.666667
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0.1125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
623e58a64fd2e785caf37aa5623238f9af7ddac2
| 56
|
py
|
Python
|
qt_material/resources/__init__.py
|
5yutan5/qt-material
|
63caf6755d1d00ea66c37d6583077c376a764435
|
[
"BSD-2-Clause"
] | 692
|
2020-12-06T17:30:05.000Z
|
2022-03-31T14:12:40.000Z
|
qt_material/resources/__init__.py
|
5yutan5/qt-material
|
63caf6755d1d00ea66c37d6583077c376a764435
|
[
"BSD-2-Clause"
] | 43
|
2020-12-06T04:19:02.000Z
|
2022-03-16T15:20:34.000Z
|
qt_material/resources/__init__.py
|
5yutan5/qt-material
|
63caf6755d1d00ea66c37d6583077c376a764435
|
[
"BSD-2-Clause"
] | 93
|
2020-12-10T08:26:25.000Z
|
2022-03-29T08:46:46.000Z
|
from .generate import ResourseGenerator, RESOURCES_PATH
| 28
| 55
| 0.875
| 6
| 56
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089286
| 56
| 1
| 56
| 56
| 0.941176
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6251f762661c2e82e27ec9486e83e9f89cebb517
| 489
|
py
|
Python
|
test_pyship/test_module_info.py
|
daobook/pyship
|
31b8e0b4c1cfc7677d418024f27642183cb1966d
|
[
"MIT"
] | 16
|
2020-10-28T02:49:39.000Z
|
2022-03-18T16:50:11.000Z
|
test_pyship/test_module_info.py
|
daobook/pyship
|
31b8e0b4c1cfc7677d418024f27642183cb1966d
|
[
"MIT"
] | 4
|
2020-12-07T23:20:09.000Z
|
2020-12-18T03:25:49.000Z
|
test_pyship/test_module_info.py
|
daobook/pyship
|
31b8e0b4c1cfc7677d418024f27642183cb1966d
|
[
"MIT"
] | 1
|
2022-01-26T11:26:00.000Z
|
2022-01-26T11:26:00.000Z
|
from semver import VersionInfo
from test_pyship import TstAppDirs, TST_APP_NAME
def test_module_info():
    """Placeholder test — the module-info assertions below are disabled."""
    # todo: use TargetAppInfo's get_module_info()
    # for version_string in ["0.0.1", "0.0.2"]:
    #     version = VersionInfo.parse(version_string)
    #     tst_app_dirs = TstAppDirs(TST_APP_NAME, version)
    #     module_info = ModuleInfo(TST_APP_NAME, tst_app_dirs.project_dir)
    #     print(module_info.version)
    #     assert module_info.version == version
    pass
| 28.764706
| 74
| 0.699387
| 67
| 489
| 4.791045
| 0.492537
| 0.093458
| 0.093458
| 0.124611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015464
| 0.206544
| 489
| 16
| 75
| 30.5625
| 0.811856
| 0.670757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
65a5f3d23cd25aee90d56e7f1071bf1547268236
| 278
|
py
|
Python
|
batchglm/api/data.py
|
le-ander/batchglm
|
31b905b99b6baa7c94b82550d6a74f00d81966ea
|
[
"BSD-3-Clause"
] | null | null | null |
batchglm/api/data.py
|
le-ander/batchglm
|
31b905b99b6baa7c94b82550d6a74f00d81966ea
|
[
"BSD-3-Clause"
] | null | null | null |
batchglm/api/data.py
|
le-ander/batchglm
|
31b905b99b6baa7c94b82550d6a74f00d81966ea
|
[
"BSD-3-Clause"
] | null | null | null |
from batchglm.data import design_matrix
from batchglm.data import constraint_matrix_from_dict, constraint_matrix_from_string, string_constraints_from_dict, \
constraint_system_from_star
from batchglm.data import view_coef_names, preview_coef_names, bin_continuous_covariate
| 55.6
| 117
| 0.888489
| 39
| 278
| 5.846154
| 0.487179
| 0.157895
| 0.210526
| 0.289474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079137
| 278
| 4
| 118
| 69.5
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
029669fb1c907cfc00872204ba351fb30da19646
| 30
|
py
|
Python
|
odoo_vps/__init__.py
|
mario21ic/odoo_vps
|
b18e4adf879ca781334042b546f2e3d065e90349
|
[
"MIT"
] | null | null | null |
odoo_vps/__init__.py
|
mario21ic/odoo_vps
|
b18e4adf879ca781334042b546f2e3d065e90349
|
[
"MIT"
] | null | null | null |
odoo_vps/__init__.py
|
mario21ic/odoo_vps
|
b18e4adf879ca781334042b546f2e3d065e90349
|
[
"MIT"
] | null | null | null |
import res_partner
import pdg
| 10
| 18
| 0.866667
| 5
| 30
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 2
| 19
| 15
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f30960c9f871efa41d32a96a569dbebb64fef9c8
| 390
|
py
|
Python
|
pydow/signals/__init__.py
|
gijswobben/pydow-2
|
e04c1fc37f344988b3a07c4f39b3c43edf3d5bba
|
[
"MIT"
] | null | null | null |
pydow/signals/__init__.py
|
gijswobben/pydow-2
|
e04c1fc37f344988b3a07c4f39b3c43edf3d5bba
|
[
"MIT"
] | null | null | null |
pydow/signals/__init__.py
|
gijswobben/pydow-2
|
e04c1fc37f344988b3a07c4f39b3c43edf3d5bba
|
[
"MIT"
] | null | null | null |
from blinker import signal
# Application-wide blinker signals. Each signal is registered under a name
# identical to its module-level variable, so subscribers may either import
# the object or look it up via signal(<name>).
signal_navigation_event, signal_state_update, signal_clear_input_field_event, signal_default_event = (
    signal(name)
    for name in ("signal_navigation_event",
                 "signal_state_update",
                 "signal_clear_input_field_event",
                 "signal_default_event")
)
__all__ = ["signal_navigation_event", "signal_state_update", "signal_clear_input_field_event", "signal_default_event"]
| 35.454545
| 118
| 0.846154
| 51
| 390
| 5.803922
| 0.254902
| 0.260135
| 0.212838
| 0.273649
| 0.851351
| 0.739865
| 0.739865
| 0.52027
| 0.331081
| 0
| 0
| 0
| 0.066667
| 390
| 10
| 119
| 39
| 0.813187
| 0
| 0
| 0
| 0
| 0
| 0.471795
| 0.271795
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b261679480f1c75cd9d6c501b09a4ab9d2891893
| 330
|
py
|
Python
|
Codewars/8kyu/return-the-day/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/return-the-day/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/return-the-day/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Table-driven fixed tests for whatday(): days 1-7 map to weekday names
# (1 == 'Sunday'); anything else yields the error message.
test.describe('Fixed tests')
fixed_cases = (
    (1, 'Sunday'),
    (2, 'Monday'),
    (3, 'Tuesday'),
    (8, 'Wrong, please enter a number between 1 and 7'),
    (20, 'Wrong, please enter a number between 1 and 7'),
)
for day_number, expected in fixed_cases:
    test.assert_equals(whatday(day_number), expected)
| 36.666667
| 79
| 0.745455
| 54
| 330
| 4.462963
| 0.481481
| 0.207469
| 0.33195
| 0.477178
| 0.290456
| 0.290456
| 0.290456
| 0.290456
| 0.290456
| 0
| 0
| 0.043771
| 0.1
| 330
| 8
| 80
| 41.25
| 0.767677
| 0.042424
| 0
| 0
| 0
| 0
| 0.375796
| 0
| 0
| 0
| 0
| 0
| 0.833333
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a238c06b26a018246a47faa9f7469c99f51710b2
| 181
|
py
|
Python
|
rpython/memory/test/test_generational_gc.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 381
|
2018-08-18T03:37:22.000Z
|
2022-02-06T23:57:36.000Z
|
rpython/memory/test/test_generational_gc.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 16
|
2018-09-22T18:12:47.000Z
|
2022-02-22T20:03:59.000Z
|
rpython/memory/test/test_generational_gc.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 55
|
2015-08-16T02:41:30.000Z
|
2022-03-20T20:33:35.000Z
|
from rpython.memory.test import test_semispace_gc
class TestGenerationalGC(test_semispace_gc.TestSemiSpaceGC):
    # Re-run the inherited semi-space GC test suite against GenerationGC.
    # The class-level import binds GenerationGC as the class attribute
    # ``GCClass`` without adding a module-level name; presumably the base
    # suite instantiates GCClass to pick the GC implementation — confirm
    # in TestSemiSpaceGC.
    from rpython.memory.gc.generation import GenerationGC as GCClass
| 36.2
| 68
| 0.856354
| 23
| 181
| 6.565217
| 0.608696
| 0.145695
| 0.225166
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093923
| 181
| 4
| 69
| 45.25
| 0.920732
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a23d70e1ba3afebfe9ebb568d1da68ea55764115
| 28,425
|
py
|
Python
|
src/the_tale/the_tale/game/cards/tests/test_requests.py
|
serhii73/the-tale
|
ee69b033f57fae5004c14afb51f4a6679b189410
|
[
"BSD-3-Clause"
] | null | null | null |
src/the_tale/the_tale/game/cards/tests/test_requests.py
|
serhii73/the-tale
|
ee69b033f57fae5004c14afb51f4a6679b189410
|
[
"BSD-3-Clause"
] | 1
|
2022-01-18T12:04:47.000Z
|
2022-01-18T12:04:47.000Z
|
src/the_tale/the_tale/game/cards/tests/test_requests.py
|
serhii73/the-tale
|
ee69b033f57fae5004c14afb51f4a6679b189410
|
[
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
class CardsRequestsTestsBase(utils_testcase.TestCase):
    """Shared fixture for the cards HTTP-request tests.

    Builds a test map, an account with its hero, clears the external
    cards-storage service, and prepares one card plus one building
    (the building is presumably needed by place/person card effects —
    confirm against the card use-dialog tests).
    """
    def setUp(self):
        super(CardsRequestsTestsBase, self).setUp()
        self.place_1, self.place_2, self.place_3 = game_logic.create_test_map()
        self.account = self.accounts_factory.create_account()
        self.storage = game_logic_storage.LogicStorage()
        self.storage.load_account_data(self.account.id)
        self.hero = self.storage.accounts_to_heroes[self.account.id]
        # reset the external cards service so tests start from an empty storage
        tt_services.storage.cmd_debug_clear_service()
        self.card = objects.Card(types.CARD.KEEPERS_GOODS_COMMON, uid=uuid.uuid4())
        self.building_1 = places_logic.create_building(person=self.place_1.persons[0], utg_name=game_names.generator().get_test_name('building-1-name'))
class UseDialogRequestTests(CardsRequestsTestsBase):
    """Tests for the 'game:cards:use-dialog' HTML endpoint."""
    def test_unlogined(self):
        # anonymous users get the login-required marker in the rendered page
        self.check_html_ok(self.request_ajax_html(utils_urls.url('game:cards:use-dialog', card=uuid.uuid4().hex)), texts=['common.login_required'])
    def test_no_cards(self):
        # a card uid the player does not own renders a wrong-value error
        self.request_login(self.account.email)
        self.check_html_ok(self.request_ajax_html(utils_urls.url('game:cards:use-dialog', card=uuid.uuid4().hex)), texts=['pgf-error-card.wrong_value'])
    def test_has_cards(self):
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[self.card])
        self.request_login(self.account.email)
        self.check_html_ok(self.request_ajax_html(utils_urls.url('game:cards:use-dialog', card=self.card.uid)))
    def test_every_card(self):
        # smoke-test the dialog for every card type except the companion
        # cards, which are explicitly skipped here
        self.request_login(self.account.email)
        for card_type in types.CARD.records:
            if card_type in (types.CARD.GET_COMPANION_UNCOMMON,
                             types.CARD.GET_COMPANION_RARE,
                             types.CARD.GET_COMPANION_EPIC,
                             types.CARD.GET_COMPANION_LEGENDARY):
                continue
            card = card_type.effect.create_card(available_for_auction=True, type=card_type)
            logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card])
            self.check_html_ok(self.request_ajax_html(utils_urls.url('game:cards:use-dialog', card=card.uid)))
class UseRequestTests(CardsRequestsTestsBase):
    """Tests for the card-use POST endpoint (logic.use_card_url)."""
    def test_unlogined(self):
        self.check_ajax_error(self.post_ajax_json(logic.use_card_url(uuid.uuid4().hex), {}), 'common.login_required')
    def test_no_cards(self):
        self.request_login(self.account.email)
        self.check_ajax_error(self.post_ajax_json(logic.use_card_url(uuid.uuid4().hex)), 'card.wrong_value')
    def test_form_invalid(self):
        self.request_login(self.account.email)
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[self.card])
        # 6666666 is not a valid place id for this card's form
        self.check_ajax_error(self.post_ajax_json(logic.use_card_url(self.card.uid), {'value': 6666666}), 'form_errors')
    def test_success(self):
        self.request_login(self.account.email)
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[self.card])
        response = self.post_ajax_json(logic.use_card_url(self.card.uid), {'value': self.place_1.id})
        # a successful use enqueues a postponed task and returns its status url
        task = PostponedTaskPrototype._db_get_object(0)
        self.check_ajax_processing(response, task.status_url)
        task.remove()
class TestIndexRequests(CardsRequestsTestsBase):
    """Tests for the public cards index page ('guide:cards:')."""
    def setUp(self):
        super(TestIndexRequests, self).setUp()
    def test_simple(self):
        # the index must mention every card type's text
        texts = [card.text for card in types.CARD.records]
        self.check_html_ok(self.request_html(utils_urls.url('guide:cards:')), texts=texts)
    def test_rarity_filter(self):
        # NOTE(review): the url carries no filter parameter here, so this
        # only checks the filtered subsets are all present on the page
        for rarity in relations.RARITY.records:
            texts = [card.text for card in types.CARD.records if card.rarity == rarity]
            self.check_html_ok(self.request_html(utils_urls.url('guide:cards:')), texts=texts)
    def test_availability_filter(self):
        for availability in relations.AVAILABILITY.records:
            texts = [card.text for card in types.CARD.records if card.availability == availability]
            self.check_html_ok(self.request_html(utils_urls.url('guide:cards:')), texts=texts)
class GetCardRequestsTests(CardsRequestsTestsBase):
    """Tests for receiving newly minted cards (logic.receive_cards_url)."""
    def setUp(self):
        super(GetCardRequestsTests, self).setUp()
    def test_unlogined(self):
        self.check_ajax_error(self.post_ajax_json(logic.receive_cards_url()), 'common.login_required')
    def test_no_new_cards(self):
        self.request_login(self.account.email)
        response = self.post_ajax_json(logic.receive_cards_url())
        data = self.check_ajax_ok(response)
        self.assertEqual(data['cards'], [])
    def test_has_new_cards(self):
        self.request_login(self.account.email)
        cards = [logic.create_card(allow_premium_cards=True, available_for_auction=True),
                 logic.create_card(allow_premium_cards=True, available_for_auction=True)]
        # cards placed into the NEW storage are the ones 'receive' returns
        logic.change_cards(owner_id=self.account.id,
                           operation_type='#test',
                           storage=relations.STORAGE.NEW,
                           to_add=cards)
        response = self.post_ajax_json(logic.receive_cards_url())
        data = self.check_ajax_ok(response)
        self.assertEqual(len(data['cards']), 2)
        self.assertEqual({card.uid.hex for card in cards},
                         {card['uid'] for card in data['cards']})
class CombineCardsRequestsTests(CardsRequestsTestsBase):
    """Tests for combining two cards into a new one (logic.combine_cards_url)."""
    def test_unlogined(self):
        self.check_ajax_error(self.post_ajax_json(logic.combine_cards_url()), 'common.login_required')
    def test_created(self):
        # combining two cards leaves exactly one (new) card in storage and
        # returns its ui_info in the 'cards' list
        self.request_login(self.account.email)
        card_1 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        card_2 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card_1, card_2])
        response = self.post_ajax_json(logic.combine_cards_url(), {'card': [card_1.uid, card_2.uid]})
        account_cards = tt_services.storage.cmd_get_items(self.hero.account_id)
        self.assertEqual(len(account_cards), 1)
        new_card = list(account_cards.values())[0]
        data = self.check_ajax_ok(response)
        self.assertEqual(data['cards'], [new_card.ui_info()])
    def test_created__old_api(self):
        # api_version='2.0' returns a single 'card' key instead of 'cards'
        self.request_login(self.account.email)
        card_1 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        card_2 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card_1, card_2])
        response = self.post_ajax_json(logic.combine_cards_url(api_version='2.0'), {'card': [card_1.uid, card_2.uid]})
        account_cards = tt_services.storage.cmd_get_items(self.hero.account_id)
        self.assertEqual(len(account_cards), 1)
        new_card = list(account_cards.values())[0]
        data = self.check_ajax_ok(response)
        self.assertEqual(data['card'], new_card.ui_info())
    def test_created__no_premium_cards(self):
        # account always use personal_only mode for not premium players
        self.assertTrue(self.account._model.cards_receive_mode.is_ALL)
        self.assertTrue(self.account.cards_receive_mode().is_PERSONAL_ONLY)
        self.request_login(self.account.email)
        card_type = types.CARD.ADD_GOLD_COMMON
        self.assertTrue(card_type.availability.is_FOR_PREMIUMS)
        # repeat to make the probabilistic check meaningful: a non-premium
        # player must never receive a premium-only combine result
        for i in range(100):
            card_1 = objects.Card(card_type, uid=uuid.uuid4())
            card_2 = objects.Card(card_type, uid=uuid.uuid4())
            logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card_1, card_2])
            response = self.post_ajax_json(logic.combine_cards_url(), {'card': [card_1.uid, card_2.uid]})
            account_cards = tt_services.storage.cmd_get_items(self.hero.account_id)
            self.assertEqual(len(account_cards), 1)
            new_card = list(account_cards.values())[0]
            self.assertTrue(new_card.type.availability.is_FOR_ALL)
            data = self.check_ajax_ok(response)
            self.assertEqual(data['cards'], [new_card.ui_info()])
            tt_services.storage.cmd_debug_clear_service()
    def test_created__allow_premium_cards(self):
        self.account.prolong_premium(30)
        self.account.set_cards_receive_mode(relations.RECEIVE_MODE.ALL)
        self.account.save()
        self.request_login(self.account.email)
        card_type = types.CARD.ADD_GOLD_COMMON
        self.assertTrue(card_type.availability.is_FOR_PREMIUMS)
        premium_constructed = False
        # over 100 combines a premium player should construct at least one
        # premium-availability card
        for i in range(100):
            card_1 = objects.Card(card_type, uid=uuid.uuid4())
            card_2 = objects.Card(card_type, uid=uuid.uuid4())
            logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card_1, card_2])
            response = self.post_ajax_json(logic.combine_cards_url(), {'card': [card_1.uid, card_2.uid]})
            account_cards = tt_services.storage.cmd_get_items(self.hero.account_id)
            self.assertEqual(len(account_cards), 1)
            new_card = list(account_cards.values())[0]
            premium_constructed = premium_constructed or new_card.type.availability.is_FOR_PREMIUMS
            data = self.check_ajax_ok(response)
            self.assertEqual(data['cards'], [new_card.ui_info()])
            tt_services.storage.cmd_debug_clear_service()
        self.assertTrue(premium_constructed)
    def test_wrong_cards(self):
        self.request_login(self.account.email)
        with self.check_not_changed(PostponedTaskPrototype._db_count):
            self.check_ajax_error(self.post_ajax_json(logic.combine_cards_url(), {'card': [uuid.uuid4().hex]}),
                                  'card.wrong_value')
class MoveToStorageRequestsTests(CardsRequestsTestsBase):
    """Tests for moving cards from the hand into storage."""
    def test_unlogined(self):
        self.check_ajax_error(self.post_ajax_json(logic.move_to_storage_url()), 'common.login_required')
    def test_move(self):
        # only the uids listed in the request are moved; card_2 stays in hand
        self.request_login(self.account.email)
        card_1 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        card_2 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        card_3 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card_1, card_2, card_3])
        data = self.check_ajax_ok(self.request_json(logic.get_cards_url()))
        self.assertFalse(any(card['in_storage'] for card in data['cards']))
        self.check_ajax_ok(self.post_ajax_json(logic.move_to_storage_url(), {'card': [card_1.uid.hex, card_3.uid.hex]}))
        data = self.check_ajax_ok(self.request_json(logic.get_cards_url()))
        for card in data['cards']:
            if card['uid'] in (card_1.uid.hex, card_3.uid.hex):
                self.assertTrue(card['in_storage'])
            else:
                self.assertFalse(card['in_storage'])
    def test_already_moved(self):
        # moving an already-stored card is idempotent, not an error
        self.request_login(self.account.email)
        card = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card])
        self.check_ajax_ok(self.post_ajax_json(logic.move_to_storage_url(), {'card': [card.uid.hex]}))
        self.check_ajax_ok(self.post_ajax_json(logic.move_to_storage_url(), {'card': [card.uid.hex]}))
        data = self.check_ajax_ok(self.request_json(logic.get_cards_url()))
        self.assertTrue(data['cards'][0]['in_storage'])
    def test_no_card(self):
        self.request_login(self.account.email)
        self.check_ajax_error(self.post_ajax_json(logic.move_to_storage_url(), {'card': [uuid.uuid4().hex]}), 'card.wrong_value')
    def test_no_card__in_list(self):
        # one unknown uid in the list fails the whole request
        self.request_login(self.account.email)
        card = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card])
        self.check_ajax_error(self.post_ajax_json(logic.move_to_storage_url(), {'card': [card.uid.hex, uuid.uuid4().hex]}), 'card.wrong_value')
class MoveToHandRequestsTests(CardsRequestsTestsBase):
    """Tests for moving cards from storage back into the hand
    (mirror image of MoveToStorageRequestsTests)."""
    def test_unlogined(self):
        self.check_ajax_error(self.post_ajax_json(logic.move_to_hand_url()), 'common.login_required')
    def test_move(self):
        # put all three cards into storage first, then pull two back
        self.request_login(self.account.email)
        card_1 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        card_2 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        card_3 = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card_1, card_2, card_3])
        self.check_ajax_ok(self.post_ajax_json(logic.move_to_storage_url(), {'card': [card_1.uid.hex, card_2.uid.hex, card_3.uid.hex]}))
        data = self.check_ajax_ok(self.request_json(logic.get_cards_url()))
        self.assertTrue(any(card['in_storage'] for card in data['cards']))
        self.check_ajax_ok(self.post_ajax_json(logic.move_to_hand_url(), {'card': [card_1.uid.hex, card_3.uid.hex]}))
        data = self.check_ajax_ok(self.request_json(logic.get_cards_url()))
        for card in data['cards']:
            if card['uid'] in (card_1.uid.hex, card_3.uid.hex):
                self.assertFalse(card['in_storage'])
            else:
                self.assertTrue(card['in_storage'])
    def test_already_moved(self):
        # moving a card that is already in hand is idempotent
        self.request_login(self.account.email)
        card = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card])
        self.check_ajax_ok(self.post_ajax_json(logic.move_to_hand_url(), {'card': [card.uid.hex]}))
        data = self.check_ajax_ok(self.request_json(logic.get_cards_url()))
        self.assertFalse(data['cards'][0]['in_storage'])
    def test_no_card(self):
        self.request_login(self.account.email)
        self.check_ajax_error(self.post_ajax_json(logic.move_to_hand_url(), {'card': [uuid.uuid4().hex]}), 'card.wrong_value')
    def test_no_card__in_list(self):
        # one unknown uid in the list fails the whole request
        self.request_login(self.account.email)
        card = objects.Card(types.CARD.ADD_GOLD_COMMON, uid=uuid.uuid4())
        logic.change_cards(self.hero.account_id, operation_type='#test', to_add=[card])
        self.check_ajax_error(self.post_ajax_json(logic.move_to_hand_url(), {'card': [card.uid.hex, uuid.uuid4().hex]}), 'card.wrong_value')
class TakeCardCallbackTests(CardsRequestsTestsBase, tt_api_testcase.TestCaseMixin):
    """Tests for the timer-service callback that mints cards for players
    ('game:cards:tt-take-card-callback', a protobuf endpoint).

    The callback answers with a postprocess type telling the timer service
    whether to RESTART the timer (keep minting) or REMOVE it (stop).
    """
    def setUp(self):
        super().setUp()
        self.postprocess_remove = tt_protocol_timers_pb2.CallbackAnswer.PostprocessType.Value('REMOVE')
        self.postprocess_restart = tt_protocol_timers_pb2.CallbackAnswer.PostprocessType.Value('RESTART')
    def create_data(self, secret, account_id=None):
        # Build a serialized CallbackBody as the timer service would send it.
        # speed falls back to 666 only when no timer exists for the account.
        if account_id is None:
            account_id = self.account.id
        new_card_timer = accounts_tt_services.players_timers.get_or_create_timer(account_id)
        speed = new_card_timer.speed if new_card_timer else 666
        return tt_protocol_timers_pb2.CallbackBody(timer=tt_protocol_timers_pb2.Timer(owner_id=account_id,
                                                                                      entity_id=0,
                                                                                      type=0,
                                                                                      speed=speed),
                                                   callback_data='xy',
                                                   secret=secret).SerializeToString()
    def test_no_post_data(self):
        self.check_ajax_error(self.post_ajax_json(utils_urls.url('game:cards:tt-take-card-callback')), 'common.wrong_tt_post_data', status_code=500)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(cards, {})
    def test_wrong_secret_key(self):
        data = self.create_data(secret='wrong.secret')
        self.check_ajax_error(self.post_ajax_binary(utils_urls.url('game:cards:tt-take-card-callback'), data), 'common.wrong_tt_secret', status_code=500)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(cards, {})
    @mock.patch('tt_logic.common.checkers.is_player_participate_in_game', mock.Mock(return_value=False))
    def test_no_account(self):
        # unknown account: answer REMOVE so the timer service drops the timer
        account_id = 9999999
        data = self.create_data(secret=django_settings.TT_SECRET, account_id=account_id)
        answer = self.check_protobuf_ok(self.post_protobuf(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                        answer_type=tt_protocol_timers_pb2.CallbackAnswer)
        self.assertEqual(answer.postprocess_type, self.postprocess_remove)
        cards = tt_services.storage.cmd_get_items(account_id)
        self.assertEqual(cards, {})
    @mock.patch('tt_logic.common.checkers.is_player_participate_in_game', mock.Mock(return_value=False))
    def test_account_removed(self):
        # an account in the data-protection removal flow also gets REMOVE
        tt_services.storage.cmd_debug_clear_service()
        accounts_data_protection.first_step_removing(self.account)
        data = self.create_data(secret=django_settings.TT_SECRET,)
        answer = self.check_protobuf_ok(self.post_protobuf(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                        answer_type=tt_protocol_timers_pb2.CallbackAnswer)
        self.assertEqual(answer.postprocess_type, self.postprocess_remove)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(cards, {})
    @mock.patch('tt_logic.common.checkers.is_player_participate_in_game', mock.Mock(return_value=False))
    def test_does_not_participate_in_game(self):
        # inactive player: timer RESTARTs but no card is minted
        data = self.create_data(secret=django_settings.TT_SECRET)
        answer = self.check_protobuf_ok(self.post_protobuf(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                        answer_type=tt_protocol_timers_pb2.CallbackAnswer)
        self.assertEqual(answer.postprocess_type, self.postprocess_restart)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(cards, {})
    def check_cards_timer_speed(self, speed):
        # helper: assert the account's card-mining timer runs at `speed`
        new_card_timer = accounts_tt_services.players_timers.get_timer(self.account.id)
        self.assertEqual(new_card_timer.speed, speed)
    def test_premium(self):
        # premium + receive-mode ALL: one auction-eligible card in NEW
        # storage and the timer sped up to the premium rate
        self.check_cards_timer_speed(tt_cards_constants.NORMAL_PLAYER_SPEED)
        self.account.prolong_premium(30)
        self.account.set_cards_receive_mode(relations.RECEIVE_MODE.ALL)
        self.account.save()
        tt_services.storage.cmd_debug_clear_service()
        data = self.create_data(secret=django_settings.TT_SECRET)
        answer = self.check_protobuf_ok(self.post_ajax_binary(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                        answer_type=tt_protocol_timers_pb2.CallbackAnswer)
        self.assertEqual(answer.postprocess_type, self.postprocess_restart)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(len(cards), 1)
        self.assertTrue(list(cards.values())[0].storage.is_NEW)
        self.assertTrue(list(cards.values())[0].available_for_auction)
        self.check_cards_timer_speed(tt_cards_constants.PREMIUM_PLAYER_SPEED)
    def test_not_premium(self):
        # a non-premium player's timer is throttled back to normal speed
        # and the minted card is not auctionable
        accounts_tt_services.players_timers.cmd_change_timer_speed(owner_id=self.account.id,
                                                                   speed=tt_cards_constants.PREMIUM_PLAYER_SPEED,
                                                                   type=accounts_relations.PLAYER_TIMERS_TYPES.CARDS_MINER)
        self.check_cards_timer_speed(tt_cards_constants.PREMIUM_PLAYER_SPEED)
        data = self.create_data(secret=django_settings.TT_SECRET)
        answer = self.check_protobuf_ok(self.post_ajax_binary(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                        answer_type=tt_protocol_timers_pb2.CallbackAnswer)
        self.assertEqual(answer.postprocess_type, self.postprocess_restart)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(len(cards), 1)
        self.assertTrue(list(cards.values())[0].storage.is_NEW)
        self.assertFalse(list(cards.values())[0].available_for_auction)
        self.check_cards_timer_speed(tt_cards_constants.NORMAL_PLAYER_SPEED)
    def test_no_premium_cards_for_not_premium_player(self):
        # account always use personal_only mode for not premium players
        self.assertTrue(self.account._model.cards_receive_mode.is_ALL)
        self.assertTrue(self.account.cards_receive_mode().is_PERSONAL_ONLY)
        accounts_tt_services.players_timers.cmd_change_timer_speed(owner_id=self.account.id,
                                                                   speed=tt_cards_constants.PREMIUM_PLAYER_SPEED,
                                                                   type=accounts_relations.PLAYER_TIMERS_TYPES.CARDS_MINER)
        self.check_cards_timer_speed(tt_cards_constants.PREMIUM_PLAYER_SPEED)
        # 100 iterations make the probabilistic guarantee observable
        for i in range(100):
            data = self.create_data(secret=django_settings.TT_SECRET)
            answer = self.check_protobuf_ok(self.post_ajax_binary(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                            answer_type=tt_protocol_timers_pb2.CallbackAnswer)
            self.assertEqual(answer.postprocess_type, self.postprocess_restart)
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(len(cards), 100)
        for card in cards.values():
            self.assertFalse(card.available_for_auction)
            self.assertTrue(card.type.availability.is_FOR_ALL)
    def test_premium_cards_for_premium_player(self):
        # premium + mode ALL must eventually mint both availability classes
        self.check_cards_timer_speed(tt_cards_constants.NORMAL_PLAYER_SPEED)
        self.account.prolong_premium(30)
        self.account.set_cards_receive_mode(relations.RECEIVE_MODE.ALL)
        self.account.save()
        tt_services.storage.cmd_debug_clear_service()
        for i in range(100):
            data = self.create_data(secret=django_settings.TT_SECRET)
            answer = self.check_protobuf_ok(self.post_ajax_binary(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                            answer_type=tt_protocol_timers_pb2.CallbackAnswer)
            self.assertEqual(answer.postprocess_type, self.postprocess_restart)
        card_types = set()
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(len(cards), 100)
        for card in cards.values():
            card_types.add(card.type.availability)
        self.assertEqual(card_types,
                         {relations.AVAILABILITY.FOR_ALL,
                          relations.AVAILABILITY.FOR_PREMIUMS})
    def test_only_not_premium_cards_for_premium_player(self):
        # premium + mode PERSONAL_ONLY restricts minting to FOR_ALL cards
        self.check_cards_timer_speed(tt_cards_constants.NORMAL_PLAYER_SPEED)
        self.account.prolong_premium(30)
        self.account.set_cards_receive_mode(relations.RECEIVE_MODE.PERSONAL_ONLY)
        self.account.save()
        tt_services.storage.cmd_debug_clear_service()
        for i in range(100):
            data = self.create_data(secret=django_settings.TT_SECRET)
            answer = self.check_protobuf_ok(self.post_ajax_binary(utils_urls.url('game:cards:tt-take-card-callback'), data),
                                            answer_type=tt_protocol_timers_pb2.CallbackAnswer)
            self.assertEqual(answer.postprocess_type, self.postprocess_restart)
        card_types = set()
        cards = tt_services.storage.cmd_get_items(self.account.id)
        self.assertEqual(len(cards), 100)
        for card in cards.values():
            card_types.add(card.type.availability)
        self.assertEqual(card_types,
                         {relations.AVAILABILITY.FOR_ALL})
class GetCardsTests(CardsRequestsTestsBase):
    """Tests for the cards-listing endpoint (logic.get_cards_url)."""
    def test_unlogined(self):
        self.check_ajax_error(self.request_ajax_json(logic.get_cards_url()), 'common.login_required')
    def test_no_cards(self):
        # empty hand still reports the new-card timer state in full
        self.request_login(self.account.email)
        response = self.request_ajax_json(logic.get_cards_url())
        data = self.check_ajax_ok(response)
        self.assertEqual(data['cards'], [])
        self.assertEqual(data['new_cards'], 0)
        self.assertEqual(data['new_card_timer'], {'border': tt_cards_constants.RECEIVE_TIME,
                                                  'finish_at': data['new_card_timer']['finish_at'],
                                                  'id': data['new_card_timer']['id'],
                                                  'owner_id': self.account.id,
                                                  'resources': 0.0,
                                                  'resources_at': data['new_card_timer']['resources_at'],
                                                  'speed': 1.0,
                                                  'type': accounts_relations.PLAYER_TIMERS_TYPES.CARDS_MINER.value})
    def test_has_cards(self):
        self.request_login(self.account.email)
        cards = [logic.create_card(allow_premium_cards=True, available_for_auction=True),
                 logic.create_card(allow_premium_cards=True, available_for_auction=True)]
        logic.change_cards(owner_id=self.account.id,
                           operation_type='#test',
                           storage=relations.STORAGE.FAST,
                           to_add=cards)
        response = self.request_ajax_json(logic.get_cards_url())
        data = self.check_ajax_ok(response)
        self.assertEqual(len(data['cards']), 2)
        self.assertEqual({card.uid.hex for card in cards},
                         {card['uid'] for card in data['cards']})
    def test_has_not_received_cards(self):
        # cards in the NEW storage are counted in 'new_cards' but not listed;
        # only the FAST-storage card appears in 'cards'
        self.request_login(self.account.email)
        cards = [logic.create_card(allow_premium_cards=True, available_for_auction=True),
                 logic.create_card(allow_premium_cards=True, available_for_auction=True)]
        logic.change_cards(owner_id=self.account.id,
                           operation_type='#test',
                           storage=relations.STORAGE.NEW,
                           to_add=cards)
        visible_card = logic.create_card(allow_premium_cards=True, available_for_auction=True)
        logic.change_cards(owner_id=self.account.id,
                           operation_type='#test',
                           storage=relations.STORAGE.FAST,
                           to_add=[visible_card])
        response = self.request_ajax_json(logic.get_cards_url())
        data = self.check_ajax_ok(response)
        self.assertEqual(data['new_cards'], 2)
        self.assertEqual(len(data['cards']), 1)
        self.assertEqual(visible_card.uid.hex, data['cards'][0]['uid'])
class ChangeReceiveModeRequestTests(CardsRequestsTestsBase):
    """Tests for switching the account's card receive mode
    (PERSONAL_ONLY <-> ALL); premium-only feature."""
    def setUp(self):
        super().setUp()
        self.url_personal_only = logic.change_receive_mode_url(relations.RECEIVE_MODE.PERSONAL_ONLY)
        self.url_all = logic.change_receive_mode_url(relations.RECEIVE_MODE.ALL)
    def test_unlogined(self):
        self.check_ajax_error(self.post_ajax_json(self.url_all, {}), 'common.login_required')
    def test_not_premium(self):
        self.request_login(self.account.email)
        self.check_ajax_error(self.post_ajax_json(self.url_all, {}), 'common.premium_account')
    def test_success(self):
        # round-trip: ALL -> PERSONAL_ONLY -> ALL, reloading after each change
        self.request_login(self.account.email)
        self.account.prolong_premium(30)
        self.account.save()
        self.assertTrue(self.account.cards_receive_mode().is_ALL)
        self.check_ajax_ok(self.post_ajax_json(self.url_personal_only))
        self.account.reload()
        self.assertTrue(self.account.cards_receive_mode().is_PERSONAL_ONLY)
        self.check_ajax_ok(self.post_ajax_json(self.url_all))
        self.account.reload()
        self.assertTrue(self.account.cards_receive_mode().is_ALL)
| 42.552395
| 153
| 0.669974
| 3,684
| 28,425
| 4.849349
| 0.062432
| 0.045564
| 0.029835
| 0.026868
| 0.852617
| 0.826756
| 0.812035
| 0.792387
| 0.770613
| 0.753149
| 0
| 0.008234
| 0.218153
| 28,425
| 667
| 154
| 42.616192
| 0.795626
| 0.004327
| 0
| 0.662037
| 0
| 0
| 0.053467
| 0.029295
| 0
| 0
| 0
| 0
| 0.150463
| 1
| 0.125
| false
| 0
| 0.00463
| 0
| 0.157407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0c00eb36574ccb0512daf5ec46cf007ed601e93f
| 3,231
|
py
|
Python
|
nature_datasets/cifar.py
|
DSciLab/nature_datasets
|
20644200e5ba8af0439ca6c37f579559ac253292
|
[
"MIT"
] | null | null | null |
nature_datasets/cifar.py
|
DSciLab/nature_datasets
|
20644200e5ba8af0439ca6c37f579559ac253292
|
[
"MIT"
] | null | null | null |
nature_datasets/cifar.py
|
DSciLab/nature_datasets
|
20644200e5ba8af0439ca6c37f579559ac253292
|
[
"MIT"
] | null | null | null |
import os
from torchvision import datasets
from torchvision import transforms
from torchvision.transforms.transforms import Resize
from .utils import LinearNormalize
# Three interchangeable normalization steps; the dataset factories below
# select one of them via opt['normalize'].
identity_normalize_fn = transforms.Lambda(lambda X: X)
linear_normalize_fn = LinearNormalize()
normalize_fn = transforms.Normalize(
    mean=[0.485, 0.456, 0.406],
    std=[0.229, 0.224, 0.225],
)
def get_cifar10(opt):
    """Build the CIFAR-10 train/eval dataset pair.

    The final normalization transform is chosen by ``opt['normalize']``:
    'linear' (the default) -> linear_normalize_fn, 'identity' ->
    identity_normalize_fn, anything else -> normalize_fn.

    Returns a ``(training_dataset, eval_dataset)`` tuple.
    """
    norm = {
        'linear': linear_normalize_fn,
        'identity': identity_normalize_fn,
    }.get(opt.get('normalize', 'linear'), normalize_fn)
    # Training pipeline adds light augmentation; eval only resizes.
    train_transform = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.Resize(opt.image_size),
        transforms.RandomCrop(opt.image_size, 4),
        transforms.RandomRotation(15),
        transforms.ToTensor(),
        norm,
    ])
    eval_transform = transforms.Compose([
        transforms.Resize(opt.image_size),
        transforms.ToTensor(),
        norm,
    ])
    root = os.path.join(opt.data_root, 'cifar10')
    train_set = datasets.CIFAR10(root=root,
                                 train=True,
                                 transform=train_transform,
                                 download=True)
    eval_set = datasets.CIFAR10(root=root,
                                train=False,
                                download=True,
                                transform=eval_transform)
    return train_set, eval_set
def get_cifar100(opt):
    """Build the CIFAR-100 train/eval dataset pair.

    Mirrors get_cifar10: normalization is chosen by ``opt['normalize']``
    ('linear' default -> linear_normalize_fn, 'identity' ->
    identity_normalize_fn, otherwise normalize_fn).

    Returns a ``(training_dataset, eval_dataset)`` tuple.
    """
    norm = {
        'linear': linear_normalize_fn,
        'identity': identity_normalize_fn,
    }.get(opt.get('normalize', 'linear'), normalize_fn)
    # Training pipeline adds light augmentation; eval only resizes.
    train_transform = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.Resize(opt.image_size),
        transforms.RandomCrop(opt.image_size, 4),
        transforms.RandomRotation(15),
        transforms.ToTensor(),
        norm,
    ])
    eval_transform = transforms.Compose([
        transforms.Resize(opt.image_size),
        transforms.ToTensor(),
        norm,
    ])
    root = os.path.join(opt.data_root, 'cifar100')
    train_set = datasets.CIFAR100(root=root,
                                  train=True,
                                  transform=train_transform,
                                  download=True)
    eval_set = datasets.CIFAR100(root=root,
                                 train=False,
                                 download=True,
                                 transform=eval_transform)
    return train_set, eval_set
| 37.569767
| 72
| 0.501393
| 264
| 3,231
| 5.935606
| 0.212121
| 0.063178
| 0.045948
| 0.097001
| 0.783663
| 0.783663
| 0.783663
| 0.745373
| 0.745373
| 0.745373
| 0
| 0.026882
| 0.424327
| 3,231
| 85
| 73
| 38.011765
| 0.815591
| 0
| 0
| 0.760563
| 0
| 0
| 0.022594
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0
| 0.070423
| 0
| 0.126761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0c3f7faa319c414c3ca4285127a690a3d365d76b
| 40
|
py
|
Python
|
rest-api/flask_app/long_task/__init__.py
|
sinedie/Flask-Svelte-Websockets-Nginx-Docker
|
76daeec2c76f9f27ca526f53393ab4363020b92b
|
[
"WTFPL"
] | 4
|
2021-11-21T14:04:15.000Z
|
2022-03-20T15:28:14.000Z
|
rest-api/flask_app/long_task/__init__.py
|
sinedie/Utimate-flask-websocket-template
|
76daeec2c76f9f27ca526f53393ab4363020b92b
|
[
"WTFPL"
] | null | null | null |
rest-api/flask_app/long_task/__init__.py
|
sinedie/Utimate-flask-websocket-template
|
76daeec2c76f9f27ca526f53393ab4363020b92b
|
[
"WTFPL"
] | null | null | null |
from flask_app.long_task.routes import *
| 40
| 40
| 0.85
| 7
| 40
| 4.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0c63fef5822d7b4b3f9e2df4bf61a5b7098baab9
| 60
|
py
|
Python
|
skforecast/ForecasterAutoregCustom/__init__.py
|
JoaquinAmatRodrigo/skforecaster
|
3ab526d63bbb94ae4bd18ae964197042a675a34a
|
[
"MIT"
] | 86
|
2021-02-25T08:56:45.000Z
|
2022-03-31T01:33:53.000Z
|
skforecast/ForecasterAutoregCustom/__init__.py
|
hdiazsqlr/skforecast
|
5ee79a51960a27db9e169706014528eae403e1c2
|
[
"MIT"
] | 5
|
2021-11-30T22:30:45.000Z
|
2022-03-29T10:21:36.000Z
|
skforecast/ForecasterAutoregCustom/__init__.py
|
hdiazsqlr/skforecast
|
5ee79a51960a27db9e169706014528eae403e1c2
|
[
"MIT"
] | 24
|
2021-04-04T09:58:26.000Z
|
2022-03-09T15:55:44.000Z
|
from .ForecasterAutoregCustom import ForecasterAutoregCustom
| 60
| 60
| 0.933333
| 4
| 60
| 14
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 60
| 1
| 60
| 60
| 0.982456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a76fa9b357630943e305f4985792533e00971285
| 240
|
py
|
Python
|
src/mobot_client/models/exceptions.py
|
mobilecoinofficial/mobot
|
4872e4308beb5305d88dcace94394aaa251f65e1
|
[
"MIT"
] | 6
|
2021-07-28T13:49:16.000Z
|
2022-02-16T22:08:03.000Z
|
src/mobot_client/models/exceptions.py
|
mobilecoinofficial/mobot
|
4872e4308beb5305d88dcace94394aaa251f65e1
|
[
"MIT"
] | 10
|
2021-08-18T15:18:34.000Z
|
2021-09-27T21:40:24.000Z
|
src/mobot_client/models/exceptions.py
|
mobilecoinofficial/mobot
|
4872e4308beb5305d88dcace94394aaa251f65e1
|
[
"MIT"
] | 3
|
2021-07-28T01:17:06.000Z
|
2021-09-20T21:19:50.000Z
|
# Copyright (c) 2021 MobileCoin. All rights reserved.
from django.db.transaction import DatabaseError
class ConcurrentModificationException(DatabaseError):
"""Raise if we're unable to update a coin with optimistic locking"""
pass
| 34.285714
| 72
| 0.779167
| 29
| 240
| 6.448276
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.15
| 240
| 7
| 73
| 34.285714
| 0.897059
| 0.483333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
a776c1673c0efd0315b927220e0403b3ebfa93e6
| 2,642
|
py
|
Python
|
lemur/tests/test_domains.py
|
bunjiboys/lemur
|
b5fd8020055d8af07bd6f82f4dd38246dca8d0c5
|
[
"Apache-2.0"
] | null | null | null |
lemur/tests/test_domains.py
|
bunjiboys/lemur
|
b5fd8020055d8af07bd6f82f4dd38246dca8d0c5
|
[
"Apache-2.0"
] | 2
|
2020-04-03T09:28:20.000Z
|
2020-04-04T04:56:35.000Z
|
lemur/tests/test_domains.py
|
scriptsrc/lemur
|
914de78576baf66d8f4c0365d8cedb27c6f70663
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from lemur.domains.views import * # noqa
from .vectors import VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
('', 401)
])
def test_domain_get(client, token, status):
assert client.get(api.url_for(Domains, domain_id=1), headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
('', 405)
])
def test_domain_post_(client, token, status):
assert client.post(api.url_for(Domains, domain_id=1), data={}, headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 400),
(VALID_ADMIN_HEADER_TOKEN, 400),
('', 401)
])
def test_domain_put(client, token, status):
assert client.put(api.url_for(Domains, domain_id=1), data={}, headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
('', 405)
])
def test_domain_delete(client, token, status):
assert client.delete(api.url_for(Domains, domain_id=1), headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
('', 405)
])
def test_domain_patch(client, token, status):
assert client.patch(api.url_for(Domains, domain_id=1), data={}, headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 400),
(VALID_ADMIN_HEADER_TOKEN, 400),
('', 401)
])
def test_domain_list_post_(client, token, status):
assert client.post(api.url_for(DomainsList), data={}, headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 200),
(VALID_ADMIN_HEADER_TOKEN, 200),
('', 401)
])
def test_domain_list_get(client, token, status):
assert client.get(api.url_for(DomainsList), headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
('', 405)
])
def test_domain_list_delete(client, token, status):
assert client.delete(api.url_for(DomainsList), headers=token).status_code == status
@pytest.mark.parametrize("token,status", [
(VALID_USER_HEADER_TOKEN, 405),
(VALID_ADMIN_HEADER_TOKEN, 405),
('', 405)
])
def test_domain_list_patch(client, token, status):
assert client.patch(api.url_for(DomainsList), data={}, headers=token).status_code == status
| 30.022727
| 104
| 0.71726
| 358
| 2,642
| 4.994413
| 0.111732
| 0.166107
| 0.089485
| 0.11745
| 0.940157
| 0.919463
| 0.919463
| 0.919463
| 0.919463
| 0.919463
| 0
| 0.037538
| 0.132854
| 2,642
| 87
| 105
| 30.367816
| 0.742907
| 0.001514
| 0
| 0.681818
| 0
| 0
| 0.040971
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 1
| 0.136364
| false
| 0
| 0.045455
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a7c8a4dbb99ebccd4aefe3170c0e80a9a26db499
| 47
|
py
|
Python
|
learnhtml/__init__.py
|
nikitautiu/deephtml
|
9ef580132feefd862246e8bc1f594329ddd742b0
|
[
"Apache-2.0"
] | 28
|
2018-12-03T15:41:43.000Z
|
2021-09-17T10:41:46.000Z
|
learnhtml/__init__.py
|
microvn/learnhtml
|
9ef580132feefd862246e8bc1f594329ddd742b0
|
[
"Apache-2.0"
] | 1
|
2019-10-23T06:52:14.000Z
|
2019-10-23T07:59:00.000Z
|
learnhtml/__init__.py
|
microvn/learnhtml
|
9ef580132feefd862246e8bc1f594329ddd742b0
|
[
"Apache-2.0"
] | 5
|
2020-04-11T06:37:22.000Z
|
2021-03-02T12:28:05.000Z
|
from . import features, model_selection, utils
| 23.5
| 46
| 0.808511
| 6
| 47
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 47
| 1
| 47
| 47
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a7d4b02feaa2510e27d0cf92281a56b358cb7d3d
| 141
|
py
|
Python
|
shiyanlou_cs596-1805f3c438/mymodule/bars.py
|
tongxindao/shiyanlou
|
1d002ea342deb69066c287db9935f77f49f0a09e
|
[
"Apache-2.0"
] | null | null | null |
shiyanlou_cs596-1805f3c438/mymodule/bars.py
|
tongxindao/shiyanlou
|
1d002ea342deb69066c287db9935f77f49f0a09e
|
[
"Apache-2.0"
] | null | null | null |
shiyanlou_cs596-1805f3c438/mymodule/bars.py
|
tongxindao/shiyanlou
|
1d002ea342deb69066c287db9935f77f49f0a09e
|
[
"Apache-2.0"
] | null | null | null |
"""
Bars Module
"""
def starbar(num):
print('*' * num)
def hashbar(num):
print('#' * num)
def simplebar(num):
print('-' * num)
| 11.75
| 20
| 0.531915
| 17
| 141
| 4.411765
| 0.470588
| 0.32
| 0.44
| 0.373333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234043
| 141
| 11
| 21
| 12.818182
| 0.694444
| 0.078014
| 0
| 0
| 0
| 0
| 0.02459
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
38fe67d26781d71f4e3d156fdcbde14f85fb3266
| 2,930
|
py
|
Python
|
tests/utils/test_audio.py
|
toddrme2178/audiomate
|
14e932ce9c0b0bebb895d496cb6054521fc80ab1
|
[
"MIT"
] | null | null | null |
tests/utils/test_audio.py
|
toddrme2178/audiomate
|
14e932ce9c0b0bebb895d496cb6054521fc80ab1
|
[
"MIT"
] | null | null | null |
tests/utils/test_audio.py
|
toddrme2178/audiomate
|
14e932ce9c0b0bebb895d496cb6054521fc80ab1
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import librosa
from audiomate.utils import audio
def test_read_blocks(tmpdir):
wav_path = os.path.join(tmpdir.strpath, 'file.wav')
wav_content = np.random.random(10000)
librosa.output.write_wav(wav_path, wav_content, 16000)
data = [x for x in audio.read_blocks(wav_path, buffer_size=1000)]
blocks = [x[0] for x in data]
sr = [x[1] for x in data]
assert np.allclose(np.concatenate(blocks), wav_content, atol=0.0001)
assert np.concatenate(blocks).dtype == np.float32
assert sr == [16000] * len(data)
def test_read_blocks_with_resampling(tmpdir):
wav_path = os.path.join(tmpdir.strpath, 'file.wav')
wav_content = np.random.random(10000)
librosa.output.write_wav(wav_path, wav_content, 16000)
data = [x for x in audio.read_blocks(wav_path, sr_target=8000, buffer_size=1000)]
blocks = [x[0] for x in data]
sr = [x[1] for x in data]
assert np.concatenate(blocks).size == 5000
assert np.concatenate(blocks).dtype == np.float32
assert sr == [8000] * len(data)
def test_read_blocks_with_start_end(tmpdir):
wav_path = os.path.join(tmpdir.strpath, 'file.wav')
wav_content = np.random.random(10000)
librosa.output.write_wav(wav_path, wav_content, 16000)
blocks = [x[0] for x in audio.read_blocks(wav_path, start=0.1, end=0.3, buffer_size=1000)]
assert np.concatenate(blocks).dtype == np.float32
assert np.allclose(np.concatenate(blocks), wav_content[1600:4800], atol=0.0001)
def test_read_frames(tmpdir):
wav_path = os.path.join(tmpdir.strpath, 'file.wav')
wav_content = np.random.random(10044)
librosa.output.write_wav(wav_path, wav_content, 16000)
data = list(audio.read_frames(wav_path, frame_size=400, hop_size=160))
frames = np.array([x[0] for x in data])
sr = [x[1] for x in data]
last = [x[2] for x in data]
assert frames.shape == (62, 400)
assert frames.dtype == np.float32
assert np.allclose(frames[0], wav_content[:400], atol=0.0001)
assert np.allclose(frames[61], np.pad(wav_content[9760:], (0, 116), mode='constant'), atol=0.0001)
assert sr == [16000] * len(data)
assert last[:-1] == [False] * (len(data) - 1)
assert last[-1]
def test_read_frames_matches_length(tmpdir):
wav_path = os.path.join(tmpdir.strpath, 'file.wav')
wav_content = np.random.random(10000)
librosa.output.write_wav(wav_path, wav_content, 16000)
data = list(audio.read_frames(wav_path, frame_size=400, hop_size=160))
frames = np.array([x[0] for x in data])
sr = [x[1] for x in data]
last = [x[2] for x in data]
assert frames.shape == (61, 400)
assert frames.dtype == np.float32
assert np.allclose(frames[0], wav_content[:400], atol=0.0001)
assert np.allclose(frames[60], wav_content[9600:], atol=0.0001)
assert sr == [16000] * len(data)
assert last[:-1] == [False] * (len(data) - 1)
assert last[-1]
| 34.069767
| 102
| 0.679863
| 475
| 2,930
| 4.054737
| 0.162105
| 0.083074
| 0.040498
| 0.051921
| 0.847871
| 0.840602
| 0.836449
| 0.807373
| 0.73001
| 0.681205
| 0
| 0.084507
| 0.176109
| 2,930
| 85
| 103
| 34.470588
| 0.713339
| 0
| 0
| 0.655738
| 0
| 0
| 0.016382
| 0
| 0
| 0
| 0
| 0
| 0.360656
| 1
| 0.081967
| false
| 0
| 0.065574
| 0
| 0.147541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ac395fec13ccaa2a4986eb9c70df628b4d6a14e7
| 84
|
py
|
Python
|
service1/create.py
|
pstyp/fortuneteller
|
c6e3bdcdb6ff2330965a4adc875576575d19293b
|
[
"MIT"
] | null | null | null |
service1/create.py
|
pstyp/fortuneteller
|
c6e3bdcdb6ff2330965a4adc875576575d19293b
|
[
"MIT"
] | null | null | null |
service1/create.py
|
pstyp/fortuneteller
|
c6e3bdcdb6ff2330965a4adc875576575d19293b
|
[
"MIT"
] | null | null | null |
from application import db
from application.models import Fortune
db.create_all()
| 16.8
| 39
| 0.821429
| 12
| 84
| 5.666667
| 0.666667
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130952
| 84
| 4
| 40
| 21
| 0.931507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ba99c834c7b68bd0ed17ce8925b4382af2e050c
| 27,843
|
py
|
Python
|
optimization/pre_optimization/no_gg_sdeta_plots/ma_files/Output/Histos/MadAnalysis5job_0/selection_0.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/pre_optimization/no_gg_sdeta_plots/ma_files/Output/Histos/MadAnalysis5job_0/selection_0.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/pre_optimization/no_gg_sdeta_plots/ma_files/Output/Histos/MadAnalysis5job_0/selection_0.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_0():
    """Plot the stacked Delta-eta(j1, j2) histogram for MadAnalysis5 job 0.

    Builds one weight array per physics sample, draws them as cumulatively
    stacked step histograms (outermost curve = sum of all samples), and
    saves the figure under the job's HTML/PDF/DVI output directories.
    """
    # Library import (kept function-local, as in the generated original)
    import numpy
    import matplotlib
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
    # Histo binning: 100 uniform bins over [-15, 15]
    xBinning = numpy.linspace(-15.0,15.0,101,endpoint=True)
    # Creating data sequence: middle of each bin
    xData = numpy.array([-14.85,-14.55,-14.25,-13.95,-13.65,-13.35,-13.05,-12.75,-12.45,-12.15,-11.85,-11.55,-11.25,-10.95,-10.65,-10.35,-10.05,-9.75,-9.45,-9.15,-8.85,-8.55,-8.25,-7.95,-7.65,-7.35,-7.05,-6.75,-6.45,-6.15,-5.85,-5.55,-5.25,-4.95,-4.65,-4.35,-4.05,-3.75,-3.45,-3.15,-2.85,-2.55,-2.25,-1.95,-1.65,-1.35,-1.05,-0.75,-0.45,-0.15,0.15,0.45,0.75,1.05,1.35,1.65,1.95,2.25,2.55,2.85,3.15,3.45,3.75,4.05,4.35,4.65,4.95,5.25,5.55,5.85,6.15,6.45,6.75,7.05,7.35,7.65,7.95,8.25,8.55,8.85,9.15,9.45,9.75,10.05,10.35,10.65,10.95,11.25,11.55,11.85,12.15,12.45,12.75,13.05,13.35,13.65,13.95,14.25,14.55,14.85])
    # Creating weights for histo: y1_sdETA_0
    y1_sdETA_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.002324775003,0.116098978734,0.350692617867,0.801082689435,1.25611749726,2.12939443139,3.16506844143,4.6927738116,5.97018421383,8.25038706579,10.5978352989,13.8262332034,17.2878313101,21.3678854515,26.2004266812,31.0364776865,36.5501632203,42.4971861133,48.1863004638,53.5154029827,57.6934350275,62.8279009033,65.2634772826,65.9369304611,66.4727508686,64.3452592313,61.684745413,56.9707730366,53.6170185807,50.4736507858,50.1584105789,53.9007987264,62.3007150234,60.4743525363,60.8712210088,62.7867269985,53.4203432297,50.6379066836,50.4211640509,53.8954021466,57.1728049927,61.7686522345,64.0358153507,65.7659188449,66.5552186022,65.4569946351,62.2581020307,58.9323698149,54.0698915586,48.3625887357,42.5154545352,36.5408651133,31.1096912852,26.1912604906,21.1448907811,17.1393134378,13.5584988882,10.6214723183,8.5453761075,6.28580817753,4.44214865293,3.07241756175,2.11783175959,1.27962459892,0.750176552751,0.343744061726,0.118376615213,0.0023206420224,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_1
    y1_sdETA_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.194465719627,0.522598609472,1.68908977948,2.83086046781,5.0548365953,8.53051292647,14.2914017924,20.6224431684,30.876532136,42.1509968984,56.245124328,70.7654130899,83.4386555888,96.02297261,109.144527698,120.389114301,126.810655376,131.308281723,137.609613177,144.360498972,142.171129676,142.155507633,140.298967974,144.829280443,168.214758374,234.436640343,329.28231173,431.710362184,530.015875559,602.482128608,651.839373838,674.576256573,674.739687181,675.412636742,674.754908659,650.523116541,602.730879607,530.34313734,436.977794762,333.588708243,233.904809907,169.969474355,143.931653853,141.184577608,140.17603451,143.801750553,142.174334198,140.365982535,135.112169174,127.100704648,119.549850062,109.094537159,96.9016124091,82.0888709838,69.4040521501,54.3904673259,42.5895356958,30.2918591368,20.6802727685,13.548681782,9.30830242143,5.21125731177,3.21990422555,1.4941615272,0.631970137477,0.242966276032,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_2
    y1_sdETA_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0200891513355,0.210960422995,0.813291874286,1.81715978951,4.0768251729,7.76100852958,14.9793682201,25.8632242305,42.3808226792,67.046645192,92.7604524644,120.221479498,152.045101553,181.64391502,205.503357019,225.714603827,246.70541378,263.855100505,275.662676447,280.288899841,281.154162867,277.305891096,265.570833617,251.48915306,229.467093378,207.625937302,187.196267364,184.103261954,181.785150408,182.220095901,186.932886918,193.505456268,193.124476082,185.971097893,182.557523688,181.865643836,181.972210853,188.68209009,204.904573518,233.845762293,248.059546283,270.439859568,279.901143001,283.383144883,281.333785521,277.503488412,265.418606827,248.064215563,228.809303401,204.306451154,181.666641604,154.265240862,122.321415839,94.0843379511,65.211990065,43.5628116411,25.2802997529,14.6793112005,8.30363670811,4.63876020416,2.16889334172,0.994014494921,0.240985669816,0.0301123640144,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_3
    y1_sdETA_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00548472661476,0.0220239359413,0.15957571917,0.418096017784,1.24830714538,2.77226215965,6.44653478053,13.1739227943,24.5589642203,39.5854247115,56.9462715288,77.0496299372,98.8034459416,118.747145551,138.53195825,159.086667613,172.763576561,184.944120998,191.252065442,193.835287927,190.826593025,181.954642028,166.26095401,150.077483175,128.03136563,108.417058778,90.8730755997,79.4305366934,67.9507034417,60.3125533895,61.1149505731,68.8689243492,77.9232438826,90.3582267562,108.692744429,129.666385479,149.477198593,168.421425998,183.260959755,190.785073612,193.969311942,192.006930616,183.797665198,172.583929944,158.192700218,138.363564937,119.96522437,98.4411057826,78.016357869,57.6292293058,38.7568849231,24.4439042586,12.8656066227,6.29268544964,2.96432478803,1.37509654431,0.418075704959,0.115543650664,0.00549235611155,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_4
    y1_sdETA_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988581123071,0.00098844684425,0.0138263414802,0.0463834321174,0.187496203419,0.525004136234,1.33132961757,2.88750330892,5.37338146989,9.06618132362,14.1427745324,19.0967050402,24.2768091826,29.3222296632,33.9800092333,37.2841380374,39.9479452562,40.5325631765,40.3501043128,38.3393451048,35.1808707425,30.6452047331,25.5209766136,20.850855417,17.2375285837,14.3871700034,12.7599792498,12.7706574223,14.5352494806,16.9265147842,21.0000131331,26.1127373655,30.7785335816,35.1765417536,38.1663458834,39.9721435023,41.1577814006,39.7175869324,37.3396372779,33.43538636,29.5148977257,24.5106707806,18.9409335911,13.9106765964,9.13736513208,5.3427859403,2.9694771221,1.34801546462,0.525009347054,0.201286918924,0.056261262777,0.0108547391452,0.00295790830021,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_5
    y1_sdETA_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000251541979663,0.0,0.000252557070749,0.00352871523829,0.00781308058891,0.0420994934359,0.146209125266,0.385702524676,0.922642618418,1.69072140501,2.85028822734,4.31373901628,6.00978653872,7.6835005369,8.94370825305,10.2148746799,10.9732742723,11.3376484013,11.2320022694,10.4631440901,9.61346591962,8.10301630538,6.81618200306,5.53184031729,4.57997287002,4.02965274669,4.03995129407,4.64328052936,5.4768307105,6.84575331761,8.18773326037,9.48287761821,10.6953135179,11.2009425867,11.3456103707,10.9270388358,10.3241977213,9.06281371415,7.57272913726,5.96335505375,4.37784287256,2.87694041984,1.69772873829,0.88378540697,0.384409444829,0.133352065033,0.0433532035449,0.0113445100986,0.00302568521231,0.00100827740044,0.000252122443242,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_6
    y1_sdETA_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000286809860417,0.00114481585491,0.00830488528027,0.0274878563986,0.0813327586413,0.237313069157,0.500386994212,1.01341894538,1.67145514011,2.50439564537,3.35066967655,4.21260145373,4.9419457209,5.54091073708,5.64573756224,5.57250813451,5.19193783558,4.64609199517,3.93838046909,3.28110701843,2.70165045615,2.39508732398,2.37765917203,2.74075433478,3.22328642023,3.97882889655,4.57305250371,5.24639656186,5.53650021703,5.61827477743,5.42867939651,4.92386478799,4.21121491899,3.39101313922,2.50899010369,1.66082270785,1.00430200459,0.516844571841,0.237070550535,0.0841368277307,0.0254693437143,0.00858950177667,0.00143135570593,0.00056999483717,0.00028630203082,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_7
    y1_sdETA_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.64683218038e-05,0.000129502469587,0.000561791901271,0.0011666328282,0.00391036144547,0.0113573881662,0.0360616590487,0.0938693729581,0.195748826014,0.353847953781,0.537447507319,0.736798874485,0.926206792213,1.05379315918,1.12979437226,1.14448030142,1.05212013169,0.933213223457,0.797500184148,0.683565419728,0.602565659839,0.607627070901,0.68655775847,0.79954369167,0.929337432325,1.06511920322,1.13125366127,1.13279299743,1.06110217706,0.923602116404,0.739212862041,0.538804118034,0.349594096993,0.191609885093,0.0907138805089,0.0380906868359,0.0119381994396,0.00291461169507,0.00110164459515,0.000345604814919,0.000151121530263,8.6405331816e-05,2.15983448062e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_8
    y1_sdETA_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.69022241353e-05,5.69022241353e-05,0.000283918767842,0.000511197972289,0.00130015418353,0.00439431805346,0.0174378042312,0.0446313136602,0.092675137998,0.164311905025,0.248514808568,0.337002135273,0.416009292175,0.459955665386,0.472977775482,0.452984790695,0.41106168331,0.364079000643,0.338184613495,0.335307937856,0.362870981011,0.410584118327,0.452566921335,0.471439350971,0.461773857442,0.420396771961,0.338622084403,0.249102409512,0.166384174721,0.0914023144605,0.0463894240028,0.01759261191,0.00546677664815,0.00144783417524,0.00045207123977,0.000170328095239,5.63633449166e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_9
    y1_sdETA_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.80655640546,49.5592131288,67.7372000009,122.530058974,344.090873292,664.607422803,1214.66318572,2158.2359906,3539.94725561,5411.11022735,8252.47450548,12081.1439928,17291.4916764,24084.5569554,32751.2496047,43220.2216517,55150.848531,69389.8736011,82756.6115464,96474.5330188,109026.969688,118305.859089,126183.859598,131892.351098,136530.21926,141291.55729,137714.660975,137622.145091,141568.72042,137818.404888,131177.06412,126244.229481,117881.424208,108509.24988,95882.2929353,81987.8760738,68268.3011588,55651.3417739,43194.8047776,33268.0811681,24199.8249789,17620.572905,11982.5911209,8500.03716655,5502.52637945,3255.61318372,2192.12233627,1227.71000174,664.573200386,310.179918244,171.9991018,104.19837884,46.9089368324,7.81340473417,5.21636160048,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_10
    y1_sdETA_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05142410245,2.1038412727,12.6409567157,18.9587225093,50.5763546765,113.753666329,248.555368254,441.3053277,766.738765053,1399.81622092,2206.49354907,3353.5942133,5152.51749825,7591.73212062,10472.3020896,14338.4716561,18931.7199915,24597.1737486,30938.5484693,37335.7827731,43916.3363936,49178.8262438,54327.0421155,57876.9998084,60025.658412,62362.5805095,61482.3245906,61478.1306971,62282.4732963,60088.0666252,57813.2449322,54247.9352806,49550.1204835,43954.6201002,37553.1880552,31113.2568377,24491.0874815,19113.0808753,14386.1242899,10645.0250919,7468.52014715,5218.65404423,3457.73551424,2240.29440675,1400.74195558,836.311225605,456.07706703,222.26565837,124.289534762,42.1403956449,20.0130596363,8.42314649463,2.10677892194,4.21495529536,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_11
    y1_sdETA_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230404669904,0.691645769606,1.38164511121,7.83116084013,14.282887816,36.8483843414,77.3845752982,135.679461468,261.89286485,464.784512468,755.019223372,1171.00491616,1752.36306329,2506.50739365,3498.774395,4702.13690463,6168.81032046,7716.47954105,9407.33228462,10983.0234409,12456.6322186,13499.4877856,14342.5741932,14800.1044324,14884.8772237,14845.7356553,14933.3402259,14365.4628084,13549.964156,12406.6399787,10960.830283,9419.71219368,7760.36635708,6215.3214313,4764.97397252,3553.88958107,2505.29975695,1728.64143618,1144.73391921,759.659576001,453.300629106,258.662868962,151.104244123,69.7898353718,29.2498712709,14.9699228187,6.67869197526,2.99595874043,0.921341111449,0.23009056141,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_12
    y1_sdETA_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0831369042095,0.470672767517,0.858441806458,2.52038160746,5.56611895773,13.4308895394,28.4671592658,50.2300554973,89.8859557395,145.929051687,228.508734997,336.841226232,495.032523193,663.16788953,885.590359634,1100.54859342,1333.29373072,1554.37008378,1725.64972408,1856.36926897,1951.48346021,1959.45013405,1929.33350628,1928.45520242,1859.83978108,1729.13831778,1545.25080444,1340.49174441,1100.3193034,877.030070881,669.89770532,490.521742301,339.775753694,228.502887332,145.876422704,87.6128684132,49.7353122796,27.0554675861,12.4870918323,5.73211569455,2.43688041682,1.07943159869,0.166168358096,0.0554608301155,0.0553504169713,0.0553269108978,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_13
    y1_sdETA_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0201690944339,0.0100703200698,0.0705860686607,0.161359758298,0.503894483834,1.31086913488,2.83335168592,6.01926354839,12.390762012,23.2070287948,40.6814002097,67.215123783,100.477641704,142.193008711,194.18651105,251.627775863,315.06580294,370.136483385,417.702427125,454.283702478,473.467746743,472.630016877,473.523878588,472.453368443,455.597187661,411.537936839,365.509580881,311.470694779,253.07215444,193.401757509,139.062186765,100.455553064,64.2531221553,40.297337021,23.1606669247,13.2793625003,6.20031757425,3.07505419854,1.06887777082,0.463769134787,0.19154345967,0.040295461914,0.0402770810102,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_14
    y1_sdETA_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0028319583193,0.00849222491575,0.00848584205508,0.0452684867618,0.0735599492022,0.172557079442,0.461073385476,1.32688631316,3.08656482196,6.38267984472,13.0848605327,22.4809469971,36.4565801744,56.1384715564,80.6250718905,107.237407216,138.236679989,166.368051511,189.7507132,207.247061532,220.258094028,221.960113259,221.91594494,218.487283025,208.558991406,189.560650923,165.702641172,138.170581408,108.715314552,80.2587210808,56.2612040793,36.6261241896,22.5206638571,12.8247445324,6.17326122549,3.03596901269,1.32967299549,0.520612125028,0.158452150052,0.0792435038139,0.00566539866267,0.014150398134,0.0,0.00282706133011,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_15
    y1_sdETA_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00153120635241,0.00609276270288,0.00913543960286,0.0426453496927,0.140060287377,0.47678180803,1.18521295017,2.3486931185,4.72510640497,7.97506597452,12.3573410545,17.811569765,23.871839643,30.0147181975,34.2791503638,39.1304574448,41.0027238391,41.9747553568,41.8021291008,40.9431808379,38.8209732727,34.8228716686,29.978878959,23.688165023,17.3162485658,12.217328151,7.80504878675,4.53096833444,2.37953636947,1.16091115783,0.467479912712,0.152332006565,0.0396285909548,0.0182911869259,0.00457546783593,0.0,0.00304840432461,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating weights for histo: y1_sdETA_16
    y1_sdETA_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180374718839,0.000180468882384,0.00108247715548,0.0,0.00198774391088,0.00650127067548,0.0386420615294,0.135615570047,0.399041996479,0.950515708675,1.93650738854,3.52494587609,5.62722500847,8.25810439181,10.9687127613,13.4005046068,15.3413905039,16.4555435827,16.8901940341,17.0050388976,16.4829376644,15.3076572323,13.3811558276,10.93188692,8.22410923383,5.5444227091,3.51137669742,1.92515577099,0.971054915314,0.385656523378,0.131452462998,0.034848553135,0.00649980719298,0.00180587347441,0.00108351853882,0.000361005932242,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
    # Creating a new Canvas
    fig = plt.figure(figsize=(12,6),dpi=80)
    frame = gridspec.GridSpec(1,1,right=0.7)
    pad = fig.add_subplot(frame[0])
    # Creating a new Stack.
    # The 17 per-sample arrays, ordered from the innermost curve (signal)
    # outwards.  Curve k of the stack is drawn with the cumulative sum of
    # the first (17 - k) arrays, so the outermost curve is the full sum.
    all_weights = [y1_sdETA_0_weights, y1_sdETA_1_weights, y1_sdETA_2_weights, y1_sdETA_3_weights,
                   y1_sdETA_4_weights, y1_sdETA_5_weights, y1_sdETA_6_weights, y1_sdETA_7_weights,
                   y1_sdETA_8_weights, y1_sdETA_9_weights, y1_sdETA_10_weights, y1_sdETA_11_weights,
                   y1_sdETA_12_weights, y1_sdETA_13_weights, y1_sdETA_14_weights, y1_sdETA_15_weights,
                   y1_sdETA_16_weights]
    # (label, edge colour) per curve, outermost (full stack) first.
    samples = [
        (r"$bg\_dip\_1600\_inf$", "#e5e5e5"),
        (r"$bg\_dip\_1200\_1600$", "#f2f2f2"),
        (r"$bg\_dip\_800\_1200$", "#ccc6aa"),
        (r"$bg\_dip\_600\_800$", "#ccc6aa"),
        (r"$bg\_dip\_400\_600$", "#c1bfa8"),
        (r"$bg\_dip\_200\_400$", "#bab5a3"),
        (r"$bg\_dip\_100\_200$", "#b2a596"),
        (r"$bg\_dip\_0\_100$", "#b7a39b"),
        (r"$bg\_vbf\_1600\_inf$", "#ad998c"),
        (r"$bg\_vbf\_1200\_1600$", "#9b8e82"),
        (r"$bg\_vbf\_800\_1200$", "#876656"),
        (r"$bg\_vbf\_600\_800$", "#afcec6"),
        (r"$bg\_vbf\_400\_600$", "#84c1a3"),
        (r"$bg\_vbf\_200\_400$", "#89a8a0"),
        (r"$bg\_vbf\_100\_200$", "#829e8c"),
        (r"$bg\_vbf\_0\_100$", "#adbcc6"),
        (r"$signal$", "#7a8e99"),
    ]
    for k, (label, edgecolor) in enumerate(samples):
        # sum() adds the arrays left-to-right, matching the original
        # hand-written w0+w1+...+wn chains bit-for-bit.
        stacked = sum(all_weights[:len(all_weights) - k])
        # NOTE: the 'normed' keyword (removed in matplotlib 3.1) has been
        # dropped; it was False here, which is the default behaviour.
        pad.hist(x=xData, bins=xBinning, weights=stacked,
                 label=label, histtype="step", rwidth=1.0,
                 color=None, edgecolor=edgecolor, linewidth=1, linestyle="solid",
                 bottom=None, cumulative=False, align="mid", orientation="vertical")
    # Axis
    plt.rc('text',usetex=False)
    # BUGFIX: the xlabel TeX macros are now wrapped in '$...$'; without the
    # delimiters (and with usetex=False) matplotlib printed the raw
    # '\Delta\eta' text literally instead of rendering it via mathtext.
    plt.xlabel(r"$\Delta\eta ( j_{1} , j_{2} )$ ",\
        fontsize=16,color="black")
    plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
        fontsize=16,color="black")
    # Boundary of y-axis: 10% headroom above the tallest stacked bin
    ymax = sum(all_weights).max()*1.1
    ymin = 0  # linear scale
    #ymin = min([x for x in sum(all_weights) if x])/100. # log scale
    plt.gca().set_ylim(ymin,ymax)
    # Log/Linear scale for X-axis
    plt.gca().set_xscale("linear")
    #plt.gca().set_xscale("log",nonposx="clip")
    # Log/Linear scale for Y-axis
    plt.gca().set_yscale("linear")
    #plt.gca().set_yscale("log",nonposy="clip")
    # Legend placed outside the axes, to the right
    plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
    # Saving the image in each output-format directory
    plt.savefig('../../HTML/MadAnalysis5job_0/selection_0.png')
    # BUGFIX: the PDF copy was previously written with a '.png' extension.
    plt.savefig('../../PDF/MadAnalysis5job_0/selection_0.pdf')
    plt.savefig('../../DVI/MadAnalysis5job_0/selection_0.eps')
# Draw and save the histogram when this file is executed as a script.
if __name__ == '__main__':
    selection_0()
| 143.520619
| 1,125
| 0.756564
| 5,396
| 27,843
| 3.768347
| 0.243514
| 0.165732
| 0.242992
| 0.316514
| 0.404642
| 0.404642
| 0.403167
| 0.399331
| 0.397462
| 0.397462
| 0
| 0.505021
| 0.059333
| 27,843
| 193
| 1,126
| 144.264249
| 0.271353
| 0.049743
| 0
| 0.185841
| 0
| 0.00885
| 0.039437
| 0.007569
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3bb042b059d140b2ca24d651b9f81ffa05bffed2
| 41
|
py
|
Python
|
spvcm/both_levels/generic/__init__.py
|
weikang9009/spvcm
|
00ec35331e0e1a67bcd841a6b3761a23099617f7
|
[
"MIT"
] | 14
|
2017-05-21T08:29:08.000Z
|
2021-09-22T00:29:15.000Z
|
spvcm/both_levels/generic/__init__.py
|
weikang9009/spvcm
|
00ec35331e0e1a67bcd841a6b3761a23099617f7
|
[
"MIT"
] | 12
|
2018-05-11T11:13:21.000Z
|
2020-02-07T14:23:12.000Z
|
spvcm/both_levels/generic/__init__.py
|
weikang9009/spvcm
|
00ec35331e0e1a67bcd841a6b3761a23099617f7
|
[
"MIT"
] | 8
|
2017-05-20T00:55:40.000Z
|
2020-07-02T14:52:49.000Z
|
from .model import Generic, Base_Generic
| 20.5
| 40
| 0.829268
| 6
| 41
| 5.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 1
| 41
| 41
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3bd689e55d3fc1fd7cdb42709809f2ada575b55b
| 743
|
py
|
Python
|
src/py/Mapping.py
|
gul2u/decipher
|
512a9860f625472cc0ada0c69f822b8cdcc33d43
|
[
"MIT"
] | null | null | null |
src/py/Mapping.py
|
gul2u/decipher
|
512a9860f625472cc0ada0c69f822b8cdcc33d43
|
[
"MIT"
] | 1
|
2021-06-11T19:53:25.000Z
|
2021-06-11T19:53:25.000Z
|
src/py/Mapping.py
|
gul2u/decipher
|
512a9860f625472cc0ada0c69f822b8cdcc33d43
|
[
"MIT"
] | 1
|
2017-10-09T10:54:00.000Z
|
2017-10-09T10:54:00.000Z
|
#!/usr/bin/env python
""" generated source for module Mapping """
class Mapping(object):
    """Substitution-cipher key: maps each letter 'A'-'Z' to a plaintext
    character, with '#' marking a still-unknown letter.

    (Auto-translated from Java source; the getter/setter pair is kept
    for interface compatibility with existing callers.)
    """

    # Class-level default; every instance gets its own copy in __init__.
    key = []

    def __init__(self):
        """Start with all 26 letters unmapped ('#')."""
        self.key = ["#"]*26

    @classmethod
    def fromTranslation(cls, cipher, translation):
        """Build a key from parallel cipher/translation strings.

        Each cipher letter (assumed uppercase 'A'-'Z') is mapped to the
        translation character at the same index; letters not mentioned
        stay '#'.  Returns a new 26-element list, NOT a Mapping instance
        (preserved from the original generated code).
        """
        # FIX: the first parameter of a @classmethod was misleadingly
        # named 'self'; it receives the class, so it is now 'cls'.
        newKey = ["#"]*26
        for i, ch in enumerate(translation):
            newKey[ord(cipher[i]) - ord('A')] = ch
        return newKey[:]

    def getKey(self):
        """Return the current key list."""
        return self.key

    def setKey(self, newKey):
        """Replace the key list with *newKey* (stored by reference)."""
        self.key = newKey
| 27.518519
| 62
| 0.581427
| 82
| 743
| 5.109756
| 0.390244
| 0.214797
| 0.257757
| 0.229117
| 0.210024
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.279946
| 743
| 26
| 63
| 28.576923
| 0.773832
| 0.327052
| 0
| 0
| 1
| 0
| 0.006494
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
3bfcd517f16a12784f8b4c18b2b187cc31e2580b
| 44
|
py
|
Python
|
venv/Lib/site-packages/bilibili_api/user.py
|
Lparksi/bot
|
8a38953d09436b60e8edff4ebe86bf19fe3b7046
|
[
"MIT"
] | 3
|
2020-03-31T10:36:31.000Z
|
2020-04-23T12:01:10.000Z
|
bilibili_api/user.py
|
DeSireFire/bilibili_api
|
23fc1b982cbb7b4a2afbb350fa6fae1b05a41df5
|
[
"MIT"
] | 1
|
2020-07-16T14:51:26.000Z
|
2020-07-30T12:46:55.000Z
|
bilibili_api/user.py
|
DeSireFire/bilibili_api
|
23fc1b982cbb7b4a2afbb350fa6fae1b05a41df5
|
[
"MIT"
] | null | null | null |
from .src.user import UserInfo, UserOperate
| 22
| 43
| 0.818182
| 6
| 44
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 44
| 1
| 44
| 44
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0218b3113ab8415a9b8afa092eabf915ad20827c
| 22,888
|
py
|
Python
|
purequant/trade/bitmex.py
|
cyqdev/PureQuant
|
ebaac21c2d263ad4632a4318ed0bf92e718b78c2
|
[
"MIT"
] | null | null | null |
purequant/trade/bitmex.py
|
cyqdev/PureQuant
|
ebaac21c2d263ad4632a4318ed0bf92e718b78c2
|
[
"MIT"
] | null | null | null |
purequant/trade/bitmex.py
|
cyqdev/PureQuant
|
ebaac21c2d263ad4632a4318ed0bf92e718b78c2
|
[
"MIT"
] | 1
|
2020-12-09T06:42:44.000Z
|
2020-12-09T06:42:44.000Z
|
"""
bitmex
Author: Gary-Hertel
Date: 2020/10/27
email: purequant@foxmail.com
"""
import time
from purequant.exchange.bitmex.bitmex import Bitmex
from purequant.config import config
from purequant.exceptions import *
class BITMEX:
    """BITMEX REST API wrapper used by purequant's trading layer.

    Wraps the low-level ``Bitmex`` exchange client and implements the
    framework's order-management conventions (price/time/automatic
    cancellation and re-issue) on top of it.
    """
    def __init__(self, access_key, secret_key, instrument_id, leverage=None, testing=None):
        """
        BITMEX rest api.

        :param access_key: api key
        :param secret_key: secret key
        :param instrument_id: contract id, e.g. "XBTUSD"
        :param leverage: leverage multiple for opening positions; defaults to 20 if omitted
        :param testing: whether this is a testnet account; defaults to False
        """
        self.__access_key = access_key
        self.__secret_key = secret_key
        self.__instrument_id = instrument_id
        # NOTE(review): `False or testing` is just `testing`, so this stays None
        # (not False) when testing is omitted — presumably Bitmex treats None as
        # falsy; confirm against the Bitmex client.
        self.__testing = False or testing
        self.__bitmex = Bitmex(self.__access_key, self.__secret_key, testing=self.__testing)
        self.__leverage = leverage or 20
        # Leverage is applied once at construction time for this instrument.
        self.__bitmex.set_leverage(self.__instrument_id, leverage=self.__leverage)
    def get_single_equity(self, currency=None):
        """
        Get the account equity for the contract.

        :param currency: documented as defaulting to "XBt" (all BITMEX trades
            settle in XBT)
        :return: equity as a float, in XBT
        """
        # NOTE(review): the `currency` argument is unconditionally overwritten
        # here, so callers cannot actually query any other currency.
        currency = "XBt"
        data = self.__bitmex.get_wallet(currency=currency)
        # "prevAmount" is denominated in satoshi; convert to whole XBT.
        XBT = data["prevAmount"] * 0.00000001
        return XBT
    def get_depth(self, type=None, depth=None):
        """
        Fetch order-book depth from BITMEX.

        :param type: pass "asks" for the ask side only, "bids" for the bid side
            only; if omitted, a dict with both sides is returned
        :param depth: number of levels to return per side, default 10
        :return: list of prices, or {"asks": [...], "bids": [...]}
        """
        depth = depth or 10
        response = self.__bitmex.get_orderbook(self.__instrument_id, depth=depth)
        asks_list = []  # ask (sell) side prices
        bids_list = []  # bid (buy) side prices
        for i in response:
            if i['side'] == "Sell":
                asks_list.append(i['price'])
            elif i['side'] == "Buy":
                bids_list.append(i['price'])
        result = {"asks": asks_list, "bids": bids_list}
        if type == "asks":
            return asks_list
        elif type == "bids":
            return bids_list
        else:
            return result
    def get_ticker(self):
        """Return the latest trade price as {"last": price}."""
        receipt = self.__bitmex.get_trade(symbol=self.__instrument_id, reverse=True, count=10)[0]
        last = receipt["price"]
        return {"last": last}
    def get_position(self):
        """Return the current position as a dict with 'direction'
        ('long'/'short'/'none'), 'amount' and 'price' keys.

        :raises GetPositionError: if the underlying position query fails
        """
        try:
            result = self.__bitmex.get_positions(symbol=self.__instrument_id)[0]
            # NOTE(review): `dict` shadows the builtin; left unchanged here.
            if result["currentQty"] > 0:
                dict = {'direction': 'long', 'amount': result["currentQty"],
                        'price': result["avgCostPrice"]}
                return dict
            elif result["currentQty"] < 0:
                dict = {'direction': 'short', 'amount': abs(result['currentQty']),
                        'price': result['avgCostPrice']}
                return dict
            else:
                dict = {'direction': 'none', 'amount': 0, 'price': 0.0}
                return dict
        except Exception as e:
            raise GetPositionError(e)
    def get_kline(self, time_frame, count=None):
        """
        Fetch k-line (candlestick) data.

        :param time_frame: k-line period (BITMEX binSize)
        :param count: number of bars to return, default 200
        :return: list of [timestamp, open, high, low, close, volume] rows,
            newest first (reverse=True)
        """
        count = count or 200
        records = []
        response = self.__bitmex.get_bucket_trades(binSize=time_frame, partial=False, symbol=self.__instrument_id,
                                                   columns="timestamp, open, high, low, close, volume", count=count,
                                                   reverse=True)
        for i in response:
            records.append([i['timestamp'], i['open'], i['high'], i['low'], i['close'], i['volume']])
        return records
    def revoke_order(self, order_id):
        """Cancel the order with the given id and return the exchange receipt."""
        receipt = self.__bitmex.cancel_order(order_id)
        return receipt
    def get_order_info(self):
        """Query the most recent order on this instrument and normalize it to a
        Chinese-keyed status dict used throughout the order-management logic.

        NOTE(review): returns None for any ordStatus not handled below
        (e.g. "Triggered") — callers index into the result and would raise.
        """
        result = self.__bitmex.get_orders(symbol=self.__instrument_id, count=1, reverse=True)[0]
        # action: "买入" = buy, "卖出" = sell (runtime strings, kept verbatim).
        action = "买入" if result['side'] == "Buy" else "卖出"
        symbol = result["symbol"]
        price = result["avgPx"]
        amount = result["cumQty"]
        order_status = result['ordStatus']
        if order_status == "Filled":
            # "完全成交" = fully filled
            dict = {"交易所": "BITMEX", "合约ID": symbol, "方向": action,
                    "订单状态": "完全成交", "成交均价": price, "已成交数量": amount}
            return dict
        elif order_status == "Rejected":
            # "失败" = failed/rejected
            dict = {"交易所": "BITMEX", "合约ID": symbol, "方向": action, "订单状态": "失败"}
            return dict
        elif order_status == "Canceled":
            # "撤单成功" = successfully cancelled
            dict = {"交易所": "BITMEX", "合约ID": symbol, "方向": action, "订单状态": "撤单成功",
                    "成交均价": price, "已成交数量": amount}
            return dict
        elif order_status == "New":
            # "等待成交" = pending/working
            dict = {"交易所": "BITMEX", "合约ID": symbol, "方向": action, "订单状态": "等待成交"}
            return dict
        elif order_status == "PartiallyFilled":
            # "部分成交" = partially filled
            dict = {"交易所": "BITMEX", "合约ID": symbol, "方向": action, "订单状态": "部分成交",
                    "成交均价": price, "已成交数量": amount}
            return dict
    def buy(self, price, size, order_type=None, timeInForce=None):
        """
        Open a long position (buy).

        :param price: limit price
        :param size: order quantity
        :param order_type: Market, Limit, Stop, StopLimit, MarketIfTouched,
            LimitIfTouched, Pegged; default "Limit"
        :param timeInForce: Day, GoodTillCancel, ImmediateOrCancel, FillOrKill;
            default "GoodTillCancel"
        :return: order-status dict as produced by get_order_info()
        """
        order_type = order_type or "Limit"
        timeInForce = timeInForce or "GoodTillCancel"
        result = self.__bitmex.create_order(symbol=self.__instrument_id, side="Buy", price=price, orderQty=size,
                                            ordType=order_type, timeInForce=timeInForce)
        # NOTE(review): the raise is INSIDE the try, so SendOrderError is always
        # caught by the bare except below and never propagates; when `result` has
        # no 'error' key the KeyError is swallowed the same way. Control always
        # continues in the except branch.
        try:
            raise SendOrderError(msg=result['error']['message'])
        except:
            order_id = result["orderID"]
            order_info = self.get_order_info()  # query order status once after placing the order
            # NOTE(review): "失败 " carries a trailing space and can never equal
            # the "失败" value produced by get_order_info() — confirm intent.
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately
                return order_info
            # Order is neither fully filled nor failed at this point.
            if config.price_cancellation:  # price-based cancellation: if the last price moves past the order price by the configured amplitude, cancel and re-issue
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:  # if the cancel fails, the order may have fully or partially filled in the meantime
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            if state['订单状态'] == "撤单成功":  # re-issue for the remaining quantity (original size minus filled amount)
                                return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                size - state["已成交数量"])
                        except:  # cancel failed: the order has fully filled
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            if state['订单状态'] == "撤单成功":
                                return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                size - state["已成交数量"])
                        except:  # cancel failed: the order has fully filled; re-query and return if so
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
            if config.time_cancellation:  # time-based cancellation: if still unfilled after the configured delay, cancel and re-issue until fully filled
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info()
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                            size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.buy(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                            size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
            if config.automatic_cancellation:
                # If the order is not fully filled and neither price nor time
                # cancellation is enabled, cancel automatically and return the
                # cancellation result.
                try:
                    self.revoke_order(order_id)
                    state = self.get_order_info()
                    return state
                except:
                    order_info = self.get_order_info()  # query order status again
                    if order_info["订单状态"] == "完全成交":
                        return order_info
            else:  # trading assistant disabled: return the queried order info directly
                return order_info
    def sell(self, price, size, order_type=None, timeInForce=None):
        """Close a long position (sell). Same parameters and cancellation
        behaviour as buy(), mirrored for the sell side."""
        order_type = order_type or "Limit"
        timeInForce = timeInForce or "GoodTillCancel"
        result = self.__bitmex.create_order(symbol=self.__instrument_id, side="Sell", price=price, orderQty=size,
                                            ordType=order_type, timeInForce=timeInForce)
        # NOTE(review): same always-swallowed raise pattern as in buy().
        try:
            raise SendOrderError(msg=result['error']['message'])
        except:
            order_id = result["orderID"]
            order_info = self.get_order_info()  # query order status once after placing the order
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately (NOTE: trailing space, see buy())
                return order_info
            # Order is neither fully filled nor failed at this point.
            if config.price_cancellation:  # price-based cancellation: if the last price falls below the order price by the configured amplitude, cancel and re-issue
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:  # if the cancel fails, the order may have fully or partially filled in the meantime
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            # NOTE(review): re-issues `size + filled` here, while
                            # buy() uses `size - filled` — presumably this should
                            # also subtract; confirm against the other exchanges.
                            if state['订单状态'] == "撤单成功":  # re-issue for the remaining quantity
                                return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                 size + state["已成交数量"])
                        except:  # cancel failed: the order has fully filled
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            if state['订单状态'] == "撤单成功":
                                return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                 size + state["已成交数量"])
                        except:  # cancel failed: the order has fully filled; re-query and return if so
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
            if config.time_cancellation:  # time-based cancellation: if still unfilled after the configured delay, cancel and re-issue until fully filled
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info()
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                             size + state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.sell(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                             size + state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
            if config.automatic_cancellation:
                # Fallback: cancel automatically and return the cancellation result.
                try:
                    self.revoke_order(order_id)
                    state = self.get_order_info()
                    return state
                except:
                    order_info = self.get_order_info()  # query order status again
                    if order_info["订单状态"] == "完全成交":
                        return order_info
            else:  # trading assistant disabled: return the queried order info directly
                return order_info
    def sellshort(self, price, size, order_type=None, timeInForce=None):
        """Open a short position (sell short). Same parameters and cancellation
        behaviour as sell()."""
        order_type = order_type or "Limit"
        timeInForce = timeInForce or "GoodTillCancel"
        result = self.__bitmex.create_order(symbol=self.__instrument_id, side="Sell", price=price, orderQty=size,
                                            ordType=order_type, timeInForce=timeInForce)
        # NOTE(review): same always-swallowed raise pattern as in buy().
        try:
            raise SendOrderError(msg=result['error']['message'])
        except:
            order_id = result["orderID"]
            order_info = self.get_order_info()  # query order status once after placing the order
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately (NOTE: trailing space, see buy())
                return order_info
            # Order is neither fully filled nor failed at this point.
            if config.price_cancellation:  # price-based cancellation: cancel and re-issue if price moved past the threshold
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:  # if the cancel fails, the order may have fully or partially filled in the meantime
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            # NOTE(review): `size + filled` — see the note in sell().
                            if state['订单状态'] == "撤单成功":  # re-issue for the remaining quantity
                                return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                      size + state["已成交数量"])
                        except:  # cancel failed: the order has fully filled
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) <= price * (1 - config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            if state['订单状态'] == "撤单成功":
                                return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                      size + state["已成交数量"])
                        except:  # cancel failed: the order has fully filled; re-query and return if so
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
            if config.time_cancellation:  # time-based cancellation: if still unfilled after the configured delay, cancel and re-issue until fully filled
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info()
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                  size + state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.sellshort(float(self.get_ticker()['last']) * (1 - config.reissue_order),
                                                  size + state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
            if config.automatic_cancellation:
                # Fallback: cancel automatically and return the cancellation result.
                try:
                    self.revoke_order(order_id)
                    state = self.get_order_info()
                    return state
                except:
                    order_info = self.get_order_info()  # query order status again
                    if order_info["订单状态"] == "完全成交":
                        return order_info
            else:  # trading assistant disabled: return the queried order info directly
                return order_info
    def buytocover(self, price, size, order_type=None, timeInForce=None):
        """Close a short position (buy to cover). Same parameters and
        cancellation behaviour as buy()."""
        order_type = order_type or "Limit"
        timeInForce = timeInForce or "GoodTillCancel"
        result = self.__bitmex.create_order(symbol=self.__instrument_id, side="Buy", price=price, orderQty=size,
                                            ordType=order_type, timeInForce=timeInForce)
        # NOTE(review): same always-swallowed raise pattern as in buy().
        try:
            raise SendOrderError(msg=result['error']['message'])
        except:
            order_id = result["orderID"]
            order_info = self.get_order_info()  # query order status once after placing the order
            if order_info["订单状态"] == "完全成交" or order_info["订单状态"] == "失败 ":  # fully filled or failed: return immediately (NOTE: trailing space, see buy())
                return order_info
            # Order is neither fully filled nor failed at this point.
            if config.price_cancellation:  # price-based cancellation: cancel and re-issue if price moved past the threshold
                if order_info["订单状态"] == "等待成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:  # if the cancel fails, the order may have fully or partially filled in the meantime
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            if state['订单状态'] == "撤单成功":  # re-issue for the remaining quantity (original size minus filled amount)
                                return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                       size - state["已成交数量"])
                        except:  # cancel failed: the order has fully filled
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
                if order_info["订单状态"] == "部分成交":
                    if float(self.get_ticker()['last']) >= price * (1 + config.price_cancellation_amplitude):
                        try:
                            self.revoke_order(order_id)
                            state = self.get_order_info()
                            if state['订单状态'] == "撤单成功":
                                return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                       size - state["已成交数量"])
                        except:  # cancel failed: the order has fully filled; re-query and return if so
                            order_info = self.get_order_info()  # query order status again
                            if order_info["订单状态"] == "完全成交":
                                return order_info
            if config.time_cancellation:  # time-based cancellation: if still unfilled after the configured delay, cancel and re-issue until fully filled
                time.sleep(config.time_cancellation_seconds)
                order_info = self.get_order_info()
                if order_info["订单状态"] == "等待成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                   size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
                if order_info["订单状态"] == "部分成交":
                    try:
                        self.revoke_order(order_id)
                        state = self.get_order_info()
                        if state['订单状态'] == "撤单成功":
                            return self.buytocover(float(self.get_ticker()['last']) * (1 + config.reissue_order),
                                                   size - state["已成交数量"])
                    except:
                        order_info = self.get_order_info()  # query order status again
                        if order_info["订单状态"] == "完全成交":
                            return order_info
            if config.automatic_cancellation:
                # Fallback: cancel automatically and return the cancellation result.
                try:
                    self.revoke_order(order_id)
                    state = self.get_order_info()
                    return state
                except:
                    order_info = self.get_order_info()  # query order status again
                    if order_info["订单状态"] == "完全成交":
                        return order_info
            else:  # trading assistant disabled: return the queried order info directly
                return order_info
    def BUY(self, cover_short_price, cover_short_size, open_long_price, open_long_size, order_type=None):
        """Reverse to long: cover the short first, then open a long only when
        the cover fully filled. Returns both results on success, otherwise only
        the cover result."""
        result1 = self.buytocover(cover_short_price, cover_short_size, order_type)
        if "完全成交" in str(result1):  # cover fully filled
            result2 = self.buy(open_long_price, open_long_size, order_type)
            return {"平仓结果": result1, "开仓结果": result2}
        else:
            return result1
    def SELL(self, cover_long_price, cover_long_size, open_short_price, open_short_size, order_type=None):
        """Reverse to short: close the long first, then open a short only when
        the close fully filled. Returns both results on success, otherwise only
        the close result."""
        result1 = self.sell(cover_long_price, cover_long_size, order_type)
        if "完全成交" in str(result1):  # close fully filled
            result2 = self.sellshort(open_short_price, open_short_size, order_type)
            return {"平仓结果": result1, "开仓结果": result2}
        else:
            return result1
| 49.434125
| 116
| 0.517258
| 2,248
| 22,888
| 5.04226
| 0.102758
| 0.118306
| 0.051875
| 0.067755
| 0.789149
| 0.775562
| 0.761094
| 0.752448
| 0.723335
| 0.715395
| 0
| 0.005436
| 0.373034
| 22,888
| 463
| 117
| 49.434125
| 0.78446
| 0.102281
| 0
| 0.781407
| 0
| 0
| 0.065855
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035176
| false
| 0
| 0.01005
| 0
| 0.21608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
021ae2ac2f22937831f2234fa4818ce632b624f2
| 45
|
py
|
Python
|
PersianStemmer/__init__.py
|
MrHTZ/PersianStemmer-Python
|
0fed1e51d11bc718608e42daad4685e1fc50a955
|
[
"BSD-2-Clause"
] | 44
|
2017-02-16T03:56:08.000Z
|
2022-02-27T17:31:47.000Z
|
PersianStemmer/__init__.py
|
MrHTZ/PersianStemmer-Python
|
0fed1e51d11bc718608e42daad4685e1fc50a955
|
[
"BSD-2-Clause"
] | 4
|
2017-03-18T07:07:52.000Z
|
2021-06-20T10:15:43.000Z
|
PersianStemmer/__init__.py
|
MrHTZ/PersianStemmer-Python
|
0fed1e51d11bc718608e42daad4685e1fc50a955
|
[
"BSD-2-Clause"
] | 13
|
2017-02-16T03:30:06.000Z
|
2022-01-27T21:34:12.000Z
|
from .persian_stemmer import PersianStemmer
| 15
| 43
| 0.866667
| 5
| 45
| 7.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 45
| 2
| 44
| 22.5
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0230456d4eca9070c12d4f497a54b24929a4c7e6
| 3,072
|
py
|
Python
|
generativepy/drawing3d.py
|
LloydTao/generativepy
|
8bf6afed57200cbebd3163e4fdc730fc8761e753
|
[
"MIT"
] | 58
|
2019-06-15T16:09:28.000Z
|
2022-03-25T03:24:26.000Z
|
generativepy/drawing3d.py
|
LloydTao/generativepy
|
8bf6afed57200cbebd3163e4fdc730fc8761e753
|
[
"MIT"
] | 1
|
2021-09-09T16:12:18.000Z
|
2021-09-09T18:13:05.000Z
|
generativepy/drawing3d.py
|
LloydTao/generativepy
|
8bf6afed57200cbebd3163e4fdc730fc8761e753
|
[
"MIT"
] | 4
|
2020-07-26T10:54:19.000Z
|
2021-11-17T17:24:13.000Z
|
# Author: Martin McBride
# Created: 2021-019-01
# Copyright (C) 2018, Martin McBride
# License: MIT
import moderngl
import numpy as np
from PIL import Image
from generativepy.color import Color
def make_3dimage(outfile, draw, width, height, background=Color(0), channels=3):
    '''
    Render a single 3D scene to a PNG file using moderngl.
    :param outfile: Name of output file (a '.png' suffix is stripped and re-added)
    :param draw: the draw function
    :param width: width in pixels, int
    :param height: height in pixels, int
    :param background: background colour
    :param channels: 3 for rgb, 4 for rgba
    :return:
    '''
    base = outfile[:-4] if outfile.lower().endswith('.png') else outfile
    pixels = make_3dimage_frame(draw, width, height, background, channels)
    Image.fromarray(pixels).save(base + '.png')
def make_3dimage_frame(draw, width, height, background=Color(0), channels=3):
    '''
    Create a numpy frame using moderngl.
    :param draw: the draw function, called as draw(ctx, width, height, 0, 1)
    :param width: width in pixels, int
    :param height: height in pixels, int
    :param background: background colour
    :param channels: 3 for rgb, 4 for rgba
    :return: numpy uint8 array of shape (height, width, channels)
    '''
    ctx = moderngl.create_standalone_context()
    fbo = ctx.simple_framebuffer((width, height))
    fbo.use()
    fbo.clear(*background)
    draw(ctx, width, height, 0, 1)
    # Fix: honour the `channels` parameter — previously fbo.read() and the
    # reshape both hard-coded 3 components, so channels=4 produced rgb output.
    data = fbo.read(components=channels)
    frame = np.frombuffer(data, dtype=np.uint8)
    frame = frame.reshape((height, width, channels))
    # OpenGL reads rows bottom-up; flip vertically for image convention.
    frame = frame[::-1]
    ctx.release()
    return frame
def make_3dimage_frames(draw, width, height, count, background=Color(0), channels=3):
    '''
    Create a sequence of numpy frames using moderngl.
    :param draw: the draw function, called as draw(ctx, width, height, i, count)
    :param width: width in pixels, int
    :param height: height in pixels, int
    :param count: number of frames to create
    :param background: background colour
    :param channels: 3 for rgb, 4 for rgba
    :return: generator yielding numpy uint8 arrays of shape (height, width, channels)
    '''
    for i in range(count):
        ctx = moderngl.create_standalone_context()
        fbo = ctx.simple_framebuffer((width, height))
        fbo.use()
        fbo.clear(*background)
        draw(ctx, width, height, i, count)
        # Fix: honour the `channels` parameter — previously hard-coded to 3.
        data = fbo.read(components=channels)
        frame = np.frombuffer(data, dtype=np.uint8)
        frame = frame.reshape((height, width, channels))
        # OpenGL reads rows bottom-up; flip vertically for image convention.
        frame = frame[::-1]
        ctx.release()
        yield frame
def make_3dimages(outfile, draw, width, height, background=Color(0), channels=3):
    '''
    Create a sequence of PNG files using moderngl, one per generated frame.
    Files are named <outfile><index>.png with the index zero-padded to 8 digits.
    :param outfile: Base name of the output files ('.png' suffix is stripped)
    :param draw: the draw function
    :param width: width in pixels, int
    :param height: height in pixels, int
    :param background: background colour
    :param channels: 3 for rgb, 4 for rgba
    :return:
    '''
    base = outfile[:-4] if outfile.lower().endswith('.png') else outfile
    for index, pixels in enumerate(make_3dimage_frames(draw, width, height, background, channels)):
        Image.fromarray(pixels).save(f"{base}{index:08d}.png")
| 30.415842
| 85
| 0.655599
| 409
| 3,072
| 4.885086
| 0.217604
| 0.055055
| 0.044044
| 0.064064
| 0.847848
| 0.834835
| 0.802803
| 0.734234
| 0.734234
| 0.708208
| 0
| 0.019583
| 0.235352
| 3,072
| 100
| 86
| 30.72
| 0.830992
| 0.38151
| 0
| 0.571429
| 0
| 0
| 0.009292
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.095238
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0253f2d791e34784a2716988f07264ef04a2e9f7
| 23
|
py
|
Python
|
examples/pytorch/transformer/optims/__init__.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 9,516
|
2018-12-08T22:11:31.000Z
|
2022-03-31T13:04:33.000Z
|
examples/pytorch/transformer/optims/__init__.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,494
|
2018-12-08T22:43:00.000Z
|
2022-03-31T21:16:27.000Z
|
examples/pytorch/transformer/optims/__init__.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,529
|
2018-12-08T22:56:14.000Z
|
2022-03-31T13:07:41.000Z
|
from .noamopt import *
| 11.5
| 22
| 0.73913
| 3
| 23
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
02580c94d5bd172f63846fe2186b41ccecc18df3
| 2,871
|
py
|
Python
|
huaweicloud-sdk-moderation/huaweicloudsdkmoderation/v1/model/__init__.py
|
handsome-baby/huaweicloud-sdk-python-v3
|
6cdcf1da8b098427e58fc3335a387c14df7776d0
|
[
"Apache-2.0"
] | 1
|
2021-04-16T07:59:28.000Z
|
2021-04-16T07:59:28.000Z
|
huaweicloud-sdk-moderation/huaweicloudsdkmoderation/v1/model/__init__.py
|
Lencof/huaweicloud-sdk-python-v3
|
d13dc4e2830a83e295be6e4de021999b3376e34e
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-moderation/huaweicloudsdkmoderation/v1/model/__init__.py
|
Lencof/huaweicloud-sdk-python-v3
|
d13dc4e2830a83e295be6e4de021999b3376e34e
|
[
"Apache-2.0"
] | 1
|
2022-01-17T02:24:18.000Z
|
2022-01-17T02:24:18.000Z
|
# coding: utf-8
from __future__ import absolute_import
# import models into model package
from huaweicloudsdkmoderation.v1.model.check_result_items_body import CheckResultItemsBody
from huaweicloudsdkmoderation.v1.model.check_result_result_body import CheckResultResultBody
from huaweicloudsdkmoderation.v1.model.check_task_jobs_items_body import CheckTaskJobsItemsBody
from huaweicloudsdkmoderation.v1.model.image_batch_moderation_req import ImageBatchModerationReq
from huaweicloudsdkmoderation.v1.model.image_batch_moderation_result_body import ImageBatchModerationResultBody
from huaweicloudsdkmoderation.v1.model.image_detection_req import ImageDetectionReq
from huaweicloudsdkmoderation.v1.model.image_detection_result_body import ImageDetectionResultBody
from huaweicloudsdkmoderation.v1.model.image_detection_result_detail import ImageDetectionResultDetail
from huaweicloudsdkmoderation.v1.model.image_detection_result_detail_face_detail import ImageDetectionResultDetailFaceDetail
from huaweicloudsdkmoderation.v1.model.image_detection_result_detail_politics import ImageDetectionResultDetailPolitics
from huaweicloudsdkmoderation.v1.model.image_detection_result_detail_porn import ImageDetectionResultDetailPorn
from huaweicloudsdkmoderation.v1.model.run_check_result_request import RunCheckResultRequest
from huaweicloudsdkmoderation.v1.model.run_check_result_response import RunCheckResultResponse
from huaweicloudsdkmoderation.v1.model.run_check_task_jobs_request import RunCheckTaskJobsRequest
from huaweicloudsdkmoderation.v1.model.run_check_task_jobs_response import RunCheckTaskJobsResponse
from huaweicloudsdkmoderation.v1.model.run_image_batch_moderation_request import RunImageBatchModerationRequest
from huaweicloudsdkmoderation.v1.model.run_image_batch_moderation_response import RunImageBatchModerationResponse
from huaweicloudsdkmoderation.v1.model.run_image_moderation_request import RunImageModerationRequest
from huaweicloudsdkmoderation.v1.model.run_image_moderation_response import RunImageModerationResponse
from huaweicloudsdkmoderation.v1.model.run_task_sumbit_request import RunTaskSumbitRequest
from huaweicloudsdkmoderation.v1.model.run_task_sumbit_response import RunTaskSumbitResponse
from huaweicloudsdkmoderation.v1.model.run_text_moderation_request import RunTextModerationRequest
from huaweicloudsdkmoderation.v1.model.run_text_moderation_response import RunTextModerationResponse
from huaweicloudsdkmoderation.v1.model.task_sumbit_req import TaskSumbitReq
from huaweicloudsdkmoderation.v1.model.task_sumbit_result_body import TaskSumbitResultBody
from huaweicloudsdkmoderation.v1.model.text_detection_items_req import TextDetectionItemsReq
from huaweicloudsdkmoderation.v1.model.text_detection_req import TextDetectionReq
from huaweicloudsdkmoderation.v1.model.text_detection_response_result import TextDetectionResponseResult
| 84.441176
| 124
| 0.925113
| 300
| 2,871
| 8.533333
| 0.203333
| 0.30625
| 0.328125
| 0.382813
| 0.564844
| 0.549219
| 0.402734
| 0.180469
| 0
| 0
| 0
| 0.010569
| 0.044235
| 2,871
| 33
| 125
| 87
| 0.922376
| 0.016022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
025b41bb8183cef7680fc9922ad47c596b8c661c
| 2,547
|
py
|
Python
|
epytope/Data/pssms/smmpmbec/mat/B_15_01_10.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 7
|
2021-02-01T18:11:28.000Z
|
2022-01-31T19:14:07.000Z
|
epytope/Data/pssms/smmpmbec/mat/B_15_01_10.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 22
|
2021-01-02T15:25:23.000Z
|
2022-03-14T11:32:53.000Z
|
epytope/Data/pssms/smmpmbec/mat/B_15_01_10.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 4
|
2021-05-28T08:50:38.000Z
|
2022-03-14T11:45:32.000Z
|
# SMM-PMBEC position-specific scoring matrix for MHC allele HLA-B*15:01,
# 10-mer peptides. Outer keys 0-9 are peptide positions, each mapping the 20
# amino-acid one-letter codes to a score contribution; the special key -1
# holds the regression constant under 'con'.
# NOTE(review): the sign convention (whether lower scores mean stronger
# binding) is not visible here — confirm against the SMM-PMBEC predictor code.
B_15_01_10 = {0: {'A': 0.021, 'C': 0.026, 'E': 0.281, 'D': 0.246, 'G': 0.122, 'F': -0.243, 'I': -0.322, 'H': -0.032, 'K': 0.025, 'M': -0.21, 'L': -0.24, 'N': 0.097, 'Q': 0.141, 'P': 0.331, 'S': 0.061, 'R': -0.192, 'T': 0.154, 'W': -0.088, 'V': -0.018, 'Y': -0.16}, 1: {'A': -0.027, 'C': 0.239, 'E': -0.229, 'D': 0.338, 'G': 0.175, 'F': 0.23, 'I': -0.382, 'H': 0.185, 'K': 0.527, 'M': -0.959, 'L': -0.752, 'N': 0.287, 'Q': -1.189, 'P': 1.005, 'S': -0.052, 'R': 0.465, 'T': 0.031, 'W': 0.246, 'V': -0.543, 'Y': 0.405}, 2: {'A': -0.12, 'C': 0.044, 'E': 0.207, 'D': 0.292, 'G': 0.225, 'F': -0.16, 'I': -0.325, 'H': -0.101, 'K': -0.088, 'M': -0.294, 'L': -0.24, 'N': 0.008, 'Q': 0.045, 'P': 0.246, 'S': 0.21, 'R': -0.061, 'T': 0.315, 'W': 0.053, 'V': -0.118, 'Y': -0.14}, 3: {'A': -0.156, 'C': 0.1, 'E': -0.121, 'D': -0.126, 'G': 0.102, 'F': -0.07, 'I': -0.134, 'H': 0.17, 'K': 0.323, 'M': -0.319, 'L': 0.069, 'N': 0.044, 'Q': -0.046, 'P': -0.364, 'S': 0.016, 'R': 0.398, 'T': -0.021, 'W': 0.007, 'V': -0.12, 'Y': 0.247}, 4: {'A': -0.122, 'C': 0.0, 'E': 0.127, 'D': 0.122, 'G': 0.013, 'F': 0.05, 'I': -0.138, 'H': 0.036, 'K': 0.067, 'M': -0.047, 'L': 0.022, 'N': -0.046, 'Q': 0.1, 'P': 0.151, 'S': -0.157, 'R': 0.079, 'T': -0.116, 'W': -0.027, 'V': -0.155, 'Y': 0.04}, 5: {'A': 0.038, 'C': -0.014, 'E': -0.034, 'D': -0.097, 'G': -0.008, 'F': 0.061, 'I': -0.0, 'H': 0.043, 'K': 0.084, 'M': 0.026, 'L': 0.047, 'N': -0.044, 'Q': -0.071, 'P': -0.066, 'S': -0.059, 'R': 0.081, 'T': -0.096, 'W': -0.003, 'V': 0.006, 'Y': 0.106}, 6: {'A': -0.026, 'C': -0.005, 'E': -0.023, 'D': -0.036, 'G': -0.015, 'F': 0.017, 'I': 0.027, 'H': 0.021, 'K': 0.051, 'M': 0.015, 'L': -0.015, 'N': -0.009, 'Q': 0.004, 'P': -0.075, 'S': 0.015, 'R': 0.067, 'T': -0.015, 'W': 0.009, 'V': -0.02, 'Y': 0.013}, 7: {'A': -0.118, 'C': -0.036, 'E': 0.029, 'D': 0.101, 'G': 0.001, 'F': -0.101, 'I': -0.126, 'H': 0.054, 'K': 0.081, 'M': -0.079, 'L': -0.1, 'N': 0.035, 'Q': 0.059, 'P': 0.152, 'S': 0.023, 'R': 0.119, 'T': 0.012, 'W': 0.003, 
'V': -0.082, 'Y': -0.029}, 8: {'A': -0.415, 'C': 0.061, 'E': -0.112, 'D': 0.135, 'G': 0.349, 'F': 0.015, 'I': -0.534, 'H': 0.298, 'K': 0.683, 'M': 0.232, 'L': 0.051, 'N': 0.023, 'Q': 0.321, 'P': -0.202, 'S': -0.705, 'R': 0.909, 'T': -0.344, 'W': 0.226, 'V': -0.775, 'Y': -0.214}, 9: {'A': -0.062, 'C': -0.072, 'E': -0.073, 'D': 0.077, 'G': 0.096, 'F': -1.319, 'I': -0.205, 'H': 0.038, 'K': 0.801, 'M': -0.716, 'L': -0.251, 'N': 0.08, 'Q': 0.478, 'P': 0.704, 'S': 0.441, 'R': 0.662, 'T': 0.445, 'W': 0.111, 'V': 0.264, 'Y': -1.5}, -1: {'con': 3.80964}}
| 2,547
| 2,547
| 0.393404
| 618
| 2,547
| 1.616505
| 0.291262
| 0.02002
| 0.008008
| 0.01001
| 0.04004
| 0
| 0
| 0
| 0
| 0
| 0
| 0.372714
| 0.162544
| 2,547
| 1
| 2,547
| 2,547
| 0.09564
| 0
| 0
| 0
| 0
| 0
| 0.07967
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
028b5381922ead5eb2768a8b8b2cf59ad08210fe
| 294
|
py
|
Python
|
RecoParticleFlow/PFClusterProducer/python/particleFlowBadHcalPseudoCluster_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
RecoParticleFlow/PFClusterProducer/python/particleFlowBadHcalPseudoCluster_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
RecoParticleFlow/PFClusterProducer/python/particleFlowBadHcalPseudoCluster_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
from RecoParticleFlow.PFClusterProducer.particleFlowBadHcalPseudoCluster_cfi import *
# The bad-HCal mitigation is OFF by default and only turned on when the
# pf_badHcalMitigation era modifier is active.
from Configuration.Eras.Modifier_pf_badHcalMitigation_cff import pf_badHcalMitigation
# Under the modifier, enable the particleFlowBadHcalPseudoCluster producer.
pf_badHcalMitigation.toModify(particleFlowBadHcalPseudoCluster, enable = True)
| 42
| 85
| 0.884354
| 29
| 294
| 8.758621
| 0.724138
| 0.224409
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07483
| 294
| 6
| 86
| 49
| 0.933824
| 0.129252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5a29cd056bc93e8690bbfd371e1b73176e0ce932
| 14,657
|
py
|
Python
|
multiple-languages/python/ros-cdk-cas-1.0.4/src/ros_cdk_cas/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
multiple-languages/python/ros-cdk-cas-1.0.4/src/ros_cdk_cas/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
multiple-languages/python/ros-cdk-cas-1.0.4/src/ros_cdk_cas/__init__.py
|
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
|
2a12deea757ac69e69708dd9fd159fba12cfba0e
|
[
"Apache-2.0"
] | null | null | null |
'''
## Aliyun ROS CAS Construct Library
This module is part of the AliCloud ROS Cloud Development Kit (ROS CDK) project.
```python
import * as CAS from '@alicloud/ros-cdk-cas';
```
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
import ros_cdk_core
# NOTE: jsii-generated binding — the implementation lives on the JavaScript
# side; Python methods delegate through jsii.create / jsii.get.
class Certificate(
    ros_cdk_core.Resource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-cas.Certificate",
):
    '''A ROS resource type: ``ALIYUN::CAS::Certificate``.'''
    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "CertificateProps",
        enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
    ) -> None:
        '''Create a new ``ALIYUN::CAS::Certificate``.

        Param scope - scope in which this resource is defined
        Param id - scoped id of the resource
        Param props - resource properties
        :param scope: -
        :param id: -
        :param props: -
        :param enable_resource_property_constraint: -
        '''
        # Delegates construction to the jsii kernel for this class.
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrCertId")
    def attr_cert_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute CertId: Certificate ID.'''
        # Resolved lazily by ROS at deploy time via the jsii kernel.
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrCertId"))
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-cas.CertificateProps",
    jsii_struct_bases=[],
    name_mapping={
        "cert": "cert",
        "key": "key",
        "name": "name",
        "lang": "lang",
        "source_ip": "sourceIp",
    },
)
class CertificateProps:
    '''Properties struct for the ``Certificate`` construct.

    Immutable-by-convention value object; all data lives in ``self._values``
    and is exposed through read-only properties. The ``name_mapping`` above
    translates snake_case Python names to the camelCase wire names.
    '''

    def __init__(
        self,
        *,
        cert: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        lang: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        source_ip: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::CAS::Certificate``.

        :param cert: Property cert: Specify the content of the certificate. To use the PEM encoding format.
        :param key: Property key: Specify the certificate private key content. To use the PEM encoding format.
        :param name: Property name: Custom certificate name. The certificate name under a user cannot be duplicated.
        :param lang: Property lang: Specifies the language type for requesting and receiving messages.
        :param source_ip: Property sourceIp: Specifies the source IP address of the request.
        '''
        # Required values are stored unconditionally; optional values are
        # stored only when provided so absent keys are omitted entirely.
        self._values: typing.Dict[str, typing.Any] = {
            "cert": cert,
            "key": key,
            "name": name,
        }
        if lang is not None:
            self._values["lang"] = lang
        if source_ip is not None:
            self._values["source_ip"] = source_ip

    @builtins.property
    def cert(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property cert: Specify the content of the certificate.

        To use the PEM encoding format.
        '''
        result = self._values.get("cert")
        # Required key; guard against construction paths that bypass __init__.
        assert result is not None, "Required property 'cert' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property key: Specify the certificate private key content.

        To use the PEM encoding format.
        '''
        result = self._values.get("key")
        assert result is not None, "Required property 'key' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property name: Custom certificate name.

        The certificate name under a user cannot be duplicated.
        '''
        result = self._values.get("name")
        assert result is not None, "Required property 'name' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def lang(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property lang: Specifies the language type for requesting and receiving messages.'''
        # Optional: returns None when the key was never stored.
        result = self._values.get("lang")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def source_ip(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property sourceIp: Specifies the source IP address of the request.'''
        result = self._values.get("source_ip")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Value equality: same concrete class and identical stored values.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "CertificateProps(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class RosCertificate(
    ros_cdk_core.RosResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-cas.RosCertificate",
):
    '''A ROS template type: ``ALIYUN::CAS::Certificate``.

    Low-level (L1) construct mapping 1:1 onto the ROS template resource.
    Every property accessor round-trips through the jsii kernel, so reads
    and writes reflect the state of the underlying JavaScript object.
    '''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "RosCertificateProps",
        enable_resource_property_constraint: builtins.bool,
    ) -> None:
        '''Create a new ``ALIYUN::CAS::Certificate``.

        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
        '''
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])

    @jsii.member(jsii_name="renderProperties")
    def _render_properties(
        self,
        props: typing.Mapping[builtins.str, typing.Any],
    ) -> typing.Mapping[builtins.str, typing.Any]:
        '''Render this resource's properties into template form.

        :param props: -
        '''
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))

    @jsii.python.classproperty  # type: ignore[misc]
    @jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
    def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
        '''The resource type name for this resource class.'''
        # Static (class-level) value fetched from the jsii kernel.
        return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="attrCertId")
    def attr_cert_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: CertId: Certificate ID.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrCertId"))

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="rosProperties")
    def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
        # Raw property map as seen by the ROS template machinery.
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="cert")
    def cert(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: cert: Specify the content of the certificate. To use the PEM encoding format.
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "cert"))

    @cert.setter
    def cert(self, value: typing.Union[builtins.str, ros_cdk_core.IResolvable]) -> None:
        jsii.set(self, "cert", value)

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="enableResourcePropertyConstraint")
    def enable_resource_property_constraint(self) -> builtins.bool:
        return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))

    @enable_resource_property_constraint.setter
    def enable_resource_property_constraint(self, value: builtins.bool) -> None:
        jsii.set(self, "enableResourcePropertyConstraint", value)

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="key")
    def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: key: Specify the certificate private key content. To use the PEM encoding format.
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "key"))

    @key.setter
    def key(self, value: typing.Union[builtins.str, ros_cdk_core.IResolvable]) -> None:
        jsii.set(self, "key", value)

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="name")
    def name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: name: Custom certificate name. The certificate name under a user cannot be duplicated.
        '''
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "name"))

    @name.setter
    def name(self, value: typing.Union[builtins.str, ros_cdk_core.IResolvable]) -> None:
        jsii.set(self, "name", value)

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="lang")
    def lang(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: lang: Specifies the language type for requesting and receiving messages.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "lang"))

    @lang.setter
    def lang(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "lang", value)

    @builtins.property  # type: ignore[misc]
    @jsii.member(jsii_name="sourceIp")
    def source_ip(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: sourceIp: Specifies the source IP address of the request.
        '''
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "sourceIp"))

    @source_ip.setter
    def source_ip(
        self,
        value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
    ) -> None:
        jsii.set(self, "sourceIp", value)
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-cas.RosCertificateProps",
    jsii_struct_bases=[],
    name_mapping={
        "cert": "cert",
        "key": "key",
        "name": "name",
        "lang": "lang",
        "source_ip": "sourceIp",
    },
)
class RosCertificateProps:
    '''Properties struct for the low-level ``RosCertificate`` template type.

    Mirrors ``CertificateProps``; kept separate because L1 and L2 constructs
    have independent generated APIs.
    '''

    def __init__(
        self,
        *,
        cert: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
        lang: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
        source_ip: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
    ) -> None:
        '''Properties for defining a ``ALIYUN::CAS::Certificate``.

        :param cert:
        :param key:
        :param name:
        :param lang:
        :param source_ip:
        '''
        # Optional values are stored only when provided so absent keys are
        # omitted from the serialized struct.
        self._values: typing.Dict[str, typing.Any] = {
            "cert": cert,
            "key": key,
            "name": name,
        }
        if lang is not None:
            self._values["lang"] = lang
        if source_ip is not None:
            self._values["source_ip"] = source_ip

    @builtins.property
    def cert(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: cert: Specify the content of the certificate. To use the PEM encoding format.
        '''
        result = self._values.get("cert")
        assert result is not None, "Required property 'cert' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: key: Specify the certificate private key content. To use the PEM encoding format.
        '''
        result = self._values.get("key")
        assert result is not None, "Required property 'key' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''
        :Property: name: Custom certificate name. The certificate name under a user cannot be duplicated.
        '''
        result = self._values.get("name")
        assert result is not None, "Required property 'name' is missing"
        return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)

    @builtins.property
    def lang(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: lang: Specifies the language type for requesting and receiving messages.
        '''
        result = self._values.get("lang")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    @builtins.property
    def source_ip(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''
        :Property: sourceIp: Specifies the source IP address of the request.
        '''
        result = self._values.get("source_ip")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Value equality on the stored property map.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "RosCertificateProps(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
# Public API of this generated module.
__all__ = [
    "Certificate",
    "CertificateProps",
    "RosCertificate",
    "RosCertificateProps",
]

# Finalize the module's public surface via the jsii publication helper.
publication.publish()
| 37.200508
| 125
| 0.651361
| 1,763
| 14,657
| 5.257516
| 0.080545
| 0.038839
| 0.058259
| 0.111015
| 0.84076
| 0.836013
| 0.816917
| 0.812385
| 0.812385
| 0.784119
| 0
| 0
| 0.225762
| 14,657
| 393
| 126
| 37.295165
| 0.816796
| 0.209252
| 0
| 0.668016
| 0
| 0
| 0.090826
| 0.028332
| 0
| 0
| 0
| 0
| 0.024292
| 1
| 0.149798
| false
| 0
| 0.040486
| 0.032389
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5a2ee27552d1f96d324017b48d32b0254604716e
| 4,165
|
py
|
Python
|
notebook/pandas_index.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 174
|
2018-05-30T21:14:50.000Z
|
2022-03-25T07:59:37.000Z
|
notebook/pandas_index.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 5
|
2019-08-10T03:22:02.000Z
|
2021-07-12T20:31:17.000Z
|
notebook/pandas_index.py
|
vhn0912/python-snippets
|
80b2e1d6b2b8f12ae30d6dbe86d25bb2b3a02038
|
[
"MIT"
] | 53
|
2018-04-27T05:26:35.000Z
|
2022-03-25T07:59:37.000Z
|
"""Demonstration of pandas column/row selection and slicing idioms.

Each call is followed by a comment showing its expected output against
data/src/sample_pandas_normal.csv (index column: name). Covers:
- column selection: df['col'], df.col, df[['a', 'b']]
- label/position slicing: df.loc, df.iloc, df.at
- row slicing by integer position and by index label
"""
import pandas as pd

df = pd.read_csv('data/src/sample_pandas_normal.csv', index_col=0)
print(df)
#          age state  point
# name
# Alice     24    NY     64
# Bob       42    CA     92
# Charlie   18    CA     70
# Dave      68    TX     70
# Ellen     24    CA     88
# Frank     30    NY     57

# Single-column selection with [] returns a Series.
print(df['age'])
print(type(df['age']))
# name
# Alice      24
# Bob        42
# Charlie    18
# Dave       68
# Ellen      24
# Frank      30
# Name: age, dtype: int64
# <class 'pandas.core.series.Series'>

# Attribute access is equivalent (only for valid identifier column names).
print(df.age)
print(type(df.age))
# name
# Alice      24
# Bob        42
# Charlie    18
# Dave       68
# Ellen      24
# Frank      30
# Name: age, dtype: int64
# <class 'pandas.core.series.Series'>

# A list of columns returns a DataFrame.
print(df[['age', 'point']])
print(type(df[['age', 'point']]))
#          age  point
# name
# Alice     24     64
# Bob       42     92
# Charlie   18     70
# Dave      68     70
# Ellen     24     88
# Frank     30     57
# <class 'pandas.core.frame.DataFrame'>

# A one-element list still returns a (single-column) DataFrame, not a Series.
print(df[['age']])
print(type(df[['age']]))
#          age
# name
# Alice     24
# Bob       42
# Charlie   18
# Dave      68
# Ellen     24
# Frank     30
# <class 'pandas.core.frame.DataFrame'>

# A label slice in [] is interpreted as a ROW slice, so this matches nothing.
print(df['age':'point'])
# Empty DataFrame
# Columns: [age, state, point]
# Index: []

# Column label slices require .loc; both endpoints are inclusive.
print(df.loc[:, 'age':'point'])
print(type(df.loc[:, 'age':'point']))
#          age state  point
# name
# Alice     24    NY     64
# Bob       42    CA     92
# Charlie   18    CA     70
# Dave      68    TX     70
# Ellen     24    CA     88
# Frank     30    NY     57
# <class 'pandas.core.frame.DataFrame'>

# Positional column selection with .iloc.
print(df.iloc[:, [0, 2]])
print(type(df.iloc[:, [0, 2]]))
#          age  point
# name
# Alice     24     64
# Bob       42     92
# Charlie   18     70
# Dave      68     70
# Ellen     24     88
# Frank     30     57
# <class 'pandas.core.frame.DataFrame'>

# Integer slices in [] select ROWS (end exclusive, like list slicing).
print(df[1:4])
print(type(df[1:4]))
#          age state  point
# name
# Bob       42    CA     92
# Charlie   18    CA     70
# Dave      68    TX     70
# <class 'pandas.core.frame.DataFrame'>

print(df[:-3])
print(type(df[:-3]))
#          age state  point
# name
# Alice     24    NY     64
# Bob       42    CA     92
# Charlie   18    CA     70
# <class 'pandas.core.frame.DataFrame'>

# Step slicing: every other row.
print(df[::2])
print(type(df[::2]))
#          age state  point
# name
# Alice     24    NY     64
# Charlie   18    CA     70
# Ellen     24    CA     88
# <class 'pandas.core.frame.DataFrame'>

print(df[1::2])
print(type(df[1::2]))
#          age state  point
# name
# Bob       42    CA     92
# Dave      68    TX     70
# Frank     30    NY     57
# <class 'pandas.core.frame.DataFrame'>

# A bare integer in [] is treated as a column label, hence the KeyError.
# print(df[1])
# KeyError: 1

print(df[1:2])
print(type(df[1:2]))
#          age state  point
# name
# Bob       42    CA     92
# <class 'pandas.core.frame.DataFrame'>

# Row label slices in [] are inclusive of both endpoints.
print(df['Bob':'Ellen'])
print(type(df['Bob':'Ellen']))
#          age state  point
# name
# Bob       42    CA     92
# Charlie   18    CA     70
# Dave      68    TX     70
# Ellen     24    CA     88
# <class 'pandas.core.frame.DataFrame'>

# Selecting a single row label with .loc returns a Series.
print(df.loc['Bob'])
print(type(df.loc['Bob']))
# age      42
# state    CA
# point    92
# Name: Bob, dtype: object
# <class 'pandas.core.series.Series'>

print(df.loc[['Bob', 'Ellen']])
print(type(df.loc[['Bob', 'Ellen']]))
#        age state  point
# name
# Bob     42    CA     92
# Ellen   24    CA     88
# <class 'pandas.core.frame.DataFrame'>

print(df.iloc[[1, 4]])
print(type(df.iloc[[1, 4]]))
#        age state  point
# name
# Bob     42    CA     92
# Ellen   24    CA     88
# <class 'pandas.core.frame.DataFrame'>

# Chained indexing (works for reading; avoid for assignment).
print(df['age']['Alice'])
# 24

print(df['Bob':'Dave'][['age', 'point']])
#          age  point
# name
# Bob       42     92
# Charlie   18     70
# Dave      68     70

# Fast scalar access with .at, and combined row/column slicing with .loc.
print(df.at['Alice', 'age'])
# 24

print(df.loc['Bob':'Dave', ['age', 'point']])
#          age  point
# name
# Bob       42     92
# Charlie   18     70
# Dave      68     70
| 21.921053
| 66
| 0.482833
| 563
| 4,165
| 3.564831
| 0.101243
| 0.076731
| 0.082212
| 0.119581
| 0.849028
| 0.78575
| 0.78575
| 0.743896
| 0.667663
| 0.666667
| 0
| 0.107184
| 0.37503
| 4,165
| 189
| 67
| 22.037037
| 0.663849
| 0.694838
| 0
| 0
| 0
| 0
| 0.149293
| 0.029152
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026316
| 0
| 0.026316
| 0.947368
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
5a41a4ffbc0d091352cde5c55ba7ea8a74121e44
| 48
|
py
|
Python
|
home_server/__init__.py
|
wwakabobik/home
|
518167bb705a18e5697bb261942dc1e10eac5bf0
|
[
"MIT"
] | null | null | null |
home_server/__init__.py
|
wwakabobik/home
|
518167bb705a18e5697bb261942dc1e10eac5bf0
|
[
"MIT"
] | null | null | null |
home_server/__init__.py
|
wwakabobik/home
|
518167bb705a18e5697bb261942dc1e10eac5bf0
|
[
"MIT"
] | 1
|
2021-12-01T08:34:05.000Z
|
2021-12-01T08:34:05.000Z
|
from .home_server import db, secure_data, pages
| 24
| 47
| 0.8125
| 8
| 48
| 4.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 48
| 1
| 48
| 48
| 0.880952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5a64afc4a130bda947c81ebe572c99d40d2afbaf
| 600
|
py
|
Python
|
temboo/core/Library/Google/Drive/Comments/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Google/Drive/Comments/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Google/Drive/Comments/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Google.Drive.Comments.Delete import Delete, DeleteInputSet, DeleteResultSet, DeleteChoreographyExecution
from temboo.Library.Google.Drive.Comments.Get import Get, GetInputSet, GetResultSet, GetChoreographyExecution
from temboo.Library.Google.Drive.Comments.Insert import Insert, InsertInputSet, InsertResultSet, InsertChoreographyExecution
from temboo.Library.Google.Drive.Comments.List import List, ListInputSet, ListResultSet, ListChoreographyExecution
from temboo.Library.Google.Drive.Comments.Update import Update, UpdateInputSet, UpdateResultSet, UpdateChoreographyExecution
| 100
| 124
| 0.875
| 60
| 600
| 8.75
| 0.45
| 0.095238
| 0.161905
| 0.219048
| 0.342857
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058333
| 600
| 5
| 125
| 120
| 0.929204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ce68278280d2e05ebea1096dd1e6a3a7ce5bee68
| 7,371
|
py
|
Python
|
runs/run_server_Icons.py
|
aliborji/ShapeDefence
|
92da19bb195b5161d997f6ee1cc777b07a714f6f
|
[
"MIT"
] | null | null | null |
runs/run_server_Icons.py
|
aliborji/ShapeDefence
|
92da19bb195b5161d997f6ee1cc777b07a714f6f
|
[
"MIT"
] | 1
|
2022-03-12T00:40:21.000Z
|
2022-03-12T00:40:21.000Z
|
runs/run_server_Icons.py
|
aliborji/ShapeDefense
|
92da19bb195b5161d997f6ee1cc777b07a714f6f
|
[
"MIT"
] | null | null | null |
"""Train and adversarially evaluate models on the Icons-50 dataset.

For each attack strength ``eps_t`` in {8, 32} (epsilon = eps_t/255):
  1. evaluate the clean model on clean and adversarial images,
  2. adversarially train a robust model and evaluate it the same way,
  3. for net_type == 'rgbedge', repeat adversarial training with edge
     re-detection enabled and evaluate with and without re-detection.
All accuracies are printed and appended to a per-net-type results file.

Fixes vs. the previous revision: duplicate ``import os`` removed; the
"robust redetect" print message typo ('redtect') corrected so it matches
the paired fo.write line.
"""
from lib import *
from config import *
from model import build_model, build_model_resNet
from utils import *
import torchattacks
from torchattacks import PGD, FGSM
import os
from torch.utils.data import Dataset, DataLoader
import pandas as pd
from os.path import isfile, join, abspath, exists, isdir, expanduser
from os import listdir
import torch.nn as nn
from torchvision import transforms, datasets, models

# Tolerate duplicate OpenMP runtimes (common torch/MKL clash on some setups).
os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'

# ---- Experiment configuration ------------------------------------------------
NUM_EPOCHS = 10
BATCH_SIZE = 100
train_phase = True        # set False to skip (re)training and only evaluate
attack_type = 'FGSM'
net_type = 'edge'         # 'rgbedge' additionally enables edge re-detection runs
data_dir = 'Icons-50'
inp_size = 64
n_classes = 50

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# NOTE(review): the handle is only closed at the very end; an exception
# mid-run leaks it. A `with` block would be safer — left as-is to keep the
# script's flat structure unchanged.
fo = open(f'./{attack_type}-icons/results/results_{net_type}.txt', 'w+')

# --------------------------------------------------------------------------------------------------------------------------------------------
# Train a model first
save_path = f'icons_{net_type}.pth'
if train_phase:
    # pass
    net, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    net.to(device)
    train_model(net, dataloader_dict, criterior, optimizer, NUM_EPOCHS, save_path)
# NUM_EPOCHS = 30 # for adversarial training

# --------------------------------------------------------------------------------------------------------------------------------------------
# Test the clean model on clean and attacks
net, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
load_model(net, save_path)
net.to(device)
acc, images = test_model_clean(net, dataloader_dict)
print('Accuracy of original model on clean images: %f ' % acc)
fo.write('Accuracy of original model on clean images: %f \n' % acc)

for eps_t in [8,32]:
    print(f'eps_t={eps_t}')
    fo.write(f'eps_t={eps_t} \n')
    epsilons = [eps_t/255]

    # Test the clean model on clean and attacks
    net, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net, save_path)
    net.to(device)
    acc_attack, images = test_model_attack(net, dataloader_dict, epsilons, attack_type, net_type, redetect_edge=False)
    print('Accuracy of clean model on adversarial images: %f %%' % acc_attack[0])
    fo.write('Accuracy of clean model on adversarial images: %f \n' % acc_attack[0])

    # Re-build to evaluate again with edge re-detection (rgbedge only).
    net, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net, save_path)
    net.to(device)
    if net_type == 'rgbedge':
        acc_attack, images = test_model_attack(net, dataloader_dict, epsilons, attack_type, net_type, redetect_edge=True)
        print('Accuracy of clean model on adversarial images with redetect_edge: %f %%' % acc_attack[0])
        fo.write('Accuracy of clean model on adversarial images with redetect_edge: %f \n' % acc_attack[0])

    # --------------------------------------------------------------------------------------------------------------------------------------------
    # Now perform adversarial training
    save_path_robust = f'./{attack_type}-icons/icons_{net_type}_{eps_t}_robust_{eps_t}.pth'
    if train_phase:
        # pass
        net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
        net_robust.to(device)
        train_robust_model(net_robust, dataloader_dict, criterior, optimizer, NUM_EPOCHS, save_path_robust, attack_type, eps=eps_t/255, net_type=net_type, redetect_edge=False)

    # --------------------------------------------------------------------------------------------------------------------------------------------
    # Test the robust model on clean and attacks
    net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net_robust, save_path_robust)
    net_robust.to(device)
    acc, images = test_model_clean(net_robust, dataloader_dict)
    print('Accuracy of robust model on clean images: %f %%' % acc)
    fo.write('Accuracy of robust model on clean images: %f \n' % acc)

    net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net_robust, save_path_robust)
    net_robust.to(device)
    acc_attack, images = test_model_attack(net_robust, dataloader_dict, epsilons, attack_type, net_type, redetect_edge=False)
    print('Accuracy of robust model on adversarial images: %f %%' % acc_attack[0])
    fo.write('Accuracy of robust model on adversarial images: %f \n' % acc_attack[0])

    net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net_robust, save_path_robust)
    net_robust.to(device)
    if net_type == 'rgbedge':
        acc_attack, images = test_model_attack(net_robust, dataloader_dict, epsilons, attack_type, net_type, redetect_edge=True)
        print('Accuracy of robust model on adversarial images with redetect_edge: %f %%' % acc_attack[0])
        fo.write('Accuracy of robust model on adversarial images with redetect_edge: %f \n' % acc_attack[0])

    # --------------------------------------------------------------------------------------------------------------------------------------------
    # Now perform adversarial training with redetect (rgbedge networks only)
    if net_type != 'rgbedge': continue

    save_path_robust = f'./{attack_type}-icons/icons_{net_type}_{eps_t}_robust_{eps_t}_redetect.pth'
    if train_phase:
        net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
        net_robust.to(device)
        train_robust_model(net_robust, dataloader_dict, criterior, optimizer, NUM_EPOCHS, save_path_robust, attack_type, eps=eps_t/255, net_type=net_type, redetect_edge=True)

    net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net_robust, save_path_robust)
    net_robust.to(device)
    acc, images = test_model_clean(net_robust, dataloader_dict)
    print('Accuracy of robust redetect model on clean images: %f %%' % acc)
    fo.write('Accuracy of robust redetect model on clean images: %f \n' % acc)

    net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net_robust, save_path_robust)
    net_robust.to(device)
    acc_attack, images = test_model_attack(net_robust, dataloader_dict, epsilons, attack_type, net_type, redetect_edge=False)
    print('Accuracy of robust redetect model on adversarial images: %f %%' % acc_attack[0])
    fo.write('Accuracy of robust redetect model on adversarial images: %f \n' % acc_attack[0])

    net_robust, dataloader_dict, criterior, optimizer = build_model_resNet(net_type, data_dir, inp_size, n_classes)
    load_model(net_robust, save_path_robust)
    net_robust.to(device)
    acc_attack, images = test_model_attack(net_robust, dataloader_dict, epsilons, attack_type, net_type, redetect_edge=True)
    # Fixed typo: was 'robust redtect model' (mismatched the fo.write line).
    print('Accuracy of robust redetect model on adversarial images with redetect_edge: %f %%' % acc_attack[0])
    fo.write('Accuracy of robust redetect model on adversarial images with redetect_edge: %f \n' % acc_attack[0])

fo.close()
| 41.178771
| 175
| 0.669923
| 998
| 7,371
| 4.659319
| 0.114228
| 0.045161
| 0.065376
| 0.07914
| 0.832903
| 0.824301
| 0.809462
| 0.808602
| 0.782366
| 0.777204
| 0
| 0.006055
| 0.148555
| 7,371
| 178
| 176
| 41.410112
| 0.734863
| 0.133903
| 0
| 0.45098
| 0
| 0
| 0.219727
| 0.029998
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.137255
| 0
| 0.137255
| 0.098039
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ce70d9449a471ef6867f6ac5b22a0ff0c870e188
| 165
|
py
|
Python
|
backend/handlers/graphql/action_deserializers/template_deserializer.py
|
al-indigo/vmemperor
|
80eb6d47d839a4736eb6f9d2fcfad35f0a7b3bb1
|
[
"Apache-2.0"
] | null | null | null |
backend/handlers/graphql/action_deserializers/template_deserializer.py
|
al-indigo/vmemperor
|
80eb6d47d839a4736eb6f9d2fcfad35f0a7b3bb1
|
[
"Apache-2.0"
] | 8
|
2017-10-11T13:26:10.000Z
|
2021-12-13T20:27:52.000Z
|
backend/handlers/graphql/action_deserializers/template_deserializer.py
|
ispras/vmemperor
|
80eb6d47d839a4736eb6f9d2fcfad35f0a7b3bb1
|
[
"Apache-2.0"
] | 4
|
2017-07-27T12:25:42.000Z
|
2018-01-28T02:06:26.000Z
|
from handlers.graphql.types.template import TemplateActions
class TemplateDeserializer:
    """Carries deserialization context for template GraphQL actions.

    Currently holds a single flag indicating whether the template being
    deserialized is the default one.
    """

    def __init__(self, is_default):
        """Remember whether this deserializer targets the default template."""
        self.is_default = is_default
| 18.333333
| 59
| 0.769697
| 19
| 165
| 6.315789
| 0.736842
| 0.225
| 0.216667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169697
| 165
| 8
| 60
| 20.625
| 0.875912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
ce8fe1aa282a90ee89f3d228e07b444cb13635d3
| 199
|
py
|
Python
|
TODO.py
|
OsciiArt/Cookpad
|
b2245f84db0650d6282c97c98600de825c6ed6e0
|
[
"MIT"
] | null | null | null |
TODO.py
|
OsciiArt/Cookpad
|
b2245f84db0650d6282c97c98600de825c6ed6e0
|
[
"MIT"
] | null | null | null |
TODO.py
|
OsciiArt/Cookpad
|
b2245f84db0650d6282c97c98600de825c6ed6e0
|
[
"MIT"
] | null | null | null |
# TODO cycle learning
# TODO mix like DSB
# TODO noise mix up
# TODO more simple model
# TODO GAN aug
# TODO remove aug in valid
# TODO
# TODO
# TODO
# TODO
# TODO
# TODO
# TODO
# TODO
# TODO
# TODO
| 11.705882
| 26
| 0.673367
| 33
| 199
| 4.060606
| 0.484848
| 0.537313
| 0.716418
| 0.835821
| 0.298507
| 0.298507
| 0.298507
| 0.298507
| 0.298507
| 0
| 0
| 0
| 0.246231
| 199
| 16
| 27
| 12.4375
| 0.893333
| 0.834171
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.0625
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cea79ca204fa1d322506ad9d667ab45bf7b5817c
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/pip/_internal/models/candidate.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/pip/_internal/models/candidate.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/pip/_internal/models/candidate.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/6f/66/a2/b9f843e63644234ce111a327fe8d5546575513627e30fb2a3e9718d83b
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.479167
| 0
| 96
| 1
| 96
| 96
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ceb9ffae0467bc42363389809940ed67164f6617
| 82
|
py
|
Python
|
bgcArgoDMQC/util/__init__.py
|
ArgoCanada/BGC-QC
|
c058f3e1a1992fc961ce2c4d5862d426725c1e43
|
[
"MIT"
] | null | null | null |
bgcArgoDMQC/util/__init__.py
|
ArgoCanada/BGC-QC
|
c058f3e1a1992fc961ce2c4d5862d426725c1e43
|
[
"MIT"
] | 16
|
2020-07-15T12:26:26.000Z
|
2020-10-14T14:28:04.000Z
|
bgcArgoDMQC/util/__init__.py
|
ArgoCanada/bgcArgo
|
500cd10526e5b88393310d457eebaef19d49e4d8
|
[
"MIT"
] | 1
|
2020-08-30T02:40:33.000Z
|
2020-08-30T02:40:33.000Z
|
from .array import *
from .geo import *
from .stats import *
from .util import *
| 13.666667
| 20
| 0.695122
| 12
| 82
| 4.75
| 0.5
| 0.526316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207317
| 82
| 5
| 21
| 16.4
| 0.876923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cebea34fd924c5f022bc61c2b7d1ad8d37a5abd2
| 230
|
py
|
Python
|
algorithms/search/__init__.py
|
zhengli0817/algorithms
|
3c98813f0329d9a5fff1107dbcd40e7f38d2275d
|
[
"MIT"
] | null | null | null |
algorithms/search/__init__.py
|
zhengli0817/algorithms
|
3c98813f0329d9a5fff1107dbcd40e7f38d2275d
|
[
"MIT"
] | null | null | null |
algorithms/search/__init__.py
|
zhengli0817/algorithms
|
3c98813f0329d9a5fff1107dbcd40e7f38d2275d
|
[
"MIT"
] | null | null | null |
from .binary_search import *
from .first_occurance import *
from .last_occurance import *
from .search_insert import *
from .two_sum import *
from .search_range import *
from .find_min_rotate import *
from .search_rotate import *
| 25.555556
| 30
| 0.791304
| 33
| 230
| 5.242424
| 0.424242
| 0.404624
| 0.277457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13913
| 230
| 8
| 31
| 28.75
| 0.873737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cebeb8fd8b1515ec6fed7278950597724fdf1da8
| 64
|
py
|
Python
|
timesformer_pytorch/__init__.py
|
halixness/generative_timesformer_pytorch
|
c91989f804f3d0c9821b14e2f2e77c48ef47f5f0
|
[
"MIT"
] | 565
|
2021-02-11T04:18:16.000Z
|
2022-03-31T03:54:49.000Z
|
timesformer_pytorch/__init__.py
|
halixness/generative_timesformer_pytorch
|
c91989f804f3d0c9821b14e2f2e77c48ef47f5f0
|
[
"MIT"
] | 20
|
2021-02-11T17:53:25.000Z
|
2021-11-09T09:35:12.000Z
|
timesformer_pytorch/__init__.py
|
halixness/generative_timesformer_pytorch
|
c91989f804f3d0c9821b14e2f2e77c48ef47f5f0
|
[
"MIT"
] | 73
|
2021-02-11T23:46:08.000Z
|
2022-02-01T13:48:31.000Z
|
from timesformer_pytorch.timesformer_pytorch import TimeSformer
| 32
| 63
| 0.921875
| 7
| 64
| 8.142857
| 0.571429
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 64
| 1
| 64
| 64
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0b44c4ac26a16debef16f60723fc74ab637a02a6
| 27,461
|
py
|
Python
|
flash/audio/speech_recognition/data.py
|
dudeperf3ct/lightning-flash
|
a855cd14cf1cd0301b4a2f82c0c95e4d8d986650
|
[
"Apache-2.0"
] | 1
|
2022-03-09T22:40:05.000Z
|
2022-03-09T22:40:05.000Z
|
flash/audio/speech_recognition/data.py
|
dudeperf3ct/lightning-flash
|
a855cd14cf1cd0301b4a2f82c0c95e4d8d986650
|
[
"Apache-2.0"
] | null | null | null |
flash/audio/speech_recognition/data.py
|
dudeperf3ct/lightning-flash
|
a855cd14cf1cd0301b4a2f82c0c95e4d8d986650
|
[
"Apache-2.0"
] | null | null | null |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional, Sequence, Type
from torch.utils.data import Dataset
from flash.audio.speech_recognition.input import (
SpeechRecognitionCSVInput,
SpeechRecognitionDatasetInput,
SpeechRecognitionJSONInput,
SpeechRecognitionPathsInput,
)
from flash.audio.speech_recognition.output_transform import SpeechRecognitionOutputTransform
from flash.core.data.data_module import DataModule
from flash.core.data.io.input import Input
from flash.core.data.io.input_transform import INPUT_TRANSFORM_TYPE, InputTransform
from flash.core.registry import FlashRegistry
from flash.core.utilities.imports import _AUDIO_TESTING
from flash.core.utilities.stages import RunningStage
# Skip doctests if requirements aren't available.
# __doctest_skip__ is read by Sphinx's doctest collector: when the audio extras
# are missing, every doctest on SpeechRecognitionData (and its methods) is skipped
# instead of failing on the unavailable backend.
if not _AUDIO_TESTING:
    __doctest_skip__ = ["SpeechRecognitionData", "SpeechRecognitionData.*"]
class SpeechRecognitionData(DataModule):
    """The ``SpeechRecognitionData`` class is a :class:`~flash.core.data.data_module.DataModule` with a set of
    classmethods for loading data for speech recognition."""

    input_transform_cls = InputTransform
    output_transform_cls = SpeechRecognitionOutputTransform
    input_transforms_registry = FlashRegistry("input_transforms")

    @classmethod
    def from_files(
        cls,
        train_files: Optional[Sequence[str]] = None,
        train_targets: Optional[Sequence[str]] = None,
        val_files: Optional[Sequence[str]] = None,
        val_targets: Optional[Sequence[str]] = None,
        test_files: Optional[Sequence[str]] = None,
        test_targets: Optional[Sequence[str]] = None,
        predict_files: Optional[Sequence[str]] = None,
        sampling_rate: int = 16000,
        train_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        input_cls: Type[Input] = SpeechRecognitionPathsInput,
        transform_kwargs: Optional[Dict] = None,
        **data_module_kwargs: Any,
    ) -> "SpeechRecognitionData":
        """Load the :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData` from lists of audio files
        and corresponding lists of targets.

        The supported file extensions are: ``wav``, ``ogg``, ``flac``, ``mat``, and ``mp3``.

        To learn how to customize the transforms applied for each stage, read our
        :ref:`customizing transforms guide <customizing_transforms>`.

        Args:
            train_files: The list of audio files to use when training.
            train_targets: The list of targets (ground truth speech transcripts) to use when training.
            val_files: The list of audio files to use when validating.
            val_targets: The list of targets (ground truth speech transcripts) to use when validating.
            test_files: The list of audio files to use when testing.
            test_targets: The list of targets (ground truth speech transcripts) to use when testing.
            predict_files: The list of audio files to use when predicting.
            sampling_rate: Sampling rate to use when loading the audio files.
            train_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when training.
            val_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when validating.
            test_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when testing.
            predict_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when
                predicting.
            input_cls: The :class:`~flash.core.data.io.input.Input` type to use for loading the data.
            transform_kwargs: Dict of keyword arguments to be provided when instantiating the transforms.
            data_module_kwargs: Additional keyword arguments to provide to the
                :class:`~flash.core.data.data_module.DataModule` constructor.

        Returns:
            The constructed :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData`.

        Examples
        ________

        .. testsetup::

            >>> import numpy as np
            >>> import soundfile as sf
            >>> samplerate = 44100
            >>> data = np.random.uniform(-1, 1, size=(samplerate * 3, 2))
            >>> _ = [sf.write(f"speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]
            >>> _ = [sf.write(f"predict_speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]

        .. doctest::

            >>> from flash import Trainer
            >>> from flash.audio import SpeechRecognitionData, SpeechRecognition
            >>> datamodule = SpeechRecognitionData.from_files(
            ...     train_files=["speech_1.wav", "speech_2.wav", "speech_3.wav"],
            ...     train_targets=["some speech", "some other speech", "some more speech"],
            ...     predict_files=["predict_speech_1.wav", "predict_speech_2.wav", "predict_speech_3.wav"],
            ...     batch_size=2,
            ... )
            >>> model = SpeechRecognition(backbone="patrickvonplaten/wav2vec2_tiny_random_robust")
            >>> trainer = Trainer(fast_dev_run=True)
            >>> trainer.fit(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Training...
            >>> trainer.predict(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Predicting...

        .. testcleanup::

            >>> import os
            >>> _ = [os.remove(f"speech_{i}.wav") for i in range(1, 4)]
            >>> _ = [os.remove(f"predict_speech_{i}.wav") for i in range(1, 4)]
        """
        # Keyword arguments shared by the Input object built for every stage.
        ds_kw = dict(
            transform_kwargs=transform_kwargs,
            input_transforms_registry=cls.input_transforms_registry,
            sampling_rate=sampling_rate,
        )

        # One Input per running stage; the predicting stage has no targets.
        return cls(
            input_cls(RunningStage.TRAINING, train_files, train_targets, transform=train_transform, **ds_kw),
            input_cls(RunningStage.VALIDATING, val_files, val_targets, transform=val_transform, **ds_kw),
            input_cls(RunningStage.TESTING, test_files, test_targets, transform=test_transform, **ds_kw),
            input_cls(RunningStage.PREDICTING, predict_files, transform=predict_transform, **ds_kw),
            **data_module_kwargs,
        )

    @classmethod
    def from_csv(
        cls,
        input_field: str,
        target_field: Optional[str] = None,
        train_file: Optional[str] = None,
        val_file: Optional[str] = None,
        test_file: Optional[str] = None,
        predict_file: Optional[str] = None,
        sampling_rate: int = 16000,
        train_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        input_cls: Type[Input] = SpeechRecognitionCSVInput,
        transform_kwargs: Optional[Dict] = None,
        **data_module_kwargs: Any,
    ) -> "SpeechRecognitionData":
        """Load the :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData` from CSV files containing
        audio file paths and their corresponding targets.

        Input audio file paths will be extracted from the ``input_field`` column in the CSV files.
        The supported file extensions are: ``wav``, ``ogg``, ``flac``, ``mat``, and ``mp3``.
        The targets will be extracted from the ``target_field`` in the CSV files.

        To learn how to customize the transforms applied for each stage, read our
        :ref:`customizing transforms guide <customizing_transforms>`.

        Args:
            input_field: The field (column name) in the CSV files containing the audio file paths.
            target_field: The field (column name) in the CSV files containing the targets.
            train_file: The CSV file to use when training.
            val_file: The CSV file to use when validating.
            test_file: The CSV file to use when testing.
            predict_file: The CSV file to use when predicting.
            sampling_rate: Sampling rate to use when loading the audio files.
            train_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when training.
            val_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when validating.
            test_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when testing.
            predict_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when
                predicting.
            input_cls: The :class:`~flash.core.data.io.input.Input` type to use for loading the data.
            transform_kwargs: Dict of keyword arguments to be provided when instantiating the transforms.
            data_module_kwargs: Additional keyword arguments to provide to the
                :class:`~flash.core.data.data_module.DataModule` constructor.

        Returns:
            The constructed :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData`.

        Examples
        ________

        .. testsetup::

            >>> import numpy as np
            >>> from pandas import DataFrame
            >>> import soundfile as sf
            >>> samplerate = 44100
            >>> data = np.random.uniform(-1, 1, size=(samplerate * 3, 2))
            >>> _ = [sf.write(f"speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]
            >>> _ = [sf.write(f"predict_speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]
            >>> DataFrame.from_dict({
            ...     "speech_files": ["speech_1.wav", "speech_2.wav", "speech_3.wav"],
            ...     "targets": ["some speech", "some other speech", "some more speech"],
            ... }).to_csv("train_data.csv", index=False)
            >>> DataFrame.from_dict({
            ...     "speech_files": ["predict_speech_1.wav", "predict_speech_2.wav", "predict_speech_3.wav"],
            ... }).to_csv("predict_data.csv", index=False)

        The file ``train_data.csv`` contains the following:

        .. code-block::

            speech_files,targets
            speech_1.wav,some speech
            speech_2.wav,some other speech
            speech_3.wav,some more speech

        The file ``predict_data.csv`` contains the following:

        .. code-block::

            speech_files
            predict_speech_1.wav
            predict_speech_2.wav
            predict_speech_3.wav

        .. doctest::

            >>> from flash import Trainer
            >>> from flash.audio import SpeechRecognitionData, SpeechRecognition
            >>> datamodule = SpeechRecognitionData.from_csv(
            ...     "speech_files",
            ...     "targets",
            ...     train_file="train_data.csv",
            ...     predict_file="predict_data.csv",
            ...     batch_size=2,
            ... )  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Downloading...
            >>> model = SpeechRecognition(backbone="patrickvonplaten/wav2vec2_tiny_random_robust")
            >>> trainer = Trainer(fast_dev_run=True)
            >>> trainer.fit(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Training...
            >>> trainer.predict(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Predicting...

        .. testcleanup::

            >>> import os
            >>> _ = [os.remove(f"speech_{i}.wav") for i in range(1, 4)]
            >>> _ = [os.remove(f"predict_speech_{i}.wav") for i in range(1, 4)]
            >>> os.remove("train_data.csv")
            >>> os.remove("predict_data.csv")
        """
        # Keyword arguments shared by the Input object built for every stage.
        ds_kw = dict(
            transform_kwargs=transform_kwargs,
            input_transforms_registry=cls.input_transforms_registry,
            input_key=input_field,
            sampling_rate=sampling_rate,
        )

        # The predicting stage gets no target_key: prediction inputs have no ground truth.
        return cls(
            input_cls(RunningStage.TRAINING, train_file, transform=train_transform, target_key=target_field, **ds_kw),
            input_cls(RunningStage.VALIDATING, val_file, transform=val_transform, target_key=target_field, **ds_kw),
            input_cls(RunningStage.TESTING, test_file, transform=test_transform, target_key=target_field, **ds_kw),
            input_cls(RunningStage.PREDICTING, predict_file, transform=predict_transform, **ds_kw),
            **data_module_kwargs,
        )

    @classmethod
    def from_json(
        cls,
        input_field: str,
        target_field: Optional[str] = None,
        train_file: Optional[str] = None,
        val_file: Optional[str] = None,
        test_file: Optional[str] = None,
        predict_file: Optional[str] = None,
        sampling_rate: int = 16000,
        field: Optional[str] = None,
        train_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        input_cls: Type[Input] = SpeechRecognitionJSONInput,
        transform_kwargs: Optional[Dict] = None,
        **data_module_kwargs: Any,
    ) -> "SpeechRecognitionData":
        """Load the :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData` from JSON files containing
        audio file paths and their corresponding targets.

        Input audio file paths will be extracted from the ``input_field`` field in the JSON files.
        The supported file extensions are: ``wav``, ``ogg``, ``flac``, ``mat``, and ``mp3``.
        The targets will be extracted from the ``target_field`` field in the JSON files.

        To learn how to customize the transforms applied for each stage, read our
        :ref:`customizing transforms guide <customizing_transforms>`.

        Args:
            input_field: The field in the JSON files containing the audio file paths.
            target_field: The field in the JSON files containing the targets.
            train_file: The JSON file to use when training.
            val_file: The JSON file to use when validating.
            test_file: The JSON file to use when testing.
            predict_file: The JSON file to use when predicting.
            sampling_rate: Sampling rate to use when loading the audio files.
            field: The field that holds the data in the JSON file.
            train_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when training.
            val_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when validating.
            test_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when testing.
            predict_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when
                predicting.
            input_cls: The :class:`~flash.core.data.io.input.Input` type to use for loading the data.
            transform_kwargs: Dict of keyword arguments to be provided when instantiating the transforms.
            data_module_kwargs: Additional keyword arguments to provide to the
                :class:`~flash.core.data.data_module.DataModule` constructor.

        Returns:
            The constructed :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData`.

        Examples
        ________

        .. testsetup::

            >>> import numpy as np
            >>> from pandas import DataFrame
            >>> import soundfile as sf
            >>> samplerate = 44100
            >>> data = np.random.uniform(-1, 1, size=(samplerate * 3, 2))
            >>> _ = [sf.write(f"speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]
            >>> _ = [sf.write(f"predict_speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]
            >>> DataFrame.from_dict({
            ...     "speech_files": ["speech_1.wav", "speech_2.wav", "speech_3.wav"],
            ...     "targets": ["some speech", "some other speech", "some more speech"],
            ... }).to_json("train_data.json", orient="records", lines=True)
            >>> DataFrame.from_dict({
            ...     "speech_files": ["predict_speech_1.wav", "predict_speech_2.wav", "predict_speech_3.wav"],
            ... }).to_json("predict_data.json", orient="records", lines=True)

        The file ``train_data.json`` contains the following:

        .. code-block::

            {"speech_files":"speech_1.wav","targets":"some speech"}
            {"speech_files":"speech_2.wav","targets":"some other speech"}
            {"speech_files":"speech_3.wav","targets":"some more speech"}

        The file ``predict_data.json`` contains the following:

        .. code-block::

            {"speech_files":"predict_speech_1.wav"}
            {"speech_files":"predict_speech_2.wav"}
            {"speech_files":"predict_speech_3.wav"}

        .. doctest::

            >>> from flash import Trainer
            >>> from flash.audio import SpeechRecognitionData, SpeechRecognition
            >>> datamodule = SpeechRecognitionData.from_json(
            ...     "speech_files",
            ...     "targets",
            ...     train_file="train_data.json",
            ...     predict_file="predict_data.json",
            ...     batch_size=2,
            ... )  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Downloading...
            >>> model = SpeechRecognition(backbone="patrickvonplaten/wav2vec2_tiny_random_robust")
            >>> trainer = Trainer(fast_dev_run=True)
            >>> trainer.fit(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Training...
            >>> trainer.predict(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Predicting...

        .. testcleanup::

            >>> import os
            >>> _ = [os.remove(f"speech_{i}.wav") for i in range(1, 4)]
            >>> _ = [os.remove(f"predict_speech_{i}.wav") for i in range(1, 4)]
            >>> os.remove("train_data.json")
            >>> os.remove("predict_data.json")
        """
        # Keyword arguments shared by the Input object built for every stage.
        ds_kw = dict(
            transform_kwargs=transform_kwargs,
            input_transforms_registry=cls.input_transforms_registry,
            input_key=input_field,
            sampling_rate=sampling_rate,
            field=field,
        )

        # The predicting stage gets no target_key: prediction inputs have no ground truth.
        return cls(
            input_cls(RunningStage.TRAINING, train_file, transform=train_transform, target_key=target_field, **ds_kw),
            input_cls(RunningStage.VALIDATING, val_file, transform=val_transform, target_key=target_field, **ds_kw),
            input_cls(RunningStage.TESTING, test_file, transform=test_transform, target_key=target_field, **ds_kw),
            input_cls(RunningStage.PREDICTING, predict_file, transform=predict_transform, **ds_kw),
            **data_module_kwargs,
        )

    @classmethod
    def from_datasets(
        cls,
        train_dataset: Optional[Dataset] = None,
        val_dataset: Optional[Dataset] = None,
        test_dataset: Optional[Dataset] = None,
        predict_dataset: Optional[Dataset] = None,
        train_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        val_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        test_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        predict_transform: INPUT_TRANSFORM_TYPE = InputTransform,
        sampling_rate: int = 16000,
        input_cls: Type[Input] = SpeechRecognitionDatasetInput,
        transform_kwargs: Optional[Dict] = None,
        **data_module_kwargs: Any,
    ) -> "SpeechRecognitionData":
        """Load the :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData` from PyTorch Dataset
        objects.

        The Dataset objects should be one of the following:

        * A PyTorch Dataset where the ``__getitem__`` returns a tuple: ``(file_path, target)``
        * A PyTorch Dataset where the ``__getitem__`` returns a dict: ``{"input": file_path, "target": target}``

        The supported file extensions are: ``wav``, ``ogg``, ``flac``, ``mat``, and ``mp3``.

        To learn how to customize the transforms applied for each stage, read our
        :ref:`customizing transforms guide <customizing_transforms>`.

        Args:
            train_dataset: The Dataset to use when training.
            val_dataset: The Dataset to use when validating.
            test_dataset: The Dataset to use when testing.
            predict_dataset: The Dataset to use when predicting.
            sampling_rate: Sampling rate to use when loading the audio files.
            train_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when training.
            val_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when validating.
            test_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when testing.
            predict_transform: The :class:`~flash.core.data.io.input_transform.InputTransform` type to use when
                predicting.
            input_cls: The :class:`~flash.core.data.io.input.Input` type to use for loading the data.
            transform_kwargs: Dict of keyword arguments to be provided when instantiating the transforms.
            data_module_kwargs: Additional keyword arguments to provide to the
                :class:`~flash.core.data.data_module.DataModule` constructor.

        Returns:
            The constructed :class:`~flash.audio.speech_recognition.data.SpeechRecognitionData`.

        Examples
        ________

        .. testsetup::

            >>> import numpy as np
            >>> import soundfile as sf
            >>> samplerate = 44100
            >>> data = np.random.uniform(-1, 1, size=(samplerate * 3, 2))
            >>> _ = [sf.write(f"speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]
            >>> _ = [sf.write(f"predict_speech_{i}.wav", data, samplerate, subtype='PCM_24') for i in range(1, 4)]

        A PyTorch Dataset where the ``__getitem__`` returns a tuple: ``(file_path, target)``:

        .. doctest::

            >>> from torch.utils.data import Dataset
            >>> from flash import Trainer
            >>> from flash.audio import SpeechRecognitionData, SpeechRecognition
            >>>
            >>> class CustomDataset(Dataset):
            ...     def __init__(self, files, targets=None):
            ...         self.files = files
            ...         self.targets = targets
            ...     def __getitem__(self, index):
            ...         if self.targets is not None:
            ...             return self.files[index], self.targets[index]
            ...         return self.files[index]
            ...     def __len__(self):
            ...         return len(self.files)
            ...
            >>>
            >>> datamodule = SpeechRecognitionData.from_datasets(
            ...     train_dataset=CustomDataset(
            ...         ["speech_1.wav", "speech_2.wav", "speech_3.wav"],
            ...         ["some speech", "some other speech", "some more speech"],
            ...     ),
            ...     predict_dataset=CustomDataset(
            ...         ["predict_speech_1.wav", "predict_speech_2.wav", "predict_speech_3.wav"],
            ...     ),
            ...     batch_size=2,
            ... )
            >>> model = SpeechRecognition(backbone="patrickvonplaten/wav2vec2_tiny_random_robust")
            >>> trainer = Trainer(fast_dev_run=True)
            >>> trainer.fit(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Training...
            >>> trainer.predict(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Predicting...

        A PyTorch Dataset where the ``__getitem__`` returns a dict: ``{"input": file_path, "target": target}``:

        .. doctest::

            >>> from torch.utils.data import Dataset
            >>> from flash import Trainer
            >>> from flash.audio import SpeechRecognitionData, SpeechRecognition
            >>>
            >>> class CustomDataset(Dataset):
            ...     def __init__(self, files, targets=None):
            ...         self.files = files
            ...         self.targets = targets
            ...     def __getitem__(self, index):
            ...         if self.targets is not None:
            ...             return {"input": self.files[index], "target": self.targets[index]}
            ...         return {"input": self.files[index]}
            ...     def __len__(self):
            ...         return len(self.files)
            ...
            >>>
            >>> datamodule = SpeechRecognitionData.from_datasets(
            ...     train_dataset=CustomDataset(
            ...         ["speech_1.wav", "speech_2.wav", "speech_3.wav"],
            ...         ["some speech", "some other speech", "some more speech"],
            ...     ),
            ...     predict_dataset=CustomDataset(
            ...         ["predict_speech_1.wav", "predict_speech_2.wav", "predict_speech_3.wav"],
            ...     ),
            ...     batch_size=2,
            ... )
            >>> model = SpeechRecognition(backbone="patrickvonplaten/wav2vec2_tiny_random_robust")
            >>> trainer = Trainer(fast_dev_run=True)
            >>> trainer.fit(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Training...
            >>> trainer.predict(model, datamodule=datamodule)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
            Predicting...

        .. testcleanup::

            >>> import os
            >>> _ = [os.remove(f"speech_{i}.wav") for i in range(1, 4)]
            >>> _ = [os.remove(f"predict_speech_{i}.wav") for i in range(1, 4)]
        """
        # Keyword arguments shared by the Input object built for every stage.
        ds_kw = dict(
            transform_kwargs=transform_kwargs,
            input_transforms_registry=cls.input_transforms_registry,
            sampling_rate=sampling_rate,
        )

        # One Input per running stage, each wrapping the corresponding user Dataset.
        return cls(
            input_cls(RunningStage.TRAINING, train_dataset, transform=train_transform, **ds_kw),
            input_cls(RunningStage.VALIDATING, val_dataset, transform=val_transform, **ds_kw),
            input_cls(RunningStage.TESTING, test_dataset, transform=test_transform, **ds_kw),
            input_cls(RunningStage.PREDICTING, predict_dataset, transform=predict_transform, **ds_kw),
            **data_module_kwargs,
        )
| 49.479279
| 119
| 0.627326
| 3,053
| 27,461
| 5.438257
| 0.083197
| 0.012949
| 0.021141
| 0.027104
| 0.862013
| 0.833464
| 0.819009
| 0.805035
| 0.78799
| 0.768596
| 0
| 0.008373
| 0.264994
| 27,461
| 554
| 120
| 49.568592
| 0.814209
| 0.649467
| 0
| 0.597222
| 0
| 0
| 0.020376
| 0.018112
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0.069444
| 0
| 0.152778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0b8d614176f053126ea25cc222a3f9f4e44ba447
| 4,353
|
py
|
Python
|
testsuite/splineinverse-knots-ascend-reg/run.py
|
luyatshimbalanga/OpenShadingLanguage
|
2120647911af732f0d12d70e2f7f4e1ebe8fadcb
|
[
"BSD-3-Clause"
] | 1,105
|
2015-01-02T20:47:19.000Z
|
2021-01-25T13:20:56.000Z
|
testsuite/splineinverse-knots-ascend-reg/run.py
|
luyatshimbalanga/OpenShadingLanguage
|
2120647911af732f0d12d70e2f7f4e1ebe8fadcb
|
[
"BSD-3-Clause"
] | 696
|
2015-01-07T23:42:08.000Z
|
2021-01-25T03:55:08.000Z
|
testsuite/splineinverse-knots-ascend-reg/run.py
|
luyatshimbalanga/OpenShadingLanguage
|
2120647911af732f0d12d70e2f7f4e1ebe8fadcb
|
[
"BSD-3-Clause"
] | 248
|
2015-01-05T13:41:28.000Z
|
2021-01-24T23:29:55.000Z
|
#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage
# Regression tests for splineinverse() with float-array knots, covering every
# combination of constant/uniform/varying (c/u/v) interpolation of the spline
# value and the knot array, in a plain variant and a derivative variant.
#
# The harness (runtest.py) injects `command`, `outputs`, and `testshade` into
# this script's namespace before executing it, so they are intentionally not
# defined here.
#
# Each (flags, output variable, name prefix) pair below describes one variant:
#   - plain:      no extra flags, writes the shader's Fout output
#   - derivative: varies u/v derivatives and writes the ValDxDyOut output
for _flags, _outvar, _prefix in (
    ("", "Fout", ""),
    ("--vary_udxdy --vary_vdxdy ", "ValDxDyOut", "deriv_"),
):
    for _knots in ("c", "u", "v"):
        # Commands first, then the matching outputs, to preserve the original
        # interleaving (3 commands followed by 3 outputs per knot letter).
        _names = [
            _prefix + "splineinverse_" + _knots + "_float_" + _values + "_floatarray"
            for _values in ("v", "u", "c")
        ]
        for _name in _names:
            command += testshade(
                _flags + "-t 1 -g 64 64 --center -od uint8 -o " + _outvar
                + " " + _name + ".tif test_" + _name
            )
        for _name in _names:
            outputs.append(_name + ".tif")

# expect a few LSB failures
failthresh = 0.008
failpercent = 3
| 77.732143
| 187
| 0.828164
| 672
| 4,353
| 4.949405
| 0.08631
| 0.14071
| 0.016236
| 0.02706
| 0.938966
| 0.938966
| 0.938966
| 0.687011
| 0.489477
| 0.489477
| 0
| 0.02845
| 0.079485
| 4,353
| 55
| 188
| 79.145455
| 0.801597
| 0.048243
| 0
| 0
| 0
| 0.236842
| 0.791878
| 0.539521
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f02747ff47115ba774a4c8feaed53ea19288b2a4
| 83
|
py
|
Python
|
utils/__init__.py
|
DK-Darkness/Bing-Linkedin-Crawler
|
00d35c4f62816e23c18449a0fd97b08e03730c2f
|
[
"Apache-2.0"
] | 1
|
2020-09-17T07:29:04.000Z
|
2020-09-17T07:29:04.000Z
|
utils/__init__.py
|
DK-Darkness/Bing-Linkedin-Crawler
|
00d35c4f62816e23c18449a0fd97b08e03730c2f
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
DK-Darkness/Bing-Linkedin-Crawler
|
00d35c4f62816e23c18449a0fd97b08e03730c2f
|
[
"Apache-2.0"
] | 1
|
2020-09-17T07:29:06.000Z
|
2020-09-17T07:29:06.000Z
|
from .bingSpider import *
from .linkedinSpider import *
from .nameGuessing import *
| 27.666667
| 29
| 0.795181
| 9
| 83
| 7.333333
| 0.555556
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 83
| 3
| 30
| 27.666667
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f04dac056f012ac2112c994e89ee8cea9fb42239
| 46
|
py
|
Python
|
pdip/dependency/provider/__init__.py
|
ahmetcagriakca/pdip
|
c4c16d5666a740154cabdc6762cd44d98b7bdde8
|
[
"MIT"
] | 2
|
2021-12-09T21:07:46.000Z
|
2021-12-11T22:18:01.000Z
|
pdip/dependency/provider/__init__.py
|
fmuyilmaz/pdip
|
f7e30b0c04d9e85ef46b0b7094fafd3ce18bccab
|
[
"MIT"
] | null | null | null |
pdip/dependency/provider/__init__.py
|
fmuyilmaz/pdip
|
f7e30b0c04d9e85ef46b0b7094fafd3ce18bccab
|
[
"MIT"
] | 3
|
2021-11-15T00:47:00.000Z
|
2021-12-17T11:35:45.000Z
|
from .service_provider import ServiceProvider
| 23
| 45
| 0.891304
| 5
| 46
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f05690f058f7e6cfee91bd2f2ebf1116cd197c27
| 138
|
py
|
Python
|
market_maker/utils/errors.py
|
mwithi/sample-market-maker
|
92d20ae6f1bb52ff0373183f916aba9a90dae70c
|
[
"Apache-2.0"
] | 1,524
|
2016-08-25T07:07:58.000Z
|
2022-03-30T19:51:39.000Z
|
market_maker/utils/errors.py
|
mwithi/sample-market-maker
|
92d20ae6f1bb52ff0373183f916aba9a90dae70c
|
[
"Apache-2.0"
] | 222
|
2016-12-13T13:48:18.000Z
|
2022-03-10T07:30:13.000Z
|
market_maker/utils/errors.py
|
mwithi/sample-market-maker
|
92d20ae6f1bb52ff0373183f916aba9a90dae70c
|
[
"Apache-2.0"
] | 930
|
2016-08-16T13:05:44.000Z
|
2022-03-31T15:29:00.000Z
|
class AuthenticationError(Exception):
pass
class MarketClosedError(Exception):
pass
class MarketEmptyError(Exception):
pass
| 15.333333
| 37
| 0.768116
| 12
| 138
| 8.833333
| 0.5
| 0.367925
| 0.339623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 138
| 8
| 38
| 17.25
| 0.921739
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
f0730c45bb9c4968c86a0758632c4310be1797e1
| 148
|
py
|
Python
|
jointly/__init__.py
|
hpi-dhc/jointly
|
b56fca228b2705cf795ae453cd1d77c0567f099e
|
[
"MIT"
] | 7
|
2020-10-14T11:57:35.000Z
|
2021-12-28T11:32:45.000Z
|
jointly/__init__.py
|
hpi-dhc/jointly
|
b56fca228b2705cf795ae453cd1d77c0567f099e
|
[
"MIT"
] | 5
|
2021-08-18T09:04:16.000Z
|
2021-12-27T19:24:24.000Z
|
jointly/__init__.py
|
hpi-dhc/jointly
|
b56fca228b2705cf795ae453cd1d77c0567f099e
|
[
"MIT"
] | 1
|
2021-05-06T07:57:38.000Z
|
2021-05-06T07:57:38.000Z
|
from .abstract_extractor import *
from .shake_extractor import *
from .synchronizer import *
from .helpers import *
from .helpers_plotting import *
| 24.666667
| 33
| 0.797297
| 18
| 148
| 6.388889
| 0.444444
| 0.347826
| 0.330435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 148
| 5
| 34
| 29.6
| 0.898438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b2f83b81f0bb3a61ff834b5aa6b2cffba155bd2f
| 119
|
py
|
Python
|
ditat_etl/utils/entity_resolution/__init__.py
|
ditat-llc/ditat_etl
|
3d4846ecb9663f9d3de2473aaf4bbcf52f735beb
|
[
"MIT"
] | 4
|
2021-08-11T23:05:37.000Z
|
2022-03-22T18:43:35.000Z
|
ditat_etl/utils/entity_resolution/__init__.py
|
ditat-llc/ditat_etl
|
3d4846ecb9663f9d3de2473aaf4bbcf52f735beb
|
[
"MIT"
] | null | null | null |
ditat_etl/utils/entity_resolution/__init__.py
|
ditat-llc/ditat_etl
|
3d4846ecb9663f9d3de2473aaf4bbcf52f735beb
|
[
"MIT"
] | null | null | null |
from .matcher import Matcher
from .industry_standard import IndustryStandard
from .industry_naics import NaicsStandard
| 29.75
| 47
| 0.87395
| 14
| 119
| 7.285714
| 0.571429
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10084
| 119
| 3
| 48
| 39.666667
| 0.953271
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.