hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6646c777a708dfe01cfec4bbc0c8c3685de29bf4
| 31
|
py
|
Python
|
python/geo_calculator/__init__.py
|
JEsperancinhaOrg/geo-calculator
|
1915a2368ec061b71a0925961afa07e36a682f77
|
[
"Apache-2.0"
] | null | null | null |
python/geo_calculator/__init__.py
|
JEsperancinhaOrg/geo-calculator
|
1915a2368ec061b71a0925961afa07e36a682f77
|
[
"Apache-2.0"
] | null | null | null |
python/geo_calculator/__init__.py
|
JEsperancinhaOrg/geo-calculator
|
1915a2368ec061b71a0925961afa07e36a682f77
|
[
"Apache-2.0"
] | null | null | null |
from .src import geo_calculator
| 31
| 31
| 0.870968
| 5
| 31
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 31
| 1
| 31
| 31
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b0e040939a8e84561860594b77b4f778a0832bb8
| 51,752
|
py
|
Python
|
podpac/core/interpolation/test/test_interpolators.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 46
|
2018-04-06T19:54:32.000Z
|
2022-02-08T02:00:02.000Z
|
podpac/core/interpolation/test/test_interpolators.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 474
|
2018-04-05T22:21:09.000Z
|
2022-02-24T14:21:16.000Z
|
podpac/core/interpolation/test/test_interpolators.py
|
creare-com/podpac
|
7feb5c957513c146ce73ba1c36c630284f513a6e
|
[
"Apache-2.0"
] | 4
|
2019-04-11T17:49:53.000Z
|
2020-11-29T22:36:53.000Z
|
"""
Test interpolation methods
"""
# pylint: disable=C0111,W0212,R0903
import pytest
import traitlets as tl
import numpy as np
import podpac
from podpac.core.utils import ArrayTrait
from podpac.core.units import UnitsDataArray
from podpac.core.coordinates import Coordinates, clinspace
from podpac.core.data.rasterio_source import rasterio
from podpac.core.data.datasource import DataSource
from podpac.core.interpolation.interpolation_manager import InterpolationManager, InterpolationException
from podpac.core.interpolation.nearest_neighbor_interpolator import NearestNeighbor, NearestPreview
from podpac.core.interpolation.rasterio_interpolator import RasterioInterpolator
from podpac.core.interpolation.scipy_interpolator import ScipyGrid, ScipyPoint
from podpac.core.interpolation.xarray_interpolator import XarrayInterpolator
from podpac.core.interpolation.interpolation import InterpolationMixin
class MockArrayDataSource(InterpolationMixin, DataSource):
data = ArrayTrait().tag(attr=True)
coordinates = tl.Instance(Coordinates).tag(attr=True)
def get_data(self, coordinates, coordinates_index):
return self.create_output_array(coordinates, data=self.data[coordinates_index])
class MockArrayDataSourceXR(InterpolationMixin, DataSource):
data = ArrayTrait().tag(attr=True)
coordinates = tl.Instance(Coordinates).tag(attr=True)
def get_data(self, coordinates, coordinates_index):
dataxr = self.create_output_array(self.coordinates, data=self.data)
return self.create_output_array(coordinates, data=dataxr[coordinates_index].data)
class TestNone(object):
def test_none_select(self):
reqcoords = Coordinates([[-0.5, 1.5, 3.5], [0.5, 2.5, 4.5]], dims=["lat", "lon"])
srccoords = Coordinates([[-1, 0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5]], dims=["lat", "lon"])
# test straight ahead functionality
interp = InterpolationManager("none")
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
assert coords == srccoords[1:5, 1:-1]
assert srccoords[cidx] == coords
# test when selection is applied serially
interp = InterpolationManager([{"method": "none", "dims": ["lat"]}, {"method": "none", "dims": ["lon"]}])
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
assert coords == srccoords[1:5, 1:-1]
assert srccoords[cidx] == coords
# Test Case where rounding issues causes problem with endpoint
reqcoords = Coordinates([[0, 2, 4], [0, 2, 4]], dims=["lat", "lon"])
lat = np.arange(0, 6.1, 1.3333333333333334)
lon = np.arange(0, 6.1, 1.333333333333334) # Notice one decimal less on this number
srccoords = Coordinates([lat, lon], dims=["lat", "lon"])
# test straight ahead functionality
interp = InterpolationManager("none")
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
srccoords = Coordinates([lat, lon], dims=["lat", "lon"])
assert srccoords[cidx] == coords
def test_none_interpolation(self):
node = podpac.data.Array(
source=[0, 1, 2],
coordinates=podpac.Coordinates([[1, 5, 9]], dims=["lat"]),
interpolation="none",
)
o = node.eval(podpac.Coordinates([podpac.crange(1, 9, 1)], dims=["lat"]))
np.testing.assert_array_equal(o.data, node.source)
def test_none_heterogeneous(self):
# Heterogeneous
node = podpac.data.Array(
source=[[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2]],
coordinates=podpac.Coordinates([[1, 5, 9, 13], [0, 1, 2]], dims=["lat", "lon"]),
interpolation=[{"method": "none", "dims": ["lat"]}, {"method": "linear", "dims": ["lon"]}],
)
o = node.eval(podpac.Coordinates([podpac.crange(1, 9, 2), [0.5, 1.5]], dims=["lat", "lon"]))
np.testing.assert_array_equal(
o.data,
[
[0.5, 1.5],
[
0.5,
1.5,
],
[0.5, 1.5],
],
)
# Heterogeneous _flipped
node = podpac.data.Array(
source=[[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2]],
coordinates=podpac.Coordinates([[1, 5, 9, 13], [0, 1, 2]], dims=["lat", "lon"]),
interpolation=[{"method": "linear", "dims": ["lon"]}, {"method": "none", "dims": ["lat"]}],
)
o = node.eval(podpac.Coordinates([podpac.crange(1, 9, 2), [0.5, 1.5]], dims=["lat", "lon"]))
np.testing.assert_array_equal(
o.data,
[
[0.5, 1.5],
[
0.5,
1.5,
],
[0.5, 1.5],
],
)
# Examples
# source eval
# lat_lon lat, lon
node = podpac.data.Array(
source=[0, 1, 2],
coordinates=podpac.Coordinates([[[1, 5, 9], [1, 5, 9]]], dims=[["lat", "lon"]]),
interpolation=[{"method": "none", "dims": ["lon", "lat"]}],
)
o = node.eval(podpac.Coordinates([podpac.crange(1, 9, 1), podpac.crange(1, 9, 1)], dims=["lon", "lat"]))
np.testing.assert_array_equal(o.data, node.source)
# source eval
# lat, lon lat_lon
node = podpac.data.Array(
source=[[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2]],
coordinates=podpac.Coordinates([[1, 5, 9, 13], [0, 1, 2]], dims=["lat", "lon"]),
interpolation=[{"method": "none", "dims": ["lat", "lon"]}],
)
o = node.eval(podpac.Coordinates([[podpac.crange(1, 9, 2), podpac.crange(1, 9, 2)]], dims=[["lat", "lon"]]))
np.testing.assert_array_equal(o.data, node.source[:-1, 1:])
class TestNearest(object):
def test_nearest_preview_select(self):
reqcoords = Coordinates([[-0.5, 1.5, 3.5], [0.5, 2.5, 4.5]], dims=["lat", "lon"])
srccoords = Coordinates([[0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5]], dims=["lat", "lon"])
# test straight ahead functionality
interp = InterpolationManager("nearest_preview")
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_array_equal(coords["lat"].coordinates, [0, 2, 4])
np.testing.assert_array_equal(coords["lon"].coordinates, [0, 2, 4])
assert srccoords[cidx] == coords
# test when selection is applied serially
interp = InterpolationManager(
[{"method": "nearest_preview", "dims": ["lat"]}, {"method": "nearest_preview", "dims": ["lon"]}]
)
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_array_equal(coords["lat"].coordinates, [0, 2, 4])
np.testing.assert_array_equal(coords["lon"].coordinates, [0, 2, 4])
assert srccoords[cidx] == coords
# Test reverse selection
reqcoords = Coordinates([[-0.5, 1.5, 3.5], [0.5, 2.5, 4.5]], dims=["lat", "lon"])
srccoords = Coordinates([[0, 1, 2, 3, 4, 5][::-1], [0, 1, 2, 3, 4, 5][::-1]], dims=["lat", "lon"])
# test straight ahead functionality
interp = InterpolationManager("nearest_preview")
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_array_equal(coords["lat"].coordinates, [4, 2, 0])
np.testing.assert_array_equal(coords["lon"].coordinates, [5, 3, 1]) # Yes, this is expected behavior
assert srccoords[cidx] == coords
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_array_equal(coords["lat"].coordinates, [4, 2, 0])
np.testing.assert_array_equal(coords["lon"].coordinates, [5, 3, 1])
assert srccoords[cidx] == coords
# Test Case where rounding issues causes problem with endpoint
reqcoords = Coordinates([[0, 2, 4], [0, 2, 4]], dims=["lat", "lon"])
lat = np.arange(0, 6.1, 1.3333333333333334)
lon = np.arange(0, 6.1, 1.333333333333334) # Notice one decimal less on this number
srccoords = Coordinates([lat, lon], dims=["lat", "lon"])
# test straight ahead functionality
interp = InterpolationManager("nearest_preview")
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_almost_equal(coords["lat"].coordinates, lat[::2])
np.testing.assert_array_equal(coords["lon"].coordinates, lon[:4])
np.testing.assert_almost_equal(list(srccoords[cidx].bounds.values()), list(coords.bounds.values()))
assert srccoords[cidx].shape == coords.shape
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_almost_equal(coords["lat"].coordinates, lat[::2])
np.testing.assert_array_equal(coords["lon"].coordinates, lon[:4])
np.testing.assert_almost_equal(list(srccoords[cidx].bounds.values()), list(coords.bounds.values()))
assert srccoords[cidx].shape == coords.shape
# def test_nearest_preview_select_stacked(self):
# # TODO: how to handle stacked/unstacked coordinate asynchrony?
# reqcoords = Coordinates([[-.5, 1.5, 3.5], [.5, 2.5, 4.5]], dims=['lat', 'lon'])
# srccoords = Coordinates([([0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5])], dims=['lat_lon'])
# interp = InterpolationManager('nearest_preview')
# srccoords, srccoords_index = srccoords.intersect(reqcoords, outer=True, return_index=True)
# coords, cidx = interp.select_coordinates(reqcoords, srccoords, srccoords_index)
# assert len(coords) == len(srcoords) == len(cidx)
# assert len(coords['lat']) == len(reqcoords['lat'])
# assert len(coords['lon']) == len(reqcoords['lon'])
# assert np.all(coords['lat'].coordinates == np.array([0, 2, 4]))
def test_nearest_select_issue226(self):
reqcoords = Coordinates([[-0.5, 1.5, 3.5], [0.5, 2.5, 4.5]], dims=["lat", "lon"])
srccoords = Coordinates([[0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5]], dims=["lat", "lon"])
# test straight ahead functionality
interp = InterpolationManager("nearest")
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_array_equal(coords["lat"].coordinates, [0, 2, 4])
np.testing.assert_array_equal(coords["lon"].coordinates, [0, 3, 5])
assert srccoords[cidx] == coords
# test when selection is applied serially
interp = InterpolationManager([{"method": "nearest", "dims": ["lat"]}, {"method": "nearest", "dims": ["lon"]}])
coords, cidx = interp.select_coordinates(srccoords, reqcoords)
np.testing.assert_array_equal(coords["lat"].coordinates, [0, 2, 4])
np.testing.assert_array_equal(coords["lon"].coordinates, [0, 3, 5])
assert srccoords[cidx] == coords
def test_nearest_select_issue445(self):
sc = Coordinates([clinspace(-59.9, 89.9, 100, name="lat"), clinspace(-179.9, 179.9, 100, name="lon")])
node = podpac.data.Array(
interpolation="nearest_preview", source=np.arange(sc.size).reshape(sc.shape), coordinates=sc
)
coords = Coordinates([-61, 72], dims=["lat", "lon"])
out = node.eval(coords)
assert out.shape == (1, 1)
assert np.isnan(out.data[0, 0])
def test_interpolation(self):
for interpolation in ["nearest", "nearest_preview"]:
# unstacked 1D
source = np.random.rand(5)
coords_src = Coordinates([np.linspace(0, 10, 5)], dims=["lat"])
node = MockArrayDataSource(data=source, coordinates=coords_src, interpolation=interpolation)
coords_dst = Coordinates([[1, 1.2, 1.5, 5, 9]], dims=["lat"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.values[0] == source[0] and output.values[1] == source[0] and output.values[2] == source[1]
# unstacked N-D
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(data=source, coordinates=coords_src, interpolation=interpolation)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.values[0, 0] == source[1, 1]
# source = stacked, dest = stacked
source = np.random.rand(5)
coords_src = Coordinates([(np.linspace(0, 10, 5), np.linspace(0, 10, 5))], dims=["lat_lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor]},
)
coords_dst = Coordinates([(np.linspace(1, 9, 3), np.linspace(1, 9, 3))], dims=["lat_lon"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert all(output.values == source[[0, 2, 4]])
# source = stacked, dest = unstacked
source = np.random.rand(5)
coords_src = Coordinates([(np.linspace(0, 10, 5), np.linspace(0, 10, 5))], dims=["lat_lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor]},
)
coords_dst = Coordinates([np.linspace(1, 9, 3), np.linspace(1, 9, 3)], dims=["lat", "lon"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.values == source[np.array([[0, 1, 2], [1, 2, 3], [2, 3, 4]])])
# source = unstacked, dest = stacked
source = np.random.rand(5, 5)
coords_src = Coordinates([np.linspace(0, 10, 5), np.linspace(0, 10, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor]},
)
coords_dst = Coordinates([(np.linspace(1, 9, 3), np.linspace(1, 9, 3))], dims=["lat_lon"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.values == source[[0, 2, 4], [0, 2, 4]])
# source = unstacked and non-uniform, dest = stacked
source = np.random.rand(5, 5)
coords_src = Coordinates([[0, 1.1, 1.2, 6.1, 10], [0, 1.1, 4, 7.1, 9.9]], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor]},
)
coords_dst = Coordinates([(np.linspace(1, 9, 3), np.linspace(1, 9, 3))], dims=["lat_lon"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.values == source[[1, 3, 4], [1, 2, 4]])
# lat_lon_time_alt --> lon, alt_time, lat
source = np.random.rand(5)
coords_src = Coordinates([[[0, 1, 2, 3, 4]] * 4], dims=[["lat", "lon", "time", "alt"]])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor]},
)
coords_dst = Coordinates(
[[1, 2.4, 3.9], [[1, 2.4, 3.9], [1, 2.4, 3.9]], [1, 2.4, 3.9]], dims=["lon", "alt_time", "lat"]
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.values[[0, 1, 2], [0, 1, 2], [0, 1, 2]] == source[[1, 2, 4]])
def test_spatial_tolerance(self):
# unstacked 1D
source = np.random.rand(5)
coords_src = Coordinates([np.linspace(0, 10, 5)], dims=["lat"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "params": {"spatial_tolerance": 1.1}},
)
coords_dst = Coordinates([[1, 1.2, 1.5, 5, 9]], dims=["lat"])
output = node.eval(coords_dst)
print(output)
print(source)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.values[0] == source[0] and np.isnan(output.values[1]) and output.values[2] == source[1]
# stacked 1D
source = np.random.rand(5)
coords_src = Coordinates([[np.linspace(0, 10, 5), np.linspace(0, 10, 5)]], dims=[["lat", "lon"]])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "params": {"spatial_tolerance": 1.1}},
)
coords_dst = Coordinates([[[1, 1.2, 1.5, 5, 9], [1, 1.2, 1.5, 5, 9]]], dims=[["lat", "lon"]])
output = node.eval(coords_dst)
print(output)
print(source)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.values[0] == source[0] and np.isnan(output.values[1]) and output.values[2] == source[1]
def test_time_tolerance(self):
# unstacked 1D
source = np.random.rand(5, 5)
coords_src = Coordinates(
[np.linspace(0, 10, 5), clinspace("2018-01-01", "2018-01-09", 5)], dims=["lat", "time"]
)
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"params": {"spatial_tolerance": 1.1, "time_tolerance": np.timedelta64(1, "D")},
},
)
coords_dst = Coordinates([[1, 1.2, 1.5, 5, 9], clinspace("2018-01-01", "2018-01-09", 3)], dims=["lat", "time"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert (
output.values[0, 0] == source[0, 0]
and output.values[0, 1] == source[0, 2]
and np.isnan(output.values[1, 0])
and np.isnan(output.values[1, 1])
and output.values[2, 0] == source[1, 0]
and output.values[2, 1] == source[1, 2]
)
def test_stacked_source_unstacked_region_non_square(self):
# unstacked 1D
source = np.random.rand(5)
coords_src = Coordinates(
[[np.linspace(0, 10, 5), clinspace("2018-01-01", "2018-01-09", 5)]], dims=[["lat", "time"]]
)
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "nearest", "interpolators": [NearestNeighbor]}
)
coords_dst = Coordinates([[1, 1.2, 1.5, 5, 9], clinspace("2018-01-01", "2018-01-09", 3)], dims=["lat", "time"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.values == source[np.array([[0, 2, 4]] * 5)])
def test_time_space_scale_grid(self):
# Grid
source = np.random.rand(5, 3, 2)
source[2, 1, 0] = np.nan
coords_src = Coordinates(
[np.linspace(0, 10, 5), ["2018-01-01", "2018-01-02", "2018-01-03"], [0, 10]], dims=["lat", "time", "alt"]
)
coords_dst = Coordinates([5.1, "2018-01-02T11", 1], dims=["lat", "time", "alt"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
"params": {
"spatial_scale": 1,
"time_scale": "1,D",
"alt_scale": 10,
"remove_nan": True,
"use_selector": False,
},
},
)
output = node.eval(coords_dst)
assert output == source[2, 2, 0]
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
"params": {
"spatial_scale": 1,
"time_scale": "1,s",
"alt_scale": 10,
"remove_nan": True,
"use_selector": False,
},
},
)
output = node.eval(coords_dst)
assert output == source[2, 1, 1]
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
"params": {
"spatial_scale": 1,
"time_scale": "1,s",
"alt_scale": 1,
"remove_nan": True,
"use_selector": False,
},
},
)
output = node.eval(coords_dst)
assert output == source[3, 1, 0]
def test_remove_nan(self):
# Stacked
source = np.random.rand(5)
source[2] = np.nan
coords_src = Coordinates(
[[np.linspace(0, 10, 5), clinspace("2018-01-01", "2018-01-09", 5)]], dims=[["lat", "time"]]
)
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor], "params": {"remove_nan": False}},
)
coords_dst = Coordinates([[5.1]], dims=["lat"])
output = node.eval(coords_dst)
assert np.isnan(output)
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
"params": {"remove_nan": True, "use_selector": False},
},
)
output = node.eval(coords_dst)
assert (
output == source[3]
) # This fails because the selector selects the nan value... can we turn off the selector?
# Grid
source = np.random.rand(5, 3)
source[2, 1] = np.nan
coords_src = Coordinates([np.linspace(0, 10, 5), [1, 2, 3]], dims=["lat", "time"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor], "params": {"remove_nan": False}},
)
coords_dst = Coordinates([5.1, 2.01], dims=["lat", "time"])
output = node.eval(coords_dst)
assert np.isnan(output)
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
"params": {"remove_nan": True, "use_selector": False},
},
)
output = node.eval(coords_dst)
assert output == source[2, 2]
def test_respect_bounds(self):
source = np.random.rand(5)
coords_src = Coordinates([[1, 2, 3, 4, 5]], ["alt"])
coords_dst = Coordinates([[-0.5, 1.1, 2.6]], ["alt"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
"params": {"respect_bounds": False},
},
)
output = node.eval(coords_dst)
np.testing.assert_array_equal(output.data, source[[0, 0, 2]])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [NearestNeighbor], "params": {"respect_bounds": True}},
)
output = node.eval(coords_dst)
np.testing.assert_array_equal(output.data[1:], source[[0, 2]])
assert np.isnan(output.data[0])
def test_2Dstacked(self):
# With Time
source = np.random.rand(5, 4, 2)
coords_src = Coordinates(
[
[
np.arange(5)[:, None] + 0.1 * np.ones((5, 4)),
np.arange(4)[None, :] + 0.1 * np.ones((5, 4)),
],
[0.4, 0.7],
],
["lat_lon", "time"],
)
coords_dst = Coordinates([np.arange(4) + 0.2, np.arange(1, 4) - 0.2, [0.5]], ["lat", "lon", "time"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
},
)
output = node.eval(coords_dst)
np.testing.assert_array_equal(output, source[:4, 1:, :1])
# Using 'xarray' coordinates type
node = MockArrayDataSourceXR(
data=source,
coordinates=coords_src,
coordinate_index_type="xarray",
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
},
)
output = node.eval(coords_dst)
np.testing.assert_array_equal(output, source[:4, 1:, :1])
# Using 'slice' coordinates type
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
coordinate_index_type="slice",
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
},
)
output = node.eval(coords_dst)
np.testing.assert_array_equal(output, source[:4, 1:, :1])
# Without Time
source = np.random.rand(5, 4)
node = MockArrayDataSource(
data=source,
coordinates=coords_src.drop("time"),
interpolation={
"method": "nearest",
"interpolators": [NearestNeighbor],
},
)
output = node.eval(coords_dst)
np.testing.assert_array_equal(output, source[:4, 1:])
# def test_3Dstacked(self):
# # With Time
# source = np.random.rand(5, 4, 2)
# coords_src = Coordinates([[
# np.arange(5)[:, None, None] + 0.1 * np.ones((5, 4, 2)),
# np.arange(4)[None, :, None] + 0.1 * np.ones((5, 4, 2)),
# np.arange(2)[None, None, :] + 0.1 * np.ones((5, 4, 2))]], ["lat_lon_time"])
# coords_dst = Coordinates([np.arange(4)+0.2, np.arange(1, 4)-0.2, [0.5]], ["lat", "lon", "time"])
# node = MockArrayDataSource(
# data=source,
# coordinates=coords_src,
# interpolation={
# "method": "nearest",
# "interpolators": [NearestNeighbor],
# },
# )
# output = node.eval(coords_dst)
# np.testing.assert_array_equal(output, source[:4, 1:, :1])
# # Using 'xarray' coordinates type
# node = MockArrayDataSourceXR(
# data=source,
# coordinates=coords_src,
# coordinate_index_type='xarray',
# interpolation={
# "method": "nearest",
# "interpolators": [NearestNeighbor],
# },
# )
# output = node.eval(coords_dst)
# np.testing.assert_array_equal(output, source[:4, 1:, :1])
# # Using 'slice' coordinates type
# node = MockArrayDataSource(
# data=source,
# coordinates=coords_src,
# coordinate_index_type='slice',
# interpolation={
# "method": "nearest",
# "interpolators": [NearestNeighbor],
# },
# )
# output = node.eval(coords_dst)
# np.testing.assert_array_equal(output, source[:4, 1:, :1])
# # Without Time
# source = np.random.rand(5, 4)
# node = MockArrayDataSource(
# data=source,
# coordinates=coords_src.drop('time'),
# interpolation={
# "method": "nearest",
# "interpolators": [NearestNeighbor],
# },
# )
# output = node.eval(coords_dst)
# np.testing.assert_array_equal(output, source[:4, 1:])
class TestInterpolateRasterioInterpolator(object):
"""test interpolation functions"""
def test_interpolate_rasterio(self):
""" regular interpolation using rasterio"""
assert rasterio is not None
source = np.arange(0, 15)
source.resize((3, 5))
coords_src = Coordinates([clinspace(0, 10, 3), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(1, 11, 3), clinspace(1, 11, 5)], dims=["lat", "lon"])
# try one specific rasterio case to measure output
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "min", "interpolators": [RasterioInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.data[0, 3] == 3.0
assert output.data[0, 4] == 4.0
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "max", "interpolators": [RasterioInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.data[0, 3] == 9.0
assert output.data[0, 4] == 9.0
# TODO boundary should be able to use a default
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "bilinear", "interpolators": [RasterioInterpolator]},
boundary={"lat": 2.5, "lon": 1.25},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
np.testing.assert_allclose(
output, [[1.4, 2.4, 3.4, 4.4, 5.0], [6.4, 7.4, 8.4, 9.4, 10.0], [10.4, 11.4, 12.4, 13.4, 14.0]]
)
def test_interpolate_rasterio_descending(self):
"""should handle descending"""
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(10, 0, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [RasterioInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.lon.values == coords_dst["lon"].coordinates)
class TestInterpolateScipyGrid(object):
"""test interpolation functions"""
def test_interpolate_scipy_grid(self):
source = np.arange(0, 25)
source.resize((5, 5))
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5)], dims=["lat", "lon"])
# try one specific rasterio case to measure output
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "nearest", "interpolators": [ScipyGrid]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
print(output)
assert output.data[0, 0] == 0.0
assert output.data[0, 3] == 3.0
assert output.data[1, 3] == 8.0
assert np.isnan(output.data[0, 4]) # TODO: how to handle outside bounds
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "cubic_spline", "interpolators": [ScipyGrid]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert int(output.data[0, 0]) == 2
assert int(output.data[2, 4]) == 16
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "bilinear", "interpolators": [ScipyGrid]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert int(output.data[0, 0]) == 2
assert int(output.data[3, 3]) == 20
assert np.isnan(output.data[4, 4]) # TODO: how to handle outside bounds
def test_interpolate_irregular_arbitrary_2dims(self):
""" irregular interpolation """
# Note, this test also tests the looper helper
# try >2 dims
source = np.random.rand(5, 5, 3)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5), [2, 3, 5]], dims=["lat", "lon", "time"])
coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5), [2, 3, 4]], dims=["lat", "lon", "time"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation=[{"method": "nearest", "interpolators": [ScipyGrid]}, {"method": "linear", "dims": ["time"]}],
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.lon.values == coords_dst["lon"].coordinates)
assert np.all(output.time.values == coords_dst["time"].coordinates)
# assert output.data[0, 0] == source[]
def test_interpolate_looper_helper(self):
""" irregular interpolation """
# Note, this test also tests the looper helper
# try >2 dims
source = np.random.rand(5, 5, 3, 2)
result = source.copy()
result[:, :, 2, :] = (result[:, :, 1, :] + result[:, :, 2, :]) / 2
result = (result[..., 0:1] + result[..., 1:]) / 2
result = result[[0, 1, 2, 3, 4]]
result = result[:, [0, 1, 2, 3, 4]]
result[-1] = np.nan
result[:, -1] = np.nan
coords_src = Coordinates(
[clinspace(0, 10, 5), clinspace(0, 10, 5), [2, 3, 5], [0, 2]], dims=["lat", "lon", "time", "alt"]
)
coords_dst = Coordinates(
[clinspace(1, 11, 5), clinspace(1, 11, 5), [2, 3, 4], [1]], dims=["lat", "lon", "time", "alt"]
)
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation=[
{"method": "nearest", "interpolators": [ScipyGrid]},
{"method": "linear", "dims": ["time", "alt"]},
],
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.lon.values == coords_dst["lon"].coordinates)
assert np.all(output.time.values == coords_dst["time"].coordinates)
assert np.all(output.alt.values == coords_dst["alt"].coordinates)
np.testing.assert_array_almost_equal(result, output.data)
def test_interpolate_irregular_arbitrary_descending(self):
"""should handle descending"""
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "nearest", "interpolators": [ScipyGrid]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
np.testing.assert_array_equal(output.lat.values, coords_dst["lat"].coordinates)
np.testing.assert_array_equal(output.lon.values, coords_dst["lon"].coordinates)
def test_interpolate_irregular_arbitrary_swap(self):
"""should handle descending"""
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "nearest", "interpolators": [ScipyGrid]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
np.testing.assert_array_equal(output.lat.values, coords_dst["lat"].coordinates)
np.testing.assert_array_equal(output.lon.values, coords_dst["lon"].coordinates)
def test_interpolate_irregular_lat_lon(self):
""" irregular interpolation """
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([[[0, 2, 4, 6, 8, 10], [0, 2, 4, 5, 6, 10]]], dims=["lat_lon"])
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "nearest", "interpolators": [ScipyGrid]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert "lat_lon" in output.dims
np.testing.assert_array_equal(output["lat"].values, coords_dst["lat"].coordinates)
np.testing.assert_array_equal(output["lon"].values, coords_dst["lon"].coordinates)
assert output.values[0] == source[0, 0]
assert output.values[1] == source[1, 1]
assert output.values[-1] == source[-1, -1]
class TestInterpolateScipyPoint(object):
def test_interpolate_scipy_point(self):
""" interpolate point data to nearest neighbor with various coords_dst"""
source = np.random.rand(6)
coords_src = Coordinates([[[0, 2, 4, 6, 8, 10], [0, 2, 4, 5, 6, 10]]], dims=["lat_lon"])
coords_dst = Coordinates([[[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]]], dims=["lat_lon"])
node = MockArrayDataSource(
data=source, coordinates=coords_src, interpolation={"method": "nearest", "interpolators": [ScipyPoint]}
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert "lat_lon" in output.dims
np.testing.assert_array_equal(output.lat.values, coords_dst["lat"].coordinates)
np.testing.assert_array_equal(output.lon.values, coords_dst["lon"].coordinates)
assert output.values[0] == source[0]
assert output.values[-1] == source[3]
coords_dst = Coordinates([[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]], dims=["lat", "lon"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
np.testing.assert_array_equal(output.lat.values, coords_dst["lat"].coordinates)
assert output.values[0, 0] == source[0]
assert output.values[-1, -1] == source[3]
class TestXarrayInterpolator(object):
"""test interpolation functions"""
def test_nearest_interpolation(self):
interpolation = {
"method": "nearest",
"interpolators": [XarrayInterpolator],
"params": {"fill_value": "extrapolate"},
}
# unstacked 1D
source = np.random.rand(5)
coords_src = Coordinates([np.linspace(0, 10, 5)], dims=["lat"])
node = MockArrayDataSource(data=source, coordinates=coords_src, interpolation=interpolation)
coords_dst = Coordinates([[1, 1.2, 1.5, 5, 9]], dims=["lat"])
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.values[0] == source[0] and output.values[1] == source[0] and output.values[2] == source[1]
# unstacked N-D
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(data=source, coordinates=coords_src, interpolation=interpolation)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert output.values[0, 0] == source[1, 1]
# stacked
# TODO: implement stacked handling
source = np.random.rand(5)
coords_src = Coordinates([(np.linspace(0, 10, 5), np.linspace(0, 10, 5))], dims=["lat_lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
coords_dst = Coordinates([(np.linspace(1, 9, 3), np.linspace(1, 9, 3))], dims=["lat_lon"])
with pytest.raises(InterpolationException):
output = node.eval(coords_dst)
# TODO: implement stacked handling
# source = stacked, dest = unstacked
source = np.random.rand(5)
coords_src = Coordinates([(np.linspace(0, 10, 5), np.linspace(0, 10, 5))], dims=["lat_lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
coords_dst = Coordinates([np.linspace(1, 9, 3), np.linspace(1, 9, 3)], dims=["lat", "lon"])
with pytest.raises(InterpolationException):
output = node.eval(coords_dst)
# source = unstacked, dest = stacked
source = np.random.rand(5, 5)
coords_src = Coordinates([np.linspace(0, 10, 5), np.linspace(0, 10, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
coords_dst = Coordinates([(np.linspace(1, 9, 3), np.linspace(1, 9, 3))], dims=["lat_lon"])
output = node.eval(coords_dst)
np.testing.assert_array_equal(output.data, source[[0, 2, 4], [0, 2, 4]])
def test_interpolate_xarray_grid(self):
source = np.arange(0, 25)
source.resize((5, 5))
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5)], dims=["lat", "lon"])
# try one specific rasterio case to measure output
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
# print(output)
assert output.data[0, 0] == 0.0
assert output.data[0, 3] == 3.0
assert output.data[1, 3] == 8.0
assert np.isnan(output.data[0, 4]) # TODO: how to handle outside bounds
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert int(output.data[0, 0]) == 2
assert int(output.data[2, 3]) == 15
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "slinear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert int(output.data[0, 0]) == 2
assert int(output.data[3, 3]) == 20
assert np.isnan(output.data[4, 4])
# Check extrapolation
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={
"method": "linear",
"interpolators": [XarrayInterpolator],
"params": {"fill_nan": True, "fill_value": "extrapolate"},
},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert int(output.data[0, 0]) == 2
assert int(output.data[4, 4]) == 26
assert np.all(~np.isnan(output.data))
def test_interpolate_irregular_arbitrary_2dims(self):
""" irregular interpolation """
# try >2 dims
source = np.random.rand(5, 5, 3)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5), [2, 3, 5]], dims=["lat", "lon", "time"])
coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5), [2, 3, 5]], dims=["lat", "lon", "time"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.lon.values == coords_dst["lon"].coordinates)
assert np.all(output.time.values == coords_dst["time"].coordinates)
# assert output.data[0, 0] == source[]
def test_interpolate_irregular_arbitrary_descending(self):
"""should handle descending"""
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.lon.values == coords_dst["lon"].coordinates)
def test_interpolate_irregular_arbitrary_swap(self):
"""should handle descending"""
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(2, 12, 5), clinspace(2, 12, 5)], dims=["lat", "lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(output.lon.values == coords_dst["lon"].coordinates)
def test_interpolate_irregular_lat_lon(self):
""" irregular interpolation """
source = np.random.rand(5, 5)
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([[[0, 2, 4, 6, 8, 10], [0, 2, 4, 5, 6, 10]]], dims=["lat_lon"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "nearest", "interpolators": [XarrayInterpolator]},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat_lon.values == coords_dst.xcoords["lat_lon"])
assert output.values[0] == source[0, 0]
assert output.values[1] == source[1, 1]
assert output.values[-1] == source[-1, -1]
def test_interpolate_fill_nan(self):
source = np.arange(0, 25).astype(float)
source.resize((5, 5))
source[2, 2] = np.nan
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lat", "lon"])
coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5)], dims=["lat", "lon"])
# Ensure nan present
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": False}},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
assert np.all(np.isnan(output.data[1:3, 1:3]))
# Ensure nan gone
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
np.testing.assert_array_almost_equal(output.data[1:3, 1:3].ravel(), [8.4, 9.4, 13.4, 14.4])
# Ensure nan gone, flip lat-lon on source
coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lon", "lat"])
node = MockArrayDataSource(
data=source,
coordinates=coords_src,
interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}},
)
output = node.eval(coords_dst)
assert isinstance(output, UnitsDataArray)
assert np.all(output.lat.values == coords_dst["lat"].coordinates)
np.testing.assert_array_almost_equal(output.data[1:3, 1:3].T.ravel(), [8.4, 9.4, 13.4, 14.4])
| 42.594239
| 120
| 0.574084
| 5,906
| 51,752
| 4.937521
| 0.047748
| 0.045986
| 0.026062
| 0.039779
| 0.884229
| 0.867186
| 0.858578
| 0.837694
| 0.83327
| 0.822537
| 0
| 0.043972
| 0.271854
| 51,752
| 1,214
| 121
| 42.629325
| 0.729878
| 0.098798
| 0
| 0.678369
| 0
| 0
| 0.069209
| 0
| 0
| 0
| 0
| 0.000824
| 0.225368
| 1
| 0.03624
| false
| 0
| 0.016988
| 0.001133
| 0.069083
| 0.005663
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7c0696098c0bcb5a71618e8b29c64ae2c1e23ff2
| 3,168
|
py
|
Python
|
test-unit/PythonToJavascript/converters_test/ComparisonConverter_test.py
|
stoogoff/python-to-javascript
|
4349b09b15ada544501e7091c7ff1574487e7598
|
[
"MIT"
] | 1
|
2021-11-19T09:56:41.000Z
|
2021-11-19T09:56:41.000Z
|
test-unit/PythonToJavascript/converters_test/ComparisonConverter_test.py
|
stoogoff/python-to-javascript
|
4349b09b15ada544501e7091c7ff1574487e7598
|
[
"MIT"
] | 2
|
2022-02-25T23:11:27.000Z
|
2022-03-04T10:22:14.000Z
|
test-unit/PythonToJavascript/converters_test/ComparisonConverter_test.py
|
stoogoff/python-to-javascript
|
4349b09b15ada544501e7091c7ff1574487e7598
|
[
"MIT"
] | 4
|
2021-05-06T19:03:19.000Z
|
2022-03-06T13:52:30.000Z
|
from utils import parseSource, nodesToString, nodesToLines, dumpNodes, dumpTree
from converters import ComparisonConverter
def test_ComparisonGather_01():
src = """
x == y; x < y; x > y; x >= y; x <= y; x <> y; x != y; x in y; x is y
"""
# dumpTree( parseSource( src ) )
matches = ComparisonConverter().gather( parseSource( src ) )
match = matches[ 0 ]
assert nodesToString( match.left ) == 'x'
assert nodesToString( match.comp_op ) == '=='
assert nodesToString( match.right ) == 'y'
assert nodesToString( matches[ 3 ].comp_op ) == '>='
assert nodesToString( matches[ 7 ].comp_op ) == 'in'
def test_ComparisonGather_02():
src = """
x is not y; x not in y
"""
# dumpTree( parseSource( src ) )
matches = ComparisonConverter().gather( parseSource( src ) )
match = matches[ 0 ]
assert nodesToString( match.left ) == 'x'
assert nodesToString( match.comp_op ) == 'is not'
assert nodesToString( match.right ) == 'y'
assert nodesToString( matches[ 1 ].comp_op ) == 'not in'
def test_ComparisonProcess_01():
src = """
x == y
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """x === y"""
def test_ComparisonProcess_02():
src = """
x != y
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """x !== y"""
def test_ComparisonProcess_03():
src = """
x is None
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """x === null"""
def test_ComparisonProcess_04():
src = """
x is not None
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """x !== null"""
def test_ComparisonProcess_05():
src = """
x is y
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """Object.is( x, y )"""
def test_ComparisonProcess_06():
src = """
x is not y
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """!Object.is( x, y )"""
def test_ComparisonProcess_07():
src = """
dflt is ...
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """!_pyjs.isDef( dflt )"""
def test_ComparisonProcess_08():
src = """
dflt is not ...
"""
nodes = parseSource( src )
cvtr = ComparisonConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """_pyjs.isDef( dflt )"""
| 28.285714
| 79
| 0.59596
| 330
| 3,168
| 5.639394
| 0.148485
| 0.173563
| 0.10317
| 0.098872
| 0.818915
| 0.808705
| 0.808705
| 0.808705
| 0.748522
| 0.748522
| 0
| 0.010684
| 0.261364
| 3,168
| 111
| 80
| 28.540541
| 0.784615
| 0.019255
| 0
| 0.631579
| 0
| 0.010526
| 0.140509
| 0
| 0
| 0
| 0
| 0
| 0.178947
| 1
| 0.105263
| false
| 0
| 0.021053
| 0
| 0.126316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9fd1a3b1249f96217996921247779afef2e19654
| 236
|
py
|
Python
|
tests/tibanna/pony/test_start_run.py
|
4dn-dcic/tibanna_ff
|
6fcfc056b832c14500e525207afeb5722f366a26
|
[
"MIT"
] | 2
|
2019-10-08T17:36:02.000Z
|
2019-10-08T18:42:05.000Z
|
tests/tibanna/pony/test_start_run.py
|
4dn-dcic/tibanna_ff
|
6fcfc056b832c14500e525207afeb5722f366a26
|
[
"MIT"
] | null | null | null |
tests/tibanna/pony/test_start_run.py
|
4dn-dcic/tibanna_ff
|
6fcfc056b832c14500e525207afeb5722f366a26
|
[
"MIT"
] | null | null | null |
from tibanna_4dn.start_run import start_run
def test_md5(start_run_md5_data):
res = start_run(start_run_md5_data)
def test_md5_comprehensive(start_run_md5_comprehensive_data):
res = start_run(start_run_md5_comprehensive_data)
| 29.5
| 61
| 0.84322
| 39
| 236
| 4.538462
| 0.307692
| 0.361582
| 0.248588
| 0.169492
| 0.525424
| 0.293785
| 0.293785
| 0
| 0
| 0
| 0
| 0.033019
| 0.101695
| 236
| 7
| 62
| 33.714286
| 0.801887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
b03b701f3aee4527cf34c3349bc821c853ceddf1
| 56
|
py
|
Python
|
flagging_site/blueprints/__init__.py
|
codeforboston/flagging
|
5e45864f02b92f3d9109be67ae8dbd2b1067d515
|
[
"MIT"
] | 3
|
2020-05-24T18:22:22.000Z
|
2021-04-04T18:51:33.000Z
|
flagging_site/blueprints/__init__.py
|
codeforboston/flagging
|
5e45864f02b92f3d9109be67ae8dbd2b1067d515
|
[
"MIT"
] | 141
|
2020-05-27T02:57:26.000Z
|
2022-03-14T04:12:25.000Z
|
flagging_site/blueprints/__init__.py
|
codeforboston/flagging
|
5e45864f02b92f3d9109be67ae8dbd2b1067d515
|
[
"MIT"
] | 16
|
2020-05-10T17:44:20.000Z
|
2022-03-01T15:46:13.000Z
|
# flake8: noqa
from . import flagging
from . import api
| 14
| 22
| 0.732143
| 8
| 56
| 5.125
| 0.75
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.196429
| 56
| 3
| 23
| 18.666667
| 0.888889
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05a5416fbe6f2edbbcfc1a7bdc74eed88e094e1d
| 130
|
py
|
Python
|
bubbleimg/imgmeasure/iso/__init__.py
|
aileisun/bubblepy
|
054e7a3993659e7002f243c75253c2cb71d4fa73
|
[
"MIT"
] | 3
|
2017-11-20T23:16:09.000Z
|
2021-05-19T09:38:01.000Z
|
bubbleimg/imgmeasure/iso/__init__.py
|
aileisun/bubblepy
|
054e7a3993659e7002f243c75253c2cb71d4fa73
|
[
"MIT"
] | null | null | null |
bubbleimg/imgmeasure/iso/__init__.py
|
aileisun/bubblepy
|
054e7a3993659e7002f243c75253c2cb71d4fa73
|
[
"MIT"
] | 3
|
2017-07-17T09:31:11.000Z
|
2021-05-19T09:38:07.000Z
|
# __init__.py
__all__ = ['isomeasurer']
from . import isomeasurer
from . import plottools
from .isomeasurer import isoMeasurer
| 14.444444
| 36
| 0.769231
| 14
| 130
| 6.571429
| 0.5
| 0.326087
| 0.456522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 130
| 8
| 37
| 16.25
| 0.836364
| 0.084615
| 0
| 0
| 0
| 0
| 0.094017
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05b43b44cab8e14e750cf8d3e642dd4051f705fc
| 42
|
py
|
Python
|
Bubblez/sockets/__init__.py
|
MeesMeijer/bubblez.py
|
a85d1e368153d55df3d8e017c5d73935a8a2dbf2
|
[
"MIT"
] | 4
|
2021-09-28T20:05:02.000Z
|
2021-09-30T09:25:50.000Z
|
Bubblez/sockets/__init__.py
|
MeesMeijer/bubblez.py
|
a85d1e368153d55df3d8e017c5d73935a8a2dbf2
|
[
"MIT"
] | 1
|
2021-09-29T17:31:21.000Z
|
2021-09-29T17:31:21.000Z
|
Bubblez/sockets/__init__.py
|
MeesMeijer/bubblez.py
|
a85d1e368153d55df3d8e017c5d73935a8a2dbf2
|
[
"MIT"
] | 2
|
2021-09-29T17:26:31.000Z
|
2021-09-29T22:08:56.000Z
|
from .classes import *
from .core import *
| 21
| 22
| 0.738095
| 6
| 42
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 2
| 23
| 21
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
05d15705dc48bcc63083a5867fde6d54c203efd0
| 61
|
py
|
Python
|
src/wasp_engine/src/app_demo/tes1t.py
|
Song-MS/angel_bridge
|
d32da6e781011724e9977c1206afc4555c31ba0a
|
[
"MIT"
] | null | null | null |
src/wasp_engine/src/app_demo/tes1t.py
|
Song-MS/angel_bridge
|
d32da6e781011724e9977c1206afc4555c31ba0a
|
[
"MIT"
] | null | null | null |
src/wasp_engine/src/app_demo/tes1t.py
|
Song-MS/angel_bridge
|
d32da6e781011724e9977c1206afc4555c31ba0a
|
[
"MIT"
] | null | null | null |
from setuptools import find_packages
print(find_packages())
| 15.25
| 36
| 0.836066
| 8
| 61
| 6.125
| 0.75
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098361
| 61
| 4
| 37
| 15.25
| 0.890909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
05f2b8f380a7165255c36e474215a20664fd9abc
| 83
|
py
|
Python
|
buzz_and_unlock.py
|
trrevvorr/DIY-Smart-Door-Lock
|
7ca7a219c6e8e840672a4640568420ae700b42c3
|
[
"MIT"
] | null | null | null |
buzz_and_unlock.py
|
trrevvorr/DIY-Smart-Door-Lock
|
7ca7a219c6e8e840672a4640568420ae700b42c3
|
[
"MIT"
] | null | null | null |
buzz_and_unlock.py
|
trrevvorr/DIY-Smart-Door-Lock
|
7ca7a219c6e8e840672a4640568420ae700b42c3
|
[
"MIT"
] | null | null | null |
import _control_lock
import commands
_control_lock.main(commands.BUZZ_AND_UNLOCK)
| 16.6
| 44
| 0.879518
| 12
| 83
| 5.583333
| 0.666667
| 0.328358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072289
| 83
| 4
| 45
| 20.75
| 0.87013
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
af7e7021a52a52e528f8ef8f2a8da2d1c4a4e756
| 39
|
py
|
Python
|
quickinfo/__init__.py
|
OneBitPython/quickinfo
|
3bf1cbad8ca4ef1ff588aace53d1b13560e2681f
|
[
"MIT"
] | 2
|
2021-12-12T12:20:49.000Z
|
2021-12-13T00:29:06.000Z
|
quickinfo/__init__.py
|
OneBitPython/quickinfo
|
3bf1cbad8ca4ef1ff588aace53d1b13560e2681f
|
[
"MIT"
] | null | null | null |
quickinfo/__init__.py
|
OneBitPython/quickinfo
|
3bf1cbad8ca4ef1ff588aace53d1b13560e2681f
|
[
"MIT"
] | null | null | null |
from quickinfo.brain import QuickScrape
| 39
| 39
| 0.897436
| 5
| 39
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
af83eb51bede830a6fe918da3d99df3ca71616f7
| 1,639
|
py
|
Python
|
renovation_core/renovation_core/doctype/renovation_sidebar/test_renovation_sidebar.py
|
Abadulrehman/renovation_core
|
2cb015ec1832ceb6076e20914f504a1049d7a736
|
[
"MIT"
] | 18
|
2020-04-12T20:40:41.000Z
|
2022-03-09T13:50:59.000Z
|
renovation_core/renovation_core/doctype/renovation_sidebar/test_renovation_sidebar.py
|
Abadulrehman/renovation_core
|
2cb015ec1832ceb6076e20914f504a1049d7a736
|
[
"MIT"
] | 28
|
2020-04-21T13:24:28.000Z
|
2021-11-03T12:23:01.000Z
|
renovation_core/renovation_core/doctype/renovation_sidebar/test_renovation_sidebar.py
|
Abadulrehman/renovation_core
|
2cb015ec1832ceb6076e20914f504a1049d7a736
|
[
"MIT"
] | 16
|
2020-04-12T20:31:50.000Z
|
2022-01-30T12:19:45.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, LEAM Technology System and Contributors
# See license.txt
from __future__ import unicode_literals
from renovation_core.utils.test_runner import RenovationTestCase
class TestRenovationSidebar(RenovationTestCase):
pass
# def setUp(self):
# self.make_root_sidebar()
# super(TestRenovationSidebar, self).setUp()
# def test_record_exists_or_not(self):
# records = self.records_made
# print(records)
# def make_root_sidebar(self):
# if frappe.db.exists("Renovation Sidebar", {"renovation_sidebar_name": "All Menu"}):
# return
# d = frappe.new_doc("Renovation Sidebar")
# d.update({
# "renovation_sidebar_name": "All Menu",
# "is_group": 1
# })
# d.save()
# def get_test_records(self):
# parent_menu_name = frappe.get_value("Renovation Sidebar", {"renovation_sidebar_name": "All Menu"}, as_dict=True)
# print(parent_menu_name)
# return frappe._dict(
# order=['Renovation Sidebar'],
# records=frappe._dict(
# renovation_sidebar=generate_json([
# r"{{ repeat(4) }}", frappe._dict(
# renovation_sidebar_name= r"{{ doc.faker.sentence(nb_words=doc.faker.random_choices(elements=[1,3], length=1)[0]) }}",
# doctype= "Renovation Sidebar",
# is_group= r"{{ doc.faker.random_choices(elements=[1,0], length=1)[0] }}",
# parent_renovation_sidebar= r'[unique:renovation_sidebar]<<test_runner.get_filtered_single_data("renovation_sidebar", {"is_group": 1}, test_runner.faker.random_choices(elements=[1,3], length=1)[0],"'+ cstr(parent_menu_name) +r'").get("name")>>'
# )
# ])
# )
# )
| 36.422222
| 252
| 0.679683
| 204
| 1,639
| 5.186275
| 0.416667
| 0.208885
| 0.079395
| 0.068053
| 0.210775
| 0.18431
| 0.153119
| 0.068053
| 0.068053
| 0
| 0
| 0.014567
| 0.162294
| 1,639
| 44
| 253
| 37.25
| 0.756009
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
afc3fa21e5ead07a8ed3ecb7eb5bbf5a04acec5f
| 20
|
py
|
Python
|
BLSeg/blseg/model/fcn/__init__.py
|
ForrestPi/semanticSegmentation
|
1e5519279e2a9574f09eaf91439138b74b0f860c
|
[
"MIT"
] | 7
|
2020-04-06T10:25:30.000Z
|
2021-02-24T14:51:22.000Z
|
BLSeg/blseg/model/fcn/__init__.py
|
ForrestPi/semanticSegmentation
|
1e5519279e2a9574f09eaf91439138b74b0f860c
|
[
"MIT"
] | null | null | null |
BLSeg/blseg/model/fcn/__init__.py
|
ForrestPi/semanticSegmentation
|
1e5519279e2a9574f09eaf91439138b74b0f860c
|
[
"MIT"
] | 2
|
2020-04-08T14:43:21.000Z
|
2020-12-11T03:03:37.000Z
|
from .fcn import FCN
| 20
| 20
| 0.8
| 4
| 20
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bb7525c94e0c1dd9a6345049a57f1a3018e9f733
| 145
|
py
|
Python
|
bmp180/__init__.py
|
Jeremie-C/python-bmp180
|
f2895eb8e19e982f6555c493f195d62b13dd47e8
|
[
"MIT"
] | null | null | null |
bmp180/__init__.py
|
Jeremie-C/python-bmp180
|
f2895eb8e19e982f6555c493f195d62b13dd47e8
|
[
"MIT"
] | null | null | null |
bmp180/__init__.py
|
Jeremie-C/python-bmp180
|
f2895eb8e19e982f6555c493f195d62b13dd47e8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Jeremie-C'
from .bmp180 import bmp180
from .bmp180 import RES_1, RES_2, RES_4, RES_8
| 24.166667
| 46
| 0.696552
| 25
| 145
| 3.72
| 0.68
| 0.215054
| 0.344086
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 0.144828
| 145
| 5
| 47
| 29
| 0.637097
| 0.289655
| 0
| 0
| 0
| 0
| 0.089109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bb9cfe8c1ede4bb7abbe382a16d043ae30b1e6b3
| 61
|
py
|
Python
|
hlrl/torch/algos/dqn/__init__.py
|
Chainso/HLRL
|
584f4ed2fa4d8b311a21dbd862ec9434833dd7cd
|
[
"MIT"
] | null | null | null |
hlrl/torch/algos/dqn/__init__.py
|
Chainso/HLRL
|
584f4ed2fa4d8b311a21dbd862ec9434833dd7cd
|
[
"MIT"
] | null | null | null |
hlrl/torch/algos/dqn/__init__.py
|
Chainso/HLRL
|
584f4ed2fa4d8b311a21dbd862ec9434833dd7cd
|
[
"MIT"
] | null | null | null |
from .dqn import DQN
from .dqn_recurrent import DQNRecurrent
| 20.333333
| 39
| 0.836066
| 9
| 61
| 5.555556
| 0.555556
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 61
| 2
| 40
| 30.5
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bb9d8f3b03209d4b8ff6cbbf805834acb52fdecb
| 3,495
|
py
|
Python
|
Trabalhos-IA/T1-8Puzzle/test_utils.py
|
lucsmelo/INF01048-IA
|
25901f206b20d8916f9170b703e533d40685ca0f
|
[
"MIT"
] | null | null | null |
Trabalhos-IA/T1-8Puzzle/test_utils.py
|
lucsmelo/INF01048-IA
|
25901f206b20d8916f9170b703e533d40685ca0f
|
[
"MIT"
] | null | null | null |
Trabalhos-IA/T1-8Puzzle/test_utils.py
|
lucsmelo/INF01048-IA
|
25901f206b20d8916f9170b703e533d40685ca0f
|
[
"MIT"
] | 1
|
2021-12-14T22:22:57.000Z
|
2021-12-14T22:22:57.000Z
|
import unittest
import utils
class TestUtils(unittest.TestCase):
# Testing move method
def test_move_right(self):
current_state = '1234567_8'
self.assertEqual('12345678_', utils.move(current_state, 'direita'))
def test_move_right_stops_at_edge_top(self):
current_state = '12_345678'
self.assertEqual('12_345678', utils.move(current_state, 'direita'))
def test_move_right_stops_at_edge_middle(self):
current_state = '12345_678'
self.assertEqual('12345_678', utils.move(current_state, 'direita'))
def test_move_right_stops_at_edge_bottom(self):
current_state = '12345678_'
self.assertEqual('12345678_', utils.move(current_state, 'direita'))
def test_move_left(self):
current_state = '1234567_8'
self.assertEqual('123456_78', utils.move(current_state, 'esquerda'))
def test_move_left_stops_at_edge_top(self):
current_state = '_12345678'
self.assertEqual('_12345678', utils.move(current_state, 'esquerda'))
def test_move_left_stops_at_edge_middle(self):
current_state = '123_45678'
self.assertEqual('123_45678', utils.move(current_state, 'esquerda'))
def test_move_left_stops_at_edge_bottom(self):
current_state = '123456_78'
self.assertEqual('123456_78', utils.move(current_state, 'esquerda'))
def test_move_down(self):
current_state = '12_345678'
self.assertEqual('12534_678', utils.move(current_state, 'abaixo'))
def test_move_down_stops_at_edge_left(self):
current_state = '123456_78'
self.assertEqual('123456_78', utils.move(current_state, 'abaixo'))
def test_move_down_stops_at_edge_center(self):
current_state = '1234567_8'
self.assertEqual('1234567_8', utils.move(current_state, 'abaixo'))
def test_move_down_stops_at_edge_right(self):
current_state = '12345678_'
self.assertEqual('12345678_', utils.move(current_state, 'abaixo'))
def test_move_up(self):
current_state = '1234_5678'
self.assertEqual('1_3425678', utils.move(current_state, 'acima'))
def test_move_up_stops_at_edge_left(self):
current_state = '_12345678'
self.assertEqual('_12345678', utils.move(current_state, 'acima'))
def test_move_up_stops_at_edge_center(self):
current_state = '1_2345678'
self.assertEqual('1_2345678', utils.move(current_state, 'acima'))
def test_move_up_stops_at_edge_right(self):
current_state = '12_345678'
self.assertEqual('12_345678', utils.move(current_state, 'acima'))
# Testing apply_sequence method
def test_apply_sequence_empty(self):
current_state = '12_345678'
self.assertEqual('12_345678', utils.apply_sequence(current_state, []))
def test_apply_sequence_one_move(self):
current_state = '12_345678'
sequence = ['esquerda']
self.assertEqual('1_2345678', utils.apply_sequence(current_state, sequence))
def test_apply_sequence_two_moves_distinct(self):
current_state = '12_345678'
sequence = ['esquerda', 'esquerda']
self.assertEqual('_12345678', utils.apply_sequence(current_state, sequence))
def test_apply_sequence_two_moves_returning(self):
current_state = '12_345678'
sequence = ['esquerda', 'direita']
self.assertEqual('12_345678', utils.apply_sequence(current_state, sequence))
if __name__ == '__main__':
unittest.main()
| 36.40625
| 84
| 0.701001
| 438
| 3,495
| 5.159817
| 0.13242
| 0.212389
| 0.141593
| 0.148673
| 0.831858
| 0.806637
| 0.803097
| 0.64646
| 0.638938
| 0.604425
| 0
| 0.112716
| 0.187697
| 3,495
| 95
| 85
| 36.789474
| 0.683339
| 0.01402
| 0
| 0.294118
| 0
| 0
| 0.148417
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 1
| 0.294118
| false
| 0
| 0.029412
| 0
| 0.338235
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bba243df9135b4091dca63e5ab73c1b1981762db
| 36,972
|
py
|
Python
|
bases/br_sp_alesp/code/scripts/alesp_tamitacao_parser.py
|
lucascr91/mais
|
a137328d683a0252a6159e9135f1326157cd018f
|
[
"MIT"
] | 290
|
2020-10-14T17:18:21.000Z
|
2022-03-31T20:56:07.000Z
|
bases/br_sp_alesp/code/scripts/alesp_tamitacao_parser.py
|
lucascr91/mais
|
a137328d683a0252a6159e9135f1326157cd018f
|
[
"MIT"
] | 756
|
2020-10-09T16:37:57.000Z
|
2022-03-31T18:28:18.000Z
|
bases/br_sp_alesp/code/scripts/alesp_tamitacao_parser.py
|
lucascr91/mais
|
a137328d683a0252a6159e9135f1326157cd018f
|
[
"MIT"
] | 81
|
2020-10-15T18:21:42.000Z
|
2022-03-31T03:25:13.000Z
|
import os
import pandas as pd
import numpy as np
from io import BytesIO
from zipfile import ZipFile
import untangle
import requests
import scripts.manipulation as manipulation
mais_path = "../../bd+/mais_projects/data/alesp"
def download_unzip(url, path_to_save):
# unzip the content
r = requests.get(url)
f = ZipFile(BytesIO(r.content))
file_name = f.namelist()[0]
f.extractall(path=path_to_save)
return file_name.replace(".xml", "")
def parse_autores(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/documento_autor.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}.xml".format(path_to_save, file_name))
obj = obj.documentos_autores.DocumentoAutor
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = ["IdAutor", "IdDocumento", "NomeAutor"]
l = len(obj)
for i in range(l):
line = []
a = obj[i].IdAutor.cdata
try:
b = obj[i].IdDocumento.cdata
except:
b = np.nan
try:
c = obj[i].NomeAutor.cdata
except:
c = np.nan
line = [a, b, c]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/documento_autor.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/documento_autor.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
df = pd.read_csv("../data/tramitacoes/documento_autor.csv")
rename_cols = {
"IdAutor": "id_autor",
"IdDocumento": "id_documento",
"NomeAutor": "nome_autor",
}
df = df.rename(columns=rename_cols)
rename = {
"LUIZ FERNANDO T. FERREIRA": "LUIZ FERNANDO",
"PAULO CORREA JR.": "PAULO CORREA JR",
}
df["nome_autor"] = manipulation.normalize(df["nome_autor"]).replace(rename)
df.to_csv("../data/tramitacoes/documento_autor.csv", index=False, encoding="utf-8")
def parse_comissoes(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/comissoes.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.Comissoes.Comissao
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = ["DataFimComissao", "IdComissao", "NomeComissao", "SiglaComissao"]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].DataFimComissao.cdata
except:
a = np.nan
try:
b = obj[i].IdComissao.cdata
except:
b = np.nan
c = obj[i].NomeComissao.cdata
try:
d = obj[i].SiglaComissao.cdata
except:
d = np.nan
line = [a, b, c, d]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/comissoes.csv", index=False, encoding="utf-8"
)
else:
df.to_csv(
"../data/tramitacoes/comissoes.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/comissoes.csv")
rename_cols = {
"DataFimComissao": "data_fim_comissao",
"IdComissao": "id_comissao",
"NomeComissao": "nome_comissao",
"SiglaComissao": "sigla_comissao",
}
df = df.rename(columns=rename_cols)
df.to_csv("../data/tramitacoes/comissoes.csv", index=False, encoding="utf-8")
def parse_deliberacoes_comissoes(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/comissoes_permanentes_deliberacoes.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.ComissoesReunioesDeliberacoes.ReuniaoComissaoDeliberacao
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"Deliberacao",
"DataInclusao",
"DataSaida",
"IdDeliberacao",
"IdDocumento",
"IdPauta",
"IdReuniao",
"NrOrdem",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].Deliberacao.cdata
except:
a = np.nan
try:
b = obj[i].DataInclusao.cdata
except:
b = np.nan
try:
c = obj[i].DataSaida.cdata
except:
c = np.nan
try:
d = obj[i].IdDeliberacao.cdata
except:
d = np.nan
try:
e = obj[i].IdDocumento.cdata
except:
e = np.nan
try:
f = obj[i].IdPauta.cdata
except:
f = np.nan
try:
g = obj[i].IdReuniao.cdata
except:
g = np.nan
try:
h = obj[i].NrOrdem.cdata
except:
h = np.nan
line = [a, b, c, d, e, f, g, h]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_deliberacoes.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_deliberacoes.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/comissoes_permanentes_deliberacoes.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"datainclusao": "data_inclusao",
"datasaida": "data_saida",
"iddeliberacao": "id_deliberacao",
"iddocumento": "id_documento",
"idpauta": "id_pauta",
"idreuniao": "id_reuniao",
"nrordem": "nuumero_ordem",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_deliberacoes.csv",
index=False,
encoding="utf-8",
)
def parse_comissoes_membros(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/comissoes_membros.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.ComissoesMembros.MembroComissao
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"DataInicio",
"Efetivo",
"IdComissao",
"IdMembro",
"IdPapel",
"NomeMembro",
"Papel",
"SiglaComissao",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].DataInicio.cdata
except:
a = np.nan
try:
b = obj[i].Efetivo.cdata
except:
b = np.nan
try:
c = obj[i].IdComissao.cdata
except:
c = np.nan
try:
d = obj[i].IdMembro.cdata
except:
d = np.nan
try:
e = obj[i].IdPapel.cdata
except:
e = np.nan
try:
f = obj[i].NomeMembro.cdata
except:
f = np.nan
try:
g = obj[i].Papel.cdata
except:
g = np.nan
try:
h = obj[i].SiglaComissao.cdata
except:
h = np.nan
line = [a, b, c, d, e, f, g, h]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/comissoes_membros.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/comissoes_membros.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/comissoes_membros.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"datainicio": "data_inicio",
"idcomissao": "id_comissao",
"idmembro": "id_membro",
"idpapel": "id_papel",
"nomemembro": "nome_membro",
"siglacomissao": "sigla_comissao",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/comissoes_membros.csv",
index=False,
encoding="utf-8",
)
def parse_naturezasSpl(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/naturezasSpl.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.natureza.natureza
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = ["idNatureza", "nmNatureza", "sgNatureza", "tpNatureza"]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].idNatureza.cdata
except:
a = np.nan
try:
b = obj[i].nmNatureza.cdata
except:
b = np.nan
try:
c = obj[i].sgNatureza.cdata
except:
c = np.nan
try:
d = obj[i].tpNatureza.cdata
except:
d = np.nan
line = [a, b, c, d]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/naturezasSpl.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/naturezasSpl.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/naturezasSpl.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"idnatureza": "id_natureza",
"nmnatureza": "nome_natureza",
"sgnatureza": "sigla_natureza",
"tpnatureza": "tipo_natureza",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/naturezasSpl.csv",
index=False,
encoding="utf-8",
)
def parse_documento_palavras(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/documento_palavras.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}.xml".format(path_to_save, file_name))
obj = obj.documentos_palavras.DocumentoPalavra
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = ["IdDocumento", "IdPalavra"]
l = len(obj)
for i in range(l):
line = []
a = obj[i].IdDocumento.cdata
try:
b = obj[i].IdPalavra.cdata
except:
b = np.nan
line = [a, b]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/documento_palavras.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/documento_palavras.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
df = pd.read_csv("../data/tramitacoes/documento_palavras.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"iddocumento": "id_documento",
"idpalavra": "id_palavra",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/documento_palavras.csv",
index=False,
encoding="utf-8",
)
def parse_documento_index_palavras(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/palavras_chave.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.palavras_chave.PalavraChave
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = ["IdPalavra", "Palavra", "PalavraSemAcento"]
l = len(obj)
for i in range(l):
line = []
a = obj[i].IdPalavra.cdata
try:
b = obj[i].Palavra.cdata
except:
b = np.nan
try:
c = obj[i].PalavraSemAcento.cdata
except:
c = np.nan
line = [a, b, c]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/index_palavras_chave.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/index_palavras_chave.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/index_palavras_chave.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"idpalavra": "id_palavra",
"palavrasemacento": "palavra_sem_acento",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/index_palavras_chave.csv",
index=False,
encoding="utf-8",
)
def parse_propositura_parecer(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/propositura_parecer.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}.xml".format(path_to_save, file_name))
obj = obj.pareceres.ProposituraParecerComissao
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"AnoParecer",
"Descricao",
"Data",
"AdReferendum",
"RelatorEspecial",
"VotoVencido",
"IdComissao",
"IdDocumento",
"IdParecer",
"IdTipoParecer",
"TipoParecer",
"NrParecer",
"SiglaComissao",
"TpParecer",
"URL",
]
l = len(obj)
i = 0
for i in range(l):
line = []
try:
a = obj[i].AnoParecer.cdata
except:
a = np.nan
try:
b = obj[i].Descricao.cdata
except:
b = np.nan
try:
c = obj[i].Data.cdata
except:
c = np.nan
try:
d = obj[i].AdReferendum.cdata
except:
d = np.nan
try:
e = obj[i].RelatorEspecial.cdata
except:
e = np.nan
try:
f = obj[i].VotoVencido.cdata
except:
f = np.nan
try:
g = obj[i].IdComissao.cdata
except:
g = np.nan
try:
h = obj[i].IdDocumento.cdata
except:
h = np.nan
try:
z = obj[i].IdParecer.cdata
except:
z = np.nan
try:
j = obj[i].IdTipoParecer.cdata
except:
j = np.nan
try:
k = obj[i].TipoParecer.cdata
except:
k = np.nan
try:
l = obj[i].NrParecer.cdata
except:
l = np.nan
try:
m = obj[i].SiglaComissao.cdata
except:
m = np.nan
try:
n = obj[i].TpParecer.cdata
except:
n = np.nan
try:
o = obj[i].URL.cdata
except:
o = np.nan
line = [a, b, c, d, e, f, g, h, z, j, k, l, m, n, o]
df = pd.DataFrame([line], columns=cols)
# print(i)
if i == 0:
df.to_csv(
"../data/tramitacoes/propositura_parecer.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/propositura_parecer.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
df = pd.read_csv("../data/tramitacoes/propositura_parecer.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"anoparecer": "ano_parecer",
"adreferendum": "ad_referendum",
"relatorespecial": "relator_especial",
"votovencido": "voto_vencido",
"idcomissao": "id_comissao",
"iddocumento": "id_documento",
"idparecer": "id_parecer",
"idtipoparecer": "id_tipo_parecer",
"tipoparecer": "tipo_parecer",
"nrparecer": "numero_parecer",
"siglacomissao": "sigla_comissao",
"tpparecer": "tipo_parecer",
"url": "url",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/propositura_parecer.csv",
index=False,
encoding="utf-8",
)
def parse_comissoes_permanentes_presencas(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/comissoes_permanentes_presencas.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.ComissoesReunioesPresencas.ReuniaoComissaoPresenca
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"DataReuniao",
"IdComissao",
"IdDeputado",
"IdPauta",
"IdReuniao",
"Deputado",
"SiglaComissao",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].DataReuniao.cdata
except:
a = np.nan
try:
b = obj[i].IdComissao.cdata
except:
b = np.nan
try:
c = obj[i].IdDeputado.cdata
except:
c = np.nan
try:
d = obj[i].IdPauta.cdata
except:
d = np.nan
try:
e = obj[i].IdReuniao.cdata
except:
e = np.nan
try:
f = obj[i].Deputado.cdata
except:
f = np.nan
try:
g = obj[i].SiglaComissao.cdata
except:
g = np.nan
line = [a, b, c, d, e, f, g]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_presencas.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_presencas.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/comissoes_permanentes_presencas.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"datareuniao": "data_reuniao",
"idcomissao": "id_comissao",
"iddeputado": "id_deputado",
"idpauta": "id_pauta",
"idreuniao": "id_reuniao",
"siglacomissao": "sigla_comissao",
"deputado": "nome_deputado",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_presencas.csv",
index=False,
encoding="utf-8",
)
return df
def parse_proposituras(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/proposituras.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}.xml".format(path_to_save, file_name))
obj = obj.proposituras.propositura
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"AnoLegislativo",
"CodOriginalidade",
"Ementa",
"DtEntradaSistema",
"DtPublicacao",
"IdDocumento",
"IdNatureza",
"NroLegislativo",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].AnoLegislativo.cdata
except:
a = np.nan
try:
b = obj[i].CodOriginalidade.cdata
except:
b = np.nan
c = obj[i].Ementa.cdata
try:
d = obj[i].DtEntradaSistema.cdata
except:
d = np.nan
e = obj[i].DtPublicacao.cdata
f = obj[i].IdDocumento.cdata
g = obj[i].IdNatureza.cdata
h = obj[i].NroLegislativo.cdata
line = [a, b, c, d, e, f, g, h]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/proposituras.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/proposituras.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
df = pd.read_csv("../data/tramitacoes/proposituras.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"anolegislativo": "ano_legislativo",
"codoriginalidade": "codigo_originalidade",
"dtentradasistema": "data_entrada_sistema",
"dtpublicacao": "data_publicacao",
"iddocumento": "id_documento",
"idnatureza": "id_natureza",
"nrolegislativo": "numero_legislativo",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/proposituras.csv",
index=False,
encoding="utf-8",
)
def parse_documento_regime(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/documento_regime.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}.xml".format(path_to_save, file_name))
obj = obj.documentos_regimes.DocumentoRegime
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = ["DataFim", "DataInicio", "IdDocumento", "IdRegime", "NomeRegime"]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].DataFim.cdata
except:
a = np.nan
try:
b = obj[i].DataInicio.cdata
except:
b = np.nan
try:
c = obj[i].IdDocumento.cdata
except:
c = np.nan
try:
d = obj[i].IdRegime.cdata
except:
d = np.nan
try:
e = obj[i].NomeRegime.cdata
except:
e = np.nan
line = [a, b, c, d, e]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/documento_regime.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/documento_regime.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
df = pd.read_csv("../data/tramitacoes/documento_regime.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"datafim": "data_fim",
"datainicio": "data_inicio",
"iddocumento": "id_documento",
"idregime": "id_regime",
"nomeregime": "nome_regime",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/documento_regime.csv",
index=False,
encoding="utf-8",
)
def parse_comissoes_permanentes_reunioes(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/comissoes_permanentes_reunioes.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.ComissoesReunioes.ReuniaoComissao
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"Situacao",
"Data",
"IdComissao",
"IdPauta",
"IdReuniao",
"Presidente",
"NrConvocacao",
"NrLegislatura",
"TipoConvocacao",
"CodSituacao",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].Situacao.cdata
except:
a = np.nan
try:
b = obj[i].Data.cdata
except:
b = np.nan
try:
c = obj[i].IdComissao.cdata
except:
c = np.nan
try:
d = obj[i].IdPauta.cdata
except:
d = np.nan
try:
e = obj[i].IdReuniao.cdata
except:
e = np.nan
try:
f = obj[i].Presidente.cdata
except:
f = np.nan
try:
g = obj[i].NrConvocacao.cdata
except:
g = np.nan
try:
h = obj[i].NrLegislatura.cdata
except:
h = np.nan
try:
z = obj[i].TipoConvocacao.cdata
except:
z = np.nan
try:
j = obj[i].CodSituacao.cdata
except:
j = np.nan
line = [a, b, c, d, e, f, g, h, z, j]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_reunioes.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_reunioes.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/comissoes_permanentes_reunioes.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"idcomissao": "id_comissao",
"idpauta": "id_pauta",
"nrconvocacao": "numero_convocacao",
"nrlegislatura": "numero_legislatura",
"tipoconvocacao": "tipo_convocacao",
"codsituacao": "codigo_situacao",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_reunioes.csv",
index=False,
encoding="utf-8",
)
def parse_comissoes_permanentes_votacoes(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/comissoes_permanentes_votacoes.xml"
r = requests.get(url)
obj = untangle.parse(r.text)
obj = obj.ComissoesReunioesVotacao.ReuniaoComissaoVotacao
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"Voto",
"IdComissao",
"IdDeputado",
"IdDocumento",
"IdPauta",
"IdReuniao",
"Deputado",
"TipoVoto",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].Voto.cdata
except:
a = np.nan
try:
b = obj[i].IdComissao.cdata
except:
b = np.nan
try:
c = obj[i].IdDeputado.cdata
except:
c = np.nan
try:
d = obj[i].IdDocumento.cdata
except:
d = np.nan
try:
e = obj[i].IdPauta.cdata
except:
e = np.nan
try:
f = obj[i].IdReuniao.cdata
except:
f = np.nan
try:
g = obj[i].Deputado.cdata
except:
g = np.nan
try:
h = obj[i].TipoVoto.cdata
except:
h = np.nan
line = [a, b, c, d, e, f, g, h]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_votacoes.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_votacoes.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
df = pd.read_csv("../data/tramitacoes/comissoes_permanentes_votacoes.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"idcomissao": "id_comissao",
"iddeputado": "id_deputado",
"iddocumento": "id_documento",
"idpauta": "id_pauta",
"idreuniao": "id_reuniao",
"tipovoto": "tipo_voto",
"deputado": "nome_deputado",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/comissoes_permanentes_votacoes.csv",
index=False,
encoding="utf-8",
)
def parse_documento_andamento_atual(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/documento_andamento_atual.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}.xml".format(path_to_save, file_name))
obj = obj.documentos_andamentos.DocumentoAndamento
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"Descricao",
"Data",
"IdComissao",
"IdDocumento",
"IdEtapa",
"IdTpAndamento",
"NmEtapa",
"NrOrdem",
"TpAndamento",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].Descricao.cdata
except:
a = np.nan
b = obj[i].Data.cdata
c = obj[i].IdComissao.cdata
d = obj[i].IdDocumento.cdata
e = obj[i].IdEtapa.cdata
f = obj[i].IdTpAndamento.cdata
g = obj[i].NmEtapa.cdata
h = obj[i].NrOrdem.cdata
z = obj[i].TpAndamento.cdata
line = [a, b, c, d, e, f, g, h, z]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/documento_andamento_atual.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/documento_andamento_atual.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
df = pd.read_csv("../data/tramitacoes/documento_andamento_atual.csv")
df.columns = manipulation.normalize_cols(df.columns)
rename_cols = {
"idcomissao": "id_comissao",
"iddocumento": "id_documento",
"idetapa": "id_etapa",
"idtpandamento": "id_tipo_andamento",
"nmetapa": "nome_etapa",
"nrordem": "numero_ordem",
"tpandamento": "tipo_andamento",
}
df = df.rename(columns=rename_cols)
df.to_csv(
"../data/tramitacoes/documento_andamento_atual.csv",
index=False,
encoding="utf-8",
)
def parse_documento_andamento(download=True):
if download:
url = "http://www.al.sp.gov.br/repositorioDados/processo_legislativo/documento_andamento.zip"
path_to_save = "../data/tramitacoes/"
file_name = download_unzip(url, path_to_save)
print("path_to_save = ", path_to_save)
print("file_name = ", file_name)
obj = untangle.parse("{}{}".format(path_to_save, file_name))
obj = obj.documentos_andamentos.DocumentoAndamento
print("rows: ", len(obj))
print("Sample:", obj[0])
cols = [
"Descricao",
"Data",
"IdComissao",
"IdDocumento",
"IdEtapa",
"IdTpAndamento",
"NmEtapa",
"NrOrdem",
"TpAndamento",
]
l = len(obj)
for i in range(l):
line = []
try:
a = obj[i].Descricao.cdata
except:
a = np.nan
b = obj[i].Data.cdata
c = obj[i].IdComissao.cdata
d = obj[i].IdDocumento.cdata
e = obj[i].IdEtapa.cdata
f = obj[i].IdTpAndamento.cdata
g = obj[i].NmEtapa.cdata
h = obj[i].NrOrdem.cdata
z = obj[i].TpAndamento.cdata
line = [a, b, c, d, e, f, g, h, z]
df = pd.DataFrame([line], columns=cols)
if i == 0:
df.to_csv(
"../data/tramitacoes/documento_andamento.csv",
index=False,
encoding="utf-8",
)
else:
df.to_csv(
"../data/tramitacoes/documento_andamento.csv",
index=False,
encoding="utf-8",
header=False,
mode="a",
)
os.remove(f"{path_to_save}{file_name}.xml")
| 27.346154
| 116
| 0.46665
| 3,617
| 36,972
| 4.643351
| 0.066353
| 0.024769
| 0.029056
| 0.028818
| 0.768383
| 0.744031
| 0.727121
| 0.723013
| 0.718904
| 0.620601
| 0
| 0.003502
| 0.413096
| 36,972
| 1,352
| 117
| 27.346154
| 0.770496
| 0.000811
| 0
| 0.712477
| 0
| 0
| 0.211478
| 0.076313
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014467
| false
| 0
| 0.007233
| 0
| 0.023508
| 0.039783
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a5917117426d612004e5b1bab658ca77d4d0d2da
| 134
|
py
|
Python
|
page/__init__.py
|
zhangvision11/appium-togetu
|
84f79f250aa34801c680a330b56b79fc91993da4
|
[
"MIT"
] | null | null | null |
page/__init__.py
|
zhangvision11/appium-togetu
|
84f79f250aa34801c680a330b56b79fc91993da4
|
[
"MIT"
] | null | null | null |
page/__init__.py
|
zhangvision11/appium-togetu
|
84f79f250aa34801c680a330b56b79fc91993da4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from utils.shell import Shell
from utils.shell import ADB

# Report the locally installed Appium version on startup.
appium_version = Shell.invoke('appium -v')
print(appium_version)
| 26.8
| 36
| 0.723881
| 22
| 134
| 4.318182
| 0.545455
| 0.221053
| 0.294737
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008547
| 0.126866
| 134
| 5
| 37
| 26.8
| 0.803419
| 0.156716
| 0
| 0
| 0
| 0
| 0.080357
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3c014eea6970c15e99e6957db5593ed4c99a7615
| 31
|
py
|
Python
|
testeBranch.py
|
Alexandre16347/Testes
|
dd1cd71607ae2530309cd08dc8bf235ea83b6b02
|
[
"MIT"
] | null | null | null |
testeBranch.py
|
Alexandre16347/Testes
|
dd1cd71607ae2530309cd08dc8bf235ea83b6b02
|
[
"MIT"
] | null | null | null |
testeBranch.py
|
Alexandre16347/Testes
|
dd1cd71607ae2530309cd08dc8bf235ea83b6b02
|
[
"MIT"
] | null | null | null |
# Smoke check confirming the dev branch is the one being executed.
message = "Testando a branch dev"
print(message)
| 15.5
| 30
| 0.741935
| 5
| 31
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
3c1051d9cc2ac82a119cb15a775cd0f9e117f92a
| 41
|
py
|
Python
|
src/model/blending/__init__.py
|
donglinwu6066/2022-NYCU-EVA-lab-project-demo-app
|
5de1021173240b2f9b325510e2c75f59cf3b14e1
|
[
"MIT"
] | null | null | null |
src/model/blending/__init__.py
|
donglinwu6066/2022-NYCU-EVA-lab-project-demo-app
|
5de1021173240b2f9b325510e2c75f59cf3b14e1
|
[
"MIT"
] | null | null | null |
src/model/blending/__init__.py
|
donglinwu6066/2022-NYCU-EVA-lab-project-demo-app
|
5de1021173240b2f9b325510e2c75f59cf3b14e1
|
[
"MIT"
] | 1
|
2022-03-25T10:08:41.000Z
|
2022-03-25T10:08:41.000Z
|
from .model import Generator as Blending
| 20.5
| 40
| 0.829268
| 6
| 41
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 41
| 1
| 41
| 41
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3c210e0a58cd470619d37ef06c98144d32564f28
| 5,083
|
py
|
Python
|
tests/test_samples.py
|
ndcolter-mcafee/opendxl-virustotal-client-python
|
f9638e94b52517b16d436f8b6a3006db82eff236
|
[
"Apache-2.0"
] | 5
|
2017-04-14T19:44:08.000Z
|
2018-08-09T15:08:16.000Z
|
tests/test_samples.py
|
ndcolter-mcafee/opendxl-virustotal-client-python
|
f9638e94b52517b16d436f8b6a3006db82eff236
|
[
"Apache-2.0"
] | 2
|
2018-03-27T20:51:59.000Z
|
2018-07-30T17:21:26.000Z
|
tests/test_samples.py
|
ndcolter-mcafee/opendxl-virustotal-client-python
|
f9638e94b52517b16d436f8b6a3006db82eff236
|
[
"Apache-2.0"
] | 4
|
2017-08-01T00:00:56.000Z
|
2021-01-25T06:41:41.000Z
|
from tests.test_base import *
from tests.test_value_constants import *
from tests.mock_vtservice import MockVtService
class TestSamples(BaseClientTest):
    """Runs each basic VirusTotal sample against a mocked DXL service.

    Every test follows the same pattern, factored into _run_basic_sample:
    run the sample, assert that no "Error" line was printed, and assert
    that one representative value from the mock service's canned report
    was printed.
    """

    def _run_basic_sample(self, sample_file, expected_value):
        # Shared driver for all of the basic-sample tests below.
        sample_filename = self.BASIC_FOLDER + sample_file
        with self.create_client(max_retries=0) as dxl_client:
            # Set up client, and register mock service
            dxl_client.connect()
            with MockVtService(dxl_client):
                mock_print = self.run_sample(sample_filename)
                mock_print.assert_any_call(
                    StringDoesNotContain("Error")
                )
                # Validate one representative field from the mock report
                mock_print.assert_any_call(
                    StringContains(str(expected_value))
                )
            dxl_client.disconnect()

    def test_basicdomainreport_example(self):
        # Validate whois_timestamp from report
        self._run_basic_sample(
            "/basic_domain_report_example.py",
            SAMPLE_DOMAIN_REPORT["whois_timestamp"],
        )

    def test_basicfilereport_example(self):
        # Validate md5 from report
        self._run_basic_sample(
            "/basic_file_report_example.py",
            SAMPLE_FILE_REPORT["md5"],
        )

    def test_basicfilerescan_example(self):
        # Validate scan_id from report
        self._run_basic_sample(
            "/basic_file_rescan_example.py",
            SAMPLE_FILE_RESCAN["scan_id"],
        )

    def test_basicipreport_example(self):
        # Validate asn from report
        self._run_basic_sample(
            "/basic_ip_address_report_example.py",
            SAMPLE_IP_ADDRESS_REPORT["asn"],
        )

    def test_basicurlreport_example(self):
        # Validate scan_id from report
        self._run_basic_sample(
            "/basic_url_report_example.py",
            SAMPLE_URL_REPORT["scan_id"],
        )

    def test_basicurlscan_example(self):
        # Validate scan_id from report
        self._run_basic_sample(
            "/basic_url_scan_example.py",
            SAMPLE_URL_SCAN["scan_id"],
        )
| 32.793548
| 83
| 0.569546
| 504
| 5,083
| 5.430556
| 0.138889
| 0.078919
| 0.065765
| 0.078919
| 0.877969
| 0.840336
| 0.840336
| 0.816953
| 0.816953
| 0.816953
| 0
| 0.002498
| 0.36986
| 5,083
| 154
| 84
| 33.006494
| 0.852014
| 0.143813
| 0
| 0.574468
| 0
| 0
| 0.057697
| 0.04108
| 0
| 0
| 0
| 0
| 0.12766
| 1
| 0.06383
| false
| 0
| 0.031915
| 0
| 0.106383
| 0.191489
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b1d492268a82d7deafa710f6f662432b70097fa8
| 3,527
|
py
|
Python
|
maxsmi/pytorch_evaluation.py
|
t-kimber/maxsmi
|
d7d52a9ba95efb6b4219928425bb5de965c4b3b5
|
[
"MIT"
] | 1
|
2021-01-22T17:56:54.000Z
|
2021-01-22T17:56:54.000Z
|
maxsmi/pytorch_evaluation.py
|
t-kimber/maxsmi
|
d7d52a9ba95efb6b4219928425bb5de965c4b3b5
|
[
"MIT"
] | 12
|
2020-10-16T10:13:56.000Z
|
2021-04-14T07:25:05.000Z
|
maxsmi/pytorch_evaluation.py
|
t-kimber/maxsmi
|
d7d52a9ba95efb6b4219928425bb5de965c4b3b5
|
[
"MIT"
] | null | null | null |
"""
pytorch_evaluation.py
Pytorch evalution.
"""
import torch
from maxsmi.pytorch_data import data_to_pytorch_format
def model_evaluation(
    data_loader,
    ml_model_name,
    ml_model,
    smiles_dictionary,
    max_length_smiles,
    device_to_use,
):
    """
    Evaluation per batch of a pytorch machine learning model.

    Parameters
    ----------
    data_loader : torch.utils.data
        The training data as seen by Pytorch for mini-batches.
    ml_model_name : str
        Name of the machine learning model. It can be either "CONV1D", "CONV2D", or "RNN".
    ml_model : nn.Module
        Instance of the pytorch machine learning model.
    smiles_dictionary : dict
        The dictionary of SMILES characters.
    max_length_smiles : int
        The length of the longest SMILES.
    device_to_use : torch.device
        The device to use for model instance, "cpu" or "cuda".

    Returns
    -------
    tuple of dict:
        Dictionary of the predicted, true output values, respectively, in the data loader, with SMILES as keys.
    """
    # Put the model in evaluation mode before the forward passes.
    ml_model.eval()
    # Gradients are not needed during evaluation.
    with torch.no_grad():
        all_output_pred = {}
        all_output_true = {}
        for _, data in enumerate(data_loader):
            # SMILES and target
            smiles, target = data
            input_true, output_true = data_to_pytorch_format(
                smiles,
                target,
                smiles_dictionary,
                max_length_smiles,
                ml_model_name,
                device_to_use,
            )
            # Prediction
            output_pred = ml_model(input_true)
            # Convert to numpy arrays
            output_pred = output_pred.cpu().detach().numpy()
            output_true = output_true.cpu().detach().numpy()
            # NOTE(review): every SMILES in this batch is mapped to the
            # *whole batch's* prediction/target arrays (the same objects),
            # not to its own row — confirm downstream consumers expect this.
            for smile in smiles:
                all_output_pred[smile] = output_pred
                all_output_true[smile] = output_true
    return (all_output_pred, all_output_true)
def out_of_sample_prediction(
    data_loader,
    ml_model_name,
    ml_model,
    smiles_dictionary,
    max_length_smiles,
    device_to_use,
):
    """
    Prediction using trained pytorch machine learning model.

    Parameters
    ----------
    data_loader : torch.utils.data
        The unlabeled data as seen by Pytorch.
    ml_model_name : str
        Name of the machine learning model. It can be either "CONV1D", "CONV2D", or "RNN".
    ml_model : nn.Module
        Instance of the pytorch machine learning model.
    smiles_dictionary : dict
        The dictionary of SMILES characters.
    max_length_smiles : int
        The length of the longest SMILES.
    device_to_use : torch.device
        The device to use for model instance, "cpu" or "cuda".

    Returns
    -------
    dict:
        The predictions of the trained model, keyed by SMILES.
    """
    # Docstring fix: the function returns the ``all_output`` dict, not an
    # ``np.array`` as previously documented. Code behavior is unchanged.
    # Put the model in evaluation mode before the forward passes.
    ml_model.eval()
    # Gradients are not needed during prediction.
    with torch.no_grad():
        all_output = {}
        for _, data in enumerate(data_loader):
            # SMILES and target
            smiles, target = data
            input_true, _ = data_to_pytorch_format(
                smiles,
                target,
                smiles_dictionary,
                max_length_smiles,
                ml_model_name,
                device_to_use,
            )
            # Prediction
            output_pred = ml_model(input_true)
            # Convert to numpy arrays
            output_pred = output_pred.cpu().detach().numpy()
            # NOTE(review): each SMILES key maps to the whole batch's
            # prediction array, mirroring model_evaluation above.
            for smile in smiles:
                all_output[smile] = output_pred
    return all_output
| 26.923664
| 111
| 0.599093
| 416
| 3,527
| 4.822115
| 0.213942
| 0.048853
| 0.043868
| 0.04985
| 0.78664
| 0.756231
| 0.734796
| 0.734796
| 0.734796
| 0.667996
| 0
| 0.001688
| 0.328324
| 3,527
| 130
| 112
| 27.130769
| 0.845082
| 0.418486
| 0
| 0.701754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.035088
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
592e2c7fd9528926c979e39d3b42324c13b940ae
| 59
|
py
|
Python
|
src/exceptions/__init__.py
|
mobynickkk/pydi
|
2b234942da0fa5620417f60699a9d97bdb1a4bba
|
[
"Apache-2.0"
] | 1
|
2021-11-09T18:50:47.000Z
|
2021-11-09T18:50:47.000Z
|
src/exceptions/__init__.py
|
mobynickkk/pydi
|
2b234942da0fa5620417f60699a9d97bdb1a4bba
|
[
"Apache-2.0"
] | null | null | null |
src/exceptions/__init__.py
|
mobynickkk/pydi
|
2b234942da0fa5620417f60699a9d97bdb1a4bba
|
[
"Apache-2.0"
] | null | null | null |
from .ComponentNotFoundError import ComponentNotFoundError
| 29.5
| 58
| 0.915254
| 4
| 59
| 13.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 1
| 59
| 59
| 0.981818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3cba66d4fc4ba58e0c1e9702579230cb4e21ddf6
| 120
|
py
|
Python
|
lesson24.py
|
bc1109/ubuntu
|
71b3a1bd0b5d027016b5868da9ab442c8e4f49d2
|
[
"Apache-2.0"
] | null | null | null |
lesson24.py
|
bc1109/ubuntu
|
71b3a1bd0b5d027016b5868da9ab442c8e4f49d2
|
[
"Apache-2.0"
] | null | null | null |
lesson24.py
|
bc1109/ubuntu
|
71b3a1bd0b5d027016b5868da9ab442c8e4f49d2
|
[
"Apache-2.0"
] | null | null | null |
import re

# Matches US-style phone numbers of the form ddd-ddd-dddd.
phoneNumberRegex = re.compile(r'\d\d\d-\d\d\d-\d\d\d\d')
# Demonstration search; the match object is not used further here.
mo = phoneNumberRegex.search('My number is 813-442-2837')
| 30
| 56
| 0.716667
| 24
| 120
| 3.583333
| 0.541667
| 0.209302
| 0.27907
| 0.325581
| 0.116279
| 0.116279
| 0.116279
| 0.116279
| 0.116279
| 0
| 0
| 0.09009
| 0.075
| 120
| 4
| 57
| 30
| 0.684685
| 0
| 0
| 0
| 0
| 0
| 0.38843
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
595c822b9cb782cfc8e147868091321b7b70a58b
| 2,791
|
py
|
Python
|
sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py
|
kushan2018/azure-sdk-for-python
|
08a9296207281f4e90e23cf7a30173863accc867
|
[
"MIT"
] | null | null | null |
sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py
|
kushan2018/azure-sdk-for-python
|
08a9296207281f4e90e23cf7a30173863accc867
|
[
"MIT"
] | 1
|
2020-03-06T05:57:16.000Z
|
2020-03-06T05:57:16.000Z
|
sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py
|
kushan2018/azure-sdk-for-python
|
08a9296207281f4e90e23cf7a30173863accc867
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import pytest
from azure.eventhub import EventPosition, EventHubClient
@pytest.mark.liveTest
def test_iothub_receive_sync(iot_connection_str, device_id):
    """An untouched IoT hub partition should yield no events within the timeout."""
    # Consistency: use the consumer as a context manager, like the other
    # tests in this module, instead of a manual try/finally + close().
    client = EventHubClient.from_connection_string(iot_connection_str, network_tracing=False)
    receiver = client.create_consumer(consumer_group="$default", partition_id="0", event_position=EventPosition("-1"), operation='/messages/events')
    with receiver:
        received = receiver.receive(timeout=10)
        assert len(received) == 0
@pytest.mark.liveTest
def test_iothub_get_properties_sync(iot_connection_str, device_id):
    """Hub metadata should report exactly partitions "0" through "3"."""
    hub = EventHubClient.from_connection_string(
        iot_connection_str, network_tracing=False
    )
    props = hub.get_properties()
    assert props["partition_ids"] == ["0", "1", "2", "3"]
@pytest.mark.liveTest
def test_iothub_get_partition_ids_sync(iot_connection_str, device_id):
    """get_partition_ids should enumerate partitions "0" through "3"."""
    hub = EventHubClient.from_connection_string(
        iot_connection_str, network_tracing=False
    )
    ids = hub.get_partition_ids()
    assert ids == ["0", "1", "2", "3"]
@pytest.mark.liveTest
def test_iothub_get_partition_properties_sync(iot_connection_str, device_id):
    """Partition "0" metadata should echo its own id."""
    hub = EventHubClient.from_connection_string(
        iot_connection_str, network_tracing=False
    )
    props = hub.get_partition_properties("0")
    assert props["id"] == "0"
@pytest.mark.liveTest
def test_iothub_receive_after_mgmt_ops_sync(iot_connection_str, device_id):
    """Receiving should still work after management-plane calls."""
    hub = EventHubClient.from_connection_string(
        iot_connection_str, network_tracing=False
    )
    ids = hub.get_partition_ids()
    assert ids == ["0", "1", "2", "3"]
    consumer = hub.create_consumer(
        consumer_group="$default",
        partition_id=ids[0],
        event_position=EventPosition("-1"),
        operation='/messages/events',
    )
    with consumer:
        events = consumer.receive(timeout=10)
        assert len(events) == 0
@pytest.mark.liveTest
def test_iothub_mgmt_ops_after_receive_sync(iot_connection_str, device_id):
    """Management-plane calls should still work after receiving."""
    hub = EventHubClient.from_connection_string(
        iot_connection_str, network_tracing=False
    )
    consumer = hub.create_consumer(
        consumer_group="$default",
        partition_id="0",
        event_position=EventPosition("-1"),
        operation='/messages/events',
    )
    with consumer:
        events = consumer.receive(timeout=10)
        assert len(events) == 0
    ids = hub.get_partition_ids()
    assert ids == ["0", "1", "2", "3"]
| 42.938462
| 158
| 0.720889
| 329
| 2,791
| 5.808511
| 0.224924
| 0.081633
| 0.100471
| 0.065934
| 0.802721
| 0.802721
| 0.802721
| 0.732077
| 0.732077
| 0.675039
| 0
| 0.013447
| 0.120745
| 2,791
| 64
| 159
| 43.609375
| 0.765281
| 0.106772
| 0
| 0.636364
| 0
| 0
| 0.045418
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.136364
| false
| 0
| 0.045455
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5972b03afea8ae737b403e6897f2f6adc7e430a1
| 62
|
py
|
Python
|
website_multi_company_demo/models/__init__.py
|
factorlibre/website-addons
|
9a0c7a238e2b6030d57f7a08d48816b4f2431524
|
[
"MIT"
] | 1
|
2020-03-01T03:04:21.000Z
|
2020-03-01T03:04:21.000Z
|
website_multi_company_demo/models/__init__.py
|
factorlibre/website-addons
|
9a0c7a238e2b6030d57f7a08d48816b4f2431524
|
[
"MIT"
] | null | null | null |
website_multi_company_demo/models/__init__.py
|
factorlibre/website-addons
|
9a0c7a238e2b6030d57f7a08d48816b4f2431524
|
[
"MIT"
] | 3
|
2019-07-29T20:23:16.000Z
|
2021-01-07T20:51:24.000Z
|
from . import res_users
from . import product_public_category
| 20.666667
| 37
| 0.83871
| 9
| 62
| 5.444444
| 0.777778
| 0.408163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 62
| 2
| 38
| 31
| 0.907407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
59958d0c169f58f32d1e37d65515ddb248a41b5c
| 195
|
py
|
Python
|
tf_lassonet/__init__.py
|
ADeGobbis/TF_LassoNet
|
9c6cb9e0d8800ab32e35ddf5e35d20974963bcfd
|
[
"MIT"
] | null | null | null |
tf_lassonet/__init__.py
|
ADeGobbis/TF_LassoNet
|
9c6cb9e0d8800ab32e35ddf5e35d20974963bcfd
|
[
"MIT"
] | null | null | null |
tf_lassonet/__init__.py
|
ADeGobbis/TF_LassoNet
|
9c6cb9e0d8800ab32e35ddf5e35d20974963bcfd
|
[
"MIT"
] | null | null | null |
from .model import LassoNet
from .path import LassoPath
from .proximal import hier_prox_group
from .utils import feature_importance_time_series
from .graphics import feature_importance_histogram
| 32.5
| 50
| 0.871795
| 27
| 195
| 6.037037
| 0.62963
| 0.159509
| 0.282209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 195
| 5
| 51
| 39
| 0.931429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
59b85fc4921188646732ee324518cef27b73b722
| 27
|
py
|
Python
|
core/models/__init__.py
|
Latterlig96/airflow-model-trainer
|
7da36aae3036759639ae1c556f41fc70409aa444
|
[
"MIT"
] | 6
|
2021-06-10T11:53:24.000Z
|
2022-03-31T19:34:59.000Z
|
core/models/__init__.py
|
Latterlig96/airflow-model-trainer
|
7da36aae3036759639ae1c556f41fc70409aa444
|
[
"MIT"
] | 6
|
2021-03-15T11:01:27.000Z
|
2021-09-25T16:58:16.000Z
|
core/models/__init__.py
|
Latterlig96/airflow-model-trainer
|
7da36aae3036759639ae1c556f41fc70409aa444
|
[
"MIT"
] | 2
|
2021-07-29T08:05:54.000Z
|
2022-02-22T16:14:06.000Z
|
from .train import Trainer
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
59eb1237fcefd5c8a257707fff76652c06452246
| 39
|
py
|
Python
|
game_test.py
|
fbawa/RPS4
|
66589e43165d815766238dd83feca3a2db715d6f
|
[
"MIT"
] | null | null | null |
game_test.py
|
fbawa/RPS4
|
66589e43165d815766238dd83feca3a2db715d6f
|
[
"MIT"
] | null | null | null |
game_test.py
|
fbawa/RPS4
|
66589e43165d815766238dd83feca3a2db715d6f
|
[
"MIT"
] | null | null | null |
def test_example():
    """Trivial sanity check: 20 is greater than 3."""
    smaller, larger = 3, 20
    assert larger > smaller
| 9.75
| 19
| 0.615385
| 6
| 39
| 3.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 0.282051
| 39
| 3
| 20
| 13
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ab9fecd299a454a7857a9ff2197369134e50783d
| 148
|
py
|
Python
|
accounts/admin.py
|
powerticket/heroku
|
d214a3758efbf185e4a533f8373033ed558209dd
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
powerticket/heroku
|
d214a3758efbf185e4a533f8373033ed558209dd
|
[
"MIT"
] | 4
|
2020-10-20T15:13:32.000Z
|
2021-02-24T09:04:49.000Z
|
accounts/admin.py
|
powerticket/heroku
|
d214a3758efbf185e4a533f8373033ed558209dd
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.contrib.auth import get_user_model

# Register your models here.
# Expose the active user model (custom or default) in the Django admin.
UserModel = get_user_model()
admin.site.register(UserModel)
| 24.666667
| 46
| 0.824324
| 23
| 148
| 5.130435
| 0.608696
| 0.169492
| 0.288136
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101351
| 148
| 5
| 47
| 29.6
| 0.887218
| 0.175676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aba4fbdd43dc0c2db91d4496c972810dbafa78de
| 181
|
py
|
Python
|
tests/messaging/test_response_without_body.py
|
spaceone/httoop
|
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
|
[
"MIT"
] | 13
|
2015-01-07T19:39:02.000Z
|
2021-07-12T21:09:28.000Z
|
tests/messaging/test_response_without_body.py
|
spaceone/httoop
|
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
|
[
"MIT"
] | 9
|
2015-06-14T11:37:26.000Z
|
2020-12-11T09:12:30.000Z
|
tests/messaging/test_response_without_body.py
|
spaceone/httoop
|
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
|
[
"MIT"
] | 10
|
2015-05-28T05:51:46.000Z
|
2021-12-29T20:36:15.000Z
|
# Stub tests for responses that must not carry a message body; the bodies
# are not yet implemented.
def test_head_request_():
    # TODO: implement — presumably checks responses to HEAD requests.
    pass
def test_status_smaller_100():
    # TODO: implement — presumably checks 1xx-range (status < 100) responses.
    pass
def test_not_modified_304():
    # TODO: implement — presumably checks 304 Not Modified responses.
    pass
def test_no_content_204():
    # TODO: implement — presumably checks 204 No Content responses.
    pass
def test_reset_content_205():
    # TODO: implement — presumably checks 205 Reset Content responses.
    pass
| 9.526316
| 30
| 0.762431
| 29
| 181
| 4.241379
| 0.551724
| 0.284553
| 0.357724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 0.154696
| 181
| 18
| 31
| 10.055556
| 0.72549
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
abbef9d2332b6916bf5ef3bc7695ffe793b99af1
| 4,417
|
py
|
Python
|
tests/test_samplers/test_tpe/test_distributions.py
|
ytsmiling/optur
|
cbc56c60b322ea764592f01758798f745199b455
|
[
"MIT"
] | 1
|
2022-01-19T09:18:15.000Z
|
2022-01-19T09:18:15.000Z
|
tests/test_samplers/test_tpe/test_distributions.py
|
ytsmiling/optur
|
cbc56c60b322ea764592f01758798f745199b455
|
[
"MIT"
] | null | null | null |
tests/test_samplers/test_tpe/test_distributions.py
|
ytsmiling/optur
|
cbc56c60b322ea764592f01758798f745199b455
|
[
"MIT"
] | null | null | null |
import random
import numpy as np
import pytest
from optur.proto.search_space_pb2 import Distribution, ParameterValue
from optur.proto.study_pb2 import Parameter, Trial
from optur.samplers.tpe import _MixturedDistribution
def int_distribution(low: int, high: int, log_scale: bool = False) -> Distribution:
    """Wrap an integer range [low, high] in a ``Distribution`` proto."""
    inner = Distribution.IntDistribution(low=low, high=high, log_scale=log_scale)
    return Distribution(int_distribution=inner)
def float_distribution(low: float, high: float, log_scale: bool = False) -> Distribution:
    """Wrap a float range [low, high] in a ``Distribution`` proto."""
    inner = Distribution.FloatDistribution(low=low, high=high, log_scale=log_scale)
    return Distribution(float_distribution=inner)
@pytest.mark.parametrize("log_scale", [True, False])
def test_int_distribution_samples_valid_values(log_scale: bool) -> None:
    # Build a mixture from 97 trials whose "foo" values all lie in [10, 30],
    # over an int distribution bounded by [1, 100].
    dist = _MixturedDistribution(
        name="foo",
        distribution=int_distribution(low=1, high=100, log_scale=log_scale),
        trials=[
            Trial(
                parameters={
                    "foo": Parameter(value=ParameterValue(int_value=random.randint(10, 30)))
                }
            )
            for _ in range(97)
        ],
        n_distribution=1,
    )
    # Sample using every other trial index as an active component.
    active_indices = np.asarray(range(1, 97, 2))
    samples = dist.sample(active_indices=active_indices)
    # One int64 draw per active index, all within the distribution's bounds.
    assert samples.dtype == np.dtype("int64")
    assert len(samples) == len(active_indices)
    assert (1 <= samples).all() # type: ignore
    assert (samples <= 100).all() # type: ignore
@pytest.mark.parametrize("log_scale", [True, False])
def test_int_distribution_calculates_valid_log_pdf(log_scale: bool) -> None:
    # Build a mixture from 97 trials whose "foo" values all lie in [10, 30],
    # over an int distribution bounded by [1, 100].
    dist = _MixturedDistribution(
        name="foo",
        distribution=int_distribution(low=1, high=100, log_scale=log_scale),
        trials=[
            Trial(
                parameters={
                    "foo": Parameter(value=ParameterValue(int_value=random.randint(10, 30)))
                }
            )
            for _ in range(97)
        ],
        n_distribution=1,
    )
    active_indices = np.asarray(range(1, 97, 2))
    samples = dist.sample(active_indices=active_indices)
    log_pdf = dist.log_pdf(samples)
    # One log-density column per trial (97), one row per sample; densities <= 1.
    assert log_pdf.dtype == np.dtype("float64") # type: ignore
    assert log_pdf.shape == (len(samples), 97)
    assert (np.exp(log_pdf) <= 1.0).all()
    # Density should be higher near the observed values (10-30) than away
    # from them (50-80).
    assert (
        np.exp(dist.log_pdf(np.random.randint(10, 30, size=100))).mean()
        > np.exp(dist.log_pdf(np.random.randint(50, 80, size=100))).mean()
    )
@pytest.mark.parametrize("log_scale", [True, False])
def test_float_distribution_samples_valid_values(log_scale: bool) -> None:
    # Build a mixture from 97 trials whose "foo" values all lie in [10, 30),
    # over a float distribution bounded by [1, 100].
    dist = _MixturedDistribution(
        name="foo",
        distribution=float_distribution(low=1, high=100, log_scale=log_scale),
        trials=[
            Trial(
                parameters={
                    "foo": Parameter(
                        value=ParameterValue(double_value=random.random() * 20.0 + 10.0)
                    )
                }
            )
            for _ in range(97)
        ],
        n_distribution=1,
    )
    # Sample using every other trial index as an active component.
    active_indices = np.asarray(range(1, 97, 2))
    samples = dist.sample(active_indices=active_indices)
    # One float64 draw per active index, all within the distribution's bounds.
    assert samples.dtype == np.dtype("float64")
    assert len(samples) == len(active_indices)
    assert (1.0 <= samples).all() # type: ignore
    assert (samples <= 100.0).all() # type: ignore
@pytest.mark.parametrize("log_scale", [True, False])
def test_float_distribution_calculates_valid_log_pdf(log_scale: bool) -> None:
    # Build a mixture from 97 trials whose "foo" values all lie in [10, 30),
    # over a float distribution bounded by [1, 100].
    dist = _MixturedDistribution(
        name="foo",
        distribution=float_distribution(low=1, high=100, log_scale=log_scale),
        trials=[
            Trial(
                parameters={
                    "foo": Parameter(
                        value=ParameterValue(double_value=random.random() * 20.0 + 10.0)
                    )
                }
            )
            for _ in range(97)
        ],
        n_distribution=1,
    )
    active_indices = np.asarray(range(1, 97, 2))
    samples = dist.sample(active_indices=active_indices)
    log_pdf = dist.log_pdf(samples)
    # One log-density column per trial (97), one row per sample; densities <= 1.
    assert log_pdf.dtype == np.dtype("float64") # type: ignore
    assert log_pdf.shape == (len(samples), 97)
    assert (np.exp(log_pdf) <= 1.0).all()
    # Density should be higher at a point near the observed values (10-30)
    # than at one away from them (60-80).
    assert (
        np.exp(dist.log_pdf(np.asarray([random.random() * 20.0 + 10.0]))).mean()
        > np.exp(dist.log_pdf(np.asarray([random.random() * 20.0 + 60.0]))).mean()
    )
| 35.336
| 98
| 0.612859
| 527
| 4,417
| 4.946869
| 0.1537
| 0.067511
| 0.027618
| 0.036824
| 0.850403
| 0.850403
| 0.849252
| 0.782509
| 0.71308
| 0.71308
| 0
| 0.036141
| 0.260811
| 4,417
| 124
| 99
| 35.620968
| 0.762328
| 0.017433
| 0
| 0.618182
| 0
| 0
| 0.019848
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 1
| 0.054545
| false
| 0
| 0.054545
| 0.018182
| 0.127273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e64ec539c00d675fdf62d875d19a50f1cd643618
| 49
|
py
|
Python
|
school/resolvers/__init__.py
|
iPalmTech/django-ariadne-starter
|
5930b6ca13c9d2a726d3889ce899f49fb6d5301c
|
[
"MIT"
] | null | null | null |
school/resolvers/__init__.py
|
iPalmTech/django-ariadne-starter
|
5930b6ca13c9d2a726d3889ce899f49fb6d5301c
|
[
"MIT"
] | null | null | null |
school/resolvers/__init__.py
|
iPalmTech/django-ariadne-starter
|
5930b6ca13c9d2a726d3889ce899f49fb6d5301c
|
[
"MIT"
] | null | null | null |
from .school import school_query, school_mutation
| 49
| 49
| 0.877551
| 7
| 49
| 5.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0509c96e4934916e642350dceed08baac99df9c3
| 3,016
|
py
|
Python
|
.virtual_documents/01_tutorial.ipynb.py
|
AtomScott/image_folder_datasets
|
935580929abc9d8ec9eeaf944a0d3c670a09d04d
|
[
"Apache-2.0"
] | null | null | null |
.virtual_documents/01_tutorial.ipynb.py
|
AtomScott/image_folder_datasets
|
935580929abc9d8ec9eeaf944a0d3c670a09d04d
|
[
"Apache-2.0"
] | null | null | null |
.virtual_documents/01_tutorial.ipynb.py
|
AtomScott/image_folder_datasets
|
935580929abc9d8ec9eeaf944a0d3c670a09d04d
|
[
"Apache-2.0"
] | null | null | null |
#hide
get_ipython().run_line_magic("load_ext", " autoreload")
get_ipython().run_line_magic("autoreload", " 2")
# Notebook-exported script: trains ResNet-50 on CIFAR-10 under several
# pretraining/normalization schemes and logs each run to TensorBoard.
from image_folder_datasets.core import ImageFolderDataModule
data_dir = 'Datasets/cifar10'
dm = ImageFolderDataModule(data_dir, 128)
dm.setup()
# For ease of use, we also add a dataloader from the fastai library. This can be accessed from `dm.dls`.
# However it is not used for anything else.
dm.dls.show_batch()
import pytorch_lightning as pl
from image_folder_datasets.core import CNNModule
modelname = 'resnet50'
max_epochs = 50
# Experiment 1: train from scratch (no pretrained weights, extractor trainable).
logger = pl.loggers.TensorBoardLogger('tb_logs', name=modelname)
trainer = pl.Trainer(gpus=1, max_epochs=max_epochs, checkpoint_callback=False, logger=logger)
model = CNNModule(modelname, pretrained=False, freeze_extractor=False, num_classes=len(dm.trainset.classes))
trainer.fit(model, dm);
# Experiment 2: ImageNet-pretrained weights with the feature extractor frozen.
logger = pl.loggers.TensorBoardLogger('tb_logs', name=modelname+'_imagenet')
trainer = pl.Trainer(gpus=1, max_epochs=max_epochs, checkpoint_callback=False, logger=logger)
model = CNNModule(modelname, pretrained=True, freeze_extractor=True, num_classes=len(dm.trainset.classes))
trainer.fit(model, dm);
# Experiment 3: FractalDB weights loaded from a local checkpoint, extractor frozen.
logger = pl.loggers.TensorBoardLogger('tb_logs', name=modelname+'_fractalDB')
weight_path = 'FractalDB-1000_resnet50_epoch90.pth'
trainer = pl.Trainer(gpus=1, max_epochs=max_epochs, checkpoint_callback=False, logger=logger)
model = CNNModule(modelname, pretrained=False, freeze_extractor=True, num_classes=len(dm.trainset.classes), weight_path=weight_path)
trainer.fit(model, dm);
# Experiment 4: FractalDB weights plus ImageNet-style input normalization
# (per the '_imagenet_nm' run name); extractor left trainable.
from torchvision.transforms import ToTensor, Resize, Compose, CenterCrop, Normalize
transform = Compose([
    Resize(256, interpolation=2),
    CenterCrop(224),
    ToTensor(),
    Normalize(mean=[0.485, 0.456, 0.406],
              std=[0.229, 0.224, 0.225])
])
dm = ImageFolderDataModule(data_dir, 128, transform)
dm.setup()
logger = pl.loggers.TensorBoardLogger('tb_logs', name=modelname+'_fractalDB_imagenet_nm')
weight_path = 'FractalDB-1000_resnet50_epoch90.pth'
trainer = pl.Trainer(gpus=1, max_epochs=max_epochs, checkpoint_callback=False, logger=logger)
model = CNNModule(modelname, freeze_extractor=False, num_classes=len(dm.trainset.classes), weight_path=weight_path)
trainer.fit(model, dm);
## DO NOT FREEZE EXTRACTOR
# Experiment 5: FractalDB weights with different normalization constants;
# extractor left trainable. NOTE(review): this reuses the '_fractalDB'
# TensorBoard run name from experiment 3 — confirm that is intentional.
from torchvision.transforms import ToTensor, Resize, Compose, CenterCrop, Normalize
transform = Compose([
    Resize(256, interpolation=2),
    CenterCrop(224),
    ToTensor(),
    Normalize(mean=[0.2, 0.2, 0.2],
              std=[0.5, 0.5, 0.5])
])
dm = ImageFolderDataModule(data_dir, 128, transform)
dm.setup()
logger = pl.loggers.TensorBoardLogger('tb_logs', name=modelname+'_fractalDB')
weight_path = 'FractalDB-1000_resnet50_epoch90.pth'
trainer = pl.Trainer(gpus=1, max_epochs=max_epochs, checkpoint_callback=False, logger=logger)
model = CNNModule(modelname, freeze_extractor=False, num_classes=len(dm.trainset.classes), weight_path=weight_path)
trainer.fit(model, dm);
| 32.085106
| 132
| 0.75431
| 404
| 3,016
| 5.460396
| 0.272277
| 0.044878
| 0.033998
| 0.072529
| 0.840435
| 0.80553
| 0.775612
| 0.775612
| 0.752493
| 0.739801
| 0
| 0.035944
| 0.123674
| 3,016
| 93
| 133
| 32.430108
| 0.798714
| 0.057361
| 0
| 0.62963
| 0
| 0
| 0.086772
| 0.044797
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.092593
| 0
| 0.092593
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
055ded3d8ef1bd1828a1373a5580c27a8dd1ce6c
| 61
|
py
|
Python
|
prac_12/prac_12/models/__init__.py
|
moevm/db_sql_lab_examples
|
01aa4843b59bbddbea739b4c7b4db8958a2f8393
|
[
"MIT"
] | 3
|
2021-09-02T21:03:30.000Z
|
2021-10-08T13:48:04.000Z
|
prac_12/prac_12/models/__init__.py
|
moevm/db_sql_lab_examples
|
01aa4843b59bbddbea739b4c7b4db8958a2f8393
|
[
"MIT"
] | null | null | null |
prac_12/prac_12/models/__init__.py
|
moevm/db_sql_lab_examples
|
01aa4843b59bbddbea739b4c7b4db8958a2f8393
|
[
"MIT"
] | 1
|
2021-09-05T02:44:19.000Z
|
2021-09-05T02:44:19.000Z
|
from .base import *
from .book import *
from .shelf import *
| 15.25
| 20
| 0.704918
| 9
| 61
| 4.777778
| 0.555556
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196721
| 61
| 3
| 21
| 20.333333
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
056e381c50214efc09ae12f3be2dd204a76f6989
| 9,448
|
py
|
Python
|
megumin/modulos/admin/bans.py
|
davitudoplugins1234/WhiterKang
|
f4779d2c440849fa97e7014cd856f885b0abbc87
|
[
"MIT"
] | null | null | null |
megumin/modulos/admin/bans.py
|
davitudoplugins1234/WhiterKang
|
f4779d2c440849fa97e7014cd856f885b0abbc87
|
[
"MIT"
] | null | null | null |
megumin/modulos/admin/bans.py
|
davitudoplugins1234/WhiterKang
|
f4779d2c440849fa97e7014cd856f885b0abbc87
|
[
"MIT"
] | null | null | null |
##
#
import time
from pyrogram import filters
from pyrogram.errors import PeerIdInvalid, UserIdInvalid, UsernameInvalid
from pyrogram.types import Message
from megumin import megux
from megumin.utils import (
admin_check,
extract_time,
check_bot_rights,
check_rights,
is_admin,
is_dev,
is_self,
sed_sticker,
get_collection,
get_string,
)
@megux.on_message(filters.command("ban", prefixes=["/", "!"]))
async def _ban_user(_, message: Message):
    """Ban a user from the chat, targeted by reply or by an id/username argument.

    An optional ban reason may follow the command.  The handler is a no-op
    when the "ban" command is disabled for the chat; a log message is sent
    to the configured log channel when one exists.
    """
    DISABLED = get_collection(f"DISABLED {message.chat.id}")
    LOGS = get_collection(f"LOGS {message.chat.id}")
    query = "ban"
    off = await DISABLED.find_one({"_cmd": query})
    if off:
        # Command disabled for this chat.
        return
    chat_id = message.chat.id
    if not await check_rights(chat_id, message.from_user.id, "can_restrict_members"):
        await message.reply(await get_string(chat_id, "NO_BAN_USER"))
        return
    cmd = len(message.text)
    replied = message.reply_to_message
    reason = ""
    if replied:
        # Target taken from the replied-to message; any text after
        # "/ban" (4 characters) is the reason.
        id_ = replied.from_user.id
        if cmd > 4:
            _, reason = message.text.split(maxsplit=1)
    elif cmd > 4:
        # "/ban <user> [reason]" form.
        _, args = message.text.split(maxsplit=1)
        if " " in args:
            id_, reason = args.split(" ", maxsplit=1)
        else:
            id_ = args
    else:
        await message.reply(await get_string(message.chat.id, "BANS_NOT_ESPECIFIED_USER"))
        return
    try:
        user = await megux.get_users(id_)
        user_id = user.id
        mention = user.mention
    except (UsernameInvalid, PeerIdInvalid, UserIdInvalid):
        await message.reply(
            await get_string(message.chat.id, "BANS_ID_INVALID")
        )
        return
    if await is_self(user_id):
        await message.reply(await get_string(chat_id, "BAN_MY_SELF"))
        await sed_sticker(message)
        return
    if is_dev(user_id):
        await message.reply(await get_string(chat_id, "BAN_IN_DEV"))
        return
    if is_admin(chat_id, user_id):
        await message.reply(await get_string(chat_id, "BAN_IN_ADMIN"))
        return
    if not await check_rights(chat_id, megux.me.id, "can_restrict_members"):
        await message.reply(await get_string(chat_id, "NO_BAN_BOT"))
        await sed_sticker(message)
        return
    sent = await message.reply(await get_string(chat_id, "BAN_LOADING"))
    try:
        await megux.ban_chat_member(chat_id, user_id)
        await sent.edit((await get_string(chat_id, "BAN_SUCCESS")).format(mention, message.from_user.mention(), message.chat.title, reason or None))
        data = await LOGS.find_one()
        if data:
            # Renamed from ``id`` to avoid shadowing the builtin; the two-step
            # ``id``/``id_log`` conversion is collapsed into one assignment.
            log_id = int(data["log_id"])
            await megux.send_message(log_id, (await get_string(chat_id, "BAN_LOGGER")).format(message.chat.title, message.from_user.mention(), mention, user_id, reason or None))
        return
    except Exception as e_f:
        await sent.edit(f"`Algo deu errado 🤔`\n\n**ERROR:** `{e_f}`")
@megux.on_message(filters.command("unban", prefixes=["/", "!"]))
async def _unban_user(_, message: Message):
    """Unban a user, targeted by reply or by an id/username argument.

    The handler is a no-op when the "unban" command is disabled for the
    chat; a log entry is sent to the configured log channel when one exists.
    """
    DISABLED = get_collection(f"DISABLED {message.chat.id}")
    LOGS = get_collection(f"LOGS {message.chat.id}")
    query = "unban"
    off = await DISABLED.find_one({"_cmd": query})
    if off:
        # Command disabled for this chat.
        return
    chat_id = message.chat.id
    if not await check_rights(chat_id, message.from_user.id, "can_restrict_members"):
        await message.reply("Você não tem direitos administrativos suficientes para banir/desbanir usuários!")
        return
    replied = message.reply_to_message
    if replied:
        id_ = replied.from_user.id
    elif len(message.text) > 6:
        # "/unban <user>" form ("/unban" itself is 6 characters).
        _, id_ = message.text.split(maxsplit=1)
    else:
        await message.reply("`Nenhum User_id válido ou mensagem especificada.`")
        return
    try:
        # Resolve the target once; the original awaited megux.get_users(id_)
        # twice, doubling the API round-trips for the same user.
        user = await megux.get_users(id_)
        user_id = user.id
        mention = user.mention
    except (UsernameInvalid, PeerIdInvalid, UserIdInvalid):
        await message.reply(
            "`User_id ou nome de usuário inválido, tente novamente com informações válidas ⚠`"
        )
        return
    if await is_self(user_id):
        return
    if is_admin(chat_id, user_id):
        await message.reply("Este usuário é admin ele não precisa ser desbanido.")
        return
    if not await check_rights(chat_id, megux.me.id, "can_restrict_members"):
        await message.reply("Eu não sou um administrador, **Por favor me promova como um administrador!**")
        await sed_sticker(message)
        return
    sent = await message.reply("`Desbanindo Usuário...`")
    try:
        await megux.unban_chat_member(chat_id, user_id)
        await sent.edit(await get_string(chat_id, "UNBAN_SUCCESS"))
        data = await LOGS.find_one()
        if data:
            # Renamed from ``id`` to avoid shadowing the builtin.
            log_id = int(data["log_id"])
            await megux.send_message(log_id, (await get_string(chat_id, "UNBAN_LOGGER")).format(message.chat.title, message.from_user.mention(), mention, user_id))
        return
    except Exception as e_f:
        await sent.edit(f"`Algo deu errado! 🤔`\n\n**ERROR:** `{e_f}`")
@megux.on_message(filters.command("kick", prefixes=["/", "!"]))
async def _kick_user(_, message: Message):
    """Kick a user (ban then immediately unban, so they can rejoin).

    Targeted by reply or by an id/username argument; an optional reason may
    follow the command.  No-op when the "kick" command is disabled for the chat.
    """
    DISABLED = get_collection(f"DISABLED {message.chat.id}")
    query = "kick"
    off = await DISABLED.find_one({"_cmd": query})
    if off:
        # Command disabled for this chat.
        return
    chat_id = message.chat.id
    if not await check_rights(chat_id, message.from_user.id, "can_restrict_members"):
        await message.reply("Você não tem as seguintes permissões: **Can restrict members**")
        return
    cmd = len(message.text)
    replied = message.reply_to_message
    reason = ""
    if replied:
        # Target taken from the replied-to message; any text after
        # "/kick" (5 characters) is the reason.
        id_ = replied.from_user.id
        if cmd > 5:
            _, reason = message.text.split(maxsplit=1)
    elif cmd > 5:
        # "/kick <user> [reason]" form.
        _, args = message.text.split(maxsplit=1)
        if " " in args:
            id_, reason = args.split(" ", maxsplit=1)
        else:
            id_ = args
    else:
        await message.reply("`Nenhum user_id válido ou mensagem especificada.`")
        return
    try:
        user = await megux.get_users(id_)
        user_id = user.id
        mention = user.mention
    except (UsernameInvalid, PeerIdInvalid, UserIdInvalid):
        await message.reply(
            "`User_id ou nome de usuário inválido, tente novamente com informações válidas ⚠`"
        )
        return
    if await is_self(user_id):
        # Refuses to kick the bot itself; replies only with a sticker.
        await sed_sticker(message)
        return
    if is_dev(user_id):
        await message.reply("Porque eu iria banir meu desenvolvedor? Isso me parece uma idéia muito idiota.")
        return
    if is_admin(chat_id, user_id):
        await message.reply("Porque eu iria kickar um(a) administrador(a)? Isso me parece uma idéia bem idiota.")
        return
    if not await check_rights(chat_id, megux.me.id, "can_restrict_members"):
        await message.reply("Não posso restringir as pessoas aqui! Certifique-se de que sou administrador e de que posso adicionar novos administradores.")
        await sed_sticker(message)
        return
    sent = await message.reply("`Kickando usuário...`")
    try:
        # Ban followed by unban == kick: the user is removed but may rejoin.
        await megux.ban_chat_member(chat_id, user_id)
        await megux.unban_chat_member(chat_id, user_id)
        await sent.edit(f"Eu removi o usuário {mention}\n" f"**Motivo**: `{reason or None}`")
    except Exception as e_f:
        await sent.edit(f"`Algo deu errado! 🤔`\n\n**ERROR:** `{e_f}`")
@megux.on_message(filters.command("kickme", prefixes=["/", "!"]))
async def kickme_(_, message: Message):
    """Let a non-admin user kick themselves (ban then unban, so they can rejoin)."""
    DISABLED = get_collection(f"DISABLED {message.chat.id}")
    query = "kickme"
    off = await DISABLED.find_one({"_cmd": query})
    if off:
        # Command disabled for this chat.
        return
    chat_id = message.chat.id
    user_id = message.from_user.id
    admin_ = await admin_check(message)
    if admin_:
        # Admins are not allowed to kick themselves.
        await message.reply("`Hmmm admin...\nVocê não vai a lugar nenhum senpai.`")
        return
    else:
        try:
            if not await check_rights(chat_id, megux.me.id, "can_restrict_members"):
                await message.reply("Não posso restringir as pessoas aqui! Certifique-se de que sou administrador e de que posso adicionar novos administradores.")
                return
            await message.reply("Ate mais, espero que tenha gostado da estadia.")
            # Ban + unban == kick: the user can rejoin afterwards.
            await megux.ban_chat_member(chat_id, user_id)
            await megux.unban_chat_member(chat_id, user_id)
        except Exception as e:
            await message.reply(f"**ERRO:**\n{e}")
@megux.on_message(filters.command("banme", prefixes=["/", "!"]))
async def banme_(_, message: Message):
    """Let a non-admin user ban themselves from the chat (no unban afterwards).

    Renamed from ``kickme_``: the original re-used the /kickme handler's
    function name, shadowing it at module level.  Registration happens via
    the decorator, so the rename does not affect behaviour.
    """
    DISABLED = get_collection(f"DISABLED {message.chat.id}")
    query = "banme"
    off = await DISABLED.find_one({"_cmd": query})
    if off:
        # Command disabled for this chat.
        return
    chat_id = message.chat.id
    user_id = message.from_user.id
    admin_ = await admin_check(message)
    if admin_:
        await message.reply("Por que eu baniria um(a) administrador(a)? Parece uma ideia bem idiota.")
        return
    else:
        try:
            if not await check_rights(chat_id, megux.me.id, "can_restrict_members"):
                await message.reply("Eu não sou um(a) administrador(a)!")
                return
            await message.reply("Sem Problemas.")
            await megux.ban_chat_member(chat_id, user_id)
        except Exception as e:
            await message.reply(f"**ERRO:**\n{e}")
| 38.406504
| 177
| 0.642782
| 1,268
| 9,448
| 4.597792
| 0.149842
| 0.048371
| 0.084563
| 0.0247
| 0.812007
| 0.779588
| 0.776844
| 0.772041
| 0.75163
| 0.72024
| 0
| 0.001683
| 0.245449
| 9,448
| 245
| 178
| 38.563265
| 0.815402
| 0
| 0
| 0.695652
| 0
| 0.008696
| 0.210164
| 0.002541
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026087
| 0
| 0.16087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0578b9ae2fe1c4f73fa9b804de9b366e13bd401e
| 326
|
py
|
Python
|
aiflearn/algorithms/inprocessing/__init__.py
|
gusrabbit/aif360-learn
|
b14a9b98e96dd2756faf312047e9a50ccc1559fa
|
[
"Apache-2.0"
] | null | null | null |
aiflearn/algorithms/inprocessing/__init__.py
|
gusrabbit/aif360-learn
|
b14a9b98e96dd2756faf312047e9a50ccc1559fa
|
[
"Apache-2.0"
] | null | null | null |
aiflearn/algorithms/inprocessing/__init__.py
|
gusrabbit/aif360-learn
|
b14a9b98e96dd2756faf312047e9a50ccc1559fa
|
[
"Apache-2.0"
] | null | null | null |
from aiflearn.algorithms.inprocessing.adversarial_debiasing import AdversarialDebiasing
from aiflearn.algorithms.inprocessing.art_classifier import ARTClassifier
from aiflearn.algorithms.inprocessing.prejudice_remover import PrejudiceRemover
from aiflearn.algorithms.inprocessing.meta_fair_classifier import MetaFairClassifier
| 81.5
| 87
| 0.917178
| 33
| 326
| 8.909091
| 0.515152
| 0.163265
| 0.29932
| 0.462585
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046012
| 326
| 4
| 88
| 81.5
| 0.945338
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
058dc32966147f97b6e04d1c04b166098b2f9acc
| 118
|
py
|
Python
|
src/services/queue_client.py
|
tombrereton/flask-api-starter-kit
|
2e244bfc4f5659e91fd7cd27388c37bf32baeaec
|
[
"MIT"
] | null | null | null |
src/services/queue_client.py
|
tombrereton/flask-api-starter-kit
|
2e244bfc4f5659e91fd7cd27388c37bf32baeaec
|
[
"MIT"
] | null | null | null |
src/services/queue_client.py
|
tombrereton/flask-api-starter-kit
|
2e244bfc4f5659e91fd7cd27388c37bf32baeaec
|
[
"MIT"
] | null | null | null |
from src.dtos.user import UserDto
def add_create_user_job(user: UserDto):
    """Report that a user-creation job was added for *user*.

    Returns a human-readable confirmation string built from the DTO's
    ``user_name`` attribute.
    """
    confirmation = "user {} added".format(user.user_name)
    return confirmation
| 19.666667
| 41
| 0.762712
| 20
| 118
| 4.3
| 0.7
| 0.186047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144068
| 118
| 5
| 42
| 23.6
| 0.851485
| 0
| 0
| 0
| 0
| 0
| 0.228814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
555a496bf6ef787ee2e482c073694c1725d6aa95
| 21,561
|
py
|
Python
|
tests/preprocess/annotation/target_annotation_test.py
|
elifesciences/sciencebeam-gym
|
3ad654e08775e0c0cdd256753e14093bb5a42d44
|
[
"MIT"
] | 25
|
2017-07-25T12:44:55.000Z
|
2020-09-30T22:16:50.000Z
|
tests/preprocess/annotation/target_annotation_test.py
|
elifesciences/sciencebeam-gym
|
3ad654e08775e0c0cdd256753e14093bb5a42d44
|
[
"MIT"
] | 192
|
2017-11-29T08:57:03.000Z
|
2022-03-29T18:44:41.000Z
|
tests/preprocess/annotation/target_annotation_test.py
|
elifesciences/sciencebeam-gym
|
3ad654e08775e0c0cdd256753e14093bb5a42d44
|
[
"MIT"
] | 6
|
2019-02-01T18:49:33.000Z
|
2020-07-26T08:18:46.000Z
|
from __future__ import division
import json
from lxml.builder import E
from sciencebeam_gym.preprocess.annotation.target_annotation import (
strip_whitespace,
xml_root_to_target_annotations,
XmlMappingSuffix
)
# Annotation tag names used in the xml mappings under test.
TAG1 = 'tag1'
TAG2 = 'tag2'
# Arbitrary sample values; the LONGER/SHORTER pair exercises the
# sort-by-value-descending behaviour of children xpaths.
SOME_VALUE = 'some value'
SOME_VALUE_2 = 'some value2'
SOME_LONGER_VALUE = 'some longer value1'
SOME_SHORTER_VALUE = 'value1'
class TestStripWhitespace(object):
    """Tests for strip_whitespace: tabs, CR and repeated whitespace are
    collapsed to a single space / line feed."""
    def test_should_replace_tab_with_space(self):
        assert strip_whitespace(SOME_VALUE + '\t' + SOME_VALUE_2) == SOME_VALUE + ' ' + SOME_VALUE_2
    def test_should_strip_double_space(self):
        # NOTE(review): the input likely contained a double space upstream;
        # repeated spaces may have been collapsed in this copy -- verify.
        assert strip_whitespace(SOME_VALUE + ' ' + SOME_VALUE_2) == SOME_VALUE + ' ' + SOME_VALUE_2
    def test_should_strip_double_line_feed(self):
        assert strip_whitespace(SOME_VALUE + '\n\n' +
                                SOME_VALUE_2) == SOME_VALUE + '\n' + SOME_VALUE_2
    def test_should_replace_cr_with_line_feed(self):
        assert strip_whitespace(
            SOME_VALUE + '\r' + SOME_VALUE_2) == SOME_VALUE + '\n' + SOME_VALUE_2
    def test_should_strip_spaces_around_line_feed(self):
        assert strip_whitespace(SOME_VALUE + ' \n ' +
                                SOME_VALUE_2) == SOME_VALUE + '\n' + SOME_VALUE_2
    def test_should_strip_multiple_lines_with_blanks(self):
        assert (
            strip_whitespace(SOME_VALUE + ' \n \n \n ' + SOME_VALUE_2) ==
            SOME_VALUE + '\n' + SOME_VALUE_2
        )
class TestXmlRootToTargetAnnotations(object):
    """Tests for xml_root_to_target_annotations.

    Covers plain xpath matching plus the XmlMappingSuffix options visible
    here: REGEX, MATCH_MULTIPLE, BONDING, REQUIRE_NEXT, CHILDREN,
    CHILDREN_CONCAT, CHILDREN_RANGE, UNMATCHED_PARENT_TEXT, SUB,
    EXTRACT_REGEX and PRIORITY.
    """
    def test_should_return_empty_target_annotations_for_empty_xml(self):
        xml_root = E.article(
        )
        xml_mapping = {
            'article': {
                'title': 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert target_annotations == []
    def test_should_return_empty_target_annotations_for_no_matching_annotations(self):
        xml_root = E.article(
            E.other(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert target_annotations == []
    def test_should_return_matching_target_annotations(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert target_annotations[0].name == TAG1
        assert target_annotations[0].value == SOME_VALUE
    def test_should_strip_extra_space(self):
        # NOTE(review): the input likely contained repeated spaces upstream;
        # they may have been collapsed in this copy -- verify.
        xml_root = E.article(
            E.abstract(SOME_VALUE + ' ' + SOME_VALUE_2)
        )
        xml_mapping = {
            'article': {
                TAG1: 'abstract'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert target_annotations[0].name == TAG1
        assert target_annotations[0].value == SOME_VALUE + ' ' + SOME_VALUE_2
    def test_should_apply_regex_to_result(self):
        # The REGEX suffix strips the leading section numbering.
        xml_root = E.article(
            E.title('1.1. ' + SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title',
                TAG1 + XmlMappingSuffix.REGEX: r'(?:\d+\.?)* ?(.*)'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert target_annotations[0].name == TAG1
        assert target_annotations[0].value == SOME_VALUE
    def test_should_apply_match_multiple_flag(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title',
                TAG1 + XmlMappingSuffix.MATCH_MULTIPLE: 'true'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [t.match_multiple for t in target_annotations] == [True]
    def test_should_not_apply_match_multiple_flag_if_not_set(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [t.match_multiple for t in target_annotations] == [False]
    def test_should_apply_match_bonding_flag(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title',
                TAG1 + XmlMappingSuffix.BONDING: 'true'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [t.bonding for t in target_annotations] == [True]
    def test_should_not_apply_match_bonding_flag_if_not_set(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [t.bonding for t in target_annotations] == [False]
    def test_should_apply_match_require_next_flag(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title',
                TAG1 + XmlMappingSuffix.REQUIRE_NEXT: 'true'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [t.require_next for t in target_annotations] == [True]
    def test_should_not_apply_match_require_next_flag_if_not_set(self):
        xml_root = E.article(
            E.title(SOME_VALUE)
        )
        xml_mapping = {
            'article': {
                TAG1: 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [t.require_next for t in target_annotations] == [False]
    def test_should_use_multiple_xpaths(self):
        # Multiple xpaths are given as a newline-separated list.
        xml_root = E.article(
            E.entry(
                E.child1(SOME_VALUE),
                E.child2(SOME_VALUE_2)
            )
        )
        xml_mapping = {
            'article': {
                TAG1: '\n{}\n{}\n'.format(
                    'entry/child1',
                    'entry/child2'
                )
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, SOME_VALUE),
            (TAG1, SOME_VALUE_2)
        ]
    def test_should_apply_children_xpaths_and_sort_by_value_descending(self):
        xml_root = E.article(
            E.entry(
                E.child1(SOME_SHORTER_VALUE),
                E.child2(SOME_LONGER_VALUE)
            ),
            E.entry(
                E.child1(SOME_LONGER_VALUE)
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: './/*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [SOME_LONGER_VALUE, SOME_SHORTER_VALUE]),
            (TAG1, SOME_LONGER_VALUE)
        ]
    def test_should_apply_children_xpaths_and_exclude_parents(self):
        xml_root = E.article(
            E.entry(
                E.parent(
                    E.child2(SOME_LONGER_VALUE),
                    E.child1(SOME_SHORTER_VALUE)
                )
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: './/*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [SOME_LONGER_VALUE, SOME_SHORTER_VALUE])
        ]
    def test_should_apply_children_xpaths_and_include_parent_text_between_matched_children(self):
        xml_root = E.article(
            E.entry(
                E.parent(
                    E.child2(SOME_LONGER_VALUE),
                    SOME_VALUE,
                    E.child1(SOME_SHORTER_VALUE)
                )
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: './/*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [SOME_LONGER_VALUE, SOME_VALUE, SOME_SHORTER_VALUE])
        ]
    def test_should_apply_multiple_children_xpaths_and_include_parent_text_if_enabled(self):
        xml_root = E.article(
            E.entry(
                E.child1(SOME_SHORTER_VALUE),
                SOME_LONGER_VALUE
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: '\n{}\n{}\n'.format('.//*', '.'),
                TAG1 + XmlMappingSuffix.UNMATCHED_PARENT_TEXT: 'true'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [SOME_LONGER_VALUE, SOME_SHORTER_VALUE])
        ]
    def test_should_apply_concat_children(self):
        # fpage and lpage get concatenated with a '-' separator.
        num_values = ['101', '202']
        xml_root = E.article(
            E.entry(
                E.parent(
                    E.child1(SOME_VALUE),
                    E.fpage(num_values[0]),
                    E.lpage(num_values[1])
                )
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: './/*',
                TAG1 + XmlMappingSuffix.CHILDREN_CONCAT: json.dumps([[{
                    'xpath': './/fpage'
                }, {
                    'value': '-'
                }, {
                    'xpath': './/lpage'
                }]])
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [SOME_VALUE, '-'.join(num_values)])
        ]
    def test_should_not_apply_concat_children_if_one_node_was_not_found(self):
        num_values = ['101', '202']
        xml_root = E.article(
            E.entry(
                E.parent(
                    E.child1(SOME_VALUE),
                    E.fpage(num_values[0]),
                    E.lpage(num_values[1])
                )
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: './/*',
                TAG1 + XmlMappingSuffix.CHILDREN_CONCAT: json.dumps([[{
                    'xpath': './/fpage'
                }, {
                    'value': '-'
                }, {
                    'xpath': './/unknown'
                }]])
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [SOME_VALUE, num_values[0], num_values[1]])
        ]
    def test_should_apply_range_children(self):
        # fpage..lpage is expanded to the full page-number range.
        num_values = [101, 102, 103, 104, 105, 106, 107]
        xml_root = E.article(
            E.entry(
                E.child1(SOME_VALUE),
                E.fpage(str(min(num_values))),
                E.lpage(str(max(num_values)))
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: 'fpage|lpage',
                TAG1 + XmlMappingSuffix.CHILDREN_RANGE: json.dumps([{
                    'min': {
                        'xpath': 'fpage'
                    },
                    'max': {
                        'xpath': 'lpage'
                    }
                }])
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [str(x) for x in num_values])
        ]
    def test_should_apply_range_children_as_separate_target_annotations(self):
        num_values = [101, 102, 103, 104, 105, 106, 107]
        xml_root = E.article(
            E.entry(
                E.child1(SOME_VALUE),
                E.fpage(str(min(num_values))),
                E.lpage(str(max(num_values)))
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: 'fpage|lpage',
                TAG1 + XmlMappingSuffix.CHILDREN_RANGE: json.dumps([{
                    'min': {
                        'xpath': 'fpage'
                    },
                    'max': {
                        'xpath': 'lpage'
                    },
                    'standalone': True
                }])
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, str(x))
            for x in num_values
        ]
    def test_should_not_apply_range_children_if_xpath_not_matching(self):
        num_values = [101, 102, 103, 104, 105, 106, 107]
        fpage = str(min(num_values))
        lpage = str(max(num_values))
        xml_root = E.article(
            E.entry(
                E.child1(SOME_VALUE),
                E.fpage(fpage),
                E.lpage(lpage)
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: 'fpage|unknown',
                TAG1 + XmlMappingSuffix.CHILDREN_RANGE: json.dumps([{
                    'min': {
                        'xpath': 'fpage'
                    },
                    'max': {
                        'xpath': 'unknown'
                    }
                }])
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, fpage)
        ]
    def test_should_not_apply_range_children_if_value_is_not_integer(self):
        fpage = 'abc'
        lpage = 'xyz'
        xml_root = E.article(
            E.entry(
                E.child1(SOME_VALUE),
                E.fpage(fpage),
                E.lpage(lpage)
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: 'fpage|lpage',
                TAG1 + XmlMappingSuffix.CHILDREN_RANGE: json.dumps([{
                    'min': {
                        'xpath': 'fpage'
                    },
                    'max': {
                        'xpath': 'lpage'
                    }
                }])
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, [fpage, lpage])
        ]
    def test_should_add_sub_annotations(self):
        xml_root = E.article(
            E.entry(
                E.firstname(SOME_VALUE),
                E.givennames(SOME_VALUE_2)
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.SUB + '.firstname': './firstname',
                TAG1 + XmlMappingSuffix.SUB + '.givennames': './givennames',
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations[0].sub_annotations] == [
            ('firstname', SOME_VALUE),
            ('givennames', SOME_VALUE_2)
        ]
    def test_should_add_sub_annotations_with_multiple_values(self):
        xml_root = E.article(
            E.entry(
                E.value(SOME_VALUE),
                E.value(SOME_VALUE_2)
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.SUB + '.value': './value'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(t.name, t.value) for t in target_annotations[0].sub_annotations] == [
            ('value', SOME_VALUE),
            ('value', SOME_VALUE_2)
        ]
    def test_should_extract_numbers_from_value_after_text(self):
        xml_root = E.article(E.entry(
            E.value(SOME_VALUE + ' 12345')
        ))
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.EXTRACT_REGEX: r'.*\b(\d+)\b.*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert [(t.name, set(t.value)) for t in target_annotations] == [
            (TAG1, {SOME_VALUE + ' 12345', SOME_VALUE, '12345'})
        ]
    def test_should_extract_single_value_if_its_the_only_value(self):
        xml_root = E.article(E.entry(
            E.value('12345')
        ))
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.EXTRACT_REGEX: r'.*\b(\d+)\b.*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert [(t.name, t.value) for t in target_annotations] == [
            (TAG1, '12345')
        ]
    def test_should_unnest_extract_value_from_children(self):
        xml_root = E.article(E.entry(
            E.value(SOME_VALUE + ' 12345'),
            E.value(SOME_VALUE_2 + ' 54321')
        ))
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                TAG1 + XmlMappingSuffix.CHILDREN: r'.//*',
                TAG1 + XmlMappingSuffix.EXTRACT_REGEX: r'.*\b(\d+)\b.*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert [(t.name, set(t.value)) for t in target_annotations] == [
            (TAG1, {
                SOME_VALUE + ' 12345', SOME_VALUE, '12345',
                SOME_VALUE_2 + ' 54321', SOME_VALUE_2, '54321'
            })
        ]
    def test_should_extract_numbers_from_sub_value_after_text(self):
        xml_root = E.article(E.entry(
            E.value(SOME_VALUE + ' 12345')
        ))
        sub_key = TAG1 + XmlMappingSuffix.SUB + '.value'
        xml_mapping = {
            'article': {
                TAG1: 'entry',
                sub_key: './value',
                sub_key + XmlMappingSuffix.EXTRACT_REGEX: r'.*\b(\d+)\b.*'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert [(t.name, set(t.value)) for t in target_annotations[0].sub_annotations] == [
            ('value', {SOME_VALUE + ' 12345', SOME_VALUE, '12345'})
        ]
    def test_should_return_full_text(self):
        # Text of embedded child elements is merged into the parent's text.
        xml_root = E.article(
            E.title(
                'some ',
                E.other('embedded'),
                ' text'
            )
        )
        xml_mapping = {
            'article': {
                TAG1: 'title'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert len(target_annotations) == 1
        assert target_annotations[0].name == TAG1
        assert target_annotations[0].value == 'some embedded text'
    def test_should_return_target_annotations_in_order_of_xml(self):
        xml_root = E.article(
            E.tag1('tag1.1'), E.tag2('tag2.1'), E.tag1('tag1.2'), E.tag2('tag2.2'),
        )
        xml_mapping = {
            'article': {
                TAG1: 'tag1',
                TAG2: 'tag2'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(ta.name, ta.value) for ta in target_annotations] == [
            (TAG1, 'tag1.1'), (TAG2, 'tag2.1'), (TAG1, 'tag1.2'), (TAG2, 'tag2.2')
        ]
    def test_should_return_target_annotations_in_order_of_priority_first(self):
        xml_root = E.article(
            E.tag1('tag1.1'), E.tag2('tag2.1'), E.tag1('tag1.2'), E.tag2('tag2.2'),
        )
        xml_mapping = {
            'article': {
                TAG1: 'tag1',
                TAG2: 'tag2',
                TAG2 + XmlMappingSuffix.PRIORITY: '1'
            }
        }
        target_annotations = xml_root_to_target_annotations(xml_root, xml_mapping)
        assert [(ta.name, ta.value) for ta in target_annotations] == [
            (TAG2, 'tag2.1'), (TAG2, 'tag2.2'), (TAG1, 'tag1.1'), (TAG1, 'tag1.2')
        ]
| 34.38756
| 100
| 0.522286
| 2,210
| 21,561
| 4.742986
| 0.072398
| 0.181645
| 0.118298
| 0.141958
| 0.871494
| 0.858901
| 0.825033
| 0.796508
| 0.751288
| 0.719424
| 0
| 0.026248
| 0.370948
| 21,561
| 626
| 101
| 34.442492
| 0.74659
| 0
| 0
| 0.51463
| 0
| 0
| 0.055471
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 1
| 0.063683
| false
| 0
| 0.006885
| 0
| 0.07401
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5569a9dbe5eb584498045708ee8aded31882fd20
| 198
|
py
|
Python
|
polls/views.py
|
cs-fullstack-fall-2018/django-intro1-bachmanryan
|
c7a94ebd132850212ab63c37d0516c0d398372c8
|
[
"Apache-2.0"
] | null | null | null |
polls/views.py
|
cs-fullstack-fall-2018/django-intro1-bachmanryan
|
c7a94ebd132850212ab63c37d0516c0d398372c8
|
[
"Apache-2.0"
] | null | null | null |
polls/views.py
|
cs-fullstack-fall-2018/django-intro1-bachmanryan
|
c7a94ebd132850212ab63c37d0516c0d398372c8
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Catch-all index view for the polls app.

    Returns a plain-text placeholder telling the client to use a
    different route.
    """
    # Fixed grammar in the user-facing message ("a invalid" -> "an invalid").
    return HttpResponse("This is an invalid response use a different route")
| 22
| 75
| 0.782828
| 28
| 198
| 5.535714
| 0.821429
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161616
| 198
| 9
| 75
| 22
| 0.933735
| 0.116162
| 0
| 0
| 0
| 0
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
559b5ea5d75a1f1a5eaa4eef6bbe0c5068fedbc1
| 32
|
py
|
Python
|
nes/nasbench201/__init__.py
|
automl/nes
|
1c54786c30acd6e19eb9708204bffc86b58ea272
|
[
"Apache-2.0"
] | 26
|
2020-06-22T16:07:54.000Z
|
2022-03-23T08:12:05.000Z
|
nes/nasbench201/__init__.py
|
automl/nes
|
1c54786c30acd6e19eb9708204bffc86b58ea272
|
[
"Apache-2.0"
] | 2
|
2020-07-13T06:23:18.000Z
|
2022-03-31T07:30:18.000Z
|
nes/nasbench201/__init__.py
|
automl/nes
|
1c54786c30acd6e19eb9708204bffc86b58ea272
|
[
"Apache-2.0"
] | 4
|
2020-07-06T01:55:16.000Z
|
2021-08-02T00:00:14.000Z
|
from .worker import NB201Worker
| 16
| 31
| 0.84375
| 4
| 32
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 0.125
| 32
| 1
| 32
| 32
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e956a1ae5c81b96ab68bc33e2d9e7c067080e55d
| 199
|
py
|
Python
|
base/classroom/admin.py
|
Aliemeka/classdodo
|
21759edf134a24eb881078d910bbdcad36548707
|
[
"MIT"
] | 2
|
2020-02-08T14:30:22.000Z
|
2021-01-30T02:06:47.000Z
|
base/classroom/admin.py
|
Aliemeka/classdodo
|
21759edf134a24eb881078d910bbdcad36548707
|
[
"MIT"
] | 7
|
2021-03-30T12:33:47.000Z
|
2022-02-28T04:03:54.000Z
|
base/classroom/admin.py
|
Aliemeka/classdodo
|
21759edf134a24eb881078d910bbdcad36548707
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Subject, Test, Question, Choice
# Expose the classroom models in the Django admin site, in the same
# order as the original one-per-line registrations.
for _model in (Subject, Test, Question, Choice):
    admin.site.register(_model)
| 24.875
| 51
| 0.81407
| 28
| 199
| 5.785714
| 0.428571
| 0.222222
| 0.419753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080402
| 199
| 7
| 52
| 28.428571
| 0.885246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
75c2c39324bdc890ec07159d39c9e34172049cc6
| 202
|
py
|
Python
|
website/core/admin.py
|
ddxai/personalpage
|
7ce69a8ffcc9127933a412cbfad3fa95935b17c5
|
[
"MIT"
] | null | null | null |
website/core/admin.py
|
ddxai/personalpage
|
7ce69a8ffcc9127933a412cbfad3fa95935b17c5
|
[
"MIT"
] | null | null | null |
website/core/admin.py
|
ddxai/personalpage
|
7ce69a8ffcc9127933a412cbfad3fa95935b17c5
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Category, Picture, About, Social
# Expose the site's content models in the Django admin, preserving the
# original registration order.
for _model in (Category, Picture, About, Social):
    admin.site.register(_model)
| 22.444444
| 52
| 0.811881
| 28
| 202
| 5.857143
| 0.428571
| 0.219512
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084158
| 202
| 8
| 53
| 25.25
| 0.886486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f940e92050cae9d003cd5b1dfc6d3d0b6be7dcdf
| 48
|
py
|
Python
|
lj/test/test_range.py
|
shliujing/qn-python-sdk
|
6a659b4197b7847b604b42cb223850977bcc86dc
|
[
"MIT"
] | null | null | null |
lj/test/test_range.py
|
shliujing/qn-python-sdk
|
6a659b4197b7847b604b42cb223850977bcc86dc
|
[
"MIT"
] | null | null | null |
lj/test/test_range.py
|
shliujing/qn-python-sdk
|
6a659b4197b7847b604b42cb223850977bcc86dc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Print the range object itself.  Under Python 3 this shows
# "range(0, 30, 5)" rather than the item list (use list() to expand).
step_range = range(0, 30, 5)
print(step_range)
| 12
| 23
| 0.5
| 8
| 48
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0.1875
| 48
| 3
| 24
| 16
| 0.487179
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
f988ce90fad933ceb95eca71c9dfc62343163f0a
| 92
|
py
|
Python
|
threshold_crypto/__init__.py
|
tompetersen/threshold-crypto
|
bd51be2aacd65cf877d025f229cc96baaf5ff2c1
|
[
"MIT"
] | 15
|
2018-11-02T16:21:28.000Z
|
2022-03-21T05:01:08.000Z
|
threshold_crypto/__init__.py
|
tompetersen/threshold-crypto
|
bd51be2aacd65cf877d025f229cc96baaf5ff2c1
|
[
"MIT"
] | null | null | null |
threshold_crypto/__init__.py
|
tompetersen/threshold-crypto
|
bd51be2aacd65cf877d025f229cc96baaf5ff2c1
|
[
"MIT"
] | 2
|
2019-09-03T13:30:26.000Z
|
2021-10-08T03:56:25.000Z
|
from .participant import *
from .central import *
from .data import *
from .number import *
| 18.4
| 26
| 0.73913
| 12
| 92
| 5.666667
| 0.5
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 92
| 4
| 27
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f9d11bba9d8ae3f5d4e7b9de7e1e15b1cf379752
| 36
|
py
|
Python
|
gaiadet/models/necks/__init__.py
|
zengming16/GAIA-det
|
cac6b5601d63aeaa3882cea2256dcb2539fecb34
|
[
"Apache-2.0"
] | 149
|
2021-06-21T06:18:16.000Z
|
2022-03-23T08:55:23.000Z
|
gaiadet/models/necks/__init__.py
|
zengming16/GAIA-det
|
cac6b5601d63aeaa3882cea2256dcb2539fecb34
|
[
"Apache-2.0"
] | 7
|
2021-07-11T07:52:58.000Z
|
2022-03-30T11:41:39.000Z
|
gaiadet/models/necks/__init__.py
|
zengming16/GAIA-det
|
cac6b5601d63aeaa3882cea2256dcb2539fecb34
|
[
"Apache-2.0"
] | 13
|
2021-06-29T06:06:13.000Z
|
2022-02-28T01:31:17.000Z
|
from .dynamic_fpn import DynamicFPN
| 18
| 35
| 0.861111
| 5
| 36
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ddb3a72ad6ee69576fd20757accfb34c1fb088e7
| 45
|
py
|
Python
|
tomoxtal/__init__.py
|
apeck12/tomoxtal
|
d2b3407708da2a35ecf061fb62ba397d837b980c
|
[
"MIT"
] | null | null | null |
tomoxtal/__init__.py
|
apeck12/tomoxtal
|
d2b3407708da2a35ecf061fb62ba397d837b980c
|
[
"MIT"
] | null | null | null |
tomoxtal/__init__.py
|
apeck12/tomoxtal
|
d2b3407708da2a35ecf061fb62ba397d837b980c
|
[
"MIT"
] | 1
|
2021-11-22T18:30:30.000Z
|
2021-11-22T18:30:30.000Z
|
from .pipeline import *
from .utils import *
| 15
| 23
| 0.733333
| 6
| 45
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 24
| 22.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ddccbbf0c2e3c673b8a5615579250d3e4856099d
| 95
|
py
|
Python
|
sscls/modeling/__init__.py
|
poodarchu/sscls
|
8b1bd94b1ef4f0cef3ec6ecbb48be9dab129687b
|
[
"MIT"
] | 2
|
2020-04-26T13:41:24.000Z
|
2020-05-06T10:15:06.000Z
|
sscls/modeling/__init__.py
|
poodarchu/sscls
|
8b1bd94b1ef4f0cef3ec6ecbb48be9dab129687b
|
[
"MIT"
] | null | null | null |
sscls/modeling/__init__.py
|
poodarchu/sscls
|
8b1bd94b1ef4f0cef3ec6ecbb48be9dab129687b
|
[
"MIT"
] | null | null | null |
from .builder import build_model, register_model

# Public API of the modeling package: only the builder entry points are
# exported via star-imports.
__all__ = ["build_model", "register_model"]
| 19
| 48
| 0.778947
| 12
| 95
| 5.5
| 0.583333
| 0.30303
| 0.545455
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115789
| 95
| 4
| 49
| 23.75
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ddea613f32c5b54db70269bb078759d5f73c99d7
| 20,291
|
py
|
Python
|
tests/tests.py
|
Amper/opyum
|
daa2320eb4e70f6c535a589b71bb9db4868aedfc
|
[
"BSD-3-Clause"
] | 2
|
2016-01-24T16:48:02.000Z
|
2016-02-02T04:31:02.000Z
|
tests/tests.py
|
Amper/opyum
|
daa2320eb4e70f6c535a589b71bb9db4868aedfc
|
[
"BSD-3-Clause"
] | null | null | null |
tests/tests.py
|
Amper/opyum
|
daa2320eb4e70f6c535a589b71bb9db4868aedfc
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from opyum import *
from timeit import timeit
from itertools import repeat
class BaseTestCase(unittest.TestCase):
    """Shared base for the optimizer test suites below."""

    @staticmethod
    def optimize(source, optimization):
        # Apply exactly one named optimization (looked up in the library's
        # registry) to *source* and return the optimized source text.
        selected = [all_optimizations[optimization]]
        return get_source(value=source, optimized=True, optimizations=selected)
class TestResults(BaseTestCase):
    """Declarative correctness tests for the optimizers.

    Each test method only fills in three slots — src_before (input
    source(s)), optimizations (name or sequence of names), and src_check
    (expected output(s), or src_before itself when the optimizer must
    leave the code alone).  tearDown performs the actual optimize-and-
    compare work.
    """

    def setUp(self):
        # Reset the declaration slots before every test.
        self.src_before = ()
        self.optimizations = ()
        self.src_check = ()

    def tearDown(self):
        # The real assertions run here, after the test method has filled
        # in the three declaration slots.
        if self.src_before and self.optimizations and self.src_check != ():
            sources = self.src_before
            if not isinstance(sources, (list, tuple)):
                sources = (sources, )
            expected = self.src_check
            if not isinstance(expected, (list, tuple)):
                # One expected result shared by every input source.
                expected = repeat(expected, times=len(sources))
            opts = self.optimizations
            if not isinstance(opts, (list, tuple)):
                opts = (opts, )
            for source, check in zip(sources, expected):
                optimized = source
                for name in opts:
                    optimized = self.optimize(optimized, name)
                self.assertEqual(check, optimized)
        else:
            self.assertTrue(False, msg='Not specified all the necessary parameters')
        self.src_before = ()
        self.optimizations = ()
        self.src_check = ()

    def test_mult_to_sum_0(self):
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 0)', 'y = (x * 0.0)',
                           'y = (0 * x)', 'y = (0.0 * x)')
        self.src_check = 'y = 0'

    def test_mult_to_sum_1(self):
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 1)', 'y = (x * 1.0)',
                           'y = (1 * x)', 'y = (1.0 * x)')
        self.src_check = 'y = x'

    def test_mult_to_sum_2(self):
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 2)', 'y = (x * 2.0)',
                           'y = (2 * x)', 'y = (2.0 * x)')
        self.src_check = 'y = (x + x)'

    def test_mult_to_sum_3(self):
        # Factor 3 is out of scope: the source must pass through unchanged.
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 3)', 'y = (3 * x)')
        self.src_check = self.src_before

    def test_pow_to_mult_0(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 0))', 'y = (x ** (- 0.0))',
                           'y = (x ** 0)', 'y = (x ** 0.0)')
        self.src_check = 'y = 1'

    def test_pow_to_mult_0_5(self):
        # Fractional exponents must be left untouched.
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 0.5))', 'y = (x ** 0.5)')
        self.src_check = self.src_before

    def test_pow_to_mult_1(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 1))', 'y = (x ** (- 1.0))',
                           'y = (x ** 1)', 'y = (x ** 1.0)',
                           'y = (x ** (+ 1))', 'y = (x ** (+ 1.0))')
        # Negative exponents become reciprocals; positive ones collapse to x.
        self.src_check = ['y = (1 / x)'] * 2 + ['y = x'] * 4

    def test_pow_to_mult_1_5(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 1.5))', 'y = (x ** 1.5)')
        self.src_check = self.src_before

    def test_pow_to_mult_2(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 2))', 'y = (x ** (- 2.0))',
                           'y = (x ** 2)', 'y = (x ** 2.0)',
                           'y = (x ** (+ 2))', 'y = (x ** (+ 2.0))')
        self.src_check = ['y = (1 / (x * x))'] * 2 + ['y = (x * x)'] * 4

    def test_pow_to_mult_2_5(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 2.5))', 'y = (x ** 2.5)')
        self.src_check = self.src_before

    def test_pow_to_mult_3(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 3))', 'y = (x ** (- 3.0))',
                           'y = (x ** 3)', 'y = (x ** 3.0)',
                           'y = (x ** (+ 3))', 'y = (x ** (+ 3.0))')
        self.src_check = ['y = (1 / ((x * x) * x))'] * 2 + ['y = ((x * x) * x)'] * 4

    def test_pow_to_mult_3_5(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 3.5))', 'y = (x ** 3.5)')
        self.src_check = self.src_before

    def test_pow_to_mult_4(self):
        # Exponent 4 is beyond the expansion limit: source stays as-is.
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 4))', 'y = (x ** (- 4.0))',
                           'y = (x ** 4)', 'y = (x ** 4.0)')
        self.src_check = self.src_before

    def test_yield_to_yield_from_1(self):
        self.optimizations = 'YieldToYieldFrom'
        self.src_before = 'for y in range(x): yield y'
        self.src_check = 'yield from range(x)'

    def test_yield_to_yield_from_2(self):
        # The loop body is not a bare yield of the loop variable: no rewrite.
        self.optimizations = 'YieldToYieldFrom'
        self.src_before = 'for x in range(10):\n    yield (x + 1)'
        self.src_check = self.src_before

    def test_format_positions_1(self):
        self.optimizations = 'FormatPositions'
        self.src_before = "'{}'.format(*x)"
        self.src_check = "'{0}'.format(*x)"

    def test_format_positions_2(self):
        self.optimizations = 'FormatPositions'
        self.src_before = "'_{}_{}_'.format(*x)"
        self.src_check = "'_{0}_{1}_'.format(*x)"

    def test_format_positions_3(self):
        self.optimizations = 'FormatPositions'
        self.src_before = "'{}{}{}'.format(*x)"
        self.src_check = "'{0}{1}{2}'.format(*x)"

    def test_format_positions_4(self):
        # Mixed explicit/auto numbering cannot be rewritten safely.
        self.optimizations = 'FormatPositions'
        self.src_before = "'{0}{}'.format(*args)"
        self.src_check = self.src_before

    def test_format_positions_5(self):
        self.optimizations = 'FormatPositions'
        self.src_before = "'{}{1}'.format(*args)"
        self.src_check = self.src_before

    def test_constant_folding_1(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = 'x += ((10 + 5 * 4 - 2) * 2 - 14)'
        self.src_check = 'x += 42'

    def test_constant_folding_2(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = 'x += (((10 + 10) + (10 + 10)) + (10 + (10 + 10)) + ((10 + 10) + 10))'
        self.src_check = 'x += 100'

    def test_constant_folding_3(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = 'x = [(i + 1) for i in range(0, 20, 2) if ((i % 3) != 0)]'
        self.src_check = 'x = [3, 5, 9, 11, 15, 17]'

    def test_constant_folding_4(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = ('x = 7 * 24 * 60 * 60',
                           'y = [(i ** 2) for i in range(10) if ((i % 2) == 0)]',
                           'z = sum(range(1000))')
        self.src_check = ('x = 604800',
                          'y = [0, 4, 16, 36, 64]',
                          'z = 499500')

    def test_builtin_const_propagation_and_folding_1(self):
        # Two-stage pipeline: propagate math.pi, then fold 2 * pi.
        self.optimizations = ('BuiltinConstantPropagation', 'ConstantFolding')
        self.src_before = 'from math import pi\ny = sum(map((lambda r: (2 * pi * r)), range(x)))'
        self.src_check = 'from math import pi\ny = sum(map((lambda r: (6.283185307179586 * r)), range(x)))'

    def test_dead_code_elimination_1(self):
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('if condition:',
                                     '    do_something()',
                                     'else:',
                                     '    pass'))
        self.src_check = '\n'.join(('if condition:',
                                    '    do_something()'))

    def test_dead_code_elimination_2(self):
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('if condition:',
                                     '    pass',
                                     'else:',
                                     '    do_something()'))
        self.src_check = '\n'.join(('if (not condition):',
                                    '    do_something()'))

    def test_dead_code_elimination_3(self):
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('if condition1:',
                                     '    pass',
                                     'elif condition2:',
                                     '    pass',
                                     'else:',
                                     '    pass',
                                     'do_something()'))
        self.src_check = 'do_something()'

    def test_dead_code_elimination_4(self):
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('if condition1:',
                                     '    pass',
                                     'elif condition2:',
                                     '    do_something1()',
                                     'else:',
                                     '    do_something2()'))
        self.src_check = '\n'.join(('if ((not condition1) and condition2):',
                                    '    do_something1()',
                                    'else:',
                                    '    do_something2()'))

    def test_dead_code_elimination_5(self):
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('if condition1:',
                                     '    pass',
                                     'elif condition2:',
                                     '    do_something()',
                                     'else:',
                                     '    pass'))
        self.src_check = '\n'.join(('if ((not condition1) and condition2):',
                                    '    do_something()'))

    def test_dead_code_elimination_6(self):
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('if condition1:',
                                     '    pass',
                                     'elif condition2:',
                                     '    pass',
                                     'else:',
                                     '    do_something()'))
        self.src_check = '\n'.join(('if ((not condition1) and (not condition2)):',
                                    '    do_something()'))

    def test_dead_code_elimination_7(self):
        # Statements after an unconditional return are removed.
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(('def test(x):',
                                     '    return (x + 1)',
                                     '    x = (x - 1)',
                                     'y = test(5)'))
        self.src_check = '\n'.join(('def test(x):',
                                    '    return (x + 1)',
                                    'y = test(5)'))

    def test_dead_code_elimination_8(self):
        # Same, but with a nested function in between.
        self.optimizations = 'DeadCodeElimination'
        self.src_before = '\n'.join(("def test(x):",
                                     "",
                                     "    def test2(f):",
                                     "        return f((x + 1))",
                                     "    x = (x - 1)",
                                     "    return test2",
                                     "    print(x)",
                                     "y = test(5)(func)"))
        self.src_check = '\n'.join(("def test(x):",
                                    "",
                                    "    def test2(f):",
                                    "        return f((x + 1))",
                                    "    return test2",
                                    "y = test(5)(func)"))
class Benchmarks(BaseTestCase):
    """Declarative timing tests for the optimizers.

    Each test method fills in src_before, optimizations, and set_up (a
    setup snippet for timeit).  tearDown times the original and optimized
    sources and asserts the optimized version is no slower.  NOTE: these
    are wall-clock comparisons, so they are inherently flaky.
    """

    def setUp(self):
        # Reset accumulators and declaration slots before every test.
        self.time_before = 0
        self.time_after = 0
        self.src_before = ()
        self.optimizations = ()
        self.set_up = None

    def tearDown(self):
        # Time every source before and after optimization, then compare
        # the accumulated totals.
        if self.src_before and self.optimizations:
            sources = self.src_before
            if not isinstance(sources, (list, tuple)):
                sources = (sources, )
            opts = self.optimizations
            if not isinstance(opts, (list, tuple)):
                opts = (opts, )
            for source in sources:
                optimized = source
                for name in opts:
                    optimized = self.optimize(optimized, name)
                self.time_before += timeit(source, setup=self.set_up)
                self.time_after += timeit(optimized, setup=self.set_up)
            self.assertGreaterEqual(self.time_before, self.time_after)
        self.time_before = 0
        self.time_after = 0
        self.src_before = ()
        self.optimizations = ()
        self.set_up = None

    def test_mult_to_sum_0(self):
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 0)', 'y = (x * 0.0)',
                           'y = (0 * x)', 'y = (0.0 * x)')
        self.set_up = "x = 10000"

    def test_mult_to_sum_1(self):
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 1)', 'y = (x * 1.0)',
                           'y = (1 * x)', 'y = (1.0 * x)')
        self.set_up = "x = 10000"

    def test_mult_to_sum_2(self):
        self.optimizations = 'MultToSum'
        self.src_before = ('y = (x * 2)', 'y = (x * 2.0)',
                           'y = (2 * x)', 'y = (2.0 * x)')
        self.set_up = "x = 10000"

    def test_pow_to_mult_0(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 0))', 'y = (x ** (- 0.0))',
                           'y = (x ** 0)', 'y = (x ** 0.0)')
        self.set_up = "x = 10000"

    def test_pow_to_mult_1(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 1))', 'y = (x ** (- 1.0))',
                           'y = (x ** 1)', 'y = (x ** 1.0)',
                           'y = (x ** (+ 1))', 'y = (x ** (+ 1.0))')
        self.set_up = "x = 10000"

    def test_pow_to_mult_2(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 2))', 'y = (x ** (- 2.0))',
                           'y = (x ** 2)', 'y = (x ** 2.0)',
                           'y = (x ** (+ 2))', 'y = (x ** (+ 2.0))')
        self.set_up = "x = 10000"

    def test_pow_to_mult_3(self):
        self.optimizations = 'PowToMult'
        self.src_before = ('y = (x ** (- 3))', 'y = (x ** (- 3.0))',
                           'y = (x ** 3)', 'y = (x ** 3.0)',
                           'y = (x ** (+ 3))', 'y = (x ** (+ 3.0))')
        self.set_up = "x = 10000"

    def test_yield_to_yield_from_1(self):
        self.optimizations = 'YieldToYieldFrom'
        self.src_before = 'def test(x):\n    for y in range(x): yield y\nr = sum(test(x))'
        self.set_up = "x = 10"

    # The FormatPositions benchmarks are disabled in the original suite;
    # kept here (commented out) for reference.
    #def test_format_positions_1(self):
    #    self.optimizations = 'FormatPositions'
    #    self.src_before = "'{}'.format(*x)"
    #    self.set_up = "x = [10000]"
    #def test_format_positions_2(self):
    #    self.optimizations = 'FormatPositions'
    #    self.src_before = "'_{}_{}_'.format(*x)"
    #    self.set_up = "x = [10000, 100000]"
    #def test_format_positions_3(self):
    #    self.optimizations = 'FormatPositions'
    #    self.src_before = "'{}{}{}'.format(*x)"
    #    self.set_up = "x = [10000, 100000, 1000000]"

    def test_constant_folding_1(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = 'x += ((10 + 5 * 4 - 2) * 2 - 14)'
        self.set_up = 'x = 0'

    def test_constant_folding_2(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = 'x += (((10 + 10) + (10 + 10)) + (10 + (10 + 10)) + ((10 + 10) + 10))'
        self.set_up = 'x = 0'

    def test_constant_folding_3(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = 'x += sum([(i + 1) for i in range(0, 20, 2) if ((i % 3) != 0)])'
        self.set_up = 'x = 0'

    def test_constant_folding_4(self):
        self.optimizations = 'ConstantFolding'
        self.src_before = ('x = 7 * 24 * 60 * 60',
                           'y = [(i ** 2) for i in range(10) if ((i % 2) == 0)]',
                           'z = sum(range(1000))')
        self.set_up = ''

    def test_builtin_const_propagation_and_folding_1(self):
        self.optimizations = ('BuiltinConstantPropagation', 'ConstantFolding')
        self.src_before = 'from math import pi\ny = sum(map((lambda r: (2 * pi * r)), range(x)))'
        self.set_up = 'x = 10'
if __name__ == '__main__':
    # Run the whole suite with per-test result lines (verbosity 2).
    unittest.main(verbosity = 2)
| 41.494888
| 111
| 0.378345
| 1,869
| 20,291
| 3.904227
| 0.074906
| 0.108401
| 0.130053
| 0.038372
| 0.885706
| 0.852679
| 0.825134
| 0.793477
| 0.741264
| 0.70851
| 0
| 0.045371
| 0.491647
| 20,291
| 488
| 112
| 41.579918
| 0.662046
| 0.02474
| 0
| 0.669903
| 0
| 0.024272
| 0.211367
| 0.006978
| 0
| 0
| 0
| 0
| 0.007282
| 1
| 0.123786
| false
| 0.024272
| 0.01699
| 0.002427
| 0.150485
| 0.002427
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ddf4032fd0f4b42c46a9ece0e7a0563309202eb8
| 3,143
|
py
|
Python
|
sheets api test.py
|
jacblo/tests-and-early-projects
|
16ca33498fe336b089e24981e148ad81e57adb13
|
[
"CC0-1.0"
] | null | null | null |
sheets api test.py
|
jacblo/tests-and-early-projects
|
16ca33498fe336b089e24981e148ad81e57adb13
|
[
"CC0-1.0"
] | null | null | null |
sheets api test.py
|
jacblo/tests-and-early-projects
|
16ca33498fe336b089e24981e148ad81e57adb13
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 10 22:02:19 2020
@author: y4
"""
import getpass
import gspread
from oauth2client.service_account import ServiceAccountCredentials
scope = ['https://www.googleapis.com/auth/spreadsheets']
credentials = ServiceAccountCredentials.from_json_keyfile_dict({
"type": "service_account",
"project_id": "whatsapp-test-spam",
"private_key_id": "3138a3ea03a4a1b66a6d367c53dd693976fe2df2",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCkxqbKEU5Thlfr\n4u+IKvWPb2ddTxLoeH/5xQ1J/IFr5UTbUwR5GUzwBxRVLpK9sQj2XgpzyXaqXy0q\nuWal2A2x8eJZcUbgXVEvYOTq+ml5ffyOHsOfTsjyu3qP36sbyzxlQ3Ho4GFiLMoE\nEdDqF9NJlzW6pLYF5gE+IFUamn5pNzUQDQk205U2hgvx83zs9oTtWnK+7Dirbw9L\nciq2+EtMOUUQPBXyV+vJpjdKwFrE0oMAxI+GMh/iF35PDKYblZeY1OSGTP65AWgd\nxUEWCh19HiY/UOdJKch35k+ntetOjZkPzcw98lvIQSggruS0Znef8XPqsPKwG9V8\nIhgG++GzAgMBAAECggEARLjJNTt0hGdiYfIa3pq0Iadf382r4CLplP03JqVWQO61\nAhgkpHEF4pHBTCmJb+3XBBGCoHnksPfS+Z+rjP2H8LAmLBGPcuHYiz8JGmtn9BC0\ndX2lLtsH+hxw6HJrhcMEpGM1rd9vHif59SqNDCT1rRqQgRBTDjC4UfXgKKFImY6O\nY5uIhv6FhnGFYqOhwCHRqgTS04W1kq5Hr1R9He1M36XD14Mh75P5xRNlYuTgObcw\nbQHCjxqZBx4Rpe9H0GUjvU489q65jlQ4nQvsZXQNCwT8bIMNT5nfzIwAn3OMghtk\nxsmc8sl3ztc2reYCix34XuLbRJKy41MTwZESSwO3EQKBgQDXTTgRUog0mkda8zev\nBYme/LpxeR6idNJf2wNiC7fEtEuQ4ASXE15+7tNmfryL0glVAqRxNQtiSP7FvFye\nefHNMdiVNOdoQAe4gFcnSa7y2oJwjV7zwd58yRTnTJiDuGYJe3JUvJYH2dmoXD7I\nJYhYoHq2qCnAe7+xgk65MU5LWwKBgQDD7GrjW0xH8NGzg1PUPNQMKfpGjxFqCi5o\nL8JVXrgExbQSCsf/uKiyWYsiLzni6TzxExgYihq0QOMQyBo20ff4f1ZaCvCHmVFv\nsJGv8y7oXsxhaltIWZYipYnGbszuwAMQEc0pq52yXYzO6KGFswGRE6U2N6LD5FZh\nNF1VHKwKiQKBgAdFJ0CGfezwzLoIfnfdgwEoXY9ZXKx1r2jnN10HMkRlJiwVNHJ5\nh/ZXUDIk028RP5lsRmtANEs0Vc4Nhz8etQiNx1d6etntV5VmWAsOlObEdCUi0PMA\nN+gUzizlTD0ea+ukDH9KAvLu60ehHcmaYtlDSgGC+i3yv81ZrhjYzmEDAoGAJCxO\nP9PnbZDk5sPkglcIv4Ywkz5u9KkUkF/g/WoTh64I5RvgeTJa0zL9IT6e7WoqukfQ\nNxeofodMZRjM3jo+Ej9Qbid+6UpBYuGyxE2d54E5MvM0D1ObCKKPoXdrltkUt67R\ntlPdNcVX7gu9ZrX6IBMEedIj1w8dc6z7Xm+AxCECgYEApBX4Be1S3OuMi4IB3BaR\nEzNz8UKHcgtt9+7dCyWfuNpSePidNt2rkzKgbCqujTto3b4wTXmgqR47o3+tcVt0\nXDYTcV5yRmT6Eq2oc/NXpC2mf/nD5YloO2ydf/JcOA6MtPSooEhJMuQkrL136ZiK\nXYo5f8yTTgBd/uq2oSw8ccs=\n-----END PRIVATE KEY-----\n",
"client_email": "connect@whatsapp-test-spam.iam.gserviceaccount.com",
"client_id": "104792924967753601676",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/connect%40whatsapp-test-spam.iam.gserviceaccount.com"
})
# NOTE(security): the service-account credentials constructed above embed a
# hard-coded private key directly in source.  That key must be treated as
# leaked: rotate it, and load credentials from a key file or environment
# variable instead of committing them.
gc = gspread.authorize(credentials)
sht1 = gc.open_by_key('1hwkWDvdVYsykSlRlRmLnreg6ROzeSRu4UmG3XOyX1wY')

# Read the first two columns; rows appear to be [username, flag-string].
values = sht1.values_get('A1:B1000')['values']

# Look for the current OS user's row; if several rows match, the last wins.
user = getpass.getuser()
on = None
for row in values:
    if row[0] == user:
        # NOTE(review): row[1] raises IndexError when the flag cell is
        # empty (the Sheets API omits trailing empty cells) — confirm the
        # sheet always has both columns filled.
        on = (row[1] == '1')

# No row for this user yet: append one with the flag set to '1'.
# Was `on == None`; `is None` is the correct identity test for None.
if on is None:
    next_row = len(values) + 1
    pos = 'A' + str(next_row) + ':B' + str(next_row)
    sht1.values_append(range=pos,
                       params={'valueInputOption': 'RAW'},
                       body={'values': [[user, '1']]})
    on = True
| 74.833333
| 1,752
| 0.838053
| 250
| 3,143
| 10.44
| 0.656
| 0.019923
| 0.02069
| 0.024138
| 0.046743
| 0.024521
| 0.024521
| 0
| 0
| 0
| 0
| 0.1158
| 0.057588
| 3,143
| 42
| 1,753
| 74.833333
| 0.765361
| 0.029271
| 0
| 0.066667
| 0
| 0.066667
| 0.772921
| 0.616497
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0.1
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
fb17483118db0cf0a2fb83d4135bbdd4e1999f00
| 147
|
py
|
Python
|
23/00/4.py
|
pylangstudy/201707
|
c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6
|
[
"CC0-1.0"
] | null | null | null |
23/00/4.py
|
pylangstudy/201707
|
c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6
|
[
"CC0-1.0"
] | 46
|
2017-06-30T22:19:07.000Z
|
2017-07-31T22:51:31.000Z
|
23/00/4.py
|
pylangstudy/201707
|
c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6
|
[
"CC0-1.0"
] | null | null | null |
class MyClass:
    """Wrap a value and expose integer conversion via __int__."""

    def __init__(self, value):
        self.__value = value

    def __int__(self):
        # int() on the wrapped value; floats are truncated toward zero.
        return int(self.__value)


c = MyClass(1.23)
print(int(c))
| 24.5
| 51
| 0.693878
| 23
| 147
| 3.913043
| 0.521739
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.163265
| 147
| 5
| 52
| 29.4
| 0.707317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.6
| 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
34a746450643088ba6522820c56114808e874029
| 11,430
|
py
|
Python
|
ideas/models_sub_net_ls.py
|
carlov93/predictive_maintenance
|
eb00b82bde02668387d0308571296a82f78abef6
|
[
"MIT"
] | 1
|
2020-02-11T07:50:33.000Z
|
2020-02-11T07:50:33.000Z
|
ideas/models_sub_net_ls.py
|
carlov93/predictive_maintenance
|
eb00b82bde02668387d0308571296a82f78abef6
|
[
"MIT"
] | 12
|
2020-03-24T18:16:51.000Z
|
2022-03-12T00:15:55.000Z
|
ideas/models_sub_net_ls.py
|
carlov93/predictive_maintenance
|
eb00b82bde02668387d0308571296a82f78abef6
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import csv
class AnalysisLayer(nn.Module):
    """Identity layer with a side effect: each forward pass stores a
    detached snapshot of its input in the module-global ``latent_space``
    so callers can inspect the activation at this point in a Sequential."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        global latent_space
        # Detach so the stored snapshot does not keep the autograd graph alive.
        latent_space = x.detach()
        return x
class LstmMse_LatentSpace(nn.Module):
    """LSTM model with two fully connected heads: a prediction head and a
    latent-space analysis head whose tanh activation is captured via the
    AnalysisLayer side channel (module-global ``latent_space``).
    """
    def __init__(self, batch_size, input_dim, n_hidden_lstm, n_layers,
                 dropout_rate_fc, dropout_rate_lstm, n_hidden_fc_prediction, n_hidden_fc_ls_analysis):
        super(LstmMse_LatentSpace, self).__init__()
        # Attributes for LSTM Network
        self.input_dim = input_dim
        self.n_hidden_lstm = n_hidden_lstm
        self.n_layers = n_layers
        self.batch_size = batch_size
        self.dropout_rate_fc = dropout_rate_fc
        self.dropout_rate_lstm = dropout_rate_lstm
        self.n_hidden_fc_prediction = n_hidden_fc_prediction
        self.n_hidden_fc_ls_analysis = n_hidden_fc_ls_analysis
        # Updated on every forward() call with the latest latent snapshot.
        self.current_latent_space = None
        # define structure of model
        self.sharedlayer = nn.LSTM(input_size = self.input_dim,
                                   hidden_size = self.n_hidden_lstm,
                                   num_layers = self.n_layers,
                                   batch_first = True,
                                   dropout = self.dropout_rate_lstm)
        self.prediction_network = nn.Sequential(nn.Linear(self.n_hidden_lstm, self.n_hidden_fc_prediction),
                                                nn.Dropout(p=self.dropout_rate_fc),
                                                nn.Tanh(),
                                                nn.Linear(self.n_hidden_fc_prediction, self.input_dim)
                                                )
        # AnalysisLayer stashes the post-Tanh activation into the global
        # ``latent_space`` as a side effect of the forward pass.
        self.latent_space_analyse_network = nn.Sequential(nn.Linear(self.n_hidden_lstm, self.n_hidden_fc_ls_analysis),
                                                          nn.Dropout(p=self.dropout_rate_fc),
                                                          nn.Tanh(),
                                                          AnalysisLayer(),
                                                          nn.Linear(self.n_hidden_fc_ls_analysis, self.input_dim)
                                                          )
    def forward(self, input_data, hidden):
        """Run one forward pass; returns (prediction, latent-head output)."""
        # Forward propagate LSTM
        # LSTM in Pytorch return two results: the first one usually called output
        # and the second one (hidden_state, cell_state).
        lstm_out, (hidden_state, cell_state)= self.sharedlayer(input_data, hidden)
        # LSTM returns as output all the hidden_states for all the timesteps (seq),
        # in other words all of the hidden states throughout the sequence.
        # Thus we have to select the output from the last sequence (last hidden state of sequence).
        # Length of input data can vary
        length_seq = input_data.size()[1]
        last_out = lstm_out[:,length_seq-1,:]
        # Define forward pass through both sub-networks
        prediction = self.prediction_network(last_out)
        _ = self.latent_space_analyse_network(last_out)
        # Save latent space
        # (``latent_space`` was just set as a module-global by AnalysisLayer
        # during the call above — ordering matters here.)
        self.current_latent_space = latent_space
        return prediction, _
    def init_hidden(self):
        # This method is for initializing hidden state as well as cell state
        # We need to detach the hidden state to prevent exploding/vanishing gradients
        h0 = torch.zeros(self.n_layers, self.batch_size, self.n_hidden_lstm, requires_grad=False)
        c0 = torch.zeros(self.n_layers, self.batch_size, self.n_hidden_lstm, requires_grad=False)
        return [t for t in (h0, c0)]
class LstmMle_LatentSpace(nn.Module):
    """LSTM with two fully connected heads trained via maximum likelihood:
    each head emits a point estimate (y_hat) and an uncertainty term
    (tau, scaled by K).  The latent-space head's tanh activation is stored
    (detached) in ``current_latent_space`` on every forward pass.
    """
    def __init__(self, batch_size, input_dim, n_hidden_lstm, n_layers,
                 dropout_rate_fc, dropout_rate_lstm, n_hidden_fc_prediction, n_hidden_fc_ls_analysis, K):
        super(LstmMle_LatentSpace, self).__init__()
        # Attributes for LSTM Network
        self.input_dim = input_dim
        self.n_hidden_lstm = n_hidden_lstm
        self.n_layers = n_layers
        self.batch_size = batch_size
        self.dropout_rate_fc = dropout_rate_fc
        self.dropout_rate_lstm = dropout_rate_lstm
        self.n_hidden_fc_prediction = n_hidden_fc_prediction
        self.n_hidden_fc_ls_analysis = n_hidden_fc_ls_analysis
        self.current_latent_space = None
        self.K = K
        # define structure of model
        self.sharedlayer = nn.LSTM(input_size = self.input_dim,
                                   hidden_size = self.n_hidden_lstm,
                                   num_layers = self.n_layers,
                                   batch_first = True,
                                   dropout = self.dropout_rate_lstm)
        # define structure of sub network for prediction purpose
        self.p_fc1 = nn.Linear(self.n_hidden_lstm, self.n_hidden_fc_prediction)
        self.p_dropout = nn.Dropout(p=self.dropout_rate_fc)
        self.p_fc_y_hat = nn.Linear(self.n_hidden_fc_prediction, self.input_dim)
        self.p_fc_tau = nn.Linear(self.n_hidden_fc_prediction, self.input_dim)
        # define structure of sub network for latent space analysis
        self.ls_fc1 = nn.Linear(self.n_hidden_lstm, self.n_hidden_fc_ls_analysis)
        self.ls_dropout = nn.Dropout(p=self.dropout_rate_fc)
        # BUG FIX: a trailing comma here previously made ls_analysis a
        # 1-tuple (AnalysisLayer(),) instead of a module, so it was never
        # registered.  Note forward() still does not call it — the latent
        # snapshot is taken by detaching ls_out directly.
        self.ls_analysis = AnalysisLayer()
        self.ls_fc_y_hat = nn.Linear(self.n_hidden_fc_ls_analysis, self.input_dim)
        self.ls_fc_tau = nn.Linear(self.n_hidden_fc_ls_analysis, self.input_dim)
    def forward(self, input_data, hidden):
        """Return ([p_y_hat, p_tau * K], [ls_y_hat, ls_tau * K])."""
        # Forward propagate the LSTM; it returns the per-timestep outputs
        # and the final (hidden_state, cell_state) pair.
        lstm_out, (hidden_state, cell_state)= self.sharedlayer(input_data, hidden)
        # Select the output of the last timestep (input length can vary).
        length_seq = input_data.size()[1]
        last_out = lstm_out[:,length_seq-1,:]
        # Prediction head: FC -> dropout -> tanh -> (y_hat, tau).
        p_out = self.p_fc1(last_out)
        p_out = self.p_dropout(p_out)
        p_out = torch.tanh(p_out)
        p_y_hat = self.p_fc_y_hat(p_out)
        p_tau = self.p_fc_tau(p_out)
        # Latent-space head: FC -> dropout -> tanh.
        ls_out = self.ls_fc1(last_out)
        ls_out = self.ls_dropout(ls_out)
        ls_out = torch.tanh(ls_out)
        # Store current latent space (detached so it does not retain the
        # autograd graph).  The ``global`` declaration is kept from the
        # original but nothing assigns the global here.
        global latent_space
        self.current_latent_space = ls_out.detach()
        # Continue forward pass through the latent head's output layers.
        ls_y_hat = self.ls_fc_y_hat(ls_out)
        ls_tau = self.ls_fc_tau(ls_out)
        _ = [ls_y_hat, ls_tau * self.K]
        return [p_y_hat, p_tau * self.K], _
    def init_hidden(self):
        # Fresh zero hidden/cell states, detached so gradients cannot flow
        # across batch boundaries (prevents exploding/vanishing gradients).
        h0 = torch.zeros(self.n_layers, self.batch_size, self.n_hidden_lstm, requires_grad=False)
        c0 = torch.zeros(self.n_layers, self.batch_size, self.n_hidden_lstm, requires_grad=False)
        return [t for t in (h0, c0)]
class LstmMle_LatentSpace_new(nn.Module):
    """Variant of LstmMle_LatentSpace with a single prediction head and a
    Sequential latent-space analysis network (via AnalysisLayer) that is
    fed from the LSTM cell state instead of the output sequence.
    """
    def __init__(self, batch_size, input_dim, n_hidden_lstm, n_layers,
                 dropout_rate_fc, dropout_rate_lstm, n_hidden_fc_prediction, n_hidden_fc_ls_analysis, K):
        # BUG FIX: this previously called super(LstmMle_LatentSpace, self)
        # — the wrong class.  Since LstmMle_LatentSpace_new does not inherit
        # from LstmMle_LatentSpace, that call raised
        # "TypeError: super(type, obj): obj must be an instance" on construction.
        super(LstmMle_LatentSpace_new, self).__init__()
        # Attributes for LSTM Network
        self.input_dim = input_dim
        self.n_hidden_lstm = n_hidden_lstm
        self.n_layers = n_layers
        self.batch_size = batch_size
        self.dropout_rate_fc = dropout_rate_fc
        self.dropout_rate_lstm = dropout_rate_lstm
        self.n_hidden_fc_prediction = n_hidden_fc_prediction
        self.n_hidden_fc_ls_analysis = n_hidden_fc_ls_analysis
        self.current_latent_space = None
        self.K = K
        # define structure of model
        self.sharedlayer = nn.LSTM(input_size = self.input_dim,
                                   hidden_size = self.n_hidden_lstm,
                                   num_layers = self.n_layers,
                                   batch_first = True,
                                   dropout = self.dropout_rate_lstm)
        # define structure of sub network for prediction purpose
        self.fc1 = nn.Linear(self.n_hidden_lstm, self.n_hidden_fc_prediction)
        self.dropout = nn.Dropout(p=self.dropout_rate_fc)
        self.fc_y_hat = nn.Linear(self.n_hidden_fc_prediction, self.input_dim)
        self.fc_tau = nn.Linear(self.n_hidden_fc_prediction, self.input_dim)
        # define structure of sub network for latent space analysis;
        # AnalysisLayer publishes the post-Tanh activation into the
        # module-global ``latent_space`` as a forward-pass side effect.
        self.latent_space_analyse_network = nn.Sequential(nn.Linear(self.n_hidden_lstm, self.n_hidden_fc_ls_analysis),
                                                          nn.Dropout(p=self.dropout_rate_fc),
                                                          nn.Tanh(),
                                                          AnalysisLayer(),
                                                          nn.Linear(self.n_hidden_fc_ls_analysis, self.input_dim)
                                                          )
    def forward(self, input_data, hidden):
        """Return ([y_hat, tau * K], latent-head output)."""
        # Forward propagate the LSTM; it returns the per-timestep outputs
        # and the final (hidden_state, cell_state) pair.
        lstm_out, (hidden_state, cell_state)= self.sharedlayer(input_data, hidden)
        # Select the output of the last timestep (input length can vary).
        length_seq = input_data.size()[1]
        last_out = lstm_out[:,length_seq-1,:]
        # NOTE(review): for a batch_first LSTM, cell_state is shaped
        # (num_layers, batch, hidden) — indexing dim 1 by sequence position
        # looks wrong and will fail whenever length_seq >= batch size.
        # Confirm the intended tensor before relying on this path.
        last_cell_state = cell_state[:,length_seq-1,:]
        print(last_cell_state)  # TODO: debug leftover — consider removing
        # Prediction head: FC -> dropout -> tanh -> (y_hat, tau).
        out = self.fc1(last_out)
        out = self.dropout(out)
        out = torch.tanh(out)
        y_hat = self.fc_y_hat(out)
        tau = self.fc_tau(out)
        # Forward pass through sub network for latent space analysis
        _ = self.latent_space_analyse_network(last_cell_state)
        # Save latent space (set as a module-global by AnalysisLayer during
        # the call above — ordering matters here).
        self.current_latent_space = latent_space
        return [y_hat, tau * self.K], _
    def init_hidden(self):
        # Fresh zero hidden/cell states, detached so gradients cannot flow
        # across batch boundaries (prevents exploding/vanishing gradients).
        h0 = torch.zeros(self.n_layers, self.batch_size, self.n_hidden_lstm, requires_grad=False)
        c0 = torch.zeros(self.n_layers, self.batch_size, self.n_hidden_lstm, requires_grad=False)
        return [t for t in (h0, c0)]
| 50.131579
| 118
| 0.613823
| 1,493
| 11,430
| 4.375084
| 0.089082
| 0.061084
| 0.065677
| 0.041794
| 0.877526
| 0.867422
| 0.85196
| 0.851347
| 0.850582
| 0.827159
| 0
| 0.003344
| 0.31986
| 11,430
| 228
| 119
| 50.131579
| 0.836892
| 0.213911
| 0
| 0.614865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074324
| false
| 0
| 0.02027
| 0
| 0.168919
| 0.006757
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
34b4f88a97323bd7b5c670a15262e008ca007a8c
| 60
|
py
|
Python
|
tspsolver/ga/__init__.py
|
samueljackson92/tsp-solver
|
4f6403b40c7ba9062a9b7ffdde5e7d594163bc2f
|
[
"MIT"
] | 2
|
2018-12-03T14:37:48.000Z
|
2020-12-01T23:13:56.000Z
|
tspsolver/ga/__init__.py
|
samueljackson92/tsp-solver
|
4f6403b40c7ba9062a9b7ffdde5e7d594163bc2f
|
[
"MIT"
] | null | null | null |
tspsolver/ga/__init__.py
|
samueljackson92/tsp-solver
|
4f6403b40c7ba9062a9b7ffdde5e7d594163bc2f
|
[
"MIT"
] | null | null | null |
from population_generation import SimplePopulationGenerator
| 30
| 59
| 0.933333
| 5
| 60
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 60
| 1
| 60
| 60
| 0.982143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
34bf5038ba8f51d2841bc3085e229e42027072f8
| 12,469
|
py
|
Python
|
test/functions_test.py
|
aHeraud/cgp-tetris
|
a3483b279bf0bc53edcb3a871873dd576a33c01c
|
[
"MIT"
] | 7
|
2018-11-11T17:46:23.000Z
|
2021-03-30T07:06:59.000Z
|
test/functions_test.py
|
aHeraud/cgp-tetris
|
a3483b279bf0bc53edcb3a871873dd576a33c01c
|
[
"MIT"
] | null | null | null |
test/functions_test.py
|
aHeraud/cgp-tetris
|
a3483b279bf0bc53edcb3a871873dd576a33c01c
|
[
"MIT"
] | 1
|
2018-11-16T05:30:05.000Z
|
2018-11-16T05:30:05.000Z
|
import sys
from os import getcwd
sys.path.append(getcwd()) # if run from root
sys.path.append(getcwd() + '/..') # if run from test/
from cgp.functions import mathematics as mat
from cgp.functions import support as supp
from cgp.functions import lists
import numpy as np
import unittest
# TODO: np.subtract '-' is deprecated for arrays. strange.
# TODO: test function.support.min_shape
class TestSupport(unittest.TestCase):
    """Tests for the cgp.functions.support helpers."""

    def test_pass_through(self):
        # Plain Python scalars are scalar; any ndarray (even 1-element) is not.
        cases = [
            (1, True),
            (np.array([1]), False),
            (np.array([[1], [2]]), False),
            (np.array([[1], [2]]), False),
            (np.array([1, 2, 3]), False),
        ]
        for value, expected in cases:
            self.assertEqual(expected, supp.is_scalar(value))

    def test_min_dim(self):
        # min_dim reduces two array shapes to a common (list) dimension.
        cases = [
            (np.array([1]), np.array([1, 2]), [1]),
            (np.array([[1], [1]]), np.array([1, 2]), [2]),
        ]
        for a, b, expected in cases:
            self.assertListEqual(expected, supp.min_dim(a, b))
class TestList(unittest.TestCase):
    """Tests for cgp.functions.lists.

    Pattern across these tests: scalar inputs pass through unchanged and
    1-element arrays collapse to scalars; the trailing float arguments appear
    to be parameters in [-1, 1] steering each operation (TODO confirm against
    the lists module).
    """

    def test_split_before(self):
        # Scalar passes through unchanged.
        inp = 1
        exp = 1
        act = lists.split_before(inp, 0, 0)
        self.assertEqual(exp, act)
        # 1-element array collapses to a scalar.
        inp = np.array([1])
        exp = 1
        act = lists.split_before(inp, 0, 0)
        self.assertEqual(exp, act)
        # Parameter -1.0 keeps only the first element; 1.0 keeps everything.
        inp = np.array([1, 2, 3])
        exp = np.array([1])
        act = lists.split_before(inp, 0, -1.0)
        self.assertTrue(np.array_equal(exp, act))
        inp = np.array([1, 2, 3])
        exp = np.array([1, 2, 3])
        act = lists.split_before(inp, 0, 1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_split_after(self):
        inp = 1
        exp = 1
        act = lists.split_after(inp, 0, 0)
        self.assertEqual(exp, act)
        inp = np.array([1])
        exp = 1
        act = lists.split_after(inp, 0, -1.0)
        self.assertEqual(exp, act)
        # Parameter -1.0 keeps everything; 1.0 keeps only the last element.
        inp = np.array([1, 2, 3])
        exp = np.array([1, 2, 3])
        act = lists.split_after(inp, 0, -1.0)
        self.assertTrue(np.array_equal(exp, act))
        inp = np.array([1, 2, 3])
        exp = np.array([3])
        act = lists.split_after(inp, 0, 1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_range_in(self):
        inp = 1
        exp = 1
        act = lists.range_in(inp, 0, 0)
        self.assertEqual(exp, act)
        inp = np.array([1])
        exp = 1
        act = lists.range_in(inp, 0, 0)
        self.assertEqual(exp, act)
        # (0, 0) selects the middle element; (-1, 1) the whole array.
        inp = np.array([1, 2, 3])
        exp = np.array([2])
        act = lists.range_in(inp, 0, 0)
        self.assertTrue(np.array_equal(exp, act))
        inp = np.array([1, 2, 3])
        exp = np.array([1, 2, 3])
        act = lists.range_in(inp, -1.0, 1.0)
        self.assertTrue(np.array_equal(exp, act))
        # Reversed bounds still yield the whole array.
        inp = np.array([1, 2, 3])
        exp = np.array([1, 2, 3])
        act = lists.range_in(inp, 1, -1)
        self.assertTrue(np.array_equal(exp, act))

    def test_index_y(self):
        inp = 1
        exp = 1
        act = lists.index_y(inp, 0, 0)
        self.assertEqual(exp, act)
        inp = np.array([1])
        exp = 1
        act = lists.index_y(inp, 0, 0)
        self.assertEqual(exp, act)
        # y = -1.0 indexes the first element, y = 1.0 the last.
        inp = np.array([1, 2, 3])
        exp = 1
        act = lists.index_y(inp, -1.0, 0)
        self.assertTrue(exp == act)
        inp = np.array([1, 2, 3])
        exp = 3
        act = lists.index_y(inp, 1.0, 1)
        self.assertTrue(exp == act)

    def test_index_p(self):
        inp = 1
        exp = 1
        act = lists.index_p(inp, 0, 0)
        self.assertEqual(exp, act)
        inp = np.array([1])
        exp = 1
        act = lists.index_p(inp, 0, 0)
        self.assertEqual(exp, act)
        # Same indexing convention as index_y but driven by the p argument.
        inp = np.array([1, 2, 3])
        exp = 1
        act = lists.index_p(inp, -1.0, -1.0)
        self.assertTrue(exp == act)
        inp = np.array([1, 2, 3])
        exp = 3
        act = lists.index_p(inp, 1.0, 1.0)
        self.assertTrue(exp == act)

    def test_vectorize(self):
        inp = 1
        exp = 1
        act = lists.vectorize(inp, 0, 0)
        self.assertEqual(exp, act)
        inp = np.array([1])
        exp = 1
        act = lists.vectorize(inp, 0, 0)
        self.assertEqual(exp, act)
        # A 2-D array is flattened row-major into a 1-D vector.
        inp = np.array([[1, 2, 3], [4, 5, 6]])
        exp = [1, 2, 3, 4, 5, 6]
        act = lists.vectorize(inp, -1.0, -1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_f_first(self):
        inp = np.array([[1, 2, 3], [4, 5, 6]])
        exp = 1
        act = lists.f_first(inp, -1.0, -1.0)
        # NOTE(review): assertTrue(exp, act) treats `act` as the failure
        # message, so this passes whenever exp is truthy — it was probably
        # meant to be assertEqual(exp, act).
        self.assertTrue(exp, act)

    def test_f_last(self):
        inp = np.array([[1, 2, 3], [4, 5, 6]])
        exp = 1
        act = lists.f_last(inp, -1.0, -1.0)
        # NOTE(review): same assertTrue(exp, act) misuse as above; also
        # `exp = 1` looks copy-pasted from test_f_first — verify what f_last
        # is expected to return before tightening the assertion.
        self.assertTrue(exp, act)

    def test_differences(self):
        # Pairwise differences over the flattened [1..6] are all ones.
        inp = np.array([[1, 2, 3], [4, 5, 6]])
        exp = [1, 1, 1, 1, 1]
        act = lists.differences(inp, -1.0, -1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_push_back(self):
        # Appending a scalar to a scalar produces a 2-vector ...
        x = 1
        y = -1
        exp = [1, -1]
        act = lists.push_back(x, y, -1.0)
        self.assertTrue(np.array_equal(exp, act))
        # ... and prepending a scalar to a list extends it.
        x = 1
        y = [-1, 2]
        exp = [1, -1, 2]
        act = lists.push_back(x, y, -1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_set_x(self):
        # Every element of y is replaced by the scalar x.
        x = 5
        y = [1, 3, 4]
        exp = [5, 5, 5]
        act = lists.set_x(x, y, -1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_vec_from_double(self):
        # The scalar x becomes a 1-element vector.
        x = 5
        y = [1, 3, 4]
        exp = [5]
        act = lists.vec_from_double(x, y, -1.0)
        self.assertTrue(np.array_equal(exp, act))

    def test_constvectord(self):
        # Output takes the shape of x, filled with the parameter p.
        x = 5
        y = [1, 3, 4]
        p = 0.1
        exp = [0.1]
        act = lists.constvectord(x, y, p)
        self.assertTrue(np.array_equal(exp, act))
        x = np.array([[1,2],[3,4]])
        y = [1, 3, 4]
        p = 0.1
        exp = [[0.1,0.1],[0.1,0.1]]
        act = lists.constvectord(x, y, p)
        self.assertTrue(np.array_equal(exp, act))

    def test_zeros(self):
        # Like constvectord but always filled with zeros.
        x = 5
        y = [1, 3, 4]
        p = 0.1
        exp = [0]
        act = lists.zeros(x, y, p)
        self.assertTrue(np.array_equal(exp, act))
        x = np.array([[1,2],[3,4]])
        y = [1, 3, 4]
        p = 0.1
        exp = [[0,0],[0,0]]
        act = lists.zeros(x, y, p)
        self.assertTrue(np.array_equal(exp, act))

    def test_ones(self):
        # Like constvectord but always filled with ones.
        x = 5
        y = [1, 3, 4]
        p = 0.1
        exp = [1]
        act = lists.ones(x, y, p)
        self.assertTrue(np.array_equal(exp, act))
        x = np.array([[1,2],[3,4]])
        y = [1, 3, 4]
        p = 0.1
        exp = [[1,1],[1,1]]
        act = lists.ones(x, y, p)
        self.assertTrue(np.array_equal(exp, act))
class TestMath(unittest.TestCase):
    """Tests for cgp.functions.mathematics.

    Array tests first resize both operands to the common dimension returned by
    supp.min_dim. BUG FIX applied throughout: several tests unpacked the
    resized arrays into ``x, p`` instead of ``x, y`` (copy-paste from the
    cpow tests), leaving ``y`` un-resized; in test_ypow_array the expectation
    then used the stale shapes and ``act`` was called with the stray ``p``.
    """

    def test_add_int(self):
        x, y = 5, 10
        exp = (x + y) / 2.0
        act = mat.add(x, y, 0)
        self.assertEqual(exp, act)

    def test_add_np_array_0(self):
        x, y = np.random.rand(3, 2), np.random.rand(3, 2)
        exp = (x + y) / 2.0
        act = mat.add(x, y, 0)
        self.assertTrue(np.array_equal(exp, act))

    def test_add_np_array_1(self):
        # Mismatched shapes are first resized to the common min dimension.
        x, y = np.random.rand(2, 3), np.random.rand(3, 2)
        dim = supp.min_dim(x, y)
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = (x + y) / 2.0
        act = mat.add(x, y, 0)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_aminus_int(self):
        x, y = 10, 5
        exp = 2.5  # np.abs(x - y) / 2.0
        act = mat.aminus(x, y, 0)
        self.assertEqual(exp, act)

    def test_aminus_np_array_0(self):
        # TODO: re-enable once the deprecated np.subtract-on-arrays issue
        # noted at the top of the file is resolved.
        pass
        # x, y = np.random.rand(3, 2), np.random.rand(3, 2)
        # np.subtract(x, y)
        # exp = np.abs(x - y) / 2.0
        # act = mat.aminus(x, y, 0)
        # self.assertEqual(exp, act)

    def test_aminus_np_array_1(self):
        # TODO: see test_aminus_np_array_0.
        pass
        # x, y = np.random.rand(10, 5), np.random.rand(3, 2)
        # exp = np.abs(x - y) / 2.0
        # act = mat.aminus(x, y, 0)
        # self.assertEqual(exp, act)

    def test_mult_int(self):
        x, y = 10, 5
        exp = x * y
        act = mat.mult(x, y, 0)
        self.assertEqual(exp, act)

    def test_mult_np_array_0(self):
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        dim = supp.min_dim(x, y)
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = np.multiply(x, y)
        act = mat.mult(x, y, 0)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_cmult_int(self):
        # cmult multiplies x by the parameter p; y is ignored.
        x, y, p = 10, 5, 100
        exp = x * p
        act = mat.cmult(x, y, p)
        self.assertEqual(exp, act)

    def test_cmult_np_array_0(self):
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        p = 10
        dim = supp.min_dim(x, y)
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = x * p
        act = mat.cmult(x, y, p)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_cmult_np_array_1(self):
        # Here resizing x and p together is intentional: p is an array.
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        p = np.random.rand(100, 10)
        dim = supp.min_dim(x, p)
        x, p = np.resize(x, dim), np.resize(p, dim)
        exp = x * p
        act = mat.cmult(x, y, p)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_inv_int_0(self):
        x, y = 1, 0
        exp = 1 / x
        act = mat.inv(x, y, 0)
        self.assertEqual(exp, act)

    def test_inv_int_1(self):
        # Inverting zero is defined to return zero, not raise.
        x, y = 0, 0
        exp = 0
        act = mat.inv(x, y, 0)
        self.assertEqual(exp, act)

    def test_inv_np_array_0(self):
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        dim = supp.min_dim(x, y)
        # BUG FIX: was `x, p = ...`, leaving y un-resized.
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = 1 / x
        act = mat.inv(x, y, 0)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_inv_np_array_1(self):
        x = np.zeros(5)
        exp = np.zeros(5)
        act = mat.inv(x, 0, 0)
        self.assertTrue(np.array_equal(exp, act))

    def test_abs_int(self):
        x, y = -1, -10
        exp = 1
        act = mat.abs(x, y, 0)
        self.assertEqual(exp, act)

    def test_abs_np_array_0(self):
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        dim = supp.min_dim(x, y)
        # BUG FIX: was `x, p = ...`, leaving y un-resized.
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = np.abs(x)
        act = mat.abs(x, y, 0)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_sqrt_int(self):
        # sqrt operates on the absolute value, so negatives are legal input.
        x, y = -100, -10
        exp = np.sqrt(np.abs(x))
        act = mat.sqrt(x, y, 0)
        self.assertEqual(exp, act)

    def test_sqrt_array(self):
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        dim = supp.min_dim(x, y)
        # BUG FIX: was `x, p = ...`, leaving y un-resized.
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = np.sqrt(x)
        act = mat.sqrt(x, y, 0)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_cpow_int(self):
        x, y, p = -10, -10, 2
        exp = np.abs(x) ** (p + 1)
        act = mat.cpow(x, y, p)
        self.assertEqual(exp, act)

    def test_cpow_array(self):
        # Resizing x and p together is intentional here (p is the exponent array).
        x, p, y = np.random.rand(5, 10), np.random.rand(3, 2), 0
        dim = supp.min_dim(x, p)
        x, p = np.resize(x, dim), np.resize(p, dim)
        exp = np.abs(x) ** (p + 1)
        act = mat.cpow(x, y, p)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)

    def test_ypow_int(self):
        x, y = 10, 510
        exp = np.abs(10) ** np.abs(510)
        act = mat.ypow(x, y, 0)
        self.assertEqual(exp, act)

    def test_ypow_array(self):
        x, y = np.random.rand(5, 10), np.random.rand(3, 2)
        dim = supp.min_dim(x, y)
        # BUG FIX: was `x, p = ...` so exp used the un-resized y and act was
        # called with the stray p; resize y and pass 0 as the (unused)
        # parameter, matching test_ypow_int.
        x, y = np.resize(x, dim), np.resize(y, dim)
        exp = np.abs(x) ** np.abs(y)
        act = mat.ypow(x, y, 0)
        equal = np.equal(exp, act).all()
        self.assertEqual(equal, True)
# Allow running this test module directly: `python functions_test.py`.
if __name__ == '__main__':
    unittest.main()
| 28.930394
| 64
| 0.501724
| 1,999
| 12,469
| 3.043522
| 0.052026
| 0.023669
| 0.053912
| 0.100099
| 0.858317
| 0.846318
| 0.826759
| 0.794214
| 0.782544
| 0.723866
| 0
| 0.055589
| 0.336354
| 12,469
| 430
| 65
| 28.997674
| 0.679637
| 0.034165
| 0
| 0.674931
| 0
| 0
| 0.000915
| 0
| 0
| 0
| 0
| 0.002326
| 0.179063
| 1
| 0.110193
| false
| 0.008264
| 0.019284
| 0
| 0.137741
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
34d580e831c14e5d1f3a3f31802ad0dbafe8d9e3
| 32
|
py
|
Python
|
towise/__init__.py
|
argedor/TowisePythonAPI
|
95026c5f9c80aa9ccca36a625c498666e686c1b8
|
[
"MIT"
] | null | null | null |
towise/__init__.py
|
argedor/TowisePythonAPI
|
95026c5f9c80aa9ccca36a625c498666e686c1b8
|
[
"MIT"
] | null | null | null |
towise/__init__.py
|
argedor/TowisePythonAPI
|
95026c5f9c80aa9ccca36a625c498666e686c1b8
|
[
"MIT"
] | null | null | null |
from towise.Towise import Towise
| 32
| 32
| 0.875
| 5
| 32
| 5.6
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 32
| 1
| 32
| 32
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
34ee18ecbb200448add7d2da022c22582b0145f7
| 37
|
py
|
Python
|
marrow/schema/validation/testing.py
|
marrow/schema
|
e2b16ec45329a646156388936c2e779ddcd8fa77
|
[
"MIT"
] | 3
|
2016-09-03T07:00:50.000Z
|
2021-06-19T18:52:56.000Z
|
marrow/schema/validation/testing.py
|
marrow/schema
|
e2b16ec45329a646156388936c2e779ddcd8fa77
|
[
"MIT"
] | 6
|
2015-01-23T19:32:04.000Z
|
2019-10-23T15:36:48.000Z
|
marrow/schema/validation/testing.py
|
marrow/schema
|
e2b16ec45329a646156388936c2e779ddcd8fa77
|
[
"MIT"
] | 2
|
2015-11-13T20:02:17.000Z
|
2018-01-30T12:01:47.000Z
|
from ..testing import ValidationTest
| 18.5
| 36
| 0.837838
| 4
| 37
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
34f633886e633283eb5275bebd3ddad1ff53833e
| 2,608
|
py
|
Python
|
dizoo/gfootball/envs/action/gfootball_action.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 464
|
2021-07-08T07:26:33.000Z
|
2022-03-31T12:35:16.000Z
|
dizoo/gfootball/envs/action/gfootball_action.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 177
|
2021-07-09T08:22:55.000Z
|
2022-03-31T07:35:22.000Z
|
dizoo/gfootball/envs/action/gfootball_action.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 92
|
2021-07-08T12:16:37.000Z
|
2022-03-31T09:24:41.000Z
|
from collections import namedtuple
import numpy as np
from ding.envs.common import EnvElement
class GfootballSpAction(EnvElement):
    """Discrete self-play action element for the gfootball environment.

    Defines a single 'action_type' key with 17 integer action ids (0..16).
    """

    _name = "gfootballSpAction"
    _action_keys = ['action_type']
    Action = namedtuple('Action', _action_keys)

    def _init(self, cfg):
        self.default_val = None
        num_actions = 17
        self.template = {
            'action_type': {
                'name': 'action_type',
                'shape': (num_actions, ),
                'value': {
                    'min': 0,
                    'max': num_actions - 1,
                    'dtype': int,
                    'dinfo': 'int value',
                },
                'env_value': 'type of action, refer to AtariEnv._action_set',
                # Actions are passed through unchanged in both directions.
                'to_agent_processor': lambda x: x,
                'from_agent_processor': lambda x: x,
                'necessary': True,
            }
        }
        self._shape = (num_actions, )
        self._value = {
            'min': 0,
            'max': num_actions - 1,
            'dtype': int,
            'dinfo': 'int value, action_meanings: []',
        }

    def _to_agent_processor(self, action):
        # Identity mapping: the env action id is the agent action id.
        return action

    def _from_agent_processor(self, action):
        return action

    # override
    def _details(self):
        return '\t'.join(self._action_keys)
class GfootballRawAction(EnvElement):
    '''
    Raw (default) action element for the gfootball environment: a single
    'action_type' key with 19 integer action ids (0..18).

    For the raw action set please reference
    <https://github.com/google-research/football/blob/master/gfootball/doc/observation.md#default-action-set>.
    '''

    _name = "gfootballRawAction"
    _action_keys = ['action_type']
    Action = namedtuple('Action', _action_keys)

    def _init(self, cfg):
        # NOTE(review): the sibling GfootballSpAction sets `self.default_val`
        # (no leading underscore) — confirm which attribute EnvElement expects.
        self._default_val = None
        num_actions = 19
        self.template = {
            'action_type': {
                'name': 'action_type',
                'shape': (num_actions, ),
                'value': {
                    'min': 0,
                    'max': num_actions - 1,
                    'dtype': int,
                    'dinfo': 'int value',
                },
                'env_value': 'type of action, refer to AtariEnv._action_set',
                # Actions are passed through unchanged in both directions.
                'to_agent_processor': lambda x: x,
                'from_agent_processor': lambda x: x,
                'necessary': True,
            }
        }
        self._shape = (num_actions, )
        self._value = {
            'min': 0,
            'max': num_actions - 1,
            'dtype': int,
            'dinfo': 'int value, action_meanings: []',
        }

    def _to_agent_processor(self, action):
        # Identity mapping: the env action id is the agent action id.
        return action

    def _from_agent_processor(self, action):
        return action

    # override
    def _details(self):
        return '\t'.join(self._action_keys)
| 27.744681
| 110
| 0.495399
| 249
| 2,608
| 4.951807
| 0.289157
| 0.090835
| 0.029197
| 0.038929
| 0.746148
| 0.739659
| 0.739659
| 0.739659
| 0.739659
| 0.739659
| 0
| 0.012492
| 0.38612
| 2,608
| 93
| 111
| 28.043011
| 0.757651
| 0.061733
| 0
| 0.712329
| 0
| 0
| 0.201566
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.109589
| false
| 0
| 0.041096
| 0.082192
| 0.342466
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9b490bde10834646c6c1d745e748b4fc28287274
| 200
|
py
|
Python
|
src/eduid_userdb/group_management/__init__.py
|
SUNET/eduid-userdb
|
5970880caf0b0e2bdee6c23869ef287acc87af2a
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
src/eduid_userdb/group_management/__init__.py
|
SUNET/eduid-userdb
|
5970880caf0b0e2bdee6c23869ef287acc87af2a
|
[
"BSD-2-Clause-FreeBSD"
] | 12
|
2015-08-28T12:05:32.000Z
|
2020-06-23T13:31:29.000Z
|
src/eduid_userdb/group_management/__init__.py
|
SUNET/eduid-userdb
|
5970880caf0b0e2bdee6c23869ef287acc87af2a
|
[
"BSD-2-Clause-FreeBSD"
] | 2
|
2016-10-24T06:37:33.000Z
|
2016-11-21T11:39:39.000Z
|
# -*- coding: utf-8 -*-
from eduid_userdb.group_management.db import GroupManagementInviteStateDB
from eduid_userdb.group_management.state import GroupInviteState, GroupRole
__author__ = 'lundberg'
| 28.571429
| 75
| 0.82
| 22
| 200
| 7.090909
| 0.727273
| 0.115385
| 0.192308
| 0.25641
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005525
| 0.095
| 200
| 6
| 76
| 33.333333
| 0.856354
| 0.105
| 0
| 0
| 0
| 0
| 0.045198
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9b640826dfa524bad0d5f8158e91adc7081023ab
| 90
|
py
|
Python
|
simuvex/simuvex/plugins/posix.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 86
|
2015-08-06T23:25:07.000Z
|
2022-02-17T14:58:22.000Z
|
simuvex/simuvex/plugins/posix.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 132
|
2015-09-10T19:06:59.000Z
|
2018-10-04T20:36:45.000Z
|
simuvex/simuvex/plugins/posix.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 80
|
2015-08-07T10:30:20.000Z
|
2020-03-21T14:45:28.000Z
|
# Compatibility shim: the posix plugin moved into angr; re-export everything
# from its new location so old `simuvex.plugins.posix` imports keep working.
# NOTE: Python 2 print statement — this module predates Python 3 syntax.
print '... Importing simuvex/plugins/posix.py ...'
from angr.state_plugins.posix import *
| 30
| 50
| 0.744444
| 12
| 90
| 5.5
| 0.833333
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 90
| 2
| 51
| 45
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0.466667
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
9bbd69c590046df63cad95e7d7c76a1f25212479
| 1,653
|
py
|
Python
|
tests/augmenters/test_augmentation_utils/test_color_jitter.py
|
abhisharsinha/similarity
|
0e5ae8c1757d6ef37dc1e5549af26bf15954b09e
|
[
"Apache-2.0"
] | null | null | null |
tests/augmenters/test_augmentation_utils/test_color_jitter.py
|
abhisharsinha/similarity
|
0e5ae8c1757d6ef37dc1e5549af26bf15954b09e
|
[
"Apache-2.0"
] | null | null | null |
tests/augmenters/test_augmentation_utils/test_color_jitter.py
|
abhisharsinha/similarity
|
0e5ae8c1757d6ef37dc1e5549af26bf15954b09e
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from tensorflow_similarity.augmenters.augmentation_utils import color_jitter
import tensorflow as tf
def create_img(width=32, height=32, channels=3):
    """Return a random image tensor of shape (width, height, channels) with values in [0, 1)."""
    shape = [width, height, channels]
    return tf.random.uniform(shape, 0, 1)
def test_random_color_jitter_multiplicative():
    """Multiplicative color jitter: p=1 must alter the image, p=0 must not,
    and the output shape must match the input in both cases."""
    img = create_img()
    # Jitter applied with probability 1 and probability 0 respectively.
    random_jitter_always = color_jitter.random_color_jitter(
        img, 1, 1, 1, impl="multiplicative"
    )
    random_jitter_never = color_jitter.random_color_jitter(
        img, 0, impl="multiplicative"
    )
    # check shapes
    assert (tf.shape(random_jitter_always) == tf.shape(img)).numpy().all()
    assert (tf.shape(random_jitter_never) == tf.shape(img)).numpy().all()
    # check if color jitter works: p=1 changes pixels, p=0 is the identity.
    # (Original comment said "blur" — copy-paste; also removed the unused
    # WIDTH/HEIGHT/CHANNELS locals.)
    assert not (random_jitter_always == img).numpy().all()
    assert (random_jitter_never == img).numpy().all()
def test_random_color_jitter_additive():
    """Additive color jitter: p=1 must alter the image, p=0 must not,
    and the output shape must match the input in both cases."""
    img = create_img()
    # Jitter applied with probability 1 and probability 0 respectively.
    # (Removed the unused WIDTH/HEIGHT/CHANNELS locals.)
    random_jitter_always = color_jitter.random_color_jitter(
        img, 1, 1, 1, impl="additive"  # won't make a difference between barlow/v1
    )
    random_jitter_never = color_jitter.random_color_jitter(
        img, 0, impl="additive"  # won't make a difference between barlow/v1
    )
    # check shapes
    assert (tf.shape(random_jitter_always) == tf.shape(img)).numpy().all()
    assert (tf.shape(random_jitter_never) == tf.shape(img)).numpy().all()
    # check if color jitter works: p=1 changes pixels, p=0 is the identity.
    assert not (random_jitter_always == img).numpy().all()
    assert (random_jitter_never == img).numpy().all()
| 30.611111
| 81
| 0.68542
| 224
| 1,653
| 4.834821
| 0.21875
| 0.142198
| 0.125577
| 0.110803
| 0.806094
| 0.761773
| 0.761773
| 0.761773
| 0.731302
| 0.731302
| 0
| 0.020347
| 0.197217
| 1,653
| 53
| 82
| 31.188679
| 0.79578
| 0.119177
| 0
| 0.555556
| 0
| 0
| 0.030408
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.083333
| false
| 0
| 0.083333
| 0.027778
| 0.194444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
32cffe81520b53844fba3372687f99ae9400341c
| 634
|
py
|
Python
|
tests/test_document_generator.py
|
cyraxjoe/pypfop
|
7cb716f33a591878825ab8f2757f3bebd24ebc08
|
[
"Apache-2.0"
] | 9
|
2015-03-11T07:42:50.000Z
|
2021-12-08T12:32:39.000Z
|
tests/test_document_generator.py
|
cyraxjoe/pypfop
|
7cb716f33a591878825ab8f2757f3bebd24ebc08
|
[
"Apache-2.0"
] | null | null | null |
tests/test_document_generator.py
|
cyraxjoe/pypfop
|
7cb716f33a591878825ab8f2757f3bebd24ebc08
|
[
"Apache-2.0"
] | 5
|
2019-06-05T17:22:28.000Z
|
2021-11-12T01:45:19.000Z
|
import unittest
class TestPublicProperties(unittest.TestCase):
    """Placeholder tests for the document generator's public properties."""

    def test_output_formats(self):
        # TODO: implement — currently a stub.
        pass

    def test_log(self):
        # TODO: implement — currently a stub.
        pass
class TestDocumentGenerator(unittest.TestCase):
    """Placeholder test suite outlining the DocumentGenerator surface to cover.

    Every method below is a stub (TODO: implement); the names map one-to-one
    onto the generator's public and private methods.
    """

    def test_object_structure(self):
        pass

    def test_from_fops(self):
        pass

    def test__setup_builder(self):
        pass

    def test__setup_log(self):
        pass

    def test__check_template(self):
        pass

    def test__check_out_format(self):
        pass

    def test__get_instparams(self):
        pass

    def test__generate_xslfo(self):
        pass

    def test_generate(self):
        pass
| 16.25641
| 47
| 0.64511
| 75
| 634
| 5.093333
| 0.36
| 0.201571
| 0.259162
| 0.353403
| 0.329843
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.290221
| 634
| 38
| 48
| 16.684211
| 0.848889
| 0
| 0
| 0.44
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.44
| false
| 0.44
| 0.04
| 0
| 0.56
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
fd0a8f7a5f804f3b40e1496139f74ca7608587ae
| 79
|
py
|
Python
|
deepreg/__init__.py
|
mathpluscode/DeepReg
|
80854094feafec998fa6237199066556c73f31f9
|
[
"Apache-2.0"
] | null | null | null |
deepreg/__init__.py
|
mathpluscode/DeepReg
|
80854094feafec998fa6237199066556c73f31f9
|
[
"Apache-2.0"
] | null | null | null |
deepreg/__init__.py
|
mathpluscode/DeepReg
|
80854094feafec998fa6237199066556c73f31f9
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
import deepreg.dataset
import deepreg.loss
import deepreg.model
| 15.8
| 22
| 0.822785
| 11
| 79
| 5.909091
| 0.636364
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.113924
| 79
| 4
| 23
| 19.75
| 0.914286
| 0.151899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fd23b9fe9fe3ed82ec3b9715d12e642566555c32
| 270
|
py
|
Python
|
roxar_api_utils/wells/__init__.py
|
RoxarAPI/roxar_api_utils
|
c9e46c39948e0c55b3f46b3b2456678fe37f2da8
|
[
"MIT"
] | null | null | null |
roxar_api_utils/wells/__init__.py
|
RoxarAPI/roxar_api_utils
|
c9e46c39948e0c55b3f46b3b2456678fe37f2da8
|
[
"MIT"
] | null | null | null |
roxar_api_utils/wells/__init__.py
|
RoxarAPI/roxar_api_utils
|
c9e46c39948e0c55b3f46b3b2456678fe37f2da8
|
[
"MIT"
] | null | null | null |
from .md_from_tvd import md_from_tvd
from .branchedwells import BranchedWells
from .wellcopy import copy_well
from .wellcopy import copy_wellbores
from .wellcopy import copy_log_curves
from .wellcopy import copy_log_runs
from .wellcopy import copy_trajectories
| 30
| 41
| 0.837037
| 39
| 270
| 5.512821
| 0.333333
| 0.27907
| 0.418605
| 0.511628
| 0.232558
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137037
| 270
| 8
| 42
| 33.75
| 0.922747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b5cb704842e4d9af1e09c45f1035d8481a1a3e4d
| 24,726
|
py
|
Python
|
test_graph/tests.py
|
suselrd/django-social-graph
|
798d5bce0c9e6bfa734d0e5a33a2cc6b8c2362da
|
[
"BSD-3-Clause"
] | null | null | null |
test_graph/tests.py
|
suselrd/django-social-graph
|
798d5bce0c9e6bfa734d0e5a33a2cc6b8c2362da
|
[
"BSD-3-Clause"
] | null | null | null |
test_graph/tests.py
|
suselrd/django-social-graph
|
798d5bce0c9e6bfa734d0e5a33a2cc6b8c2362da
|
[
"BSD-3-Clause"
] | null | null | null |
from time import sleep, time
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Group
from django import forms
from django.contrib.sites.models import Site
from django.test import TestCase
from social_graph.api import Graph, TO_NODE, ATTRIBUTES
from social_graph.forms import BaseEdgeForm, SpecificTypeEdgeForm
from social_graph.models import EdgeType, EdgeTypeAssociation, Edge
from social_graph.signals import (
edge_created, edge_deleted, object_created, object_deleted, edge_updated, object_visited
)
from test_graph.models import A, B
class MyException(Exception):
    """Sentinel exception raised on purpose by test signal receivers."""
# noinspection PyUnusedLocal
def raise_exception(**kwargs):
    """Signal receiver that always raises MyException; all kwargs are ignored."""
    raise MyException()
class SocialGraphTest(TestCase):
    def setUp(self):
        """Build the shared fixture: one user, four groups, and a mutually
        inverse Like / Liked-By edge-type pair."""
        self.graph = Graph()
        # Start from a cold cache so each test exercises fresh lookups.
        self.graph.clear_cache()
        self.users = [User.objects.create(username="pepe")]
        self.objects = {
            'advanced': Group.objects.create(name="advanced users"),
            'admin': Group.objects.create(name="administrators"),
            'limited': Group.objects.create(name="limited users"),
            'dummy': Group.objects.create(name="dummy users")
        }
        self.relationships = {
            'like': EdgeType.objects.create(name="Like", read_as="likes"),
            'liked_by': EdgeType.objects.create(name="Liked By", read_as="is liked by")
        }
        # Registering the association makes 'liked_by' the inverse of 'like'.
        EdgeTypeAssociation.objects.create(direct=self.relationships['like'], inverse=self.relationships['liked_by'])
        # Flags presumably flipped by signal receivers in tests outside this
        # view — not used by the visible test methods.
        self.created_flag = False
        self.deleted_flag = False
        self.visited_flag = False
        self.site = Site.objects.get_current()
    def test_edge_type_creation_and_association(self):
        """Creating one EdgeTypeAssociation yields a mirrored pair, and
        deleting either removes both."""
        # setUp created a single association; its mirror appears automatically.
        self.assertEqual(EdgeTypeAssociation.objects.count(), 2)
        self.assertEqual(EdgeTypeAssociation.objects.all()[1].direct, EdgeTypeAssociation.objects.all()[0].inverse)
        self.assertEqual(EdgeTypeAssociation.objects.all()[1].inverse, EdgeTypeAssociation.objects.all()[0].direct)
        EdgeTypeAssociation.objects.all()[0].delete()
        # Deleting one association cascades to its mirror.
        self.assertEqual(EdgeTypeAssociation.objects.count(), 0)
    def test_edge_add(self):
        """Adding edges updates counts and ranges for both the direct ('like')
        relationship and the automatically maintained inverse ('liked_by')."""
        # before anything gets saved
        edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
        self.assertEqual(len(edges), 0)
        self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 0)
        inverse_edges = self.graph.edge_range(
            self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site
        )
        self.assertEqual(len(inverse_edges), 0)
        self.assertEqual(self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 0)
        # first edge gets saved
        self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
        self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 1)
        # The edge must also be persisted in the database, not only cached.
        self.assertEqual(len(Edge.objects.filter(
            fromNode_pk=self.users[0].pk,
            fromNode_type=ContentType.objects.get_for_model(self.users[0]),
            type=self.relationships['like'],
            site=self.site
        )), 1)
        edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
        self.assertEqual(edges[0][TO_NODE].name, self.objects['advanced'].name)
        # The inverse edge (group is liked by the user) appears automatically.
        self.assertEqual(self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 1)
        edges = self.graph.edge_range(self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site)
        self.assertEqual(edges[0][TO_NODE].username, self.users[0].username)
        # another edge gets saved
        self.graph.edge(self.users[0], self.objects['admin'], self.relationships['like'], self.site)
        self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 2)
        self.assertEqual(len(Edge.objects.filter(
            fromNode_pk=self.users[0].pk,
            fromNode_type=ContentType.objects.get_for_model(self.users[0]),
            type=self.relationships['like'],
            site=self.site
        )), 2)
        self.assertEqual(self.graph.edge_count(self.objects['admin'], self.relationships['liked_by'], self.site), 1)
    def test_edge_add_atomicity(self):
        """If an edge_created receiver raises, the edge write must roll back
        completely: cache, DB row, direct range, and inverse range."""
        edge_created.connect(raise_exception, Graph)
        try:
            self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
        except MyException:
            # check the edge list
            self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 0)
            self.assertEqual(len(Edge.objects.filter(
                fromNode_pk=self.users[0].pk,
                fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                type=self.relationships['like'],
                site=self.site
            )), 0)
            self.assertEqual(self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site), [])
            # check the inverse edge list
            self.assertEqual(
                self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 0
            )
            self.assertEqual(
                self.graph.edge_range(self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site), []
            )
        # NOTE(review): if edge() does NOT raise, every assertion above is
        # skipped and the test passes vacuously — consider failing explicitly
        # when no MyException is raised.
        edge_created.disconnect(raise_exception, Graph)
def test_edge_delete(self):
    """Deleting one edge removes it from the direct edge list, the
    inverse edge list, and the ``Edge`` table, leaving the other
    edges of the same type intact.
    """
    # before anything gets saved
    edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
    self.assertEqual(len(edges), 0)
    self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 0)
    inverse_edges = self.graph.edge_range(
        self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site
    )
    self.assertEqual(len(inverse_edges), 0)
    self.assertEqual(self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 0)
    # 3 edges gets saved
    self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    self.graph.edge(self.users[0], self.objects['admin'], self.relationships['like'], self.site)
    self.graph.edge(self.users[0], self.objects['limited'], self.relationships['like'], self.site)
    edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
    self.assertEqual(len(edges), 3)
    inverse_edges = self.graph.edge_range(
        self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site
    )
    self.assertEqual(len(inverse_edges), 1)
    # one edge gets deleted
    self.graph.no_edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
    self.assertEqual(len(edges), 2)
    self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 2)
    self.assertEqual(len(Edge.objects.filter(
        fromNode_pk=self.users[0].pk,
        fromNode_type=ContentType.objects.get_for_model(self.users[0]),
        type=self.relationships['like'],
        site=self.site
    )), 2)
    # Remaining edges: 'limited' (added last) first, then 'admin'.
    self.assertEqual(edges[0][TO_NODE].name, self.objects['limited'].name)
    self.assertEqual(edges[1][TO_NODE].name, self.objects['admin'].name)
    # The inverse ('liked_by') list of the deleted target is empty again.
    inverse_edges = self.graph.edge_range(
        self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site
    )
    self.assertEqual(len(inverse_edges), 0)
    self.assertEqual(self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 0)
    self.assertEqual(len(Edge.objects.filter(
        fromNode_pk=self.objects['advanced'].pk,
        fromNode_type=ContentType.objects.get_for_model(self.objects['advanced']),
        type=self.relationships['liked_by'],
        site=self.site
    )), 0)
def test_edge_delete_atomicity(self):
    """Edge deletion is atomic: if an ``edge_deleted`` receiver raises,
    the edge must remain both in the graph count and in the DB.
    """
    # Install a receiver that raises MyException during deletion.
    edge_deleted.connect(raise_exception, Graph)
    try:
        self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
        self.graph.no_edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    except MyException:
        # The delete was rolled back — the edge created above survives.
        self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 1)
        self.assertEqual(len(Edge.objects.filter(
            fromNode_pk=self.users[0].pk,
            fromNode_type=ContentType.objects.get_for_model(self.users[0]),
            type=self.relationships['like'],
            site=self.site
        )), 1)
    # Remove the failing receiver so later tests are unaffected.
    edge_deleted.disconnect(raise_exception, Graph)
def test_edge_range_order(self):
    """``edge_range`` returns edges newest-first.

    The 1 s sleeps guarantee distinct creation timestamps so the
    ordering assertion is deterministic.
    """
    self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    sleep(1)
    self.graph.edge(self.users[0], self.objects['admin'], self.relationships['like'], self.site)
    sleep(1)
    self.graph.edge(self.users[0], self.objects['limited'], self.relationships['like'], self.site)
    edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
    # Most recently created edge comes first.
    self.assertEqual(edges[0][TO_NODE].name, self.objects['limited'].name)
    self.assertEqual(edges[1][TO_NODE].name, self.objects['admin'].name)
    self.assertEqual(edges[2][TO_NODE].name, self.objects['advanced'].name)
def test_edge_time_range(self):
    """``edge_time_range`` filters edges by creation-time window and
    honours the count limit, returning newest-first.

    t0 < edge(advanced) < t1 < edge(admin) < t2 < edge(limited) < t3,
    with sleeps to keep the timestamps strictly separated.
    """
    t0 = time()
    sleep(1)
    self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    t1 = time()
    sleep(1)
    self.graph.edge(self.users[0], self.objects['admin'], self.relationships['like'], self.site)
    t2 = time()
    sleep(1)
    self.graph.edge(self.users[0], self.objects['limited'], self.relationships['like'], self.site)
    t3 = time()
    # Window [t0, t2] contains the first two edges.
    edges = self.graph.edge_time_range(self.users[0], self.relationships['like'], t0, t2, 10, self.site)
    self.assertEqual(len(edges), 2)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['admin'].name)
    self.assertEqual(edges[1][TO_NODE].name, self.objects['advanced'].name)
    # Same window with limit 1 returns only the newest match.
    edges = self.graph.edge_time_range(self.users[0], self.relationships['like'], t0, t2, 1, self.site)
    self.assertEqual(len(edges), 1)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['admin'].name)
    # Window [t0, t1] contains only the first edge.
    edges = self.graph.edge_time_range(self.users[0], self.relationships['like'], t0, t1, 10, self.site)
    self.assertEqual(len(edges), 1)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['advanced'].name)
    # Window [t0, t3] contains all three, newest first.
    edges = self.graph.edge_time_range(self.users[0], self.relationships['like'], t0, t3, 10, self.site)
    self.assertEqual(len(edges), 3)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['limited'].name)
    self.assertEqual(edges[1][TO_NODE].name, self.objects['admin'].name)
    self.assertEqual(edges[2][TO_NODE].name, self.objects['advanced'].name)
def test_edge_change(self):
    """Re-issuing ``edge()`` for an existing pair updates the edge's
    attributes in place instead of creating a duplicate edge.
    """
    self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 1)
    self.assertEqual(len(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                             fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                             type=self.relationships['like'],
                                             site=self.site)), 1)
    # A freshly created edge starts with empty attributes.
    self.assertEqual(
        self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)[0][ATTRIBUTES], {})
    # Same edge again, now carrying attributes — must update, not duplicate.
    self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site, {"quantity": 3})
    # Count and range length stay in sync (still a single edge).
    self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site),
                     len(self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)))
    self.assertEqual(len(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                             fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                             type=self.relationships['like'],
                                             site=self.site)),
                     len(self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)))
    # Attributes were persisted on the DB row...
    self.assertEqual(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                         fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                         type=self.relationships['like'],
                                         site=self.site)[0].attributes,
                     {"quantity": 3})
    self.assertEqual(
        self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)[0][TO_NODE].name,
        self.objects['advanced'].name
    )
    # ...and are visible through the graph API as well.
    self.assertEqual(
        self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)[0][ATTRIBUTES],
        {"quantity": 3}
    )
def test_edge_change_atomicity(self):
    """Edge attribute update is atomic: if an ``edge_updated`` receiver
    raises, the edge keeps its previous (empty) attributes everywhere.
    """
    # Install a receiver that raises MyException during the update.
    edge_updated.connect(raise_exception, Graph)
    try:
        self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
        self.graph.edge(
            self.users[0], self.objects['advanced'], self.relationships['like'], self.site, {"quantity": 3}
        )
    except MyException:
        # The edge created first still exists exactly once.
        self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 1)
        self.assertEqual(len(self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)), 1)
        self.assertEqual(len(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                                 fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                                 type=self.relationships['like'],
                                                 site=self.site)), 1)
        # The failed update left attributes empty in cache and DB.
        self.assertEqual(
            self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)[0][ATTRIBUTES], {})
        self.assertEqual(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                             fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                             type=self.relationships['like'],
                                             site=self.site)[0].attributes, {})
    # Remove the failing receiver so later tests are unaffected.
    edge_updated.disconnect(raise_exception, Graph)
def test_edges_get(self):
    """``edges_get`` returns only the edges whose targets appear in the
    given candidate list, in candidate-list order; candidates without
    an edge ('dummy') are silently skipped.
    """
    self.graph.edge(self.users[0], self.objects['advanced'], self.relationships['like'], self.site)
    self.graph.edge(self.users[0], self.objects['limited'], self.relationships['like'], self.site)
    self.graph.edge(self.users[0], self.objects['admin'], self.relationships['like'], self.site)
    # 'admin' has an edge but is not in the candidate list; 'dummy' is
    # a candidate with no edge — neither should appear in the result.
    edges = self.graph.edges_get(
        self.users[0],
        self.relationships['like'],
        [self.objects['advanced'], self.objects['limited'], self.objects['dummy']],
        self.site
    )
    self.assertEqual(len(edges), 2)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['advanced'].name)
    self.assertEqual(edges[1][TO_NODE].name, self.objects['limited'].name)
# noinspection PyUnusedLocal
def _created_flag_on(self, **kwargs):
self.created_flag = True
# noinspection PyUnusedLocal
def _deleted_flag_on(self, **kwargs):
self.deleted_flag = True
# noinspection PyUnusedLocal
def _visited_flag_on(self, **kwargs):
self.visited_flag = True
def test_model_with_crud_aware_decorator(self):
    """Model ``A`` (CRUD-aware) fires object_created / object_deleted /
    object_visited signals on create, delete and detail-view GET.
    """
    object_created.connect(self._created_flag_on, A)
    object_deleted.connect(self._deleted_flag_on, A)
    object_visited.connect(self._visited_flag_on)
    self.assertEqual(self.created_flag, False)
    created = A.objects.create(a=5)
    # Creation flipped the flag via the object_created signal.
    self.assertEqual(self.created_flag, True)
    self.assertEqual(self.deleted_flag, False)
    created.delete()
    # Deletion flipped the flag via the object_deleted signal.
    self.assertEqual(self.deleted_flag, True)
    self.assertEqual(self.visited_flag, False)
    obj = A.objects.create(a=55)
    from django.test.client import Client
    c = Client()
    # Visiting the detail view fires object_visited.
    response = c.get('/a/%s' % obj.id)
    self.assertIn('object', response.context_data)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(self.visited_flag, True)
    # Disconnect all receivers so other tests are unaffected.
    object_created.disconnect(self._created_flag_on, A)
    object_deleted.disconnect(self._deleted_flag_on, A)
    object_visited.disconnect(self._visited_flag_on)
def test_model_without_crud_aware_decorator(self):
    """Model ``B`` (not CRUD-aware) must fire NO signals: all flags stay
    False across create, delete and detail-view GET.
    """
    object_created.connect(self._created_flag_on, B)
    object_deleted.connect(self._deleted_flag_on, B)
    object_visited.connect(self._visited_flag_on)
    self.assertEqual(self.created_flag, False)
    created = B.objects.create(b=5)
    # No object_created signal for a non-decorated model.
    self.assertEqual(self.created_flag, False)
    self.assertEqual(self.deleted_flag, False)
    created.delete()
    # No object_deleted signal either.
    self.assertEqual(self.deleted_flag, False)
    self.assertEqual(self.visited_flag, False)
    obj = B.objects.create(b=55)
    from django.test.client import Client
    c = Client()
    response = c.get('/b/%s' % obj.id)
    self.assertIn('object', response.context_data)
    self.assertEqual(response.status_code, 200)
    # The view rendered fine, but no object_visited signal fired.
    self.assertEqual(self.visited_flag, False)
    # Disconnect all receivers so other tests are unaffected.
    object_created.disconnect(self._created_flag_on, B)
    object_deleted.disconnect(self._deleted_flag_on, B)
    object_visited.disconnect(self._visited_flag_on)
# noinspection PyProtectedMember
def test_singleton(self):
    """``Graph`` is a singleton: instantiating it again must not bump
    the internal ``_instance_count``.
    """
    self.assertEqual(self.graph._instance_count, 1)
    Graph()
    self.assertEqual(self.graph._instance_count, 1)
def test_edge_type_manager(self):
    """``EdgeType.objects.get`` populates a per-db cache keyed by both
    pk and name; ``clear_cache`` drops the whole db entry.
    """
    pk = self.relationships['like'].id
    name = self.relationships['like'].name
    # first time we call get()
    self.assertEqual(EdgeType.objects.get(name=name), self.relationships['like'])
    # The fetched object is now cached under its pk AND its name.
    self.assertIn(pk, EdgeType.objects._cache[EdgeType.objects.db])
    self.assertEqual(EdgeType.objects._cache[EdgeType.objects.db][pk], self.relationships['like'])
    self.assertIn(name, EdgeType.objects._cache[EdgeType.objects.db])
    self.assertEqual(EdgeType.objects._cache[EdgeType.objects.db][name], self.relationships['like'])
    # from second time we call get() on, the edge type should be got from cache, without hitting the db
    self.assertEqual(EdgeType.objects.get(name=name), self.relationships['like'])
    self.assertEqual(EdgeType.objects.get(id=pk), self.relationships['like'])
    self.assertEqual(EdgeType.objects.get(pk=pk), self.relationships['like'])
    # clear cache
    EdgeType.objects.clear_cache()
    self.assertNotIn(EdgeType.objects.db, EdgeType.objects._cache)
    self.relationships['like'].delete()
def test_edge_type_association_manager(self):
    """Fetching a direct→inverse association caches it three ways:
    by association id, by direct edge-type id, and by inverse
    edge-type id (all per-db).
    """
    association = EdgeTypeAssociation.objects.get_for_direct_edge_type(self.relationships['like'])
    self.assertEqual(association.inverse, self.relationships['liked_by'])
    # Cached by association primary key...
    self.assertIn(association.id, EdgeTypeAssociation.objects._cache[EdgeTypeAssociation.objects.db])
    self.assertEqual(
        EdgeTypeAssociation.objects._cache[EdgeTypeAssociation.objects.db][association.id], association
    )
    # ...by the direct edge type's id...
    self.assertIn(association.direct.id, EdgeTypeAssociation.objects._direct_cache[EdgeTypeAssociation.objects.db])
    self.assertEqual(
        EdgeTypeAssociation.objects._direct_cache[EdgeTypeAssociation.objects.db][association.direct.id],
        association
    )
    # ...and by the inverse edge type's id.
    self.assertIn(
        association.inverse.id, EdgeTypeAssociation.objects._inverse_cache[EdgeTypeAssociation.objects.db]
    )
    self.assertEqual(
        EdgeTypeAssociation.objects._inverse_cache[EdgeTypeAssociation.objects.db][association.inverse.id],
        association
    )
def test_edge_form_descendants(self):
    """Saving a valid ``BaseEdgeForm`` subclass creates the edge (and
    its inverse) exactly as a direct ``graph.edge()`` call would.
    """
    like = self.relationships['like']

    class LikeForm(BaseEdgeForm):
        # Field names that the form maps to edge origin/target.
        edge_origin = 'user'
        edge_target = 'group'
        edge_attributes = ['rating', ]
        user = forms.ModelChoiceField(User.objects.all())
        group = forms.ModelChoiceField(Group.objects.all())
        site = forms.ModelChoiceField(Site.objects.all())
        rating = forms.CharField()

        def get_etype(self):
            # Edge type resolved dynamically (closure over `like`).
            return like

    data = {
        'user': self.users[0].pk,
        'group': self.objects['advanced'].pk,
        'rating': '5',
        'site': self.site.pk
    }
    form = LikeForm(dict(**data))
    self.assertTrue(form.is_valid())
    form.save()
    # then check the edge list
    self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 1)
    self.assertEqual(len(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                             fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                             type=self.relationships['like'],
                                             site=self.site)), 1)
    edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['advanced'].name)
    # and check the inverse edge list
    self.assertEqual(self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 1)
    edges = self.graph.edge_range(self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site)
    self.assertEqual(edges[0][TO_NODE].username, self.users[0].username)
def test_specific_type_edge_form_descendants(self):
    """A ``SpecificTypeEdgeForm`` subclass with a fixed ``etype`` saves
    the edge and stores the declared ``edge_attributes`` on both the
    direct and the inverse edge.
    """
    like = self.relationships['like']

    class LikeForm(SpecificTypeEdgeForm):
        # Edge type fixed at class level (vs. get_etype() on BaseEdgeForm).
        etype = like
        edge_origin = 'user'
        edge_target = 'group'
        edge_attributes = ['rating', 'favorite']
        user = forms.ModelChoiceField(User.objects.all())
        group = forms.ModelChoiceField(Group.objects.all())
        site = forms.ModelChoiceField(Site.objects.all())
        rating = forms.CharField()
        favorite = forms.BooleanField()

    data = {
        'user': self.users[0].pk,
        'group': self.objects['advanced'].pk,
        'rating': '5',
        'favorite': True,
        'site': self.site.pk
    }
    form = LikeForm(dict(**data))
    self.assertTrue(form.is_valid())
    form.save()
    # then check the edge list
    self.assertEqual(self.graph.edge_count(self.users[0], self.relationships['like'], self.site), 1)
    self.assertEqual(len(Edge.objects.filter(fromNode_pk=self.users[0].pk,
                                             fromNode_type=ContentType.objects.get_for_model(self.users[0]),
                                             type=self.relationships['like'],
                                             site=self.site)), 1)
    edges = self.graph.edge_range(self.users[0], self.relationships['like'], 0, 10, self.site)
    self.assertEqual(edges[0][TO_NODE].name, self.objects['advanced'].name)
    # The declared edge_attributes were copied onto the edge payload.
    self.assertEqual(edges[0][ATTRIBUTES], {'rating': '5', 'favorite': True})
    # and check the inverse edge list
    self.assertEqual(self.graph.edge_count(self.objects['advanced'], self.relationships['liked_by'], self.site), 1)
    edges = self.graph.edge_range(self.objects['advanced'], self.relationships['liked_by'], 0, 10, self.site)
    self.assertEqual(edges[0][TO_NODE].username, self.users[0].username)
    # The inverse edge carries the same attributes.
    self.assertEqual(edges[0][ATTRIBUTES], {'rating': '5', 'favorite': True})
if __name__ == '__main__':
    # Allow running this test module directly, outside the Django test runner.
    import unittest
    unittest.main()
| 49.551102
| 120
| 0.631238
| 2,944
| 24,726
| 5.179688
| 0.059443
| 0.103285
| 0.055086
| 0.050495
| 0.836645
| 0.80143
| 0.779067
| 0.727392
| 0.702276
| 0.68752
| 0
| 0.014771
| 0.230607
| 24,726
| 498
| 121
| 49.650602
| 0.786796
| 0.023295
| 0
| 0.569652
| 0
| 0
| 0.050976
| 0
| 0
| 0
| 0
| 0
| 0.28607
| 1
| 0.057214
| false
| 0.002488
| 0.034826
| 0.002488
| 0.104478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b5d723a2535c7afe292e39ca46095c0d3926edd4
| 2,060
|
py
|
Python
|
tests/test_sample.py
|
mirnylab/cooltools
|
ab5d775ee50fb3d4483520a40f758914348e89b7
|
[
"MIT"
] | 39
|
2017-10-15T02:59:32.000Z
|
2020-09-15T21:53:56.000Z
|
tests/test_sample.py
|
mirnylab/cooltools
|
ab5d775ee50fb3d4483520a40f758914348e89b7
|
[
"MIT"
] | 131
|
2017-09-05T15:56:24.000Z
|
2020-09-22T13:23:54.000Z
|
tests/test_sample.py
|
mirnylab/cooltools
|
ab5d775ee50fb3d4483520a40f758914348e89b7
|
[
"MIT"
] | 29
|
2017-04-29T23:06:28.000Z
|
2020-08-28T19:14:23.000Z
|
import os.path as op
import cooler
import cooltools
import cooltools.api
from numpy import testing
def test_sample(request):
    """Test random (non-exact) downsampling of a cooler.

    Downsamples the fixture cooler twice — once by fraction, once by
    target count — and checks the resulting total ``sum`` is within
    0.1% relative tolerance of the expectation.
    """
    # perform test:
    clr = cooler.Cooler(op.join(request.fspath.dirname, "data/CN.mm9.1000kb.cool"))
    # Sample ~50% of the contacts.
    cooltools.api.sample.sample(
        clr,
        op.join(request.fspath.dirname, "data/CN.mm9.1000kb.test_sampled.cool"),
        frac=0.5,
    )
    clr_result = cooler.Cooler(
        op.join(request.fspath.dirname, "data/CN.mm9.1000kb.test_sampled.cool")
    )
    # Test that deviation from expected total is very small
    testing.assert_allclose(clr_result.info["sum"], clr.info["sum"] / 2, rtol=1e-3)
    # Sample down to an absolute contact count instead of a fraction.
    cooltools.api.sample.sample(
        clr,
        op.join(request.fspath.dirname, "data/CN.mm9.1000kb.test_sampled.cool"),
        count=200000000,
    )
    clr_result = cooler.Cooler(
        op.join(request.fspath.dirname, "data/CN.mm9.1000kb.test_sampled.cool")
    )
    # Test that deviation from expected total is very small
    testing.assert_allclose(clr_result.info["sum"], 200000000, rtol=1e-3)
def test_sample_exact(request):
    """Test exact downsampling of a cooler.

    With ``exact=True`` the sampled total must match the requested
    fraction/count exactly, so ``assert_equal`` is used instead of a
    tolerance check. Uses the coarser 10000kb fixture because exact
    sampling is slow.
    """
    # Exact sampling is very slow! So commented out
    clr = cooler.Cooler(op.join(request.fspath.dirname, "data/CN.mm9.10000kb.cool"))
    # Exact 50% downsample.
    cooltools.api.sample.sample(
        clr,
        op.join(request.fspath.dirname, "data/CN.mm9.10000kb.test_sampled.cool"),
        frac=0.5,
        exact=True,
    )
    clr_result = cooler.Cooler(
        op.join(request.fspath.dirname, "data/CN.mm9.10000kb.test_sampled.cool")
    )
    # Test that result matches expectation exactly
    testing.assert_equal(clr_result.info["sum"], round(clr.info["sum"] * 0.5))
    # Exact downsample to an absolute contact count.
    cooltools.api.sample.sample(
        clr,
        op.join(request.fspath.dirname, "data/CN.mm9.10000kb.test_sampled.cool"),
        count=200000000,
        exact=True,
    )
    clr_result = cooler.Cooler(
        op.join(request.fspath.dirname, "data/CN.mm9.10000kb.test_sampled.cool")
    )
    # Test that result matches expectation exactly
    testing.assert_equal(clr_result.info["sum"], 200000000)
| 32.698413
| 84
| 0.675728
| 283
| 2,060
| 4.837456
| 0.212014
| 0.043828
| 0.09496
| 0.138787
| 0.84076
| 0.807159
| 0.798393
| 0.798393
| 0.798393
| 0.798393
| 0
| 0.061483
| 0.19466
| 2,060
| 62
| 85
| 33.225806
| 0.763713
| 0.124757
| 0
| 0.553191
| 0
| 0
| 0.198775
| 0.188753
| 0
| 0
| 0
| 0
| 0.085106
| 1
| 0.042553
| false
| 0
| 0.106383
| 0
| 0.148936
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1fa0b7639975d874e3f4d26dd19daca98c62f95e
| 55
|
py
|
Python
|
python/non_buildable_2/number_returns/number_returns/gimmes.py
|
nagi49000/tutorial-memory-refs-and-folder-structures
|
bede74884fc96d89b9cfdd45fba3c69b3f9445c1
|
[
"MIT"
] | null | null | null |
python/non_buildable_2/number_returns/number_returns/gimmes.py
|
nagi49000/tutorial-memory-refs-and-folder-structures
|
bede74884fc96d89b9cfdd45fba3c69b3f9445c1
|
[
"MIT"
] | null | null | null |
python/non_buildable_2/number_returns/number_returns/gimmes.py
|
nagi49000/tutorial-memory-refs-and-folder-structures
|
bede74884fc96d89b9cfdd45fba3c69b3f9445c1
|
[
"MIT"
] | null | null | null |
def gimme5():
    """Return the constant integer 5 (tutorial helper)."""
    result = 5
    return result
def gimme3():
    """Return the constant integer 3 (tutorial helper)."""
    result = 3
    return result
| 9.166667
| 13
| 0.581818
| 8
| 55
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0.309091
| 55
| 5
| 14
| 11
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
1fb8a11e1163474712bce1199e2c8cecbacef6b6
| 21
|
py
|
Python
|
abm/in-depth/in_depth_agent_based_modeling/simulation_models/spm/__init__.py
|
transentis/bptk_py_tutorial
|
db622858401fb63f773bc5917414bd42872c5010
|
[
"MIT"
] | 34
|
2020-02-01T04:53:56.000Z
|
2022-03-07T19:28:59.000Z
|
abm/how-to/how_to_choose_datacollector/simulation_models/spm/__init__.py
|
transentis/bptk_py_tutorial
|
db622858401fb63f773bc5917414bd42872c5010
|
[
"MIT"
] | 3
|
2021-05-04T07:08:26.000Z
|
2022-03-02T11:39:51.000Z
|
abm/in-depth/in_depth_agent_based_modeling/simulation_models/spm/__init__.py
|
transentis/bptk_py_tutorial
|
db622858401fb63f773bc5917414bd42872c5010
|
[
"MIT"
] | 14
|
2020-03-26T21:08:54.000Z
|
2022-02-04T14:20:01.000Z
|
from .SPM import SPM
| 10.5
| 20
| 0.761905
| 4
| 21
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 2
| 20
| 10.5
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2f133a8218630a88f69373d4c5dd7e59491e5316
| 1,744
|
py
|
Python
|
pwnlib/elf/maps.py
|
tkmikan/pwntools
|
1238fc359eb72313d3f82849b2effdb7063ab429
|
[
"MIT"
] | 9
|
2018-07-16T23:18:15.000Z
|
2019-11-14T10:06:04.000Z
|
pwnlib/elf/maps.py
|
tkmikan/pwntools
|
1238fc359eb72313d3f82849b2effdb7063ab429
|
[
"MIT"
] | 1
|
2018-10-31T22:03:35.000Z
|
2018-11-02T20:36:21.000Z
|
pwnlib/elf/maps.py
|
tkmikan/pwntools
|
1238fc359eb72313d3f82849b2effdb7063ab429
|
[
"MIT"
] | 3
|
2018-10-31T04:34:30.000Z
|
2021-02-06T00:39:32.000Z
|
from __future__ import absolute_import

# Pre-assembled shellcode for each architecture.
#
# This is literally the output of:
# shellcraft $ARCH.linux.cat2 /proc/self/maps
# shellcraft $ARCH.linux.syscalls.exit 0
#
# Maps architecture name -> hex-encoded machine code that dumps
# /proc/self/maps to stdout and then exits with status 0. Each value
# is two concatenated hex string literals: the cat2 payload followed
# by the exit payload. Do not edit the hex by hand — regenerate with
# shellcraft if the payloads need to change.
CAT_PROC_MAPS_EXIT = {
    'i386':
    '680101010181342460717201686c662f6d68632f7365682f70726f89e331c931d2b6406a0558cd8029d489c389e16a0358cd806a015b89e189c26a0458cd80'
    '31db6a0158cd80',
    'amd64':
    '48b801010101010101015048b86d672e6c607172014831042448b82f70726f632f7365506a02584889e731d2b64031f60f054829d44889c731c04889e60f054889c26a01586a015f4889e60f05'
    '31ff6a3c580f05',
    'arm':
    '617007e3737040e304702de56c7606e32f7d46e304702de5637f02e3737546e304702de52f7007e3727f46e304702de50d00a0e1011021e00129a0e30570a0e3000000ef02d04de00d10a0e10370a0e3000000ef0020a0e10100a0e30d10a0e10470a0e3000000ef'
    '000020e00170a0e3000000ef',
    'thumb':
    '004f01e0617073ff4fea07274fea172780b4dff8047001e06c662f6d80b4dff8047001e0632f736580b4dff8047001e02f70726f80b4684681ea01014ff480424ff0050741dfadeb020d69464ff0030741df02464ff0010069464ff0040741df'
    '80ea00004ff0010741df00bf',
    'mips':
    '726f093c2f702935f0ffa9af7365093c632f2935f4ffa9af2f6d093c6c662935f8ffa9af8cff193c9e8f393727482003fcffa9aff0ffbd272020a003ffff0528ffbf192427302003a50f02340c01010122e8a603fcffa2affcffa48f2028a003a30f02340c010101feff1924272020032028a003fcffa2affcffa68fa40f02340c010101'
    'ffff0428a10f02340c010101',
    'aarch64':
    'ee058ed24eeeadf26eecc5f26eaeecf28fcd8cd2efa5adf22f0ccef26f0ee0f2ee3fbfa980f39fd2e0ffbff2e0ffdff2e0fffff2e1030091e2031faa080780d2010000d4020088d2ff6322cbe1030091e80780d2010000d4e20300aa200080d2e1030091080880d2010000d4'
    'e0031faaa80b80d2010000d4',
}
| 62.285714
| 274
| 0.858945
| 52
| 1,744
| 28.653846
| 0.865385
| 0.018792
| 0.025503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.578813
| 0.101491
| 1,744
| 27
| 275
| 64.592593
| 0.372049
| 0.097477
| 0
| 0
| 0
| 0
| 0.836735
| 0.80102
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.047619
| 0
| 0.047619
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2f307464159a3852316b48a6db2a19ab494960be
| 16
|
py
|
Python
|
py/ser.py
|
clker/xriscv
|
9ebaf87360da32a7659b376807c122a1f112cd70
|
[
"MIT"
] | 3
|
2019-09-17T03:06:35.000Z
|
2020-08-12T06:42:09.000Z
|
py/ser.py
|
clker/xriscv
|
9ebaf87360da32a7659b376807c122a1f112cd70
|
[
"MIT"
] | null | null | null |
py/ser.py
|
clker/xriscv
|
9ebaf87360da32a7659b376807c122a1f112cd70
|
[
"MIT"
] | 1
|
2020-08-12T07:21:19.000Z
|
2020-08-12T07:21:19.000Z
|
import serial
| 4
| 13
| 0.75
| 2
| 16
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 16
| 3
| 14
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2f699a530389a6ebbae66bc21c4f49db0b432c8f
| 52
|
py
|
Python
|
futura_ui/app/ui/recipes/recipes.py
|
pjamesjoyce/futura
|
fc4558bd07626b0d1e89093c0107ccd989ceaa6a
|
[
"BSD-3-Clause"
] | 6
|
2020-05-04T16:48:03.000Z
|
2022-03-29T14:58:16.000Z
|
futura_ui/app/ui/recipes/recipes.py
|
pjamesjoyce/futura
|
fc4558bd07626b0d1e89093c0107ccd989ceaa6a
|
[
"BSD-3-Clause"
] | 1
|
2021-09-13T06:41:21.000Z
|
2021-09-13T06:41:21.000Z
|
futura_ui/app/ui/recipes/recipes.py
|
pjamesjoyce/futura
|
fc4558bd07626b0d1e89093c0107ccd989ceaa6a
|
[
"BSD-3-Clause"
] | 1
|
2020-11-13T23:02:18.000Z
|
2020-11-13T23:02:18.000Z
|
def new_recipe():
    """Placeholder action: announce that a new recipe is being created."""
    message = 'creating a new recipe'
    print(message)
| 26
| 34
| 0.692308
| 8
| 52
| 4.375
| 0.75
| 0.514286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173077
| 52
| 2
| 34
| 26
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
85d08e0254709e63c95a612c60f8f3eb6ffb3f64
| 47
|
py
|
Python
|
ptk/__init__.py
|
patrickctrf/6-DOF-Inertial-Odometry
|
4e7a96408db69d609f0250fd6629c39173fc3863
|
[
"BSD-3-Clause"
] | null | null | null |
ptk/__init__.py
|
patrickctrf/6-DOF-Inertial-Odometry
|
4e7a96408db69d609f0250fd6629c39173fc3863
|
[
"BSD-3-Clause"
] | null | null | null |
ptk/__init__.py
|
patrickctrf/6-DOF-Inertial-Odometry
|
4e7a96408db69d609f0250fd6629c39173fc3863
|
[
"BSD-3-Clause"
] | null | null | null |
from .utils import *
from .timeseries import *
| 15.666667
| 25
| 0.744681
| 6
| 47
| 5.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 26
| 23.5
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
85e3aa98e4a4e4287bb423e4c8d3b6bc71b9d39a
| 42
|
py
|
Python
|
__init__.py
|
tr-ace/CiscoHelper
|
d69b91a13ed20335010ff7bdcfac708ac2d5a1f5
|
[
"MIT"
] | null | null | null |
__init__.py
|
tr-ace/CiscoHelper
|
d69b91a13ed20335010ff7bdcfac708ac2d5a1f5
|
[
"MIT"
] | 1
|
2021-03-09T23:01:42.000Z
|
2021-03-09T23:01:42.000Z
|
__init__.py
|
tr-ace/CiscoHelper
|
d69b91a13ed20335010ff7bdcfac708ac2d5a1f5
|
[
"MIT"
] | 1
|
2021-03-09T20:59:49.000Z
|
2021-03-09T20:59:49.000Z
|
from user import User
from dir import Dir
| 14
| 21
| 0.809524
| 8
| 42
| 4.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 22
| 21
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c81e8c00882ac688c5acd1064e1c6125e329f86b
| 59,166
|
py
|
Python
|
webfront/tests/tests_organism.py
|
ProteinsWebTeam/project-skeleton
|
7aeb971ba2d9bfe272e0590bd4484afb61336b96
|
[
"Apache-2.0"
] | 6
|
2020-05-25T17:35:52.000Z
|
2022-03-26T00:45:55.000Z
|
webfront/tests/tests_organism.py
|
ProteinsWebTeam/project-skeleton
|
7aeb971ba2d9bfe272e0590bd4484afb61336b96
|
[
"Apache-2.0"
] | 76
|
2016-07-29T09:22:34.000Z
|
2022-03-15T07:57:17.000Z
|
webfront/tests/tests_organism.py
|
ProteinsWebTeam/project-skeleton
|
7aeb971ba2d9bfe272e0590bd4484afb61336b96
|
[
"Apache-2.0"
] | 1
|
2017-04-09T20:08:30.000Z
|
2017-04-09T20:08:30.000Z
|
from rest_framework import status
from webfront.models import Taxonomy
from webfront.tests.InterproRESTTestCase import InterproRESTTestCase
class TaxonomyFixturesTest(InterproRESTTestCase):
    """Smoke tests for the taxonomy fixtures and the /api/taxonomy endpoints."""

    def test_the_fixtures_are_loaded(self):
        """The test DB holds exactly the 6 fixture taxa, including ROOT."""
        taxa = Taxonomy.objects.all()
        self.assertEqual(taxa.count(), 6)
        names = [t.scientific_name for t in taxa]
        self.assertIn("ROOT", names)
        self.assertNotIn("unicorn", names)

    def test_can_get_the_taxonomy_count(self):
        """/api/taxonomy returns a 'taxa' overview with a 'uniprot' count."""
        response = self.client.get("/api/taxonomy")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("taxa", response.data)
        self.assertIn("uniprot", response.data["taxa"])
        # self.assertIn("proteome", response.data["taxa"])

    def test_can_read_taxonomy_list(self):
        """/api/taxonomy/uniprot lists all 6 taxa, each with metadata."""
        response = self.client.get("/api/taxonomy/uniprot")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
        self.assertEqual(len(response.data["results"]), 6)

    def test_can_read_taxonomy_id(self):
        """/api/taxonomy/uniprot/<id> returns well-formed taxon metadata."""
        response = self.client.get("/api/taxonomy/uniprot/2")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_taxonomy_details(response.data["metadata"])
class TaxonomyProteomeFixturesTest(InterproRESTTestCase):
    """Tests for combined taxonomy + proteome endpoints (/api/taxonomy/.../proteome)."""

    def test_can_read_taxonomy_with_proteome_list(self):
        """Listing taxa with proteomes returns the 3 taxa that have one."""
        response = self.client.get("/api/taxonomy/uniprot/proteome")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
        self._check_is_list_of_objects_with_key(response.data["results"], "proteomes")
        self.assertEqual(len(response.data["results"]), 3)

    def test_can_read_taxonomy_leaf_id_with_proteome_count(self):
        """A leaf taxon (40296) reports its single uniprot proteome count."""
        response = self.client.get("/api/taxonomy/uniprot/40296/proteome")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("metadata", response.data)
        self.assertIn("proteomes", response.data)
        self.assertIn("uniprot", response.data["proteomes"])
        self.assertEqual(response.data["proteomes"]["uniprot"], 1)

    def test_can_read_taxonomy_leaf_id_with_proteomes(self):
        """A leaf taxon's proteome subset contains exactly 1 proteome."""
        response = self.client.get("/api/taxonomy/uniprot/40296/proteome/uniprot")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("metadata", response.data)
        self.assertIn("proteome_subset", response.data)
        self.assertEqual(len(response.data["proteome_subset"]), 1)

    def test_can_read_taxonomy_node_id_with_proteomes(self):
        """An internal taxon (2579) aggregates 2 proteomes from its subtree."""
        response = self.client.get("/api/taxonomy/uniprot/2579/proteome/uniprot")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("metadata", response.data)
        self.assertIn("proteome_subset", response.data)
        self.assertEqual(len(response.data["proteome_subset"]), 2)

    def test_can_read_proteome_id_including_tax_id(self):
        """Proteome UP000030104 is reachable through every taxon of its lineage."""
        lineage = [1, 2, 40296]
        for taxon in lineage:
            response = self.client.get(
                "/api/taxonomy/uniprot/{}/proteome/uniprot/UP000030104".format(taxon)
            )
            self.assertEqual(
                response.status_code, status.HTTP_200_OK, "failed at " + str(taxon)
            )
            self.assertIn("proteomes", response.data)
            self.assertEqual(len(response.data["proteomes"]), 1)
            self.assertIn("accession", response.data["proteomes"][0])
            self.assertIn("taxonomy", response.data["proteomes"][0])
class EntryTaxonomyTest(InterproRESTTestCase):
    """Tests for the taxonomy modifier and taxonomy filters on /api/entry."""

    def _get_ok(self, url):
        """GET *url*, assert the endpoint answers HTTP 200 (reporting the URL
        in the failure message), and return the response."""
        response = self.client.get(url)
        self.assertEqual(
            response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
        )
        return response

    def test_can_get_the_taxonomy_count(self):
        """/api/entry/taxonomy reports both entry and taxonomy count overviews."""
        response = self.client.get("/api/entry/taxonomy")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_entry_count_overview(response.data)
        self._check_taxonomy_count_overview(response.data)

    def test_can_get_the_taxonomy_count_on_a_list(self):
        """Every entry in a list view carries a taxonomy count overview."""
        acc = "IPR003165"
        urls = [
            "/api/entry/interpro/taxonomy/",
            "/api/entry/pfam/taxonomy/",
            "/api/entry/unintegrated/taxonomy/",
            "/api/entry/interpro/pfam/taxonomy/",
            "/api/entry/unintegrated/pfam/taxonomy/",
            "/api/entry/interpro/" + acc + "/pfam/taxonomy",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(response.data["results"], "taxa")
            for result in response.data["results"]:
                self._check_taxonomy_count_overview(result)

    def test_urls_that_return_entry_with_taxonomy_count(self):
        """A single entry object includes a 'taxa' key with count overviews."""
        acc = "IPR003165"
        pfam = "PF02171"
        pfam_un = "PF17176"
        urls = [
            "/api/entry/interpro/" + acc + "/taxonomy",
            "/api/entry/pfam/" + pfam + "/taxonomy",
            "/api/entry/pfam/" + pfam_un + "/taxonomy",
            "/api/entry/interpro/" + acc + "/pfam/" + pfam + "/taxonomy",
            "/api/entry/interpro/pfam/" + pfam + "/taxonomy",
            "/api/entry/unintegrated/pfam/" + pfam_un + "/taxonomy",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_entry_details(response.data["metadata"])
            self.assertIn(
                "taxa",
                response.data,
                "'taxa' should be one of the keys in the response",
            )
            self._check_taxonomy_count_overview(response.data)

    def test_can_filter_entry_counter_with_taxonomy_db(self):
        """Filtering the entry counter by the taxonomy DB nests 'taxa' counts."""
        response = self._get_ok("/api/entry/taxonomy/uniprot")
        self.assertIn(
            "taxa",
            response.data["entries"]["integrated"],
            "'taxa' should be one of the keys in the response",
        )
        # The unintegrated counter may collapse to plain 0 when empty, so only
        # look for the nested "taxa" key when there is something to inspect.
        if response.data["entries"]["unintegrated"] != 0:
            self.assertIn(
                "taxa",
                response.data["entries"]["unintegrated"],
                "'taxa' should be one of the keys in the response",
            )

    def test_can_get_the_taxonomy_list_on_a_list(self):
        """Entry list views expose a per-entry taxonomy_subset."""
        acc = "IPR003165"
        urls = [
            "/api/entry/interpro/taxonomy/uniprot",
            "/api/entry/unintegrated/taxonomy/uniprot",
            "/api/entry/interpro/" + acc + "/pfam/taxonomy/uniprot",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(
                response.data["results"], "taxonomy_subset"
            )
            for result in response.data["results"]:
                for taxon in result["taxonomy_subset"]:
                    self._check_taxonomy_from_searcher(taxon)

    def test_can_get_the_taxonomy_list_on_an_object(self):
        """A single entry object exposes its taxonomy_subset."""
        urls = [
            "/api/entry/interpro/IPR003165/taxonomy/uniprot",
            "/api/entry/pfam/PF02171/taxonomy/uniprot",
            "/api/entry/unintegrated/pfam/PF17176/taxonomy/uniprot",
            "/api/entry/interpro/IPR003165/pfam/PF02171/taxonomy/uniprot",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_entry_details(response.data["metadata"])
            self.assertIn("taxonomy_subset", response.data)
            for org in response.data["taxonomy_subset"]:
                self._check_taxonomy_from_searcher(org)

    def test_can_filter_entry_counter_with_taxonomy_acc(self):
        """Filtering by a taxonomy accession still yields an entry overview."""
        urls = ["/api/entry/taxonomy/uniprot/2579", "/api/entry/taxonomy/uniprot/40296"]
        for url in urls:
            response = self._get_ok(url)
            self._check_entry_count_overview(response.data)

    def test_can_get_the_taxonomy_object_on_a_list(self):
        """Entry lists filtered by a taxon return matching taxa per entry."""
        acc = "IPR003165"
        urls = [
            "/api/entry/interpro/taxonomy/uniprot/2579",
            "/api/entry/unintegrated/taxonomy/uniprot/2579",
            "/api/entry/unintegrated/taxonomy/uniprot/344612",
            "/api/entry/interpro/" + acc + "/pfam/taxonomy/uniprot/344612",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(response.data["results"], "taxa")
            for result in response.data["results"]:
                for org in result["taxa"]:
                    self._check_taxonomy_from_searcher(org)

    def test_can_get_the_taxonomy_object_on_an_object(self):
        """A single entry filtered by a single taxon returns that taxon.

        Renamed from ``test_can_get_thetaxonomy_object_on_an_object`` — the
        original name had a typo ("thetaxonomy").
        """
        urls = [
            "/api/entry/interpro/IPR003165/taxonomy/uniprot/40296",
            "/api/entry/unintegrated/pfam/PF17176/taxonomy/uniprot/344612",
            "/api/entry/unintegrated/pfam/PF17176/taxonomy/uniprot/1",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_entry_details(response.data["metadata"])
            self.assertIn("taxa", response.data)
            for org in response.data["taxa"]:
                self._check_taxonomy_from_searcher(org)
class ProteinTaxonomyTest(InterproRESTTestCase):
    """Exercises the taxonomy modifier and filters on /api/protein endpoints."""

    def _get_ok(self, url):
        # Fetch *url* and require HTTP 200; the URL is echoed on failure.
        response = self.client.get(url)
        self.assertEqual(
            response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
        )
        return response

    def test_can_get_the_taxonomy_count(self):
        response = self.client.get("/api/protein/taxonomy")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_taxonomy_count_overview(response.data)
        self._check_protein_count_overview(response.data)

    def test_can_get_the_taxonomy_count_on_a_list(self):
        for db in ("reviewed", "unreviewed", "uniprot"):
            response = self._get_ok("/api/protein/{}/taxonomy/".format(db))
            results = response.data["results"]
            self._check_is_list_of_objects_with_key(results, "metadata")
            self._check_is_list_of_objects_with_key(results, "taxa")
            for result in results:
                self._check_taxonomy_count_overview(result)

    def test_urls_that_return_protein_with_taxonomy_count(self):
        cases = (
            ("uniprot", "A1CUJ5"),
            ("uniprot", "P16582"),
            ("reviewed", "A1CUJ5"),
            ("unreviewed", "P16582"),
        )
        for db, acc in cases:
            response = self._get_ok("/api/protein/{}/{}/taxonomy/".format(db, acc))
            self._check_protein_details(response.data["metadata"])
            self.assertIn(
                "taxa",
                response.data,
                "'taxa' should be one of the keys in the response",
            )
            self._check_taxonomy_count_overview(response.data)

    def test_can_filter_protein_counter_with_taxonomy_db(self):
        response = self._get_ok("/api/protein/taxonomy/uniprot")
        counters = response.data["proteins"]
        # "uniprot" is always present; the source-specific buckets only when
        # the fixtures actually contain such proteins.
        for source in ("uniprot", "reviewed", "unreviewed"):
            if source != "uniprot" and source not in counters:
                continue
            self.assertIn(
                "proteins",
                counters[source],
                "'proteins' should be one of the keys in the response",
            )
            self.assertIn(
                "taxa",
                counters[source],
                "'taxa' should be one of the keys in the response",
            )

    def test_can_get_the_taxonomy_list_on_a_list(self):
        for db in ("unreviewed", "reviewed", "uniprot"):
            response = self._get_ok("/api/protein/{}/taxonomy/uniprot".format(db))
            results = response.data["results"]
            self._check_is_list_of_objects_with_key(results, "metadata")
            self._check_is_list_of_objects_with_key(results, "taxonomy_subset")
            for result in results:
                for org in result["taxonomy_subset"]:
                    self._check_taxonomy_from_searcher(org)

    def test_can_get_the_taxonomy_list_on_an_object(self):
        urls = (
            "/api/protein/uniprot/A0A0A2L2G2/taxonomy/uniprot",
            "/api/protein/unreviewed/P16582/taxonomy/uniprot/",
            "/api/protein/reviewed/A1CUJ5/taxonomy/uniprot",
        )
        for url in urls:
            response = self._get_ok(url)
            self._check_protein_details(response.data["metadata"])
            self.assertIn("taxonomy_subset", response.data)
            for org in response.data["taxonomy_subset"]:
                self._check_taxonomy_from_searcher(org)

    def test_can_filter_counter_with_taxonomy_acc(self):
        for tax_id in ("2579", "40296"):
            response = self._get_ok("/api/protein/taxonomy/uniprot/" + tax_id)
            self._check_protein_count_overview(response.data)

    def test_can_get_the_taxonomy_object_on_a_list(self):
        urls = (
            "/api/protein/reviewed/taxonomy/uniprot/2579",
            "/api/protein/uniprot/taxonomy/uniprot/344612",
        )
        for url in urls:
            response = self._get_ok(url)
            results = response.data["results"]
            self._check_is_list_of_objects_with_key(results, "metadata")
            self._check_is_list_of_objects_with_key(results, "taxa")
            for result in results:
                for org in result["taxa"]:
                    self._check_taxonomy_from_searcher(org)

    def test_can_get_the_taxonomy_object_on_an_object(self):
        urls = (
            "/api/protein/uniprot/A0A0A2L2G2/taxonomy/uniprot/40296",
            "/api/protein/unreviewed/P16582/taxonomy/uniprot/40296",
            "/api/protein/reviewed/A1CUJ5/taxonomy/uniprot/2579",
            "/api/protein/reviewed/A1CUJ5/taxonomy/uniprot/344612",
        )
        for url in urls:
            response = self._get_ok(url)
            self._check_protein_details(response.data["metadata"])
            self.assertIn("taxa", response.data)
            for org in response.data["taxa"]:
                self._check_taxonomy_from_searcher(org)
class StructureTaxonomyTest(InterproRESTTestCase):
    """Exercises the taxonomy modifier and filters on /api/structure endpoints."""

    def _get_ok(self, url):
        # Fetch *url* and require HTTP 200; the URL is echoed on failure.
        response = self.client.get(url)
        self.assertEqual(
            response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
        )
        return response

    def test_can_get_the_taxonomy_count(self):
        response = self.client.get("/api/structure/taxonomy")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_taxonomy_count_overview(response.data)
        self._check_structure_count_overview(response.data)

    def test_can_get_the_taxonomy_count_on_a_list(self):
        response = self._get_ok("/api/structure/pdb/taxonomy/")
        results = response.data["results"]
        self._check_is_list_of_objects_with_key(results, "metadata")
        self._check_is_list_of_objects_with_key(results, "taxa")
        for result in results:
            self._check_taxonomy_count_overview(result)

    def test_urls_that_return_structure_with_taxonomy_count(self):
        for pdb in ("1JM7", "2BKM", "1T2V"):
            response = self._get_ok("/api/structure/pdb/" + pdb + "/taxonomy/")
            self._check_structure_details(response.data["metadata"])
            self.assertIn(
                "taxa",
                response.data,
                "'taxa' should be one of the keys in the response",
            )
            self._check_taxonomy_count_overview(response.data)

    def test_can_filter_structure_counter_with_taxonomy_db(self):
        response = self._get_ok("/api/structure/taxonomy/uniprot")
        pdb_counters = response.data["structures"]["pdb"]
        self.assertIn(
            "structures",
            pdb_counters,
            "'structures' should be one of the keys in the response",
        )
        self.assertIn(
            "taxa",
            pdb_counters,
            "'taxa' should be one of the keys in the response",
        )

    def test_can_get_the_taxonomy_list_on_a_list(self):
        response = self._get_ok("/api/structure/pdb/taxonomy/uniprot")
        results = response.data["results"]
        self._check_is_list_of_objects_with_key(results, "metadata")
        self._check_is_list_of_objects_with_key(results, "taxonomy_subset")
        for result in results:
            for org in result["taxonomy_subset"]:
                self._check_taxonomy_from_searcher(org)

    def test_can_get_the_taxonomy_list_on_an_object(self):
        for pdb in ("1T2V", "1JZ8"):
            response = self._get_ok(
                "/api/structure/pdb/{}/taxonomy/uniprot".format(pdb)
            )
            self._check_structure_details(response.data["metadata"])
            self.assertIn("taxonomy_subset", response.data)
            for org in response.data["taxonomy_subset"]:
                self._check_taxonomy_from_searcher(org)

    def test_can_filter_counter_with_taxonomy_acc(self):
        for tax_id in ("2579", "40296"):
            response = self._get_ok("/api/structure/taxonomy/uniprot/" + tax_id)
            self._check_structure_count_overview(response.data)

    def test_can_get_the_taxonomy_object_on_a_list(self):
        for tax_id in ("2", "2579"):
            response = self._get_ok("/api/structure/pdb/taxonomy/uniprot/" + tax_id)
            results = response.data["results"]
            self._check_is_list_of_objects_with_key(results, "metadata")
            self._check_is_list_of_objects_with_key(results, "taxa")
            for result in results:
                for org in result["taxa"]:
                    self._check_taxonomy_from_searcher(org)

    def test_can_get_the_taxonomy_object_on_an_object(self):
        cases = (("1T2V", "40296"), ("1JZ8", "1"), ("1JZ8", "40296"))
        for pdb, tax_id in cases:
            url = "/api/structure/pdb/{}/taxonomy/uniprot/{}".format(pdb, tax_id)
            response = self._get_ok(url)
            self._check_structure_details(response.data["metadata"])
            self.assertIn("taxa", response.data)
            for org in response.data["taxa"]:
                self._check_taxonomy_from_searcher(org)
class SetTaxonomyTest(InterproRESTTestCase):
    """Tests for the taxonomy modifier and filters on /api/set endpoints.

    The kegg-based cases are kept commented out until the kegg set database
    is re-enabled in the API.
    """

    def _get_ok(self, url):
        """GET *url*, assert HTTP 200 (reporting the URL on failure), and
        return the response."""
        response = self.client.get(url)
        self.assertEqual(
            response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
        )
        return response

    def test_can_get_the_taxonomy_count(self):
        """/api/set/taxonomy reports both set and taxonomy count overviews."""
        response = self.client.get("/api/set/taxonomy")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_set_count_overview(response.data)
        self._check_taxonomy_count_overview(response.data)

    def test_can_get_the_taxonomy_count_on_a_list(self):
        """Every set in a list view carries a taxonomy count overview."""
        urls = [
            "/api/set/pfam/taxonomy",
            # "/api/set/kegg/taxonomy",
            # "/api/set/kegg/KEGG01/node/taxonomy",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(response.data["results"], "taxa")
            for result in response.data["results"]:
                self._check_taxonomy_count_overview(result)

    def test_can_get_the_taxonomy_count_on_a_set(self):
        """A single set object includes a 'taxa' key with count overviews."""
        urls = [
            "/api/set/pfam/CL0001/taxonomy",
            # "/api/set/kegg/KEGG01/taxonomy",
            # "/api/set/kegg/KEGG01/node/KEGG01-1/taxonomy",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_set_details(response.data["metadata"])
            self.assertIn(
                "taxa",
                response.data,
                "'taxa' should be one of the keys in the response",
            )
            self._check_taxonomy_count_overview(response.data)

    def test_can_filter_set_counter_with_taxonomy_db(self):
        """Filtering the set counter by the taxonomy DB nests 'taxa' counts.

        Renamed from ``test_can_filter_set_counter_with_structure_db``: the
        URL filters by the taxonomy database, not by a structure database.
        """
        response = self._get_ok("/api/set/taxonomy/uniprot")
        self.assertIsInstance(response.data, dict)
        # if "kegg" in response.data["sets"]:
        #     self.assertIn("taxa", response.data["sets"]["kegg"],
        #                   "'taxa' should be one of the keys in the response")
        #     self.assertIn("sets", response.data["sets"]["kegg"],
        #                   "'sets' should be one of the keys in the response")
        if "pfam" in response.data["sets"]:
            self.assertIn(
                "taxa",
                response.data["sets"]["pfam"],
                "'taxa' should be one of the keys in the response",
            )
            self.assertIn(
                "sets",
                response.data["sets"]["pfam"],
                "'sets' should be one of the keys in the response",
            )

    def test_can_get_the_set_list_on_a_list(self):
        """Placeholder until kegg sets return: the URL list is empty."""
        urls = [
            # "/api/set/kegg/taxonomy/uniprot",
            # "/api/set/kegg/kegg01/node/taxonomy/uniprot",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(response.data["results"], "taxa")
            for result in response.data["results"]:
                for s in result["taxa"]:
                    self._check_taxonomy_from_searcher(s)

    def test_can_get_a_list_from_the_set_object(self):
        """A single set exposes its taxonomy_subset."""
        urls = [
            "/api/set/pfam/Cl0001/taxonomy/uniprot",
            # "/api/set/kegg/kegg01/node/KEGG01-1/taxonomy/uniprot/",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_set_details(response.data["metadata"], True)
            self.assertIn("taxonomy_subset", response.data)
            for st in response.data["taxonomy_subset"]:
                self._check_taxonomy_from_searcher(st)

    def test_can_filter_set_counter_with_acc(self):
        """Filtering by a taxonomy accession still yields a set overview."""
        urls = [
            "/api/set/taxonomy/uniprot/1",
            "/api/set/taxonomy/uniprot/2579",
            "/api/set/taxonomy/uniprot/344612",
            "/api/set/taxonomy/uniprot/1001583",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_set_count_overview(response.data)

    # def test_can_get_object_on_a_set_list(self):
    #     urls = [
    #         # "/api/set/kegg/taxonomy/uniprot/2579",
    #         # "/api/set/kegg/taxonomy/uniprot/344612",
    #     ]
    #     for url in urls:
    #         response = self.client.get(url)
    #         self.assertEqual(response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url))
    #         self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
    #         self._check_is_list_of_objects_with_key(response.data["results"], "taxa")
    #         for result in response.data["results"]:
    #             self._check_set_details(result["metadata"], False)
    #             for st in result["taxa"]:
    #                 self._check_taxonomy_from_searcher(st)

    def test_can_get_an_object_from_the_set_object(self):
        """A set filtered by a single taxon returns that taxon."""
        urls = [
            # "/api/set/kegg/kegg01/taxonomy/uniprot/2",
            # "/api/set/kegg/kegg01/taxonomy/uniprot/40296",
            # "/api/set/kegg/kegg01/node/kegg01-1/taxonomy/uniprot/40296",
            "/api/set/pfam/Cl0001/taxonomy/uniprot/344612"
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_set_details(response.data["metadata"])
            self.assertIn("taxa", response.data)
            for s in response.data["taxa"]:
                self._check_taxonomy_from_searcher(s)
class TaxonomyEntryTest(InterproRESTTestCase):
    """Tests for the entry modifier and entry filters on /api/taxonomy."""

    def _get_ok(self, url):
        """GET *url*, assert HTTP 200 (reporting the URL on failure), and
        return the response."""
        response = self.client.get(url)
        self.assertEqual(
            response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
        )
        return response

    def test_can_get_the_taxonomy_count(self):
        """/api/taxonomy/entry reports both taxonomy and entry overviews."""
        response = self.client.get("/api/taxonomy/entry")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_taxonomy_count_overview(response.data)
        self._check_entry_count_overview(response.data)

    def test_can_get_the_entry_count_on_a_list(self):
        """Every taxon in the list view carries an entry count overview."""
        response = self._get_ok("/api/taxonomy/uniprot/entry")
        self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
        self._check_is_list_of_objects_with_key(response.data["results"], "entries")
        for result in response.data["results"]:
            self._check_entry_count_overview(result)

    def test_a_more_inclusive_taxon_has_more_items(self):
        """An ancestor taxon must match strictly more entries than a leaf."""
        response1 = self.client.get("/api/taxonomy/uniprot/2579/entry")
        response2 = self.client.get("/api/taxonomy/uniprot/1001583/entry")
        self.assertEqual(response1.status_code, status.HTTP_200_OK)
        self.assertEqual(response2.status_code, status.HTTP_200_OK)
        self.assertGreater(
            response1.data["entries"]["all"], response2.data["entries"]["all"]
        )

    def test_urls_that_return_taxonomy_with_entry_count(self):
        """A single taxon object includes an 'entries' count overview."""
        urls = ["/api/taxonomy/uniprot/40296/entry", "/api/taxonomy/uniprot/2/entry"]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_details(response.data["metadata"])
            self.assertIn(
                "entries",
                response.data,
                "'entries' should be one of the keys in the response",
            )
            self._check_entry_count_overview(response.data)

    def test_can_filter_taxonomy_counter_with_entry_db(self):
        """Filtering the taxonomy counter by an entry DB nests both counts."""
        acc = "IPR003165"
        urls = [
            "/api/taxonomy/entry/interpro",
            "/api/taxonomy/entry/pfam",
            "/api/taxonomy/entry/unintegrated",
            "/api/taxonomy/entry/unintegrated/pfam",
            "/api/taxonomy/entry/interpro/pfam",
            "/api/taxonomy/entry/interpro/" + acc + "/pfam",
        ]
        for url in urls:
            response = self._get_ok(url)
            self.assertIsInstance(response.data, dict)
            self.assertIn(
                "uniprot",
                response.data["taxa"],
                "'uniprot' should be one of the keys in the response",
            )
            # Message fixed: this assertion checks the "taxa" key (the
            # original message wrongly referred to 'proteome').
            self.assertIn(
                "taxa",
                response.data["taxa"]["uniprot"],
                "'taxa' should be one of the keys in the response",
            )
            self.assertIn(
                "entries",
                response.data["taxa"]["uniprot"],
                "'entries' should be one of the keys in the response",
            )

    def test_can_get_a_list_from_the_taxonomy_list(self):
        """Taxon list views expose per-taxon entry_subset lists."""
        urls = [
            "/api/taxonomy/uniprot/entry/interpro",
            "/api/taxonomy/uniprot/entry/unintegrated",
            "/api/taxonomy/uniprot/entry/interpro/pfam",
            "/api/taxonomy/uniprot/entry/unintegrated/pfam",
            "/api/taxonomy/uniprot/entry/interpro/IPR003165/pfam",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(
                response.data["results"], "entry_subset"
            )
            for result in response.data["results"]:
                self._check_taxonomy_details(result["metadata"], False)
                for st in result["entry_subset"]:
                    self._check_entry_from_searcher(st)

    def test_can_get_a_list_from_the_taxonomy_object(self):
        """A single taxon exposes its entry_subset."""
        urls = [
            "/api/taxonomy/uniprot/40296/entry/interpro",
            "/api/taxonomy/uniprot/1/entry/interpro/pfam",
            "/api/taxonomy/uniprot/2579/entry/unintegrated/pfam",
            "/api/taxonomy/uniprot/344612/entry/unintegrated/pfam",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_details(response.data["metadata"], False)
            self.assertIn("entry_subset", response.data)
            for st in response.data["entry_subset"]:
                self._check_entry_from_searcher(st)

    def test_can_filter_taxonomy_counter_with_acc(self):
        """Filtering by an entry accession still yields a taxonomy overview."""
        acc = "IPR003165"
        pfam = "PF02171"
        pfam_un = "PF17176"
        urls = [
            "/api/taxonomy/entry/interpro/" + acc,
            "/api/taxonomy/entry/pfam/" + pfam,
            "/api/taxonomy/entry/pfam/" + pfam_un,
            "/api/taxonomy/entry/interpro/" + acc + "/pfam/" + pfam,
            "/api/taxonomy/entry/interpro/pfam/" + pfam,
            "/api/taxonomy/entry/unintegrated/pfam/" + pfam_un,
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_count_overview(response.data)

    def test_can_get_object_on_a_taxonomy_list(self):
        """Taxon lists filtered by an entry accession return matching entries."""
        acc = "IPR003165"
        pfam = "PF02171"
        pfam_un = "PF17176"
        urls = [
            "/api/taxonomy/uniprot/entry/interpro/" + acc,
            "/api/taxonomy/uniprot/entry/unintegrated/pfam/" + pfam_un,
            "/api/taxonomy/uniprot/entry/interpro/pfam/" + pfam,
            "/api/taxonomy/uniprot/entry/interpro/IPR003165/pfam/" + pfam,
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(response.data["results"], "entries")
            for result in response.data["results"]:
                self._check_taxonomy_details(result["metadata"], False)
                for st in result["entries"]:
                    self._check_entry_from_searcher(st)

    def test_can_get_an_object_from_the_taxonomy_object(self):
        """A taxon filtered by one entry accession (case-insensitive)."""
        urls = [
            "/api/taxonomy/uniprot/40296/entry/interpro/ipr003165",
            "/api/taxonomy/uniprot/1/entry/interpro/pfam/pf02171",
            "/api/taxonomy/uniprot/2579/entry/unintegrated/pfam/pf17176",
            "/api/taxonomy/uniprot/344612/entry/unintegrated/pfam/pf17176",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_details(response.data["metadata"], False)
            self.assertIn("entries", response.data)
            for st in response.data["entries"]:
                self._check_entry_from_searcher(st)
class TaxonomyProteinTest(InterproRESTTestCase):
    """Tests for the protein modifier and protein filters on /api/taxonomy."""

    def _get_ok(self, url):
        """GET *url*, assert HTTP 200 (reporting the URL on failure), and
        return the response."""
        response = self.client.get(url)
        self.assertEqual(
            response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
        )
        return response

    def test_can_get_the_taxonomy_count(self):
        """/api/taxonomy/protein reports both taxonomy and protein overviews."""
        response = self.client.get("/api/taxonomy/protein")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_taxonomy_count_overview(response.data)
        self._check_protein_count_overview(response.data)

    def test_can_get_the_protein_count_on_a_list(self):
        """Every taxon in the list view carries a protein count overview."""
        response = self._get_ok("/api/taxonomy/uniprot/protein")
        self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
        self._check_is_list_of_objects_with_key(response.data["results"], "proteins")
        for result in response.data["results"]:
            self._check_protein_count_overview(result)

    def test_a_more_inclusive_taxon_has_more_items(self):
        """An ancestor taxon must match strictly more proteins than a leaf."""
        response1 = self.client.get("/api/taxonomy/uniprot/2579/protein")
        response2 = self.client.get("/api/taxonomy/uniprot/1001583/protein")
        self.assertEqual(response1.status_code, status.HTTP_200_OK)
        self.assertEqual(response2.status_code, status.HTTP_200_OK)
        self.assertGreater(
            response1.data["proteins"]["uniprot"], response2.data["proteins"]["uniprot"]
        )

    def test_urls_that_return_taxonomy_with_protein_count(self):
        """A single taxon object includes a 'proteins' count overview.

        Renamed from ``test_urls_that_return_taxonomy_with_entry_count``: the
        assertions are about protein counts, not entry counts.
        """
        urls = [
            "/api/taxonomy/uniprot/40296/protein",
            "/api/taxonomy/uniprot/2/protein",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_details(response.data["metadata"])
            self.assertIn(
                "proteins",
                response.data,
                "'proteins' should be one of the keys in the response",
            )
            self._check_protein_count_overview(response.data)

    def test_can_filter_protein_counter_with_taxonomy_db(self):
        """Filtering the taxonomy counter by a protein DB nests both counts."""
        urls = [
            "/api/taxonomy/protein/uniprot",
            "/api/taxonomy/protein/reviewed",
            "/api/taxonomy/protein/unreviewed",
        ]
        for url in urls:
            response = self._get_ok(url)
            self.assertIsInstance(response.data, dict)
            self.assertIn(
                "uniprot",
                response.data["taxa"],
                "'uniprot' should be one of the keys in the response",
            )
            self.assertIn(
                "taxa",
                response.data["taxa"]["uniprot"],
                "'taxa' should be one of the keys in the response",
            )
            self.assertIn(
                "proteins",
                response.data["taxa"]["uniprot"],
                "'proteins' should be one of the keys in the response",
            )

    def test_can_get_a_list_from_the_taxonomy_list(self):
        """Taxon list views expose per-taxon protein_subset lists."""
        urls = [
            "/api/taxonomy/uniprot/protein/uniprot",
            "/api/taxonomy/uniprot/protein/unreviewed",
            "/api/taxonomy/uniprot/protein/reviewed",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(
                response.data["results"], "protein_subset"
            )
            for result in response.data["results"]:
                self._check_taxonomy_details(result["metadata"], False)
                for st in result["protein_subset"]:
                    self._check_match(st, include_coordinates=False)

    def test_can_get_a_list_from_the_taxonomy_object(self):
        """A single taxon exposes its protein_subset."""
        urls = [
            "/api/taxonomy/uniprot/40296/protein/uniprot",
            "/api/taxonomy/uniprot/1/protein/unreviewed",
            "/api/taxonomy/uniprot/2579/protein/reviewed",
            "/api/taxonomy/uniprot/344612/protein/reviewed",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_details(response.data["metadata"], False)
            self.assertIn("protein_subset", response.data)
            for st in response.data["protein_subset"]:
                self._check_match(st, include_coordinates=False)

    def test_can_filter_taxonomy_counter_with_acc(self):
        """Filtering by a protein accession still yields a taxonomy overview."""
        urls = [
            "/api/taxonomy/protein/uniprot/M5ADK6",
            "/api/taxonomy/protein/unreviewed/A0A0A2L2G2",
            "/api/taxonomy/protein/reviewed/M5ADK6",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_count_overview(response.data)

    def test_can_get_object_on_a_taxonomy_list(self):
        """Taxon lists filtered by a protein accession return matching proteins."""
        urls = [
            "/api/taxonomy/uniprot/protein/uniprot/P16582",
            "/api/taxonomy/uniprot/protein/unreviewed/A0A0A2L2G2",
            "/api/taxonomy/uniprot/protein/reviewed/M5ADK6",
            "/api/taxonomy/uniprot/protein/reviewed/a1cuj5",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_is_list_of_objects_with_key(
                response.data["results"], "metadata"
            )
            self._check_is_list_of_objects_with_key(
                response.data["results"], "proteins"
            )
            for result in response.data["results"]:
                self._check_taxonomy_details(result["metadata"], False)
                for st in result["proteins"]:
                    self._check_match(st, include_coordinates=False)

    def test_can_get_an_object_from_the_taxonomy_object(self):
        """A taxon filtered by one protein accession (case-insensitive)."""
        urls = [
            "/api/taxonomy/uniprot/40296/protein/uniprot/p16582",
            "/api/taxonomy/uniprot/1/protein/reviewed/a1cuj5",
            "/api/taxonomy/uniprot/2579/protein/reviewed/a1cuj5",
            "/api/taxonomy/uniprot/344612/protein/reviewed/a1cuj5",
        ]
        for url in urls:
            response = self._get_ok(url)
            self._check_taxonomy_details(response.data["metadata"], False)
            self.assertIn("proteins", response.data)
            for st in response.data["proteins"]:
                self._check_match(st, include_coordinates=False)
class TaxonomyStructureTest(InterproRESTTestCase):
def test_can_get_the_taxonomy_count(self):
response = self.client.get("/api/taxonomy/structure")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self._check_taxonomy_count_overview(response.data)
self._check_structure_count_overview(response.data)
def test_can_get_the_protein_count_on_a_list(self):
url = "/api/taxonomy/uniprot/structure"
response = self.client.get(url)
self.assertEqual(
response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
)
self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
self._check_is_list_of_objects_with_key(response.data["results"], "structures")
for result in response.data["results"]:
self._check_structure_count_overview(result)
def test_a_more_inclusive_taxon_has_more_items(self):
response1 = self.client.get("/api/taxonomy/uniprot/1/structure")
response2 = self.client.get("/api/taxonomy/uniprot/1001583/structure")
self.assertEqual(response1.status_code, status.HTTP_200_OK)
self.assertEqual(response2.status_code, status.HTTP_200_OK)
self.assertGreater(
response1.data["structures"]["pdb"], response2.data["structures"]["pdb"]
)
def test_urls_that_return_taxonomy_with_entry_count(self):
urls = [
"/api/taxonomy/uniprot/40296/structure",
"/api/taxonomy/uniprot/2/structure",
]
for url in urls:
response = self.client.get(url)
self.assertEqual(
response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
)
self._check_taxonomy_details(response.data["metadata"])
self.assertIn(
"structures",
response.data,
"'structures' should be one of the keys in the response",
)
self._check_structure_count_overview(response.data)
def test_can_filter_structure_counter_with_taxonomy_db(self):
url = "/api/taxonomy/structure/pdb"
response = self.client.get(url)
self.assertEqual(
response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
)
self.assertIsInstance(response.data, dict)
self.assertIn(
"uniprot",
response.data["taxa"],
"'uniprot' should be one of the keys in the response",
)
self.assertIn(
"structures",
response.data["taxa"]["uniprot"],
"'structures' should be one of the keys in the response",
)
self.assertIn(
"taxa",
response.data["taxa"]["uniprot"],
"'taxa' should be one of the keys in the response",
)
def test_can_get_a_list_from_the_taxonomy_list(self):
url = "/api/taxonomy/uniprot/structure/pdb"
response = self.client.get(url)
self.assertEqual(
response.status_code, status.HTTP_200_OK, "URL : [{}]".format(url)
)
self._check_is_list_of_objects_with_key(response.data["results"], "metadata")
self._check_is_list_of_objects_with_key(
response.data["results"], "structure_subset"
)
for result in response.data["results"]:
self._check_taxonomy_details(result["metadata"], False)
for st in result["structure_subset"]:
self._check_structure_chain_details(st)
def test_can_get_a_list_from_the_taxonomy_object(self):
    """A single taxon filtered by a structure DB includes chain subsets."""
    endpoints = (
        "/api/taxonomy/uniprot/40296/structure/pdb",
        "/api/taxonomy/uniprot/1/structure/pdb",
        "/api/taxonomy/uniprot/2579/structure/pdb",
        "/api/taxonomy/uniprot/344612/structure/pdb",
    )
    for endpoint in endpoints:
        reply = self.client.get(endpoint)
        self.assertEqual(
            reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
        )
        self._check_taxonomy_details(reply.data["metadata"], False)
        self.assertIn("structure_subset", reply.data)
        for chain in reply.data["structure_subset"]:
            self._check_structure_chain_details(chain)
def test_can_filter_taxonomy_counter_with_acc(self):
    """Filtering the counter by a structure accession still returns counts."""
    for endpoint in (
        "/api/taxonomy/structure/pdb/1JM7",
        "/api/taxonomy/structure/pdb/1JZ8",
    ):
        reply = self.client.get(endpoint)
        self.assertEqual(
            reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
        )
        self._check_taxonomy_count_overview(reply.data)
def test_can_get_object_on_a_taxonomy_list(self):
    """Taxa list filtered by a structure accession carries matched chains."""
    for endpoint in (
        "/api/taxonomy/uniprot/structure/pdb/1JM7",
        "/api/taxonomy/uniprot/structure/pdb/1JZ8",
    ):
        reply = self.client.get(endpoint)
        self.assertEqual(
            reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
        )
        results = reply.data["results"]
        self._check_is_list_of_objects_with_key(results, "metadata")
        self._check_is_list_of_objects_with_key(results, "structures")
        for item in results:
            self._check_taxonomy_details(item["metadata"], False)
            for chain in item["structures"]:
                self._check_structure_chain_details(chain)
def test_can_get_an_object_from_the_taxonomy_object(self):
    """A taxon combined with a structure accession returns matched chains."""
    endpoints = (
        "/api/taxonomy/uniprot/40296/structure/pdb/1t2v",
        "/api/taxonomy/uniprot/1/structure/pdb/1jm7",
        "/api/taxonomy/uniprot/2579/structure/pdb/1jm7",
        "/api/taxonomy/uniprot/344612/structure/pdb/1jm7",
    )
    for endpoint in endpoints:
        reply = self.client.get(endpoint)
        self.assertEqual(
            reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
        )
        self._check_taxonomy_details(reply.data["metadata"], False)
        self.assertIn("structures", reply.data)
        for chain in reply.data["structures"]:
            self._check_structure_chain_details(chain)
class TaxonomySetTest(InterproRESTTestCase):
    """Tests for the /api/taxonomy endpoints combined with the set endpoint."""

    def test_can_get_the_taxonomy_count(self):
        """The combined counter endpoint reports set and taxonomy counts."""
        reply = self.client.get("/api/taxonomy/set")
        self.assertEqual(reply.status_code, status.HTTP_200_OK)
        self._check_set_count_overview(reply.data)
        self._check_taxonomy_count_overview(reply.data)

    def test_can_get_the_set_count_on_a_list(self):
        """Every taxon in the list view carries a set counter."""
        endpoint = "/api/taxonomy/uniprot/set"
        reply = self.client.get(endpoint)
        self.assertEqual(
            reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
        )
        results = reply.data["results"]
        self._check_is_list_of_objects_with_key(results, "metadata")
        self._check_is_list_of_objects_with_key(results, "sets")
        for item in results:
            self._check_set_count_overview(item)

    def test_urls_that_return_taxonomy_with_set_count(self):
        """Taxon detail views include a set counter."""
        for endpoint in (
            "/api/taxonomy/uniprot/1001583/set",
            "/api/taxonomy/uniprot/1/set",
        ):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            self._check_taxonomy_details(reply.data["metadata"])
            self.assertIn(
                "sets",
                reply.data,
                "'sets' should be one of the keys in the response",
            )
            self._check_set_count_overview(reply.data)

    def test_can_filter_taxonomy_counter_with_taxonomy_db(self):
        """Filtering the counter by a set DB yields per-source counters."""
        # NOTE: kegg-based URLs were disabled in this test before this rewrite.
        for endpoint in ("/api/taxonomy/set/pfam",):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            taxa_payload = reply.data["taxa"]
            self.assertIn(
                "uniprot",
                taxa_payload,
                "'uniprot' should be one of the keys in the response",
            )
            for key in ("taxa", "sets"):
                self.assertIn(
                    key,
                    taxa_payload["uniprot"],
                    "'{}' should be one of the keys in the response".format(key),
                )

    def test_can_get_the_set_list_on_a_list(self):
        """Taxa list filtered by a set DB exposes a set subset per taxon."""
        # NOTE: the kegg variant of this URL was disabled before this rewrite.
        for endpoint in ("/api/taxonomy/uniprot/set/pfam",):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            results = reply.data["results"]
            self._check_is_list_of_objects_with_key(results, "metadata")
            self._check_is_list_of_objects_with_key(results, "set_subset")
            for item in results:
                for subset in item["set_subset"]:
                    self._check_set_from_searcher(subset)

    def test_can_get_the_set_list_on_a__tax_object(self):
        """A single taxon filtered by a set DB exposes its set subset."""
        # NOTE: kegg variants of this URL were disabled before this rewrite.
        for endpoint in ("/api/taxonomy/uniprot/2579/set/pfam",):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            self._check_taxonomy_details(reply.data["metadata"])
            self.assertIn("set_subset", reply.data)
            for subset in reply.data["set_subset"]:
                self._check_set_from_searcher(subset)

    def test_can_filter_counter_with_set_acc(self):
        """The taxonomy counter filtered by a set accession still responds."""
        # NOTE: kegg variants of this URL were disabled before this rewrite.
        for endpoint in ("/api/taxonomy/set/pfam/Cl0001",):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            self._check_taxonomy_count_overview(reply.data)

    def test_can_get_the_set_object_on_a_list(self):
        """Taxa list filtered by a set accession carries matched sets."""
        # NOTE: kegg variants of this URL were disabled before this rewrite.
        for endpoint in ("/api/taxonomy/uniprot/set/pfam/Cl0001",):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            results = reply.data["results"]
            self._check_is_list_of_objects_with_key(results, "metadata")
            self._check_is_list_of_objects_with_key(results, "sets")
            for item in results:
                for matched_set in item["sets"]:
                    self._check_set_from_searcher(matched_set)

    def test_can_get_the_object_on_an_object(self):
        """A taxon combined with a set accession returns matched sets."""
        # NOTE: kegg variants of this URL were disabled before this rewrite.
        for endpoint in ("/api/taxonomy/uniprot/344612/set/pfam/Cl0001",):
            reply = self.client.get(endpoint)
            self.assertEqual(
                reply.status_code, status.HTTP_200_OK, f"URL : [{endpoint}]"
            )
            self._check_taxonomy_details(reply.data["metadata"])
            self.assertIn("sets", reply.data)
            for matched_set in reply.data["sets"]:
                self._check_set_from_searcher(matched_set)
class TaxonomyPerEntryTest(InterproRESTTestCase):
    """Tests for taxonomy detail views filtered by a single entry accession."""

    def test_can_get_the_root_per_interpro(self):
        """The root taxon filtered by IPR001165 reports exactly one protein."""
        reply = self.client.get("/api/taxonomy/uniprot/1?filter_by_entry=IPR001165")
        self.assertEqual(reply.status_code, status.HTTP_200_OK)
        self._check_taxonomy_details(reply.data["metadata"])
        self.assertEqual(reply.data["metadata"]["counters"]["proteins"], 1)
        self.assertIsInstance(reply.data["children"], dict)

    def test_can_browse_lineage_with_children_key(self):
        """Walking down via 'children' reproduces the reported lineage."""
        for entry in ("IPR001165", "PF17180", "SM00950"):
            current = "1"
            visited = []
            reported_lineage = ""
            while current != "":
                visited.append(current)
                reply = self.client.get(
                    f"/api/taxonomy/uniprot/{current}?filter_by_entry={entry}"
                )
                self.assertEqual(reply.status_code, status.HTTP_200_OK)
                # Follow the first child, if any; stop at a leaf.
                child_ids = list(reply.data["children"].keys())
                current = child_ids[0] if child_ids else ""
                reported_lineage = reply.data["metadata"]["lineage"]
            self.assertEqual(reported_lineage.strip(), " ".join(visited))

    def test_error_query(self):
        """An unknown entry accession yields 204 No Content."""
        reply = self.client.get("/api/taxonomy/uniprot/1?filter_by_entry=XXX")
        self.assertEqual(reply.status_code, status.HTTP_204_NO_CONTENT)
class TaxonomyPerEntryDBTest(InterproRESTTestCase):
    """Tests for taxonomy detail views filtered by a member database."""

    def test_can_get_the_root_per_interpro(self):
        """The root taxon filtered by the interpro DB reports two entries."""
        reply = self.client.get(
            "/api/taxonomy/uniprot/1?filter_by_entry_db=interpro"
        )
        self.assertEqual(reply.status_code, status.HTTP_200_OK)
        self._check_taxonomy_details(reply.data["metadata"])
        self.assertEqual(reply.data["metadata"]["counters"]["entries"], 2)
        self.assertIsInstance(reply.data["children"], dict)

    def test_can_browse_lineage_with_children_key(self):
        """Walking down via 'children' reproduces the reported lineage."""
        for db in ("interpro", "pfam", "profile", "smart"):
            current = "1"
            visited = []
            reported_lineage = ""
            while current != "":
                visited.append(current)
                reply = self.client.get(
                    f"/api/taxonomy/uniprot/{current}?filter_by_entry_db={db}"
                )
                self.assertEqual(reply.status_code, status.HTTP_200_OK)
                # Follow the first child, if any; stop at a leaf.
                child_ids = list(reply.data["children"].keys())
                current = child_ids[0] if child_ids else ""
                reported_lineage = reply.data["metadata"]["lineage"]
            self.assertEqual(reported_lineage.strip(), " ".join(visited))

    def test_error_query(self):
        """An unknown database name yields 204 No Content."""
        reply = self.client.get("/api/taxonomy/uniprot/1?filter_by_entry_db=XXX")
        self.assertEqual(reply.status_code, status.HTTP_204_NO_CONTENT)
| 42.873913
| 102
| 0.590694
| 6,569
| 59,166
| 5.06074
| 0.02801
| 0.085188
| 0.035976
| 0.055348
| 0.927746
| 0.886957
| 0.831879
| 0.798189
| 0.765401
| 0.744225
| 0
| 0.024535
| 0.292516
| 59,166
| 1,379
| 103
| 42.905004
| 0.769655
| 0.039381
| 0
| 0.613139
| 0
| 0
| 0.208417
| 0.114298
| 0
| 0
| 0
| 0
| 0.140308
| 1
| 0.072182
| false
| 0
| 0.002433
| 0
| 0.084347
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c099e38c328a7185742a9021971ff4c9c838a38c
| 128
|
py
|
Python
|
src/test/python/testDataSetRepo/provider/library/b/c.py
|
ninjapapa/SMV2
|
42cf9f176c3ec0bed61f66fbf859c18d97027dd6
|
[
"Apache-2.0"
] | null | null | null |
src/test/python/testDataSetRepo/provider/library/b/c.py
|
ninjapapa/SMV2
|
42cf9f176c3ec0bed61f66fbf859c18d97027dd6
|
[
"Apache-2.0"
] | 34
|
2022-02-26T04:27:34.000Z
|
2022-03-29T23:05:47.000Z
|
src/test/python/testDataSetRepo/provider/library/b/c.py
|
ninjapapa/SMV2
|
42cf9f176c3ec0bed61f66fbf859c18d97027dd6
|
[
"Apache-2.0"
] | null | null | null |
from smv.provider import SmvProvider
class SomeProvider(SmvProvider):
    """Test fixture provider registered under the "some" provider type."""

    @staticmethod
    def provider_type():
        """Return the provider-type key for this provider."""
        return "some"
| 21.333333
| 38
| 0.765625
| 14
| 128
| 6.928571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 128
| 5
| 39
| 25.6
| 0.898148
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
c0bd43bb567c564430d9caced4c527f47619dacd
| 8,197
|
py
|
Python
|
plgx-esp/migrations/versions/698f286777f0_.py
|
dhoomakethu/plgx-esp
|
b466b52a5e16a0d12a61e505e48add83bee5bad4
|
[
"MIT"
] | 20
|
2019-12-09T13:55:13.000Z
|
2022-01-10T09:10:42.000Z
|
plgx-esp/migrations/versions/698f286777f0_.py
|
dhoomakethu/plgx-esp
|
b466b52a5e16a0d12a61e505e48add83bee5bad4
|
[
"MIT"
] | 13
|
2019-12-03T13:27:27.000Z
|
2021-12-03T05:22:49.000Z
|
plgx-esp/migrations/versions/698f286777f0_.py
|
dhoomakethu/plgx-esp
|
b466b52a5e16a0d12a61e505e48add83bee5bad4
|
[
"MIT"
] | 16
|
2019-11-15T11:45:06.000Z
|
2022-01-07T08:07:11.000Z
|
"""empty message

Revision ID: 698f286777f0
Revises: a76be8b92780
Create Date: 2018-09-10 15:11:38.552110

"""

# revision identifiers, used by Alembic.
revision = '698f286777f0'  # ID of this migration
down_revision = 'a76be8b92780'  # parent migration in the chain

from alembic import op
import sqlalchemy as sa
import polylogyx.database  # NOTE(review): appears unused here; kept as-is
from sqlalchemy.dialects import postgresql
def upgrade():
    """Apply this revision.

    Creates the alert_distributed_query, node_email and alert_email tables,
    adds JSONB/string columns to alerts, distributed_query, rule and
    distributed_query_task, tightens NOT NULL constraints on several
    existing columns, and drops a few unique constraints.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Link table between alerts and distributed queries.
    op.create_table('alert_distributed_query',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('alert_id', sa.String(), nullable=False),
        sa.Column('distributed_query_id', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Per-node e-mail records with a verification workflow.
    op.create_table('node_email',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email_id', sa.String(), nullable=False),
        sa.Column('status', sa.String(), nullable=True),
        sa.Column('node_id', sa.Integer(), nullable=False),
        sa.Column('email_verified', sa.Boolean(), nullable=False),
        sa.Column('verification_token', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['node_id'], ['node.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # E-mails generated for alerts, tied to both the alert and the node.
    op.create_table('alert_email',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('alert_id', sa.Integer(), nullable=False),
        sa.Column('status', sa.String(), nullable=True),
        sa.Column('node_id', sa.Integer(), nullable=False),
        sa.Column('body', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['alert_id'], ['alerts.id'], ),
        sa.ForeignKeyConstraint(['node_id'], ['node.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'alerts', sa.Column('recon_queries', postgresql.JSONB(), nullable=True))
    op.add_column(u'alerts', sa.Column('result_log_id', sa.String(), nullable=True))
    op.alter_column(u'alerts', 'message',
                    existing_type=postgresql.JSONB(),
                    nullable=True)
    op.alter_column(u'alerts', 'query_name',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'alerts', 'rule_id',
                    existing_type=sa.INTEGER(),
                    nullable=False)
    op.alter_column(u'carve_session', 'carve_guid',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'carve_session', 'session_id',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.add_column(u'distributed_query', sa.Column('alert_id', sa.Integer(), nullable=True))
    # Unnamed FK: name will follow the DB's naming convention.
    op.create_foreign_key(None, 'distributed_query', 'alerts', ['alert_id'], ['id'])
    op.add_column(u'distributed_query_task', sa.Column('data', postgresql.JSONB(), nullable=True))
    op.alter_column(u'email_recipient', 'status',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'email_recipient', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.drop_constraint(u'email_recipient_recipient_key', 'email_recipient', type_='unique')
    op.alter_column(u'node_config', 'apply_by_default',
                    existing_type=sa.BOOLEAN(),
                    nullable=False)
    op.alter_column(u'node_config', 'config',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'node_config', 'name',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'node_config', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.alter_column(u'node_data', 'data',
                    existing_type=postgresql.JSONB(),
                    nullable=False)
    op.alter_column(u'options', 'option',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'options', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.drop_constraint(u'options_name_key', 'options', type_='unique')
    op.add_column(u'rule', sa.Column('recon_queries', postgresql.JSONB(), nullable=True))
    op.alter_column(u'settings', 'setting',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column(u'settings', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.drop_constraint(u'settings_name_key', 'settings', type_='unique')
    # ### end Alembic commands ###
def downgrade():
    """Revert this revision.

    Mirror image of upgrade(): restores the dropped unique constraints,
    relaxes the NOT NULL constraints, removes the added columns and FK,
    and drops the three tables created by upgrade().
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint(u'settings_name_key', 'settings', ['name'])
    op.alter_column(u'settings', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column(u'settings', 'setting',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.drop_column(u'rule', 'recon_queries')
    op.create_unique_constraint(u'options_name_key', 'options', ['name'])
    op.alter_column(u'options', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column(u'options', 'option',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column(u'node_data', 'data',
                    existing_type=postgresql.JSONB(),
                    nullable=True)
    op.alter_column(u'node_config', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column(u'node_config', 'name',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column(u'node_config', 'config',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column(u'node_config', 'apply_by_default',
                    existing_type=sa.BOOLEAN(),
                    nullable=True)
    op.create_unique_constraint(u'email_recipient_recipient_key', 'email_recipient', ['recipient'])
    op.alter_column(u'email_recipient', 'updated_at',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column(u'email_recipient', 'status',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.drop_column(u'distributed_query_task', 'data')
    # FK was created unnamed; dropping by convention-derived name.
    op.drop_constraint(None, 'distributed_query', type_='foreignkey')
    op.drop_column(u'distributed_query', 'alert_id')
    op.alter_column(u'carve_session', 'session_id',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column(u'carve_session', 'carve_guid',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column(u'alerts', 'rule_id',
                    existing_type=sa.INTEGER(),
                    nullable=True)
    op.alter_column(u'alerts', 'query_name',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column(u'alerts', 'message',
                    existing_type=postgresql.JSONB(),
                    nullable=False)
    op.drop_column(u'alerts', 'result_log_id')
    op.drop_column(u'alerts', 'recon_queries')
    op.drop_table('alert_email')
    op.drop_table('node_email')
    op.drop_table('alert_distributed_query')
    # ### end Alembic commands ###
| 42.252577
| 99
| 0.572283
| 875
| 8,197
| 5.139429
| 0.113143
| 0.065377
| 0.092506
| 0.099622
| 0.832333
| 0.81143
| 0.749166
| 0.719146
| 0.691127
| 0.649099
| 0
| 0.009702
| 0.29584
| 8,197
| 193
| 100
| 42.471503
| 0.769404
| 0.035989
| 0
| 0.722581
| 0
| 0
| 0.176224
| 0.018818
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012903
| false
| 0
| 0.025806
| 0
| 0.03871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
238e8772372967a73c2983e0d5f731f7ed1725ec
| 43,195
|
py
|
Python
|
tests/test_fragment.py
|
Nikolay-Lysenko/dodecaphony
|
1a02af4b8b11785b65596b7ce14e1790436e0098
|
[
"MIT"
] | 2
|
2021-08-29T03:20:21.000Z
|
2021-11-22T01:20:55.000Z
|
tests/test_fragment.py
|
Nikolay-Lysenko/dodecaphony
|
1a02af4b8b11785b65596b7ce14e1790436e0098
|
[
"MIT"
] | null | null | null |
tests/test_fragment.py
|
Nikolay-Lysenko/dodecaphony
|
1a02af4b8b11785b65596b7ce14e1790436e0098
|
[
"MIT"
] | 1
|
2021-08-29T03:20:53.000Z
|
2021-08-29T03:20:53.000Z
|
"""
Test `dodecaphony.fragment` module.
Author: Nikolay Lysenko
"""
from collections import Counter
from typing import Any
import pytest
from dodecaphony.fragment import (
Event,
Fragment,
FragmentParams,
SUPPORTED_DURATIONS,
calculate_durations_of_measures,
calculate_number_of_undefined_events,
create_initial_sonic_content,
create_initial_temporal_content,
distribute_pitch_classes,
find_mutable_sonic_content_indices,
find_mutable_temporal_content_indices,
find_sonorities,
initialize_fragment,
override_calculated_attributes,
set_pitches_of_lower_lines,
set_pitches_of_upper_line,
split_time_span,
validate,
)
@pytest.mark.parametrize(
    "fragment, expected",
    [
        (
            # `fragment`
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0],
                    [2.0, 4.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['B', 'A', 'G', 'C#', 'D#', 'C', 'D', 'A#', 'F#', 'E', 'G#', 'F', 'pause'],
                    ['A#', 'A', 'F#', 'C', 'D', 'B', 'C#', 'G#', 'F', 'D#', 'G', 'E'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=16,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[1, 1],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `expected`: per line, durations grouped into 4-beat measures;
            # the 4.0 note spanning a barline is split as 2.0 + 2.0.
            [
                [[1.0, 1.0, 1.0, 1.0], [2.0, 2.0], [1.0, 1.0, 1.0, 1.0], [2.0, 1.0, 1.0]],
                [[2.0, 4.0], [2.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]],
            ]
        ),
    ]
)
def test_calculate_durations_of_measures(
    fragment: Fragment, expected: list[list[list[float]]]
) -> None:
    """Test `calculate_durations_of_measures` function."""
    # Derived attributes must be populated before measures can be computed.
    fragment = override_calculated_attributes(fragment)
    result = calculate_durations_of_measures(fragment)
    assert result == expected
@pytest.mark.parametrize(
    "group_index, temporal_content, sonic_content, line_indices, n_tone_row_instances, "
    "pauses_fraction, expected",
    [
        (
            # `group_index`
            0,
            # `temporal_content`: line 0 has no durations yet, line 1 has 12.
            [[], [1.0 for _ in range(12)]],
            # `sonic_content`: three full tone rows (36 pitch classes).
            {
                0: {
                    'pitch_classes': [
                        'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F',
                        'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F',
                        'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F',
                    ]
                }
            },
            # `line_indices`
            [0, 1],
            # `n_tone_row_instances`
            3,
            # `pauses_fraction`
            0.0,
            # `expected`: presumably 36 events minus the 12 already timed — confirm
            24
        ),
    ]
)
def test_calculate_number_of_undefined_events(
    group_index: int, temporal_content: list[list[float]],
    sonic_content: dict[int, dict[str, Any]], line_indices: list[int],
    n_tone_row_instances: int, pauses_fraction: float, expected: float
) -> None:
    """Test `calculate_number_of_undefined_events` function."""
    result = calculate_number_of_undefined_events(
        group_index, temporal_content, sonic_content, line_indices, n_tone_row_instances,
        pauses_fraction
    )
    assert result == expected
@pytest.mark.parametrize(
    "params, temporal_content, expected_n_pauses_by_group",
    [
        (
            # `params`: pauses drawn randomly from `pauses_fraction`.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=100,
                line_ids=[1],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={},
                sonic_content={}
            ),
            # `temporal_content`: 13 events for 12 pitch classes -> 1 pause.
            [[1.0, 1.0, 1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]],
            # `expected_n_pauses_by_group`
            [1]
        ),
        (
            # `params`: sonic content given explicitly, with two pauses.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=2,
                line_ids=[1],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={},
                sonic_content={
                    0: {
                        'pitch_classes': [
                            'pause', 'B', 'A#', 'G', 'C#', 'D#', 'C',
                            'D', 'A', 'F#', 'E', 'G#', 'F', 'pause'
                        ]
                    }
                }
            ),
            # `temporal_content`
            [[1.0, 1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]],
            # `expected_n_pauses_by_group`
            [2]
        ),
    ]
)
def test_create_initial_sonic_content(
    params: FragmentParams, temporal_content: list[list[float]],
    expected_n_pauses_by_group: list[int]
) -> None:
    """Test `create_initial_sonic_content` function."""
    sonic_content = create_initial_sonic_content(params, temporal_content)
    assert len(sonic_content) == len(params.groups)
    zipped = zip(sonic_content, expected_n_pauses_by_group)
    for i, (line_content, expected_n_pauses) in enumerate(zipped):
        # Each tone-row pitch class must occur once per tone row instance.
        counter = Counter(line_content)
        for pitch_class in params.tone_row:
            assert counter[pitch_class] == params.groups[i]['n_tone_row_instances']
        assert counter['pause'] == expected_n_pauses
@pytest.mark.parametrize(
    "params, expected_n_events_by_line",
    [
        (
            # `params`: nothing predefined — all events are generated.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                    {'n_melodic_lines': 3, 'n_tone_row_instances': 6},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2, 3, 4],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={},
                sonic_content={}
            ),
            # `expected_n_events_by_line`
            [13, 27, 27, 26]
        ),
        (
            # `params`: line 1 has fixed durations (one whole note per bar).
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                    {'n_melodic_lines': 3, 'n_tone_row_instances': 6},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2, 3, 4],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={1: {'durations': [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0]}},
                sonic_content={}
            ),
            # `expected_n_events_by_line`
            [13, 8, 36, 36]
        ),
        (
            # `params`: group 0 has explicit sonic content (no extra pauses).
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                    {'n_melodic_lines': 3, 'n_tone_row_instances': 6},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2, 3, 4],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={},
                sonic_content={
                    0: {
                        'pitch_classes': [
                            'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'
                        ]
                    }
                }
            ),
            # `expected_n_events_by_line`
            [12, 27, 27, 26]
        ),
    ]
)
def test_create_initial_temporal_content(
    params: FragmentParams, expected_n_events_by_line: list[int]
) -> None:
    """Test `create_initial_temporal_content` function."""
    temporal_content = create_initial_temporal_content(params)
    assert len(temporal_content) == len(params.line_ids)
    n_events_by_line = [len(x) for x in temporal_content]
    assert n_events_by_line == expected_n_events_by_line
@pytest.mark.parametrize(
    "fragment, expected",
    [
        (
            # `fragment`: two groups, the second spreads 4 pitch classes
            # over two melodic lines.
            Fragment(
                temporal_content=[
                    [4.0],
                    [3.0, 1.0],
                    [2.0, 2.0],
                ],
                sonic_content=[
                    ['C'],
                    ['D', 'E', 'F', 'G'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2, 3],
                upper_line_highest_position=88,
                upper_line_lowest_position=1,
                n_melodic_lines_by_group=[1, 2],
                n_tone_row_instances_by_group=[0, 0],
                mutable_temporal_content_indices=[0, 1, 2],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `expected`: events per line; group content is apparently
            # assigned in onset order across the group's lines — confirm.
            [
                [
                    Event(line_index=0, start_time=0.0, duration=4.0, pitch_class='C'),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=3.0, pitch_class='D'),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='G'),
                ],
                [
                    Event(line_index=2, start_time=0.0, duration=2.0, pitch_class='E'),
                    Event(line_index=2, start_time=2.0, duration=2.0, pitch_class='F'),
                ]
            ]
        ),
    ]
)
def test_distribute_pitch_classes(fragment: Fragment, expected: list[list[Event]]) -> None:
    """Test `distribute_pitch_classes` function."""
    result = distribute_pitch_classes(fragment)
    assert result == expected
@pytest.mark.parametrize(
    "params, expected",
    [
        (
            # `params`: group 0's sonic content is flagged immutable,
            # so only group 1 should remain mutable.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                    {'n_melodic_lines': 3, 'n_tone_row_instances': 6},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2, 3, 4],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={},
                sonic_content={
                    0: {
                        'pitch_classes': [
                            'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'
                        ],
                        'immutable': True
                    }
                }
            ),
            # `expected`
            [1]
        ),
    ]
)
def test_find_mutable_sonic_content_indices(params: FragmentParams, expected: list[int]) -> None:
    """Test `find_mutable_sonic_content_indices` function."""
    result = find_mutable_sonic_content_indices(params)
    assert result == expected
@pytest.mark.parametrize(
    "params, expected",
    [
        (
            # `params`: line 1's durations are flagged immutable,
            # so the remaining line indices stay mutable.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                    {'n_melodic_lines': 3, 'n_tone_row_instances': 6},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2, 3, 4],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1,
                temporal_content={
                    1: {
                        'durations': [4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0],
                        'immutable': True
                    }
                }
            ),
            # `expected`
            [0, 2, 3]
        ),
    ]
)
def test_find_mutable_temporal_content_indices(
    params: FragmentParams, expected: list[int]
) -> None:
    """Test `find_mutable_temporal_content_indices` function."""
    result = find_mutable_temporal_content_indices(params)
    assert result == expected
@pytest.mark.parametrize(
    "melodic_lines, expected",
    [
        (
            # `melodic_lines`: three lines with asynchronous event boundaries.
            [
                [
                    Event(line_index=0, start_time=0.0, duration=3.0),
                    Event(line_index=0, start_time=3.0, duration=1.0),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=2.0),
                    Event(line_index=1, start_time=2.0, duration=2.0),
                ],
                [
                    Event(line_index=2, start_time=0.0, duration=2.0),
                    Event(line_index=2, start_time=2.0, duration=2.0),
                ],
            ],
            # `expected`: one sonority per distinct set of simultaneously
            # sounding events (new sonority at each event onset).
            [
                [
                    Event(line_index=0, start_time=0.0, duration=3.0),
                    Event(line_index=1, start_time=0.0, duration=2.0),
                    Event(line_index=2, start_time=0.0, duration=2.0),
                ],
                [
                    Event(line_index=0, start_time=0.0, duration=3.0),
                    Event(line_index=1, start_time=2.0, duration=2.0),
                    Event(line_index=2, start_time=2.0, duration=2.0),
                ],
                [
                    Event(line_index=0, start_time=3.0, duration=1.0),
                    Event(line_index=1, start_time=2.0, duration=2.0),
                    Event(line_index=2, start_time=2.0, duration=2.0),
                ],
            ]
        ),
    ]
)
def test_find_sonorities(melodic_lines: list[list[Event]], expected: list[list[Event]]) -> None:
    """Test `find_sonorities` function."""
    result = find_sonorities(melodic_lines)
    assert result == expected
@pytest.mark.parametrize(
    "params",
    [
        (
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[
                    {'n_melodic_lines': 1, 'n_tone_row_instances': 1},
                    {'n_melodic_lines': 3, 'n_tone_row_instances': 6},
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2, 3, 4],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.1
            )
        ),
    ]
)
def test_initialize_fragment(params: FragmentParams) -> None:
    """Test `initialize_fragment` function."""
    fragment = initialize_fragment(params)
    # Every sounding event must be assigned a pitch; only pauses may be unset.
    for melodic_line in fragment.melodic_lines:
        for event in melodic_line:
            assert event.position_in_semitones is not None or event.pitch_class == 'pause'
@pytest.mark.parametrize(
    "fragment, max_interval, default_shift, expected_melodic_lines, expected_sonorities",
    [
        (
            # `fragment`: two lines of four quarter-note events each.
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['C', 'A', 'D', 'F'],
                    ['D', 'B', 'G', 'A'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[0, 0],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
            ),
            # `max_interval`
            16,
            # `default_shift`
            7,
            # `expected_melodic_lines`
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A', position_in_semitones=48),
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D', position_in_semitones=41),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B', position_in_semitones=38),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=46),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=36),
                ],
            ],
            # `expected_sonorities`: same events regrouped by onset time.
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D', position_in_semitones=41),
                ],
                [
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A', position_in_semitones=48),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B', position_in_semitones=38),
                ],
                [
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=46),
                ],
                [
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=36),
                ],
            ]
        ),
        (
            # `fragment`: three lines with differing rhythms; the first two
            # lines share the same pitch classes.
            Fragment(
                temporal_content=[
                    [2.0, 1.0, 1.0],
                    [2.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['C', 'D', 'F'],
                    ['C', 'D', 'F'],
                    ['G', 'B', 'G', 'A'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2, 3],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1, 1],
                n_tone_row_instances_by_group=[0, 0, 0],
                mutable_temporal_content_indices=[0, 1, 2],
                mutable_sonic_content_indices=[0, 1, 2],
            ),
            # `max_interval`
            16,
            # `default_shift`
            7,
            # `expected_melodic_lines`
            [
                [
                    Event(line_index=0, start_time=0.0, duration=2.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=2.0, pitch_class='C', position_in_semitones=39),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=41),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=32),
                ],
                [
                    Event(line_index=2, start_time=0.0, duration=1.0, pitch_class='G', position_in_semitones=34),
                    Event(line_index=2, start_time=1.0, duration=1.0, pitch_class='B', position_in_semitones=38),
                    Event(line_index=2, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=34),
                    Event(line_index=2, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=24),
                ],
            ],
            # `expected_sonorities`: sustained events appear in every sonority
            # whose onset falls within their duration.
            [
                [
                    Event(line_index=0, start_time=0.0, duration=2.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=1, start_time=0.0, duration=2.0, pitch_class='C', position_in_semitones=39),
                    Event(line_index=2, start_time=0.0, duration=1.0, pitch_class='G', position_in_semitones=34),
                ],
                [
                    Event(line_index=0, start_time=0.0, duration=2.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=1, start_time=0.0, duration=2.0, pitch_class='C', position_in_semitones=39),
                    Event(line_index=2, start_time=1.0, duration=1.0, pitch_class='B', position_in_semitones=38),
                ],
                [
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=41),
                    Event(line_index=2, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=34),
                ],
                [
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=32),
                    Event(line_index=2, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=24),
                ],
            ]
        ),
        (
            # `fragment`: includes a 'pause' pitch class and a larger
            # `default_shift` (24 semitones) for the lowest line.
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['C', 'A', 'D', 'F'],
                    ['D', 'pause', 'G', 'A'],
                    ['D', 'B', 'G', 'A'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2, 3],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1, 1],
                n_tone_row_instances_by_group=[0, 0, 0],
                mutable_temporal_content_indices=[0, 1, 2],
                mutable_sonic_content_indices=[0, 1, 2],
            ),
            # `max_interval`
            16,
            # `default_shift`
            24,
            # `expected_melodic_lines`
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A', position_in_semitones=48),
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D', position_in_semitones=41),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='pause', position_in_semitones=None),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=46),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=36),
                ],
                [
                    Event(line_index=2, start_time=0.0, duration=1.0, pitch_class='D', position_in_semitones=29),
                    Event(line_index=2, start_time=1.0, duration=1.0, pitch_class='B', position_in_semitones=14),
                    Event(line_index=2, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=22),
                    Event(line_index=2, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=24),
                ],
            ],
            # `expected_sonorities`
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D', position_in_semitones=41),
                    Event(line_index=2, start_time=0.0, duration=1.0, pitch_class='D', position_in_semitones=29),
                ],
                [
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A', position_in_semitones=48),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='pause', position_in_semitones=None),
                    Event(line_index=2, start_time=1.0, duration=1.0, pitch_class='B', position_in_semitones=14),
                ],
                [
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=46),
                    Event(line_index=2, start_time=2.0, duration=1.0, pitch_class='G', position_in_semitones=22),
                ],
                [
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=36),
                    Event(line_index=2, start_time=3.0, duration=1.0, pitch_class='A', position_in_semitones=24),
                ],
            ]
        ),
    ]
)
def test_set_pitches_of_lower_lines(
    fragment: Fragment,
    max_interval: int,
    default_shift: int,
    expected_melodic_lines: list[list[Event]],
    expected_sonorities: list[list[Event]]
) -> None:
    """Test `set_pitches_of_lower_lines` function."""
    # Below three lines are added instead of setting all arguments initially,
    # because `sonorities` and `melodic_lines` must reference to the same events.
    fragment.melodic_lines = distribute_pitch_classes(fragment)
    fragment.sonorities = find_sonorities(fragment.melodic_lines)
    fragment = set_pitches_of_upper_line(fragment)
    fragment = set_pitches_of_lower_lines(fragment, max_interval, default_shift)
    assert fragment.melodic_lines == expected_melodic_lines
    assert fragment.sonorities == expected_sonorities
@pytest.mark.parametrize(
    "fragment, expected",
    [
        (
            # `fragment`: two plain lines; only the upper (index 0) line is
            # expected to receive pitch positions.
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['C', 'A', 'D', 'F'],
                    ['D', 'B', 'G', 'A'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[0, 0],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
                melodic_lines=[
                    [
                        Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C'),
                        Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A'),
                        Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D'),
                        Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F'),
                    ],
                    [
                        Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D'),
                        Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B'),
                        Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G'),
                        Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A'),
                    ],
                ],
            ),
            # `expected`: upper line gets positions, lower line is untouched.
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A', position_in_semitones=48),
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D'),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B'),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G'),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A'),
                ],
            ],
        ),
        (
            # `fragment`: upper line starts with a pause.
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['pause', 'A', 'D', 'F'],
                    ['D', 'B', 'G', 'A'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[0, 0],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
                melodic_lines=[
                    [
                        Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='pause'),
                        Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A'),
                        Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D'),
                        Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F'),
                    ],
                    [
                        Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D'),
                        Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B'),
                        Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G'),
                        Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A'),
                    ],
                ],
            ),
            # `expected`: the leading pause keeps `position_in_semitones=None`.
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='pause', position_in_semitones=None),
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='A', position_in_semitones=48),
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D'),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B'),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G'),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A'),
                ],
            ],
        ),
        (
            # `fragment`: upper line has a pause in the middle.
            Fragment(
                temporal_content=[
                    [1.0, 1.0, 1.0, 1.0],
                    [1.0, 1.0, 1.0, 1.0],
                ],
                sonic_content=[
                    ['C', 'pause', 'D', 'F'],
                    ['D', 'B', 'G', 'A'],
                ],
                meter_numerator=4,
                meter_denominator=4,
                n_beats=4,
                line_ids=[1, 2],
                upper_line_highest_position=55,
                upper_line_lowest_position=41,
                n_melodic_lines_by_group=[1, 1],
                n_tone_row_instances_by_group=[0, 0],
                mutable_temporal_content_indices=[0, 1],
                mutable_sonic_content_indices=[0, 1],
                melodic_lines=[
                    [
                        Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C'),
                        Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='pause'),
                        Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D'),
                        Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F'),
                    ],
                    [
                        Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D'),
                        Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B'),
                        Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G'),
                        Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A'),
                    ],
                ],
            ),
            # `expected`
            [
                [
                    Event(line_index=0, start_time=0.0, duration=1.0, pitch_class='C', position_in_semitones=51),
                    Event(line_index=0, start_time=1.0, duration=1.0, pitch_class='pause', position_in_semitones=None),
                    Event(line_index=0, start_time=2.0, duration=1.0, pitch_class='D', position_in_semitones=53),
                    Event(line_index=0, start_time=3.0, duration=1.0, pitch_class='F', position_in_semitones=44),
                ],
                [
                    Event(line_index=1, start_time=0.0, duration=1.0, pitch_class='D'),
                    Event(line_index=1, start_time=1.0, duration=1.0, pitch_class='B'),
                    Event(line_index=1, start_time=2.0, duration=1.0, pitch_class='G'),
                    Event(line_index=1, start_time=3.0, duration=1.0, pitch_class='A'),
                ],
            ],
        ),
    ]
)
def test_set_pitches_of_upper_line(fragment: Fragment, expected: list[list[Event]]) -> None:
    """Test `set_pitches_of_upper_line` function."""
    fragment = set_pitches_of_upper_line(fragment)
    assert fragment.melodic_lines == expected
@pytest.mark.parametrize(
    "n_measures, n_events, meter_numerator",
    [
        (2, 9, 4),
        (8, 51, 3),
    ]
)
def test_split_time_span(n_measures: int, n_events: int, meter_numerator: float) -> None:
    """Test `split_time_span` function.

    The returned durations must be as many as requested, sum to the total
    length of the time span, and all be supported note durations.
    """
    result = split_time_span(n_measures, n_events, meter_numerator)
    assert len(result) == n_events
    assert sum(result) == n_measures * meter_numerator
    assert all(duration in SUPPORTED_DURATIONS for duration in result)
@pytest.mark.parametrize(
    "n_measures, n_events, meter_numerator, match",
    [
        # Too few events for the time span: average duration would exceed a semibreve.
        (4, 3, 4, "Average duration of an event is longer than semibreve."),
        # Too many events for the time span: durations would be unsupported.
        (1, 20, 4, "The number of events is so high that some of them are too short.")
    ]
)
def test_split_time_span_with_invalid_arguments(
    n_measures: int, n_events: int, meter_numerator: float, match: str
) -> None:
    """Test `split_time_span` function with invalid arguments."""
    with pytest.raises(ValueError, match=match):
        split_time_span(n_measures, n_events, meter_numerator)
@pytest.mark.parametrize(
    "params, match",
    [
        (
            # One melodic line declared in `groups`, but two `line_ids`.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[{'n_melodic_lines': 1, 'n_tone_row_instances': 2}],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.0
            ),
            "Number of lines in `groups` is not equal to that in `line_ids`."
        ),
        (
            # Duplicated line IDs.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[{'n_melodic_lines': 2, 'n_tone_row_instances': 2}],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 1],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.0
            ),
            "IDs of melodic lines must be unique."
        ),
        (
            # Unsupported meter numerator (5).
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[{'n_melodic_lines': 2, 'n_tone_row_instances': 2}],
                meter_numerator=5,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.0
            ),
            "Meter numerator = 5 is not supported."
        ),
        (
            # User-defined temporal content whose total duration (40 and 1 beats)
            # does not match the fragment duration (8 measures of 4 beats).
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[{'n_melodic_lines': 2, 'n_tone_row_instances': 2}],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.0,
                temporal_content={
                    0: {'durations': [1.0 for _ in range(40)]},
                    1: {'durations': [1.0]},
                }
            ),
            "A line has duration that is not equal to that of the fragment."
        ),
        (
            # User-defined sonic content with three tone row instances where
            # the group declares two.
            FragmentParams(
                tone_row=['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F'],
                groups=[{'n_melodic_lines': 2, 'n_tone_row_instances': 2}],
                meter_numerator=4,
                meter_denominator=4,
                n_measures=8,
                line_ids=[1, 2],
                upper_line_highest_note='E6',
                upper_line_lowest_note='E4',
                pauses_fraction=0.0,
                sonic_content={
                    0: {
                        'pitch_classes': [
                            'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F',
                            'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F',
                            'B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F',
                        ]
                    },
                }
            ),
            "A group has wrong number of tone row instances."
        ),
    ]
)
def test_validate(params: FragmentParams, match: str) -> None:
    """Test `validate` function."""
    with pytest.raises(ValueError, match=match):
        validate(params)
@pytest.mark.parametrize(
    "first_temporal_content, second_temporal_content, first_sonic_content, second_sonic_content, "
    "expected",
    [
        (
            # Identical temporal and sonic content must compare equal.
            [[1.0 for _ in range(12)]],
            [[1.0 for _ in range(12)]],
            [['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F']],
            [['B', 'A#', 'G', 'C#', 'D#', 'C', 'D', 'A', 'F#', 'E', 'G#', 'F']],
            True
        ),
    ]
)
def test_equality_of_fragments(
    first_temporal_content: list[list[float]], second_temporal_content: list[list[float]],
    first_sonic_content: list[list[str]], second_sonic_content: list[list[str]],
    expected: bool
) -> None:
    """Test `__eq__` method of `Fragment` class."""
    # Both fragments share every keyword argument; only the positional
    # contents differ between parametrized cases.
    first_fragment = Fragment(
        first_temporal_content,
        first_sonic_content,
        meter_numerator=4,
        meter_denominator=4,
        n_beats=12,
        line_ids=[1],
        upper_line_highest_position=88,
        upper_line_lowest_position=0,
        n_melodic_lines_by_group=[1],
        n_tone_row_instances_by_group=[1],
        mutable_temporal_content_indices=[0],
        mutable_sonic_content_indices=[0]
    )
    first_fragment = override_calculated_attributes(first_fragment)
    second_fragment = Fragment(
        second_temporal_content,
        second_sonic_content,
        meter_numerator=4,
        meter_denominator=4,
        n_beats=12,
        line_ids=[1],
        upper_line_highest_position=88,
        upper_line_lowest_position=0,
        n_melodic_lines_by_group=[1],
        n_tone_row_instances_by_group=[1],
        mutable_temporal_content_indices=[0],
        mutable_sonic_content_indices=[0]
    )
    second_fragment = override_calculated_attributes(second_fragment)
    result = first_fragment == second_fragment
    assert result == expected
| 41.216603
| 119
| 0.49096
| 5,104
| 43,195
| 3.869122
| 0.038597
| 0.023699
| 0.092161
| 0.059601
| 0.851934
| 0.794106
| 0.758963
| 0.74124
| 0.72858
| 0.703869
| 0
| 0.055953
| 0.369441
| 43,195
| 1,047
| 120
| 41.255969
| 0.66909
| 0.038315
| 0
| 0.647872
| 0
| 0
| 0.059871
| 0.003937
| 0
| 0
| 0
| 0
| 0.020213
| 1
| 0.015957
| false
| 0
| 0.004255
| 0
| 0.020213
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
23c49af2b4a4617044b1f634b3f89a8bc8361d4e
| 22
|
py
|
Python
|
src/robusta/integrations/slack/__init__.py
|
kandahk/robusta
|
61a2001cb1c4e90e8a74b810463ec99e6cb80787
|
[
"MIT"
] | 273
|
2021-12-28T20:48:48.000Z
|
2022-03-31T16:03:13.000Z
|
src/robusta/integrations/slack/__init__.py
|
kandahk/robusta
|
61a2001cb1c4e90e8a74b810463ec99e6cb80787
|
[
"MIT"
] | 103
|
2022-01-10T11:45:47.000Z
|
2022-03-31T16:31:11.000Z
|
src/robusta/integrations/slack/__init__.py
|
kandahk/robusta
|
61a2001cb1c4e90e8a74b810463ec99e6cb80787
|
[
"MIT"
] | 35
|
2021-12-30T15:30:14.000Z
|
2022-03-28T11:43:57.000Z
|
from .sender import *
| 11
| 21
| 0.727273
| 3
| 22
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
23fbdff0dbe3bdbd10a85e8b96ea314819fe183d
| 49
|
py
|
Python
|
config/__init__.py
|
akyruu/blender-cartography-addon
|
4f34b029d9b6a72619227ab3ceaed9393506934e
|
[
"Apache-2.0"
] | null | null | null |
config/__init__.py
|
akyruu/blender-cartography-addon
|
4f34b029d9b6a72619227ab3ceaed9393506934e
|
[
"Apache-2.0"
] | null | null | null |
config/__init__.py
|
akyruu/blender-cartography-addon
|
4f34b029d9b6a72619227ab3ceaed9393506934e
|
[
"Apache-2.0"
] | null | null | null |
from . import mappings
from .properties import *
| 16.333333
| 25
| 0.77551
| 6
| 49
| 6.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 49
| 2
| 26
| 24.5
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9b05240c2f8e613f3b3ab50085bea2d8b24a2f9b
| 182
|
py
|
Python
|
groundwork/__init__.py
|
amhaske/groundwork
|
abd63a54a34434ebdf527b1619c8bc90d8f97c28
|
[
"MIT"
] | 17
|
2016-07-27T12:32:06.000Z
|
2022-01-24T15:58:04.000Z
|
groundwork/__init__.py
|
amhaske/groundwork
|
abd63a54a34434ebdf527b1619c8bc90d8f97c28
|
[
"MIT"
] | 31
|
2016-12-16T07:29:54.000Z
|
2019-05-07T07:08:18.000Z
|
groundwork/__init__.py
|
amhaske/groundwork
|
abd63a54a34434ebdf527b1619c8bc90d8f97c28
|
[
"MIT"
] | 6
|
2018-03-05T13:53:31.000Z
|
2019-06-07T05:33:54.000Z
|
from __future__ import absolute_import
from groundwork.groundwork import App
from groundwork.patterns.gw_base_pattern import GwBasePattern
from groundwork.version import __version__
| 36.4
| 61
| 0.89011
| 23
| 182
| 6.565217
| 0.521739
| 0.278146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 182
| 4
| 62
| 45.5
| 0.909639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9b218957caac1673aeb2289192155089e828e3b9
| 8,764
|
py
|
Python
|
pase/models/aspp.py
|
ishine/pase
|
2a41e63e54fa8673efd12c16cdcdd5ad4f0f125e
|
[
"MIT"
] | 428
|
2019-04-08T04:34:00.000Z
|
2022-03-18T08:44:31.000Z
|
pase/models/aspp.py
|
ishine/pase
|
2a41e63e54fa8673efd12c16cdcdd5ad4f0f125e
|
[
"MIT"
] | 46
|
2019-04-07T23:38:53.000Z
|
2022-02-19T12:06:12.000Z
|
pase/models/aspp.py
|
ishine/pase
|
2a41e63e54fa8673efd12c16cdcdd5ad4f0f125e
|
[
"MIT"
] | 89
|
2019-04-08T18:17:25.000Z
|
2022-03-31T02:39:45.000Z
|
import math
import torch
import torch.nn as nn
from .modules import *
import torch.nn.functional as F
class _ASPPModule(Model):
    """One 1D atrous (dilated) convolution branch of an ASPP head: conv -> BN -> ReLU."""

    def __init__(self, inplanes, planes, kernel_size, padding, dilation):
        super(_ASPPModule, self).__init__()
        # bias=False because BatchNorm follows and absorbs any bias term.
        self.atrous_conv = nn.Conv1d(inplanes, planes, kernel_size=kernel_size,
                                     stride=1, padding=padding, dilation=dilation, bias=False)
        self.bn = nn.BatchNorm1d(planes)
        self.relu = nn.ReLU()
        self._init_weight()

    def forward(self, x):
        """Apply atrous conv, batch norm, and ReLU to `x`."""
        x = self.atrous_conv(x)
        x = self.bn(x)
        return self.relu(x)

    def _init_weight(self):
        """Kaiming-initialize conv weights; set BN to identity (weight=1, bias=0)."""
        for m in self.modules():
            if isinstance(m, nn.Conv1d):
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm1d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
class _ASPPModule2d(Model):
    """2D counterpart of `_ASPPModule`: atrous Conv2d -> BatchNorm2d -> ReLU."""

    def __init__(self, inplanes, planes, kernel_size, padding, dilation):
        super(_ASPPModule2d, self).__init__()
        # bias=False because BatchNorm follows and absorbs any bias term.
        self.atrous_conv = nn.Conv2d(inplanes, planes, kernel_size=kernel_size,
                                     stride=1, padding=padding, dilation=dilation, bias=False)
        self.bn = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU()
        self._init_weight()

    def forward(self, x):
        """Apply atrous conv, batch norm, and ReLU to `x`."""
        x = self.atrous_conv(x)
        x = self.bn(x)
        return self.relu(x)

    def _init_weight(self):
        """Kaiming-initialize conv weights; set BN to identity (weight=1, bias=0)."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
class ASPP(Model):
    """1D Atrous Spatial Pyramid Pooling head.

    Runs four parallel atrous-conv branches plus a global-average-pooling
    branch, concatenates the five feature maps, and projects them to
    `emb_dim` channels with a 1x1 conv -> BN -> ReLU -> Dropout(0.5).

    When `dense` is True, the dilations list is reinterpreted as kernel
    sizes (with dilation fixed to 1) instead of dilation rates.
    """

    def __init__(self, inplanes, emb_dim, dilations=[1, 6, 12, 18], fmaps=48, dense=False):
        super(ASPP, self).__init__()
        if not dense:
            # Standard ASPP: fixed kernels, varying dilation rates.
            self.aspp1 = _ASPPModule(inplanes, fmaps, 1, padding=0, dilation=dilations[0])
            self.aspp2 = _ASPPModule(inplanes, fmaps, 3, padding=dilations[1], dilation=dilations[1])
            self.aspp3 = _ASPPModule(inplanes, fmaps, 3, padding=dilations[2], dilation=dilations[2])
            self.aspp4 = _ASPPModule(inplanes, fmaps, 3, padding=dilations[3], dilation=dilations[3])
            self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool1d((1)),
                                                 nn.Conv1d(inplanes, fmaps, 1, stride=1, bias=False),
                                                 nn.BatchNorm1d(fmaps),
                                                 nn.ReLU())
        else:
            # Dense variant: `dilations` act as kernel sizes, dilation is 1.
            self.aspp1 = _ASPPModule(inplanes, fmaps, dilations[0], padding=0, dilation=1)
            self.aspp2 = _ASPPModule(inplanes, fmaps, dilations[1], padding=dilations[1]//2, dilation=1)
            self.aspp3 = _ASPPModule(inplanes, fmaps, dilations[2], padding=dilations[2]//2, dilation=1)
            self.aspp4 = _ASPPModule(inplanes, fmaps, dilations[3], padding=dilations[3]//2, dilation=1)
            self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool1d((1)),
                                                 nn.Conv1d(inplanes, fmaps, 1, stride=1, bias=False),
                                                 nn.BatchNorm1d(fmaps),
                                                 nn.ReLU())
        # 5 branches (4 atrous + global pooling), each producing `fmaps` channels.
        self.conv1 = nn.Conv1d(fmaps * 5, emb_dim, 1, bias=False)
        self.bn1 = nn.BatchNorm1d(emb_dim)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(0.5)
        self._init_weight()

    def forward(self, x):
        """Fuse the five branch outputs of `x` into an `emb_dim`-channel map."""
        x1 = self.aspp1(x)
        x2 = self.aspp2(x)
        x3 = self.aspp3(x)
        x4 = self.aspp4(x)
        x5 = self.global_avg_pool(x)
        # Upsample the pooled branch back to the temporal length of x4.
        x5 = F.interpolate(x5, size=x4.size()[2:], mode='linear', align_corners=True)
        x = torch.cat((x1, x2, x3, x4, x5), dim=1)
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        return self.dropout(x)

    def _init_weight(self):
        """Kaiming-initialize conv weights; set BN to identity (weight=1, bias=0)."""
        for m in self.modules():
            if isinstance(m, nn.Conv1d):
                # n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                # m.weight.data.normal_(0, math.sqrt(2. / n))
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm1d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
class ASPP2d(Model):
    """2D Atrous Spatial Pyramid Pooling head over a single-channel input.

    `forward` unsqueezes a channel dimension, runs four parallel atrous
    branches plus a global-average-pooling branch, concatenates them, and
    projects back to one channel (which is squeezed out again).

    When `dense` is True, the dilations list is reinterpreted as kernel
    sizes (with dilation fixed to 1), mirroring the 1D `ASPP` class.
    """

    def __init__(self, inplanes, emb_dim, dilations=[1, 6, 12, 18], fmaps=48, dense=False):
        super(ASPP2d, self).__init__()
        if not dense:
            # Standard ASPP: fixed kernels, varying dilation rates.
            self.aspp1 = _ASPPModule2d(inplanes, fmaps, 1, padding=0, dilation=dilations[0])
            self.aspp2 = _ASPPModule2d(inplanes, fmaps, 3, padding=dilations[1], dilation=dilations[1])
            self.aspp3 = _ASPPModule2d(inplanes, fmaps, 3, padding=dilations[2], dilation=dilations[2])
            self.aspp4 = _ASPPModule2d(inplanes, fmaps, 3, padding=dilations[3], dilation=dilations[3])
            self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)),
                                                 nn.Conv2d(inplanes, fmaps, 1, stride=1, bias=False),
                                                 nn.BatchNorm2d(fmaps),
                                                 nn.ReLU())
        else:
            # BUGFIX: this branch was missing, so dense=True left aspp1..aspp4
            # and global_avg_pool undefined and `forward` raised AttributeError.
            # Mirrors the dense branch of the 1D `ASPP` class: `dilations`
            # act as kernel sizes, dilation is 1.
            self.aspp1 = _ASPPModule2d(inplanes, fmaps, dilations[0], padding=0, dilation=1)
            self.aspp2 = _ASPPModule2d(inplanes, fmaps, dilations[1], padding=dilations[1]//2, dilation=1)
            self.aspp3 = _ASPPModule2d(inplanes, fmaps, dilations[2], padding=dilations[2]//2, dilation=1)
            self.aspp4 = _ASPPModule2d(inplanes, fmaps, dilations[3], padding=dilations[3]//2, dilation=1)
            self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)),
                                                 nn.Conv2d(inplanes, fmaps, 1, stride=1, bias=False),
                                                 nn.BatchNorm2d(fmaps),
                                                 nn.ReLU())
        # 5 branches (4 atrous + global pooling), each producing `fmaps` channels,
        # projected down to a single channel.
        self.conv1 = nn.Conv2d(fmaps * 5, 1, 1, bias=False)
        self.bn1 = nn.BatchNorm2d(1)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(0.5)
        self._init_weight()

    def forward(self, x):
        """Run the pyramid over `x` (channel dim added and removed internally)."""
        x = x.unsqueeze(1)
        x1 = self.aspp1(x)
        x2 = self.aspp2(x)
        x3 = self.aspp3(x)
        x4 = self.aspp4(x)
        x5 = self.global_avg_pool(x)
        # Upsample the pooled branch back to the spatial size of x4.
        x5 = F.interpolate(x5, size=x4.size()[2:], mode='bilinear', align_corners=True)
        x = torch.cat((x1, x2, x3, x4, x5), dim=1)
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.dropout(x).squeeze(1)
        return x

    def _init_weight(self):
        """Kaiming-initialize conv weights; set BN to identity (weight=1, bias=0)."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                # m.weight.data.normal_(0, math.sqrt(2. / n))
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
class aspp_resblock(Model):
    """Residual block built from two ASPP stages.

    Each stage is an ASPP (or ASPP2d when `pool2d` is True) followed by a
    Conv1d -> BatchNorm1d -> ReLU; the block output is the elementwise sum
    of the two stage outputs (a residual connection between stages).
    """

    def __init__(self, in_channel, out_channel, kernel_size, stride, dilations, fmaps, pool2d=False, dense=False):
        super().__init__(name="aspp_resblock")
        # "Same" padding for odd kernel sizes.
        padding = kernel_size // 2
        if pool2d:
            # 2D pyramid variant; note ASPP2d operates on a single channel.
            self.block1 = nn.Sequential(ASPP2d(1, out_channel, dilations, fmaps, dense),
                                        nn.Conv1d(in_channel, out_channel, kernel_size=kernel_size, stride=stride,
                                                  padding=padding, bias=False),
                                        nn.BatchNorm1d(out_channel),
                                        nn.ReLU(out_channel))
            self.block2 = nn.Sequential(ASPP2d(1, out_channel, dilations, fmaps, dense),
                                        nn.Conv1d(out_channel, out_channel, kernel_size=kernel_size, stride=1,
                                                  padding=padding, bias=False),
                                        nn.BatchNorm1d(out_channel),
                                        nn.ReLU(out_channel))
        else:
            # 1D pyramid variant: only the first conv applies `stride`.
            self.block1 = nn.Sequential(ASPP(in_channel, out_channel, dilations, fmaps, dense),
                                        nn.Conv1d(out_channel, out_channel, kernel_size=kernel_size, stride=stride, padding=padding, bias=False),
                                        nn.BatchNorm1d(out_channel),
                                        nn.ReLU(out_channel))
            self.block2 = nn.Sequential(ASPP(out_channel, out_channel, dilations, fmaps, dense),
                                        nn.Conv1d(out_channel, out_channel, kernel_size=kernel_size, stride=1, padding=padding, bias=False),
                                        nn.BatchNorm1d(out_channel),
                                        nn.ReLU(out_channel))
        self._init_weight()

    def forward(self, x):
        """Return block1(x) + block2(block1(x)) — a residual sum of the stages."""
        out_1 = self.block1(x)
        out_2 = self.block2(out_1)
        y = out_1 + out_2
        return y

    def _init_weight(self):
        """He-style normal init for convs; set BN to identity (weight=1, bias=0)."""
        for m in self.modules():
            if isinstance(m, nn.Conv1d):
                n = m.kernel_size[0] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm1d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
| 38.438596
| 145
| 0.534231
| 1,033
| 8,764
| 4.365924
| 0.102614
| 0.046563
| 0.028825
| 0.026608
| 0.841685
| 0.818182
| 0.762971
| 0.745455
| 0.738137
| 0.738137
| 0
| 0.037795
| 0.347901
| 8,764
| 227
| 146
| 38.60793
| 0.751356
| 0.022935
| 0
| 0.628049
| 0
| 0
| 0.003157
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091463
| false
| 0
| 0.030488
| 0
| 0.182927
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f1da34745cb7f808236fef1dda49fe8ee6ac1be5
| 1,511
|
py
|
Python
|
core/tests/test_notify.py
|
uktrade/dnb-service
|
c8f22af82af70f33b8d6bf92e3ca6992fce1f220
|
[
"MIT"
] | 4
|
2019-12-03T14:59:50.000Z
|
2020-04-28T12:42:24.000Z
|
core/tests/test_notify.py
|
uktrade/dnb-service
|
c8f22af82af70f33b8d6bf92e3ca6992fce1f220
|
[
"MIT"
] | 17
|
2019-04-11T13:12:57.000Z
|
2022-01-13T10:08:07.000Z
|
core/tests/test_notify.py
|
uktrade/dnb-service
|
c8f22af82af70f33b8d6bf92e3ca6992fce1f220
|
[
"MIT"
] | 3
|
2021-05-11T16:13:57.000Z
|
2022-03-08T15:57:19.000Z
|
import io
from unittest import mock
from core.notify import notify_by_email
def test_notify_by_email_no_file(monkeypatch):
    """
    Test notify_by_email function when there is no file in the template context.
    """
    client_stub = mock.Mock()
    monkeypatch.setattr('core.notify.notifications_client', client_stub)
    recipient = 'joe.bloggs@example.net'
    template = 'foobar'
    context = {'foo': 'bar'}

    notify_by_email(recipient, template, context)

    # Without a file entry, the context is forwarded unchanged.
    client_stub.send_email_notification.assert_called_with(
        email_address=recipient,
        template_id=template,
        personalisation=context,
    )
def test_notify_by_email_with_file(monkeypatch):
    """
    Test notify_by_email function when there is a file in the template context.
    """
    client_stub = mock.Mock()
    monkeypatch.setattr('core.notify.notifications_client', client_stub)
    recipient = 'joe.bloggs@example.net'
    template = 'foobar'
    context = {
        'foo': 'bar',
        'file': io.BytesIO(b'foo bar baz'),
    }
    # The file stream is expected to be replaced by its base64-encoded payload.
    expected_personalisation = {
        **context,
        'file': {'file': 'Zm9vIGJiciBiYXo=', 'is_csv': False},
    }

    notify_by_email(recipient, template, context)

    client_stub.send_email_notification.assert_called_with(
        email_address=recipient,
        template_id=template,
        personalisation=expected_personalisation,
    )
| 32.847826
| 86
| 0.716744
| 180
| 1,511
| 5.677778
| 0.266667
| 0.148728
| 0.089041
| 0.066536
| 0.816047
| 0.776908
| 0.776908
| 0.776908
| 0.776908
| 0.776908
| 0
| 0.000818
| 0.191264
| 1,511
| 45
| 87
| 33.577778
| 0.835516
| 0.100596
| 0
| 0.470588
| 0
| 0
| 0.133283
| 0.081325
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.058824
| false
| 0
| 0.088235
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f1f126c65696bd914af678555f8e65d752594270
| 3,567
|
py
|
Python
|
spyder_okvim/executor/tests/test_colon.py
|
ok97465/spyder_okvim
|
6ba22c0013a2419a14f7950bd8931d6ee7e107e4
|
[
"MIT"
] | 3
|
2021-03-13T13:01:03.000Z
|
2021-12-05T05:19:55.000Z
|
spyder_okvim/executor/tests/test_colon.py
|
ok97465/spyder_okvim
|
6ba22c0013a2419a14f7950bd8931d6ee7e107e4
|
[
"MIT"
] | 18
|
2020-11-02T22:08:01.000Z
|
2021-09-20T05:53:12.000Z
|
spyder_okvim/executor/tests/test_colon.py
|
ok97465/spyder_okvim
|
6ba22c0013a2419a14f7950bd8931d6ee7e107e4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""."""
"""Tests for the executor_colon."""
# Third party imports
import pytest
from qtpy.QtCore import Qt
@pytest.mark.parametrize(
    "text, cmd_list, cmd_line_expected",
    [
        ('', [":", "k", "k"], ':kk'),
        ('', [":", "k", "k", Qt.Key_Escape], ''),
    ]
)
def test_colon_cmd(vim_bot, text, cmd_list, cmd_line_expected):
    """Typed colon-command keys should appear verbatim in the command line."""
    _, _, editor, vim, qtbot = vim_bot
    editor.set_text(text)
    cmd_line = vim.get_focus_widget()
    for key in cmd_list:
        # Characters are typed as clicks; Qt key codes are pressed directly.
        send = qtbot.keyClicks if isinstance(key, str) else qtbot.keyPress
        send(cmd_line, key)
    assert cmd_line.text() == cmd_line_expected
@pytest.mark.parametrize(
    "text, cmd_list",
    [
        ('', [":", Qt.Key_Return]),
        ('', [":", Qt.Key_Left, 'd', Qt.Key_Enter]),
    ]
)
def test_colon_corner_case_cmd(vim_bot, text, cmd_list):
    """Degenerate colon commands should clear the line and leave sub mode."""
    _, _, editor, vim, qtbot = vim_bot
    editor.set_text(text)
    cmd_line = vim.get_focus_widget()
    for key in cmd_list:
        # Characters are typed as clicks; Qt key codes are pressed directly.
        send = qtbot.keyClicks if isinstance(key, str) else qtbot.keyPress
        send(cmd_line, key)
    assert cmd_line.text() == ''
    assert vim.vim_cmd.vim_status.sub_mode is None
def test_colon_w_command(vim_bot):
    """The ``:w`` ex-command should trigger the editor save action."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    cmd_line = vim.get_focus_widget()
    for ch in (':', 'w'):
        qtbot.keyClicks(cmd_line, ch)
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.save_action.trigger.assert_called_once_with()
def test_colon_q_command(vim_bot):
    """The ``:q`` ex-command should trigger the editor close action."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    cmd_line = vim.get_focus_widget()
    for ch in (':', 'q'):
        qtbot.keyClicks(cmd_line, ch)
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.close_action.trigger.assert_called_once_with()
def test_colon_qexclamation_command(vim_bot):
    """The ``:q!`` ex-command should trigger the editor close action."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    cmd_line = vim.get_focus_widget()
    for ch in (':', 'q', '!'):
        qtbot.keyClicks(cmd_line, ch)
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.close_action.trigger.assert_called_once_with()
def test_colon_wq_command(vim_bot):
    """The ``:wq`` ex-command should both save and close the editor."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    cmd_line = vim.get_focus_widget()
    for ch in (':', 'w', 'q'):
        qtbot.keyClicks(cmd_line, ch)
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.close_action.trigger.assert_called_once_with()
    main.editor.save_action.trigger.assert_called_once_with()
def test_colon_n_command(vim_bot):
    """The ``:n`` ex-command should trigger the new-file action."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    cmd_line = vim.get_focus_widget()
    for ch in (':', 'n'):
        qtbot.keyClicks(cmd_line, ch)
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.new_action.trigger.assert_called_once_with()
def test_colon_backspace_command(vim_bot):
    """Backspace removes ex-command characters one at a time, then exits."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    cmd_line = vim.get_focus_widget()
    for ch in (':', 'n'):
        qtbot.keyClicks(cmd_line, ch)

    # First backspace deletes the 'n' but stays in ex-command sub mode.
    qtbot.keyPress(cmd_line, Qt.Key_Backspace)
    assert cmd_line.text() == ":"
    assert vim.vim_cmd.vim_status.sub_mode is not None

    # Second backspace deletes the ':' and leaves the sub mode entirely.
    qtbot.keyPress(cmd_line, Qt.Key_Backspace)
    assert cmd_line.text() == ""
    assert vim.vim_cmd.vim_status.sub_mode is None
| 29.237705
| 63
| 0.663302
| 507
| 3,567
| 4.34714
| 0.149901
| 0.127042
| 0.123412
| 0.15245
| 0.855717
| 0.848457
| 0.787659
| 0.787659
| 0.787659
| 0.748185
| 0
| 0.000347
| 0.191758
| 3,567
| 121
| 64
| 29.479339
| 0.764135
| 0.044015
| 0
| 0.662791
| 0
| 0
| 0.022236
| 0
| 0
| 0
| 0
| 0
| 0.151163
| 1
| 0.093023
| false
| 0
| 0.023256
| 0
| 0.116279
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f1f174e3d29e329b3d0b1b16abe2d3e98481f2fa
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/clikit/handler/help/help_text_handler.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/clikit/handler/help/help_text_handler.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/clikit/handler/help/help_text_handler.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/e7/47/c0/77299a9d17091be40ab8b2b55aed69af2623074cd84c782886bbde1e2d
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 0
| 96
| 1
| 96
| 96
| 0.479167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f1fe05a13bda0caadf24c5ed039b0bfffcdd8dca
| 189
|
py
|
Python
|
Learning/CodeWars/Python/7 kyu_Help_the_Fruit_Guy.py
|
aliasfoxkde/snippets
|
bb6dcc6597316ef9c88611f526935059451c3b5a
|
[
"MIT"
] | null | null | null |
Learning/CodeWars/Python/7 kyu_Help_the_Fruit_Guy.py
|
aliasfoxkde/snippets
|
bb6dcc6597316ef9c88611f526935059451c3b5a
|
[
"MIT"
] | null | null | null |
Learning/CodeWars/Python/7 kyu_Help_the_Fruit_Guy.py
|
aliasfoxkde/snippets
|
bb6dcc6597316ef9c88611f526935059451c3b5a
|
[
"MIT"
] | null | null | null |
# See: https://www.codewars.com/kata/557af4c6169ac832300000ba
def remove_rotten(bag_of_fruits):
    """Return the bag with every 'rotten' marker stripped and names lower-cased.

    A falsy bag (None or empty) yields an empty list.
    """
    if not bag_of_fruits:
        return []
    return [fruit.replace("rotten", "").lower() for fruit in bag_of_fruits]
| 47.25
| 92
| 0.751323
| 29
| 189
| 4.655172
| 0.724138
| 0.111111
| 0.244444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100592
| 0.10582
| 189
| 4
| 92
| 47.25
| 0.698225
| 0.312169
| 0
| 0
| 0
| 0
| 0.046512
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
7b17460a5e70b8fa34ec6f7b909c208f7c18a769
| 526
|
py
|
Python
|
thornode_client/thornode_client/api/__init__.py
|
hoodieonwho/thorchain-python-client
|
fccfd66552e16bdab1dbb90b68022475c7a9693d
|
[
"MIT"
] | null | null | null |
thornode_client/thornode_client/api/__init__.py
|
hoodieonwho/thorchain-python-client
|
fccfd66552e16bdab1dbb90b68022475c7a9693d
|
[
"MIT"
] | null | null | null |
thornode_client/thornode_client/api/__init__.py
|
hoodieonwho/thorchain-python-client
|
fccfd66552e16bdab1dbb90b68022475c7a9693d
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from thornode_client.api.health_check_api import HealthCheckApi
from thornode_client.api.keygen__keysign_api import KeygenKeysignApi
from thornode_client.api.network_api import NetworkApi
from thornode_client.api.nodes_api import NodesApi
from thornode_client.api.pools_api import PoolsApi
from thornode_client.api.queue_api import QueueApi
from thornode_client.api.tx_api import TxApi
from thornode_client.api.vaults_api import VaultsApi
| 37.571429
| 68
| 0.874525
| 78
| 526
| 5.589744
| 0.384615
| 0.220183
| 0.330275
| 0.385321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002088
| 0.089354
| 526
| 13
| 69
| 40.461538
| 0.908142
| 0.077947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9e2a1f936142c210166e42b6702938e059795aa6
| 13,864
|
py
|
Python
|
database/model.py
|
braycarlson/viking
|
56e2029dde2054ebff6a2993d3d094e2c1733e7e
|
[
"MIT"
] | 7
|
2018-01-10T19:37:46.000Z
|
2020-12-06T22:17:02.000Z
|
database/model.py
|
braycarlson/viking
|
56e2029dde2054ebff6a2993d3d094e2c1733e7e
|
[
"MIT"
] | 2
|
2017-05-07T00:58:16.000Z
|
2020-04-17T18:54:42.000Z
|
database/model.py
|
braycarlson/viking
|
56e2029dde2054ebff6a2993d3d094e2c1733e7e
|
[
"MIT"
] | 8
|
2017-05-06T00:48:26.000Z
|
2020-04-17T18:19:51.000Z
|
from gino import Gino

# Shared Gino (async SQLAlchemy) instance; every model below registers on it.
database = Gino()

class MemberSounds(database.Model):
    # Sound clips uploaded by guild members.
    __tablename__ = 'member_sounds'

    id = database.Column(database.BigInteger(), primary_key=True)
    name = database.Column(database.TEXT(), nullable=False)
    created_by = database.Column(database.BigInteger(), nullable=False)
    created_at = database.Column(database.DateTime(), nullable=True)
    updated_at = database.Column(database.DateTime(), nullable=True)

    # created_by references the member who uploaded the sound.
    _fk_discord_id = database.ForeignKeyConstraint(["created_by"], ["active_members.discord_id"])
    # Case-insensitive lookup by name.
    _idx_name = database.Index('index_name', database.func.lower('name'))
class GuildRoles(database.Model):
__tablename__ = 'guild_roles'
id = database.Column(database.BigInteger(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
colour = database.Column(database.VARCHAR(255), nullable=False)
hoist = database.Column(database.Boolean(), nullable=False)
position = database.Column(database.SmallInteger(), primary_key=True)
managed = database.Column(database.Boolean(), nullable=False)
mentionable = database.Column(database.Boolean(), nullable=False)
is_default = database.Column(database.Boolean(), nullable=False)
created_at = database.Column(database.DateTime(), nullable=True)
_idx_role_id = database.Index('index_role_id', 'id', unique=True)
_idx_role_name = database.Index('index_role_name', database.func.lower('name'))
class ActiveMembers(database.Model):
__tablename__ = 'active_members'
discord_id = database.Column(database.BigInteger(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
discriminator = database.Column(database.VARCHAR(255), nullable=False)
display_name = database.Column(database.TEXT(), nullable=False)
nickname = database.Column(database.TEXT(), nullable=True)
role_id = database.Column(database.BigInteger(), nullable=False)
bot = database.Column(database.Boolean(), nullable=False)
previous_name = database.Column(database.ARRAY(database.TEXT()), nullable=True)
previous_discriminator = database.Column(database.ARRAY(database.VARCHAR(255)), nullable=True)
previous_nickname = database.Column(database.ARRAY(database.TEXT()), nullable=True)
created_at = database.Column(database.DateTime(), nullable=True)
joined_at = database.Column(database.DateTime(), nullable=True)
updated_at = database.Column(database.DateTime(), nullable=True)
removed_at = database.Column(database.DateTime(), nullable=True)
deleted_at = database.Column(database.DateTime(), nullable=True)
_fk_role_id = database.ForeignKeyConstraint(["role_id"], ["guild_roles.id"])
_idx_member_name = database.Index('index_member_name', database.func.lower('name'))
_idx_member_nickname = database.Index('index_member_nickname', database.func.lower('nickname'))
class RemovedMembers(database.Model):
__tablename__ = 'removed_members'
discord_id = database.Column(database.BigInteger(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
discriminator = database.Column(database.VARCHAR(255), nullable=False)
display_name = database.Column(database.TEXT(), nullable=False)
nickname = database.Column(database.TEXT(), nullable=True)
role_id = database.Column(database.BigInteger(), nullable=False)
bot = database.Column(database.Boolean(), nullable=False)
previous_name = database.Column(database.ARRAY(database.TEXT()), nullable=True)
previous_discriminator = database.Column(database.ARRAY(database.VARCHAR(255)), nullable=True)
previous_nickname = database.Column(database.ARRAY(database.TEXT()), nullable=True)
created_at = database.Column(database.DateTime(), nullable=True)
joined_at = database.Column(database.DateTime(), nullable=True)
updated_at = database.Column(database.DateTime(), nullable=True)
removed_at = database.Column(database.DateTime(), nullable=True)
deleted_at = database.Column(database.DateTime(), nullable=True)
_fk_role_id = database.ForeignKeyConstraint(["role_id"], ["guild_roles.id"])
_idx_removed_member_name = database.Index('index_removed_member_name', database.func.lower('name'))
_idx_removed_member_nickname = database.Index('index_removed_member_nickname', database.func.lower('nickname'))
class BannedMembers(database.Model):
__tablename__ = 'banned_members'
discord_id = database.Column(database.BigInteger(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
discriminator = database.Column(database.VARCHAR(255), nullable=False)
display_name = database.Column(database.TEXT(), nullable=False)
nickname = database.Column(database.TEXT(), nullable=True)
role_id = database.Column(database.BigInteger(), nullable=False)
bot = database.Column(database.Boolean(), nullable=False)
previous_name = database.Column(database.ARRAY(database.TEXT()), nullable=True)
previous_discriminator = database.Column(database.ARRAY(database.VARCHAR(255)), nullable=True)
previous_nickname = database.Column(database.ARRAY(database.TEXT()), nullable=True)
created_at = database.Column(database.DateTime(), nullable=True)
joined_at = database.Column(database.DateTime(), nullable=True)
updated_at = database.Column(database.DateTime(), nullable=True)
removed_at = database.Column(database.DateTime(), nullable=True)
deleted_at = database.Column(database.DateTime(), nullable=True)
_fk_role_id = database.ForeignKeyConstraint(["role_id"], ["guild_roles.id"])
_idx_banned_member_name = database.Index('index_banned_member_name', database.func.lower('name'))
_idx_banned_member_nickname = database.Index('index_banned_member_nickname', database.func.lower('nickname'))
class PublicCommands(database.Model):
__tablename__ = 'public_commands'
id = database.Column(database.Integer(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
aliases = database.Column(database.ARRAY(database.TEXT()), nullable=True)
_idx_public_command_name = database.Index('index_public_command_name', 'name', unique=True)
class HiddenCommands(database.Model):
__tablename__ = 'hidden_commands'
id = database.Column(database.Integer(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
aliases = database.Column(database.ARRAY(database.TEXT()), nullable=True)
_idx_hidden_command_name = database.Index('index_hidden_command_name', 'name', unique=True)
class NHLTeams(database.Model):
__tablename__ = 'nhl_teams'
team_id = database.Column(database.Integer(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
link = database.Column(database.TEXT(), nullable=False)
venue_id = database.Column(database.Integer(), nullable=True)
venue_name = database.Column(database.TEXT(), nullable=False)
venue_link = database.Column(database.TEXT(), nullable=False)
venue_city = database.Column(database.TEXT(), nullable=False)
timezone_id = database.Column(database.TEXT(), nullable=False)
timezone_offset = database.Column(database.Integer(), nullable=False)
timezone_tz = database.Column(database.TEXT(), nullable=False)
abbreviation = database.Column(database.TEXT(), nullable=False)
team_name = database.Column(database.TEXT(), nullable=False)
location_name = database.Column(database.TEXT(), nullable=False)
first_year_of_play = database.Column(database.TEXT(), nullable=False)
division_id = database.Column(database.Integer(), nullable=False)
division_name = database.Column(database.TEXT(), nullable=False)
division_name_short = database.Column(database.TEXT(), nullable=False)
division_link = database.Column(database.TEXT(), nullable=False)
division_abbreviation = database.Column(database.TEXT(), nullable=False)
conference_id = database.Column(database.Integer(), nullable=False)
conference_name = database.Column(database.TEXT(), nullable=False)
conference_link = database.Column(database.TEXT(), nullable=False)
franchise_id = database.Column(database.Integer(), nullable=False)
franchise_name = database.Column(database.TEXT(), nullable=False)
franchise_link = database.Column(database.TEXT(), nullable=False)
short_name = database.Column(database.TEXT(), nullable=False)
official_website = database.Column(database.TEXT(), nullable=False)
active = database.Column(database.Boolean(), nullable=False)
_idx_nhl_team_name = database.Index('index_nhl_team_name', 'name', unique=True)
_idx_nhl_team_abbreviation = database.Index('index_nhl_team_abbreviation', 'abbreviation', unique=True)
class NHLPlayers(database.Model):
__tablename__ = 'nhl_players'
player_id = database.Column(database.BigInteger(), primary_key=True)
full_name = database.Column(database.TEXT(), nullable=False)
link = database.Column(database.TEXT(), nullable=False)
first_name = database.Column(database.TEXT(), nullable=False)
last_name = database.Column(database.TEXT(), nullable=False)
number = database.Column(database.TEXT(), nullable=False)
birthdate = database.Column(database.TEXT(), nullable=False)
age = database.Column(database.Integer(), nullable=False)
city = database.Column(database.TEXT(), nullable=False)
province = database.Column(database.TEXT(), nullable=True)
country = database.Column(database.TEXT(), nullable=False)
nationality = database.Column(database.TEXT(), nullable=False)
height = database.Column(database.TEXT(), nullable=False)
weight = database.Column(database.Integer(), nullable=False)
active = database.Column(database.Boolean(), nullable=False)
alternate_captain = database.Column(database.Boolean(), nullable=False)
captain = database.Column(database.Boolean(), nullable=False)
rookie = database.Column(database.Boolean(), nullable=False)
shooting_hand = database.Column(database.TEXT(), nullable=False)
team_id = database.Column(database.Integer(), nullable=True)
team_name = database.Column(database.TEXT(), nullable=True)
team_link = database.Column(database.TEXT(), nullable=True)
position_code = database.Column(database.TEXT(), nullable=True)
position_name = database.Column(database.TEXT(), nullable=True)
position_type = database.Column(database.TEXT(), nullable=True)
position_abbreviation = database.Column(database.TEXT(), nullable=True)
_fk_team_id = database.ForeignKeyConstraint(["team_id"], ["nhl_teams.team_id"])
class LoLChampions(database.Model):
__tablename__ = 'lol_champions'
champion_id = database.Column(database.TEXT(), primary_key=True)
name = database.Column(database.TEXT(), nullable=False)
title = database.Column(database.TEXT(), nullable=False)
blurb = database.Column(database.TEXT(), nullable=False)
attack_information = database.Column(database.Integer(), nullable=False)
defense_information = database.Column(database.Integer(), nullable=False)
magic_information = database.Column(database.Integer(), nullable=False)
difficulty_information = database.Column(database.Integer(), nullable=False)
full_image = database.Column(database.TEXT(), nullable=False)
champion_class = database.Column(database.TEXT(), nullable=False)
resource = database.Column(database.TEXT(), nullable=True)
health = database.Column(database.Integer(), nullable=False)
health_per_level = database.Column(database.Integer(), nullable=False)
mana = database.Column(database.Integer(), nullable=False)
mana_per_level = database.Column(database.Integer(), nullable=False)
movement_speed = database.Column(database.Integer(), nullable=False)
armor = database.Column(database.Integer(), nullable=False)
armor_per_level = database.Column(database.Integer(), nullable=False)
spellblock = database.Column(database.Integer(), nullable=False)
spellblock_per_level = database.Column(database.Integer(), nullable=False)
attack_range = database.Column(database.Integer(), nullable=False)
health_regeneration = database.Column(database.Integer(), nullable=False)
health_regeneration_per_level = database.Column(database.Integer(), nullable=False)
mana_regeneration = database.Column(database.Integer(), nullable=False)
mana_regeneration_per_level = database.Column(database.Integer(), nullable=False)
critical_strike = database.Column(database.Integer(), nullable=False)
critical_strike_per_level = database.Column(database.Integer(), nullable=False)
attack_damage = database.Column(database.Integer(), nullable=False)
attack_damage_per_level = database.Column(database.Integer(), nullable=False)
attack_speed_per_level = database.Column(database.Integer(), nullable=False)
attack_speed = database.Column(database.Integer(), nullable=False)
class LoLSpells(database.Model):
__tablename__ = 'lol_spells'
spell_id = database.Column(database.TEXT(), nullable=False)
spell_key = database.Column(database.TEXT(), nullable=False)
name = database.Column(database.TEXT(), nullable=False)
description = database.Column(database.TEXT(), nullable=True)
maximum_rank = database.Column(database.Integer(), nullable=True)
cooldown = database.Column(database.TEXT(), nullable=True)
cost = database.Column(database.TEXT(), nullable=True)
cost_type = database.Column(database.TEXT(), nullable=True)
maximum_ammo = database.Column(database.TEXT(), nullable=True)
spell_range = database.Column(database.TEXT(), nullable=True)
full_image = database.Column(database.TEXT(), nullable=True)
resource = database.Column(database.TEXT(), nullable=True)
level = database.Column(database.Integer(), nullable=True)
| 55.456
| 115
| 0.753895
| 1,603
| 13,864
| 6.336868
| 0.092327
| 0.224651
| 0.353022
| 0.179169
| 0.871136
| 0.837468
| 0.724651
| 0.453633
| 0.404509
| 0.363556
| 0
| 0.001713
| 0.115623
| 13,864
| 249
| 116
| 55.678715
| 0.826768
| 0
| 0
| 0.341463
| 0
| 0
| 0.043999
| 0.016518
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004878
| 0
| 0.995122
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9e2cc0442a52a63b8719a99aea7526543fa8b16a
| 32
|
py
|
Python
|
Bioinformatics Stronghold/Counting Subsets.py
|
Vinay-Vinod/Rosalind
|
6818a38d1378a55e84e9f75636bacce2c274d24c
|
[
"MIT"
] | null | null | null |
Bioinformatics Stronghold/Counting Subsets.py
|
Vinay-Vinod/Rosalind
|
6818a38d1378a55e84e9f75636bacce2c274d24c
|
[
"MIT"
] | null | null | null |
Bioinformatics Stronghold/Counting Subsets.py
|
Vinay-Vinod/Rosalind
|
6818a38d1378a55e84e9f75636bacce2c274d24c
|
[
"MIT"
] | null | null | null |
# Rosalind "Counting Subsets": a set of n elements has 2**n subsets;
# report that count modulo 1,000,000.
n = 3
subset_count = 2 ** n % 1000000
print(subset_count)
| 10.666667
| 25
| 0.59375
| 7
| 32
| 2.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.346154
| 0.1875
| 32
| 2
| 26
| 16
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
7b88492ae8c0b2ff2d571b4a837bb16743517b89
| 202
|
py
|
Python
|
hpbandster/optimizers/__init__.py
|
RevanMacQueen/HpBandSter
|
3b6a5594df30796f43114f7e0e70c1dc56c11e60
|
[
"BSD-3-Clause"
] | 546
|
2018-01-18T08:09:02.000Z
|
2022-03-25T03:06:24.000Z
|
hpbandster/optimizers/__init__.py
|
RevanMacQueen/HpBandSter
|
3b6a5594df30796f43114f7e0e70c1dc56c11e60
|
[
"BSD-3-Clause"
] | 99
|
2018-02-09T14:00:13.000Z
|
2022-01-11T17:05:44.000Z
|
hpbandster/optimizers/__init__.py
|
RevanMacQueen/HpBandSter
|
3b6a5594df30796f43114f7e0e70c1dc56c11e60
|
[
"BSD-3-Clause"
] | 126
|
2018-02-12T14:08:58.000Z
|
2022-03-08T02:50:33.000Z
|
from hpbandster.optimizers.randomsearch import RandomSearch
from hpbandster.optimizers.hyperband import HyperBand
from hpbandster.optimizers.bohb import BOHB
from hpbandster.optimizers.h2bo import H2BO
| 40.4
| 59
| 0.881188
| 24
| 202
| 7.416667
| 0.333333
| 0.314607
| 0.539326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.079208
| 202
| 4
| 60
| 50.5
| 0.946237
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7b9469617a9696cc4247eb8e65224a73bf51c015
| 99
|
py
|
Python
|
hydroDL/model/__init__.py
|
fkwai/geolearn
|
30cb4353d22af5020a48100d07ab04f465a315b0
|
[
"MIT"
] | null | null | null |
hydroDL/model/__init__.py
|
fkwai/geolearn
|
30cb4353d22af5020a48100d07ab04f465a315b0
|
[
"MIT"
] | null | null | null |
hydroDL/model/__init__.py
|
fkwai/geolearn
|
30cb4353d22af5020a48100d07ab04f465a315b0
|
[
"MIT"
] | 2
|
2021-04-04T02:45:59.000Z
|
2022-03-19T09:41:39.000Z
|
from .train import trainModel, testModel
from . import rnn
from . import crit
from . import layers
| 19.8
| 40
| 0.777778
| 14
| 99
| 5.5
| 0.571429
| 0.38961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171717
| 99
| 4
| 41
| 24.75
| 0.939024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7bcd27608751b2c2e9144a7999d90f3f1a3cf386
| 1,380
|
py
|
Python
|
MIT 6.00.1x/Week 2/Guess my number.py
|
kai92a/Learning_Python
|
5195aeb950e21150838c44d7c6af87cd86d31301
|
[
"MIT"
] | null | null | null |
MIT 6.00.1x/Week 2/Guess my number.py
|
kai92a/Learning_Python
|
5195aeb950e21150838c44d7c6af87cd86d31301
|
[
"MIT"
] | null | null | null |
MIT 6.00.1x/Week 2/Guess my number.py
|
kai92a/Learning_Python
|
5195aeb950e21150838c44d7c6af87cd86d31301
|
[
"MIT"
] | null | null | null |
#Guess my number
#Week 2 Finger Exercise 3
# Binary-search guessing game: the program guesses the midpoint of the
# remaining candidate range and narrows it with the user's 'h'/'l'/'c' replies.
print ("Please think of a number between 0 and 100!")
print ("Is your secret number 50?")
s=[x for x in range (100)]  # candidate numbers 0..99
i=input("Enter 'h' to indicate the guess is too high. Enter 'l' to indicate the guess is too low. Enter 'c' to indicate I guessed correctly.")
j=0  # lower bound index of the remaining range
k=len(s)  # upper bound index (exclusive) of the remaining range
m=50  # index of the current guess
while i!="c":
    # Input was none of 'l'/'h'/'c': re-prompt with the same guess.
    if i!="l":
        if i!="h":
            if i!="c":
                print ("Sorry, I did not understand your input.")
                print ("Is your secret number "+str(s[m])+"?")
                i=input("Enter 'h' to indicate the guess is too high. Enter 'l' to indicate the guess is too low. Enter 'c' to indicate I guessed correctly.")
    if i == "h":
        # Guess too high: shrink the upper bound and guess the new midpoint.
        k=m
        m=int((j+k)/2)
        print ("Is your secret number "+str(s[m])+"?")
        i=input("Enter 'h' to indicate the guess is too high. Enter 'l' to indicate the guess is too low. Enter 'c' to indicate I guessed correctly.")
        if i=="c":
            break
    elif i=="l":
        # Guess too low: raise the lower bound and guess the new midpoint.
        j=m
        m=int((j+k)/2)
        print ("Is your secret number "+str(s[m])+"?")
        i=input("Enter 'h' to indicate the guess is too high. Enter 'l' to indicate the guess is too low. Enter 'c' to indicate I guessed correctly.")
        if i=="c":
            break
    elif i=="c":
        break
if i=="c":
    print ("Game over. Your secret number was: "+str(s[m]))
| 39.428571
| 158
| 0.562319
| 235
| 1,380
| 3.302128
| 0.242553
| 0.154639
| 0.134021
| 0.185567
| 0.734536
| 0.704897
| 0.704897
| 0.704897
| 0.704897
| 0.704897
| 0
| 0.016393
| 0.292754
| 1,380
| 34
| 159
| 40.588235
| 0.778689
| 0.028261
| 0
| 0.46875
| 0
| 0.125
| 0.556385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.21875
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c8f64a0be0b6c5fa37a93c36503df4a87b83448e
| 371
|
py
|
Python
|
backend/deploy-manage-service.py
|
YanpingDong/Spring-Cloud-Manager
|
e856915c7da1fd01bdeb7d7dfcd10cc9b69464fa
|
[
"Unlicense",
"MIT"
] | 1
|
2016-11-22T07:38:04.000Z
|
2016-11-22T07:38:04.000Z
|
backend/deploy-manage-service.py
|
YanpingDong/Spring-Cloud-Manager
|
e856915c7da1fd01bdeb7d7dfcd10cc9b69464fa
|
[
"Unlicense",
"MIT"
] | null | null | null |
backend/deploy-manage-service.py
|
YanpingDong/Spring-Cloud-Manager
|
e856915c7da1fd01bdeb7d7dfcd10cc9b69464fa
|
[
"Unlicense",
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from app import app
from config import GlobalVar
if __name__ == '__main__':
    # Entry point: start the Flask app with host/port/debug taken from the
    # GlobalVar.PROPERTIES configuration mapping.
    #app.debug = GlobalVar.PROPERTIES.get('debug') # sets debug mode; debug must be turned off in production
    app.run(host=GlobalVar.PROPERTIES.get('host'),
            port=int(GlobalVar.PROPERTIES.get('port')),
            debug=GlobalVar.PROPERTIES.get('debug')) # start the server
| 41.222222
| 85
| 0.622642
| 41
| 371
| 5.439024
| 0.512195
| 0.340807
| 0.394619
| 0.242152
| 0.286996
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003509
| 0.231806
| 371
| 9
| 86
| 41.222222
| 0.778947
| 0.285714
| 0
| 0
| 0
| 0
| 0.08046
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
cdaf831540cc3ab086f47add0d2a62d175a0f1b0
| 35
|
py
|
Python
|
os_v3_hek/defs/cont.py
|
holy-crust/reclaimer
|
0aa693da3866ce7999c68d5f71f31a9c932cdb2c
|
[
"MIT"
] | null | null | null |
os_v3_hek/defs/cont.py
|
holy-crust/reclaimer
|
0aa693da3866ce7999c68d5f71f31a9c932cdb2c
|
[
"MIT"
] | null | null | null |
os_v3_hek/defs/cont.py
|
holy-crust/reclaimer
|
0aa693da3866ce7999c68d5f71f31a9c932cdb2c
|
[
"MIT"
] | null | null | null |
from ...os_hek.defs.cont import *
| 17.5
| 34
| 0.685714
| 6
| 35
| 3.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 35
| 1
| 35
| 35
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cde00db50b9f2192ea3e79800ca7d0b555114b8e
| 220,313
|
py
|
Python
|
rubiks-cube-NxNxN-solver/rubikscubennnsolver/RubiksCube777.py
|
sliu54/mathworks-hackday-2020
|
cc033c437e9630f3d7f848853768bd3e793b370d
|
[
"BSD-2-Clause"
] | null | null | null |
rubiks-cube-NxNxN-solver/rubikscubennnsolver/RubiksCube777.py
|
sliu54/mathworks-hackday-2020
|
cc033c437e9630f3d7f848853768bd3e793b370d
|
[
"BSD-2-Clause"
] | null | null | null |
rubiks-cube-NxNxN-solver/rubikscubennnsolver/RubiksCube777.py
|
sliu54/mathworks-hackday-2020
|
cc033c437e9630f3d7f848853768bd3e793b370d
|
[
"BSD-2-Clause"
] | null | null | null |
from rubikscubennnsolver.misc import SolveError
from rubikscubennnsolver.RubiksCubeNNNOddEdges import RubiksCubeNNNOddEdges
from rubikscubennnsolver.LookupTable import (
LookupTable,
LookupTableIDAViaC,
)
from rubikscubennnsolver.LookupTableIDAViaGraph import LookupTableIDAViaGraph
import logging
import sys
log = logging.getLogger(__name__)
# Every legal move on a 7x7x7: for each face, the outer-layer turns
# (U, U', U2), the two-layer wide turns (Uw, Uw', Uw2) and the
# three-layer wide turns (3Uw, 3Uw', 3Uw2).
# Inner-slice moves (2U/3U etc.) are intentionally excluded for now.
moves_777 = tuple(
    base + suffix
    for face in ("U", "L", "F", "R", "B", "D")
    for base in (face, face + "w", "3" + face + "w")
    for suffix in ("", "'", "2")
)
# Solved-state sticker string for a 7x7x7: 49 stickers per face, in URFDLB face order.
solved_777 = "".join([face * 49 for face in "URFDLB"])
# Sticker positions of the 25 center pieces on each of the six faces
# (everything inside the outer ring of a 7x7 face), in U/L/F/R/B/D order.
centers_777 = (
    9, 10, 11, 12, 13, 16, 17, 18, 19, 20, 23, 24, 25, 26, 27, 30, 31, 32, 33, 34, 37, 38, 39, 40, 41, # Upper
    58, 59, 60, 61, 62, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 79, 80, 81, 82, 83, 86, 87, 88, 89, 90, # Left
    107, 108, 109, 110, 111, 114, 115, 116, 117, 118, 121, 122, 123, 124, 125, 128, 129, 130, 131, 132, 135, 136, 137, 138, 139, # Front
    156, 157, 158, 159, 160, 163, 164, 165, 166, 167, 170, 171, 172, 173, 174, 177, 178, 179, 180, 181, 184, 185, 186, 187, 188, # Right
    205, 206, 207, 208, 209, 212, 213, 214, 215, 216, 219, 220, 221, 222, 223, 226, 227, 228, 229, 230, 233, 234, 235, 236, 237, # Back
    254, 255, 256, 257, 258, 261, 262, 263, 264, 265, 268, 269, 270, 271, 272, 275, 276, 277, 278, 279, 282, 283, 284, 285, 286, # Down
)
# Same 25-per-face center positions as centers_777, restricted to the
# Upper, Left, Right and Down faces.
ULRD_centers_777 = (
    9, 10, 11, 12, 13, 16, 17, 18, 19, 20, 23, 24, 25, 26, 27, 30, 31, 32, 33, 34, 37, 38, 39, 40, 41, # Upper
    58, 59, 60, 61, 62, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 79, 80, 81, 82, 83, 86, 87, 88, 89, 90, # Left
    156, 157, 158, 159, 160, 163, 164, 165, 166, 167, 170, 171, 172, 173, 174, 177, 178, 179, 180, 181, 184, 185, 186, 187, 188, # Right
    254, 255, 256, 257, 258, 261, 262, 263, 264, 265, 268, 269, 270, 271, 272, 275, 276, 277, 278, 279, 282, 283, 284, 285, 286, # Down
)
class LookupTableIDA777LRObliqueEdgePairing(LookupTableIDAViaC):
    """Stage the LR oblique edges via the "7x7x7-LR-oblique-edges-stage" C IDA solver."""

    # Oblique-edge sticker positions on all six faces.
    oblique_edges_777 = (
        10, 11, 12, 16, 20, 23, 27, 30, 34, 38, 39, 40,  # Upper
        59, 60, 61, 65, 69, 72, 76, 79, 83, 87, 88, 89,  # Left
        108, 109, 110, 114, 118, 121, 125, 128, 132, 136, 137, 138,  # Front
        157, 158, 159, 163, 167, 170, 174, 177, 181, 185, 186, 187,  # Right
        206, 207, 208, 212, 216, 219, 223, 226, 230, 234, 235, 236,  # Back
        255, 256, 257, 261, 265, 268, 272, 275, 279, 283, 284, 285,  # Down
    )

    def __init__(self, parent):
        LookupTableIDAViaC.__init__(
            self,
            parent,
            (),  # Needed tables and their md5 signatures
            "7x7x7-LR-oblique-edges-stage",  # C_ida_type
        )

    def recolor(self):
        """Collapse the cube's colors so only LR oblique edges remain visible.

        Oblique-edge stickers become "L" (if L or R colored) or "x";
        every other center sticker becomes ".".
        """
        log.info("%s: recolor (custom)" % self)
        self.parent.nuke_corners()
        self.parent.nuke_edges()
        cube = self.parent.state
        for pos in centers_777:
            if pos not in self.oblique_edges_777:
                cube[pos] = "."
            elif cube[pos] in ("L", "R"):
                cube[pos] = "L"
            else:
                cube[pos] = "x"
class LookupTableIDA777UDObliqueEdgePairing(LookupTableIDAViaC):
    """Stage the UD oblique edges via the "7x7x7-UD-oblique-edges-stage" C IDA solver."""

    # Oblique-edge sticker positions on the U, F, B and D faces.
    UFBD_oblique_edges_777 = (
        10, 11, 12, 16, 20, 23, 27, 30, 34, 38, 39, 40,  # Upper
        108, 109, 110, 114, 118, 121, 125, 128, 132, 136, 137, 138,  # Front
        206, 207, 208, 212, 216, 219, 223, 226, 230, 234, 235, 236,  # Back
        255, 256, 257, 261, 265, 268, 272, 275, 279, 283, 284, 285,  # Down
    )

    def __init__(self, parent):
        LookupTableIDAViaC.__init__(
            self,
            parent,
            (),  # Needed tables and their md5 signatures
            "7x7x7-UD-oblique-edges-stage",  # C_ida_type
        )

    def recolor(self):
        """Collapse the cube's colors so only UD oblique edges remain visible.

        UFBD oblique-edge stickers become "U" (if U or D colored) or "x";
        every other center sticker becomes ".".
        """
        log.info("%s: recolor (custom)" % self)
        self.parent.nuke_corners()
        self.parent.nuke_edges()
        cube = self.parent.state
        for pos in centers_777:
            if pos not in self.UFBD_oblique_edges_777:
                cube[pos] = "."
            elif cube[pos] in ("U", "D"):
                cube[pos] = "U"
            else:
                cube[pos] = "x"
class LookupTable777Step41(LookupTable):
    """
    lookup-table-7x7x7-step41.txt
    =============================
    0 steps has 8 entries (0 percent, 0.00x previous step)
    1 steps has 370 entries (0 percent, 46.25x previous step)
    2 steps has 2,000 entries (0 percent, 5.41x previous step)
    3 steps has 10,166 entries (2 percent, 5.08x previous step)
    4 steps has 43,316 entries (12 percent, 4.26x previous step)
    5 steps has 115,392 entries (33 percent, 2.66x previous step)
    6 steps has 135,856 entries (39 percent, 1.18x previous step)
    7 steps has 34,484 entries (10 percent, 0.25x previous step)
    8 steps has 1,408 entries (0 percent, 0.04x previous step)
    Total: 343,000 entries
    Average: 5.40 moves
    """
    # Acceptable goal patterns over the positions in
    # LR_oblique_edges_and_outer_t_center (one character per position).
    state_targets = (
        'LLLLLLLLLLLLRRRRRRRRRRRR',
        'LLLLRLRLRLLLRRRLRLRLRRRR',
        'LLLLRLRLRLLLRRRRLRLRLRRR',
        'LLLRLRLRLLLLRRRLRLRLRRRR',
        'LLLRLRLRLLLLRRRRLRLRLRRR',
        'LLLRRRRRRLLLRRRLLLLLLRRR',
        'LLRLLLLLLLLRLRRRRRRRRLRR',
        'LLRLLLLLLLLRRRLRRRRRRRRL',
        'LLRLRLRLRLLRLRRLRLRLRLRR',
        'LLRLRLRLRLLRLRRRLRLRLLRR',
        'LLRLRLRLRLLRRRLLRLRLRRRL',
        'LLRLRLRLRLLRRRLRLRLRLRRL',
        'LLRRLRLRLLLRLRRLRLRLRLRR',
        'LLRRLRLRLLLRLRRRLRLRLLRR',
        'LLRRLRLRLLLRRRLLRLRLRRRL',
        'LLRRLRLRLLLRRRLRLRLRLRRL',
        'LLRRRRRRRLLRLRRLLLLLLLRR',
        'LLRRRRRRRLLRRRLLLLLLLRRL',
        'LRLLLLLLLLRLRLRRRRRRRRLR',
        'LRLLRLRLRLRLRLRLRLRLRRLR',
        'LRLLRLRLRLRLRLRRLRLRLRLR',
        'LRLRLRLRLLRLRLRLRLRLRRLR',
        'LRLRLRLRLLRLRLRRLRLRLRLR',
        'LRLRRRRRRLRLRLRLLLLLLRLR',
        'LRRLLLLLLLRRLLRRRRRRRLLR',
        'LRRLLLLLLLRRRLLRRRRRRRLL',
        'LRRLRLRLRLRRLLRLRLRLRLLR',
        'LRRLRLRLRLRRLLRRLRLRLLLR',
        'LRRLRLRLRLRRRLLLRLRLRRLL',
        'LRRLRLRLRLRRRLLRLRLRLRLL',
        'LRRRLRLRLLRRLLRLRLRLRLLR',
        'LRRRLRLRLLRRLLRRLRLRLLLR',
        'LRRRLRLRLLRRRLLLRLRLRRLL',
        'LRRRLRLRLLRRRLLRLRLRLRLL',
        'LRRRRRRRRLRRLLRLLLLLLLLR',
        'LRRRRRRRRLRRRLLLLLLLLRLL',
        'RLLLLLLLLRLLLRRRRRRRRLRR',
        'RLLLLLLLLRLLRRLRRRRRRRRL',
        'RLLLRLRLRRLLLRRLRLRLRLRR',
        'RLLLRLRLRRLLLRRRLRLRLLRR',
        'RLLLRLRLRRLLRRLLRLRLRRRL',
        'RLLLRLRLRRLLRRLRLRLRLRRL',
        'RLLRLRLRLRLLLRRLRLRLRLRR',
        'RLLRLRLRLRLLLRRRLRLRLLRR',
        'RLLRLRLRLRLLRRLLRLRLRRRL',
        'RLLRLRLRLRLLRRLRLRLRLRRL',
        'RLLRRRRRRRLLLRRLLLLLLLRR',
        'RLLRRRRRRRLLRRLLLLLLLRRL',
        'RLRLLLLLLRLRLRLRRRRRRLRL',
        'RLRLRLRLRRLRLRLLRLRLRLRL',
        'RLRLRLRLRRLRLRLRLRLRLLRL',
        'RLRRLRLRLRLRLRLLRLRLRLRL',
        'RLRRLRLRLRLRLRLRLRLRLLRL',
        'RLRRRRRRRRLRLRLLLLLLLLRL',
        'RRLLLLLLLRRLLLRRRRRRRLLR',
        'RRLLLLLLLRRLRLLRRRRRRRLL',
        'RRLLRLRLRRRLLLRLRLRLRLLR',
        'RRLLRLRLRRRLLLRRLRLRLLLR',
        'RRLLRLRLRRRLRLLLRLRLRRLL',
        'RRLLRLRLRRRLRLLRLRLRLRLL',
        'RRLRLRLRLRRLLLRLRLRLRLLR',
        'RRLRLRLRLRRLLLRRLRLRLLLR',
        'RRLRLRLRLRRLRLLLRLRLRRLL',
        'RRLRLRLRLRRLRLLRLRLRLRLL',
        'RRLRRRRRRRRLLLRLLLLLLLLR',
        'RRLRRRRRRRRLRLLLLLLLLRLL',
        'RRRLLLLLLRRRLLLRRRRRRLLL',
        'RRRLRLRLRRRRLLLLRLRLRLLL',
        'RRRLRLRLRRRRLLLRLRLRLLLL',
        'RRRRLRLRLRRRLLLLRLRLRLLL',
        'RRRRLRLRLRRRLLLRLRLRLLLL',
        'RRRRRRRRRRRRLLLLLLLLLLLL'
    )
    # Positions tracked by this table (only the Left and Right rows are active;
    # the other faces are kept as commented-out reference rows).
    LR_oblique_edges_and_outer_t_center = (
        # 10, 11, 12, 16, 20, 23, 27, 30, 34, 38, 39, 40, # Upper
        59, 60, 61, 65, 69, 72, 76, 79, 83, 87, 88, 89, # Left
        # 108, 109, 110, 114, 118, 121, 125, 128, 132, 136, 137, 138, # Front
        157, 158, 159, 163, 167, 170, 174, 177, 181, 185, 186, 187, # Right
        # 206, 207, 208, 212, 216, 219, 223, 226, 230, 234, 235, 236, # Back
        # 255, 256, 257, 261, 265, 268, 272, 275, 279, 283, 284, 285, # Down
    )
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step41.txt',
            self.state_targets,
            linecount=343000,
            max_depth=8,
            filesize=20237000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "U", "U'", "U2",
                "D", "D'", "D2",
                "F", "F'", "F2",
                # NOTE(review): "D", "D'", "D2" is repeated here and no "B"
                # moves are listed -- possibly this was meant to be
                # "B", "B'", "B2". Verify against the table generator
                # before changing, since the .txt table was built with
                # some specific move set.
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.LR_oblique_edges_and_outer_t_center])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.LR_oblique_edges_and_outer_t_center, state):
            cube[pos] = pos_state
class LookupTable777Step42(LookupTable):
    """
    lookup-table-7x7x7-step42.txt
    =============================
    0 steps has 10 entries (0 percent, 0.00x previous step)
    1 steps has 216 entries (0 percent, 21.60x previous step)
    2 steps has 1,289 entries (0 percent, 5.97x previous step)
    3 steps has 6,178 entries (1 percent, 4.79x previous step)
    4 steps has 24,456 entries (7 percent, 3.96x previous step)
    5 steps has 73,866 entries (21 percent, 3.02x previous step)
    6 steps has 131,607 entries (38 percent, 1.78x previous step)
    7 steps has 90,214 entries (26 percent, 0.69x previous step)
    8 steps has 14,832 entries (4 percent, 0.16x previous step)
    9 steps has 332 entries (0 percent, 0.02x previous step)
    Total: 343,000 entries
    Average: 5.92 moves
    """
    # Acceptable goal patterns over the positions in
    # LR_inside_centers_and_left_oblique_edges (one character per position).
    state_targets = (
        'LLLLLLLLLLLLLRRRRRRRRRRRRR',
        'LLLLLLLLRLLLLRRRRLRRRRRRRR',
        'LLLLLLLLRLLLLRRRRRRRRLRRRR',
        'LLLLRLLLLLLLLRRRRLRRRRRRRR',
        'LLLLRLLLLLLLLRRRRRRRRLRRRR',
        'LLLLRLLLRLLLLRRRRLRRRLRRRR',
        'LLLRLLLRLLLRRLLRRRLRRRLRRR',
        'LLLRLLLRLLLRRRRRLRRRLRRRLL',
        'LLLRLLLRRLLRRLLRRLLRRRLRRR',
        'LLLRLLLRRLLRRLLRRRLRRLLRRR',
        'LLLRLLLRRLLRRRRRLLRRLRRRLL',
        'LLLRLLLRRLLRRRRRLRRRLLRRLL',
        'LLLRRLLRLLLRRLLRRLLRRRLRRR',
        'LLLRRLLRLLLRRLLRRRLRRLLRRR',
        'LLLRRLLRLLLRRRRRLLRRLRRRLL',
        'LLLRRLLRLLLRRRRRLRRRLLRRLL',
        'LLLRRLLRRLLRRLLRRLLRRLLRRR',
        'LLLRRLLRRLLRRRRRLLRRLLRRLL',
        'RRLLLRLLLRLLLLLRRRLRRRLRRR',
        'RRLLLRLLLRLLLRRRLRRRLRRRLL',
        'RRLLLRLLRRLLLLLRRLLRRRLRRR',
        'RRLLLRLLRRLLLLLRRRLRRLLRRR',
        'RRLLLRLLRRLLLRRRLLRRLRRRLL',
        'RRLLLRLLRRLLLRRRLRRRLLRRLL',
        'RRLLRRLLLRLLLLLRRLLRRRLRRR',
        'RRLLRRLLLRLLLLLRRRLRRLLRRR',
        'RRLLRRLLLRLLLRRRLLRRLRRRLL',
        'RRLLRRLLLRLLLRRRLRRRLLRRLL',
        'RRLLRRLLRRLLLLLRRLLRRLLRRR',
        'RRLLRRLLRRLLLRRRLLRRLLRRLL',
        'RRLRLRLRLRLRRLLRLRLRLRLRLL',
        'RRLRLRLRRRLRRLLRLLLRLRLRLL',
        'RRLRLRLRRRLRRLLRLRLRLLLRLL',
        'RRLRRRLRLRLRRLLRLLLRLRLRLL',
        'RRLRRRLRLRLRRLLRLRLRLLLRLL',
        'RRLRRRLRRRLRRLLRLLLRLLLRLL'
    )
    # Positions tracked by this table (only the Left and Right rows are active;
    # the other faces are kept as commented-out reference rows).
    LR_inside_centers_and_left_oblique_edges = (
        # 10, 17, 18, 19, 20, 24, 25, 26, 30, 31, 32, 33, 40, # Upper
        59, 66, 67, 68, 69, 73, 74, 75, 79, 80, 81, 82, 89, # Left
        # 108, 115, 116, 117, 118, 122, 123, 124, 128, 129, 130, 131, 138, # Front
        157, 164, 165, 166, 167, 171, 172, 173, 177, 178, 179, 180, 187, # Right
        # 206, 213, 214, 215, 216, 220, 221, 222, 226, 227, 228, 229, 236, # Back
        # 255, 262, 263, 264, 265, 269, 270, 271, 275, 276, 277, 278, 285, # Down
    )
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step42.txt',
            self.state_targets,
            linecount=343000,
            max_depth=9,
            filesize=22981000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "U", "U'", "U2",
                "D", "D'", "D2",
                "F", "F'", "F2",
                # NOTE(review): "D", "D'", "D2" is repeated here and no "B"
                # moves are listed -- possibly meant to be "B", "B'", "B2".
                # Verify against the table generator before changing.
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.LR_inside_centers_and_left_oblique_edges])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.LR_inside_centers_and_left_oblique_edges, state):
            cube[pos] = pos_state
class LookupTable777Step43(LookupTable):
    """
    lookup-table-7x7x7-step43.txt
    =============================
    0 steps has 11 entries (0 percent, 0.00x previous step)
    1 steps has 239 entries (0 percent, 21.73x previous step)
    2 steps has 1,405 entries (0 percent, 5.88x previous step)
    3 steps has 6,372 entries (1 percent, 4.54x previous step)
    4 steps has 25,225 entries (7 percent, 3.96x previous step)
    5 steps has 77,525 entries (22 percent, 3.07x previous step)
    6 steps has 135,173 entries (39 percent, 1.74x previous step)
    7 steps has 85,458 entries (24 percent, 0.63x previous step)
    8 steps has 11,492 entries (3 percent, 0.13x previous step)
    9 steps has 100 entries (0 percent, 0.01x previous step)
    Total: 343,000 entries
    Average: 5.87 moves
    """
    # Positions tracked by this table (only the Left and Right rows are active;
    # the other faces are kept as commented-out reference rows).
    LR_inside_centers_and_outer_t_centers = (
        # 11, 17, 18, 19, 23, 24, 25, 26, 27, 31, 32, 33, 39, # Upper
        60, 66, 67, 68, 72, 73, 74, 75, 76, 80, 81, 82, 88, # Left
        # 109, 115, 116, 117, 121, 122, 123, 124, 125, 129, 130, 131, 137, # Front
        158, 164, 165, 166, 170, 171, 172, 173, 174, 178, 179, 180, 186, # Right
        # 207, 213, 214, 215, 219, 220, 221, 222, 223, 227, 228, 229, 235, # Back
        # 256, 262, 263, 264, 268, 269, 270, 271, 272, 276, 277, 278, 284, # Down
    )
    # Acceptable goal patterns over the positions above (one char per position).
    state_targets = (
        'LLLLLLLLLLLLLRRRRRRRRRRRRR',
        'LLLLLLLLRLLLLRRRRLRRRRRRRR',
        'LLLLLLLLRLLLLRRRRRRRRLRRRR',
        'LLLLRLLLLLLLLRRRRLRRRRRRRR',
        'LLLLRLLLLLLLLRRRRRRRRLRRRR',
        'LLLLRLLLRLLLLRRRRLRRRLRRRR',
        'LLLRLLLRLLLRLRLRRRLRRRLRRR',
        'LLLRLLLRLLLRLRRRLRRRLRRRLR',
        'LLLRLLLRRLLRLRLRRLLRRRLRRR',
        'LLLRLLLRRLLRLRLRRRLRRLLRRR',
        'LLLRLLLRRLLRLRRRLLRRLRRRLR',
        'LLLRLLLRRLLRLRRRLRRRLLRRLR',
        'LLLRRLLRLLLRLRLRRLLRRRLRRR',
        'LLLRRLLRLLLRLRLRRRLRRLLRRR',
        'LLLRRLLRLLLRLRRRLLRRLRRRLR',
        'LLLRRLLRLLLRLRRRLRRRLLRRLR',
        'LLLRRLLRRLLRLRLRRLLRRLLRRR',
        'LLLRRLLRRLLRLRRRLLRRLLRRLR',
        'LRLLLRLLLRLLLRLRRRLRRRLRRR',
        'LRLLLRLLLRLLLRRRLRRRLRRRLR',
        'LRLLLRLLRRLLLRLRRLLRRRLRRR',
        'LRLLLRLLRRLLLRLRRRLRRLLRRR',
        'LRLLLRLLRRLLLRRRLLRRLRRRLR',
        'LRLLLRLLRRLLLRRRLRRRLLRRLR',
        'LRLLRRLLLRLLLRLRRLLRRRLRRR',
        'LRLLRRLLLRLLLRLRRRLRRLLRRR',
        'LRLLRRLLLRLLLRRRLLRRLRRRLR',
        'LRLLRRLLLRLLLRRRLRRRLLRRLR',
        'LRLLRRLLRRLLLRLRRLLRRLLRRR',
        'LRLLRRLLRRLLLRRRLLRRLLRRLR',
        'LRLRLRLRLRLRLRLRLRLRLRLRLR',
        'LRLRLRLRRRLRLRLRLLLRLRLRLR',
        'LRLRLRLRRRLRLRLRLRLRLLLRLR',
        'LRLRRRLRLRLRLRLRLLLRLRLRLR',
        'LRLRRRLRLRLRLRLRLRLRLLLRLR',
        'LRLRRRLRRRLRLRLRLLLRLLLRLR'
    )
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step43.txt',
            self.state_targets,
            linecount=343000,
            # NOTE(review): the stats in the class docstring go up to 9 steps
            # but max_depth is 8 here -- verify which is correct.
            max_depth=8,
            filesize=22981000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "U", "U'", "U2",
                "D", "D'", "D2",
                "F", "F'", "F2",
                # NOTE(review): "D", "D'", "D2" is repeated here and no "B"
                # moves are listed -- possibly meant to be "B", "B'", "B2".
                # Verify against the table generator before changing.
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.LR_inside_centers_and_outer_t_centers])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.LR_inside_centers_and_outer_t_centers, state):
            cube[pos] = pos_state
class LookupTable777Step44(LookupTable):
    """
    lookup-table-7x7x7-step44.txt
    =============================
    0 steps has 9 entries (0 percent, 0.00x previous step)
    1 steps has 217 entries (0 percent, 24.11x previous step)
    2 steps has 1,289 entries (0 percent, 5.94x previous step)
    3 steps has 6,178 entries (1 percent, 4.79x previous step)
    4 steps has 24,456 entries (7 percent, 3.96x previous step)
    5 steps has 73,866 entries (21 percent, 3.02x previous step)
    6 steps has 131,607 entries (38 percent, 1.78x previous step)
    7 steps has 90,214 entries (26 percent, 0.69x previous step)
    8 steps has 14,832 entries (4 percent, 0.16x previous step)
    9 steps has 332 entries (0 percent, 0.02x previous step)
    Total: 343,000 entries
    Average: 5.92 moves
    """
    # Acceptable goal patterns over the positions in
    # LR_inside_centers_and_right_oblique_edges (one character per position).
    state_targets = (
        'LLLLLLLLLLLLLRRRRRRRRRRRRR',
        'LLLLLLLLLLLRLRLRRRRRRRRRRR',
        'LLLLLLLLLLLRLRRRRRRRRRRRLR',
        'LLRLLRLLRLLLRLRRRLRRLRRLRR',
        'LLRLLRLLRLLLRRRLRRLRRLRRRL',
        'LLRLLRLLRLLRRLLRRLRRLRRLRR',
        'LLRLLRLLRLLRRLRRRLRRLRRLLR',
        'LLRLLRLLRLLRRRLLRRLRRLRRRL',
        'LLRLLRLLRLLRRRRLRRLRRLRRLL',
        'LRLLLLLLLLLLLRLRRRRRRRRRRR',
        'LRLLLLLLLLLLLRRRRRRRRRRRLR',
        'LRLLLLLLLLLRLRLRRRRRRRRRLR',
        'LRRLLRLLRLLLRLLRRLRRLRRLRR',
        'LRRLLRLLRLLLRLRRRLRRLRRLLR',
        'LRRLLRLLRLLLRRLLRRLRRLRRRL',
        'LRRLLRLLRLLLRRRLRRLRRLRRLL',
        'LRRLLRLLRLLRRLLRRLRRLRRLLR',
        'LRRLLRLLRLLRRRLLRRLRRLRRLL',
        'RLLLRLLRLLRLLLRRRLRRLRRLRR',
        'RLLLRLLRLLRLLRRLRRLRRLRRRL',
        'RLLLRLLRLLRRLLLRRLRRLRRLRR',
        'RLLLRLLRLLRRLLRRRLRRLRRLLR',
        'RLLLRLLRLLRRLRLLRRLRRLRRRL',
        'RLLLRLLRLLRRLRRLRRLRRLRRLL',
        'RLRLRRLRRLRLRLRLRLLRLLRLRL',
        'RLRLRRLRRLRRRLLLRLLRLLRLRL',
        'RLRLRRLRRLRRRLRLRLLRLLRLLL',
        'RRLLRLLRLLRLLLLRRLRRLRRLRR',
        'RRLLRLLRLLRLLLRRRLRRLRRLLR',
        'RRLLRLLRLLRLLRLLRRLRRLRRRL',
        'RRLLRLLRLLRLLRRLRRLRRLRRLL',
        'RRLLRLLRLLRRLLLRRLRRLRRLLR',
        'RRLLRLLRLLRRLRLLRRLRRLRRLL',
        'RRRLRRLRRLRLRLLLRLLRLLRLRL',
        'RRRLRRLRRLRLRLRLRLLRLLRLLL',
        'RRRLRRLRRLRRRLLLRLLRLLRLLL'
    )
    # Positions tracked by this table (only the Left and Right rows are active;
    # the other faces are kept as commented-out reference rows).
    LR_inside_centers_and_right_oblique_edges = [
        # 12, 16, 17, 18, 19, 24, 25, 26, 31, 32, 33, 34, 38, # Upper
        61, 65, 66, 67, 68, 73, 74, 75, 80, 81, 82, 83, 87, # Left
        # 110, 114, 115, 116, 117, 122, 123, 124, 129, 130, 131, 132, 136, # Front
        159, 163, 164, 165, 166, 171, 172, 173, 178, 179, 180, 181, 185, # Right
        # 208, 212, 213, 214, 215, 220, 221, 222, 227, 228, 229, 230, 234, # Back
        # 257, 261, 262, 263, 264, 269, 270, 271, 276, 277, 278, 279, 283, # Down
    ]
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step44.txt',
            self.state_targets,
            linecount=343000,
            max_depth=9,
            filesize=22981000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "U", "U'", "U2",
                "D", "D'", "D2",
                "F", "F'", "F2",
                # NOTE(review): "D", "D'", "D2" is repeated here and no "B"
                # moves are listed -- possibly meant to be "B", "B'", "B2".
                # Verify against the table generator before changing.
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.LR_inside_centers_and_right_oblique_edges])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.LR_inside_centers_and_right_oblique_edges, state):
            cube[pos] = pos_state
class LookupTableIDA777Step40(LookupTableIDAViaGraph):
    """Graph-based IDA search that combines the step 41-44 prune tables."""

    def __init__(self, parent):
        # NOTE(review): "D", "D'", "D2" appears twice below and no "B" moves
        # are restricted -- possibly the second set was meant to be
        # "B", "B'", "B2". Kept as-is to preserve behavior; verify against
        # the table generator before changing.
        blocked_moves = (
            "3Uw", "3Uw'", "Uw", "Uw'",
            "3Lw", "3Lw'", "Lw", "Lw'",
            "3Fw", "3Fw'", "Fw", "Fw'",
            "3Rw", "3Rw'", "Rw", "Rw'",
            "3Bw", "3Bw'", "Bw", "Bw'",
            "3Dw", "3Dw'", "Dw", "Dw'",
            "U", "U'", "U2",
            "D", "D'", "D2",
            "F", "F'", "F2",
            "D", "D'", "D2",
        )
        pruners = (
            parent.lt_step41,
            parent.lt_step42,
            parent.lt_step43,
            parent.lt_step44,
        )
        LookupTableIDAViaGraph.__init__(
            self,
            parent,
            all_moves=moves_777,
            illegal_moves=blocked_moves,
            prune_tables=pruners,
        )
class LookupTable777Step51(LookupTable):
    """
    lookup-table-7x7x7-step51.txt
    =============================
    0 steps has 22 entries (0 percent, 0.00x previous step)
    1 steps has 288 entries (0 percent, 13.09x previous step)
    2 steps has 1,328 entries (0 percent, 4.61x previous step)
    3 steps has 6,846 entries (1 percent, 5.16x previous step)
    4 steps has 32,296 entries (9 percent, 4.72x previous step)
    5 steps has 99,008 entries (28 percent, 3.07x previous step)
    6 steps has 148,952 entries (43 percent, 1.50x previous step)
    7 steps has 51,980 entries (15 percent, 0.35x previous step)
    8 steps has 2,272 entries (0 percent, 0.04x previous step)
    9 steps has 8 entries (0 percent, 0.00x previous step)
    Total: 343,000 entries
    Average: 5.61 moves
    """
    # Acceptable goal patterns over the positions in
    # UD_oblique_edges_and_outer_t_center (one character per position).
    state_targets = (
        'DDDDDDDDDDDDUUUUUUUUUUUU',
        'DDDDUDUDUDDDUUUDUDUDUUUU',
        'DDDDUDUDUDDDUUUUDUDUDUUU',
        'DDDUDUDUDDDDUUUDUDUDUUUU',
        'DDDUDUDUDDDDUUUUDUDUDUUU',
        'DDDUUUUUUDDDUUUDDDDDDUUU',
        'DDUDDDDDDDDUDUUUUUUUUDUU',
        'DDUDDDDDDDDUUUDUUUUUUUUD',
        'DDUDUDUDUDDUDUUDUDUDUDUU',
        'DDUDUDUDUDDUDUUUDUDUDDUU',
        'DDUDUDUDUDDUUUDDUDUDUUUD',
        'DDUDUDUDUDDUUUDUDUDUDUUD',
        'DDUUDUDUDDDUDUUDUDUDUDUU',
        'DDUUDUDUDDDUDUUUDUDUDDUU',
        'DDUUDUDUDDDUUUDDUDUDUUUD',
        'DDUUDUDUDDDUUUDUDUDUDUUD',
        'DDUUUUUUUDDUDUUDDDDDDDUU',
        'DDUUUUUUUDDUUUDDDDDDDUUD',
        'DUDDDDDDDDUDUDUUUUUUUUDU',
        'DUDDUDUDUDUDUDUDUDUDUUDU',
        'DUDDUDUDUDUDUDUUDUDUDUDU',
        'DUDUDUDUDDUDUDUDUDUDUUDU',
        'DUDUDUDUDDUDUDUUDUDUDUDU',
        'DUDUUUUUUDUDUDUDDDDDDUDU',
        'DUUDDDDDDDUUDDUUUUUUUDDU',
        'DUUDDDDDDDUUUDDUUUUUUUDD',
        'DUUDUDUDUDUUDDUDUDUDUDDU',
        'DUUDUDUDUDUUDDUUDUDUDDDU',
        'DUUDUDUDUDUUUDDDUDUDUUDD',
        'DUUDUDUDUDUUUDDUDUDUDUDD',
        'DUUUDUDUDDUUDDUDUDUDUDDU',
        'DUUUDUDUDDUUDDUUDUDUDDDU',
        'DUUUDUDUDDUUUDDDUDUDUUDD',
        'DUUUDUDUDDUUUDDUDUDUDUDD',
        'DUUUUUUUUDUUDDUDDDDDDDDU',
        'DUUUUUUUUDUUUDDDDDDDDUDD',
        'UDDDDDDDDUDDDUUUUUUUUDUU',
        'UDDDDDDDDUDDUUDUUUUUUUUD',
        'UDDDUDUDUUDDDUUDUDUDUDUU',
        'UDDDUDUDUUDDDUUUDUDUDDUU',
        'UDDDUDUDUUDDUUDDUDUDUUUD',
        'UDDDUDUDUUDDUUDUDUDUDUUD',
        'UDDUDUDUDUDDDUUDUDUDUDUU',
        'UDDUDUDUDUDDDUUUDUDUDDUU',
        'UDDUDUDUDUDDUUDDUDUDUUUD',
        'UDDUDUDUDUDDUUDUDUDUDUUD',
        'UDDUUUUUUUDDDUUDDDDDDDUU',
        'UDDUUUUUUUDDUUDDDDDDDUUD',
        'UDUDDDDDDUDUDUDUUUUUUDUD',
        'UDUDUDUDUUDUDUDDUDUDUDUD',
        'UDUDUDUDUUDUDUDUDUDUDDUD',
        'UDUUDUDUDUDUDUDDUDUDUDUD',
        'UDUUDUDUDUDUDUDUDUDUDDUD',
        'UDUUUUUUUUDUDUDDDDDDDDUD',
        'UUDDDDDDDUUDDDUUUUUUUDDU',
        'UUDDDDDDDUUDUDDUUUUUUUDD',
        'UUDDUDUDUUUDDDUDUDUDUDDU',
        'UUDDUDUDUUUDDDUUDUDUDDDU',
        'UUDDUDUDUUUDUDDDUDUDUUDD',
        'UUDDUDUDUUUDUDDUDUDUDUDD',
        'UUDUDUDUDUUDDDUDUDUDUDDU',
        'UUDUDUDUDUUDDDUUDUDUDDDU',
        'UUDUDUDUDUUDUDDDUDUDUUDD',
        'UUDUDUDUDUUDUDDUDUDUDUDD',
        'UUDUUUUUUUUDDDUDDDDDDDDU',
        'UUDUUUUUUUUDUDDDDDDDDUDD',
        'UUUDDDDDDUUUDDDUUUUUUDDD',
        'UUUDUDUDUUUUDDDDUDUDUDDD',
        'UUUDUDUDUUUUDDDUDUDUDDDD',
        'UUUUDUDUDUUUDDDDUDUDUDDD',
        'UUUUDUDUDUUUDDDUDUDUDDDD',
        'UUUUUUUUUUUUDDDDDDDDDDDD'
    )
    # Positions tracked by this table (only the Upper and Down rows are active;
    # the other faces are kept as commented-out reference rows).
    UD_oblique_edges_and_outer_t_center = (
        10, 11, 12, 16, 20, 23, 27, 30, 34, 38, 39, 40, # Upper
        # 59, 60, 61, 65, 69, 72, 76, 79, 83, 87, 88, 89, # Left
        # 108, 109, 110, 114, 118, 121, 125, 128, 132, 136, 137, 138, # Front
        # 157, 158, 159, 163, 167, 170, 174, 177, 181, 185, 186, 187, # Right
        # 206, 207, 208, 212, 216, 219, 223, 226, 230, 234, 235, 236, # Back
        255, 256, 257, 261, 265, 268, 272, 275, 279, 283, 284, 285, # Down
    )
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step51.txt',
            self.state_targets,
            linecount=343000,
            max_depth=9,
            filesize=21266000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Uw2", "3Dw2", "Uw2", "Dw2",
                "F", "F'", "F2",
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.UD_oblique_edges_and_outer_t_center])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.UD_oblique_edges_and_outer_t_center, state):
            cube[pos] = pos_state
class LookupTable777Step52(LookupTable):
    """
    lookup-table-7x7x7-step52.txt
    =============================
    0 steps has 21 entries (0 percent, 0.00x previous step)
    1 steps has 170 entries (0 percent, 8.10x previous step)
    2 steps has 876 entries (0 percent, 5.15x previous step)
    3 steps has 4,080 entries (1 percent, 4.66x previous step)
    4 steps has 16,546 entries (4 percent, 4.06x previous step)
    5 steps has 54,737 entries (15 percent, 3.31x previous step)
    6 steps has 121,824 entries (35 percent, 2.23x previous step)
    7 steps has 115,046 entries (33 percent, 0.94x previous step)
    8 steps has 28,763 entries (8 percent, 0.25x previous step)
    9 steps has 927 entries (0 percent, 0.03x previous step)
    10 steps has 10 entries (0 percent, 0.01x previous step)
    Total: 343,000 entries
    Average: 6.21 moves
    """
    # Acceptable goal patterns over the positions in
    # UD_inside_centers_and_left_oblique_edges (one character per position).
    state_targets = (
        'DDUDDDUDDDUDDUUDUUUDUUUDUU',
        'DDUDDDUDUDUDDUUDUDUDUUUDUU',
        'DDUDDDUDUDUDDUUDUUUDUDUDUU',
        'DDUDUDUDDDUDDUUDUDUDUUUDUU',
        'DDUDUDUDDDUDDUUDUUUDUDUDUU',
        'DDUDUDUDUDUDDUUDUDUDUDUDUU',
        'DDUUDDUUDDUUUDDDUUDDUUDDUU',
        'DDUUDDUUDDUUUUUDDUUDDUUDDD',
        'DDUUDDUUUDUUUDDDUDDDUUDDUU',
        'DDUUDDUUUDUUUDDDUUDDUDDDUU',
        'DDUUDDUUUDUUUUUDDDUDDUUDDD',
        'DDUUDDUUUDUUUUUDDUUDDDUDDD',
        'DDUUUDUUDDUUUDDDUDDDUUDDUU',
        'DDUUUDUUDDUUUDDDUUDDUDDDUU',
        'DDUUUDUUDDUUUUUDDDUDDUUDDD',
        'DDUUUDUUDDUUUUUDDUUDDDUDDD',
        'DDUUUDUUUDUUUDDDUDDDUDDDUU',
        'DDUUUDUUUDUUUUUDDDUDDDUDDD',
        'UUUDDUUDDUUDDDDDUUDDUUDDUU',
        'UUUDDUUDDUUDDUUDDUUDDUUDDD',
        'UUUDDUUDUUUDDDDDUDDDUUDDUU',
        'UUUDDUUDUUUDDDDDUUDDUDDDUU',
        'UUUDDUUDUUUDDUUDDDUDDUUDDD',
        'UUUDDUUDUUUDDUUDDUUDDDUDDD',
        'UUUDUUUDDUUDDDDDUDDDUUDDUU',
        'UUUDUUUDDUUDDDDDUUDDUDDDUU',
        'UUUDUUUDDUUDDUUDDDUDDUUDDD',
        'UUUDUUUDDUUDDUUDDUUDDDUDDD',
        'UUUDUUUDUUUDDDDDUDDDUDDDUU',
        'UUUDUUUDUUUDDUUDDDUDDDUDDD',
        'UUUUDUUUDUUUUDDDDUDDDUDDDD',
        'UUUUDUUUUUUUUDDDDDDDDUDDDD',
        'UUUUDUUUUUUUUDDDDUDDDDDDDD',
        'UUUUUUUUDUUUUDDDDDDDDUDDDD',
        'UUUUUUUUDUUUUDDDDUDDDDDDDD',
        'UUUUUUUUUUUUUDDDDDDDDDDDDD'
    )
    # Positions tracked by this table (only the Upper and Down rows are active;
    # the other faces are kept as commented-out reference rows).
    UD_inside_centers_and_left_oblique_edges = (
        10, 17, 18, 19, 20, 24, 25, 26, 30, 31, 32, 33, 40, # Upper
        # 59, 66, 67, 68, 69, 73, 74, 75, 79, 80, 81, 82, 89, # Left
        # 108, 115, 116, 117, 118, 122, 123, 124, 128, 129, 130, 131, 138, # Front
        # 157, 164, 165, 166, 167, 171, 172, 173, 177, 178, 179, 180, 187, # Right
        # 206, 213, 214, 215, 216, 220, 221, 222, 226, 227, 228, 229, 236, # Back
        255, 262, 263, 264, 265, 269, 270, 271, 275, 276, 277, 278, 285, # Down
    )
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step52.txt',
            self.state_targets,
            linecount=343000,
            max_depth=10,
            filesize=23667000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Uw2", "3Dw2", "Uw2", "Dw2",
                "F", "F'", "F2",
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.UD_inside_centers_and_left_oblique_edges])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.UD_inside_centers_and_left_oblique_edges, state):
            cube[pos] = pos_state
class LookupTable777Step53(LookupTable):
    """
    lookup-table-7x7x7-step53.txt
    =============================
    0 steps has 21 entries (0 percent, 0.00x previous step)
    1 steps has 194 entries (0 percent, 9.24x previous step)
    2 steps has 960 entries (0 percent, 4.95x previous step)
    3 steps has 4,061 entries (1 percent, 4.23x previous step)
    4 steps has 16,207 entries (4 percent, 3.99x previous step)
    5 steps has 54,813 entries (15 percent, 3.38x previous step)
    6 steps has 122,554 entries (35 percent, 2.24x previous step)
    7 steps has 116,234 entries (33 percent, 0.95x previous step)
    8 steps has 27,300 entries (7 percent, 0.23x previous step)
    9 steps has 654 entries (0 percent, 0.02x previous step)
    10 steps has 2 entries (0 percent, 0.00x previous step)
    Total: 343,000 entries
    Average: 6.20 moves
    """
    # Acceptable goal patterns over the positions in
    # UD_inside_centers_and_outer_t_centers (one character per position).
    state_targets = (
        'UDUDDDUDDDUDUDUDUUUDUUUDUD',
        'UDUDDDUDUDUDUDUDUDUDUUUDUD',
        'UDUDDDUDUDUDUDUDUUUDUDUDUD',
        'UDUDUDUDDDUDUDUDUDUDUUUDUD',
        'UDUDUDUDDDUDUDUDUUUDUDUDUD',
        'UDUDUDUDUDUDUDUDUDUDUDUDUD',
        'UDUUDDUUDDUUUDDDUUDDUUDDUD',
        'UDUUDDUUDDUUUDUDDUUDDUUDDD',
        'UDUUDDUUUDUUUDDDUDDDUUDDUD',
        'UDUUDDUUUDUUUDDDUUDDUDDDUD',
        'UDUUDDUUUDUUUDUDDDUDDUUDDD',
        'UDUUDDUUUDUUUDUDDUUDDDUDDD',
        'UDUUUDUUDDUUUDDDUDDDUUDDUD',
        'UDUUUDUUDDUUUDDDUUDDUDDDUD',
        'UDUUUDUUDDUUUDUDDDUDDUUDDD',
        'UDUUUDUUDDUUUDUDDUUDDDUDDD',
        'UDUUUDUUUDUUUDDDUDDDUDDDUD',
        'UDUUUDUUUDUUUDUDDDUDDDUDDD',
        'UUUDDUUDDUUDUDDDUUDDUUDDUD',
        'UUUDDUUDDUUDUDUDDUUDDUUDDD',
        'UUUDDUUDUUUDUDDDUDDDUUDDUD',
        'UUUDDUUDUUUDUDDDUUDDUDDDUD',
        'UUUDDUUDUUUDUDUDDDUDDUUDDD',
        'UUUDDUUDUUUDUDUDDUUDDDUDDD',
        'UUUDUUUDDUUDUDDDUDDDUUDDUD',
        'UUUDUUUDDUUDUDDDUUDDUDDDUD',
        'UUUDUUUDDUUDUDUDDDUDDUUDDD',
        'UUUDUUUDDUUDUDUDDUUDDDUDDD',
        'UUUDUUUDUUUDUDDDUDDDUDDDUD',
        'UUUDUUUDUUUDUDUDDDUDDDUDDD',
        'UUUUDUUUDUUUUDDDDUDDDUDDDD',
        'UUUUDUUUUUUUUDDDDDDDDUDDDD',
        'UUUUDUUUUUUUUDDDDUDDDDDDDD',
        'UUUUUUUUDUUUUDDDDDDDDUDDDD',
        'UUUUUUUUDUUUUDDDDUDDDDDDDD',
        'UUUUUUUUUUUUUDDDDDDDDDDDDD'
    )
    # Positions tracked by this table (only the Upper and Down rows are active;
    # the other faces are kept as commented-out reference rows).
    UD_inside_centers_and_outer_t_centers = (
        11, 17, 18, 19, 23, 24, 25, 26, 27, 31, 32, 33, 39, # Upper
        # 60, 66, 67, 68, 72, 73, 74, 75, 76, 80, 81, 82, 88, # Left
        # 109, 115, 116, 117, 121, 122, 123, 124, 125, 129, 130, 131, 137, # Front
        # 158, 164, 165, 166, 170, 171, 172, 173, 174, 178, 179, 180, 186, # Right
        # 207, 213, 214, 215, 219, 220, 221, 222, 223, 227, 228, 229, 235, # Back
        256, 262, 263, 264, 268, 269, 270, 271, 272, 276, 277, 278, 284, # Down
    )
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step53.txt',
            self.state_targets,
            linecount=343000,
            max_depth=10,
            filesize=23667000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Uw2", "3Dw2", "Uw2", "Dw2",
                "F", "F'", "F2",
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.UD_inside_centers_and_outer_t_centers])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.UD_inside_centers_and_outer_t_centers, state):
            cube[pos] = pos_state
class LookupTable777Step54(LookupTable):
    """
    lookup-table-7x7x7-step54.txt
    =============================
    0 steps has 20 entries (0 percent, 0.00x previous step)
    1 steps has 171 entries (0 percent, 8.55x previous step)
    2 steps has 876 entries (0 percent, 5.12x previous step)
    3 steps has 4,080 entries (1 percent, 4.66x previous step)
    4 steps has 16,546 entries (4 percent, 4.06x previous step)
    5 steps has 54,737 entries (15 percent, 3.31x previous step)
    6 steps has 121,824 entries (35 percent, 2.23x previous step)
    7 steps has 115,046 entries (33 percent, 0.94x previous step)
    8 steps has 28,763 entries (8 percent, 0.25x previous step)
    9 steps has 927 entries (0 percent, 0.03x previous step)
    10 steps has 10 entries (0 percent, 0.01x previous step)
    Total: 343,000 entries
    Average: 6.21 moves
    """
    # Acceptable goal patterns over the positions in
    # UD_inside_centers_and_right_oblique_edges (one character per position).
    state_targets = (
        'DDDUDDUDDUDDDUUUDUUDUUDUUU',
        'DDDUDDUDDUDUDUDUDUUDUUDUUU',
        'DDDUDDUDDUDUDUUUDUUDUUDUDU',
        'DDUUDUUDUUDDUDUUDDUDDUDDUU',
        'DDUUDUUDUUDDUUUDDUDDUDDUUD',
        'DDUUDUUDUUDUUDDUDDUDDUDDUU',
        'DDUUDUUDUUDUUDUUDDUDDUDDDU',
        'DDUUDUUDUUDUUUDDDUDDUDDUUD',
        'DDUUDUUDUUDUUUUDDUDDUDDUDD',
        'DUDUDDUDDUDDDUDUDUUDUUDUUU',
        'DUDUDDUDDUDDDUUUDUUDUUDUDU',
        'DUDUDDUDDUDUDUDUDUUDUUDUDU',
        'DUUUDUUDUUDDUDDUDDUDDUDDUU',
        'DUUUDUUDUUDDUDUUDDUDDUDDDU',
        'DUUUDUUDUUDDUUDDDUDDUDDUUD',
        'DUUUDUUDUUDDUUUDDUDDUDDUDD',
        'DUUUDUUDUUDUUDDUDDUDDUDDDU',
        'DUUUDUUDUUDUUUDDDUDDUDDUDD',
        'UDDUUDUUDUUDDDUUDDUDDUDDUU',
        'UDDUUDUUDUUDDUUDDUDDUDDUUD',
        'UDDUUDUUDUUUDDDUDDUDDUDDUU',
        'UDDUUDUUDUUUDDUUDDUDDUDDDU',
        'UDDUUDUUDUUUDUDDDUDDUDDUUD',
        'UDDUUDUUDUUUDUUDDUDDUDDUDD',
        'UDUUUUUUUUUDUDUDDDDDDDDDUD',
        'UDUUUUUUUUUUUDDDDDDDDDDDUD',
        'UDUUUUUUUUUUUDUDDDDDDDDDDD',
        'UUDUUDUUDUUDDDDUDDUDDUDDUU',
        'UUDUUDUUDUUDDDUUDDUDDUDDDU',
        'UUDUUDUUDUUDDUDDDUDDUDDUUD',
        'UUDUUDUUDUUDDUUDDUDDUDDUDD',
        'UUDUUDUUDUUUDDDUDDUDDUDDDU',
        'UUDUUDUUDUUUDUDDDUDDUDDUDD',
        'UUUUUUUUUUUDUDDDDDDDDDDDUD',
        'UUUUUUUUUUUDUDUDDDDDDDDDDD',
        'UUUUUUUUUUUUUDDDDDDDDDDDDD'
    )
    # Positions tracked by this table (only the Upper and Down rows are active;
    # the other faces are kept as commented-out reference rows).
    UD_inside_centers_and_right_oblique_edges = [
        12, 16, 17, 18, 19, 24, 25, 26, 31, 32, 33, 34, 38, # Upper
        # 61, 65, 66, 67, 68, 73, 74, 75, 80, 81, 82, 83, 87, # Left
        # 110, 114, 115, 116, 117, 122, 123, 124, 129, 130, 131, 132, 136, # Front
        # 159, 163, 164, 165, 166, 171, 172, 173, 178, 179, 180, 181, 185, # Right
        # 208, 212, 213, 214, 215, 220, 221, 222, 227, 228, 229, 230, 234, # Back
        257, 261, 262, 263, 264, 269, 270, 271, 276, 277, 278, 279, 283, # Down
    ]
    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step54.txt',
            self.state_targets,
            linecount=343000,
            max_depth=10,
            filesize=24010000,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Uw2", "3Dw2", "Uw2", "Dw2",
                "F", "F'", "F2",
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )
    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.UD_inside_centers_and_right_oblique_edges])
    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.UD_inside_centers_and_right_oblique_edges, state):
            cube[pos] = pos_state
class LookupTable777Step55(LookupTable):
    """
    lookup-table-7x7x7-step55.txt
    =============================
    0 steps has 2 entries (2 percent, 0.00x previous step)
    1 steps has 8 entries (11 percent, 4.00x previous step)
    2 steps has 20 entries (27 percent, 2.50x previous step)
    3 steps has 24 entries (33 percent, 1.20x previous step)
    4 steps has 18 entries (25 percent, 0.75x previous step)
    Total: 72 entries
    Average: 2.67 moves
    """

    # LR center positions with the outside x-centers of each face excluded.
    LR_centers_minus_outside_x_centers_777 = (
        59, 60, 61, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 79, 80, 81, 82, 83, 87, 88, 89,  # Left
        157, 158, 159, 163, 164, 165, 166, 167, 170, 171, 172, 173, 174, 177, 178, 179, 180, 181, 185, 186, 187,  # Right
    )

    # Acceptable goal patterns over the positions above.
    state_targets = (
        "LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR",
        "RRRRLLLRRLLLRRLLLRRRRLLLLRRRLLRRRLLRRRLLLL",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step55.txt',
            self.state_targets,
            linecount=72,
            max_depth=4,
            filesize=4392,
            all_moves=moves_777,
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Uw2", "3Dw2", "Uw2", "Dw2",
                "F", "F'", "F2",
                "D", "D'", "D2",
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's colors at the tracked positions as one string."""
        cube = self.parent.state
        return "".join(cube[pos] for pos in self.LR_centers_minus_outside_x_centers_777)

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write the characters of *state* onto *cube* at the tracked positions."""
        for pos, value in zip(self.LR_centers_minus_outside_x_centers_777, list(state)):
            cube[pos] = value
class LookupTableIDA777Step50(LookupTableIDAViaGraph):
    # IDA* graph search for this phase; heuristics come from the parent's
    # step51..step55 prune tables.
    def __init__(self, parent):
        LookupTableIDAViaGraph.__init__(
            self,
            parent,
            all_moves=moves_777,
            # Moves excluded from the search (same set the prune tables use).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Uw2", "3Dw2", "Uw2", "Dw2",
                "F", "F'", "F2",
                "D", "D'", "D2",
            ),
            prune_tables=(
                parent.lt_step51,
                parent.lt_step52,
                parent.lt_step53,
                parent.lt_step54,
                parent.lt_step55,
            ),
        )
class LookupTable777Step61(LookupTable):
    """
    lookup-table-7x7x7-step61.txt
    =============================
    0 steps has 2 entries (2 percent, 0.00x previous step)
    1 steps has 8 entries (11 percent, 4.00x previous step)
    2 steps has 20 entries (27 percent, 2.50x previous step)
    3 steps has 24 entries (33 percent, 1.20x previous step)
    4 steps has 18 entries (25 percent, 0.75x previous step)
    Total: 72 entries
    Average: 2.67 moves
    """

    # Cube positions of the UD centers minus the four outside x-centers,
    # in the order used to build the state string.
    UD_centers_minus_outside_x_centers_777 = (
        10, 11, 12, 16, 17, 18, 19, 20, 23, 24, 25, 26, 27, 30, 31, 32, 33, 34, 38, 39, 40, # Upper
        255, 256, 257, 261, 262, 263, 264, 265, 268, 269, 270, 271, 272, 275, 276, 277, 278, 279, 283, 284, 285, # Down
    )

    # Goal patterns for the tracked positions (one character per position).
    state_targets = (
        "UUUUUUUUUUUUUUUUUUUUUDDDDDDDDDDDDDDDDDDDDD",
        "DDDDUUUDDUUUDDUUUDDDDUUUUDDDUUDDDUUDDDUUUU",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step61.txt',
            self.state_targets,
            linecount=72,
            max_depth=4,
            filesize=4392,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.UD_centers_minus_outside_x_centers_777])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.UD_centers_minus_outside_x_centers_777, state):
            cube[pos] = pos_state
class LookupTable777Step62(LookupTable):
    """
    lookup-table-7x7x7-step62.txt
    =============================
    0 steps has 2 entries (2 percent, 0.00x previous step)
    1 steps has 8 entries (11 percent, 4.00x previous step)
    2 steps has 20 entries (27 percent, 2.50x previous step)
    3 steps has 24 entries (33 percent, 1.20x previous step)
    4 steps has 18 entries (25 percent, 0.75x previous step)
    Total: 72 entries
    Average: 2.67 moves
    """

    # Cube positions of the LR centers minus the four outside x-centers,
    # in the order used to build the state string.
    LR_centers_minus_outside_x_centers_777 = (
        59, 60, 61, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 79, 80, 81, 82, 83, 87, 88, 89, # Left
        157, 158, 159, 163, 164, 165, 166, 167, 170, 171, 172, 173, 174, 177, 178, 179, 180, 181, 185, 186, 187, # Right
    )

    # Goal patterns for the tracked positions (one character per position).
    state_targets = (
        "LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR",
        "RRRRLLLRRLLLRRLLLRRRRLLLLRRRLLRRRLLRRRLLLL",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step62.txt',
            self.state_targets,
            linecount=72,
            max_depth=4,
            filesize=4392,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.LR_centers_minus_outside_x_centers_777])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.LR_centers_minus_outside_x_centers_777, state):
            cube[pos] = pos_state
class LookupTable777Step65(LookupTable):
    """
    lookup-table-7x7x7-step65.txt
    =============================
    0 steps has 2 entries (0 percent, 0.00x previous step)
    1 steps has 16 entries (0 percent, 8.00x previous step)
    2 steps has 106 entries (0 percent, 6.62x previous step)
    3 steps has 538 entries (0 percent, 5.08x previous step)
    4 steps has 2,308 entries (0 percent, 4.29x previous step)
    5 steps has 9,244 entries (2 percent, 4.01x previous step)
    6 steps has 31,742 entries (9 percent, 3.43x previous step)
    7 steps has 84,464 entries (24 percent, 2.66x previous step)
    8 steps has 128,270 entries (37 percent, 1.52x previous step)
    9 steps has 75,830 entries (22 percent, 0.59x previous step)
    10 steps has 10,480 entries (3 percent, 0.14x previous step)
    Total: 343,000 entries
    Average: 7.73 moves
    """

    # FB inside centers and outer t-centers; only the Front and Back rows are
    # active, the other faces are kept as commented reference rows.
    FB_inside_centers_and_outer_t_centers = (
        # 11, 17, 18, 19, 23, 24, 25, 26, 27, 31, 32, 33, 39, # Upper
        # 60, 66, 67, 68, 72, 73, 74, 75, 76, 80, 81, 82, 88, # Left
        109, 115, 116, 117, 121, 122, 123, 124, 125, 129, 130, 131, 137, # Front
        # 158, 164, 165, 166, 170, 171, 172, 173, 174, 178, 179, 180, 186, # Right
        207, 213, 214, 215, 219, 220, 221, 222, 223, 227, 228, 229, 235, # Back
        # 256, 262, 263, 264, 268, 269, 270, 271, 272, 276, 277, 278, 284, # Down
    )

    # Goal patterns for the tracked positions (one character per position).
    state_targets = (
        "FFFFFFFFFFFFFBBBBBBBBBBBBB",
        "BFFFBFFFBFFFBFBBBFBBBFBBBF",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step65.txt',
            self.state_targets,
            linecount=343000,
            max_depth=10,
            filesize=25039000,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.FB_inside_centers_and_outer_t_centers])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.FB_inside_centers_and_outer_t_centers, state):
            cube[pos] = pos_state
class LookupTable777Step66(LookupTable):
    """
    lookup-table-7x7x7-step66.txt
    =============================
    0 steps has 2 entries (0 percent, 0.00x previous step)
    1 steps has 16 entries (0 percent, 8.00x previous step)
    2 steps has 82 entries (0 percent, 5.12x previous step)
    3 steps has 450 entries (0 percent, 5.49x previous step)
    4 steps has 2,406 entries (0 percent, 5.35x previous step)
    5 steps has 11,960 entries (3 percent, 4.97x previous step)
    6 steps has 43,430 entries (12 percent, 3.63x previous step)
    7 steps has 108,510 entries (31 percent, 2.50x previous step)
    8 steps has 133,124 entries (38 percent, 1.23x previous step)
    9 steps has 40,908 entries (11 percent, 0.31x previous step)
    10 steps has 2,112 entries (0 percent, 0.05x previous step)
    Total: 343,000 entries
    Average: 7.42 moves
    """

    # FB oblique edges and outer t-centers; only the Front and Back rows are
    # active, the other faces are kept as commented reference rows.
    FB_oblique_edges_and_outer_t_center = (
        # 10, 11, 12, 16, 20, 23, 27, 30, 34, 38, 39, 40, # Upper
        # 59, 60, 61, 65, 69, 72, 76, 79, 83, 87, 88, 89, # Left
        108, 109, 110, 114, 118, 121, 125, 128, 132, 136, 137, 138, # Front
        # 157, 158, 159, 163, 167, 170, 174, 177, 181, 185, 186, 187, # Right
        206, 207, 208, 212, 216, 219, 223, 226, 230, 234, 235, 236, # Back
        # 255, 256, 257, 261, 265, 268, 272, 275, 279, 283, 284, 285, # Down
    )

    # Goal patterns for the tracked positions (one character per position).
    state_targets = (
        "BBBBBBBBBBBBFFFFFFFFFFFF",
        "FFFFFFFFFFFFBBBBBBBBBBBB",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step66.txt',
            self.state_targets,
            linecount=343000,
            max_depth=10,
            filesize=23667000,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.FB_oblique_edges_and_outer_t_center])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.FB_oblique_edges_and_outer_t_center, state):
            cube[pos] = pos_state
class LookupTableIDA777Step60(LookupTableIDAViaGraph):
    # IDA* graph search for this phase; heuristics come from the parent's
    # step61/62/65/66 prune tables.
    def __init__(self, parent):
        LookupTableIDAViaGraph.__init__(
            self,
            parent,
            all_moves=moves_777,
            # Moves excluded from the search (same set the prune tables use).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            prune_tables=(
                parent.lt_step61,
                parent.lt_step62,
                parent.lt_step65,
                parent.lt_step66,
            ),
        )
class LookupTable777Step71(LookupTable):
    """
    lookup-table-7x7x7-step71.txt
    =============================
    0 steps has 1 entries (2 percent, 0.00x previous step)
    1 steps has 4 entries (11 percent, 4.00x previous step)
    2 steps has 10 entries (27 percent, 2.50x previous step)
    3 steps has 12 entries (33 percent, 1.20x previous step)
    4 steps has 9 entries (25 percent, 0.75x previous step)
    Total: 36 entries
    Average: 2.67 moves
    """

    # UD centers with the four outside x-centers excluded, in the order
    # used to build the state string.
    UD_centers_minus_outside_x_centers_777 = (
        10, 11, 12, 16, 17, 18, 19, 20, 23, 24, 25, 26, 27, 30, 31, 32, 33, 34, 38, 39, 40, # Upper
        255, 256, 257, 261, 262, 263, 264, 265, 268, 269, 270, 271, 272, 275, 276, 277, 278, 279, 283, 284, 285, # Down
    )

    # Single solved pattern for the tracked positions.
    state_targets = ("UUUUUUUUUUUUUUUUUUUUUDDDDDDDDDDDDDDDDDDDDD",)

    def __init__(self, parent):
        # Moves that may not be used while searching this table.
        banned_moves = (
            "3Uw", "3Uw'", "Uw", "Uw'",
            "3Lw", "3Lw'", "Lw", "Lw'",
            "3Fw", "3Fw'", "Fw", "Fw'",
            "3Rw", "3Rw'", "Rw", "Rw'",
            "3Bw", "3Bw'", "Bw", "Bw'",
            "3Dw", "3Dw'", "Dw", "Dw'",
            "L", "L'",
            "R", "R'",
            "3Fw2", "3Bw2", "Fw2", "Bw2",
            "U", "U'",
            "D", "D'",
        )
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step71.txt',
            self.state_targets,
            linecount=36,
            max_depth=4,
            filesize=2196,
            all_moves=moves_777,
            illegal_moves=banned_moves,
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        cube = self.parent.state
        return "".join(cube[position] for position in self.UD_centers_minus_outside_x_centers_777)

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        for position, sticker in zip(self.UD_centers_minus_outside_x_centers_777, state):
            cube[position] = sticker
class LookupTable777Step72(LookupTable):
    """
    lookup-table-7x7x7-step72.txt
    =============================
    0 steps has 1 entries (2 percent, 0.00x previous step)
    1 steps has 4 entries (11 percent, 4.00x previous step)
    2 steps has 10 entries (27 percent, 2.50x previous step)
    3 steps has 12 entries (33 percent, 1.20x previous step)
    4 steps has 9 entries (25 percent, 0.75x previous step)
    Total: 36 entries
    Average: 2.67 moves
    """

    # Cube positions of the LR centers minus the four outside x-centers,
    # in the order used to build the state string.
    LR_centers_minus_outside_x_centers_777 = (
        59, 60, 61, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 79, 80, 81, 82, 83, 87, 88, 89, # Left
        157, 158, 159, 163, 164, 165, 166, 167, 170, 171, 172, 173, 174, 177, 178, 179, 180, 181, 185, 186, 187, # Right
    )

    # Single solved pattern for the tracked positions.
    state_targets = (
        "LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step72.txt',
            self.state_targets,
            linecount=36,
            max_depth=4,
            filesize=2196,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.LR_centers_minus_outside_x_centers_777])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.LR_centers_minus_outside_x_centers_777, state):
            cube[pos] = pos_state
class LookupTable777Step75(LookupTable):
    """
    lookup-table-7x7x7-step75.txt
    =============================
    0 steps has 1 entries (0 percent, 0.00x previous step)
    1 steps has 8 entries (0 percent, 8.00x previous step)
    2 steps has 56 entries (0 percent, 7.00x previous step)
    3 steps has 300 entries (0 percent, 5.36x previous step)
    4 steps has 1,317 entries (0 percent, 4.39x previous step)
    5 steps has 5,382 entries (1 percent, 4.09x previous step)
    6 steps has 19,083 entries (5 percent, 3.55x previous step)
    7 steps has 55,022 entries (16 percent, 2.88x previous step)
    8 steps has 104,894 entries (30 percent, 1.91x previous step)
    9 steps has 106,324 entries (30 percent, 1.01x previous step)
    10 steps has 44,533 entries (12 percent, 0.42x previous step)
    11 steps has 5,880 entries (1 percent, 0.13x previous step)
    12 steps has 200 entries (0 percent, 0.03x previous step)
    Total: 343,000 entries
    Average: 8.28 moves
    """

    # FB inside centers and outer t-centers; only the Front and Back rows are
    # active, the other faces are kept as commented reference rows.
    FB_inside_centers_and_outer_t_centers = (
        # 11, 17, 18, 19, 23, 24, 25, 26, 27, 31, 32, 33, 39, # Upper
        # 60, 66, 67, 68, 72, 73, 74, 75, 76, 80, 81, 82, 88, # Left
        109, 115, 116, 117, 121, 122, 123, 124, 125, 129, 130, 131, 137, # Front
        # 158, 164, 165, 166, 170, 171, 172, 173, 174, 178, 179, 180, 186, # Right
        207, 213, 214, 215, 219, 220, 221, 222, 223, 227, 228, 229, 235, # Back
        # 256, 262, 263, 264, 268, 269, 270, 271, 272, 276, 277, 278, 284, # Down
    )

    # Single solved pattern for the tracked positions.
    state_targets = (
        "FFFFFFFFFFFFFBBBBBBBBBBBBB",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step75.txt',
            self.state_targets,
            linecount=343000,
            max_depth=12,
            filesize=27097000,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.FB_inside_centers_and_outer_t_centers])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.FB_inside_centers_and_outer_t_centers, state):
            cube[pos] = pos_state
class LookupTable777Step76(LookupTable):
    """
    lookup-table-7x7x7-step76.txt
    =============================
    0 steps has 1 entries (0 percent, 0.00x previous step)
    1 steps has 8 entries (0 percent, 8.00x previous step)
    2 steps has 48 entries (0 percent, 6.00x previous step)
    3 steps has 276 entries (0 percent, 5.75x previous step)
    4 steps has 1,572 entries (0 percent, 5.70x previous step)
    5 steps has 8,134 entries (2 percent, 5.17x previous step)
    6 steps has 33,187 entries (9 percent, 4.08x previous step)
    7 steps has 94,826 entries (27 percent, 2.86x previous step)
    8 steps has 141,440 entries (41 percent, 1.49x previous step)
    9 steps has 59,620 entries (17 percent, 0.42x previous step)
    10 steps has 3,808 entries (1 percent, 0.06x previous step)
    11 steps has 80 entries (0 percent, 0.02x previous step)
    Total: 343,000 entries
    Average: 7.63 moves
    """

    # FB oblique edges and outer t-centers; only the Front and Back rows are
    # active, the other faces are kept as commented reference rows.
    FB_oblique_edges_and_outer_t_center = (
        # 10, 11, 12, 16, 20, 23, 27, 30, 34, 38, 39, 40, # Upper
        # 59, 60, 61, 65, 69, 72, 76, 79, 83, 87, 88, 89, # Left
        108, 109, 110, 114, 118, 121, 125, 128, 132, 136, 137, 138, # Front
        # 157, 158, 159, 163, 167, 170, 174, 177, 181, 185, 186, 187, # Right
        206, 207, 208, 212, 216, 219, 223, 226, 230, 234, 235, 236, # Back
        # 255, 256, 257, 261, 265, 268, 272, 275, 279, 283, 284, 285, # Down
    )

    # Single solved pattern for the tracked positions.
    state_targets = (
        "FFFFFFFFFFFFBBBBBBBBBBBB",
    )

    def __init__(self, parent):
        LookupTable.__init__(
            self,
            parent,
            'lookup-table-7x7x7-step76.txt',
            self.state_targets,
            linecount=343000,
            max_depth=11,
            filesize=24353000,
            all_moves=moves_777,
            # Moves that may not be used while searching this table
            # (presumably to preserve earlier phases' work -- TODO confirm).
            illegal_moves=(
                "3Uw", "3Uw'", "Uw", "Uw'",
                "3Lw", "3Lw'", "Lw", "Lw'",
                "3Fw", "3Fw'", "Fw", "Fw'",
                "3Rw", "3Rw'", "Rw", "Rw'",
                "3Bw", "3Bw'", "Bw", "Bw'",
                "3Dw", "3Dw'", "Dw", "Dw'",
                "L", "L'",
                "R", "R'",
                "3Fw2", "3Bw2", "Fw2", "Bw2",
                "U", "U'",
                "D", "D'"
            ),
            use_state_index=True,
        )

    def state(self):
        """Return the parent cube's stickers at the tracked positions as a string."""
        parent_state = self.parent.state
        return "".join([parent_state[x] for x in self.FB_oblique_edges_and_outer_t_center])

    def populate_cube_from_state(self, state, cube, steps_to_solve):
        """Write each character of ``state`` onto ``cube`` at the tracked positions."""
        state = list(state)
        for (pos, pos_state) in zip(self.FB_oblique_edges_and_outer_t_center, state):
            cube[pos] = pos_state
class LookupTableIDA777Step70(LookupTableIDAViaGraph):
    # IDA* graph search for this phase, combining the parent's
    # step71/72/75/76 prune tables.
    def __init__(self, parent):
        # Moves excluded from the search.
        banned_moves = (
            "3Uw", "3Uw'", "Uw", "Uw'",
            "3Lw", "3Lw'", "Lw", "Lw'",
            "3Fw", "3Fw'", "Fw", "Fw'",
            "3Rw", "3Rw'", "Rw", "Rw'",
            "3Bw", "3Bw'", "Bw", "Bw'",
            "3Dw", "3Dw'", "Dw", "Dw'",
            "L", "L'",
            "R", "R'",
            "3Fw2", "3Bw2", "Fw2", "Bw2",
            "U", "U'",
            "D", "D'",
        )
        heuristics = (
            parent.lt_step71,
            parent.lt_step72,
            parent.lt_step75,
            parent.lt_step76,
        )
        LookupTableIDAViaGraph.__init__(
            self,
            parent,
            all_moves=moves_777,
            illegal_moves=banned_moves,
            prune_tables=heuristics,
            multiplier=1.2,
        )
class RubiksCube777(RubiksCubeNNNOddEdges):
"""
For 7x7x7 centers
- stage the UD inside 9 centers via 5x5x5
- UD oblique edges
- pair the two outside oblique edges via 6x6x6
- build a lookup table to pair the middle oblique edges with the two
outside oblique edges. The restriction being that if you do a 3Lw move
you must also do a 3Rw' in order to keep the two outside oblique edges
paired up...so it is a slice of the layer in the middle. This table
should be (24!/(8!*16!))^2 or 540,917,591,800 so use IDA.
- stage the rest of the UD centers via 5x5x5
- stage the LR inside 9 centers via 5x5x5
- LR oblique edges...use the same strategy as UD oblique edges
- stage the rest of the LR centers via 5x5x5
- solve the UD centers...this is (8!/(4!*4!))^6 or 117 billion so use IDA
- solve the LR centers
- solve the LR and FB centers
For 7x7x7 edges
- pair the middle 3 wings for each side via 5x5x5
- pair the outer 2 wings with the paired middle 3 wings via 5x5x5
Inheritance model
-----------------
RubiksCube
|
RubiksCubeNNNOddEdges
/ \
RubiksCubeNNNOdd RubiksCube777
"""
instantiated = False
def __init__(self, state, order, colormap=None, debug=False):
RubiksCubeNNNOddEdges.__init__(self, state, order, colormap, debug)
if RubiksCube777.instantiated:
# raise Exception("Another 7x7x7 instance is being created")
log.warning("Another 7x7x7 instance is being created")
else:
RubiksCube777.instantiated = True
def phase(self):
if self._phase is None:
self._phase = "Stage UD centers"
return self._phase
if self._phase == "Stage UD centers":
if self.UD_centers_staged():
self._phase = "Stage LR centers"
return self._phase
if self._phase == "Stage LR centers":
if self.LR_centers_staged():
self._phase = "Solve Centers"
if self._phase == "Solve Centers":
if self.centers_solved():
self._phase = "Pair Edges"
if self._phase == "Pair Edges":
if not self.get_non_paired_edges():
self._phase = "Solve 3x3x3"
return self._phase
def sanity_check(self):
edge_orbit_0 = (
2, 6, 14, 42, 48, 44, 36, 8,
51, 55, 63, 91, 97, 93, 85, 57,
100, 104, 112, 140, 146, 142, 134, 106,
149, 153, 161, 189, 195, 191, 183, 155,
198, 202, 210, 238, 244, 240, 232, 204,
247, 251, 259, 287, 293, 289, 281, 253,
)
edge_orbit_1 = (
3, 5, 21, 35, 47, 45, 29, 15,
52, 54, 70, 84, 96, 94, 78, 64,
101, 103, 119, 133, 145, 143, 127, 113,
150, 152, 168, 182, 194, 192, 176, 162,
199, 201, 217, 231, 243, 241, 225, 211,
248, 250, 266, 280, 292, 290, 274, 260,
)
edge_orbit_2 = (
4, 28, 46, 22,
53, 77, 95, 71,
102, 126, 144, 120,
151, 175, 193, 169,
200, 224, 242, 218,
249, 273, 291, 267,
)
corners = (
1, 7, 43, 49,
50, 56, 92, 98,
99, 105, 141, 147,
148, 154, 190, 196,
197, 203, 239, 245,
246, 252, 288, 294,
)
left_oblique_edge = (
10, 20, 40, 30,
59, 69, 89, 79,
108, 118, 138, 128,
157, 167, 187, 177,
206, 216, 236, 226,
255, 265, 285, 275,
)
right_oblique_edge = (
12, 34, 38, 16,
61, 83, 87, 65,
110, 132, 136, 114,
159, 181, 185, 163,
208, 230, 234, 212,
257, 279, 283, 261,
)
outside_x_centers = (
9, 13, 37, 41,
58, 62, 86, 90,
107, 111, 135, 139,
156, 160, 184, 188,
205, 209, 233, 237,
254, 258, 282, 286,
)
inside_x_centers = (
17, 19, 31, 33,
66, 68, 80, 82,
115, 117, 129, 131,
164, 166, 178, 180,
213, 215, 227, 229,
262, 264, 276, 278,
)
outside_t_centers = (
11, 23, 27, 39,
60, 72, 76, 88,
109, 121, 125, 137,
158, 170, 174, 186,
207, 219, 223, 235,
256, 268, 272, 284,
)
inside_t_centers = (
18, 24, 26, 32,
67, 73, 75, 81,
116, 122, 124, 130,
165, 171, 173, 179,
214, 220, 222, 228,
263, 269, 271, 277,
)
centers = (25, 74, 123, 172, 221, 270)
self._sanity_check("edge-orbit-0", edge_orbit_0, 8)
self._sanity_check("edge-orbit-1", edge_orbit_1, 8)
self._sanity_check("edge-orbit-2", edge_orbit_2, 4)
self._sanity_check("corners", corners, 4)
self._sanity_check("left-oblique", left_oblique_edge, 4)
self._sanity_check("right-oblique", right_oblique_edge, 4)
self._sanity_check("outside x-centers", outside_x_centers, 4)
self._sanity_check("inside x-centers", inside_x_centers, 4)
self._sanity_check("outside t-centers", outside_t_centers, 4)
self._sanity_check("inside t-centers", inside_t_centers, 4)
self._sanity_check("centers", centers, 1)
def lt_init(self):
if self.lt_init_called:
return
self.lt_init_called = True
self.lt_LR_oblique_edge_pairing = LookupTableIDA777LRObliqueEdgePairing(self)
self.lt_UD_oblique_edge_pairing = LookupTableIDA777UDObliqueEdgePairing(self)
self.lt_step41 = LookupTable777Step41(self)
self.lt_step42 = LookupTable777Step42(self)
self.lt_step43 = LookupTable777Step43(self)
self.lt_step44 = LookupTable777Step44(self)
self.lt_step40 = LookupTableIDA777Step40(self)
self.lt_step51 = LookupTable777Step51(self)
self.lt_step52 = LookupTable777Step52(self)
self.lt_step53 = LookupTable777Step53(self)
self.lt_step54 = LookupTable777Step54(self)
self.lt_step55 = LookupTable777Step55(self)
self.lt_step50 = LookupTableIDA777Step50(self)
self.lt_step61 = LookupTable777Step61(self)
self.lt_step62 = LookupTable777Step62(self)
self.lt_step65 = LookupTable777Step65(self)
self.lt_step66 = LookupTable777Step66(self)
self.lt_step60 = LookupTableIDA777Step60(self)
self.lt_step71 = LookupTable777Step71(self)
self.lt_step72 = LookupTable777Step72(self)
self.lt_step75 = LookupTable777Step75(self)
self.lt_step76 = LookupTable777Step76(self)
self.lt_step70 = LookupTableIDA777Step70(self)
def create_fake_555_from_inside_centers(self):
# Create a fake 5x5x5 to stage the UD inner 5x5x5 centers
fake_555 = self.get_fake_555()
fake_555.nuke_corners()
fake_555.nuke_edges()
fake_555.nuke_centers()
for side_index in range(6):
offset_555 = side_index * 25
offset_777 = side_index * 49
# centers
fake_555.state[7 + offset_555] = self.state[17 + offset_777]
fake_555.state[8 + offset_555] = self.state[18 + offset_777]
fake_555.state[9 + offset_555] = self.state[19 + offset_777]
fake_555.state[12 + offset_555] = self.state[24 + offset_777]
fake_555.state[13 + offset_555] = self.state[25 + offset_777]
fake_555.state[14 + offset_555] = self.state[26 + offset_777]
fake_555.state[17 + offset_555] = self.state[31 + offset_777]
fake_555.state[18 + offset_555] = self.state[32 + offset_777]
fake_555.state[19 + offset_555] = self.state[33 + offset_777]
# edges
fake_555.state[2 + offset_555] = self.state[3 + offset_777]
fake_555.state[3 + offset_555] = self.state[4 + offset_777]
fake_555.state[4 + offset_555] = self.state[5 + offset_777]
fake_555.state[6 + offset_555] = self.state[15 + offset_777]
fake_555.state[11 + offset_555] = self.state[22 + offset_777]
fake_555.state[16 + offset_555] = self.state[29 + offset_777]
fake_555.state[10 + offset_555] = self.state[21 + offset_777]
fake_555.state[15 + offset_555] = self.state[28 + offset_777]
fake_555.state[20 + offset_555] = self.state[35 + offset_777]
fake_555.state[22 + offset_555] = self.state[45 + offset_777]
fake_555.state[23 + offset_555] = self.state[46 + offset_777]
fake_555.state[24 + offset_555] = self.state[47 + offset_777]
def create_fake_555_from_outside_centers(self):
# Create a fake 5x5x5 to solve 7x7x7 centers (they have been reduced to a 5x5x5)
fake_555 = self.get_fake_555()
fake_555.nuke_corners()
fake_555.nuke_edges()
fake_555.nuke_centers()
for side_index in range(6):
offset_555 = side_index * 25
offset_777 = side_index * 49
# centers
fake_555.state[7 + offset_555] = self.state[9 + offset_777]
fake_555.state[8 + offset_555] = self.state[11 + offset_777]
fake_555.state[9 + offset_555] = self.state[13 + offset_777]
fake_555.state[12 + offset_555] = self.state[23 + offset_777]
fake_555.state[13 + offset_555] = self.state[25 + offset_777]
fake_555.state[14 + offset_555] = self.state[27 + offset_777]
fake_555.state[17 + offset_555] = self.state[37 + offset_777]
fake_555.state[18 + offset_555] = self.state[39 + offset_777]
fake_555.state[19 + offset_555] = self.state[41 + offset_777]
# edges
fake_555.state[2 + offset_555] = self.state[2 + offset_777]
fake_555.state[3 + offset_555] = self.state[4 + offset_777]
fake_555.state[4 + offset_555] = self.state[6 + offset_777]
fake_555.state[6 + offset_555] = self.state[8 + offset_777]
fake_555.state[11 + offset_555] = self.state[22 + offset_777]
fake_555.state[16 + offset_555] = self.state[36 + offset_777]
fake_555.state[10 + offset_555] = self.state[14 + offset_777]
fake_555.state[15 + offset_555] = self.state[28 + offset_777]
fake_555.state[20 + offset_555] = self.state[42 + offset_777]
fake_555.state[22 + offset_555] = self.state[44 + offset_777]
fake_555.state[23 + offset_555] = self.state[46 + offset_777]
fake_555.state[24 + offset_555] = self.state[48 + offset_777]
def UD_inside_centers_staged(self):
state = self.state
for x in (17, 18, 19, 24, 25, 26, 31, 32, 33, 262, 263, 264, 269, 270, 271, 276, 277, 278):
if state[x] not in ("U", "D"):
return False
return True
def group_inside_UD_centers(self):
self.create_fake_555_from_inside_centers()
self.fake_555.group_centers_stage_FB()
for step in self.fake_555.solution:
if step.startswith("COMMENT"):
self.solution.append(step)
else:
if step.startswith("5"):
step = "7" + step[1:]
elif step.startswith("3"):
step = "4" + step[1:]
elif "w" in step:
step = "3" + step
self.rotate(step)
def LR_inside_centers_staged(self):
state = self.state
for x in (66, 67, 68, 73, 74, 75, 80, 81, 82, 164, 165, 166, 171, 172, 173, 178, 179, 180):
if state[x] not in ("L", "R"):
return False
return True
def group_inside_LR_centers(self):
if self.LR_inside_centers_staged():
return
self.create_fake_555_from_inside_centers()
self.fake_555.group_centers_stage_LR()
for step in self.fake_555.solution:
if step.startswith("COMMENT"):
self.solution.append(step)
else:
if step.startswith("5"):
step = "7" + step[1:]
elif step.startswith("3"):
raise Exception("5x5x5 solution has 3 wide turn")
elif "w" in step:
step = "3" + step
self.rotate(step)
def stage_UD_centers(self):
self.group_inside_UD_centers()
self.print_cube()
log.info(
"%s: UD inner x-centers staged, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
log.info("")
log.info("")
log.info("")
log.info("")
# Pair the oblique UD edges
tmp_solution_len = len(self.solution)
self.lt_UD_oblique_edge_pairing.solve()
self.print_cube()
self.solution.append(
"COMMENT_%d_steps_777_UD_oblique_edges_staged"
% self.get_solution_len_minus_rotates(self.solution[tmp_solution_len:])
)
log.info(
"%s: UD oblique edges paired/staged, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
log.info("")
log.info("")
log.info("")
log.info("")
# Stage the UD centers
self.create_fake_555_from_outside_centers()
self.fake_555.group_centers_stage_FB()
for step in self.fake_555.solution:
if step.startswith("COMMENT"):
self.solution.append(step)
else:
if step.startswith("5"):
step = "7" + step[1:]
elif step.startswith("3"):
raise Exception("5x5x5 solution has 3 wide turn")
self.rotate(step)
self.print_cube()
# log.info("kociemba: %s" % self.get_kociemba_string(True))
log.info(
"%s: UD centers staged, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
log.info("")
log.info("")
log.info("")
log.info("")
def stage_LR_centers(self):
# Uses 5x5x5 solver to stage the inner x-centers
self.group_inside_LR_centers()
self.print_cube()
log.info(
"%s: LR inner x-centers staged, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
log.info("")
log.info("")
log.info("")
log.info("")
# Test the pruning tables
# self.lt_LR_left_right_oblique_edge_pairing.solve()
# self.lt_LR_left_middle_oblique_edge_pairing.solve()
# self.print_cube()
# log.info("%s: %d steps in" % (self, self.get_solution_len_minus_rotates(self.solution)))
# log.info("kociemba: %s" % self.get_kociemba_string(True))
tmp_solution_len = len(self.solution)
self.lt_LR_oblique_edge_pairing.solve()
self.print_cube()
self.solution.append(
"COMMENT_%d_steps_777_LR_oblique_edges_staged"
% self.get_solution_len_minus_rotates(self.solution[tmp_solution_len:])
)
log.info(
"%s: LR oblique edges staged, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
# Stage the LR centers
self.create_fake_555_from_outside_centers()
self.fake_555.group_centers_stage_LR()
for step in self.fake_555.solution:
if step.startswith("COMMENT"):
self.solution.append(step)
else:
if step.startswith("5"):
step = "7" + step[1:]
elif step.startswith("3"):
raise Exception("5x5x5 solution has 3 wide turn")
self.rotate(step)
self.print_cube()
log.info(
"%s: LR centers staged, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
log.info("")
log.info("")
log.info("")
log.info("")
def LR_centers_vertical_bars(self):
# Test the pruning tables
# self.lt_step41.solve()
# self.lt_step42.solve()
# self.print_cube()
# log.info("%s: %d steps in" % (self, self.get_solution_len_minus_rotates(self.solution)))
tmp_solution_len = len(self.solution)
self.lt_step40.solve_via_c()
self.print_cube()
# log.info("kociemba: %s" % self.get_kociemba_string(True))
self.solution.append(
"COMMENT_%d_steps_777_LR_centers_vertical_bars"
% self.get_solution_len_minus_rotates(self.solution[tmp_solution_len:])
)
log.info(
"%s: LR centers vertical bars, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
def UD_centers_vertical_bars(self):
# Test the pruning tables
# self.lt_step51.solve()
# self.lt_step52.solve()
# self.print_cube()
# log.info("%s: %d steps in" % (self, self.get_solution_len_minus_rotates(self.solution)))
tmp_solution_len = len(self.solution)
self.lt_step50.solve_via_c()
# log.info("kociemba: %s" % self.get_kociemba_string(True))
self.solution.append(
"COMMENT_%d_steps_777_UD_centers_vertical_bars"
% self.get_solution_len_minus_rotates(self.solution[tmp_solution_len:])
)
log.info(
"%s: LR solved, UD centers vertical bars, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
def centers_daisy_solve(self):
tmp_solution_len = len(self.solution)
self.lt_step60.solve_via_c()
self.solution.append(
"COMMENT_%d_steps_777_centers_daisy_solved"
% self.get_solution_len_minus_rotates(self.solution[tmp_solution_len:])
)
self.print_cube()
# log.info("kociemba: %s" % self.get_kociemba_string(True))
log.info(
"%s: centers daisy solved, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
def group_centers_guts(self):
self.lt_init()
if not self.LR_centers_staged():
self.stage_LR_centers()
if not self.UD_centers_staged():
self.stage_UD_centers()
# log.info("kociemba: %s" % self.get_kociemba_string(True))
self.LR_centers_vertical_bars()
self.UD_centers_vertical_bars()
self.centers_daisy_solve()
def solve_t_centers(self):
# This is only used when solving a cube larger than 777
assert self.LR_centers_staged()
assert self.UD_centers_staged()
self.LR_centers_vertical_bars()
self.UD_centers_vertical_bars()
tmp_solution_len = len(self.solution)
self.lt_step70.solve_via_c()
self.solution.append(
"COMMENT_%d_steps_777_centers_solved"
% self.get_solution_len_minus_rotates(self.solution[tmp_solution_len:])
)
self.print_cube()
# log.info("kociemba: %s" % self.get_kociemba_string(True))
log.info(
"%s: centers solved, %d steps in"
% (self, self.get_solution_len_minus_rotates(self.solution))
)
def solve_centers(self):
    """Solve the centers by delegating to a fake 5x5x5 built from the
    outside centers.

    Only used when solving a cube larger than 7x7x7. The 5x5x5 solution
    is replayed here with wide-turn notation widened from "5" to "7".

    Raises:
        Exception: if the 5x5x5 solution contains a 3-wide turn.
        SolveError: if the centers are not solved afterwards.
    """
    start_len = len(self.solution)
    self.create_fake_555_from_outside_centers()
    self.fake_555.lt_ULFRBD_centers_solve.solve_via_c()

    for step in self.fake_555.solution:
        # COMMENT tokens are bookkeeping, not moves; copy them through.
        if step.startswith("COMMENT"):
            self.solution.append(step)
            continue

        if step.startswith("3"):
            raise Exception("5x5x5 solution has 3 wide turn")

        # Widen 5-wide turns so they apply to this larger cube.
        if step.startswith("5"):
            step = "7" + step[1:]

        self.rotate(step)

    phase_steps = self.get_solution_len_minus_rotates(self.solution[start_len:])
    self.solution.append(f"COMMENT_{phase_steps}_steps_777_centers_solved")

    self.print_cube()
    total_steps = self.get_solution_len_minus_rotates(self.solution)
    log.info("%s: centers solved, %d steps in" % (self, total_steps))

    if not self.centers_solved():
        raise SolveError("centers should be solved")
swaps_777 = { "2B": ( 0, 1, 2, 3, 4, 5, 6, 7, 153, 160, 167, 174, 181, 188, 195, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 14, 52, 53, 54, 55, 56, 57, 13, 59, 60, 61, 62, 63, 64, 12, 66, 67, 68, 69, 70, 71, 11, 73, 74, 75, 76, 77, 78, 10, 80, 81, 82, 83, 84, 85, 9, 87, 88, 89, 90, 91, 92, 8, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 287, 154, 155, 156, 157, 158, 159, 286, 161, 162, 163, 164, 165, 166, 285, 168, 169, 170, 171, 172, 173, 284, 175, 176, 177, 178, 179, 180, 283, 182, 183, 184, 185, 186, 187, 282, 189, 190, 191, 192, 193, 194, 281, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 51, 58, 65, 72, 79, 86, 93, 288, 289, 290, 291, 292, 293, 294,), "2B'": ( 0, 1, 2, 3, 4, 5, 6, 7, 93, 86, 79, 72, 65, 58, 51, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 281, 52, 53, 54, 55, 56, 57, 282, 59, 60, 61, 62, 63, 64, 283, 66, 67, 68, 69, 70, 71, 284, 73, 74, 75, 76, 77, 78, 285, 80, 81, 82, 83, 84, 85, 286, 87, 88, 89, 90, 91, 92, 287, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 
140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 8, 154, 155, 156, 157, 158, 159, 9, 161, 162, 163, 164, 165, 166, 10, 168, 169, 170, 171, 172, 173, 11, 175, 176, 177, 178, 179, 180, 12, 182, 183, 184, 185, 186, 187, 13, 189, 190, 191, 192, 193, 194, 14, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 195, 188, 181, 174, 167, 160, 153, 288, 289, 290, 291, 292, 293, 294,), "2B2": ( 0, 1, 2, 3, 4, 5, 6, 7, 287, 286, 285, 284, 283, 282, 281, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 195, 52, 53, 54, 55, 56, 57, 188, 59, 60, 61, 62, 63, 64, 181, 66, 67, 68, 69, 70, 71, 174, 73, 74, 75, 76, 77, 78, 167, 80, 81, 82, 83, 84, 85, 160, 87, 88, 89, 90, 91, 92, 153, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 93, 154, 155, 156, 157, 158, 159, 86, 161, 162, 163, 164, 165, 166, 79, 168, 169, 170, 171, 172, 173, 72, 175, 176, 177, 178, 179, 180, 65, 182, 183, 184, 185, 186, 187, 58, 189, 190, 191, 192, 193, 194, 51, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 
264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 14, 13, 12, 11, 10, 9, 8, 288, 289, 290, 291, 292, 293, 294,), "2D": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 232, 233, 234, 235, 236, 237, 238, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 85, 86, 87, 88, 89, 90, 91, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 134, 135, 136, 137, 138, 139, 140, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 183, 184, 185, 186, 187, 188, 189, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2D'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 134, 135, 136, 137, 138, 139, 140, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 
114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 183, 184, 185, 186, 187, 188, 189, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 232, 233, 234, 235, 236, 237, 238, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 85, 86, 87, 88, 89, 90, 91, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2D2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 183, 184, 185, 186, 187, 188, 189, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 232, 233, 234, 235, 236, 237, 238, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 85, 86, 87, 88, 89, 90, 91, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 134, 135, 136, 137, 138, 139, 140, 239, 
240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2F": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 97, 90, 83, 76, 69, 62, 55, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 253, 56, 57, 58, 59, 60, 61, 254, 63, 64, 65, 66, 67, 68, 255, 70, 71, 72, 73, 74, 75, 256, 77, 78, 79, 80, 81, 82, 257, 84, 85, 86, 87, 88, 89, 258, 91, 92, 93, 94, 95, 96, 259, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 36, 150, 151, 152, 153, 154, 155, 37, 157, 158, 159, 160, 161, 162, 38, 164, 165, 166, 167, 168, 169, 39, 171, 172, 173, 174, 175, 176, 40, 178, 179, 180, 181, 182, 183, 41, 185, 186, 187, 188, 189, 190, 42, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 191, 184, 177, 170, 163, 156, 149, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2F'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 149, 156, 163, 170, 177, 184, 191, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 42, 56, 57, 58, 59, 60, 61, 41, 63, 64, 65, 66, 67, 68, 40, 70, 71, 72, 73, 74, 75, 39, 77, 78, 79, 80, 81, 82, 38, 84, 
85, 86, 87, 88, 89, 37, 91, 92, 93, 94, 95, 96, 36, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 259, 150, 151, 152, 153, 154, 155, 258, 157, 158, 159, 160, 161, 162, 257, 164, 165, 166, 167, 168, 169, 256, 171, 172, 173, 174, 175, 176, 255, 178, 179, 180, 181, 182, 183, 254, 185, 186, 187, 188, 189, 190, 253, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 55, 62, 69, 76, 83, 90, 97, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2F2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 259, 258, 257, 256, 255, 254, 253, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 191, 56, 57, 58, 59, 60, 61, 184, 63, 64, 65, 66, 67, 68, 177, 70, 71, 72, 73, 74, 75, 170, 77, 78, 79, 80, 81, 82, 163, 84, 85, 86, 87, 88, 89, 156, 91, 92, 93, 94, 95, 96, 149, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 97, 150, 151, 152, 153, 154, 155, 90, 157, 158, 159, 160, 161, 162, 83, 164, 165, 166, 167, 168, 169, 76, 171, 172, 173, 174, 175, 176, 69, 178, 179, 180, 181, 182, 183, 62, 185, 186, 187, 188, 189, 190, 55, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 
212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 42, 41, 40, 39, 38, 37, 36, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2L": ( 0, 1, 244, 3, 4, 5, 6, 7, 8, 237, 10, 11, 12, 13, 14, 15, 230, 17, 18, 19, 20, 21, 22, 223, 24, 25, 26, 27, 28, 29, 216, 31, 32, 33, 34, 35, 36, 209, 38, 39, 40, 41, 42, 43, 202, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 2, 101, 102, 103, 104, 105, 106, 9, 108, 109, 110, 111, 112, 113, 16, 115, 116, 117, 118, 119, 120, 23, 122, 123, 124, 125, 126, 127, 30, 129, 130, 131, 132, 133, 134, 37, 136, 137, 138, 139, 140, 141, 44, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 289, 203, 204, 205, 206, 207, 208, 282, 210, 211, 212, 213, 214, 215, 275, 217, 218, 219, 220, 221, 222, 268, 224, 225, 226, 227, 228, 229, 261, 231, 232, 233, 234, 235, 236, 254, 238, 239, 240, 241, 242, 243, 247, 245, 246, 100, 248, 249, 250, 251, 252, 253, 107, 255, 256, 257, 258, 259, 260, 114, 262, 263, 264, 265, 266, 267, 121, 269, 270, 271, 272, 273, 274, 128, 276, 277, 278, 279, 280, 281, 135, 283, 284, 285, 286, 287, 288, 142, 290, 291, 292, 293, 294,), "2L'": ( 0, 1, 100, 3, 4, 5, 6, 7, 8, 107, 10, 11, 12, 13, 14, 15, 114, 17, 18, 19, 20, 21, 22, 121, 24, 25, 26, 27, 28, 29, 128, 31, 32, 33, 34, 35, 36, 135, 38, 39, 40, 41, 42, 43, 142, 45, 46, 47, 48, 49, 50, 
51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 247, 101, 102, 103, 104, 105, 106, 254, 108, 109, 110, 111, 112, 113, 261, 115, 116, 117, 118, 119, 120, 268, 122, 123, 124, 125, 126, 127, 275, 129, 130, 131, 132, 133, 134, 282, 136, 137, 138, 139, 140, 141, 289, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 44, 203, 204, 205, 206, 207, 208, 37, 210, 211, 212, 213, 214, 215, 30, 217, 218, 219, 220, 221, 222, 23, 224, 225, 226, 227, 228, 229, 16, 231, 232, 233, 234, 235, 236, 9, 238, 239, 240, 241, 242, 243, 2, 245, 246, 244, 248, 249, 250, 251, 252, 253, 237, 255, 256, 257, 258, 259, 260, 230, 262, 263, 264, 265, 266, 267, 223, 269, 270, 271, 272, 273, 274, 216, 276, 277, 278, 279, 280, 281, 209, 283, 284, 285, 286, 287, 288, 202, 290, 291, 292, 293, 294,), "2L2": ( 0, 1, 247, 3, 4, 5, 6, 7, 8, 254, 10, 11, 12, 13, 14, 15, 261, 17, 18, 19, 20, 21, 22, 268, 24, 25, 26, 27, 28, 29, 275, 31, 32, 33, 34, 35, 36, 282, 38, 39, 40, 41, 42, 43, 289, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 244, 101, 102, 103, 104, 105, 106, 237, 108, 109, 110, 111, 112, 113, 230, 115, 116, 117, 118, 119, 120, 223, 122, 123, 124, 125, 126, 127, 216, 129, 130, 131, 132, 133, 134, 209, 136, 137, 138, 139, 140, 141, 202, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 
185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 142, 203, 204, 205, 206, 207, 208, 135, 210, 211, 212, 213, 214, 215, 128, 217, 218, 219, 220, 221, 222, 121, 224, 225, 226, 227, 228, 229, 114, 231, 232, 233, 234, 235, 236, 107, 238, 239, 240, 241, 242, 243, 100, 245, 246, 2, 248, 249, 250, 251, 252, 253, 9, 255, 256, 257, 258, 259, 260, 16, 262, 263, 264, 265, 266, 267, 23, 269, 270, 271, 272, 273, 274, 30, 276, 277, 278, 279, 280, 281, 37, 283, 284, 285, 286, 287, 288, 44, 290, 291, 292, 293, 294,), "2R": ( 0, 1, 2, 3, 4, 5, 104, 7, 8, 9, 10, 11, 12, 111, 14, 15, 16, 17, 18, 19, 118, 21, 22, 23, 24, 25, 26, 125, 28, 29, 30, 31, 32, 33, 132, 35, 36, 37, 38, 39, 40, 139, 42, 43, 44, 45, 46, 47, 146, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 251, 105, 106, 107, 108, 109, 110, 258, 112, 113, 114, 115, 116, 117, 265, 119, 120, 121, 122, 123, 124, 272, 126, 127, 128, 129, 130, 131, 279, 133, 134, 135, 136, 137, 138, 286, 140, 141, 142, 143, 144, 145, 293, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 48, 199, 200, 201, 202, 203, 204, 41, 206, 207, 208, 209, 210, 211, 34, 213, 214, 215, 216, 217, 218, 27, 220, 221, 222, 223, 224, 225, 20, 227, 228, 229, 230, 231, 232, 13, 234, 235, 236, 237, 238, 239, 6, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 240, 252, 253, 254, 255, 256, 257, 233, 259, 260, 261, 262, 263, 264, 226, 266, 267, 268, 269, 270, 271, 219, 273, 274, 275, 276, 277, 278, 212, 280, 281, 282, 283, 284, 285, 205, 287, 288, 289, 290, 291, 292, 198, 294,), "2R'": ( 0, 1, 2, 3, 4, 5, 240, 7, 8, 9, 10, 11, 12, 233, 14, 15, 16, 17, 18, 19, 
226, 21, 22, 23, 24, 25, 26, 219, 28, 29, 30, 31, 32, 33, 212, 35, 36, 37, 38, 39, 40, 205, 42, 43, 44, 45, 46, 47, 198, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 6, 105, 106, 107, 108, 109, 110, 13, 112, 113, 114, 115, 116, 117, 20, 119, 120, 121, 122, 123, 124, 27, 126, 127, 128, 129, 130, 131, 34, 133, 134, 135, 136, 137, 138, 41, 140, 141, 142, 143, 144, 145, 48, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 293, 199, 200, 201, 202, 203, 204, 286, 206, 207, 208, 209, 210, 211, 279, 213, 214, 215, 216, 217, 218, 272, 220, 221, 222, 223, 224, 225, 265, 227, 228, 229, 230, 231, 232, 258, 234, 235, 236, 237, 238, 239, 251, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 104, 252, 253, 254, 255, 256, 257, 111, 259, 260, 261, 262, 263, 264, 118, 266, 267, 268, 269, 270, 271, 125, 273, 274, 275, 276, 277, 278, 132, 280, 281, 282, 283, 284, 285, 139, 287, 288, 289, 290, 291, 292, 146, 294,), "2R2": ( 0, 1, 2, 3, 4, 5, 251, 7, 8, 9, 10, 11, 12, 258, 14, 15, 16, 17, 18, 19, 265, 21, 22, 23, 24, 25, 26, 272, 28, 29, 30, 31, 32, 33, 279, 35, 36, 37, 38, 39, 40, 286, 42, 43, 44, 45, 46, 47, 293, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 240, 105, 106, 107, 108, 109, 110, 233, 112, 113, 114, 115, 116, 117, 226, 119, 120, 121, 122, 123, 124, 219, 126, 127, 128, 129, 130, 131, 212, 133, 134, 135, 136, 137, 138, 205, 140, 141, 142, 143, 144, 145, 198, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 
159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 146, 199, 200, 201, 202, 203, 204, 139, 206, 207, 208, 209, 210, 211, 132, 213, 214, 215, 216, 217, 218, 125, 220, 221, 222, 223, 224, 225, 118, 227, 228, 229, 230, 231, 232, 111, 234, 235, 236, 237, 238, 239, 104, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 6, 252, 253, 254, 255, 256, 257, 13, 259, 260, 261, 262, 263, 264, 20, 266, 267, 268, 269, 270, 271, 27, 273, 274, 275, 276, 277, 278, 34, 280, 281, 282, 283, 284, 285, 41, 287, 288, 289, 290, 291, 292, 48, 294,), "2U": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 106, 107, 108, 109, 110, 111, 112, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 155, 156, 157, 158, 159, 160, 161, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 204, 205, 206, 207, 208, 209, 210, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 57, 58, 59, 60, 61, 62, 63, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 
285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2U'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 204, 205, 206, 207, 208, 209, 210, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 57, 58, 59, 60, 61, 62, 63, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 106, 107, 108, 109, 110, 111, 112, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 155, 156, 157, 158, 159, 160, 161, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "2U2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 155, 156, 157, 158, 159, 160, 161, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 204, 205, 206, 207, 208, 209, 210, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 
133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 57, 58, 59, 60, 61, 62, 63, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 106, 107, 108, 109, 110, 111, 112, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3B": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 152, 159, 166, 173, 180, 187, 194, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 21, 53, 54, 55, 56, 57, 58, 20, 60, 61, 62, 63, 64, 65, 19, 67, 68, 69, 70, 71, 72, 18, 74, 75, 76, 77, 78, 79, 17, 81, 82, 83, 84, 85, 86, 16, 88, 89, 90, 91, 92, 93, 15, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 280, 153, 154, 155, 156, 157, 158, 279, 160, 161, 162, 163, 164, 165, 278, 167, 168, 169, 170, 171, 172, 277, 174, 175, 176, 177, 178, 179, 276, 181, 182, 183, 184, 185, 186, 275, 188, 189, 190, 191, 192, 193, 274, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 
258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 52, 59, 66, 73, 80, 87, 94, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3B'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 94, 87, 80, 73, 66, 59, 52, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 274, 53, 54, 55, 56, 57, 58, 275, 60, 61, 62, 63, 64, 65, 276, 67, 68, 69, 70, 71, 72, 277, 74, 75, 76, 77, 78, 79, 278, 81, 82, 83, 84, 85, 86, 279, 88, 89, 90, 91, 92, 93, 280, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 15, 153, 154, 155, 156, 157, 158, 16, 160, 161, 162, 163, 164, 165, 17, 167, 168, 169, 170, 171, 172, 18, 174, 175, 176, 177, 178, 179, 19, 181, 182, 183, 184, 185, 186, 20, 188, 189, 190, 191, 192, 193, 21, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 194, 187, 180, 173, 166, 159, 152, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3B2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 280, 279, 278, 277, 276, 275, 274, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 194, 53, 54, 55, 56, 57, 58, 187, 60, 61, 62, 63, 64, 65, 180, 67, 68, 69, 70, 71, 72, 173, 74, 75, 76, 77, 78, 79, 166, 81, 82, 83, 84, 85, 86, 159, 88, 89, 90, 91, 92, 93, 152, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 
106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 94, 153, 154, 155, 156, 157, 158, 87, 160, 161, 162, 163, 164, 165, 80, 167, 168, 169, 170, 171, 172, 73, 174, 175, 176, 177, 178, 179, 66, 181, 182, 183, 184, 185, 186, 59, 188, 189, 190, 191, 192, 193, 52, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 21, 20, 19, 18, 17, 16, 15, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Bw": ( 0, 154, 161, 168, 175, 182, 189, 196, 153, 160, 167, 174, 181, 188, 195, 152, 159, 166, 173, 180, 187, 194, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 7, 14, 21, 53, 54, 55, 56, 6, 13, 20, 60, 61, 62, 63, 5, 12, 19, 67, 68, 69, 70, 4, 11, 18, 74, 75, 76, 77, 3, 10, 17, 81, 82, 83, 84, 2, 9, 16, 88, 89, 90, 91, 1, 8, 15, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 280, 287, 294, 155, 156, 157, 158, 279, 286, 293, 162, 163, 164, 165, 278, 285, 292, 169, 170, 171, 172, 277, 284, 291, 176, 177, 178, 179, 276, 283, 290, 183, 184, 185, 186, 275, 282, 289, 190, 191, 192, 193, 274, 281, 288, 239, 232, 225, 218, 211, 204, 197, 240, 233, 226, 219, 212, 205, 198, 241, 234, 227, 220, 213, 206, 199, 242, 235, 228, 221, 214, 207, 200, 243, 236, 229, 222, 
215, 208, 201, 244, 237, 230, 223, 216, 209, 202, 245, 238, 231, 224, 217, 210, 203, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 52, 59, 66, 73, 80, 87, 94, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92,), "3Bw'": ( 0, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 65, 58, 51, 94, 87, 80, 73, 66, 59, 52, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 288, 281, 274, 53, 54, 55, 56, 289, 282, 275, 60, 61, 62, 63, 290, 283, 276, 67, 68, 69, 70, 291, 284, 277, 74, 75, 76, 77, 292, 285, 278, 81, 82, 83, 84, 293, 286, 279, 88, 89, 90, 91, 294, 287, 280, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 15, 8, 1, 155, 156, 157, 158, 16, 9, 2, 162, 163, 164, 165, 17, 10, 3, 169, 170, 171, 172, 18, 11, 4, 176, 177, 178, 179, 19, 12, 5, 183, 184, 185, 186, 20, 13, 6, 190, 191, 192, 193, 21, 14, 7, 203, 210, 217, 224, 231, 238, 245, 202, 209, 216, 223, 230, 237, 244, 201, 208, 215, 222, 229, 236, 243, 200, 207, 214, 221, 228, 235, 242, 199, 206, 213, 220, 227, 234, 241, 198, 205, 212, 219, 226, 233, 240, 197, 204, 211, 218, 225, 232, 239, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 194, 187, 180, 173, 166, 159, 152, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154,), "3Bw2": ( 0, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 196, 195, 194, 53, 54, 55, 56, 189, 188, 187, 60, 61, 62, 63, 182, 181, 180, 67, 
68, 69, 70, 175, 174, 173, 74, 75, 76, 77, 168, 167, 166, 81, 82, 83, 84, 161, 160, 159, 88, 89, 90, 91, 154, 153, 152, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 94, 93, 92, 155, 156, 157, 158, 87, 86, 85, 162, 163, 164, 165, 80, 79, 78, 169, 170, 171, 172, 73, 72, 71, 176, 177, 178, 179, 66, 65, 64, 183, 184, 185, 186, 59, 58, 57, 190, 191, 192, 193, 52, 51, 50, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1,), "3D": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 225, 226, 227, 228, 229, 230, 231, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 78, 79, 80, 81, 82, 83, 84, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 127, 128, 129, 130, 131, 132, 133, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 
207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 176, 177, 178, 179, 180, 181, 182, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3D'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 127, 128, 129, 130, 131, 132, 133, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 176, 177, 178, 179, 180, 181, 182, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 225, 226, 227, 228, 229, 230, 231, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 78, 79, 80, 81, 82, 83, 84, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3D2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 
45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 176, 177, 178, 179, 180, 181, 182, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 225, 226, 227, 228, 229, 230, 231, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 78, 79, 80, 81, 82, 83, 84, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 127, 128, 129, 130, 131, 132, 133, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Dw": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 127, 128, 129, 130, 
131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 288, 281, 274, 267, 260, 253, 246, 289, 282, 275, 268, 261, 254, 247, 290, 283, 276, 269, 262, 255, 248, 291, 284, 277, 270, 263, 256, 249, 292, 285, 278, 271, 264, 257, 250, 293, 286, 279, 272, 265, 258, 251, 294, 287, 280, 273, 266, 259, 252,), "3Dw'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 252, 259, 266, 273, 280, 287, 294, 251, 258, 265, 272, 279, 286, 293, 250, 257, 264, 271, 278, 285, 292, 249, 256, 263, 270, 277, 284, 291, 248, 255, 262, 269, 276, 283, 290, 247, 254, 261, 268, 275, 282, 289, 246, 253, 260, 267, 274, 281, 288,), "3Dw2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 
11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246,), "3F": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 96, 89, 82, 75, 68, 61, 54, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 260, 55, 56, 57, 58, 59, 60, 261, 62, 63, 64, 65, 66, 67, 262, 69, 70, 71, 72, 73, 74, 263, 76, 77, 78, 79, 80, 81, 264, 83, 84, 85, 86, 87, 88, 265, 90, 91, 92, 93, 94, 95, 266, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 29, 151, 
152, 153, 154, 155, 156, 30, 158, 159, 160, 161, 162, 163, 31, 165, 166, 167, 168, 169, 170, 32, 172, 173, 174, 175, 176, 177, 33, 179, 180, 181, 182, 183, 184, 34, 186, 187, 188, 189, 190, 191, 35, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 192, 185, 178, 171, 164, 157, 150, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3F'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 150, 157, 164, 171, 178, 185, 192, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 35, 55, 56, 57, 58, 59, 60, 34, 62, 63, 64, 65, 66, 67, 33, 69, 70, 71, 72, 73, 74, 32, 76, 77, 78, 79, 80, 81, 31, 83, 84, 85, 86, 87, 88, 30, 90, 91, 92, 93, 94, 95, 29, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 266, 151, 152, 153, 154, 155, 156, 265, 158, 159, 160, 161, 162, 163, 264, 165, 166, 167, 168, 169, 170, 263, 172, 173, 174, 175, 176, 177, 262, 179, 180, 181, 182, 183, 184, 261, 186, 187, 188, 189, 190, 191, 260, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 54, 61, 68, 75, 82, 89, 96, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 
278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3F2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 266, 265, 264, 263, 262, 261, 260, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 192, 55, 56, 57, 58, 59, 60, 185, 62, 63, 64, 65, 66, 67, 178, 69, 70, 71, 72, 73, 74, 171, 76, 77, 78, 79, 80, 81, 164, 83, 84, 85, 86, 87, 88, 157, 90, 91, 92, 93, 94, 95, 150, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 96, 151, 152, 153, 154, 155, 156, 89, 158, 159, 160, 161, 162, 163, 82, 165, 166, 167, 168, 169, 170, 75, 172, 173, 174, 175, 176, 177, 68, 179, 180, 181, 182, 183, 184, 61, 186, 187, 188, 189, 190, 191, 54, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 35, 34, 33, 32, 31, 30, 29, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Fw": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 96, 89, 82, 75, 68, 61, 54, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 50, 51, 52, 53, 260, 253, 246, 57, 58, 59, 60, 261, 254, 247, 64, 65, 66, 67, 262, 255, 248, 71, 72, 73, 74, 263, 256, 249, 78, 79, 80, 81, 264, 257, 250, 85, 86, 87, 88, 265, 258, 251, 92, 93, 94, 95, 266, 259, 252, 141, 134, 127, 120, 113, 106, 99, 142, 135, 128, 121, 114, 107, 100, 143, 136, 129, 122, 115, 108, 101, 144, 137, 130, 
123, 116, 109, 102, 145, 138, 131, 124, 117, 110, 103, 146, 139, 132, 125, 118, 111, 104, 147, 140, 133, 126, 119, 112, 105, 43, 36, 29, 151, 152, 153, 154, 44, 37, 30, 158, 159, 160, 161, 45, 38, 31, 165, 166, 167, 168, 46, 39, 32, 172, 173, 174, 175, 47, 40, 33, 179, 180, 181, 182, 48, 41, 34, 186, 187, 188, 189, 49, 42, 35, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 192, 185, 178, 171, 164, 157, 150, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Fw'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 150, 157, 164, 171, 178, 185, 192, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 50, 51, 52, 53, 35, 42, 49, 57, 58, 59, 60, 34, 41, 48, 64, 65, 66, 67, 33, 40, 47, 71, 72, 73, 74, 32, 39, 46, 78, 79, 80, 81, 31, 38, 45, 85, 86, 87, 88, 30, 37, 44, 92, 93, 94, 95, 29, 36, 43, 105, 112, 119, 126, 133, 140, 147, 104, 111, 118, 125, 132, 139, 146, 103, 110, 117, 124, 131, 138, 145, 102, 109, 116, 123, 130, 137, 144, 101, 108, 115, 122, 129, 136, 143, 100, 107, 114, 121, 128, 135, 142, 99, 106, 113, 120, 127, 134, 141, 252, 259, 266, 151, 152, 153, 154, 251, 258, 265, 158, 159, 160, 161, 250, 257, 264, 165, 166, 167, 168, 249, 256, 263, 172, 173, 174, 175, 248, 255, 262, 179, 180, 181, 182, 247, 254, 261, 186, 187, 188, 189, 246, 253, 260, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 56, 63, 
70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 54, 61, 68, 75, 82, 89, 96, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Fw2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246, 50, 51, 52, 53, 192, 191, 190, 57, 58, 59, 60, 185, 184, 183, 64, 65, 66, 67, 178, 177, 176, 71, 72, 73, 74, 171, 170, 169, 78, 79, 80, 81, 164, 163, 162, 85, 86, 87, 88, 157, 156, 155, 92, 93, 94, 95, 150, 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, 99, 98, 97, 96, 151, 152, 153, 154, 91, 90, 89, 158, 159, 160, 161, 84, 83, 82, 165, 166, 167, 168, 77, 76, 75, 172, 173, 174, 175, 70, 69, 68, 179, 180, 181, 182, 63, 62, 61, 186, 187, 188, 189, 56, 55, 54, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3L": ( 0, 1, 2, 243, 4, 5, 6, 7, 8, 9, 236, 11, 12, 13, 14, 15, 16, 229, 18, 19, 20, 21, 22, 23, 222, 25, 26, 27, 28, 29, 30, 215, 32, 33, 34, 35, 36, 37, 208, 39, 40, 41, 42, 43, 44, 201, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 
99, 100, 3, 102, 103, 104, 105, 106, 107, 10, 109, 110, 111, 112, 113, 114, 17, 116, 117, 118, 119, 120, 121, 24, 123, 124, 125, 126, 127, 128, 31, 130, 131, 132, 133, 134, 135, 38, 137, 138, 139, 140, 141, 142, 45, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 290, 202, 203, 204, 205, 206, 207, 283, 209, 210, 211, 212, 213, 214, 276, 216, 217, 218, 219, 220, 221, 269, 223, 224, 225, 226, 227, 228, 262, 230, 231, 232, 233, 234, 235, 255, 237, 238, 239, 240, 241, 242, 248, 244, 245, 246, 247, 101, 249, 250, 251, 252, 253, 254, 108, 256, 257, 258, 259, 260, 261, 115, 263, 264, 265, 266, 267, 268, 122, 270, 271, 272, 273, 274, 275, 129, 277, 278, 279, 280, 281, 282, 136, 284, 285, 286, 287, 288, 289, 143, 291, 292, 293, 294,), "3L'": ( 0, 1, 2, 101, 4, 5, 6, 7, 8, 9, 108, 11, 12, 13, 14, 15, 16, 115, 18, 19, 20, 21, 22, 23, 122, 25, 26, 27, 28, 29, 30, 129, 32, 33, 34, 35, 36, 37, 136, 39, 40, 41, 42, 43, 44, 143, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 248, 102, 103, 104, 105, 106, 107, 255, 109, 110, 111, 112, 113, 114, 262, 116, 117, 118, 119, 120, 121, 269, 123, 124, 125, 126, 127, 128, 276, 130, 131, 132, 133, 134, 135, 283, 137, 138, 139, 140, 141, 142, 290, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 45, 202, 203, 204, 205, 206, 207, 38, 209, 210, 211, 212, 213, 214, 31, 216, 217, 218, 219, 220, 221, 24, 223, 
224, 225, 226, 227, 228, 17, 230, 231, 232, 233, 234, 235, 10, 237, 238, 239, 240, 241, 242, 3, 244, 245, 246, 247, 243, 249, 250, 251, 252, 253, 254, 236, 256, 257, 258, 259, 260, 261, 229, 263, 264, 265, 266, 267, 268, 222, 270, 271, 272, 273, 274, 275, 215, 277, 278, 279, 280, 281, 282, 208, 284, 285, 286, 287, 288, 289, 201, 291, 292, 293, 294,), "3L2": ( 0, 1, 2, 248, 4, 5, 6, 7, 8, 9, 255, 11, 12, 13, 14, 15, 16, 262, 18, 19, 20, 21, 22, 23, 269, 25, 26, 27, 28, 29, 30, 276, 32, 33, 34, 35, 36, 37, 283, 39, 40, 41, 42, 43, 44, 290, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 243, 102, 103, 104, 105, 106, 107, 236, 109, 110, 111, 112, 113, 114, 229, 116, 117, 118, 119, 120, 121, 222, 123, 124, 125, 126, 127, 128, 215, 130, 131, 132, 133, 134, 135, 208, 137, 138, 139, 140, 141, 142, 201, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 143, 202, 203, 204, 205, 206, 207, 136, 209, 210, 211, 212, 213, 214, 129, 216, 217, 218, 219, 220, 221, 122, 223, 224, 225, 226, 227, 228, 115, 230, 231, 232, 233, 234, 235, 108, 237, 238, 239, 240, 241, 242, 101, 244, 245, 246, 247, 3, 249, 250, 251, 252, 253, 254, 10, 256, 257, 258, 259, 260, 261, 17, 263, 264, 265, 266, 267, 268, 24, 270, 271, 272, 273, 274, 275, 31, 277, 278, 279, 280, 281, 282, 38, 284, 285, 286, 287, 288, 289, 45, 291, 292, 293, 294,), "3Lw": ( 0, 245, 244, 243, 4, 5, 6, 7, 238, 237, 236, 11, 12, 13, 14, 231, 230, 229, 18, 19, 20, 21, 224, 223, 222, 25, 26, 27, 28, 217, 216, 215, 32, 33, 34, 35, 210, 209, 208, 39, 40, 41, 42, 203, 202, 201, 46, 47, 48, 49, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 
65, 58, 51, 94, 87, 80, 73, 66, 59, 52, 95, 88, 81, 74, 67, 60, 53, 96, 89, 82, 75, 68, 61, 54, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 1, 2, 3, 102, 103, 104, 105, 8, 9, 10, 109, 110, 111, 112, 15, 16, 17, 116, 117, 118, 119, 22, 23, 24, 123, 124, 125, 126, 29, 30, 31, 130, 131, 132, 133, 36, 37, 38, 137, 138, 139, 140, 43, 44, 45, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 290, 289, 288, 204, 205, 206, 207, 283, 282, 281, 211, 212, 213, 214, 276, 275, 274, 218, 219, 220, 221, 269, 268, 267, 225, 226, 227, 228, 262, 261, 260, 232, 233, 234, 235, 255, 254, 253, 239, 240, 241, 242, 248, 247, 246, 99, 100, 101, 249, 250, 251, 252, 106, 107, 108, 256, 257, 258, 259, 113, 114, 115, 263, 264, 265, 266, 120, 121, 122, 270, 271, 272, 273, 127, 128, 129, 277, 278, 279, 280, 134, 135, 136, 284, 285, 286, 287, 141, 142, 143, 291, 292, 293, 294,), "3Lw'": ( 0, 99, 100, 101, 4, 5, 6, 7, 106, 107, 108, 11, 12, 13, 14, 113, 114, 115, 18, 19, 20, 21, 120, 121, 122, 25, 26, 27, 28, 127, 128, 129, 32, 33, 34, 35, 134, 135, 136, 39, 40, 41, 42, 141, 142, 143, 46, 47, 48, 49, 56, 63, 70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 54, 61, 68, 75, 82, 89, 96, 53, 60, 67, 74, 81, 88, 95, 52, 59, 66, 73, 80, 87, 94, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92, 246, 247, 248, 102, 103, 104, 105, 253, 254, 255, 109, 110, 111, 112, 260, 261, 262, 116, 117, 118, 119, 267, 268, 269, 123, 124, 125, 126, 274, 275, 276, 130, 131, 132, 133, 281, 282, 283, 137, 138, 139, 140, 288, 289, 290, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 
193, 194, 195, 196, 197, 198, 199, 200, 45, 44, 43, 204, 205, 206, 207, 38, 37, 36, 211, 212, 213, 214, 31, 30, 29, 218, 219, 220, 221, 24, 23, 22, 225, 226, 227, 228, 17, 16, 15, 232, 233, 234, 235, 10, 9, 8, 239, 240, 241, 242, 3, 2, 1, 245, 244, 243, 249, 250, 251, 252, 238, 237, 236, 256, 257, 258, 259, 231, 230, 229, 263, 264, 265, 266, 224, 223, 222, 270, 271, 272, 273, 217, 216, 215, 277, 278, 279, 280, 210, 209, 208, 284, 285, 286, 287, 203, 202, 201, 291, 292, 293, 294,), "3Lw2": ( 0, 246, 247, 248, 4, 5, 6, 7, 253, 254, 255, 11, 12, 13, 14, 260, 261, 262, 18, 19, 20, 21, 267, 268, 269, 25, 26, 27, 28, 274, 275, 276, 32, 33, 34, 35, 281, 282, 283, 39, 40, 41, 42, 288, 289, 290, 46, 47, 48, 49, 98, 97, 96, 95, 94, 93, 92, 91, 90, 89, 88, 87, 86, 85, 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 245, 244, 243, 102, 103, 104, 105, 238, 237, 236, 109, 110, 111, 112, 231, 230, 229, 116, 117, 118, 119, 224, 223, 222, 123, 124, 125, 126, 217, 216, 215, 130, 131, 132, 133, 210, 209, 208, 137, 138, 139, 140, 203, 202, 201, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 143, 142, 141, 204, 205, 206, 207, 136, 135, 134, 211, 212, 213, 214, 129, 128, 127, 218, 219, 220, 221, 122, 121, 120, 225, 226, 227, 228, 115, 114, 113, 232, 233, 234, 235, 108, 107, 106, 239, 240, 241, 242, 101, 100, 99, 1, 2, 3, 249, 250, 251, 252, 8, 9, 10, 256, 257, 258, 259, 15, 16, 17, 263, 264, 265, 266, 22, 23, 24, 270, 271, 272, 273, 29, 30, 31, 277, 278, 279, 280, 36, 37, 38, 284, 285, 286, 287, 43, 44, 45, 291, 292, 293, 294,), "3R": ( 0, 1, 2, 3, 4, 103, 6, 7, 8, 9, 10, 11, 110, 13, 14, 15, 16, 17, 18, 117, 20, 21, 22, 23, 24, 25, 124, 27, 28, 29, 30, 31, 32, 
131, 34, 35, 36, 37, 38, 39, 138, 41, 42, 43, 44, 45, 46, 145, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 250, 104, 105, 106, 107, 108, 109, 257, 111, 112, 113, 114, 115, 116, 264, 118, 119, 120, 121, 122, 123, 271, 125, 126, 127, 128, 129, 130, 278, 132, 133, 134, 135, 136, 137, 285, 139, 140, 141, 142, 143, 144, 292, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 47, 200, 201, 202, 203, 204, 205, 40, 207, 208, 209, 210, 211, 212, 33, 214, 215, 216, 217, 218, 219, 26, 221, 222, 223, 224, 225, 226, 19, 228, 229, 230, 231, 232, 233, 12, 235, 236, 237, 238, 239, 240, 5, 242, 243, 244, 245, 246, 247, 248, 249, 241, 251, 252, 253, 254, 255, 256, 234, 258, 259, 260, 261, 262, 263, 227, 265, 266, 267, 268, 269, 270, 220, 272, 273, 274, 275, 276, 277, 213, 279, 280, 281, 282, 283, 284, 206, 286, 287, 288, 289, 290, 291, 199, 293, 294,), "3R'": ( 0, 1, 2, 3, 4, 241, 6, 7, 8, 9, 10, 11, 234, 13, 14, 15, 16, 17, 18, 227, 20, 21, 22, 23, 24, 25, 220, 27, 28, 29, 30, 31, 32, 213, 34, 35, 36, 37, 38, 39, 206, 41, 42, 43, 44, 45, 46, 199, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 5, 104, 105, 106, 107, 108, 109, 12, 111, 112, 113, 114, 115, 116, 19, 118, 119, 120, 121, 122, 123, 26, 125, 126, 127, 128, 129, 130, 33, 132, 133, 134, 135, 136, 137, 40, 139, 140, 141, 142, 143, 144, 47, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 
172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 292, 200, 201, 202, 203, 204, 205, 285, 207, 208, 209, 210, 211, 212, 278, 214, 215, 216, 217, 218, 219, 271, 221, 222, 223, 224, 225, 226, 264, 228, 229, 230, 231, 232, 233, 257, 235, 236, 237, 238, 239, 240, 250, 242, 243, 244, 245, 246, 247, 248, 249, 103, 251, 252, 253, 254, 255, 256, 110, 258, 259, 260, 261, 262, 263, 117, 265, 266, 267, 268, 269, 270, 124, 272, 273, 274, 275, 276, 277, 131, 279, 280, 281, 282, 283, 284, 138, 286, 287, 288, 289, 290, 291, 145, 293, 294,), "3R2": ( 0, 1, 2, 3, 4, 250, 6, 7, 8, 9, 10, 11, 257, 13, 14, 15, 16, 17, 18, 264, 20, 21, 22, 23, 24, 25, 271, 27, 28, 29, 30, 31, 32, 278, 34, 35, 36, 37, 38, 39, 285, 41, 42, 43, 44, 45, 46, 292, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 241, 104, 105, 106, 107, 108, 109, 234, 111, 112, 113, 114, 115, 116, 227, 118, 119, 120, 121, 122, 123, 220, 125, 126, 127, 128, 129, 130, 213, 132, 133, 134, 135, 136, 137, 206, 139, 140, 141, 142, 143, 144, 199, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 145, 200, 201, 202, 203, 204, 205, 138, 207, 208, 209, 210, 211, 212, 131, 214, 215, 216, 217, 218, 219, 124, 221, 222, 223, 224, 225, 226, 117, 228, 229, 230, 231, 232, 233, 110, 235, 236, 237, 238, 239, 240, 103, 242, 243, 244, 245, 246, 247, 248, 249, 5, 251, 252, 253, 254, 255, 256, 12, 258, 259, 260, 261, 262, 263, 19, 265, 266, 267, 268, 269, 270, 26, 272, 273, 274, 275, 276, 277, 33, 279, 280, 281, 282, 283, 284, 40, 286, 287, 288, 289, 290, 291, 47, 293, 294,), "3Rw": 
( 0, 1, 2, 3, 4, 103, 104, 105, 8, 9, 10, 11, 110, 111, 112, 15, 16, 17, 18, 117, 118, 119, 22, 23, 24, 25, 124, 125, 126, 29, 30, 31, 32, 131, 132, 133, 36, 37, 38, 39, 138, 139, 140, 43, 44, 45, 46, 145, 146, 147, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 250, 251, 252, 106, 107, 108, 109, 257, 258, 259, 113, 114, 115, 116, 264, 265, 266, 120, 121, 122, 123, 271, 272, 273, 127, 128, 129, 130, 278, 279, 280, 134, 135, 136, 137, 285, 286, 287, 141, 142, 143, 144, 292, 293, 294, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 192, 185, 178, 171, 164, 157, 150, 193, 186, 179, 172, 165, 158, 151, 194, 187, 180, 173, 166, 159, 152, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154, 49, 48, 47, 200, 201, 202, 203, 42, 41, 40, 207, 208, 209, 210, 35, 34, 33, 214, 215, 216, 217, 28, 27, 26, 221, 222, 223, 224, 21, 20, 19, 228, 229, 230, 231, 14, 13, 12, 235, 236, 237, 238, 7, 6, 5, 242, 243, 244, 245, 246, 247, 248, 249, 241, 240, 239, 253, 254, 255, 256, 234, 233, 232, 260, 261, 262, 263, 227, 226, 225, 267, 268, 269, 270, 220, 219, 218, 274, 275, 276, 277, 213, 212, 211, 281, 282, 283, 284, 206, 205, 204, 288, 289, 290, 291, 199, 198, 197,), "3Rw'": ( 0, 1, 2, 3, 4, 241, 240, 239, 8, 9, 10, 11, 234, 233, 232, 15, 16, 17, 18, 227, 226, 225, 22, 23, 24, 25, 220, 219, 218, 29, 30, 31, 32, 213, 212, 211, 36, 37, 38, 39, 206, 205, 204, 43, 44, 45, 46, 199, 198, 197, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 5, 6, 7, 106, 107, 108, 109, 12, 13, 14, 113, 114, 115, 116, 19, 20, 21, 120, 121, 122, 123, 26, 27, 28, 127, 128, 129, 130, 33, 34, 35, 134, 135, 136, 137, 40, 41, 42, 141, 142, 143, 144, 
47, 48, 49, 154, 161, 168, 175, 182, 189, 196, 153, 160, 167, 174, 181, 188, 195, 152, 159, 166, 173, 180, 187, 194, 151, 158, 165, 172, 179, 186, 193, 150, 157, 164, 171, 178, 185, 192, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 294, 293, 292, 200, 201, 202, 203, 287, 286, 285, 207, 208, 209, 210, 280, 279, 278, 214, 215, 216, 217, 273, 272, 271, 221, 222, 223, 224, 266, 265, 264, 228, 229, 230, 231, 259, 258, 257, 235, 236, 237, 238, 252, 251, 250, 242, 243, 244, 245, 246, 247, 248, 249, 103, 104, 105, 253, 254, 255, 256, 110, 111, 112, 260, 261, 262, 263, 117, 118, 119, 267, 268, 269, 270, 124, 125, 126, 274, 275, 276, 277, 131, 132, 133, 281, 282, 283, 284, 138, 139, 140, 288, 289, 290, 291, 145, 146, 147,), "3Rw2": ( 0, 1, 2, 3, 4, 250, 251, 252, 8, 9, 10, 11, 257, 258, 259, 15, 16, 17, 18, 264, 265, 266, 22, 23, 24, 25, 271, 272, 273, 29, 30, 31, 32, 278, 279, 280, 36, 37, 38, 39, 285, 286, 287, 43, 44, 45, 46, 292, 293, 294, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 241, 240, 239, 106, 107, 108, 109, 234, 233, 232, 113, 114, 115, 116, 227, 226, 225, 120, 121, 122, 123, 220, 219, 218, 127, 128, 129, 130, 213, 212, 211, 134, 135, 136, 137, 206, 205, 204, 141, 142, 143, 144, 199, 198, 197, 196, 195, 194, 193, 192, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, 148, 147, 146, 145, 200, 201, 202, 203, 140, 139, 138, 207, 208, 209, 210, 133, 132, 131, 214, 215, 216, 217, 126, 125, 124, 221, 222, 223, 224, 119, 118, 117, 228, 229, 230, 231, 112, 111, 110, 235, 236, 237, 238, 105, 104, 103, 242, 243, 244, 245, 246, 247, 248, 249, 5, 6, 7, 253, 254, 255, 256, 12, 13, 14, 260, 261, 262, 263, 19, 20, 21, 
267, 268, 269, 270, 26, 27, 28, 274, 275, 276, 277, 33, 34, 35, 281, 282, 283, 284, 40, 41, 42, 288, 289, 290, 291, 47, 48, 49,), "3U": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 113, 114, 115, 116, 117, 118, 119, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 162, 163, 164, 165, 166, 167, 168, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 211, 212, 213, 214, 215, 216, 217, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 64, 65, 66, 67, 68, 69, 70, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3U'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 211, 212, 213, 214, 215, 216, 217, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 64, 65, 66, 67, 68, 69, 
70, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 113, 114, 115, 116, 117, 118, 119, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 162, 163, 164, 165, 166, 167, 168, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3U2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 162, 163, 164, 165, 166, 167, 168, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 211, 212, 213, 214, 215, 216, 217, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 64, 65, 66, 67, 68, 69, 70, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 113, 114, 115, 116, 117, 118, 119, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 
244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Uw": ( 0, 43, 36, 29, 22, 15, 8, 1, 44, 37, 30, 23, 16, 9, 2, 45, 38, 31, 24, 17, 10, 3, 46, 39, 32, 25, 18, 11, 4, 47, 40, 33, 26, 19, 12, 5, 48, 41, 34, 27, 20, 13, 6, 49, 42, 35, 28, 21, 14, 7, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Uw'": ( 0, 7, 14, 21, 28, 35, 42, 49, 6, 13, 20, 27, 34, 41, 48, 5, 12, 19, 26, 33, 40, 47, 4, 11, 18, 25, 32, 39, 46, 3, 10, 17, 24, 31, 38, 45, 2, 9, 16, 23, 30, 37, 44, 1, 8, 15, 22, 29, 36, 43, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 
86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "3Uw2": ( 0, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 
119, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "B": ( 0, 154, 161, 168, 175, 182, 189, 196, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 7, 51, 52, 53, 54, 55, 56, 6, 58, 59, 60, 61, 62, 63, 5, 65, 66, 67, 68, 69, 70, 4, 72, 73, 74, 75, 76, 77, 3, 79, 80, 81, 82, 83, 84, 2, 86, 87, 88, 89, 90, 91, 1, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 294, 155, 156, 157, 158, 159, 160, 293, 162, 163, 164, 165, 166, 167, 292, 169, 170, 171, 172, 173, 174, 291, 176, 177, 178, 179, 180, 181, 290, 183, 184, 185, 186, 187, 188, 289, 190, 191, 192, 193, 194, 195, 288, 239, 232, 225, 218, 211, 204, 197, 240, 233, 226, 219, 212, 205, 198, 241, 234, 227, 220, 213, 206, 199, 242, 235, 228, 221, 214, 207, 200, 243, 236, 229, 222, 215, 208, 201, 244, 237, 230, 223, 216, 209, 202, 245, 238, 231, 224, 217, 210, 203, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 50, 57, 64, 71, 78, 85, 92,), "B'": ( 0, 92, 85, 78, 71, 64, 57, 50, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 288, 51, 52, 53, 54, 55, 56, 
289, 58, 59, 60, 61, 62, 63, 290, 65, 66, 67, 68, 69, 70, 291, 72, 73, 74, 75, 76, 77, 292, 79, 80, 81, 82, 83, 84, 293, 86, 87, 88, 89, 90, 91, 294, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 1, 155, 156, 157, 158, 159, 160, 2, 162, 163, 164, 165, 166, 167, 3, 169, 170, 171, 172, 173, 174, 4, 176, 177, 178, 179, 180, 181, 5, 183, 184, 185, 186, 187, 188, 6, 190, 191, 192, 193, 194, 195, 7, 203, 210, 217, 224, 231, 238, 245, 202, 209, 216, 223, 230, 237, 244, 201, 208, 215, 222, 229, 236, 243, 200, 207, 214, 221, 228, 235, 242, 199, 206, 213, 220, 227, 234, 241, 198, 205, 212, 219, 226, 233, 240, 197, 204, 211, 218, 225, 232, 239, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 196, 189, 182, 175, 168, 161, 154,), "B2": ( 0, 294, 293, 292, 291, 290, 289, 288, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 196, 51, 52, 53, 54, 55, 56, 189, 58, 59, 60, 61, 62, 63, 182, 65, 66, 67, 68, 69, 70, 175, 72, 73, 74, 75, 76, 77, 168, 79, 80, 81, 82, 83, 84, 161, 86, 87, 88, 89, 90, 91, 154, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 92, 155, 156, 157, 158, 159, 160, 85, 162, 163, 164, 165, 166, 167, 78, 169, 170, 171, 172, 173, 174, 71, 176, 177, 178, 179, 180, 181, 64, 183, 184, 185, 186, 187, 188, 
57, 190, 191, 192, 193, 194, 195, 50, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 7, 6, 5, 4, 3, 2, 1,), "Bw": ( 0, 154, 161, 168, 175, 182, 189, 196, 153, 160, 167, 174, 181, 188, 195, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 7, 14, 52, 53, 54, 55, 56, 6, 13, 59, 60, 61, 62, 63, 5, 12, 66, 67, 68, 69, 70, 4, 11, 73, 74, 75, 76, 77, 3, 10, 80, 81, 82, 83, 84, 2, 9, 87, 88, 89, 90, 91, 1, 8, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 287, 294, 155, 156, 157, 158, 159, 286, 293, 162, 163, 164, 165, 166, 285, 292, 169, 170, 171, 172, 173, 284, 291, 176, 177, 178, 179, 180, 283, 290, 183, 184, 185, 186, 187, 282, 289, 190, 191, 192, 193, 194, 281, 288, 239, 232, 225, 218, 211, 204, 197, 240, 233, 226, 219, 212, 205, 198, 241, 234, 227, 220, 213, 206, 199, 242, 235, 228, 221, 214, 207, 200, 243, 236, 229, 222, 215, 208, 201, 244, 237, 230, 223, 216, 209, 202, 245, 238, 231, 224, 217, 210, 203, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92,), "Bw'": ( 0, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 65, 58, 51, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 
25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 288, 281, 52, 53, 54, 55, 56, 289, 282, 59, 60, 61, 62, 63, 290, 283, 66, 67, 68, 69, 70, 291, 284, 73, 74, 75, 76, 77, 292, 285, 80, 81, 82, 83, 84, 293, 286, 87, 88, 89, 90, 91, 294, 287, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 8, 1, 155, 156, 157, 158, 159, 9, 2, 162, 163, 164, 165, 166, 10, 3, 169, 170, 171, 172, 173, 11, 4, 176, 177, 178, 179, 180, 12, 5, 183, 184, 185, 186, 187, 13, 6, 190, 191, 192, 193, 194, 14, 7, 203, 210, 217, 224, 231, 238, 245, 202, 209, 216, 223, 230, 237, 244, 201, 208, 215, 222, 229, 236, 243, 200, 207, 214, 221, 228, 235, 242, 199, 206, 213, 220, 227, 234, 241, 198, 205, 212, 219, 226, 233, 240, 197, 204, 211, 218, 225, 232, 239, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154,), "Bw2": ( 0, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 196, 195, 52, 53, 54, 55, 56, 189, 188, 59, 60, 61, 62, 63, 182, 181, 66, 67, 68, 69, 70, 175, 174, 73, 74, 75, 76, 77, 168, 167, 80, 81, 82, 83, 84, 161, 160, 87, 88, 89, 90, 91, 154, 153, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 93, 92, 155, 156, 157, 158, 
159, 86, 85, 162, 163, 164, 165, 166, 79, 78, 169, 170, 171, 172, 173, 72, 71, 176, 177, 178, 179, 180, 65, 64, 183, 184, 185, 186, 187, 58, 57, 190, 191, 192, 193, 194, 51, 50, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1,), "D": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 239, 240, 241, 242, 243, 244, 245, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 92, 93, 94, 95, 96, 97, 98, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 141, 142, 143, 144, 145, 146, 147, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 190, 191, 192, 193, 194, 195, 196, 288, 281, 274, 267, 260, 253, 246, 289, 282, 275, 268, 261, 254, 247, 290, 283, 276, 269, 262, 255, 248, 291, 284, 277, 270, 263, 256, 249, 292, 285, 278, 271, 264, 257, 250, 293, 286, 279, 272, 265, 258, 251, 294, 287, 280, 
273, 266, 259, 252,), "D'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 141, 142, 143, 144, 145, 146, 147, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 190, 191, 192, 193, 194, 195, 196, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 239, 240, 241, 242, 243, 244, 245, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 92, 93, 94, 95, 96, 97, 98, 252, 259, 266, 273, 280, 287, 294, 251, 258, 265, 272, 279, 286, 293, 250, 257, 264, 271, 278, 285, 292, 249, 256, 263, 270, 277, 284, 291, 248, 255, 262, 269, 276, 283, 290, 247, 254, 261, 268, 275, 282, 289, 246, 253, 260, 267, 274, 281, 288,), "D2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 190, 191, 192, 193, 194, 195, 196, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 
139, 140, 239, 240, 241, 242, 243, 244, 245, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 92, 93, 94, 95, 96, 97, 98, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 141, 142, 143, 144, 145, 146, 147, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246,), "Dw": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 288, 281, 274, 267, 260, 253, 246, 289, 282, 275, 268, 261, 254, 247, 290, 283, 276, 269, 262, 
255, 248, 291, 284, 277, 270, 263, 256, 249, 292, 285, 278, 271, 264, 257, 250, 293, 286, 279, 272, 265, 258, 251, 294, 287, 280, 273, 266, 259, 252,), "Dw'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 252, 259, 266, 273, 280, 287, 294, 251, 258, 265, 272, 279, 286, 293, 250, 257, 264, 271, 278, 285, 292, 249, 256, 263, 270, 277, 284, 291, 248, 255, 262, 269, 276, 283, 290, 247, 254, 261, 268, 275, 282, 289, 246, 253, 260, 267, 274, 281, 288,), "Dw2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 
111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246,), "F": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 98, 91, 84, 77, 70, 63, 56, 50, 51, 52, 53, 54, 55, 246, 57, 58, 59, 60, 61, 62, 247, 64, 65, 66, 67, 68, 69, 248, 71, 72, 73, 74, 75, 76, 249, 78, 79, 80, 81, 82, 83, 250, 85, 86, 87, 88, 89, 90, 251, 92, 93, 94, 95, 96, 97, 252, 141, 134, 127, 120, 113, 106, 99, 142, 135, 128, 121, 114, 107, 100, 143, 136, 129, 122, 115, 108, 101, 144, 137, 130, 123, 116, 109, 102, 145, 138, 131, 124, 117, 110, 103, 146, 139, 132, 125, 118, 111, 104, 147, 140, 133, 126, 119, 112, 105, 43, 149, 150, 151, 152, 153, 154, 44, 156, 157, 158, 159, 160, 161, 45, 163, 164, 165, 166, 167, 168, 46, 170, 171, 172, 173, 174, 175, 47, 177, 178, 179, 180, 181, 182, 48, 184, 185, 186, 187, 188, 189, 49, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 
239, 240, 241, 242, 243, 244, 245, 190, 183, 176, 169, 162, 155, 148, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "F'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 148, 155, 162, 169, 176, 183, 190, 50, 51, 52, 53, 54, 55, 49, 57, 58, 59, 60, 61, 62, 48, 64, 65, 66, 67, 68, 69, 47, 71, 72, 73, 74, 75, 76, 46, 78, 79, 80, 81, 82, 83, 45, 85, 86, 87, 88, 89, 90, 44, 92, 93, 94, 95, 96, 97, 43, 105, 112, 119, 126, 133, 140, 147, 104, 111, 118, 125, 132, 139, 146, 103, 110, 117, 124, 131, 138, 145, 102, 109, 116, 123, 130, 137, 144, 101, 108, 115, 122, 129, 136, 143, 100, 107, 114, 121, 128, 135, 142, 99, 106, 113, 120, 127, 134, 141, 252, 149, 150, 151, 152, 153, 154, 251, 156, 157, 158, 159, 160, 161, 250, 163, 164, 165, 166, 167, 168, 249, 170, 171, 172, 173, 174, 175, 248, 177, 178, 179, 180, 181, 182, 247, 184, 185, 186, 187, 188, 189, 246, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 56, 63, 70, 77, 84, 91, 98, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "F2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 252, 251, 250, 249, 248, 247, 246, 50, 51, 52, 53, 54, 55, 190, 57, 58, 59, 60, 61, 62, 183, 64, 65, 66, 67, 68, 69, 176, 71, 72, 73, 74, 75, 76, 169, 78, 79, 80, 81, 82, 
83, 162, 85, 86, 87, 88, 89, 90, 155, 92, 93, 94, 95, 96, 97, 148, 147, 146, 145, 144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, 99, 98, 149, 150, 151, 152, 153, 154, 91, 156, 157, 158, 159, 160, 161, 84, 163, 164, 165, 166, 167, 168, 77, 170, 171, 172, 173, 174, 175, 70, 177, 178, 179, 180, 181, 182, 63, 184, 185, 186, 187, 188, 189, 56, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 49, 48, 47, 46, 45, 44, 43, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "Fw": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 50, 51, 52, 53, 54, 253, 246, 57, 58, 59, 60, 61, 254, 247, 64, 65, 66, 67, 68, 255, 248, 71, 72, 73, 74, 75, 256, 249, 78, 79, 80, 81, 82, 257, 250, 85, 86, 87, 88, 89, 258, 251, 92, 93, 94, 95, 96, 259, 252, 141, 134, 127, 120, 113, 106, 99, 142, 135, 128, 121, 114, 107, 100, 143, 136, 129, 122, 115, 108, 101, 144, 137, 130, 123, 116, 109, 102, 145, 138, 131, 124, 117, 110, 103, 146, 139, 132, 125, 118, 111, 104, 147, 140, 133, 126, 119, 112, 105, 43, 36, 150, 151, 152, 153, 154, 44, 37, 157, 158, 159, 160, 161, 45, 38, 164, 165, 166, 167, 168, 46, 39, 171, 172, 173, 174, 175, 47, 40, 178, 179, 180, 181, 182, 48, 41, 185, 186, 187, 188, 189, 49, 42, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 
213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "Fw'": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 50, 51, 52, 53, 54, 42, 49, 57, 58, 59, 60, 61, 41, 48, 64, 65, 66, 67, 68, 40, 47, 71, 72, 73, 74, 75, 39, 46, 78, 79, 80, 81, 82, 38, 45, 85, 86, 87, 88, 89, 37, 44, 92, 93, 94, 95, 96, 36, 43, 105, 112, 119, 126, 133, 140, 147, 104, 111, 118, 125, 132, 139, 146, 103, 110, 117, 124, 131, 138, 145, 102, 109, 116, 123, 130, 137, 144, 101, 108, 115, 122, 129, 136, 143, 100, 107, 114, 121, 128, 135, 142, 99, 106, 113, 120, 127, 134, 141, 252, 259, 150, 151, 152, 153, 154, 251, 258, 157, 158, 159, 160, 161, 250, 257, 164, 165, 166, 167, 168, 249, 256, 171, 172, 173, 174, 175, 248, 255, 178, 179, 180, 181, 182, 247, 254, 185, 186, 187, 188, 189, 246, 253, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 56, 63, 70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "Fw2": ( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 
246, 50, 51, 52, 53, 54, 191, 190, 57, 58, 59, 60, 61, 184, 183, 64, 65, 66, 67, 68, 177, 176, 71, 72, 73, 74, 75, 170, 169, 78, 79, 80, 81, 82, 163, 162, 85, 86, 87, 88, 89, 156, 155, 92, 93, 94, 95, 96, 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, 99, 98, 97, 150, 151, 152, 153, 154, 91, 90, 157, 158, 159, 160, 161, 84, 83, 164, 165, 166, 167, 168, 77, 76, 171, 172, 173, 174, 175, 70, 69, 178, 179, 180, 181, 182, 63, 62, 185, 186, 187, 188, 189, 56, 55, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "L": ( 0, 245, 2, 3, 4, 5, 6, 7, 238, 9, 10, 11, 12, 13, 14, 231, 16, 17, 18, 19, 20, 21, 224, 23, 24, 25, 26, 27, 28, 217, 30, 31, 32, 33, 34, 35, 210, 37, 38, 39, 40, 41, 42, 203, 44, 45, 46, 47, 48, 49, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 65, 58, 51, 94, 87, 80, 73, 66, 59, 52, 95, 88, 81, 74, 67, 60, 53, 96, 89, 82, 75, 68, 61, 54, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 1, 100, 101, 102, 103, 104, 105, 8, 107, 108, 109, 110, 111, 112, 15, 114, 115, 116, 117, 118, 119, 22, 121, 122, 123, 124, 125, 126, 29, 128, 129, 130, 131, 132, 133, 36, 135, 136, 137, 138, 139, 140, 43, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 
186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 288, 204, 205, 206, 207, 208, 209, 281, 211, 212, 213, 214, 215, 216, 274, 218, 219, 220, 221, 222, 223, 267, 225, 226, 227, 228, 229, 230, 260, 232, 233, 234, 235, 236, 237, 253, 239, 240, 241, 242, 243, 244, 246, 99, 247, 248, 249, 250, 251, 252, 106, 254, 255, 256, 257, 258, 259, 113, 261, 262, 263, 264, 265, 266, 120, 268, 269, 270, 271, 272, 273, 127, 275, 276, 277, 278, 279, 280, 134, 282, 283, 284, 285, 286, 287, 141, 289, 290, 291, 292, 293, 294,), "L'": ( 0, 99, 2, 3, 4, 5, 6, 7, 106, 9, 10, 11, 12, 13, 14, 113, 16, 17, 18, 19, 20, 21, 120, 23, 24, 25, 26, 27, 28, 127, 30, 31, 32, 33, 34, 35, 134, 37, 38, 39, 40, 41, 42, 141, 44, 45, 46, 47, 48, 49, 56, 63, 70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 54, 61, 68, 75, 82, 89, 96, 53, 60, 67, 74, 81, 88, 95, 52, 59, 66, 73, 80, 87, 94, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92, 246, 100, 101, 102, 103, 104, 105, 253, 107, 108, 109, 110, 111, 112, 260, 114, 115, 116, 117, 118, 119, 267, 121, 122, 123, 124, 125, 126, 274, 128, 129, 130, 131, 132, 133, 281, 135, 136, 137, 138, 139, 140, 288, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 43, 204, 205, 206, 207, 208, 209, 36, 211, 212, 213, 214, 215, 216, 29, 218, 219, 220, 221, 222, 223, 22, 225, 226, 227, 228, 229, 230, 15, 232, 233, 234, 235, 236, 237, 8, 239, 240, 241, 242, 243, 244, 1, 245, 247, 248, 249, 250, 251, 252, 238, 254, 255, 256, 257, 258, 259, 231, 261, 262, 263, 264, 265, 266, 224, 268, 269, 270, 271, 272, 273, 217, 275, 276, 277, 278, 279, 280, 210, 282, 283, 284, 285, 286, 287, 203, 289, 290, 291, 292, 293, 294,), "L2": ( 0, 246, 2, 3, 4, 5, 6, 7, 253, 9, 10, 11, 12, 13, 14, 260, 16, 17, 18, 
19, 20, 21, 267, 23, 24, 25, 26, 27, 28, 274, 30, 31, 32, 33, 34, 35, 281, 37, 38, 39, 40, 41, 42, 288, 44, 45, 46, 47, 48, 49, 98, 97, 96, 95, 94, 93, 92, 91, 90, 89, 88, 87, 86, 85, 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 245, 100, 101, 102, 103, 104, 105, 238, 107, 108, 109, 110, 111, 112, 231, 114, 115, 116, 117, 118, 119, 224, 121, 122, 123, 124, 125, 126, 217, 128, 129, 130, 131, 132, 133, 210, 135, 136, 137, 138, 139, 140, 203, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 141, 204, 205, 206, 207, 208, 209, 134, 211, 212, 213, 214, 215, 216, 127, 218, 219, 220, 221, 222, 223, 120, 225, 226, 227, 228, 229, 230, 113, 232, 233, 234, 235, 236, 237, 106, 239, 240, 241, 242, 243, 244, 99, 1, 247, 248, 249, 250, 251, 252, 8, 254, 255, 256, 257, 258, 259, 15, 261, 262, 263, 264, 265, 266, 22, 268, 269, 270, 271, 272, 273, 29, 275, 276, 277, 278, 279, 280, 36, 282, 283, 284, 285, 286, 287, 43, 289, 290, 291, 292, 293, 294,), "Lw": ( 0, 245, 244, 3, 4, 5, 6, 7, 238, 237, 10, 11, 12, 13, 14, 231, 230, 17, 18, 19, 20, 21, 224, 223, 24, 25, 26, 27, 28, 217, 216, 31, 32, 33, 34, 35, 210, 209, 38, 39, 40, 41, 42, 203, 202, 45, 46, 47, 48, 49, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 65, 58, 51, 94, 87, 80, 73, 66, 59, 52, 95, 88, 81, 74, 67, 60, 53, 96, 89, 82, 75, 68, 61, 54, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 1, 2, 101, 102, 103, 104, 105, 8, 9, 108, 109, 110, 111, 112, 15, 16, 115, 116, 117, 118, 119, 22, 23, 122, 123, 124, 125, 126, 29, 30, 129, 130, 131, 132, 133, 36, 37, 136, 137, 138, 139, 140, 43, 44, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 
161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 289, 288, 204, 205, 206, 207, 208, 282, 281, 211, 212, 213, 214, 215, 275, 274, 218, 219, 220, 221, 222, 268, 267, 225, 226, 227, 228, 229, 261, 260, 232, 233, 234, 235, 236, 254, 253, 239, 240, 241, 242, 243, 247, 246, 99, 100, 248, 249, 250, 251, 252, 106, 107, 255, 256, 257, 258, 259, 113, 114, 262, 263, 264, 265, 266, 120, 121, 269, 270, 271, 272, 273, 127, 128, 276, 277, 278, 279, 280, 134, 135, 283, 284, 285, 286, 287, 141, 142, 290, 291, 292, 293, 294,), "Lw'": ( 0, 99, 100, 3, 4, 5, 6, 7, 106, 107, 10, 11, 12, 13, 14, 113, 114, 17, 18, 19, 20, 21, 120, 121, 24, 25, 26, 27, 28, 127, 128, 31, 32, 33, 34, 35, 134, 135, 38, 39, 40, 41, 42, 141, 142, 45, 46, 47, 48, 49, 56, 63, 70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 54, 61, 68, 75, 82, 89, 96, 53, 60, 67, 74, 81, 88, 95, 52, 59, 66, 73, 80, 87, 94, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92, 246, 247, 101, 102, 103, 104, 105, 253, 254, 108, 109, 110, 111, 112, 260, 261, 115, 116, 117, 118, 119, 267, 268, 122, 123, 124, 125, 126, 274, 275, 129, 130, 131, 132, 133, 281, 282, 136, 137, 138, 139, 140, 288, 289, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 44, 43, 204, 205, 206, 207, 208, 37, 36, 211, 212, 213, 214, 215, 30, 29, 218, 219, 220, 221, 222, 23, 22, 225, 226, 227, 228, 229, 16, 15, 232, 233, 234, 235, 236, 9, 8, 239, 240, 241, 242, 243, 2, 1, 245, 244, 248, 249, 250, 251, 252, 238, 237, 255, 256, 257, 258, 259, 231, 230, 262, 263, 264, 265, 266, 224, 223, 269, 270, 271, 272, 273, 217, 216, 276, 277, 278, 279, 280, 210, 209, 283, 284, 285, 
286, 287, 203, 202, 290, 291, 292, 293, 294,), "Lw2": ( 0, 246, 247, 3, 4, 5, 6, 7, 253, 254, 10, 11, 12, 13, 14, 260, 261, 17, 18, 19, 20, 21, 267, 268, 24, 25, 26, 27, 28, 274, 275, 31, 32, 33, 34, 35, 281, 282, 38, 39, 40, 41, 42, 288, 289, 45, 46, 47, 48, 49, 98, 97, 96, 95, 94, 93, 92, 91, 90, 89, 88, 87, 86, 85, 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 245, 244, 101, 102, 103, 104, 105, 238, 237, 108, 109, 110, 111, 112, 231, 230, 115, 116, 117, 118, 119, 224, 223, 122, 123, 124, 125, 126, 217, 216, 129, 130, 131, 132, 133, 210, 209, 136, 137, 138, 139, 140, 203, 202, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 142, 141, 204, 205, 206, 207, 208, 135, 134, 211, 212, 213, 214, 215, 128, 127, 218, 219, 220, 221, 222, 121, 120, 225, 226, 227, 228, 229, 114, 113, 232, 233, 234, 235, 236, 107, 106, 239, 240, 241, 242, 243, 100, 99, 1, 2, 248, 249, 250, 251, 252, 8, 9, 255, 256, 257, 258, 259, 15, 16, 262, 263, 264, 265, 266, 22, 23, 269, 270, 271, 272, 273, 29, 30, 276, 277, 278, 279, 280, 36, 37, 283, 284, 285, 286, 287, 43, 44, 290, 291, 292, 293, 294,), "R": ( 0, 1, 2, 3, 4, 5, 6, 105, 8, 9, 10, 11, 12, 13, 112, 15, 16, 17, 18, 19, 20, 119, 22, 23, 24, 25, 26, 27, 126, 29, 30, 31, 32, 33, 34, 133, 36, 37, 38, 39, 40, 41, 140, 43, 44, 45, 46, 47, 48, 147, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 252, 106, 107, 108, 109, 110, 111, 259, 113, 114, 115, 116, 117, 118, 266, 120, 121, 122, 123, 124, 125, 273, 127, 128, 129, 130, 131, 132, 280, 
134, 135, 136, 137, 138, 139, 287, 141, 142, 143, 144, 145, 146, 294, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 192, 185, 178, 171, 164, 157, 150, 193, 186, 179, 172, 165, 158, 151, 194, 187, 180, 173, 166, 159, 152, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154, 49, 198, 199, 200, 201, 202, 203, 42, 205, 206, 207, 208, 209, 210, 35, 212, 213, 214, 215, 216, 217, 28, 219, 220, 221, 222, 223, 224, 21, 226, 227, 228, 229, 230, 231, 14, 233, 234, 235, 236, 237, 238, 7, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 239, 253, 254, 255, 256, 257, 258, 232, 260, 261, 262, 263, 264, 265, 225, 267, 268, 269, 270, 271, 272, 218, 274, 275, 276, 277, 278, 279, 211, 281, 282, 283, 284, 285, 286, 204, 288, 289, 290, 291, 292, 293, 197,), "R'": ( 0, 1, 2, 3, 4, 5, 6, 239, 8, 9, 10, 11, 12, 13, 232, 15, 16, 17, 18, 19, 20, 225, 22, 23, 24, 25, 26, 27, 218, 29, 30, 31, 32, 33, 34, 211, 36, 37, 38, 39, 40, 41, 204, 43, 44, 45, 46, 47, 48, 197, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 7, 106, 107, 108, 109, 110, 111, 14, 113, 114, 115, 116, 117, 118, 21, 120, 121, 122, 123, 124, 125, 28, 127, 128, 129, 130, 131, 132, 35, 134, 135, 136, 137, 138, 139, 42, 141, 142, 143, 144, 145, 146, 49, 154, 161, 168, 175, 182, 189, 196, 153, 160, 167, 174, 181, 188, 195, 152, 159, 166, 173, 180, 187, 194, 151, 158, 165, 172, 179, 186, 193, 150, 157, 164, 171, 178, 185, 192, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 294, 198, 199, 200, 201, 202, 203, 287, 205, 206, 207, 208, 209, 210, 280, 212, 213, 214, 215, 216, 217, 273, 219, 220, 221, 222, 223, 224, 266, 226, 227, 228, 229, 230, 231, 259, 233, 234, 235, 236, 237, 238, 252, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 105, 253, 254, 255, 256, 257, 258, 112, 
260, 261, 262, 263, 264, 265, 119, 267, 268, 269, 270, 271, 272, 126, 274, 275, 276, 277, 278, 279, 133, 281, 282, 283, 284, 285, 286, 140, 288, 289, 290, 291, 292, 293, 147,), "R2": ( 0, 1, 2, 3, 4, 5, 6, 252, 8, 9, 10, 11, 12, 13, 259, 15, 16, 17, 18, 19, 20, 266, 22, 23, 24, 25, 26, 27, 273, 29, 30, 31, 32, 33, 34, 280, 36, 37, 38, 39, 40, 41, 287, 43, 44, 45, 46, 47, 48, 294, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 239, 106, 107, 108, 109, 110, 111, 232, 113, 114, 115, 116, 117, 118, 225, 120, 121, 122, 123, 124, 125, 218, 127, 128, 129, 130, 131, 132, 211, 134, 135, 136, 137, 138, 139, 204, 141, 142, 143, 144, 145, 146, 197, 196, 195, 194, 193, 192, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, 148, 147, 198, 199, 200, 201, 202, 203, 140, 205, 206, 207, 208, 209, 210, 133, 212, 213, 214, 215, 216, 217, 126, 219, 220, 221, 222, 223, 224, 119, 226, 227, 228, 229, 230, 231, 112, 233, 234, 235, 236, 237, 238, 105, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 7, 253, 254, 255, 256, 257, 258, 14, 260, 261, 262, 263, 264, 265, 21, 267, 268, 269, 270, 271, 272, 28, 274, 275, 276, 277, 278, 279, 35, 281, 282, 283, 284, 285, 286, 42, 288, 289, 290, 291, 292, 293, 49,), "Rw": ( 0, 1, 2, 3, 4, 5, 104, 105, 8, 9, 10, 11, 12, 111, 112, 15, 16, 17, 18, 19, 118, 119, 22, 23, 24, 25, 26, 125, 126, 29, 30, 31, 32, 33, 132, 133, 36, 37, 38, 39, 40, 139, 140, 43, 44, 45, 46, 47, 146, 147, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 251, 252, 
106, 107, 108, 109, 110, 258, 259, 113, 114, 115, 116, 117, 265, 266, 120, 121, 122, 123, 124, 272, 273, 127, 128, 129, 130, 131, 279, 280, 134, 135, 136, 137, 138, 286, 287, 141, 142, 143, 144, 145, 293, 294, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 192, 185, 178, 171, 164, 157, 150, 193, 186, 179, 172, 165, 158, 151, 194, 187, 180, 173, 166, 159, 152, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154, 49, 48, 199, 200, 201, 202, 203, 42, 41, 206, 207, 208, 209, 210, 35, 34, 213, 214, 215, 216, 217, 28, 27, 220, 221, 222, 223, 224, 21, 20, 227, 228, 229, 230, 231, 14, 13, 234, 235, 236, 237, 238, 7, 6, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 240, 239, 253, 254, 255, 256, 257, 233, 232, 260, 261, 262, 263, 264, 226, 225, 267, 268, 269, 270, 271, 219, 218, 274, 275, 276, 277, 278, 212, 211, 281, 282, 283, 284, 285, 205, 204, 288, 289, 290, 291, 292, 198, 197,), "Rw'": ( 0, 1, 2, 3, 4, 5, 240, 239, 8, 9, 10, 11, 12, 233, 232, 15, 16, 17, 18, 19, 226, 225, 22, 23, 24, 25, 26, 219, 218, 29, 30, 31, 32, 33, 212, 211, 36, 37, 38, 39, 40, 205, 204, 43, 44, 45, 46, 47, 198, 197, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 6, 7, 106, 107, 108, 109, 110, 13, 14, 113, 114, 115, 116, 117, 20, 21, 120, 121, 122, 123, 124, 27, 28, 127, 128, 129, 130, 131, 34, 35, 134, 135, 136, 137, 138, 41, 42, 141, 142, 143, 144, 145, 48, 49, 154, 161, 168, 175, 182, 189, 196, 153, 160, 167, 174, 181, 188, 195, 152, 159, 166, 173, 180, 187, 194, 151, 158, 165, 172, 179, 186, 193, 150, 157, 164, 171, 178, 185, 192, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 294, 293, 199, 200, 201, 202, 203, 287, 286, 206, 207, 208, 209, 210, 280, 279, 213, 214, 215, 216, 217, 273, 272, 220, 221, 222, 223, 224, 266, 265, 227, 228, 229, 230, 231, 259, 258, 
234, 235, 236, 237, 238, 252, 251, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 104, 105, 253, 254, 255, 256, 257, 111, 112, 260, 261, 262, 263, 264, 118, 119, 267, 268, 269, 270, 271, 125, 126, 274, 275, 276, 277, 278, 132, 133, 281, 282, 283, 284, 285, 139, 140, 288, 289, 290, 291, 292, 146, 147,), "Rw2": ( 0, 1, 2, 3, 4, 5, 251, 252, 8, 9, 10, 11, 12, 258, 259, 15, 16, 17, 18, 19, 265, 266, 22, 23, 24, 25, 26, 272, 273, 29, 30, 31, 32, 33, 279, 280, 36, 37, 38, 39, 40, 286, 287, 43, 44, 45, 46, 47, 293, 294, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 240, 239, 106, 107, 108, 109, 110, 233, 232, 113, 114, 115, 116, 117, 226, 225, 120, 121, 122, 123, 124, 219, 218, 127, 128, 129, 130, 131, 212, 211, 134, 135, 136, 137, 138, 205, 204, 141, 142, 143, 144, 145, 198, 197, 196, 195, 194, 193, 192, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, 148, 147, 146, 199, 200, 201, 202, 203, 140, 139, 206, 207, 208, 209, 210, 133, 132, 213, 214, 215, 216, 217, 126, 125, 220, 221, 222, 223, 224, 119, 118, 227, 228, 229, 230, 231, 112, 111, 234, 235, 236, 237, 238, 105, 104, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 6, 7, 253, 254, 255, 256, 257, 13, 14, 260, 261, 262, 263, 264, 20, 21, 267, 268, 269, 270, 271, 27, 28, 274, 275, 276, 277, 278, 34, 35, 281, 282, 283, 284, 285, 41, 42, 288, 289, 290, 291, 292, 48, 49,), "U": ( 0, 43, 36, 29, 22, 15, 8, 1, 44, 37, 30, 23, 16, 9, 2, 45, 38, 31, 24, 17, 10, 3, 46, 39, 32, 25, 18, 11, 4, 47, 40, 33, 26, 19, 12, 5, 48, 41, 34, 27, 20, 13, 6, 49, 42, 35, 28, 21, 14, 7, 99, 100, 101, 102, 103, 104, 105, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 
78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 148, 149, 150, 151, 152, 153, 154, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 197, 198, 199, 200, 201, 202, 203, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 50, 51, 52, 53, 54, 55, 56, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "U'": ( 0, 7, 14, 21, 28, 35, 42, 49, 6, 13, 20, 27, 34, 41, 48, 5, 12, 19, 26, 33, 40, 47, 4, 11, 18, 25, 32, 39, 46, 3, 10, 17, 24, 31, 38, 45, 2, 9, 16, 23, 30, 37, 44, 1, 8, 15, 22, 29, 36, 43, 197, 198, 199, 200, 201, 202, 203, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 50, 51, 52, 53, 54, 55, 56, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 99, 100, 101, 102, 103, 104, 105, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 148, 149, 150, 151, 152, 153, 154, 204, 205, 206, 207, 
208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "U2": ( 0, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 148, 149, 150, 151, 152, 153, 154, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 197, 198, 199, 200, 201, 202, 203, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 50, 51, 52, 53, 54, 55, 56, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 99, 100, 101, 102, 103, 104, 105, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "Uw": ( 0, 43, 36, 29, 22, 15, 8, 1, 44, 37, 30, 23, 16, 9, 2, 45, 38, 31, 24, 17, 10, 3, 46, 39, 32, 25, 18, 11, 4, 47, 40, 33, 26, 19, 12, 5, 48, 41, 34, 27, 20, 13, 6, 49, 42, 35, 28, 
21, 14, 7, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "Uw'": ( 0, 7, 14, 21, 28, 35, 42, 49, 6, 13, 20, 27, 34, 41, 48, 5, 12, 19, 26, 33, 40, 47, 4, 11, 18, 25, 32, 39, 46, 3, 10, 17, 24, 31, 38, 45, 2, 9, 16, 23, 30, 37, 44, 1, 8, 15, 22, 29, 36, 43, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 
182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "Uw2": ( 0, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,), "x": ( 0, 99, 100, 101, 102, 103, 104, 105, 106, 107, 
108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 56, 63, 70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 54, 61, 68, 75, 82, 89, 96, 53, 60, 67, 74, 81, 88, 95, 52, 59, 66, 73, 80, 87, 94, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 192, 185, 178, 171, 164, 157, 150, 193, 186, 179, 172, 165, 158, 151, 194, 187, 180, 173, 166, 159, 152, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197,), "x'": ( 0, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 65, 58, 51, 94, 87, 80, 73, 66, 59, 52, 95, 88, 81, 74, 67, 60, 53, 96, 89, 82, 75, 68, 61, 54, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 154, 161, 168, 175, 182, 189, 196, 153, 
160, 167, 174, 181, 188, 195, 152, 159, 166, 173, 180, 187, 194, 151, 158, 165, 172, 179, 186, 193, 150, 157, 164, 171, 178, 185, 192, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,), "x2": ( 0, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 98, 97, 96, 95, 94, 93, 92, 91, 90, 89, 88, 87, 86, 85, 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, 196, 195, 194, 193, 192, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, 99, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,), "y": ( 0, 43, 36, 29, 22, 15, 8, 1, 44, 37, 30, 23, 16, 9, 2, 45, 38, 31, 24, 17, 10, 3, 46, 39, 32, 25, 18, 11, 4, 47, 40, 33, 26, 19, 12, 5, 48, 41, 34, 27, 20, 13, 6, 49, 42, 35, 28, 21, 14, 7, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 252, 259, 266, 273, 280, 287, 294, 251, 258, 265, 272, 279, 286, 293, 250, 257, 264, 271, 278, 285, 292, 249, 256, 263, 270, 277, 284, 291, 248, 255, 262, 269, 276, 283, 290, 247, 254, 261, 268, 275, 282, 289, 246, 253, 260, 267, 274, 281, 288,), "y'": ( 0, 7, 14, 21, 28, 35, 42, 49, 6, 13, 20, 27, 34, 41, 48, 5, 12, 19, 26, 33, 40, 47, 4, 11, 18, 25, 32, 39, 46, 3, 10, 17, 24, 31, 38, 45, 2, 9, 16, 23, 30, 37, 44, 1, 8, 15, 22, 29, 36, 43, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 
77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 288, 281, 274, 267, 260, 253, 246, 289, 282, 275, 268, 261, 254, 247, 290, 283, 276, 269, 262, 255, 248, 291, 284, 277, 270, 263, 256, 249, 292, 285, 278, 271, 264, 257, 250, 293, 286, 279, 272, 265, 258, 251, 294, 287, 280, 273, 266, 259, 252,), "y2": ( 0, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 294, 293, 292, 291, 290, 289, 288, 287, 286, 
285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246,), "z": ( 0, 92, 85, 78, 71, 64, 57, 50, 93, 86, 79, 72, 65, 58, 51, 94, 87, 80, 73, 66, 59, 52, 95, 88, 81, 74, 67, 60, 53, 96, 89, 82, 75, 68, 61, 54, 97, 90, 83, 76, 69, 62, 55, 98, 91, 84, 77, 70, 63, 56, 288, 281, 274, 267, 260, 253, 246, 289, 282, 275, 268, 261, 254, 247, 290, 283, 276, 269, 262, 255, 248, 291, 284, 277, 270, 263, 256, 249, 292, 285, 278, 271, 264, 257, 250, 293, 286, 279, 272, 265, 258, 251, 294, 287, 280, 273, 266, 259, 252, 141, 134, 127, 120, 113, 106, 99, 142, 135, 128, 121, 114, 107, 100, 143, 136, 129, 122, 115, 108, 101, 144, 137, 130, 123, 116, 109, 102, 145, 138, 131, 124, 117, 110, 103, 146, 139, 132, 125, 118, 111, 104, 147, 140, 133, 126, 119, 112, 105, 43, 36, 29, 22, 15, 8, 1, 44, 37, 30, 23, 16, 9, 2, 45, 38, 31, 24, 17, 10, 3, 46, 39, 32, 25, 18, 11, 4, 47, 40, 33, 26, 19, 12, 5, 48, 41, 34, 27, 20, 13, 6, 49, 42, 35, 28, 21, 14, 7, 203, 210, 217, 224, 231, 238, 245, 202, 209, 216, 223, 230, 237, 244, 201, 208, 215, 222, 229, 236, 243, 200, 207, 214, 221, 228, 235, 242, 199, 206, 213, 220, 227, 234, 241, 198, 205, 212, 219, 226, 233, 240, 197, 204, 211, 218, 225, 232, 239, 190, 183, 176, 169, 162, 155, 148, 191, 184, 177, 170, 163, 156, 149, 192, 185, 178, 171, 164, 157, 150, 193, 186, 179, 172, 165, 158, 151, 194, 187, 180, 173, 166, 159, 152, 195, 188, 181, 174, 167, 160, 153, 196, 189, 182, 175, 168, 161, 154,), "z'": ( 0, 154, 161, 168, 175, 182, 189, 196, 153, 160, 167, 174, 181, 188, 195, 152, 159, 166, 173, 180, 187, 194, 151, 158, 165, 172, 179, 186, 193, 150, 157, 164, 171, 178, 185, 192, 149, 156, 163, 170, 177, 184, 191, 148, 155, 162, 169, 176, 183, 190, 7, 14, 21, 28, 35, 42, 49, 6, 13, 20, 27, 34, 41, 48, 5, 12, 19, 26, 33, 40, 47, 4, 11, 18, 25, 32, 39, 46, 3, 10, 17, 24, 31, 38, 45, 2, 9, 16, 23, 30, 37, 44, 1, 8, 
15, 22, 29, 36, 43, 105, 112, 119, 126, 133, 140, 147, 104, 111, 118, 125, 132, 139, 146, 103, 110, 117, 124, 131, 138, 145, 102, 109, 116, 123, 130, 137, 144, 101, 108, 115, 122, 129, 136, 143, 100, 107, 114, 121, 128, 135, 142, 99, 106, 113, 120, 127, 134, 141, 252, 259, 266, 273, 280, 287, 294, 251, 258, 265, 272, 279, 286, 293, 250, 257, 264, 271, 278, 285, 292, 249, 256, 263, 270, 277, 284, 291, 248, 255, 262, 269, 276, 283, 290, 247, 254, 261, 268, 275, 282, 289, 246, 253, 260, 267, 274, 281, 288, 239, 232, 225, 218, 211, 204, 197, 240, 233, 226, 219, 212, 205, 198, 241, 234, 227, 220, 213, 206, 199, 242, 235, 228, 221, 214, 207, 200, 243, 236, 229, 222, 215, 208, 201, 244, 237, 230, 223, 216, 209, 202, 245, 238, 231, 224, 217, 210, 203, 56, 63, 70, 77, 84, 91, 98, 55, 62, 69, 76, 83, 90, 97, 54, 61, 68, 75, 82, 89, 96, 53, 60, 67, 74, 81, 88, 95, 52, 59, 66, 73, 80, 87, 94, 51, 58, 65, 72, 79, 86, 93, 50, 57, 64, 71, 78, 85, 92,), "z2": ( 0, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282, 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265, 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248, 247, 246, 196, 195, 194, 193, 192, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, 164, 163, 162, 161, 160, 159, 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, 99, 98, 97, 96, 95, 94, 93, 92, 91, 90, 89, 88, 87, 86, 85, 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, 226, 225, 
224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1,), }
def rotate_777(cube, step):
    """Return the 7x7x7 cube state produced by applying *step* to *cube*.

    The move is realised as a pure permutation: the module-level
    ``swaps_777`` table maps each step name to a tuple of source
    indices, and the new state is the cube gathered in that order.
    """
    permutation = swaps_777[step]
    return list(map(cube.__getitem__, permutation))
| 94.9625
| 136,197
| 0.56265
| 38,890
| 220,313
| 3.149653
| 0.028465
| 0.010058
| 0.004213
| 0.004898
| 0.869206
| 0.842624
| 0.679533
| 0.674512
| 0.665499
| 0.664324
| 0
| 0.517484
| 0.252073
| 220,313
| 2,319
| 136,198
| 95.00345
| 0.22588
| 0.077962
| 0
| 0.526124
| 0
| 0
| 0.070017
| 0.052986
| 0
| 0
| 0
| 0
| 0.001215
| 1
| 0.047388
| false
| 0
| 0.003645
| 0.000608
| 0.104496
| 0.006075
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a80636cb896688cbdf4d552a5c6c25701e6c742b
| 149
|
py
|
Python
|
1-HRF-xgb/repre/__init__.py
|
iamlockelightning/HIF-KAT
|
6845e88a4833b2da4a738035f1b02273ea75b703
|
[
"MIT"
] | 6
|
2021-06-21T05:16:17.000Z
|
2022-02-11T21:00:51.000Z
|
1-HRF-xgb/repre/__init__.py
|
iamlockelightning/HIF-KAT
|
6845e88a4833b2da4a738035f1b02273ea75b703
|
[
"MIT"
] | 1
|
2021-11-27T11:52:43.000Z
|
2021-12-09T09:10:05.000Z
|
1-HRF-xgb/repre/__init__.py
|
iamlockelightning/HIF-KAT
|
6845e88a4833b2da4a738035f1b02273ea75b703
|
[
"MIT"
] | 1
|
2021-10-18T03:51:54.000Z
|
2021-10-18T03:51:54.000Z
|
from .datamodel import *
from .embedding import *
from .summarize import *
from .attention import *
from .graphconv import *
from .represent import *
| 24.833333
| 24
| 0.765101
| 18
| 149
| 6.333333
| 0.444444
| 0.438596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154362
| 149
| 6
| 25
| 24.833333
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a8089f97b3bd80b2feb4479c8c302b6a7dc37e0a
| 233
|
py
|
Python
|
reactics-smt/logics/__init__.py
|
arturmeski/reactics
|
a565b5bf5ec671ccad4bbdab38ad264b9d8369cc
|
[
"MIT"
] | 2
|
2019-03-04T08:51:00.000Z
|
2019-11-04T10:42:13.000Z
|
reactics-smt/logics/__init__.py
|
arturmeski/reactics
|
a565b5bf5ec671ccad4bbdab38ad264b9d8369cc
|
[
"MIT"
] | null | null | null |
reactics-smt/logics/__init__.py
|
arturmeski/reactics
|
a565b5bf5ec671ccad4bbdab38ad264b9d8369cc
|
[
"MIT"
] | null | null | null |
from logics.rsltl import Formula_rsLTL
from logics.bags import BagDescription
from logics.param_constr import ParamConstraint
from logics.rsltl_encoder import rsLTL_Encoder
from logics.param_constr_encoder import ParamConstr_Encoder
| 38.833333
| 59
| 0.892704
| 32
| 233
| 6.28125
| 0.375
| 0.248756
| 0.149254
| 0.208955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085837
| 233
| 5
| 60
| 46.6
| 0.943662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b5496a16809b7e2149e7dd72e66c7e6662ef9a0a
| 7,051
|
py
|
Python
|
run.py
|
sithhell/hpxcbenchmarks
|
3687081a1bab5ffa872576f4ff8267f32d4fcc85
|
[
"BSL-1.0"
] | 1
|
2020-10-24T14:12:59.000Z
|
2020-10-24T14:12:59.000Z
|
run.py
|
sithhell/hpxcbenchmarks
|
3687081a1bab5ffa872576f4ff8267f32d4fcc85
|
[
"BSL-1.0"
] | null | null | null |
run.py
|
sithhell/hpxcbenchmarks
|
3687081a1bab5ffa872576f4ff8267f32d4fcc85
|
[
"BSL-1.0"
] | null | null | null |
#!/usr/bin/env python
"""Drive the hpxc benchmark suite and collect Google-Benchmark JSON output.

Usage: run.py [MAX_CORES [GROUP ...]]

Without arguments every benchmark group runs using all available cores.
Results are written to runs/<hostname>/<benchmark>[suffix].json.
"""
import subprocess, os, sys, socket
import multiprocessing

if len(sys.argv) >= 2:
    max_cores = int(sys.argv[1])
    run = sys.argv[2:]          # explicit list of group names to run
else:
    max_cores = multiprocessing.cpu_count()
    run = 'all'                 # sentinel: run every group

my_env = os.environ.copy()
result_dir = os.path.join('runs', socket.gethostname())


def _result_file(name):
    """Return runs/<host>/<name>.json, creating parent directories as needed."""
    result = os.path.join(result_dir, name + '.json')
    parent = os.path.dirname(result)
    if not os.path.exists(parent):
        os.makedirs(parent)
    return result


def _execute(bench):
    """Spawn one benchmark process and echo its merged stdout/stderr live."""
    p = subprocess.Popen(bench, env=my_env,
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    for line in iter(p.stdout.readline, b''):
        print(line.rstrip())
    p.wait()


if 'tasks' in run or run == 'all':
    # Task-overhead benchmarks: single worker thread, HPX networking off.
    my_env['NUM_THREADS'] = '1'
    my_env['OMP_NUM_THREADS'] = '1'
    for benchmark in ['tasks/coroutines_overhead',
                      'tasks/future_overhead',
                      'tasks/hpx_thread_overhead',
                      'tasks/omp_overhead']:
        print(' %s' % benchmark)
        result = _result_file(benchmark)
        bench = [os.path.join(os.getcwd(), benchmark),
                 '--hpx:ini=hpx.parcel.enable=0', '--hpx:threads=1',
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        print(bench)
        _execute(bench)
    # std::thread variant takes no HPX flags.
    for benchmark in ['tasks/std_thread_overhead']:
        print(' %s' % benchmark)
        result = _result_file(benchmark)
        bench = [os.path.join(os.getcwd(), benchmark),
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        print(bench)
        _execute(bench)

if 'scheduling' in run or run == 'all':
    benchmarks = ['scheduling/hpx_scheduling',
                  'scheduling/omp_scheduling',
                  'scheduling/seq_scheduling',
                  'scheduling/std_scheduling']
    # BUG FIX: range objects are immutable on Python 3, so the original
    # `r = range(...); r[0] = 1` raised TypeError. Materialize the range
    # first; the thread counts are then 1, 2, 4, ..., max_cores.
    r = list(range(0, max_cores + 1, 2))
    r[0] = 1
    for threads in r:
        my_env['NUM_THREADS'] = str(threads)
        for benchmark in benchmarks:
            # The sequential baseline only makes sense single-threaded.
            if benchmark == 'scheduling/seq_scheduling' and threads > 1:
                continue
            print(' %s' % benchmark)
            result = _result_file(benchmark + ('_t%s' % threads))
            bench = [os.path.join(os.getcwd(), benchmark),
                     '--hpx:ini=hpx.parcel.enable=0',
                     '-Ihpx.stacks.use_guard_pages=0',
                     '--benchmark_out_format=json',
                     '--benchmark_out=' + result]
            _execute(bench)

if 'serialization' in run or run == 'all':
    my_env['NUM_THREADS'] = '1'
    my_env['OMP_NUM_THREADS'] = '1'
    for benchmark in ['distributed/serialization_overhead']:
        print(' %s' % benchmark)
        result = _result_file(benchmark)
        bench = [os.path.join(os.getcwd(), benchmark),
                 '--hpx:ini=hpx.parcel.enable=0', '--hpx:threads=1',
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        _execute(bench)

if 'distributed' in run or run == 'all':
    # Distributed benchmarks are launched through srun with all cores.
    my_env['NUM_THREADS'] = '%s' % max_cores
    my_env['OMP_NUM_THREADS'] = '1'
    for benchmark in ['distributed/async_latency',
                      'distributed/channel_send_recv',
                      'distributed/components']:
        print(' %s' % benchmark)
        result = _result_file(benchmark)
        bench = ['srun', '--pty', os.path.join(os.getcwd(), benchmark),
                 '--hpx:threads=%s' % max_cores,
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        _execute(bench)
    # MPI variant takes no HPX threading flag.
    for benchmark in ['distributed/mpi_latency']:
        print(' %s' % benchmark)
        result = _result_file(benchmark)
        bench = ['srun', '--pty', os.path.join(os.getcwd(), benchmark),
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        _execute(bench)

if 'broadcast' in run or run == 'all':
    my_env['NUM_THREADS'] = '%s' % max_cores
    my_env['OMP_NUM_THREADS'] = '1'
    # Raises KeyError when not inside a SLURM allocation (as the original did).
    nodes = my_env['SLURM_NNODES']
    for benchmark in ['distributed/broadcast']:
        print(' %s' % benchmark)
        result = _result_file("%s_%s" % (benchmark, nodes))
        bench = ['srun', '--pty', os.path.join(os.getcwd(), benchmark),
                 '--hpx:threads=%s' % max_cores,
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        _execute(bench)
    for benchmark in ['distributed/mpi_broadcast']:
        print(' %s' % benchmark)
        result = _result_file("%s_%s" % (benchmark, nodes))
        bench = ['srun', '--pty', os.path.join(os.getcwd(), benchmark),
                 '--benchmark_out_format=json', '--benchmark_out=' + result]
        _execute(bench)
| 39.391061
| 184
| 0.593391
| 879
| 7,051
| 4.630262
| 0.114903
| 0.060442
| 0.041769
| 0.074693
| 0.827764
| 0.827764
| 0.816462
| 0.7914
| 0.7914
| 0.7914
| 0
| 0.004718
| 0.248475
| 7,051
| 178
| 185
| 39.61236
| 0.763352
| 0.002836
| 0
| 0.696203
| 0
| 0
| 0.192888
| 0.104267
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012658
| 0
| 0.012658
| 0.113924
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b54ab5874a6ddb8a0e4874955db8700f5d953d22
| 24
|
py
|
Python
|
napari/layers/base/__init__.py
|
marshuang80/napari
|
10f1d0f39fe9ccd42456c95458e2f23b59450f02
|
[
"BSD-3-Clause"
] | 1
|
2021-04-24T10:10:54.000Z
|
2021-04-24T10:10:54.000Z
|
napari/layers/base/__init__.py
|
marshuang80/napari
|
10f1d0f39fe9ccd42456c95458e2f23b59450f02
|
[
"BSD-3-Clause"
] | 17
|
2020-06-11T21:02:03.000Z
|
2021-02-02T19:10:19.000Z
|
napari/layers/base/__init__.py
|
marshuang80/napari
|
10f1d0f39fe9ccd42456c95458e2f23b59450f02
|
[
"BSD-3-Clause"
] | 1
|
2020-07-19T18:03:35.000Z
|
2020-07-19T18:03:35.000Z
|
from .base import Layer
| 12
| 23
| 0.791667
| 4
| 24
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b56542a4f73770346ac07c551841d581fdf3521c
| 389
|
py
|
Python
|
pytorch_lightning/accelerator_backends/__init__.py
|
stas00/pytorch-lightning
|
84c507c4df5f5c336deb19ce7f70fa02329f39f6
|
[
"Apache-2.0"
] | 1
|
2021-06-10T07:12:58.000Z
|
2021-06-10T07:12:58.000Z
|
pytorch_lightning/accelerator_backends/__init__.py
|
stas00/pytorch-lightning
|
84c507c4df5f5c336deb19ce7f70fa02329f39f6
|
[
"Apache-2.0"
] | null | null | null |
pytorch_lightning/accelerator_backends/__init__.py
|
stas00/pytorch-lightning
|
84c507c4df5f5c336deb19ce7f70fa02329f39f6
|
[
"Apache-2.0"
] | null | null | null |
from pytorch_lightning.accelerator_backends.gpu_backend import GPUBackend
from pytorch_lightning.accelerator_backends.tpu_backend import TPUBackend
from pytorch_lightning.accelerator_backends.dp_backend import DataParallelBackend
from pytorch_lightning.accelerator_backends.ddp_spawn_backend import DDPSpawnBackend
from pytorch_lightning.accelerator_backends.cpu_backend import CPUBackend
| 64.833333
| 84
| 0.922879
| 46
| 389
| 7.456522
| 0.391304
| 0.16035
| 0.291545
| 0.451895
| 0.568513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051414
| 389
| 5
| 85
| 77.8
| 0.929539
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a938e3a23d2ed9d716d9be2f8c42cf0330d651d6
| 1,818
|
py
|
Python
|
tests/test_main.py
|
onetinov/gimme-aws-creds
|
fb2519a3da1b771b6c1ab47ec30850eaed1cf33a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_main.py
|
onetinov/gimme-aws-creds
|
fb2519a3da1b771b6c1ab47ec30850eaed1cf33a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_main.py
|
onetinov/gimme-aws-creds
|
fb2519a3da1b771b6c1ab47ec30850eaed1cf33a
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from mock import patch
from gimme_aws_creds.main import GimmeAWSCreds, RoleSet
class TestMain(unittest.TestCase):
    """Exercise GimmeAWSCreds role/app menu selection with mocked input()."""

    # Two IdP/role pairs offered by the role chooser.
    APP_INFO = [
        RoleSet(idp='idp', role='test1'),
        RoleSet(idp='idp', role='test2')
    ]
    # Two AWS app entries offered by the app chooser.
    AWS_INFO = [
        {'name': 'test1'},
        {'name': 'test2'}
    ]

    @patch('builtins.input', return_value='-1')
    def test_choose_role_app_neg1(self, mock_input):
        """A negative menu index aborts both choosers."""
        gimme = GimmeAWSCreds()
        self.assertRaises(SystemExit, gimme._choose_role, self.APP_INFO)
        self.assertRaises(SystemExit, gimme._choose_app, self.AWS_INFO)

    @patch('builtins.input', return_value='0')
    def test_choose_role_app_0(self, mock_input):
        """Index 0 selects the first role and the first app."""
        gimme = GimmeAWSCreds()
        self.assertEqual(gimme._choose_role(self.APP_INFO), self.APP_INFO[0].role)
        self.assertEqual(gimme._choose_app(self.AWS_INFO), self.AWS_INFO[0])

    @patch('builtins.input', return_value='1')
    def test_choose_role_app_1(self, mock_input):
        """Index 1 selects the second role and the second app."""
        gimme = GimmeAWSCreds()
        self.assertEqual(gimme._choose_role(self.APP_INFO), self.APP_INFO[1].role)
        self.assertEqual(gimme._choose_app(self.AWS_INFO), self.AWS_INFO[1])

    @patch('builtins.input', return_value='2')
    def test_choose_role_app_2(self, mock_input):
        """An out-of-range index aborts both choosers."""
        gimme = GimmeAWSCreds()
        self.assertRaises(SystemExit, gimme._choose_role, self.APP_INFO)
        self.assertRaises(SystemExit, gimme._choose_app, self.AWS_INFO)

    @patch('builtins.input', return_value='a')
    def test_choose_role_app_a(self, mock_input):
        """Non-numeric input aborts both choosers."""
        gimme = GimmeAWSCreds()
        self.assertRaises(SystemExit, gimme._choose_role, self.APP_INFO)
        self.assertRaises(SystemExit, gimme._choose_app, self.AWS_INFO)
| 33.666667
| 72
| 0.679318
| 228
| 1,818
| 5.140351
| 0.166667
| 0.085324
| 0.0657
| 0.158703
| 0.829352
| 0.753413
| 0.753413
| 0.753413
| 0.753413
| 0.753413
| 0
| 0.010966
| 0.19747
| 1,818
| 53
| 73
| 34.301887
| 0.792324
| 0
| 0
| 0.365854
| 0
| 0
| 0.060506
| 0
| 0
| 0
| 0
| 0
| 0.243902
| 1
| 0.121951
| false
| 0
| 0.073171
| 0
| 0.268293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a95fabe1c519430166e133339cf75f9768b18aff
| 271
|
py
|
Python
|
examples/traversals/configuration/__init__.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | 2
|
2021-12-13T17:52:17.000Z
|
2021-12-13T17:52:18.000Z
|
examples/traversals/configuration/__init__.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | 4
|
2020-10-06T21:06:15.000Z
|
2020-10-10T01:18:23.000Z
|
examples/traversals/configuration/__init__.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | null | null | null |
from .config import Config
from .config import DataConfig
from .config import get_config
from .config import HyperParamConfig
from .config import NetConfig
from .data import DataGenConfig
__all__ = ["Config", "NetConfig", "DataConfig", "HyperParamConfig", "get_config"]
| 30.111111
| 81
| 0.797048
| 32
| 271
| 6.5625
| 0.3125
| 0.238095
| 0.380952
| 0.209524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118081
| 271
| 8
| 82
| 33.875
| 0.878661
| 0
| 0
| 0
| 0
| 0
| 0.188192
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a997a15456ab8ff7cbe2396aae61d299a5040800
| 351
|
py
|
Python
|
buildscripts/BuildRoot/BuildRootCommandHandler.py
|
YuanYuLin/iopcbuilder
|
19537c9d651a7cd42432b9f6f6654c1ddeb0dbf0
|
[
"Apache-2.0"
] | null | null | null |
buildscripts/BuildRoot/BuildRootCommandHandler.py
|
YuanYuLin/iopcbuilder
|
19537c9d651a7cd42432b9f6f6654c1ddeb0dbf0
|
[
"Apache-2.0"
] | null | null | null |
buildscripts/BuildRoot/BuildRootCommandHandler.py
|
YuanYuLin/iopcbuilder
|
19537c9d651a7cd42432b9f6f6654c1ddeb0dbf0
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
from DefconfigParser import DefconfigParser
class BuildRootCommandHandler:
    """One link in a chain of command handlers for buildroot configs.

    Each handler holds a reference to the next handler in the chain and
    can forward a configuration object to that handler's ``action()``.
    """

    def __init__(self, next_command_handler):
        # Downstream handler; a falsy value terminates the chain.
        self.next_command_handler = next_command_handler

    def do_next_command_handler(self, config_obj):
        """Forward *config_obj* to the next handler's action(), if any."""
        successor = self.next_command_handler
        if successor:
            successor.action(config_obj)
| 27
| 56
| 0.777778
| 43
| 351
| 5.906977
| 0.418605
| 0.259843
| 0.425197
| 0.346457
| 0.346457
| 0.346457
| 0.346457
| 0.346457
| 0
| 0
| 0
| 0
| 0.173789
| 351
| 12
| 57
| 29.25
| 0.875862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.