repo_id stringlengths 21 96 | file_path stringlengths 31 155 | content stringlengths 1 92.9M | __index_level_0__ int64 0 0 |
|---|---|---|---|
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/kmeans/kmeans_predict.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <raft/core/handle.hpp>
#include <raft/cluster/kmeans.cuh>
#include <raft/cluster/kmeans_types.hpp>
namespace ML {
namespace kmeans {
// ----------------------------- predict ---------------------------------//
// Shared implementation behind the four `predict` overloads below.
// Wraps the raw device pointers in raft mdspan views and dispatches to
// raft::cluster::kmeans_predict.
//
// @param handle            raft handle (stream, comms, resources)
// @param params            k-means hyper-parameters (n_clusters, metric, ...)
// @param centroids         device array [n_clusters x n_features] of fitted centers
// @param X                 device array [n_samples x n_features] to label
// @param sample_weight     optional device array [n_samples]; may be nullptr
// @param normalize_weights whether raft should rescale the weights internally
// @param labels            out: device array [n_samples] of nearest-centroid ids
// @param inertia           out: host scalar, sum of squared distances to centers
template <typename value_t, typename idx_t>
void predict_impl(const raft::handle_t& handle,
                  const raft::cluster::KMeansParams& params,
                  const value_t* centroids,
                  const value_t* X,
                  idx_t n_samples,
                  idx_t n_features,
                  const value_t* sample_weight,
                  bool normalize_weights,
                  idx_t* labels,
                  value_t& inertia)
{
  // Spell out the element and index types on every view so they match the
  // explicit <value_t, idx_t> instantiation of kmeans_predict below (and the
  // sibling fit_predict_impl), instead of relying on raft's default index
  // type plus an implicit (narrowing for int64 callers) mdspan conversion.
  auto X_view = raft::make_device_matrix_view<const value_t, idx_t>(X, n_samples, n_features);
  // Sample weights are optional: only build a view when the caller passed one.
  std::optional<raft::device_vector_view<const value_t, idx_t>> sw = std::nullopt;
  if (sample_weight != nullptr)
    sw = std::make_optional(
      raft::make_device_vector_view<const value_t, idx_t>(sample_weight, n_samples));
  auto centroids_view =
    raft::make_device_matrix_view<const value_t, idx_t>(centroids, params.n_clusters, n_features);
  auto rLabels      = raft::make_device_vector_view<idx_t, idx_t>(labels, n_samples);
  auto inertia_view = raft::make_host_scalar_view<value_t>(&inertia);
  raft::cluster::kmeans_predict<value_t, idx_t>(
    handle, params, X_view, sw, centroids_view, rLabels, normalize_weights, inertia_view);
}
// Public overload: single precision, 32-bit indices. Forwards to the shared
// template implementation.
void predict(const raft::handle_t& handle,
             const raft::cluster::KMeansParams& params,
             const float* centroids,
             const float* X,
             int n_samples,
             int n_features,
             const float* sample_weight,
             bool normalize_weights,
             int* labels,
             float& inertia)
{
  predict_impl<float, int>(handle,
                           params,
                           centroids,
                           X,
                           n_samples,
                           n_features,
                           sample_weight,
                           normalize_weights,
                           labels,
                           inertia);
}
// Public overload: double precision, 32-bit indices. Forwards to the shared
// template implementation.
void predict(const raft::handle_t& handle,
             const raft::cluster::KMeansParams& params,
             const double* centroids,
             const double* X,
             int n_samples,
             int n_features,
             const double* sample_weight,
             bool normalize_weights,
             int* labels,
             double& inertia)
{
  predict_impl<double, int>(handle,
                            params,
                            centroids,
                            X,
                            n_samples,
                            n_features,
                            sample_weight,
                            normalize_weights,
                            labels,
                            inertia);
}
// Public overload: single precision, 64-bit indices. Forwards to the shared
// template implementation.
void predict(const raft::handle_t& handle,
             const raft::cluster::KMeansParams& params,
             const float* centroids,
             const float* X,
             int64_t n_samples,
             int64_t n_features,
             const float* sample_weight,
             bool normalize_weights,
             int64_t* labels,
             float& inertia)
{
  predict_impl<float, int64_t>(handle,
                               params,
                               centroids,
                               X,
                               n_samples,
                               n_features,
                               sample_weight,
                               normalize_weights,
                               labels,
                               inertia);
}
// Public overload: double precision, 64-bit indices. Forwards to the shared
// template implementation.
void predict(const raft::handle_t& handle,
             const raft::cluster::KMeansParams& params,
             const double* centroids,
             const double* X,
             int64_t n_samples,
             int64_t n_features,
             const double* sample_weight,
             bool normalize_weights,
             int64_t* labels,
             double& inertia)
{
  predict_impl<double, int64_t>(handle,
                                params,
                                centroids,
                                X,
                                n_samples,
                                n_features,
                                sample_weight,
                                normalize_weights,
                                labels,
                                inertia);
}
}; // end namespace kmeans
}; // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/kmeans/kmeans_mg.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "kmeans_mg_impl.cuh"
#include <cuml/cluster/kmeans_mg.hpp>
#include <raft/cluster/kmeans_types.hpp>
namespace ML {
namespace kmeans {
namespace opg {
// ----------------------------- fit ---------------------------------//
// Multi-GPU fit entry point: single precision, 32-bit indices.
void fit(const raft::handle_t& handle,
         const raft::cluster::KMeansParams& params,
         const float* X,
         int n_samples,
         int n_features,
         const float* sample_weight,
         float* centroids,
         float& inertia,
         int& n_iter)
{
  // Synchronizes the handle's streams on scope exit, after impl::fit returns.
  raft::stream_syncer sync(handle);
  impl::fit(handle, params, X, n_samples, n_features, sample_weight, centroids, inertia, n_iter);
}
// Multi-GPU fit entry point: double precision, 32-bit indices.
void fit(const raft::handle_t& handle,
         const raft::cluster::KMeansParams& params,
         const double* X,
         int n_samples,
         int n_features,
         const double* sample_weight,
         double* centroids,
         double& inertia,
         int& n_iter)
{
  // Synchronizes the handle's streams on scope exit, after impl::fit returns.
  raft::stream_syncer sync(handle);
  impl::fit(handle, params, X, n_samples, n_features, sample_weight, centroids, inertia, n_iter);
}
// Multi-GPU fit entry point: single precision, 64-bit indices.
void fit(const raft::handle_t& handle,
         const raft::cluster::KMeansParams& params,
         const float* X,
         int64_t n_samples,
         int64_t n_features,
         const float* sample_weight,
         float* centroids,
         float& inertia,
         int64_t& n_iter)
{
  // Synchronizes the handle's streams on scope exit, after impl::fit returns.
  raft::stream_syncer sync(handle);
  impl::fit(handle, params, X, n_samples, n_features, sample_weight, centroids, inertia, n_iter);
}
// Multi-GPU fit entry point: double precision, 64-bit indices.
void fit(const raft::handle_t& handle,
         const raft::cluster::KMeansParams& params,
         const double* X,
         int64_t n_samples,
         int64_t n_features,
         const double* sample_weight,
         double* centroids,
         double& inertia,
         int64_t& n_iter)
{
  // Synchronizes the handle's streams on scope exit, after impl::fit returns.
  raft::stream_syncer sync(handle);
  impl::fit(handle, params, X, n_samples, n_features, sample_weight, centroids, inertia, n_iter);
}
}; // end namespace opg
}; // end namespace kmeans
}; // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/kmeans/kmeans_mg_impl.cuh | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cuml/common/logger.hpp>
#include <raft/cluster/kmeans.cuh>
#include <raft/cluster/kmeans_types.hpp>
#include <raft/core/device_mdarray.hpp>
#include <raft/core/handle.hpp>
#include <raft/core/host_mdarray.hpp>
#include <raft/matrix/gather.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_scalar.hpp>
#include <rmm/device_uvector.hpp>
#include <ml_cuda_utils.h>
#include <thrust/execution_policy.h>
#include <thrust/fill.h>
#include <thrust/reduce.h>
#include <thrust/scan.h>
#include <thrust/transform.h>
#include <cstdint>
namespace ML {
#define CUML_LOG_KMEANS(handle, fmt, ...) \
do { \
bool isRoot = true; \
if (handle.comms_initialized()) { \
const auto& comm = handle.get_comms(); \
const int my_rank = comm.get_rank(); \
isRoot = my_rank == 0; \
} \
if (isRoot) { CUML_LOG_DEBUG(fmt, ##__VA_ARGS__); } \
} while (0)
namespace kmeans {
namespace opg {
namespace impl {
#define KMEANS_COMM_ROOT 0
static raft::cluster::kmeans::KMeansParams default_params;
// Initializes the centroids by sampling 'params.n_clusters' rows uniformly at
// random from the data distributed across all ranks. Each participating rank
// draws a share of the centroids from its local partition of X, and the
// samples are then allgather'd so every rank ends up with the full, identical
// centroid set.
template <typename DataT, typename IndexT>
void initRandom(const raft::handle_t& handle,
                const raft::cluster::kmeans::KMeansParams& params,
                raft::device_matrix_view<const DataT, IndexT> X,
                raft::device_matrix_view<DataT, IndexT> centroids)
{
  const auto& comm     = handle.get_comms();
  cudaStream_t stream  = handle.get_stream();
  auto n_local_samples = X.extent(0);
  auto n_features      = X.extent(1);
  auto n_clusters      = params.n_clusters;
  const int my_rank    = comm.get_rank();
  const int n_ranks    = comm.get_size();
  // Per-rank sample counts (rows) and the corresponding element counts
  // (rows * n_features) expected from each rank in the allgatherv below.
  std::vector<int> nCentroidsSampledByRank(n_ranks, 0);
  std::vector<size_t> nCentroidsElementsToReceiveFromRank(n_ranks, 0);
  // Only min(n_ranks, n_clusters) ranks contribute samples; with fewer
  // clusters than ranks, the remaining ranks sample nothing.
  const int nranks_reqd = std::min(n_ranks, n_clusters);
  ASSERT(KMEANS_COMM_ROOT < nranks_reqd, "KMEANS_COMM_ROOT must be in [0, %d)\n", nranks_reqd);
  for (int rank = 0; rank < nranks_reqd; ++rank) {
    int nCentroidsSampledInRank = n_clusters / nranks_reqd;
    if (rank == KMEANS_COMM_ROOT) {
      // The root rank absorbs the remainder when n_clusters doesn't divide
      // evenly across the contributing ranks.
      nCentroidsSampledInRank += n_clusters - nCentroidsSampledInRank * nranks_reqd;
    }
    nCentroidsSampledByRank[rank]             = nCentroidsSampledInRank;
    nCentroidsElementsToReceiveFromRank[rank] = nCentroidsSampledInRank * n_features;
  }
  auto nCentroidsSampledInRank = nCentroidsSampledByRank[my_rank];
  ASSERT((IndexT)nCentroidsSampledInRank <= (IndexT)n_local_samples,
         "# random samples requested from rank-%d is larger than the available "
         "samples at the rank (requested is %lu, available is %lu)",
         my_rank,
         (size_t)nCentroidsSampledInRank,
         (size_t)n_local_samples);
  // Draw this rank's share of centroids at random from its local partition.
  auto centroidsSampledInRank =
    raft::make_device_matrix<DataT, IndexT>(handle, nCentroidsSampledInRank, n_features);
  raft::cluster::kmeans::shuffle_and_gather(
    handle, X, centroidsSampledInRank.view(), nCentroidsSampledInRank, params.rng_state.seed);
  // Receive displacements (exclusive prefix sums of the element counts) for
  // the variable-size allgather.
  std::vector<size_t> displs(n_ranks);
  thrust::exclusive_scan(thrust::host,
                         nCentroidsElementsToReceiveFromRank.begin(),
                         nCentroidsElementsToReceiveFromRank.end(),
                         displs.begin());
  // gather centroids from all ranks
  comm.allgatherv<DataT>(centroidsSampledInRank.data_handle(),        // sendbuff
                         centroids.data_handle(),                     // recvbuff
                         nCentroidsElementsToReceiveFromRank.data(),  // recvcount
                         displs.data(),
                         stream);
}
/*
 * @brief Selects 'n_clusters' samples from X using scalable kmeans++ algorithm
 * Scalable kmeans++ pseudocode
 * 1: C = sample a point uniformly at random from X
 * 2: psi = phi_X (C)
 * 3: for O( log(psi) ) times do
 * 4:   C' = sample each point x in X independently with probability
 *          p_x = l * ( d^2(x, C) / phi_X (C) )
 * 5:   C = C U C'
 * 6: end for
 * 7: For x in C, set w_x to be the number of points in X closer to x than any
 *    other point in C
 * 8: Recluster the weighted points in C into k clusters
 *
 * Fix vs. previous revision: IndexT-typed values (potentialCentroids.extent(0),
 * n_random_clusters) were passed to printf-style "%d" conversions; for 64-bit
 * index instantiations that is a varargs/format mismatch (undefined behavior).
 * They are now explicitly cast to int for logging.
 */
template <typename DataT, typename IndexT>
void initKMeansPlusPlus(const raft::handle_t& handle,
                        const raft::cluster::kmeans::KMeansParams& params,
                        raft::device_matrix_view<const DataT, IndexT> X,
                        raft::device_matrix_view<DataT, IndexT> centroidsRawData,
                        rmm::device_uvector<char>& workspace)
{
  const auto& comm    = handle.get_comms();
  cudaStream_t stream = handle.get_stream();
  const int my_rank   = comm.get_rank();
  const int n_rank    = comm.get_size();
  auto n_samples      = X.extent(0);
  auto n_features     = X.extent(1);
  auto n_clusters     = params.n_clusters;
  auto metric         = params.metric;
  raft::random::RngState rng(params.rng_state.seed, raft::random::GeneratorType::GenPhilox);
  // <<<< Step-1 >>> : C <- sample a point uniformly at random from X
  // 1.1 - Select a rank r' at random from the available n_rank ranks with a
  //       probability of 1/n_rank [Note - with same seed all rank selects
  //       the same r' which avoids a call to comm]
  // 1.2 - Rank r' samples a point uniformly at random from the local dataset
  //       X which will be used as the initial centroid for kmeans++
  // 1.3 - Communicate the initial centroid chosen by rank-r' to all other
  //       ranks
  std::mt19937 gen(params.rng_state.seed);
  std::uniform_int_distribution<> dis(0, n_rank - 1);
  int rp = dis(gen);
  // buffer to flag the sample that is chosen as initial centroids
  std::vector<std::uint8_t> h_isSampleCentroid(n_samples);
  std::fill(h_isSampleCentroid.begin(), h_isSampleCentroid.end(), 0);
  auto initialCentroid = raft::make_device_matrix<DataT, IndexT>(handle, 1, n_features);
  CUML_LOG_KMEANS(
    handle, "@Rank-%d : KMeans|| : initial centroid is sampled at rank-%d\n", my_rank, rp);
  // 1.2 - Rank r' samples a point uniformly at random from the local dataset
  //       X which will be used as the initial centroid for kmeans++
  if (my_rank == rp) {
    std::mt19937 gen(params.rng_state.seed);
    std::uniform_int_distribution<> dis(0, n_samples - 1);
    int cIdx           = dis(gen);
    auto centroidsView = raft::make_device_matrix_view<const DataT, IndexT>(
      X.data_handle() + cIdx * n_features, 1, n_features);
    raft::copy(
      initialCentroid.data_handle(), centroidsView.data_handle(), centroidsView.size(), stream);
    h_isSampleCentroid[cIdx] = 1;
  }
  // 1.3 - Communicate the initial centroid chosen by rank-r' to all other ranks
  comm.bcast<DataT>(initialCentroid.data_handle(), initialCentroid.size(), rp, stream);
  // device buffer to flag the sample that is chosen as initial centroid
  auto isSampleCentroid = raft::make_device_vector<std::uint8_t, IndexT>(handle, n_samples);
  raft::copy(
    isSampleCentroid.data_handle(), h_isSampleCentroid.data(), isSampleCentroid.size(), stream);
  // Growing buffer that accumulates the candidate centroid set C across the
  // sampling rounds; potentialCentroids is a view over its current contents.
  rmm::device_uvector<DataT> centroidsBuf(0, stream);
  // reset buffer to store the chosen centroid
  centroidsBuf.resize(initialCentroid.size(), stream);
  raft::copy(centroidsBuf.begin(), initialCentroid.data_handle(), initialCentroid.size(), stream);
  auto potentialCentroids = raft::make_device_matrix_view<DataT, IndexT>(
    centroidsBuf.data(), initialCentroid.extent(0), initialCentroid.extent(1));
  // <<< End of Step-1 >>>
  rmm::device_uvector<DataT> L2NormBuf_OR_DistBuf(0, stream);
  // L2 norm of X: ||x||^2 (only needed for the expanded L2 distance variants)
  auto L2NormX = raft::make_device_vector<DataT, IndexT>(handle, n_samples);
  if (metric == raft::distance::DistanceType::L2Expanded ||
      metric == raft::distance::DistanceType::L2SqrtExpanded) {
    raft::linalg::rowNorm(L2NormX.data_handle(),
                          X.data_handle(),
                          X.extent(1),
                          X.extent(0),
                          raft::linalg::L2Norm,
                          true,
                          stream);
  }
  auto minClusterDistance = raft::make_device_vector<DataT, IndexT>(handle, n_samples);
  auto uniformRands       = raft::make_device_vector<DataT, IndexT>(handle, n_samples);
  // <<< Step-2 >>>: psi <- phi_X (C)
  auto clusterCost = raft::make_device_scalar<DataT>(handle, 0);
  raft::cluster::kmeans::min_cluster_distance(handle,
                                              X,
                                              potentialCentroids,
                                              minClusterDistance.view(),
                                              L2NormX.view(),
                                              L2NormBuf_OR_DistBuf,
                                              params.metric,
                                              params.batch_samples,
                                              params.batch_centroids,
                                              workspace);
  // compute partial cluster cost from the samples in rank
  raft::cluster::kmeans::cluster_cost(
    handle,
    minClusterDistance.view(),
    workspace,
    clusterCost.view(),
    [] __device__(const DataT& a, const DataT& b) { return a + b; });
  // compute total cluster cost by accumulating the partial cost from all the
  // ranks
  comm.allreduce(
    clusterCost.data_handle(), clusterCost.data_handle(), 1, raft::comms::op_t::SUM, stream);
  DataT psi = 0;
  raft::copy(&psi, clusterCost.data_handle(), 1, stream);
  // <<< End of Step-2 >>>
  ASSERT(comm.sync_stream(stream) == raft::comms::status_t::SUCCESS,
         "An error occurred in the distributed operation. This can result from "
         "a failed rank");
  // Scalable kmeans++ paper claims 8 rounds is sufficient
  int niter = std::min(8, (int)ceil(log(psi)));
  CUML_LOG_KMEANS(handle,
                  "@Rank-%d:KMeans|| :phi - %f, max # of iterations for kmeans++ loop - "
                  "%d\n",
                  my_rank,
                  psi,
                  niter);
  // <<<< Step-3 >>> : for O( log(psi) ) times do
  for (int iter = 0; iter < niter; ++iter) {
    CUML_LOG_KMEANS(handle,
                    "@Rank-%d:KMeans|| - Iteration %d: # potential centroids sampled - "
                    "%d\n",
                    my_rank,
                    iter,
                    (int)potentialCentroids.extent(0));
    raft::cluster::kmeans::min_cluster_distance(handle,
                                                X,
                                                potentialCentroids,
                                                minClusterDistance.view(),
                                                L2NormX.view(),
                                                L2NormBuf_OR_DistBuf,
                                                params.metric,
                                                params.batch_samples,
                                                params.batch_centroids,
                                                workspace);
    raft::cluster::kmeans::cluster_cost(
      handle,
      minClusterDistance.view(),
      workspace,
      clusterCost.view(),
      [] __device__(const DataT& a, const DataT& b) { return a + b; });
    comm.allreduce(
      clusterCost.data_handle(), clusterCost.data_handle(), 1, raft::comms::op_t::SUM, stream);
    raft::copy(&psi, clusterCost.data_handle(), 1, stream);
    ASSERT(comm.sync_stream(stream) == raft::comms::status_t::SUCCESS,
           "An error occurred in the distributed operation. This can result "
           "from a failed rank");
    // <<<< Step-4 >>> : Sample each point x in X independently and identify new
    // potentialCentroids
    raft::random::uniform(
      handle, rng, uniformRands.data_handle(), uniformRands.extent(0), (DataT)0, (DataT)1);
    raft::cluster::kmeans::SamplingOp<DataT, IndexT> select_op(psi,
                                                               params.oversampling_factor,
                                                               n_clusters,
                                                               uniformRands.data_handle(),
                                                               isSampleCentroid.data_handle());
    rmm::device_uvector<DataT> inRankCp(0, stream);
    raft::cluster::kmeans::sample_centroids(handle,
                                            X,
                                            minClusterDistance.view(),
                                            isSampleCentroid.view(),
                                            select_op,
                                            inRankCp,
                                            workspace);
    /// <<<< End of Step-4 >>>>
    // Pinned host buffer: number of candidates sampled by each rank this round.
    int* nPtsSampledByRank;
    RAFT_CUDA_TRY(cudaMallocHost(&nPtsSampledByRank, n_rank * sizeof(int)));
    /// <<<< Step-5 >>> : C = C U C'
    // append the data in Cp from all ranks to the buffer holding the
    // potentialCentroids
    // RAFT_CUDA_TRY(cudaMemsetAsync(nPtsSampledByRank, 0, n_rank * sizeof(int), stream));
    std::fill(nPtsSampledByRank, nPtsSampledByRank + n_rank, 0);
    nPtsSampledByRank[my_rank] = inRankCp.size() / n_features;
    comm.allgather(&(nPtsSampledByRank[my_rank]), nPtsSampledByRank, 1, stream);
    ASSERT(comm.sync_stream(stream) == raft::comms::status_t::SUCCESS,
           "An error occurred in the distributed operation. This can result "
           "from a failed rank");
    auto nPtsSampled =
      thrust::reduce(thrust::host, nPtsSampledByRank, nPtsSampledByRank + n_rank, 0);
    // gather centroids from all ranks
    std::vector<size_t> sizes(n_rank);
    thrust::transform(
      thrust::host, nPtsSampledByRank, nPtsSampledByRank + n_rank, sizes.begin(), [&](int val) {
        return val * n_features;
      });
    RAFT_CUDA_TRY_NO_THROW(cudaFreeHost(nPtsSampledByRank));
    std::vector<size_t> displs(n_rank);
    thrust::exclusive_scan(thrust::host, sizes.begin(), sizes.end(), displs.begin());
    // Grow the candidate buffer and receive this round's candidates at its tail.
    centroidsBuf.resize(centroidsBuf.size() + nPtsSampled * n_features, stream);
    comm.allgatherv<DataT>(inRankCp.data(),
                           centroidsBuf.end() - nPtsSampled * n_features,
                           sizes.data(),
                           displs.data(),
                           stream);
    auto tot_centroids = potentialCentroids.extent(0) + nPtsSampled;
    potentialCentroids =
      raft::make_device_matrix_view<DataT, IndexT>(centroidsBuf.data(), tot_centroids, n_features);
    /// <<<< End of Step-5 >>>
  }  /// <<<< Step-6 >>>
  CUML_LOG_KMEANS(handle,
                  "@Rank-%d:KMeans||: # potential centroids sampled - %d\n",
                  my_rank,
                  (int)potentialCentroids.extent(0));
  if ((IndexT)potentialCentroids.extent(0) > (IndexT)n_clusters) {
    // <<< Step-7 >>>: For x in C, set w_x to be the number of pts closest to X
    // temporary buffer to store the sample count per cluster, destructor
    // releases the resource
    auto weight = raft::make_device_vector<DataT, IndexT>(handle, potentialCentroids.extent(0));
    raft::cluster::kmeans::count_samples_in_cluster(
      handle, params, X, L2NormX.view(), potentialCentroids, workspace, weight.view());
    // merge the local histogram from all ranks
    comm.allreduce<DataT>(weight.data_handle(),  // sendbuff
                          weight.data_handle(),  // recvbuff
                          weight.size(),         // count
                          raft::comms::op_t::SUM,
                          stream);
    // <<< end of Step-7 >>>
    // Step-8: Recluster the weighted points in C into k clusters
    // Note - reclustering step is duplicated across all ranks and with the same
    // seed they should generate the same potentialCentroids
    auto const_centroids = raft::make_device_matrix_view<const DataT, IndexT>(
      potentialCentroids.data_handle(), potentialCentroids.extent(0), potentialCentroids.extent(1));
    raft::cluster::kmeans::init_plus_plus(
      handle, params, const_centroids, centroidsRawData, workspace);
    auto inertia = raft::make_host_scalar<DataT>(0);
    auto n_iter  = raft::make_host_scalar<IndexT>(0);
    auto weight_view =
      raft::make_device_vector_view<const DataT, IndexT>(weight.data_handle(), weight.extent(0));
    // Use a fresh rng_state so the (deterministic) reclustering matches on all
    // ranks regardless of what the caller's rng_state has been used for.
    raft::cluster::kmeans::KMeansParams params_copy = params;
    params_copy.rng_state                           = default_params.rng_state;
    raft::cluster::kmeans::fit_main(handle,
                                    params_copy,
                                    const_centroids,
                                    weight_view,
                                    centroidsRawData,
                                    inertia.view(),
                                    n_iter.view(),
                                    workspace);
  } else if ((IndexT)potentialCentroids.extent(0) < (IndexT)n_clusters) {
    // supplement with random
    auto n_random_clusters = n_clusters - potentialCentroids.extent(0);
    CUML_LOG_KMEANS(handle,
                    "[Warning!] KMeans||: found fewer than %d centroids during "
                    "initialization (found %d centroids, remaining %d centroids will be "
                    "chosen randomly from input samples)\n",
                    n_clusters,
                    (int)potentialCentroids.extent(0),
                    (int)n_random_clusters);
    // generate `n_random_clusters` centroids
    raft::cluster::kmeans::KMeansParams rand_params = params;
    rand_params.rng_state                           = default_params.rng_state;
    rand_params.init       = raft::cluster::kmeans::KMeansParams::InitMethod::Random;
    rand_params.n_clusters = n_random_clusters;
    initRandom(handle, rand_params, X, centroidsRawData);
    // copy centroids generated during kmeans|| iteration to the buffer
    raft::copy(centroidsRawData.data_handle() + n_random_clusters * n_features,
               potentialCentroids.data_handle(),
               potentialCentroids.size(),
               stream);
  } else {
    // found the required n_clusters
    raft::copy(centroidsRawData.data_handle(),
               potentialCentroids.data_handle(),
               potentialCentroids.size(),
               stream);
  }
}
// Verifies that the user-provided sample weights sum (globally, across all
// ranks) to the total number of local samples; if not, rescales them in place
// so that they do. This keeps the weighted update in fit() on the same scale
// as the unweighted case.
//
// @param handle    raft handle (stream + comms)
// @param workspace scratch buffer, resized as needed for the CUB reduction
// @param weight    in/out device vector [n_samples] of per-sample weights
//
// Fix vs. previous revision: the warning log passed an IndexT (possibly
// int64_t) to a "%d" printf conversion — a varargs/format mismatch on 64-bit
// index instantiations. Now logged with "%zu" and an explicit size_t cast.
template <typename DataT, typename IndexT>
void checkWeights(const raft::handle_t& handle,
                  rmm::device_uvector<char>& workspace,
                  raft::device_vector_view<DataT, IndexT> weight)
{
  cudaStream_t stream = handle.get_stream();
  rmm::device_scalar<DataT> wt_aggr(stream);
  const auto& comm          = handle.get_comms();
  auto n_samples            = weight.extent(0);
  size_t temp_storage_bytes = 0;
  // Two-phase CUB reduction: first query the scratch size, then run the sum.
  RAFT_CUDA_TRY(cub::DeviceReduce::Sum(
    nullptr, temp_storage_bytes, weight.data_handle(), wt_aggr.data(), n_samples, stream));
  workspace.resize(temp_storage_bytes, stream);
  RAFT_CUDA_TRY(cub::DeviceReduce::Sum(
    workspace.data(), temp_storage_bytes, weight.data_handle(), wt_aggr.data(), n_samples, stream));
  // Accumulate the per-rank partial sums into the global weight sum.
  comm.allreduce<DataT>(wt_aggr.data(),  // sendbuff
                        wt_aggr.data(),  // recvbuff
                        1,               // count
                        raft::comms::op_t::SUM,
                        stream);
  DataT wt_sum = wt_aggr.value(stream);
  handle.sync_stream(stream);
  if (wt_sum != n_samples) {
    CUML_LOG_KMEANS(handle,
                    "[Warning!] KMeans: normalizing the user provided sample weights to "
                    "sum up to %zu samples",
                    (size_t)n_samples);
    DataT scale = n_samples / wt_sum;
    raft::linalg::unaryOp(
      weight.data_handle(),
      weight.data_handle(),
      weight.size(),
      [=] __device__(const DataT& wt) { return wt * scale; },
      stream);
  }
}
// Multi-GPU Lloyd iteration loop. Starting from the provided 'centroids',
// alternates assignment (nearest centroid per local sample) and update
// (globally-reduced weighted means) until the centroid shift falls below
// params.tol, the relative clustering-cost improvement stalls (when
// params.inertia_check is set), or params.max_iter iterations are reached.
//
// Fix vs. previous revision: the inertia convergence test used
// `if (n_iter[0] > 0)`, which is always true since the loop starts at 1; on
// the very first iteration priorClusteringCost is still 0, so
// delta = cur/0 = inf > 1 - tol and the loop terminated spuriously after one
// iteration whenever inertia_check was enabled. The guard is now
// `n_iter[0] > 1` so a prior cost exists before the ratio is taken. Log
// arguments for n_iter[0] (IndexT, possibly int64_t) are also cast to int to
// match the "%d" printf conversion.
template <typename DataT, typename IndexT>
void fit(const raft::handle_t& handle,
         const raft::cluster::kmeans::KMeansParams& params,
         raft::device_matrix_view<const DataT, IndexT> X,
         raft::device_vector_view<DataT, IndexT> weight,
         raft::device_matrix_view<DataT, IndexT> centroids,
         raft::host_scalar_view<DataT> inertia,
         raft::host_scalar_view<IndexT> n_iter,
         rmm::device_uvector<char>& workspace)
{
  const auto& comm    = handle.get_comms();
  cudaStream_t stream = handle.get_stream();
  auto n_samples      = X.extent(0);
  auto n_features     = X.extent(1);
  auto n_clusters     = params.n_clusters;
  auto metric         = params.metric;
  // stores (key, value) pair corresponding to each sample where
  //   - key is the index of nearest cluster
  //   - value is the distance to the nearest cluster
  auto minClusterAndDistance =
    raft::make_device_vector<raft::KeyValuePair<IndexT, DataT>, IndexT>(handle, n_samples);
  // temporary buffer to store L2 norm of centroids or distance matrix,
  // destructor releases the resource
  rmm::device_uvector<DataT> L2NormBuf_OR_DistBuf(0, stream);
  // temporary buffer to store intermediate centroids, destructor releases the
  // resource
  auto newCentroids = raft::make_device_matrix<DataT, IndexT>(handle, n_clusters, n_features);
  // temporary buffer to store the weights per cluster, destructor releases
  // the resource
  auto wtInCluster = raft::make_device_vector<DataT, IndexT>(handle, n_clusters);
  // L2 norm of X: ||x||^2 (only needed for the expanded L2 distance variants)
  auto L2NormX = raft::make_device_vector<DataT, IndexT>(handle, n_samples);
  if (metric == raft::distance::DistanceType::L2Expanded ||
      metric == raft::distance::DistanceType::L2SqrtExpanded) {
    raft::linalg::rowNorm(L2NormX.data_handle(),
                          X.data_handle(),
                          X.extent(1),
                          X.extent(0),
                          raft::linalg::L2Norm,
                          true,
                          stream);
  }
  DataT priorClusteringCost = 0;
  for (n_iter[0] = 1; n_iter[0] <= params.max_iter; ++n_iter[0]) {
    CUML_LOG_KMEANS(handle,
                    "KMeans.fit: Iteration-%d: fitting the model using the initialize "
                    "cluster centers\n",
                    (int)n_iter[0]);
    auto const_centroids = raft::make_device_matrix_view<const DataT, IndexT>(
      centroids.data_handle(), centroids.extent(0), centroids.extent(1));
    // computes minClusterAndDistance[0:n_samples) where
    // minClusterAndDistance[i] is a <key, value> pair where
    //   'key' is index to an sample in 'centroids' (index of the nearest
    //   centroid) and 'value' is the distance between the sample 'X[i]' and the
    //   'centroid[key]'
    raft::cluster::kmeans::min_cluster_and_distance(handle,
                                                    X,
                                                    const_centroids,
                                                    minClusterAndDistance.view(),
                                                    L2NormX.view(),
                                                    L2NormBuf_OR_DistBuf,
                                                    params.metric,
                                                    params.batch_samples,
                                                    params.batch_centroids,
                                                    workspace);
    // Using TransformInputIteratorT to dereference an array of
    // cub::KeyValuePair and converting them to just return the Key to be used
    // in reduce_rows_by_key prims
    raft::cluster::kmeans::KeyValueIndexOp<IndexT, DataT> conversion_op;
    cub::TransformInputIterator<IndexT,
                                raft::cluster::kmeans::KeyValueIndexOp<IndexT, DataT>,
                                raft::KeyValuePair<IndexT, DataT>*>
      itr(minClusterAndDistance.data_handle(), conversion_op);
    workspace.resize(n_samples, stream);
    // Calculates weighted sum of all the samples assigned to cluster-i and
    // store the result in newCentroids[i]
    raft::linalg::reduce_rows_by_key((DataT*)X.data_handle(),
                                     X.extent(1),
                                     itr,
                                     weight.data_handle(),
                                     workspace.data(),
                                     X.extent(0),
                                     X.extent(1),
                                     static_cast<IndexT>(n_clusters),
                                     newCentroids.data_handle(),
                                     stream);
    // Reduce weights by key to compute weight in each cluster
    raft::linalg::reduce_cols_by_key(weight.data_handle(),
                                     itr,
                                     wtInCluster.data_handle(),
                                     (IndexT)1,
                                     (IndexT)weight.extent(0),
                                     (IndexT)n_clusters,
                                     stream);
    // merge the local histogram from all ranks
    comm.allreduce<DataT>(wtInCluster.data_handle(),  // sendbuff
                          wtInCluster.data_handle(),  // recvbuff
                          wtInCluster.size(),         // count
                          raft::comms::op_t::SUM,
                          stream);
    // reduces newCentroids from all ranks
    comm.allreduce<DataT>(newCentroids.data_handle(),  // sendbuff
                          newCentroids.data_handle(),  // recvbuff
                          newCentroids.size(),         // count
                          raft::comms::op_t::SUM,
                          stream);
    // Computes newCentroids[i] = newCentroids[i]/wtInCluster[i] where
    //   newCentroids[n_clusters x n_features] - 2D array, newCentroids[i] has
    //     sum of all the samples assigned to cluster-i
    //   wtInCluster[n_clusters] - 1D array, wtInCluster[i] contains # of
    //     samples in cluster-i.
    // Note - when wtInCluster[i] is 0, newCentroid[i] is reset to 0
    raft::linalg::matrixVectorOp(
      newCentroids.data_handle(),
      newCentroids.data_handle(),
      wtInCluster.data_handle(),
      newCentroids.extent(1),
      newCentroids.extent(0),
      true,
      false,
      [=] __device__(DataT mat, DataT vec) {
        if (vec == 0)
          return DataT(0);
        else
          return mat / vec;
      },
      stream);
    // copy the centroids[i] to newCentroids[i] when wtInCluster[i] is 0
    cub::ArgIndexInputIterator<DataT*> itr_wt(wtInCluster.data_handle());
    raft::matrix::gather_if(
      centroids.data_handle(),
      centroids.extent(1),
      centroids.extent(0),
      itr_wt,
      itr_wt,
      wtInCluster.extent(0),
      newCentroids.data_handle(),
      [=] __device__(raft::KeyValuePair<ptrdiff_t, DataT> map) {  // predicate
        // copy when the # of samples in the cluster is 0
        if (map.value == 0)
          return true;
        else
          return false;
      },
      [=] __device__(raft::KeyValuePair<ptrdiff_t, DataT> map) {  // map
        return map.key;
      },
      stream);
    // compute the squared norm between the newCentroids and the original
    // centroids, destructor releases the resource
    auto sqrdNorm = raft::make_device_scalar<DataT>(handle, 1);
    raft::linalg::mapThenSumReduce(
      sqrdNorm.data_handle(),
      newCentroids.size(),
      [=] __device__(const DataT a, const DataT b) {
        DataT diff = a - b;
        return diff * diff;
      },
      stream,
      centroids.data_handle(),
      newCentroids.data_handle());
    DataT sqrdNormError = 0;
    raft::copy(&sqrdNormError, sqrdNorm.data_handle(), sqrdNorm.size(), stream);
    raft::copy(centroids.data_handle(), newCentroids.data_handle(), newCentroids.size(), stream);
    bool done = false;
    if (params.inertia_check) {
      rmm::device_scalar<raft::KeyValuePair<IndexT, DataT>> clusterCostD(stream);
      // calculate cluster cost phi_x(C)
      raft::cluster::kmeans::cluster_cost(
        handle,
        minClusterAndDistance.view(),
        workspace,
        raft::make_device_scalar_view(clusterCostD.data()),
        [] __device__(const raft::KeyValuePair<IndexT, DataT>& a,
                      const raft::KeyValuePair<IndexT, DataT>& b) {
          raft::KeyValuePair<IndexT, DataT> res;
          res.key   = 0;
          res.value = a.value + b.value;
          return res;
        });
      // Cluster cost phi_x(C) from all ranks
      comm.allreduce(&(clusterCostD.data()->value),
                     &(clusterCostD.data()->value),
                     1,
                     raft::comms::op_t::SUM,
                     stream);
      DataT curClusteringCost = 0;
      raft::copy(&curClusteringCost, &(clusterCostD.data()->value), 1, stream);
      ASSERT(comm.sync_stream(stream) == raft::comms::status_t::SUCCESS,
             "An error occurred in the distributed operation. This can result "
             "from a failed rank");
      ASSERT(curClusteringCost != (DataT)0.0,
             "Too few points and centroids being found is getting 0 cost from "
             "centers\n");
      // Only compare against the prior cost once one exists (iteration >= 2);
      // on the first iteration priorClusteringCost is still 0.
      if (n_iter[0] > 1) {
        DataT delta = curClusteringCost / priorClusteringCost;
        if (delta > 1 - params.tol) done = true;
      }
      priorClusteringCost = curClusteringCost;
    }
    handle.sync_stream(stream);
    if (sqrdNormError < params.tol) done = true;
    if (done) {
      CUML_LOG_KMEANS(
        handle, "Threshold triggered after %d iterations. Terminating early.\n", (int)n_iter[0]);
      break;
    }
  }
}
// Raw-pointer entry point for the multi-GPU fit: validates inputs, prepares
// the weight vector, dispatches centroid initialization according to
// params.init, then runs the Lloyd iteration loop (mdspan overload above) and
// copies the final centroids back to the caller's buffer.
//
// @param X               device array [n_local_samples x n_features], this rank's partition
// @param sample_weight   optional device array [n_local_samples]; nullptr => all ones
// @param centroids       in (for InitMethod::Array) / out: [n_clusters x n_features]
// @param inertia         out: final clustering cost
// @param n_iter          out: number of iterations executed
template <typename DataT, typename IndexT = int>
void fit(const raft::handle_t& handle,
         const raft::cluster::kmeans::KMeansParams& params,
         const DataT* X,
         const IndexT n_local_samples,
         const IndexT n_features,
         const DataT* sample_weight,
         DataT* centroids,
         DataT& inertia,
         IndexT& n_iter)
{
  cudaStream_t stream = handle.get_stream();
  ASSERT(n_local_samples > 0, "# of samples must be > 0");
  ASSERT(params.oversampling_factor > 0,
         "oversampling factor must be > 0 (requested %d)",
         (int)params.oversampling_factor);
  ASSERT(is_device_or_managed_type(X), "input data must be device accessible");
  auto n_clusters = params.n_clusters;
  auto data   = raft::make_device_matrix_view<const DataT, IndexT>(X, n_local_samples, n_features);
  // Per-sample weights: copy the caller's array, or default every weight to 1.
  auto weight = raft::make_device_vector<DataT, IndexT>(handle, n_local_samples);
  if (sample_weight != nullptr) {
    raft::copy(weight.data_handle(), sample_weight, n_local_samples, stream);
  } else {
    thrust::fill(
      handle.get_thrust_policy(), weight.data_handle(), weight.data_handle() + weight.size(), 1);
  }
  // underlying expandable storage that holds centroids data
  auto centroidsRawData = raft::make_device_matrix<DataT, IndexT>(handle, n_clusters, n_features);
  // Device-accessible allocation of expandable storage used as temporary buffers
  rmm::device_uvector<char> workspace(0, stream);
  // check if weights sum up to n_samples (rescales in place if they don't)
  checkWeights(handle, workspace, weight.view());
  if (params.init == raft::cluster::kmeans::KMeansParams::InitMethod::Random) {
    // initializing with random samples from input dataset
    CUML_LOG_KMEANS(handle,
                    "KMeans.fit: initialize cluster centers by randomly choosing from the "
                    "input data.\n");
    initRandom<DataT, IndexT>(handle, params, data, centroidsRawData.view());
  } else if (params.init == raft::cluster::kmeans::KMeansParams::InitMethod::KMeansPlusPlus) {
    // default method to initialize is kmeans++
    CUML_LOG_KMEANS(handle, "KMeans.fit: initialize cluster centers using k-means++ algorithm.\n");
    initKMeansPlusPlus<DataT, IndexT>(handle, params, data, centroidsRawData.view(), workspace);
  } else if (params.init == raft::cluster::kmeans::KMeansParams::InitMethod::Array) {
    CUML_LOG_KMEANS(handle,
                    "KMeans.fit: initialize cluster centers from the ndarray array input "
                    "passed to init argument.\n");
    ASSERT(centroids != nullptr,
           "centroids array is null (require a valid array of centroids for "
           "the requested initialization method)");
    raft::copy(centroidsRawData.data_handle(), centroids, params.n_clusters * n_features, stream);
  } else {
    THROW("unknown initialization method to select initial centers");
  }
  auto inertiaView = raft::make_host_scalar_view(&inertia);
  auto n_iterView  = raft::make_host_scalar_view(&n_iter);
  // Run the Lloyd iterations on the initialized centroids.
  fit<DataT, IndexT>(handle,
                     params,
                     data,
                     weight.view(),
                     centroidsRawData.view(),
                     inertiaView,
                     n_iterView,
                     workspace);
  // Publish the fitted centroids back into the caller-provided buffer.
  raft::copy(centroids, centroidsRawData.data_handle(), params.n_clusters * n_features, stream);
  CUML_LOG_KMEANS(handle,
                  "KMeans.fit: async call returned (fit could still be running on the "
                  "device)\n");
}
}; // end namespace impl
}; // end namespace opg
}; // end namespace kmeans
}; // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/kmeans/kmeans_fit_predict.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <raft/core/handle.hpp>
#include <raft/cluster/kmeans.cuh>
#include <raft/cluster/kmeans_types.hpp>
namespace ML {
namespace kmeans {
// -------------------------- fit_predict --------------------------------//
// Thin adapter: wraps the raw device pointers into mdspan views and forwards
// everything to raft's k-means fit_predict implementation.
template <typename value_t, typename idx_t>
void fit_predict_impl(const raft::handle_t& handle,
                      const raft::cluster::KMeansParams& params,
                      const value_t* X,
                      idx_t n_samples,
                      idx_t n_features,
                      const value_t* sample_weight,
                      value_t* centroids,
                      idx_t* labels,
                      value_t& inertia,
                      idx_t& n_iter)
{
  // Optional per-sample weights: engaged only when the caller supplied them.
  std::optional<raft::device_vector_view<const value_t, idx_t>> weights_view;
  if (sample_weight != nullptr) {
    weights_view.emplace(
      raft::make_device_vector_view<const value_t, idx_t>(sample_weight, n_samples));
  }

  auto data_view    = raft::make_device_matrix_view(X, n_samples, n_features);
  auto centers_view = std::make_optional(
    raft::make_device_matrix_view<value_t, idx_t>(centroids, params.n_clusters, n_features));
  auto labels_view  = raft::make_device_vector_view<idx_t, idx_t>(labels, n_samples);

  raft::cluster::kmeans_fit_predict<value_t, idx_t>(handle,
                                                    params,
                                                    data_view,
                                                    weights_view,
                                                    centers_view,
                                                    labels_view,
                                                    raft::make_host_scalar_view<value_t>(&inertia),
                                                    raft::make_host_scalar_view<idx_t>(&n_iter));
}
/// Public API: single-precision features, 32-bit indices.
void fit_predict(const raft::handle_t& handle,
                 const raft::cluster::KMeansParams& params,
                 const float* X,
                 int n_samples,
                 int n_features,
                 const float* sample_weight,
                 float* centroids,
                 int* labels,
                 float& inertia,
                 int& n_iter)
{
  fit_predict_impl<float, int>(
    handle, params, X, n_samples, n_features, sample_weight, centroids, labels, inertia, n_iter);
}
/// Public API: double-precision features, 32-bit indices.
void fit_predict(const raft::handle_t& handle,
                 const raft::cluster::KMeansParams& params,
                 const double* X,
                 int n_samples,
                 int n_features,
                 const double* sample_weight,
                 double* centroids,
                 int* labels,
                 double& inertia,
                 int& n_iter)
{
  fit_predict_impl<double, int>(
    handle, params, X, n_samples, n_features, sample_weight, centroids, labels, inertia, n_iter);
}
/// Public API: single-precision features, 64-bit indices.
void fit_predict(const raft::handle_t& handle,
                 const raft::cluster::KMeansParams& params,
                 const float* X,
                 int64_t n_samples,
                 int64_t n_features,
                 const float* sample_weight,
                 float* centroids,
                 int64_t* labels,
                 float& inertia,
                 int64_t& n_iter)
{
  fit_predict_impl<float, int64_t>(
    handle, params, X, n_samples, n_features, sample_weight, centroids, labels, inertia, n_iter);
}
/// Public API: double-precision features, 64-bit indices.
void fit_predict(const raft::handle_t& handle,
                 const raft::cluster::KMeansParams& params,
                 const double* X,
                 int64_t n_samples,
                 int64_t n_features,
                 const double* sample_weight,
                 double* centroids,
                 int64_t* labels,
                 double& inertia,
                 int64_t& n_iter)
{
  fit_predict_impl<double, int64_t>(
    handle, params, X, n_samples, n_features, sample_weight, centroids, labels, inertia, n_iter);
}
}; // end namespace kmeans
}; // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/ridge_mg.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/preprocess_mg.hpp>
#include <cuml/linear_model/ridge_mg.hpp>
#include <cumlprims/opg/linalg/mv_aTb.hpp>
#include <cumlprims/opg/linalg/svd.hpp>
#include <cumlprims/opg/stats/mean.hpp>
#include <raft/core/comms.hpp>
#include <raft/linalg/add.cuh>
#include <raft/linalg/gemm.cuh>
#include <raft/matrix/math.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <cstddef>
using namespace MLCommon;
namespace ML {
namespace Ridge {
namespace opg {
/**
 * @brief Solves the ridge system from a precomputed distributed SVD:
 *        w = V * inv(S^2 + alpha*I) * S * U^T * b
 *
 * @param S         singular values (length UDesc.N); modified in place
 * @param V         right singular vectors (UDesc.N x UDesc.N); modified in place
 * @param U         left singular vectors, distributed across this rank's partitions
 * @param UDesc     partition descriptor for U
 * @param b         distributed labels vector
 * @param alpha     host pointer to the ridge penalty; only alpha[0] is used
 * @param n_alpha   number of penalties (only one is currently supported)
 * @param w         output coefficients (length UDesc.N)
 * @param streams   worker CUDA streams; dense math runs on streams[0]
 * @param n_streams number of worker streams
 * @param verbose   unused
 */
template <typename T>
void ridgeSolve(const raft::handle_t& handle,
                T* S,
                T* V,
                std::vector<Matrix::Data<T>*>& U,
                const Matrix::PartDescriptor& UDesc,
                const std::vector<Matrix::Data<T>*>& b,
                const T* alpha,
                const int n_alpha,
                T* w,
                cudaStream_t* streams,
                int n_streams,
                bool verbose)
{
  // Implements this: w = V * inv(S^2 + λ*I) * S * U^T * b
  T* S_nnz;
  T alp = T(1);
  T beta = T(0);
  T thres = T(1e-10);
  // Zero near-zero singular values so they are skipped in the division below
  // instead of producing huge, numerically meaningless factors.
  raft::matrix::setSmallValuesZero(S, UDesc.N, streams[0], thres);
  rmm::device_uvector<T> S_nnz_vector(UDesc.N, streams[0]);
  S_nnz = S_nnz_vector.data();
  raft::copy(S_nnz, S, UDesc.N, streams[0]);
  raft::matrix::power(S_nnz, UDesc.N, streams[0]);                    // S_nnz = S^2
  raft::linalg::addScalar(S_nnz, S_nnz, alpha[0], UDesc.N, streams[0]);  // S_nnz = S^2 + alpha
  // S = S / (S^2 + alpha), skipping entries whose divisor is zero.
  raft::matrix::matrixVectorBinaryDivSkipZero(
    S, S_nnz, size_t(1), UDesc.N, false, true, streams[0], true);
  // Fold the scaled singular values into V's columns: V = V * diag(S).
  raft::matrix::matrixVectorBinaryMult(V, S, UDesc.N, UDesc.N, false, true, streams[0]);
  Matrix::Data<T> S_nnz_data;
  S_nnz_data.totalSize = UDesc.N;
  S_nnz_data.ptr = S_nnz;
  // Reuses the S_nnz scratch buffer for the result; presumably computes
  // S_nnz = U^T * b (distributed mat-vec) — TODO confirm against cumlprims docs.
  LinAlg::opg::mv_aTb(handle, S_nnz_data, U, UDesc, b, streams, n_streams);
  // Final dense solve on streams[0]: w = V * (U^T * b).
  raft::linalg::gemm(handle,
                     V,
                     UDesc.N,
                     UDesc.N,
                     S_nnz,
                     w,
                     UDesc.N,
                     1,
                     CUBLAS_OP_N,
                     CUBLAS_OP_N,
                     alp,
                     beta,
                     streams[0]);
}
/**
 * @brief Distributed eigen-decomposition path for the MNMG ridge fit.
 *
 * Allocates per-rank storage for the left singular vectors U, runs the
 * distributed SVD (svdEig), then solves the regularized system via ridgeSolve.
 *
 * @param A         local partitions of the feature matrix
 * @param ADesc     partition descriptor for A
 * @param b         local partitions of the labels
 * @param alpha     host pointer to the ridge penalty; only alpha[0] is used
 * @param n_alpha   number of penalties (only one is supported)
 * @param coef      output coefficients (length ADesc.N)
 * @param streams   worker CUDA streams
 * @param n_streams number of worker streams
 * @param verbose   forwarded to ridgeSolve (unused there)
 */
template <typename T>
void ridgeEig(raft::handle_t& handle,
              const std::vector<Matrix::Data<T>*>& A,
              const Matrix::PartDescriptor& ADesc,
              const std::vector<Matrix::Data<T>*>& b,
              const T* alpha,
              const int n_alpha,
              T* coef,
              cudaStream_t* streams,
              int n_streams,
              bool verbose)
{
  const auto& comm = handle.get_comms();
  int rank = comm.get_rank();
  rmm::device_uvector<T> S(ADesc.N, streams[0]);
  rmm::device_uvector<T> V(ADesc.N * ADesc.N, streams[0]);
  std::vector<Matrix::Data<T>*> U;
  std::vector<Matrix::Data<T>> U_temp;
  std::vector<Matrix::RankSizePair*> partsToRanks = ADesc.blocksOwnedBy(rank);
  // Total number of elements of U owned by this rank: sum of partition rows
  // times the column count.
  size_t total_size = 0;
  for (std::size_t i = 0; i < partsToRanks.size(); i++) {
    total_size += partsToRanks[i]->size;
  }
  total_size = total_size * ADesc.N;
  rmm::device_uvector<T> U_parts(total_size, streams[0]);
  T* curr_ptr = U_parts.data();
  // Carve U_parts into one Matrix::Data descriptor per local partition.
  // NOTE(review): totalSize is set to the partition's row count while the
  // pointer advances by rows * N elements — confirm the intended semantics of
  // Matrix::Data::totalSize against cumlprims.
  for (std::size_t i = 0; i < partsToRanks.size(); i++) {
    Matrix::Data<T> d;
    d.totalSize = partsToRanks[i]->size;
    d.ptr = curr_ptr;
    curr_ptr = curr_ptr + (partsToRanks[i]->size * ADesc.N);
    U_temp.push_back(d);
  }
  // Addresses into U_temp are taken only after all push_backs, so no
  // reallocation can invalidate them. Assumes A.size() == partsToRanks.size().
  for (std::size_t i = 0; i < A.size(); i++) {
    U.push_back(&(U_temp[i]));
  }
  LinAlg::opg::svdEig(handle, A, ADesc, U, S.data(), V.data(), streams, n_streams);
  ridgeSolve(
    handle, S.data(), V.data(), U, ADesc, b, alpha, n_alpha, coef, streams, n_streams, verbose);
}
/**
 * @brief MNMG ridge fit worker: runs on already-created CUDA streams.
 *
 * Centers / normalizes the data when an intercept is requested, solves the
 * regularized least-squares system via the distributed eigen-decomposition
 * path, and finally restores the data and computes the intercept.
 *
 * @param input_data    local partitions of the feature matrix
 * @param input_desc    partition descriptor for input_data
 * @param labels        local partitions of the labels
 * @param alpha         host pointer to ridge penalty value(s); only alpha[0] is used
 * @param n_alpha       number of penalty values (only one is supported)
 * @param coef          output coefficients (length input_desc.N)
 * @param intercept     output intercept (host pointer)
 * @param fit_intercept whether to fit an intercept term
 * @param normalize     whether to L2-normalize the columns
 * @param algo          solver id; only 1 (eigen-decomposition) is implemented
 * @param streams       worker CUDA streams
 * @param n_streams     number of worker streams
 * @param verbose       verbosity flag (forwarded)
 */
template <typename T>
void fit_impl(raft::handle_t& handle,
              std::vector<Matrix::Data<T>*>& input_data,
              Matrix::PartDescriptor& input_desc,
              std::vector<Matrix::Data<T>*>& labels,
              T* alpha,
              int n_alpha,
              T* coef,
              T* intercept,
              bool fit_intercept,
              bool normalize,
              int algo,
              cudaStream_t* streams,
              int n_streams,
              bool verbose)
{
  rmm::device_uvector<T> mu_input(0, streams[0]);
  rmm::device_uvector<T> norm2_input(0, streams[0]);
  rmm::device_uvector<T> mu_labels(0, streams[0]);
  if (fit_intercept) {
    mu_input.resize(input_desc.N, streams[0]);
    mu_labels.resize(1, streams[0]);
    if (normalize) { norm2_input.resize(input_desc.N, streams[0]); }
    GLM::opg::preProcessData(handle,
                             input_data,
                             input_desc,
                             labels,
                             mu_input.data(),
                             mu_labels.data(),
                             norm2_input.data(),
                             fit_intercept,
                             normalize,
                             streams,
                             n_streams,
                             verbose);
  }
  if (algo == 0 || input_desc.N == 1) {
    // BUGFIX: these messages previously said "olsFit" — copy-paste from the OLS
    // solver; this is the ridge code path.
    ASSERT(false, "ridgeFit: no algorithm with this id has been implemented");
  } else if (algo == 1) {
    ridgeEig(
      handle, input_data, input_desc, labels, alpha, n_alpha, coef, streams, n_streams, verbose);
  } else {
    ASSERT(false, "ridgeFit: no algorithm with this id has been implemented");
  }
  if (fit_intercept) {
    GLM::opg::postProcessData(handle,
                              input_data,
                              input_desc,
                              labels,
                              coef,
                              intercept,
                              mu_input.data(),
                              mu_labels.data(),
                              norm2_input.data(),
                              fit_intercept,
                              normalize,
                              streams,
                              n_streams,
                              verbose);
  } else {
    // No preprocessing happened, so the intercept is identically zero.
    *intercept = T(0);
  }
}
/**
* @brief performs MNMG fit operation for the ridge regression
* @input param handle: the internal cuml handle object
* @input param rank_sizes: includes all the partition size information for the rank
* @input param n_parts: number of partitions
* @input param input: input data
* @input param n_rows: number of rows of the input data
* @input param n_cols: number of cols of the input data
* @input param labels: labels data
* @input param alpha: ridge parameter
* @input param n_alpha: number of ridge parameters. Only one parameter is supported right now.
* @output param coef: learned regression coefficients
* @output param intercept: intercept value
* @input param fit_intercept: fit intercept or not
* @input param normalize: normalize the data or not
* @input param verbose
*/
template <typename T>
void fit_impl(raft::handle_t& handle,
              std::vector<Matrix::Data<T>*>& input_data,
              Matrix::PartDescriptor& input_desc,
              std::vector<Matrix::Data<T>*>& labels,
              T* alpha,
              int n_alpha,
              T* coef,
              T* intercept,
              bool fit_intercept,
              bool normalize,
              int algo,
              bool verbose)
{
  int rank = handle.get_comms().get_rank();
  // TODO: These streams should come from raft::handle_t
  // Tracking issue: https://github.com/rapidsai/cuml/issues/2470
  int n_streams = input_desc.blocksOwnedBy(rank).size();
  // Use std::vector instead of a variable-length array: VLAs are a compiler
  // extension, not standard C++.
  std::vector<cudaStream_t> streams(n_streams);
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamCreate(&streams[i]));
  }
  fit_impl(handle,
           input_data,
           input_desc,
           labels,
           alpha,
           n_alpha,
           coef,
           intercept,
           fit_intercept,
           normalize,
           algo,
           streams.data(),
           n_streams,
           verbose);
  // Wait for all per-partition work before tearing the streams down.
  for (int i = 0; i < n_streams; i++) {
    handle.sync_stream(streams[i]);
  }
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamDestroy(streams[i]));
  }
}
/**
 * @brief MNMG ridge predict worker: preds[p] = input[p] * coef + intercept,
 *        computed per local partition on round-robin-assigned streams.
 */
template <typename T>
void predict_impl(raft::handle_t& handle,
                  std::vector<Matrix::Data<T>*>& input_data,
                  Matrix::PartDescriptor& input_desc,
                  T* coef,
                  T intercept,
                  std::vector<Matrix::Data<T>*>& preds,
                  cudaStream_t* streams,
                  int n_streams,
                  bool verbose)
{
  std::vector<Matrix::RankSizePair*> local_blocks = input_desc.partsToRanks;
  // GEMM scaling factors: preds = 1 * X * coef + 0 * preds.
  const T one  = T(1);
  const T zero = T(0);
  for (std::size_t p = 0; p < input_data.size(); p++) {
    // Round-robin the local partitions over the available streams.
    cudaStream_t stream = streams[p % static_cast<std::size_t>(n_streams)];
    const auto n_rows   = local_blocks[p]->size;
    raft::linalg::gemm(handle,
                       input_data[p]->ptr,
                       n_rows,
                       input_desc.N,
                       coef,
                       preds[p]->ptr,
                       n_rows,
                       size_t(1),
                       CUBLAS_OP_N,
                       CUBLAS_OP_N,
                       one,
                       zero,
                       stream);
    raft::linalg::addScalar(preds[p]->ptr, preds[p]->ptr, intercept, n_rows, stream);
  }
}
/**
 * @brief MNMG ridge predict entry point: builds partition descriptors from the
 *        raw rank/size arrays, creates one stream per partition, and forwards
 *        to the stream-based predict_impl.
 */
template <typename T>
void predict_impl(raft::handle_t& handle,
                  Matrix::RankSizePair** rank_sizes,
                  size_t n_parts,
                  Matrix::Data<T>** input,
                  size_t n_rows,
                  size_t n_cols,
                  T* coef,
                  T intercept,
                  Matrix::Data<T>** preds,
                  bool verbose)
{
  int rank = handle.get_comms().get_rank();
  std::vector<Matrix::RankSizePair*> ranksAndSizes(rank_sizes, rank_sizes + n_parts);
  std::vector<Matrix::Data<T>*> input_data(input, input + n_parts);
  Matrix::PartDescriptor input_desc(n_rows, n_cols, ranksAndSizes, rank);
  std::vector<Matrix::Data<T>*> preds_data(preds, preds + n_parts);
  // TODO: These streams should come from raft::handle_t
  int n_streams = n_parts;
  // Use std::vector instead of a variable-length array: VLAs are a compiler
  // extension, not standard C++.
  std::vector<cudaStream_t> streams(n_streams);
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamCreate(&streams[i]));
  }
  predict_impl(
    handle, input_data, input_desc, coef, intercept, preds_data, streams.data(), n_streams, verbose);
  for (int i = 0; i < n_streams; i++) {
    handle.sync_stream(streams[i]);
  }
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamDestroy(streams[i]));
  }
}
/// Single-precision MNMG ridge fit: dispatches to the templated implementation.
void fit(raft::handle_t& handle,
         std::vector<Matrix::Data<float>*>& input_data,
         Matrix::PartDescriptor& input_desc,
         std::vector<Matrix::Data<float>*>& labels,
         float* alpha,
         int n_alpha,
         float* coef,
         float* intercept,
         bool fit_intercept,
         bool normalize,
         int algo,
         bool verbose)
{
  fit_impl<float>(handle, input_data, input_desc, labels, alpha, n_alpha, coef, intercept,
                  fit_intercept, normalize, algo, verbose);
}
/// Double-precision MNMG ridge fit: dispatches to the templated implementation.
void fit(raft::handle_t& handle,
         std::vector<Matrix::Data<double>*>& input_data,
         Matrix::PartDescriptor& input_desc,
         std::vector<Matrix::Data<double>*>& labels,
         double* alpha,
         int n_alpha,
         double* coef,
         double* intercept,
         bool fit_intercept,
         bool normalize,
         int algo,
         bool verbose)
{
  fit_impl<double>(handle, input_data, input_desc, labels, alpha, n_alpha, coef, intercept,
                   fit_intercept, normalize, algo, verbose);
}
/// Single-precision MNMG ridge predict: dispatches to the templated implementation.
void predict(raft::handle_t& handle,
             Matrix::RankSizePair** rank_sizes,
             size_t n_parts,
             Matrix::Data<float>** input,
             size_t n_rows,
             size_t n_cols,
             float* coef,
             float intercept,
             Matrix::Data<float>** preds,
             bool verbose)
{
  predict_impl<float>(
    handle, rank_sizes, n_parts, input, n_rows, n_cols, coef, intercept, preds, verbose);
}
/// Double-precision MNMG ridge predict: dispatches to the templated implementation.
void predict(raft::handle_t& handle,
             Matrix::RankSizePair** rank_sizes,
             size_t n_parts,
             Matrix::Data<double>** input,
             size_t n_rows,
             size_t n_cols,
             double* coef,
             double intercept,
             Matrix::Data<double>** preds,
             bool verbose)
{
  predict_impl<double>(
    handle, rank_sizes, n_parts, input, n_rows, n_cols, coef, intercept, preds, verbose);
}
} // namespace opg
} // namespace Ridge
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/ridge.cuh | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <raft/linalg/add.cuh>
#include <raft/linalg/gemm.cuh>
#include <raft/linalg/map.cuh>
#include <raft/linalg/norm.cuh>
#include <raft/linalg/subtract.cuh>
#include <raft/linalg/svd.cuh>
#include <raft/matrix/math.cuh>
#include <raft/stats/mean.cuh>
#include <raft/stats/mean_center.cuh>
#include <raft/stats/stddev.cuh>
#include <raft/stats/sum.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include "preprocess.cuh"
namespace ML {
namespace GLM {
namespace detail {
/**
 * @brief Solves the ridge system from a precomputed SVD of the input:
 *        w = V * inv(S^2 + alpha*I) * S * U^T * b
 *
 * @param S       singular values (length n_cols); modified in place
 * @param V       right singular vectors (n_cols x n_cols); modified in place
 * @param U       left singular vectors (n_rows x n_cols)
 * @param b       labels vector (length n_rows)
 * @param alpha   host pointer to the ridge penalty; only alpha[0] is used
 * @param n_alpha number of penalties (only one is supported)
 * @param w       output coefficients (length n_cols)
 */
template <typename math_t>
void ridgeSolve(const raft::handle_t& handle,
                math_t* S,
                math_t* V,
                math_t* U,
                size_t n_rows,
                size_t n_cols,
                math_t* b,
                math_t* alpha,
                int n_alpha,
                math_t* w)
{
  // Removed unused cublas/cusolver handle locals; the handle provides them to
  // the raft primitives internally.
  auto stream = handle.get_stream();
  // Implements this: w = V * inv(S^2 + λ*I) * S * U^T * b
  rmm::device_uvector<math_t> S_nnz_vector(n_cols, stream);
  math_t* S_nnz = S_nnz_vector.data();
  math_t alp    = math_t(1);
  math_t beta   = math_t(0);
  math_t thres  = math_t(1e-10);
  // Zero near-zero singular values so they are skipped in the division below.
  raft::matrix::setSmallValuesZero(S, n_cols, stream, thres);
  raft::copy(S_nnz, S, n_cols, stream);
  raft::matrix::power(S_nnz, n_cols, stream);                     // S_nnz = S^2
  raft::linalg::addScalar(S_nnz, S_nnz, alpha[0], n_cols, stream);  // S_nnz = S^2 + alpha
  // S = S / (S^2 + alpha), skipping zero divisors.
  raft::matrix::matrixVectorBinaryDivSkipZero(
    S, S_nnz, (size_t)1, n_cols, false, true, stream, true);
  // Fold the scaled singular values into V.
  raft::matrix::matrixVectorBinaryMult(V, S, n_cols, n_cols, false, true, stream);
  // S_nnz = U^T * b (reusing the scratch buffer).
  raft::linalg::gemm(
    handle, U, n_rows, n_cols, b, S_nnz, n_cols, 1, CUBLAS_OP_T, CUBLAS_OP_N, alp, beta, stream);
  // w = V * (U^T * b).
  raft::linalg::gemm(
    handle, V, n_cols, n_cols, S_nnz, w, n_cols, 1, CUBLAS_OP_N, CUBLAS_OP_N, alp, beta, stream);
}
/**
 * @brief Ridge fit via a full QR-based SVD of the input matrix, then ridgeSolve.
 *        Numerically robust default path; A is decomposed in place into U, S, V.
 */
template <typename math_t>
void ridgeSVD(const raft::handle_t& handle,
              math_t* A,
              size_t n_rows,
              size_t n_cols,
              math_t* b,
              math_t* alpha,
              int n_alpha,
              math_t* w)
{
  // Removed unused cublas/cusolver handle locals; raft::linalg::svdQR obtains
  // them from the handle itself.
  auto stream = handle.get_stream();
  ASSERT(n_cols > 0, "ridgeSVD: number of columns cannot be less than one");
  ASSERT(n_rows > 1, "ridgeSVD: number of rows cannot be less than two");
  auto U_len = n_rows * n_cols;
  auto V_len = n_cols * n_cols;
  rmm::device_uvector<math_t> S(n_cols, stream);
  rmm::device_uvector<math_t> V(V_len, stream);
  rmm::device_uvector<math_t> U(U_len, stream);
  raft::linalg::svdQR(
    handle, A, n_rows, n_cols, S.data(), U.data(), V.data(), true, true, true, stream);
  ridgeSolve(handle, S.data(), V.data(), U.data(), n_rows, n_cols, b, alpha, n_alpha, w);
}
/**
 * @brief Ridge fit via the eigen-decomposition SVD path (svdEig), then
 *        ridgeSolve. Faster than full SVD for tall-skinny inputs.
 */
template <typename math_t>
void ridgeEig(const raft::handle_t& handle,
              math_t* A,
              size_t n_rows,
              size_t n_cols,
              math_t* b,
              math_t* alpha,
              int n_alpha,
              math_t* w)
{
  // Removed unused cublas/cusolver handle locals; raft::linalg::svdEig obtains
  // them from the handle itself.
  auto stream = handle.get_stream();
  ASSERT(n_cols > 1, "ridgeEig: number of columns cannot be less than two");
  ASSERT(n_rows > 1, "ridgeEig: number of rows cannot be less than two");
  auto U_len = n_rows * n_cols;
  auto V_len = n_cols * n_cols;
  rmm::device_uvector<math_t> S(n_cols, stream);
  rmm::device_uvector<math_t> V(V_len, stream);
  rmm::device_uvector<math_t> U(U_len, stream);
  raft::linalg::svdEig(handle, A, n_rows, n_cols, S.data(), U.data(), V.data(), true, stream);
  ridgeSolve(handle, S.data(), V.data(), U.data(), n_rows, n_cols, b, alpha, n_alpha, w);
}
/**
* @brief fit a ridge regression model (l2 regularized least squares)
* @param handle cuml handle
* @param input device pointer to feature matrix n_rows x n_cols (col-major)
* @param n_rows number of rows of the feature matrix
* @param n_cols number of columns of the feature matrix
* @param labels device pointer to label vector of length n_rows
* @param alpha host pointer to parameters of the l2 regularizer
* @param n_alpha number of regularization parameters
* @param coef device pointer to hold the solution for weights of size n_cols
* @param intercept host pointer to hold the solution for bias term of size 1
* @param fit_intercept if true, fit intercept
* @param normalize if true, normalize data to zero mean, unit variance
* @param algo specifies which solver to use (0: SVD, 1: Eigendecomposition)
* @param sample_weight device pointer to sample weight vector of length n_rows (nullptr for uniform
* weights) This vector is modified during the computation
*/
template <typename math_t>
void ridgeFit(const raft::handle_t& handle,
              math_t* input,
              size_t n_rows,
              size_t n_cols,
              math_t* labels,
              math_t* alpha,
              int n_alpha,
              math_t* coef,
              math_t* intercept,
              bool fit_intercept,
              bool normalize,
              int algo = 0,
              math_t* sample_weight = nullptr)
{
  // Removed unused cublas/cusolver handle locals; the raft primitives obtain
  // them from the handle internally.
  cudaStream_t stream = handle.get_stream();
  ASSERT(n_cols > 0, "ridgeFit: number of columns cannot be less than one");
  ASSERT(n_rows > 1, "ridgeFit: number of rows cannot be less than two");
  // Scratch for centering/normalization statistics; allocated only when an
  // intercept is fitted.
  rmm::device_uvector<math_t> mu_input(0, stream);
  rmm::device_uvector<math_t> norm2_input(0, stream);
  rmm::device_uvector<math_t> mu_labels(0, stream);
  if (fit_intercept) {
    mu_input.resize(n_cols, stream);
    mu_labels.resize(1, stream);
    if (normalize) { norm2_input.resize(n_cols, stream); }
    preProcessData(handle,
                   input,
                   n_rows,
                   n_cols,
                   labels,
                   intercept,
                   mu_input.data(),
                   mu_labels.data(),
                   norm2_input.data(),
                   fit_intercept,
                   normalize,
                   sample_weight);
  }
  // Weighted least squares: scale the rows of X and y by sqrt(w) before
  // solving; the scaling is undone after the solve.
  if (sample_weight != nullptr) {
    raft::linalg::sqrt(sample_weight, sample_weight, n_rows, stream);
    raft::matrix::matrixVectorBinaryMult(
      input, sample_weight, n_rows, n_cols, false, false, stream);
    raft::linalg::map_k(
      labels,
      n_rows,
      [] __device__(math_t a, math_t b) { return a * b; },
      stream,
      labels,
      sample_weight);
  }
  if (algo == 0 || n_cols == 1) {
    // SVD path is also forced for a single column, where the eigen path is
    // not applicable.
    ridgeSVD(handle, input, n_rows, n_cols, labels, alpha, n_alpha, coef);
  } else if (algo == 1) {
    ridgeEig(handle, input, n_rows, n_cols, labels, alpha, n_alpha, coef);
  } else {
    // Collapsed the previous duplicate `algo == 2` branch: it raised the same
    // assertion as the catch-all, so one branch suffices.
    ASSERT(false, "ridgeFit: no algorithm with this id has been implemented");
  }
  // Undo the sqrt(w) row scaling and restore the caller's weights (w = w^2
  // undoes the earlier in-place sqrt).
  if (sample_weight != nullptr) {
    raft::matrix::matrixVectorBinaryDivSkipZero(
      input, sample_weight, n_rows, n_cols, false, false, stream);
    raft::linalg::map_k(
      labels,
      n_rows,
      [] __device__(math_t a, math_t b) { return a / b; },
      stream,
      labels,
      sample_weight);
    raft::linalg::powerScalar(sample_weight, sample_weight, (math_t)2, n_rows, stream);
  }
  if (fit_intercept) {
    postProcessData(handle,
                    input,
                    n_rows,
                    n_cols,
                    labels,
                    coef,
                    intercept,
                    mu_input.data(),
                    mu_labels.data(),
                    norm2_input.data(),
                    fit_intercept,
                    normalize);
  } else {
    // No centering happened, so the intercept is identically zero.
    *intercept = math_t(0);
  }
}
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/preprocess_mg.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/preprocess_mg.hpp>
#include <cumlprims/opg/linalg/norm.hpp>
#include <cumlprims/opg/matrix/math.hpp>
#include <cumlprims/opg/stats/mean.hpp>
#include <cumlprims/opg/stats/mean_center.hpp>
#include <raft/core/comms.hpp>
#include <raft/linalg/gemm.cuh>
#include <raft/linalg/subtract.cuh>
#include <raft/matrix/math.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
using namespace MLCommon;
namespace ML {
namespace GLM {
namespace opg {
/**
 * @brief Centers (and optionally L2-normalizes) the distributed features and
 *        labels in place, recording the statistics needed to undo the
 *        transformation in postProcessData_impl.
 *
 * No-op when fit_intercept is false.
 *
 * @param mu_input    out: per-column means of the features (length input_desc.N)
 * @param mu_labels   out: mean of the labels (length 1)
 * @param norm2_input out: per-column L2 norms (length input_desc.N); written
 *                    only when normalize is true
 */
template <typename T>
void preProcessData_impl(raft::handle_t& handle,
                         std::vector<Matrix::Data<T>*>& input_data,
                         Matrix::PartDescriptor& input_desc,
                         std::vector<Matrix::Data<T>*>& labels,
                         T* mu_input,
                         T* mu_labels,
                         T* norm2_input,
                         bool fit_intercept,
                         bool normalize,
                         cudaStream_t* streams,
                         int n_streams,
                         bool verbose)
{
  // Removed unused cublas/cusolver handle locals; only the communicator is
  // needed directly here.
  const auto& comm = handle.get_comms();
  if (fit_intercept) {
    // Center the features: compute column means, then subtract them in place.
    Matrix::Data<T> mu_input_data{mu_input, size_t(input_desc.N)};
    Stats::opg::mean(handle, mu_input_data, input_data, input_desc, streams, n_streams);
    Stats::opg::mean_center(input_data, input_desc, mu_input_data, comm, streams, n_streams);
    // The labels are a single distributed column with the same row partitioning.
    Matrix::PartDescriptor labels_desc = input_desc;
    labels_desc.N = size_t(1);
    Matrix::Data<T> mu_labels_data{mu_labels, size_t(1)};
    Stats::opg::mean(handle, mu_labels_data, labels, labels_desc, streams, n_streams);
    Stats::opg::mean_center(labels, labels_desc, mu_labels_data, comm, streams, n_streams);
    if (normalize) {
      // Scale each column by its L2 norm; zero-norm columns are skipped.
      Matrix::Data<T> norm2_input_data{norm2_input, size_t(input_desc.N)};
      LinAlg::opg::colNorm2(handle, norm2_input_data, input_data, input_desc, streams, n_streams);
      Matrix::opg::matrixVectorBinaryDivSkipZero(
        input_data, input_desc, norm2_input_data, false, true, true, comm, streams, n_streams);
    }
  }
}
/**
 * @brief Undoes the preprocessing applied by preProcessData_impl: rescales the
 *        coefficients (when normalized), computes the intercept, and adds the
 *        means back onto the distributed features and labels.
 *
 * @param coef      in/out: coefficients; divided by the column norms when
 *                  normalize is true
 * @param intercept out (host): mu_labels - mu_input . coef
 */
template <typename T>
void postProcessData_impl(raft::handle_t& handle,
                          std::vector<Matrix::Data<T>*>& input_data,
                          Matrix::PartDescriptor& input_desc,
                          std::vector<Matrix::Data<T>*>& labels,
                          T* coef,
                          T* intercept,
                          T* mu_input,
                          T* mu_labels,
                          T* norm2_input,
                          bool fit_intercept,
                          bool normalize,
                          cudaStream_t* streams,
                          int n_streams,
                          bool verbose)
{
  // Removed unused cublas/cusolver handle locals; only the communicator is
  // needed directly here.
  const auto& comm = handle.get_comms();
  rmm::device_uvector<T> d_intercept(1, streams[0]);
  if (normalize) {
    // Restore the original column scales on the data and fold them into coef.
    Matrix::Data<T> norm2_input_data{norm2_input, input_desc.N};
    Matrix::opg::matrixVectorBinaryMult(
      input_data, input_desc, norm2_input_data, false, true, comm, streams, n_streams);
    raft::matrix::matrixVectorBinaryDivSkipZero(
      coef, norm2_input, size_t(1), input_desc.N, false, true, streams[0], true);
  }
  // d_intercept = mu_input . coef, then intercept = mu_labels - d_intercept.
  raft::linalg::gemm(handle,
                     mu_input,
                     1,
                     input_desc.N,
                     coef,
                     d_intercept.data(),
                     1,
                     1,
                     CUBLAS_OP_N,
                     CUBLAS_OP_N,
                     streams[0]);
  raft::linalg::subtract(d_intercept.data(), mu_labels, d_intercept.data(), 1, streams[0]);
  raft::update_host(intercept, d_intercept.data(), 1, streams[0]);
  // Add the means back so the caller's data is left unmodified overall.
  Matrix::Data<T> mu_input_data{mu_input, size_t(input_desc.N)};
  Stats::opg::mean_add(input_data, input_desc, mu_input_data, comm, streams, n_streams);
  Matrix::PartDescriptor label_desc = input_desc;
  label_desc.N = size_t(1);
  Matrix::Data<T> mu_label_data{mu_labels, size_t(1)};
  Stats::opg::mean_add(labels, label_desc, mu_label_data, comm, streams, n_streams);
}
/// Single-precision preprocessing: dispatches to the templated implementation.
void preProcessData(raft::handle_t& handle,
                    std::vector<Matrix::Data<float>*>& input_data,
                    Matrix::PartDescriptor& input_desc,
                    std::vector<Matrix::Data<float>*>& labels,
                    float* mu_input,
                    float* mu_labels,
                    float* norm2_input,
                    bool fit_intercept,
                    bool normalize,
                    cudaStream_t* streams,
                    int n_streams,
                    bool verbose)
{
  preProcessData_impl<float>(handle, input_data, input_desc, labels, mu_input, mu_labels,
                             norm2_input, fit_intercept, normalize, streams, n_streams, verbose);
}
/// Double-precision preprocessing: dispatches to the templated implementation.
void preProcessData(raft::handle_t& handle,
                    std::vector<Matrix::Data<double>*>& input_data,
                    Matrix::PartDescriptor& input_desc,
                    std::vector<Matrix::Data<double>*>& labels,
                    double* mu_input,
                    double* mu_labels,
                    double* norm2_input,
                    bool fit_intercept,
                    bool normalize,
                    cudaStream_t* streams,
                    int n_streams,
                    bool verbose)
{
  preProcessData_impl<double>(handle, input_data, input_desc, labels, mu_input, mu_labels,
                              norm2_input, fit_intercept, normalize, streams, n_streams, verbose);
}
/// Single-precision postprocessing: dispatches to the templated implementation.
void postProcessData(raft::handle_t& handle,
                     std::vector<Matrix::Data<float>*>& input_data,
                     Matrix::PartDescriptor& input_desc,
                     std::vector<Matrix::Data<float>*>& labels,
                     float* coef,
                     float* intercept,
                     float* mu_input,
                     float* mu_labels,
                     float* norm2_input,
                     bool fit_intercept,
                     bool normalize,
                     cudaStream_t* streams,
                     int n_streams,
                     bool verbose)
{
  postProcessData_impl<float>(handle, input_data, input_desc, labels, coef, intercept, mu_input,
                              mu_labels, norm2_input, fit_intercept, normalize, streams, n_streams,
                              verbose);
}
/// Double-precision postprocessing: dispatches to the templated implementation.
void postProcessData(raft::handle_t& handle,
                     std::vector<Matrix::Data<double>*>& input_data,
                     Matrix::PartDescriptor& input_desc,
                     std::vector<Matrix::Data<double>*>& labels,
                     double* coef,
                     double* intercept,
                     double* mu_input,
                     double* mu_labels,
                     double* norm2_input,
                     bool fit_intercept,
                     bool normalize,
                     cudaStream_t* streams,
                     int n_streams,
                     bool verbose)
{
  postProcessData_impl<double>(handle, input_data, input_desc, labels, coef, intercept, mu_input,
                               mu_labels, norm2_input, fit_intercept, normalize, streams,
                               n_streams, verbose);
}
} // namespace opg
} // namespace GLM
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/ols_mg.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/ols_mg.hpp>
#include <cuml/linear_model/preprocess_mg.hpp>
#include <cumlprims/opg/linalg/lstsq.hpp>
#include <cumlprims/opg/stats/mean.hpp>
#include <raft/core/comms.hpp>
#include <raft/linalg/add.cuh>
#include <raft/linalg/gemm.cuh>
#include <raft/util/cuda_utils.cuh>
#include <rmm/device_uvector.hpp>
#include <cstddef>
using namespace MLCommon;
namespace ML {
namespace OLS {
namespace opg {
/**
 * @brief MNMG OLS fit worker: runs on already-created CUDA streams.
 *
 * Optionally centers/normalizes the data, solves the least-squares system via
 * the distributed eigen-decomposition path, then restores the data and
 * computes the intercept.
 */
template <typename T>
void fit_impl(raft::handle_t& handle,
              std::vector<Matrix::Data<T>*>& input_data,
              Matrix::PartDescriptor& input_desc,
              std::vector<Matrix::Data<T>*>& labels,
              T* coef,
              T* intercept,
              bool fit_intercept,
              bool normalize,
              int algo,
              cudaStream_t* streams,
              int n_streams,
              bool verbose)
{
  // Scratch for the centering/normalization statistics; sized to zero when it
  // will not be used so no device memory is wasted.
  rmm::device_uvector<T> mean_input(fit_intercept ? input_desc.N : 0, streams[0]);
  rmm::device_uvector<T> mean_labels(fit_intercept ? 1 : 0, streams[0]);
  rmm::device_uvector<T> col_norm2(fit_intercept && normalize ? input_desc.N : 0, streams[0]);

  if (fit_intercept) {
    GLM::opg::preProcessData(handle, input_data, input_desc, labels, mean_input.data(),
                             mean_labels.data(), col_norm2.data(), fit_intercept, normalize,
                             streams, n_streams, verbose);
  }

  // Only the eigen-decomposition solver (algo == 1) is implemented, and it
  // requires more than one column.
  if (algo == 1 && input_desc.N != 1) {
    LinAlg::opg::lstsqEig(handle, input_data, input_desc, labels, coef, streams, n_streams);
  } else {
    ASSERT(false, "olsFit: no algorithm with this id has been implemented");
  }

  if (fit_intercept) {
    GLM::opg::postProcessData(handle, input_data, input_desc, labels, coef, intercept,
                              mean_input.data(), mean_labels.data(), col_norm2.data(),
                              fit_intercept, normalize, streams, n_streams, verbose);
  } else {
    // No centering happened, so the intercept is identically zero.
    *intercept = T(0);
  }
}
/**
* @brief performs MNMG fit operation for the ols
* @input param handle: the internal cuml handle object
* @input param rank_sizes: includes all the partition size information for the rank
* @input param n_parts: number of partitions
* @input param input: input data
* @input param labels: labels data
* @output param coef: learned regression coefficients
* @output param intercept: intercept value
* @input param fit_intercept: fit intercept or not
* @input param normalize: normalize the data or not
* @input param verbose
*/
template <typename T>
void fit_impl(raft::handle_t& handle,
              std::vector<Matrix::Data<T>*>& input_data,
              Matrix::PartDescriptor& input_desc,
              std::vector<Matrix::Data<T>*>& labels,
              T* coef,
              T* intercept,
              bool fit_intercept,
              bool normalize,
              int algo,
              bool verbose)
{
  int rank = handle.get_comms().get_rank();
  // TODO: These streams should come from raft::handle_t
  int n_streams = input_desc.blocksOwnedBy(rank).size();
  // Use std::vector instead of a variable-length array: VLAs are a compiler
  // extension, not standard C++.
  std::vector<cudaStream_t> streams(n_streams);
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamCreate(&streams[i]));
  }
  fit_impl(handle,
           input_data,
           input_desc,
           labels,
           coef,
           intercept,
           fit_intercept,
           normalize,
           algo,
           streams.data(),
           n_streams,
           verbose);
  // Wait for all per-partition work before tearing the streams down.
  for (int i = 0; i < n_streams; i++) {
    handle.sync_stream(streams[i]);
  }
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamDestroy(streams[i]));
  }
}
/**
 * @brief MNMG OLS predict worker: preds[p] = input[p] * coef + intercept,
 *        computed per local partition on round-robin-assigned streams.
 */
template <typename T>
void predict_impl(raft::handle_t& handle,
                  std::vector<Matrix::Data<T>*>& input_data,
                  Matrix::PartDescriptor& input_desc,
                  T* coef,
                  T intercept,
                  std::vector<Matrix::Data<T>*>& preds,
                  cudaStream_t* streams,
                  int n_streams,
                  bool verbose)
{
  std::vector<Matrix::RankSizePair*> local_blocks = input_desc.partsToRanks;
  // GEMM scaling factors: preds = 1 * X * coef + 0 * preds.
  const T one  = T(1);
  const T zero = T(0);
  for (std::size_t p = 0; p < input_data.size(); p++) {
    // Round-robin the local partitions over the available streams.
    cudaStream_t stream = streams[p % static_cast<std::size_t>(n_streams)];
    const auto n_rows   = local_blocks[p]->size;
    raft::linalg::gemm(handle,
                       input_data[p]->ptr,
                       n_rows,
                       input_desc.N,
                       coef,
                       preds[p]->ptr,
                       n_rows,
                       size_t(1),
                       CUBLAS_OP_N,
                       CUBLAS_OP_N,
                       one,
                       zero,
                       stream);
    raft::linalg::addScalar(preds[p]->ptr, preds[p]->ptr, intercept, n_rows, stream);
  }
}
// Multi-GPU predict entry taking raw partition descriptors: wraps the raw
// arrays into OPG containers, creates one CUDA stream per partition, runs
// the stream-based predict_impl, then synchronizes and destroys the streams.
template <typename T>
void predict_impl(raft::handle_t& handle,
                  Matrix::RankSizePair** rank_sizes,
                  size_t n_parts,
                  Matrix::Data<T>** input,
                  size_t n_rows,
                  size_t n_cols,
                  T* coef,
                  T intercept,
                  Matrix::Data<T>** preds,
                  bool verbose)
{
  int rank = handle.get_comms().get_rank();
  std::vector<Matrix::RankSizePair*> ranksAndSizes(rank_sizes, rank_sizes + n_parts);
  std::vector<Matrix::Data<T>*> input_data(input, input + n_parts);
  Matrix::PartDescriptor input_desc(n_rows, n_cols, ranksAndSizes, rank);
  std::vector<Matrix::Data<T>*> preds_data(preds, preds + n_parts);
  // TODO: These streams should come from raft::handle_t
  // Use std::vector instead of a variable-length array: VLAs are a
  // non-standard C++ extension and have undefined behavior when n_parts == 0.
  int n_streams = n_parts;
  std::vector<cudaStream_t> streams(n_streams);
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamCreate(&streams[i]));
  }
  predict_impl(handle,
               input_data,
               input_desc,
               coef,
               intercept,
               preds_data,
               streams.data(),
               n_streams,
               verbose);
  // Wait for all per-partition work to finish before destroying the streams.
  for (int i = 0; i < n_streams; i++) {
    handle.sync_stream(streams[i]);
  }
  for (int i = 0; i < n_streams; i++) {
    RAFT_CUDA_TRY(cudaStreamDestroy(streams[i]));
  }
}
// Public multi-GPU OLS fit entry point (float); forwards to the templated
// fit_impl, which manages the per-block CUDA streams internally.
void fit(raft::handle_t& handle,
         std::vector<Matrix::Data<float>*>& input_data,
         Matrix::PartDescriptor& input_desc,
         std::vector<Matrix::Data<float>*>& labels,
         float* coef,
         float* intercept,
         bool fit_intercept,
         bool normalize,
         int algo,
         bool verbose)
{
  fit_impl(handle,
           input_data,
           input_desc,
           labels,
           coef,
           intercept,
           fit_intercept,
           normalize,
           algo,
           verbose);
}

// Public multi-GPU OLS fit entry point (double-precision overload).
void fit(raft::handle_t& handle,
         std::vector<Matrix::Data<double>*>& input_data,
         Matrix::PartDescriptor& input_desc,
         std::vector<Matrix::Data<double>*>& labels,
         double* coef,
         double* intercept,
         bool fit_intercept,
         bool normalize,
         int algo,
         bool verbose)
{
  fit_impl(handle,
           input_data,
           input_desc,
           labels,
           coef,
           intercept,
           fit_intercept,
           normalize,
           algo,
           verbose);
}
// Public multi-GPU predict entry point (float); forwards to the templated
// predict_impl, which manages the per-partition CUDA streams internally.
void predict(raft::handle_t& handle,
             Matrix::RankSizePair** rank_sizes,
             size_t n_parts,
             Matrix::Data<float>** input,
             size_t n_rows,
             size_t n_cols,
             float* coef,
             float intercept,
             Matrix::Data<float>** preds,
             bool verbose)
{
  predict_impl(handle, rank_sizes, n_parts, input, n_rows, n_cols, coef, intercept, preds, verbose);
}

// Public multi-GPU predict entry point (double-precision overload).
void predict(raft::handle_t& handle,
             Matrix::RankSizePair** rank_sizes,
             size_t n_parts,
             Matrix::Data<double>** input,
             size_t n_rows,
             size_t n_cols,
             double* coef,
             double intercept,
             Matrix::Data<double>** preds,
             bool verbose)
{
  predict_impl(handle, rank_sizes, n_parts, input, n_rows, n_cols, coef, intercept, preds, verbose);
}
} // namespace opg
} // namespace OLS
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/glm.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ols.cuh"
#include "qn/qn.cuh"
#include "ridge.cuh"
#include <cuml/linear_model/glm.hpp>
namespace raft {
class handle_t;
}
namespace ML {
namespace GLM {
// Public single-GPU OLS fit entry point (float); forwards to the templated
// implementation in detail::olsFit.
void olsFit(const raft::handle_t& handle,
            float* input,
            size_t n_rows,
            size_t n_cols,
            float* labels,
            float* coef,
            float* intercept,
            bool fit_intercept,
            bool normalize,
            int algo,
            float* sample_weight)
{
  detail::olsFit(handle,
                 input,
                 n_rows,
                 n_cols,
                 labels,
                 coef,
                 intercept,
                 fit_intercept,
                 normalize,
                 algo,
                 sample_weight);
}

// Public single-GPU OLS fit entry point (double-precision overload).
void olsFit(const raft::handle_t& handle,
            double* input,
            size_t n_rows,
            size_t n_cols,
            double* labels,
            double* coef,
            double* intercept,
            bool fit_intercept,
            bool normalize,
            int algo,
            double* sample_weight)
{
  detail::olsFit(handle,
                 input,
                 n_rows,
                 n_cols,
                 labels,
                 coef,
                 intercept,
                 fit_intercept,
                 normalize,
                 algo,
                 sample_weight);
}
// Public dense-prediction entry point (float): preds = input * coef + intercept.
void gemmPredict(const raft::handle_t& handle,
                 const float* input,
                 size_t n_rows,
                 size_t n_cols,
                 const float* coef,
                 float intercept,
                 float* preds)
{
  detail::gemmPredict(handle, input, n_rows, n_cols, coef, intercept, preds);
}

// Public dense-prediction entry point (double-precision overload).
void gemmPredict(const raft::handle_t& handle,
                 const double* input,
                 size_t n_rows,
                 size_t n_cols,
                 const double* coef,
                 double intercept,
                 double* preds)
{
  detail::gemmPredict(handle, input, n_rows, n_cols, coef, intercept, preds);
}
// Public single-GPU ridge-regression fit entry point (float); forwards to the
// templated implementation in detail::ridgeFit.
void ridgeFit(const raft::handle_t& handle,
              float* input,
              size_t n_rows,
              size_t n_cols,
              float* labels,
              float* alpha,
              int n_alpha,
              float* coef,
              float* intercept,
              bool fit_intercept,
              bool normalize,
              int algo,
              float* sample_weight)
{
  detail::ridgeFit(handle,
                   input,
                   n_rows,
                   n_cols,
                   labels,
                   alpha,
                   n_alpha,
                   coef,
                   intercept,
                   fit_intercept,
                   normalize,
                   algo,
                   sample_weight);
}

// Public single-GPU ridge-regression fit entry point (double-precision overload).
void ridgeFit(const raft::handle_t& handle,
              double* input,
              size_t n_rows,
              size_t n_cols,
              double* labels,
              double* alpha,
              int n_alpha,
              double* coef,
              double* intercept,
              bool fit_intercept,
              bool normalize,
              int algo,
              double* sample_weight)
{
  detail::ridgeFit(handle,
                   input,
                   n_rows,
                   n_cols,
                   labels,
                   alpha,
                   n_alpha,
                   coef,
                   intercept,
                   fit_intercept,
                   normalize,
                   algo,
                   sample_weight);
}
// Public quasi-newton GLM fit on a dense matrix; forwards to detail::qnFit.
template <typename T, typename I>
void qnFit(const raft::handle_t& cuml_handle,
           const qn_params& pams,
           T* X,
           bool X_col_major,
           T* y,
           I N,
           I D,
           I C,
           T* w0,
           T* f,
           int* num_iters,
           T* sample_weight,
           T svr_eps)
{
  detail::qnFit<T>(
    cuml_handle, pams, X, X_col_major, y, N, D, C, w0, f, num_iters, sample_weight, svr_eps);
}

// Explicit instantiations for the supported (float/double, int) combinations.
template void qnFit<float>(const raft::handle_t&,
                           const qn_params&,
                           float*,
                           bool,
                           float*,
                           int,
                           int,
                           int,
                           float*,
                           float*,
                           int*,
                           float*,
                           float);
template void qnFit<double>(const raft::handle_t&,
                            const qn_params&,
                            double*,
                            bool,
                            double*,
                            int,
                            int,
                            int,
                            double*,
                            double*,
                            int*,
                            double*,
                            double);
// Public quasi-newton GLM fit on a CSR matrix (values/cols/row_ids/nnz);
// forwards to detail::qnFitSparse.
template <typename T, typename I>
void qnFitSparse(const raft::handle_t& cuml_handle,
                 const qn_params& pams,
                 T* X_values,
                 I* X_cols,
                 I* X_row_ids,
                 I X_nnz,
                 T* y,
                 I N,
                 I D,
                 I C,
                 T* w0,
                 T* f,
                 int* num_iters,
                 T* sample_weight,
                 T svr_eps)
{
  detail::qnFitSparse<T>(cuml_handle,
                         pams,
                         X_values,
                         X_cols,
                         X_row_ids,
                         X_nnz,
                         y,
                         N,
                         D,
                         C,
                         w0,
                         f,
                         num_iters,
                         sample_weight,
                         svr_eps);
}

// Explicit instantiations for the supported (float/double, int) combinations.
template void qnFitSparse<float>(const raft::handle_t&,
                                 const qn_params&,
                                 float*,
                                 int*,
                                 int*,
                                 int,
                                 float*,
                                 int,
                                 int,
                                 int,
                                 float*,
                                 float*,
                                 int*,
                                 float*,
                                 float);
template void qnFitSparse<double>(const raft::handle_t&,
                                  const qn_params&,
                                  double*,
                                  int*,
                                  int*,
                                  int,
                                  double*,
                                  int,
                                  int,
                                  int,
                                  double*,
                                  double*,
                                  int*,
                                  double*,
                                  double);
// Computes raw (pre-activation) decision scores for a fitted quasi-newton
// model on a dense matrix; forwards to detail::qnDecisionFunction.
template <typename T, typename I>
void qnDecisionFunction(const raft::handle_t& cuml_handle,
                        const qn_params& pams,
                        T* X,
                        bool X_col_major,
                        I N,
                        I D,
                        I C,
                        T* params,
                        T* scores)
{
  detail::qnDecisionFunction<T>(cuml_handle, pams, X, X_col_major, N, D, C, params, scores);
}

// Explicit instantiations for the supported (float/double, int) combinations.
template void qnDecisionFunction<float>(
  const raft::handle_t&, const qn_params&, float*, bool, int, int, int, float*, float*);
template void qnDecisionFunction<double>(
  const raft::handle_t&, const qn_params&, double*, bool, int, int, int, double*, double*);
// Computes raw decision scores for a fitted quasi-newton model on a CSR
// matrix; forwards to detail::qnDecisionFunctionSparse.
template <typename T, typename I>
void qnDecisionFunctionSparse(const raft::handle_t& cuml_handle,
                              const qn_params& pams,
                              T* X_values,
                              I* X_cols,
                              I* X_row_ids,
                              I X_nnz,
                              I N,
                              I D,
                              I C,
                              T* params,
                              T* scores)
{
  detail::qnDecisionFunctionSparse<T>(
    cuml_handle, pams, X_values, X_cols, X_row_ids, X_nnz, N, D, C, params, scores);
}

// Explicit instantiations for the supported (float/double, int) combinations.
template void qnDecisionFunctionSparse<float>(
  const raft::handle_t&, const qn_params&, float*, int*, int*, int, int, int, int, float*, float*);
template void qnDecisionFunctionSparse<double>(const raft::handle_t&,
                                               const qn_params&,
                                               double*,
                                               int*,
                                               int*,
                                               int,
                                               int,
                                               int,
                                               int,
                                               double*,
                                               double*);
// Predicts labels with a fitted quasi-newton model on a dense matrix;
// forwards to detail::qnPredict.
template <typename T, typename I>
void qnPredict(const raft::handle_t& cuml_handle,
               const qn_params& pams,
               T* X,
               bool X_col_major,
               I N,
               I D,
               I C,
               T* params,
               T* scores)
{
  detail::qnPredict<T>(cuml_handle, pams, X, X_col_major, N, D, C, params, scores);
}

// Explicit instantiations for the supported (float/double, int) combinations.
template void qnPredict<float>(
  const raft::handle_t&, const qn_params&, float*, bool, int, int, int, float*, float*);
template void qnPredict<double>(
  const raft::handle_t&, const qn_params&, double*, bool, int, int, int, double*, double*);
// Predicts labels with a fitted quasi-newton model on a CSR matrix;
// forwards to detail::qnPredictSparse.
template <typename T, typename I>
void qnPredictSparse(const raft::handle_t& cuml_handle,
                     const qn_params& pams,
                     T* X_values,
                     I* X_cols,
                     I* X_row_ids,
                     I X_nnz,
                     I N,
                     I D,
                     I C,
                     T* params,
                     T* preds)
{
  detail::qnPredictSparse<T>(
    cuml_handle, pams, X_values, X_cols, X_row_ids, X_nnz, N, D, C, params, preds);
}

// Explicit instantiations for the supported (float/double, int) combinations.
template void qnPredictSparse<float>(
  const raft::handle_t&, const qn_params&, float*, int*, int*, int, int, int, int, float*, float*);
template void qnPredictSparse<double>(const raft::handle_t&,
                                      const qn_params&,
                                      double*,
                                      int*,
                                      int*,
                                      int,
                                      int,
                                      int,
                                      int,
                                      double*,
                                      double*);
} // namespace GLM
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/ols.cuh | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <raft/linalg/add.cuh>
#include <raft/linalg/gemv.cuh>
#include <raft/linalg/lstsq.cuh>
#include <raft/linalg/map.cuh>
#include <raft/linalg/norm.cuh>
#include <raft/linalg/power.cuh>
#include <raft/linalg/sqrt.cuh>
#include <raft/linalg/subtract.cuh>
#include <raft/matrix/math.cuh>
#include <raft/stats/mean.cuh>
#include <raft/stats/mean_center.cuh>
#include <raft/stats/stddev.cuh>
#include <raft/stats/sum.cuh>
#include <rmm/device_uvector.hpp>
#include "preprocess.cuh"
namespace ML {
namespace GLM {
namespace detail {
/**
* @brief fit an ordinary least squares model
* @param handle cuml handle
* @param input device pointer to feature matrix n_rows x n_cols
* @param n_rows number of rows of the feature matrix
* @param n_cols number of columns of the feature matrix
* @param labels device pointer to label vector of length n_rows
* @param coef device pointer to hold the solution for weights of size n_cols
* @param intercept host pointer to hold the solution for bias term of size 1
* @param fit_intercept if true, fit intercept
* @param normalize if true, normalize data to zero mean, unit variance
* @param algo specifies which solver to use (0: SVD, 1: Eigendecomposition, 2:
* QR-decomposition)
* @param sample_weight device pointer to sample weight vector of length n_rows (nullptr for uniform
* weights) This vector is modified during the computation
*/
template <typename math_t>
void olsFit(const raft::handle_t& handle,
            math_t* input,
            size_t n_rows,
            size_t n_cols,
            math_t* labels,
            math_t* coef,
            math_t* intercept,
            bool fit_intercept,
            bool normalize,
            int algo          = 0,
            math_t* sample_weight = nullptr)
{
  cudaStream_t stream = handle.get_stream();
  auto cublas_handle   = handle.get_cublas_handle();
  auto cusolver_handle = handle.get_cusolver_dn_handle();
  ASSERT(n_cols > 0, "olsFit: number of columns cannot be less than one");
  ASSERT(n_rows > 1, "olsFit: number of rows cannot be less than two");
  // Scratch buffers for the centering/scaling statistics; only allocated when
  // fit_intercept (and normalize) require them.
  rmm::device_uvector<math_t> mu_input(0, stream);
  rmm::device_uvector<math_t> norm2_input(0, stream);
  rmm::device_uvector<math_t> mu_labels(0, stream);
  if (fit_intercept) {
    mu_input.resize(n_cols, stream);
    mu_labels.resize(1, stream);
    if (normalize) { norm2_input.resize(n_cols, stream); }
    // Center (and optionally scale) input and labels in place; the statistics
    // are kept so postProcessData can undo the transform and set the intercept.
    preProcessData(handle,
                   input,
                   n_rows,
                   n_cols,
                   labels,
                   intercept,
                   mu_input.data(),
                   mu_labels.data(),
                   norm2_input.data(),
                   fit_intercept,
                   normalize,
                   sample_weight);
  }
  if (sample_weight != nullptr) {
    // Weighted least squares via row scaling: multiplying rows of X and y by
    // sqrt(w) turns the weighted problem into an ordinary one.
    // sample_weight temporarily holds sqrt(w); it is restored below.
    raft::linalg::sqrt(sample_weight, sample_weight, n_rows, stream);
    raft::matrix::matrixVectorBinaryMult(
      input, sample_weight, n_rows, n_cols, false, false, stream);
    raft::linalg::map_k(
      labels,
      n_rows,
      [] __device__(math_t a, math_t b) { return a * b; },
      stream,
      labels,
      sample_weight);
  }
  // Eig/QR-based solvers require n_rows >= n_cols (and n_cols > 1); fall back
  // to the SVD solver otherwise.
  int selectedAlgo = algo;
  if (n_cols > n_rows || n_cols == 1) selectedAlgo = 0;
  raft::common::nvtx::push_range("ML::GLM::olsFit/algo-%d", selectedAlgo);
  switch (selectedAlgo) {
    case 0:
      raft::linalg::lstsqSvdJacobi(handle, input, n_rows, n_cols, labels, coef, stream);
      break;
    case 1: raft::linalg::lstsqEig(handle, input, n_rows, n_cols, labels, coef, stream); break;
    case 2: raft::linalg::lstsqQR(handle, input, n_rows, n_cols, labels, coef, stream); break;
    case 3: raft::linalg::lstsqSvdQR(handle, input, n_rows, n_cols, labels, coef, stream); break;
    default:
      ASSERT(false, "olsFit: no algorithm with this id (%d) has been implemented", algo);
      break;
  }
  raft::common::nvtx::pop_range();
  if (sample_weight != nullptr) {
    // Undo the sqrt(w) row scaling on input/labels and restore the caller's
    // sample_weight by squaring it back.
    raft::matrix::matrixVectorBinaryDivSkipZero(
      input, sample_weight, n_rows, n_cols, false, false, stream);
    raft::linalg::map_k(
      labels,
      n_rows,
      [] __device__(math_t a, math_t b) { return a / b; },
      stream,
      labels,
      sample_weight);
    raft::linalg::powerScalar(sample_weight, sample_weight, (math_t)2, n_rows, stream);
  }
  if (fit_intercept) {
    // Rescale coef (if normalized), compute the intercept, and restore
    // input/labels to their original values.
    postProcessData(handle,
                    input,
                    n_rows,
                    n_cols,
                    labels,
                    coef,
                    intercept,
                    mu_input.data(),
                    mu_labels.data(),
                    norm2_input.data(),
                    fit_intercept,
                    normalize);
  } else {
    *intercept = math_t(0);
  }
}
/**
* @brief to make predictions with a fitted ordinary least squares and ridge regression model
* @param handle cuml ahndle
* @param input device pointer to feature matrix n_rows x n_cols
* @param n_rows number of rows of the feature matrix
* @param n_cols number of columns of the feature matrix
* @param coef coefficients of the model
* @param intercept bias term of the model
* @param preds device pointer to store predictions of size n_rows
*/
template <typename math_t>
void gemmPredict(const raft::handle_t& handle,
                 const math_t* input,
                 size_t n_rows,
                 size_t n_cols,
                 const math_t* coef,
                 math_t intercept,
                 math_t* preds)
{
  ASSERT(n_cols > 0, "gemmPredict: number of columns cannot be less than one");
  ASSERT(n_rows > 0, "gemmPredict: number of rows cannot be less than one");
  cudaStream_t stream = handle.get_stream();
  // preds (n_rows x 1) = 1 * input (n_rows x n_cols) * coef (n_cols x 1) + 0
  const math_t one  = math_t(1);
  const math_t zero = math_t(0);
  raft::linalg::gemm(handle,
                     input,
                     n_rows,
                     n_cols,
                     coef,
                     preds,
                     n_rows,
                     1,
                     CUBLAS_OP_N,
                     CUBLAS_OP_N,
                     one,
                     zero,
                     stream);
  // Skip the extra kernel launch when there is no bias term to add.
  if (intercept != math_t(0)) { raft::linalg::addScalar(preds, preds, intercept, n_rows, stream); }
}
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/glm_api.cpp | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/glm_api.h>
#include <cuml/linear_model/qn.h>
#include <common/cumlHandle.hpp>
#include <cuml/linear_model/glm.hpp>
namespace ML::GLM {
extern "C" {
// C API wrapper: fit a quasi-newton GLM on single-precision data.
// Resolves the opaque cumlHandle_t to a raft::handle_t and converts any
// C++ exception into a cumlError_t status code.
cumlError_t cumlSpQnFit(cumlHandle_t cuml_handle,
                        const qn_params* pams,
                        float* X,
                        float* y,
                        int N,
                        int D,
                        int C,
                        float* w0,
                        float* f,
                        int* num_iters,
                        bool X_col_major)
{
  auto [handle_ptr, status] = ML::handleMap.lookupHandlePointer(cuml_handle);
  if (status != CUML_SUCCESS) { return status; }
  try {
    qnFit(*handle_ptr, *pams, X, X_col_major, y, N, D, C, w0, f, num_iters);
  } catch (...) {
    // TODO: translate known cuML exceptions into specific error codes.
    status = CUML_ERROR_UNKNOWN;
  }
  return status;
}
// C API wrapper: fit a quasi-newton GLM on double-precision data.
// Resolves the opaque cumlHandle_t to a raft::handle_t and converts any
// C++ exception into a cumlError_t status code.
cumlError_t cumlDpQnFit(cumlHandle_t cuml_handle,
                        const qn_params* pams,
                        double* X,
                        double* y,
                        int N,
                        int D,
                        int C,
                        double* w0,
                        double* f,
                        int* num_iters,
                        bool X_col_major)
{
  auto [handle_ptr, status] = ML::handleMap.lookupHandlePointer(cuml_handle);
  if (status != CUML_SUCCESS) { return status; }
  try {
    qnFit(*handle_ptr, *pams, X, X_col_major, y, N, D, C, w0, f, num_iters);
  } catch (...) {
    // TODO: translate known cuML exceptions into specific error codes.
    status = CUML_ERROR_UNKNOWN;
  }
  return status;
}
}
} // namespace ML::GLM
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/qn_mg.cu | /*
* Copyright (c) 2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "qn/mg/qn_mg.cuh"
#include "qn/simple_mat/dense.hpp"
#include <cuda_runtime.h>
#include <cuml/common/logger.hpp>
#include <cuml/linear_model/qn.h>
#include <cuml/linear_model/qn_mg.hpp>
#include <raft/core/comms.hpp>
#include <raft/core/device_mdarray.hpp>
#include <raft/core/error.hpp>
#include <raft/core/handle.hpp>
#include <raft/label/classlabels.cuh>
#include <raft/util/cudart_utils.hpp>
#include <vector>
using namespace MLCommon;
namespace ML {
namespace GLM {
namespace opg {
// Computes the globally-distinct values of y across all ranks: each rank
// deduplicates its local labels, the local sets are allgathered, and the
// concatenation is deduplicated again. Returns the result on the host.
template <typename T>
std::vector<T> distinct_mg(const raft::handle_t& handle, T* y, size_t n)
{
  cudaStream_t stream              = handle.get_stream();
  raft::comms::comms_t const& comm = raft::resource::get_comms(handle);
  int n_ranks                      = comm.get_size();

  // Local distinct labels on this rank.
  rmm::device_uvector<T> unique_y(0, stream);
  raft::label::getUniquelabels(unique_y, y, n, stream);

  // Gather the number of local distinct labels from every rank.
  rmm::device_uvector<size_t> recv_counts(n_ranks, stream);
  auto send_count = raft::make_device_scalar<size_t>(handle, unique_y.size());
  comm.allgather(send_count.data_handle(), recv_counts.data(), 1, stream);
  comm.sync_stream(stream);

  std::vector<size_t> recv_counts_host(n_ranks);
  raft::copy(recv_counts_host.data(), recv_counts.data(), n_ranks, stream);
  // raft::copy is asynchronous; synchronize before reading the host buffer.
  handle.sync_stream(stream);

  // Displacements (prefix sums) for the variable-sized allgather.
  std::vector<size_t> displs(n_ranks);
  size_t pos = 0;
  for (int i = 0; i < n_ranks; ++i) {
    displs[i] = pos;
    pos += recv_counts_host[i];
  }

  // Gather every rank's local distinct set into one buffer on all ranks.
  rmm::device_uvector<T> recv_buff(displs.back() + recv_counts_host.back(), stream);
  comm.allgatherv(
    unique_y.data(), recv_buff.data(), recv_counts_host.data(), displs.data(), stream);
  comm.sync_stream(stream);

  // Deduplicate the concatenation to obtain the global distinct label set.
  rmm::device_uvector<T> global_unique_y(0, stream);
  raft::label::getUniquelabels(global_unique_y, recv_buff.data(), recv_buff.size(), stream);

  std::vector<T> global_unique_y_host(global_unique_y.size());
  raft::copy(global_unique_y_host.data(), global_unique_y.data(), global_unique_y.size(), stream);
  // Synchronize so the async device->host copy completes before returning.
  handle.sync_stream(stream);
  return global_unique_y_host;
}
// Wraps the local X partition in a SimpleDenseMat view and invokes the
// multi-GPU quasi-newton fit. sample_weight and svr_eps are not supported yet.
template <typename T>
void qnFit_impl(const raft::handle_t& handle,
                const qn_params& pams,
                T* X,
                bool X_col_major,
                T* y,
                size_t N,
                size_t D,
                size_t C,
                T* w0,
                T* f,
                int* num_iters,
                size_t n_samples,
                int rank,
                int n_ranks)
{
  auto X_simple = SimpleDenseMat<T>(X, N, D, X_col_major ? COL_MAJOR : ROW_MAJOR);
  ML::GLM::opg::qn_fit_x_mg(handle,
                            pams,
                            X_simple,
                            y,
                            C,
                            w0,
                            f,
                            num_iters,
                            n_samples,
                            rank,
                            n_ranks);  // ignore sample_weight, svr_eps
  return;
}
// Multi-GPU qnFit entry: unpacks the single local data/label chunk from the
// OPG descriptors and forwards to the flat-pointer implementation above.
template <typename T>
void qnFit_impl(raft::handle_t& handle,
                std::vector<Matrix::Data<T>*>& input_data,
                Matrix::PartDescriptor& input_desc,
                std::vector<Matrix::Data<T>*>& labels,
                T* coef,
                const qn_params& pams,
                bool X_col_major,
                int n_classes,
                T* f,
                int* num_iters)
{
  RAFT_EXPECTS(input_data.size() == 1,
               "qn_mg.cu currently does not accept more than one input matrix");
  RAFT_EXPECTS(labels.size() == input_data.size(), "labels size does not equal to input_data size");

  auto data_X = input_data[0];
  auto data_y = labels[0];

  // Note: the global sample count is input_desc.M (previously this function
  // also accumulated partsToRanks sizes into an unused local; removed).
  qnFit_impl<T>(handle,
                pams,
                data_X->ptr,
                X_col_major,
                data_y->ptr,
                input_desc.totalElementsOwnedBy(input_desc.rank),
                input_desc.N,
                n_classes,
                coef,
                f,
                num_iters,
                input_desc.M,
                input_desc.rank,
                input_desc.uniqueRanks().size());
}
// Returns the globally-distinct label values across all ranks for the
// (single) local label chunk owned by this rank.
std::vector<float> getUniquelabelsMG(const raft::handle_t& handle,
                                     Matrix::PartDescriptor& input_desc,
                                     std::vector<Matrix::Data<float>*>& labels)
{
  RAFT_EXPECTS(labels.size() == 1,
               "getUniqueLabelsMG currently does not accept more than one data chunk");
  Matrix::Data<float>* data_y = labels[0];
  // Keep size_t to avoid narrowing: a rank may own more than INT_MAX rows.
  size_t n_rows = input_desc.totalElementsOwnedBy(input_desc.rank);
  return distinct_mg<float>(handle, data_y->ptr, n_rows);
}
// Public multi-GPU quasi-newton fit entry point (float); forwards to the
// templated qnFit_impl.
void qnFit(raft::handle_t& handle,
           std::vector<Matrix::Data<float>*>& input_data,
           Matrix::PartDescriptor& input_desc,
           std::vector<Matrix::Data<float>*>& labels,
           float* coef,
           const qn_params& pams,
           bool X_col_major,
           int n_classes,
           float* f,
           int* num_iters)
{
  qnFit_impl<float>(
    handle, input_data, input_desc, labels, coef, pams, X_col_major, n_classes, f, num_iters);
}
}; // namespace opg
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/glm/preprocess.cuh | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <raft/core/handle.hpp>
#include <raft/linalg/gemm.cuh>
#include <raft/linalg/norm.cuh>
#include <raft/matrix/math.cuh>
#include <raft/matrix/matrix.cuh>
#include <raft/stats/mean.cuh>
#include <raft/stats/mean_center.cuh>
#include <raft/stats/meanvar.cuh>
#include <raft/stats/stddev.cuh>
#include <raft/stats/weighted_mean.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_scalar.hpp>
#include <rmm/device_uvector.hpp>
namespace ML {
namespace GLM {
/**
 * @brief Center and scale the data, depending on the flags fit_intercept and normalize
 *
 * @tparam math_t the element type
 * @param [in] handle cuML handle providing the CUDA stream and library handles
 * @param [inout] input the column-major data of size [n_rows, n_cols]
 * @param [in] n_rows
 * @param [in] n_cols
 * @param [inout] labels vector of size [n_rows]
 * @param [out] intercept
 * @param [out] mu_input the column-wise means of the input of size [n_cols]
 * @param [out] mu_labels the scalar mean of the target (labels vector)
 * @param [out] norm2_input the column-wise standard deviations of the input of size [n_cols];
 *   note, the biased estimator is used to match sklearn's StandardScaler
 *   (dividing by n_rows, not by (n_rows - 1)).
 * @param [in] fit_intercept whether to center the data / to fit the intercept
 * @param [in] normalize whether to normalize the data
 * @param [in] sample_weight device pointer to sample weights of size [n_rows]
 *   (nullptr for uniform weights)
 */
template <typename math_t>
void preProcessData(const raft::handle_t& handle,
                    math_t* input,
                    size_t n_rows,
                    size_t n_cols,
                    math_t* labels,
                    math_t* intercept,
                    math_t* mu_input,
                    math_t* mu_labels,
                    math_t* norm2_input,
                    bool fit_intercept,
                    bool normalize,
                    math_t* sample_weight = nullptr)
{
  cudaStream_t stream = handle.get_stream();
  raft::common::nvtx::range fun_scope("ML::GLM::preProcessData-%d-%d", n_rows, n_cols);
  ASSERT(n_cols > 0, "Parameter n_cols: number of columns cannot be less than one");
  ASSERT(n_rows > 1, "Parameter n_rows: number of rows cannot be less than two");
  if (fit_intercept) {
    // Fast fused path (unweighted + normalize): one pass for mean/variance,
    // then a single kernel to center and scale.
    if (normalize && sample_weight == nullptr) {
      raft::stats::meanvar(mu_input, norm2_input, input, n_cols, n_rows, false, false, stream);
      // meanvar yields variances; take sqrt to obtain standard deviations.
      raft::linalg::unaryOp(
        norm2_input,
        norm2_input,
        n_cols,
        [] __device__(math_t v) { return raft::mySqrt(v); },
        stream);
      // Center and scale each column; columns with near-zero spread are
      // zeroed out to avoid dividing by (almost) zero.
      raft::matrix::linewiseOp(
        input,
        input,
        n_rows,
        n_cols,
        false,
        [] __device__(math_t x, math_t m, math_t s) { return s > 1e-10 ? (x - m) / s : 0; },
        stream,
        mu_input,
        norm2_input);
    } else {
      // Compute column means (weighted if sample_weight given) and center
      // the input in place.
      if (sample_weight != nullptr) {
        raft::stats::weightedMean(
          mu_input, input, sample_weight, n_cols, n_rows, false, false, stream);
      } else {
        raft::stats::mean(mu_input, input, n_cols, n_rows, false, false, stream);
      }
      raft::stats::meanCenter(input, input, mu_input, n_cols, n_rows, false, true, stream);
      if (normalize) {
        // After centering, divide each column by its L2 norm (skip zeros).
        raft::linalg::colNorm(norm2_input,
                              input,
                              n_cols,
                              n_rows,
                              raft::linalg::L2Norm,
                              false,
                              stream,
                              [] __device__(math_t v) { return raft::mySqrt(v); });
        raft::matrix::matrixVectorBinaryDivSkipZero(
          input, norm2_input, n_rows, n_cols, false, true, stream, true);
      }
    }
    // Center the labels around their (weighted) mean as well.
    if (sample_weight != nullptr) {
      raft::stats::weightedMean(
        mu_labels, labels, sample_weight, (size_t)1, n_rows, true, false, stream);
    } else {
      raft::stats::mean(mu_labels, labels, (size_t)1, n_rows, false, false, stream);
    }
    raft::stats::meanCenter(labels, labels, mu_labels, (size_t)1, n_rows, false, true, stream);
  }
}
// Undoes the preprocessing applied by preProcessData and derives the
// intercept: rescales the coefficients (if normalized), computes
// intercept = mean(labels) - mean(input) . coef, and restores input/labels
// to their original (uncentered, unscaled) values.
template <typename math_t>
void postProcessData(const raft::handle_t& handle,
                     math_t* input,
                     size_t n_rows,
                     size_t n_cols,
                     math_t* labels,
                     math_t* coef,
                     math_t* intercept,
                     math_t* mu_input,
                     math_t* mu_labels,
                     math_t* norm2_input,
                     bool fit_intercept,
                     bool normalize)
{
  cudaStream_t stream = handle.get_stream();
  raft::common::nvtx::range fun_scope("ML::GLM::postProcessData-%d-%d", n_rows, n_cols);
  ASSERT(n_cols > 0, "Parameter n_cols: number of columns cannot be less than one");
  ASSERT(n_rows > 1, "Parameter n_rows: number of rows cannot be less than two");
  cublasHandle_t cublas_handle = handle.get_cublas_handle();
  rmm::device_scalar<math_t> d_intercept(stream);
  if (normalize) {
    // Map coefficients back to the original feature scale (skip zero norms).
    raft::matrix::matrixVectorBinaryDivSkipZero(
      coef, norm2_input, (size_t)1, n_cols, false, true, stream, true);
  }
  // d_intercept = mu_input . coef (1 x n_cols times n_cols x 1).
  raft::linalg::gemm(handle,
                     mu_input,
                     (size_t)1,
                     n_cols,
                     coef,
                     d_intercept.data(),
                     1,
                     1,
                     CUBLAS_OP_N,
                     CUBLAS_OP_N,
                     stream);
  // intercept = mean(labels) - mu_input . coef
  raft::linalg::subtract(d_intercept.data(), mu_labels, d_intercept.data(), 1, stream);
  *intercept = d_intercept.value(stream);
  if (normalize) {
    // Restore the input: x_orig = s * x_scaled + m per column.
    raft::matrix::linewiseOp(
      input,
      input,
      n_rows,
      n_cols,
      false,
      [] __device__(math_t x, math_t m, math_t s) { return s * x + m; },
      stream,
      mu_input,
      norm2_input);
  } else {
    raft::stats::meanAdd(input, input, mu_input, n_cols, n_rows, false, true, stream);
  }
  // Restore the labels by adding their mean back.
  raft::stats::meanAdd(labels, labels, mu_labels, (size_t)1, n_rows, false, true, stream);
}
}; // namespace GLM
}; // namespace ML
// end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/glm_linear.cuh | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "glm_base.cuh"
#include "simple_mat.cuh"
#include <raft/linalg/add.cuh>
#include <raft/util/cuda_utils.cuh>
namespace ML {
namespace GLM {
namespace detail {
// Squared (L2) loss for linear regression: l(y, z) = 0.5 * (z - y)^2.
template <typename T>
struct SquaredLoss : GLMBase<T, SquaredLoss<T>> {
  typedef GLMBase<T, SquaredLoss<T>> Super;

  // Pointwise loss value for target y and linear prediction z.
  const struct Lz {
    inline __device__ T operator()(const T y, const T z) const
    {
      T diff = z - y;
      return diff * diff * 0.5;
    }
  } lz;

  // Pointwise derivative of the loss w.r.t. the linear prediction z.
  const struct Dlz {
    inline __device__ T operator()(const T y, const T z) const { return z - y; }
  } dlz;

  SquaredLoss(const raft::handle_t& handle, int D, bool has_bias)
    : Super(handle, D, 1, has_bias), lz{}, dlz{}
  {
  }

  // Gradient magnitude used for the convergence check: 0.5 * ||grad||_2^2.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return squaredNorm(grad, dev_scalar, stream) * 0.5;
  }
};
// Absolute (L1) loss for robust regression: l(y, z) = |z - y|.
template <typename T>
struct AbsLoss : GLMBase<T, AbsLoss<T>> {
  typedef GLMBase<T, AbsLoss<T>> Super;

  // Pointwise loss value for target y and linear prediction z.
  const struct Lz {
    inline __device__ T operator()(const T y, const T z) const { return raft::myAbs<T>(z - y); }
  } lz;

  // Subgradient of |z - y| w.r.t. z: sign(z - y), with 0 at the kink.
  const struct Dlz {
    inline __device__ T operator()(const T y, const T z) const
    {
      return z > y ? 1 : (z < y ? -1 : 0);
    }
  } dlz;

  AbsLoss(const raft::handle_t& handle, int D, bool has_bias)
    : Super(handle, D, 1, has_bias), lz{}, dlz{}
  {
  }

  // Gradient magnitude used for the convergence check: ||grad||_1.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return nrm1(grad, dev_scalar, stream);
  }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/qn_linesearch.cuh | /*
* Copyright (c) 2018-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "qn_util.cuh"
/*
* Linesearch functions
*/
namespace ML {
namespace GLM {
namespace detail {
// Projected line-search step used by the L1-regularized (OWL-QN style)
// solver: each coordinate of x moves along the search direction and is
// projected back onto the orthant of xp (or, where xp == 0, the orthant
// given by the negative pseudo-gradient).
template <typename T>
struct LSProjectedStep {
  typedef SimpleVec<T> Vector;
  struct op_pstep {
    T step;  // step length along the search direction
    op_pstep(const T s) : step(s) {}

    // xp: previous coordinate, drt: search direction, pg: pseudo-gradient.
    HDI T operator()(const T xp, const T drt, const T pg) const
    {
      // Reference orthant: xp itself, or -pg when xp sits exactly at zero.
      T xi = xp == 0 ? -pg : xp;
      return project_orth(xp + step * drt, xi);
    }
  };

  // x = project(xp + step * drt), elementwise, on the given stream.
  void operator()(const T step,
                  Vector& x,
                  const Vector& drt,
                  const Vector& xp,
                  const Vector& pgrad,
                  cudaStream_t stream) const
  {
    op_pstep pstep(step);
    x.assign_ternary(xp, drt, pgrad, pstep, stream);
  }
};
// Evaluates the line-search termination condition selected by
// param.linesearch (Armijo, Wolfe, or strong Wolfe). Returns true when the
// condition is satisfied; otherwise sets *width to the factor by which the
// caller should scale the step size (decrease or increase).
template <typename T>
inline bool ls_success(const LBFGSParam<T>& param,
                       const T fx_init,
                       const T dg_init,
                       const T fx,
                       const T dg_test,
                       const T step,
                       const SimpleVec<T>& grad,
                       const SimpleVec<T>& drt,
                       T* width,
                       T* dev_scalar,
                       cudaStream_t stream)
{
  // Sufficient-decrease (Armijo) check.
  if (fx > fx_init + step * dg_test) {
    *width = param.ls_dec;
  } else {
    // Armijo condition is met
    if (param.linesearch == LBFGS_LS_BT_ARMIJO) return true;

    // Curvature check: projection of the new gradient on the direction.
    const T dg = dot(grad, drt, dev_scalar, stream);
    if (dg < param.wolfe * dg_init) {
      *width = param.ls_inc;
    } else {
      // Regular Wolfe condition is met
      if (param.linesearch == LBFGS_LS_BT_WOLFE) return true;

      if (dg > -param.wolfe * dg_init) {
        *width = param.ls_dec;
      } else {
        // Strong Wolfe condition is met
        return true;
      }
    }
  }
  return false;
}
/**
* Backtracking linesearch
*
* \param param LBFGS parameters
* \param f A function object such that `f(x, grad)` returns the
* objective function value at `x`, and overwrites `grad`
* with the gradient.
* \param fx In: The objective function value at the current point.
* Out: The function value at the new point.
* \param x Out: The new point moved to.
* \param grad In: The current gradient vector.
* Out: The gradient at the new point.
* \param step In: The initial step length.
* Out: The calculated step length.
* \param drt The current moving direction.
* \param xp The current point.
 * \param dev_scalar Device pointer to workspace of at least 1
 * \param stream CUDA stream on which to run the computations
*/
template <typename T, typename Function>
LINE_SEARCH_RETCODE ls_backtrack(const LBFGSParam<T>& param,
                                 Function& f,
                                 T& fx,
                                 SimpleVec<T>& x,
                                 SimpleVec<T>& grad,
                                 T& step,
                                 const SimpleVec<T>& drt,
                                 const SimpleVec<T>& xp,
                                 T* dev_scalar,
                                 cudaStream_t stream)
{
  // Check the value of step
  if (step <= T(0)) return LS_INVALID_STEP;

  // Save the function value at the current x
  const T fx_init = fx;
  // Projection of gradient on the search direction
  const T dg_init = dot(grad, drt, dev_scalar, stream);
  // Make sure d points to a descent direction
  if (dg_init > 0) return LS_INVALID_DIR;

  // Armijo sufficient-decrease threshold slope.
  const T dg_test = param.ftol * dg_init;
  T width;  // multiplicative factor applied to step between iterations

  CUML_LOG_TRACE("Starting line search fx_init=%f, dg_init=%f", fx_init, dg_init);

  int iter;
  for (iter = 0; iter < param.max_linesearch; iter++) {
    // x_{k+1} = x_k + step * d_k
    x.axpy(step, drt, xp, stream);
    // Evaluate this candidate
    fx = f(x, grad, dev_scalar, stream);
    CUML_LOG_TRACE("Line search iter %d, fx=%f", iter, fx);
    // if (is_success(fx_init, dg_init, fx, dg_test, step, grad, drt, &width))
    if (ls_success(
          param, fx_init, dg_init, fx, dg_test, step, grad, drt, &width, dev_scalar, stream))
      return LS_SUCCESS;
    if (step < param.min_step) return LS_INVALID_STEP_MIN;
    if (step > param.max_step) return LS_INVALID_STEP_MAX;
    // Grow or shrink the step according to the condition that failed.
    step *= width;
  }
  return LS_MAX_ITERS_REACHED;
}
/**
 * Backtracking line search with an orthant projection, used by OWL-QN.
 *
 * Same contract as ls_backtrack, except each candidate point is projected
 * back onto the orthant defined by the pseudo-gradient, and the descent test
 * uses the pseudo-gradient instead of the plain gradient.
 *
 * \param pseudo_grad Pseudo-gradient of the l1-regularized objective at xp.
 * \param l1_penalty  L1 penalty strength (the wrapped `f` is expected to add
 *                    the l1 term to the returned value itself).
 */
template <typename T, typename Function>
LINE_SEARCH_RETCODE ls_backtrack_projected(const LBFGSParam<T>& param,
                                           Function& f,
                                           T& fx,
                                           SimpleVec<T>& x,
                                           SimpleVec<T>& grad,
                                           const SimpleVec<T>& pseudo_grad,
                                           T& step,
                                           const SimpleVec<T>& drt,
                                           const SimpleVec<T>& xp,
                                           T l1_penalty,
                                           T* dev_scalar,
                                           cudaStream_t stream)
{
  // Functor computing the projected update x = proj(xp + step * drt).
  LSProjectedStep<T> lsstep;
  // Check the value of step
  if (step <= T(0)) return LS_INVALID_STEP;
  // Save the function value at the current x
  const T fx_init = fx;
  // Projection of gradient on the search direction
  const T dg_init = dot(pseudo_grad, drt, dev_scalar, stream);
  // Make sure d points to a descent direction
  if (dg_init > 0) return LS_INVALID_DIR;
  // Armijo sufficient-decrease slope.
  const T dg_test = param.ftol * dg_init;
  T width;  // grow/shrink factor filled in by ls_success on failure
  int iter;
  for (iter = 0; iter < param.max_linesearch; iter++) {
    // x_{k+1} = proj_orth(x_k + step * d_k)
    lsstep(step, x, drt, xp, pseudo_grad, stream);
    // evaluates fx with l1 term, but only grad of the loss term
    fx = f(x, grad, dev_scalar, stream);
    // if (is_success(fx_init, dg_init, fx, dg_test, step, pseudo_grad, drt,
    // &width))
    if (ls_success(
          param, fx_init, dg_init, fx, dg_test, step, pseudo_grad, drt, &width, dev_scalar, stream))
      return LS_SUCCESS;
    if (step < param.min_step) return LS_INVALID_STEP_MIN;
    if (step > param.max_step) return LS_INVALID_STEP_MAX;
    step *= width;
  }
  return LS_MAX_ITERS_REACHED;
}
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/glm_logistic.cuh | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "glm_base.cuh"
#include "simple_mat.cuh"
#include <raft/linalg/add.cuh>
#include <raft/util/cuda_utils.cuh>
namespace ML {
namespace GLM {
namespace detail {
/**
 * Binary logistic (cross-entropy) loss for labels y in {0, 1}.
 */
template <typename T>
struct LogisticLoss : GLMBase<T, LogisticLoss<T>> {
  typedef GLMBase<T, LogisticLoss<T>> Super;

  // Per-element loss value: -log(sigmoid(s * z)) with s = 2*y - 1.
  const struct Lz {
    // Numerically stable log(sigmoid(x)): exp() is only ever applied to a
    // non-positive argument, so it cannot overflow.
    inline __device__ T log_sigmoid(const T x) const
    {
      const T neg_abs  = x < 0 ? x : -x;
      const T softplus = raft::myLog(1 + raft::myExp(neg_abs));
      return x < 0 ? x - softplus : -softplus;
    }
    inline __device__ T operator()(const T y, const T z) const
    {
      const T sign = 2 * y - 1;  // map label from {0, 1} to {-1, +1}
      return -log_sigmoid(sign * z);
    }
  } lz;

  // Derivative w.r.t. z: sigmoid(z) - y, with the same overflow-safe exp().
  const struct Dlz {
    inline __device__ T operator()(const T y, const T z) const
    {
      const T e   = raft::myExp(z < 0 ? z : -z);
      const T num = z < 0 ? e : T(1.0);
      return num / (T(1.0) + e) - y;
    }
  } dlz;

  LogisticLoss(const raft::handle_t& handle, int D, bool has_bias)
    : Super(handle, D, 1, has_bias), lz{}, dlz{}
  {
  }

  // Convergence is measured in the max-norm of the gradient.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return nrmMax(grad, dev_scalar, stream);
  }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/glm_svm.cuh | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "glm_base.cuh"
#include "simple_mat.cuh"
#include <raft/linalg/add.cuh>
#include <raft/util/cuda_utils.cuh>
namespace ML {
namespace GLM {
namespace detail {
/**
 * Hinge loss for SVM classification: max(0, 1 - s*z), s = 2*y - 1 in {-1, +1}.
 */
template <typename T>
struct SVCL1Loss : GLMBase<T, SVCL1Loss<T>> {
  typedef GLMBase<T, SVCL1Loss<T>> Super;

  const struct Lz {
    inline __device__ T operator()(const T y, const T z) const
    {
      const T sign   = 2 * y - 1;
      const T margin = 1 - sign * z;
      return raft::myMax<T>(0, margin);
    }
  } lz;

  // Subgradient w.r.t. z: -s inside the margin, 0 outside.
  const struct Dlz {
    inline __device__ T operator()(const T y, const T z) const
    {
      const T sign = 2 * y - 1;
      return sign * z <= 1 ? -sign : 0;
    }
  } dlz;

  SVCL1Loss(const raft::handle_t& handle, int D, bool has_bias)
    : Super(handle, D, 1, has_bias), lz{}, dlz{}
  {
  }

  // Convergence is measured in the L1 norm of the gradient.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return nrm1(grad, dev_scalar, stream);
  }
};
/**
 * Squared hinge loss for SVM classification: (max(0, 1 - s*z))^2, s = 2*y - 1.
 */
template <typename T>
struct SVCL2Loss : GLMBase<T, SVCL2Loss<T>> {
  typedef GLMBase<T, SVCL2Loss<T>> Super;
  const struct Lz {
    inline __device__ T operator()(const T y, const T z) const
    {
      T s = 2 * y - 1;
      T t = raft::myMax<T>(0, 1 - s * z);
      return t * t;
    }
  } lz;
  // Derivative w.r.t. z (zero outside the margin).
  // NOTE(review): d/dz of (1 - s*z)^2 is 2*(z - s) for s in {-1,+1}; the
  // factor 2 is omitted here — presumably a deliberate scaling, but verify
  // against SVRL2Loss below, which does keep the factor 2.
  const struct Dlz {
    inline __device__ T operator()(const T y, const T z) const
    {
      T s = 2 * y - 1;
      return s * z <= 1 ? z - s : 0;
    }
  } dlz;
  SVCL2Loss(const raft::handle_t& handle, int D, bool has_bias)
    : Super(handle, D, 1, has_bias), lz{}, dlz{}
  {
  }
  // Convergence metric: half the squared L2 norm of the gradient.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return squaredNorm(grad, dev_scalar, stream) * 0.5;
  }
};
/**
 * Epsilon-insensitive loss for SVM regression: max(0, |y - z| - sensitivity).
 */
template <typename T>
struct SVRL1Loss : GLMBase<T, SVRL1Loss<T>> {
  typedef GLMBase<T, SVRL1Loss<T>> Super;

  const struct Lz {
    T sensitivity;
    inline __device__ T operator()(const T y, const T z) const
    {
      const T r = y - z;
      if (r > sensitivity) return r - sensitivity;
      if (r < -sensitivity) return -r - sensitivity;
      return 0;
    }
  } lz;

  // Subgradient w.r.t. z: -1 / +1 outside the insensitive tube, 0 inside.
  const struct Dlz {
    T sensitivity;
    inline __device__ T operator()(const T y, const T z) const
    {
      const T r = y - z;
      if (r > sensitivity) return -1;
      if (r < -sensitivity) return 1;
      return 0;
    }
  } dlz;

  SVRL1Loss(const raft::handle_t& handle, int D, bool has_bias, T sensitivity)
    : Super(handle, D, 1, has_bias), lz{sensitivity}, dlz{sensitivity}
  {
  }

  // Convergence is measured in the L1 norm of the gradient.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return nrm1(grad, dev_scalar, stream);
  }
};
/**
 * Squared epsilon-insensitive loss for SVM regression:
 * (max(0, |y - z| - sensitivity))^2.
 */
template <typename T>
struct SVRL2Loss : GLMBase<T, SVRL2Loss<T>> {
  typedef GLMBase<T, SVRL2Loss<T>> Super;
  const struct Lz {
    T sensitivity;
    inline __device__ T operator()(const T y, const T z) const
    {
      // Soft-thresholded residual s = max(0, |t| - sensitivity), t = y - z.
      T t = y - z;
      T s = t > sensitivity ? t - sensitivity : t < -sensitivity ? -t - sensitivity : 0;
      return s * s;
    }
  } lz;
  // Derivative w.r.t. z: -2 * sign(t) * max(0, |t| - sensitivity),
  // zero inside the insensitive tube.
  const struct Dlz {
    T sensitivity;
    inline __device__ T operator()(const T y, const T z) const
    {
      T t = y - z;
      return -2 * (t > sensitivity ? t - sensitivity : t < -sensitivity ? (t + sensitivity) : 0);
    }
  } dlz;
  SVRL2Loss(const raft::handle_t& handle, int D, bool has_bias, T sensitivity)
    : Super(handle, D, 1, has_bias), lz{sensitivity}, dlz{sensitivity}
  {
  }
  // Convergence metric: half the squared L2 norm of the gradient.
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return squaredNorm(grad, dev_scalar, stream) * 0.5;
  }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/glm_regularizer.cuh | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "simple_mat.cuh"
#include <raft/linalg/add.cuh>
#include <raft/linalg/map_then_reduce.cuh>
#include <raft/stats/mean.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
namespace ML {
namespace GLM {
namespace detail {
/**
 * @brief L2 (ridge / Tikhonov) regularizer: adds 0.5 * l2_penalty * ||W||^2
 *        to the objective, excluding the bias column.
 */
template <typename T>
struct Tikhonov {
  T l2_penalty;

  Tikhonov(T l2) : l2_penalty(l2) {}
  // The compiler-generated copy is identical to the hand-written one the
  // code used to carry; prefer `= default` (Rule of Zero).
  Tikhonov(const Tikhonov<T>& other) = default;

  /** Per-coefficient penalty value: 0.5 * l2 * w^2. */
  HDI T operator()(const T w) const { return 0.5 * l2_penalty * w * w; }

  /**
   * @brief Write the regularizer gradient into G and its scalar value into reg_val.
   *
   * @param reg_val  Device pointer receiving the penalty value
   * @param G        Gradient matrix; its weight columns are OVERWRITTEN with
   *                 l2_penalty * W (callers accumulate the loss gradient on top)
   * @param W        Current weight matrix
   * @param has_bias If true, the last column is a bias and is left unpenalized
   * @param stream   CUDA stream for all device work
   */
  inline void reg_grad(T* reg_val,
                       SimpleDenseMat<T>& G,
                       const SimpleDenseMat<T>& W,
                       const bool has_bias,
                       cudaStream_t stream) const
  {
    // NOTE: scikit generally does not penalize biases
    SimpleDenseMat<T> Gweights;
    SimpleDenseMat<T> Wweights;
    col_slice(G, Gweights, 0, G.n - has_bias);
    col_slice(W, Wweights, 0, G.n - has_bias);
    // Gradient of the penalty: l2_penalty * W (written, not accumulated).
    Gweights.ax(l2_penalty, Wweights, stream);
    // Penalty value: sum over weights of 0.5 * l2 * w^2 (this functor).
    raft::linalg::mapThenSumReduce(reg_val, Wweights.len, *this, stream, Wweights.data);
  }
};
/**
 * @brief Combines a GLM loss with a smooth regularizer into one objective.
 *
 * The regularizer gradient is written first (overwriting G), the loss
 * gradient is then accumulated on top, and the two scalar values are summed
 * on the host.
 */
template <typename T, class Loss, class Reg>
struct RegularizedGLM : GLMDims {
  Reg* reg;    // not owned
  Loss* loss;  // not owned

  // Base listed first: bases are always initialized before members, so this
  // ordering matches the actual initialization order (avoids -Wreorder).
  RegularizedGLM(Loss* loss, Reg* reg)
    : GLMDims(loss->C, loss->D, loss->fit_intercept), reg(reg), loss(loss)
  {
  }

  /**
   * @brief Compute the combined objective value (loss + penalty) and gradient.
   *
   * @param loss_val     Device pointer receiving the combined value
   * @param G            [out] Gradient of the combined objective
   * @param W            Current weights
   * @param Xb, yb, Zb   Data batch, labels, and scores scratch for the loss
   * @param stream       CUDA stream
   * @param initGradZero Unused here; G is always zeroed before accumulation
   */
  inline void loss_grad(T* loss_val,
                        SimpleDenseMat<T>& G,
                        const SimpleDenseMat<T>& W,
                        const SimpleMat<T>& Xb,
                        const SimpleVec<T>& yb,
                        SimpleDenseMat<T>& Zb,
                        cudaStream_t stream,
                        bool initGradZero = true)
  {
    T reg_host, loss_host;
    SimpleVec<T> lossVal(loss_val, 1);

    // G <- dReg/dW; the penalty value lands in lossVal.
    G.fill(0, stream);
    reg->reg_grad(lossVal.data, G, W, loss->fit_intercept, stream);
    raft::update_host(&reg_host, lossVal.data, 1, stream);

    // G += dLoss/dW (initGradZero=false keeps the regularizer contribution).
    loss->loss_grad(lossVal.data, G, W, Xb, yb, Zb, stream, false);
    raft::update_host(&loss_host, lossVal.data, 1, stream);

    // Both copies are stream-ordered; one sync makes them visible on host.
    raft::interruptible::synchronize(stream);

    lossVal.fill(loss_host + reg_host, stream);
  }

  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return loss->gradNorm(grad, dev_scalar, stream);
  }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/qn_solvers.cuh | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
/*
* This file contains implementations of two popular Quasi-Newton methods:
* - Limited-memory Broyden Fletcher Goldfarb Shanno (L-BFGS) [Nocedal, Wright -
* Numerical Optimization (1999)]
* - Orthant-wise limited-memory quasi-newton (OWL-QN) [Andrew, Gao - ICML 2007]
* https://www.microsoft.com/en-us/research/publication/scalable-training-of-l1-regularized-log-linear-models/
*
* L-BFGS is a classical method to solve unconstrained optimization problems of
* differentiable multi-variate functions f: R^D \mapsto R, i.e. it solves
*
* \min_{x \in R^D} f(x)
*
* iteratively by building up a m-dimensional (inverse) Hessian approximation.
*
* OWL-QN is an extension of L-BFGS that is specifically designed to optimize
* functions of the form
*
* f(x) + \lambda * \sum_i |x_i|,
*
* i.e. functions with an l1 penalty, by leveraging that |z| is differentiable
* when restricted to an orthant.
*
*/
#include "qn_linesearch.cuh"
#include "qn_util.cuh"
#include "simple_mat.cuh"
#include <cuml/common/logger.hpp>
#include <raft/util/cuda_utils.cuh>
#include <rmm/device_uvector.hpp>
namespace ML {
namespace GLM {
namespace detail {
// TODO better way to deal with alignment? Smaller align possible?
constexpr size_t qn_align = 256;
/**
 * Scratch-space requirement for min_lbfgs: two n-by-m history matrices
 * (S and Y), four length-n vectors (xp, grad, gradp, drt), and one aligned
 * slot for a device scalar.
 */
template <typename T>
inline size_t lbfgs_workspace_size(const LBFGSParam<T>& param, const int n)
{
  const size_t aligned_mat = raft::alignTo<size_t>(sizeof(T) * param.m * n, qn_align);
  const size_t aligned_vec = raft::alignTo<size_t>(sizeof(T) * n, qn_align);
  return 2 * aligned_mat + 4 * aligned_vec + qn_align;
}
/**
 * Scratch-space requirement for min_owlqn: everything L-BFGS needs plus one
 * extra length-n vector for the pseudo-gradient.
 */
template <typename T>
inline size_t owlqn_workspace_size(const LBFGSParam<T>& param, const int n)
{
  return lbfgs_workspace_size(param, n) + raft::alignTo<size_t>(sizeof(T) * n, qn_align);
}
/**
 * @brief Post-linesearch bookkeeping shared by L-BFGS and OWL-QN.
 *
 * Classifies the linesearch outcome, checks convergence, logs progress, and
 * rolls (x, fx, grad) back to the previous iterate when the step is unusable.
 *
 * @param solver     Solver name used in log messages
 * @param param      Optimizer parameters (tolerances, history length, ...)
 * @param iter       Current outer iteration number (for logging)
 * @param lsret      Return code of the linesearch just performed
 * @param fx         In/out: objective at the candidate; reset to fxp on rollback
 * @param fxp        Objective value at the previous iterate
 * @param gnorm      Gradient norm at the candidate point
 * @param x          In/out: candidate point; reset to xp on rollback
 * @param xp         Previous point
 * @param grad       In/out: gradient at the candidate; reset to gradp on rollback
 * @param gradp      Gradient at the previous point
 * @param fx_hist    Objective history used by the `past`-based convergence test
 * @param dev_scalar Device scratch scalar (not used in this function)
 * @param outcode    Out: the optimizer return code when stopping
 * @param stream     CUDA stream used for the rollback copies
 * @return true if the outer optimization loop should stop
 */
template <typename T>
inline bool update_and_check(const char* solver,
                             const LBFGSParam<T>& param,
                             int iter,
                             LINE_SEARCH_RETCODE lsret,
                             T& fx,
                             T& fxp,
                             const T& gnorm,
                             ML::SimpleVec<T>& x,
                             ML::SimpleVec<T>& xp,
                             ML::SimpleVec<T>& grad,
                             ML::SimpleVec<T>& gradp,
                             std::vector<T>& fx_hist,
                             T* dev_scalar,
                             OPT_RETCODE& outcode,
                             cudaStream_t stream)
{
  bool stop      = false;
  bool converged = false;
  // A NaN/Inf objective means the step landed in an invalid region.
  bool isLsValid = !isnan(fx) && !isinf(fx);
  // Linesearch may fail to converge, but still come closer to the solution;
  // if that is not the case, let `check_convergence` ("insufficient change")
  // below terminate the loop.
  bool isLsNonCritical = lsret == LS_INVALID_STEP_MIN || lsret == LS_MAX_ITERS_REACHED;
  // If the error is not critical, check that the target function does not grow.
  // This shouldn't really happen, but weird things can happen if the convergence
  // thresholds are too small.
  bool isLsInDoubt = isLsValid && fx <= fxp + param.ftol && isLsNonCritical;
  bool isLsSuccess = lsret == LS_SUCCESS || isLsInDoubt;

  CUML_LOG_TRACE("%s iteration %d, fx=%f", solver, iter, fx);

  // if the target is at least finite, we can check the convergence
  if (isLsValid) converged = check_convergence(param, iter, fx, gnorm, fx_hist);

  if (!isLsSuccess && !converged) {
    CUML_LOG_WARN(
      "%s line search failed (code %d); stopping at the last valid step", solver, lsret);
    outcode = OPT_LS_FAILED;
    stop    = true;
  } else if (!isLsValid) {
    CUML_LOG_ERROR(
      "%s error fx=%f at iteration %d; stopping at the last valid step", solver, fx, iter);
    outcode = OPT_NUMERIC_ERROR;
    stop    = true;
  } else if (converged) {
    CUML_LOG_DEBUG("%s converged", solver);
    outcode = OPT_SUCCESS;
    stop    = true;
  } else if (isLsInDoubt && fx + param.ftol >= fxp) {
    // If a non-critical error has happened during the line search, check if the target
    // is improved at least a bit. Otherwise, stop to avoid spinning till the iteration limit.
    CUML_LOG_WARN(
      "%s stopped, because the line search failed to advance (step delta = %f)", solver, fx - fxp);
    outcode = OPT_LS_FAILED;
    stop    = true;
  }

  // if linesearch wasn't successful, undo the update.
  if (!isLsSuccess || !isLsValid) {
    fx = fxp;
    x.copy_async(xp, stream);
    grad.copy_async(gradp, stream);
  }
  return stop;
}
/**
 * @brief Minimize a differentiable function with the limited-memory BFGS method.
 *
 * @param param     L-BFGS hyper-parameters (history size m, tolerances, limits)
 * @param f         Function object: `f(x, grad, dev_scalar, stream)` returns the
 *                  objective value at `x` and overwrites `grad` with the gradient
 * @param x         In: initial point. Out: the solution found
 * @param fx        Out: objective value at the returned `x`
 * @param k         Out: number of iterations performed
 * @param workspace Scratch space of at least lbfgs_workspace_size(param, x.len)
 * @param stream    CUDA stream for all device work
 * @param verbosity Logging level
 * @return OPT_SUCCESS on convergence, otherwise an error/limit code
 */
template <typename T, typename Function>
inline OPT_RETCODE min_lbfgs(const LBFGSParam<T>& param,
                             Function& f,              // function to minimize
                             SimpleVec<T>& x,          // initial point, holds result
                             T& fx,                    // output function value
                             int* k,                   // output iterations
                             SimpleVec<T>& workspace,  // scratch space
                             cudaStream_t stream,
                             int verbosity = 0)
{
  int n = x.len;
  // Keep the size in size_t: sizeof(T) * param.m * n can overflow int for
  // large problems (previously this was narrowed into a const int).
  const size_t workspace_size = lbfgs_workspace_size(param, n);
  ASSERT(static_cast<size_t>(workspace.len) >= workspace_size, "LBFGS: workspace insufficient");

  // SETUP WORKSPACE
  // NOTE: the sizes are computed in bytes but p_ws is advanced in elements of
  // T; the allocation in qn_minimize is sized in elements accordingly —
  // presumably intentional over-provisioning, verify before changing.
  size_t mat_size = raft::alignTo<size_t>(sizeof(T) * param.m * n, qn_align);
  size_t vec_size = raft::alignTo<size_t>(sizeof(T) * n, qn_align);
  T* p_ws         = workspace.data;
  SimpleDenseMat<T> S(p_ws, n, param.m);  // history of steps s_k
  p_ws += mat_size;
  SimpleDenseMat<T> Y(p_ws, n, param.m);  // history of gradient deltas y_k
  p_ws += mat_size;
  SimpleVec<T> xp(p_ws, n);  // previous point
  p_ws += vec_size;
  SimpleVec<T> grad(p_ws, n);  // current gradient
  p_ws += vec_size;
  SimpleVec<T> gradp(p_ws, n);  // previous gradient
  p_ws += vec_size;
  SimpleVec<T> drt(p_ws, n);  // search direction
  p_ws += vec_size;
  T* dev_scalar = p_ws;  // device scratch for reductions

  SimpleVec<T> svec, yvec;  // mask vectors
  std::vector<T> ys(param.m);
  std::vector<T> alpha(param.m);
  std::vector<T> fx_hist(param.past > 0 ? param.past : 0);

  *k = 0;
  ML::Logger::get().setLevel(verbosity);
  CUML_LOG_DEBUG("Running L-BFGS");

  // Evaluate function and compute gradient
  fx      = f(x, grad, dev_scalar, stream);
  T gnorm = f.gradNorm(grad, dev_scalar, stream);

  if (param.past > 0) fx_hist[0] = fx;

  // Early exit if the initial x is already a minimizer
  if (check_convergence(param, *k, fx, gnorm, fx_hist)) {
    CUML_LOG_DEBUG("Initial solution fulfills optimality condition.");
    return OPT_SUCCESS;
  }

  // Initial direction: steepest descent
  drt.ax(-1.0, grad, stream);

  // Initial step
  T step = T(1.0) / nrm2(drt, dev_scalar, stream);
  T fxp  = fx;

  *k        = 1;
  int end   = 0;
  int n_vec = 0;  // number of vector updates made in lbfgs_search_dir
  OPT_RETCODE retcode;
  LINE_SEARCH_RETCODE lsret;
  for (; *k <= param.max_iterations; (*k)++) {
    // Save the current x and gradient
    xp.copy_async(x, stream);
    gradp.copy_async(grad, stream);
    fxp = fx;

    // Line search to update x, fx and gradient
    lsret = ls_backtrack(param, f, fx, x, grad, step, drt, xp, dev_scalar, stream);
    gnorm = f.gradNorm(grad, dev_scalar, stream);

    if (update_and_check("L-BFGS",
                         param,
                         *k,
                         lsret,
                         fx,
                         fxp,
                         gnorm,
                         x,
                         xp,
                         grad,
                         gradp,
                         fx_hist,
                         dev_scalar,
                         retcode,
                         stream))
      return retcode;

    // Update s and y
    // s_{k+1} = x_{k+1} - x_k
    // y_{k+1} = g_{k+1} - g_k
    col_ref(S, svec, end);
    col_ref(Y, yvec, end);
    svec.axpy(-1.0, xp, x, stream);
    yvec.axpy(-1.0, gradp, grad, stream);

    // drt <- -H * g (two-loop recursion using the (s, y) history)
    end = lbfgs_search_dir(
      param, &n_vec, end, S, Y, grad, svec, yvec, drt, ys, alpha, dev_scalar, stream);

    // step = 1.0 as initial guess
    step = T(1.0);
  }
  CUML_LOG_WARN("L-BFGS: max iterations reached");
  return OPT_MAX_ITERS_REACHED;
}
template <typename T>
inline void update_pseudo(const SimpleVec<T>& x,
const SimpleVec<T>& grad,
const op_pseudo_grad<T>& pseudo_grad,
const int pg_limit,
SimpleVec<T>& pseudo,
cudaStream_t stream)
{
if (grad.len > pg_limit) {
pseudo.copy_async(grad, stream);
SimpleVec<T> mask(pseudo.data, pg_limit);
mask.assign_binary(x, grad, pseudo_grad, stream);
} else {
pseudo.assign_binary(x, grad, pseudo_grad, stream);
}
}
/**
 * @brief Minimize f(x) + l1_penalty * ||x[0:pg_limit]||_1 with OWL-QN.
 *
 * @param param      L-BFGS hyper-parameters reused by OWL-QN
 * @param f          Smooth part of the objective: `f(x, grad, dev_scalar, stream)`
 * @param l1_penalty L1 penalty strength (applied to the first pg_limit coefficients)
 * @param pg_limit   Number of leading coefficients that are l1-penalized
 *                   (biases/intercepts beyond this index are not penalized)
 * @param x          In: initial point. Out: the solution found
 * @param fx         Out: objective value (loss + l1 term) at the returned `x`
 * @param k          Out: number of iterations performed
 * @param workspace  Scratch space of at least owlqn_workspace_size(param, x.len)
 * @param stream     CUDA stream for all device work
 * @param verbosity  Logging level
 * @return OPT_SUCCESS on convergence, otherwise an error/limit code
 */
template <typename T, typename Function>
inline OPT_RETCODE min_owlqn(const LBFGSParam<T>& param,
                             Function& f,
                             const T l1_penalty,
                             const int pg_limit,
                             SimpleVec<T>& x,
                             T& fx,
                             int* k,
                             SimpleVec<T>& workspace,  // scratch space
                             cudaStream_t stream,
                             const int verbosity = 0)
{
  int n = x.len;
  // Keep the size in size_t: sizeof(T) * param.m * n can overflow int for
  // large problems (previously this was narrowed into a const int).
  const size_t workspace_size = owlqn_workspace_size(param, n);
  ASSERT(static_cast<size_t>(workspace.len) >= workspace_size, "LBFGS: workspace insufficient");
  ASSERT(pg_limit <= n && pg_limit > 0, "OWL-QN: Invalid pseudo grad limit parameter");

  // SETUP WORKSPACE (same layout as min_lbfgs plus one pseudo-gradient vector)
  size_t mat_size = raft::alignTo<size_t>(sizeof(T) * param.m * n, qn_align);
  size_t vec_size = raft::alignTo<size_t>(sizeof(T) * n, qn_align);
  T* p_ws         = workspace.data;
  SimpleDenseMat<T> S(p_ws, n, param.m);
  p_ws += mat_size;
  SimpleDenseMat<T> Y(p_ws, n, param.m);
  p_ws += mat_size;
  SimpleVec<T> xp(p_ws, n);
  p_ws += vec_size;
  SimpleVec<T> grad(p_ws, n);
  p_ws += vec_size;
  SimpleVec<T> gradp(p_ws, n);
  p_ws += vec_size;
  SimpleVec<T> drt(p_ws, n);
  p_ws += vec_size;
  SimpleVec<T> pseudo(p_ws, n);
  p_ws += vec_size;
  T* dev_scalar = p_ws;

  ML::Logger::get().setLevel(verbosity);

  SimpleVec<T> svec, yvec;  // mask vectors
  std::vector<T> ys(param.m);
  std::vector<T> alpha(param.m);
  std::vector<T> fx_hist(param.past > 0 ? param.past : 0);

  // Projects the search direction onto the orthant opposite the pseudo-gradient.
  op_project<T> project_neg(T(-1.0));

  // Wrap f so the reported value includes the l1 term over the penalized prefix
  // (gradients remain those of the smooth loss only).
  auto f_wrap = [&f, &l1_penalty, &pg_limit](
                  SimpleVec<T>& x, SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream) {
    T tmp = f(x, grad, dev_scalar, stream);
    SimpleVec<T> mask(x.data, pg_limit);
    return tmp + l1_penalty * nrm1(mask, dev_scalar, stream);
  };

  *k = 0;
  CUML_LOG_DEBUG("Running OWL-QN with lambda=%f", l1_penalty);

  // op to compute the pseudo gradients
  op_pseudo_grad<T> pseudo_grad(l1_penalty);

  fx = f_wrap(x, grad, dev_scalar,
              stream);  // fx is loss+regularizer, grad is grad of loss only
  T gnorm = f.gradNorm(grad, dev_scalar, stream);

  // compute pseudo grad, but don't overwrite grad: used to build H
  update_pseudo(x, grad, pseudo_grad, pg_limit, pseudo, stream);

  if (param.past > 0) fx_hist[0] = fx;

  // Early exit if the initial x is already a minimizer
  if (check_convergence(param, *k, fx, gnorm, fx_hist)) {
    CUML_LOG_DEBUG("Initial solution fulfills optimality condition.");
    return OPT_SUCCESS;
  }

  // Initial direction
  drt.ax(-1.0, pseudo, stream);  // using Pseudo gradient here
  // below should be done for consistency but seems unnecessary
  // drt.assign_k_ary(project, pseudo, x);

  // Initial step
  T step = T(1.0) / std::max(T(1), nrm2(drt, dev_scalar, stream));
  T fxp  = fx;

  int end   = 0;
  int n_vec = 0;  // number of vector updates made in lbfgs_search_dir
  OPT_RETCODE retcode;
  LINE_SEARCH_RETCODE lsret;
  for ((*k) = 1; (*k) <= param.max_iterations; (*k)++) {
    // Save the current x and gradient
    xp.copy_async(x, stream);
    gradp.copy_async(grad, stream);
    fxp = fx;

    // Projected line search to update x, fx and gradient
    lsret = ls_backtrack_projected(
      param, f_wrap, fx, x, grad, pseudo, step, drt, xp, l1_penalty, dev_scalar, stream);
    gnorm = f.gradNorm(grad, dev_scalar, stream);

    // NOTE: solver name fixed ("OWL-QN"; was misspelled "QWL-QN" in logs).
    if (update_and_check("OWL-QN",
                         param,
                         *k,
                         lsret,
                         fx,
                         fxp,
                         gnorm,
                         x,
                         xp,
                         grad,
                         gradp,
                         fx_hist,
                         dev_scalar,
                         retcode,
                         stream))
      return retcode;

    // recompute pseudo
    update_pseudo(x, grad, pseudo_grad, pg_limit, pseudo, stream);

    // Update s and y - We should only do this if there is no skipping condition
    col_ref(S, svec, end);
    col_ref(Y, yvec, end);
    svec.axpy(-1.0, xp, x, stream);
    yvec.axpy(-1.0, gradp, grad, stream);

    // drt <- -H * -> pseudo grad <-
    end = lbfgs_search_dir(
      param, &n_vec, end, S, Y, pseudo, svec, yvec, drt, ys, alpha, dev_scalar, stream);

    // Project drt onto orthant of -pseudog
    drt.assign_binary(drt, pseudo, project_neg, stream);

    // step = 1.0 as initial guess
    step = T(1.0);
  }
  CUML_LOG_WARN("OWL-QN: max iterations reached");
  return OPT_MAX_ITERS_REACHED;
}
/*
* Chooses the right algorithm, depending on presence of l1 term
*/
/**
 * @brief Allocate the solver workspace and run L-BFGS (l1 == 0) or OWL-QN (l1 > 0).
 *
 * @param handle    raft handle providing the CUDA stream and allocator
 * @param x         In: initial parameters. Out: the solution found
 * @param fx        Out: objective value at the returned `x`
 * @param num_iters Out: number of iterations performed
 * @param loss      Differentiable loss object (provides operator() and gradNorm)
 * @param l1        L1 penalty strength; selects the solver
 * @param opt_param Solver hyper-parameters
 * @param verbosity Logging level
 * @return The solver's OPT_RETCODE as an int
 */
template <typename T, typename LossFunction>
inline int qn_minimize(const raft::handle_t& handle,
                       SimpleVec<T>& x,
                       T* fx,
                       int* num_iters,
                       LossFunction& loss,
                       const T l1,
                       const LBFGSParam<T>& opt_param,
                       const int verbosity = 0)
{
  // TODO should the workspace allocation happen outside?
  cudaStream_t stream = handle.get_stream();
  OPT_RETCODE ret;
  if (l1 == 0.0) {
    // Smooth problem: plain L-BFGS.
    rmm::device_uvector<T> tmp(lbfgs_workspace_size(opt_param, x.len), stream);
    SimpleVec<T> workspace(tmp.data(), tmp.size());

    ret = min_lbfgs(opt_param,
                    loss,       // function to minimize
                    x,          // initial point, holds result
                    *fx,        // output function value
                    num_iters,  // output iterations
                    workspace,  // scratch space
                    stream,
                    verbosity);

    CUML_LOG_DEBUG("L-BFGS Done");
  } else {
    // There might not be a better way to deal with dispatching
    // for the l1 case:
    // The algorithm explicitly expects a differentiable
    // function f(x). It takes care of adding and
    // handling the term l1norm(x) * l1_pen explicitly, i.e.
    // it needs to evaluate f(x) and its gradient separately
    rmm::device_uvector<T> tmp(owlqn_workspace_size(opt_param, x.len), stream);
    SimpleVec<T> workspace(tmp.data(), tmp.size());

    // Only the first D * C coefficients are l1-penalized (not the intercepts).
    ret = min_owlqn(opt_param,
                    loss,  // function to minimize
                    l1,
                    loss.D * loss.C,
                    x,          // initial point, holds result
                    *fx,        // output function value
                    num_iters,  // output iterations
                    workspace,  // scratch space
                    stream,
                    verbosity);

    CUML_LOG_DEBUG("OWL-QN Done");
  }
  if (ret == OPT_MAX_ITERS_REACHED) {
    CUML_LOG_WARN(
      "Maximum iterations reached before solver is converged. To increase "
      "model accuracy you can increase the number of iterations (max_iter) or "
      "improve the scaling of the input data.");
  }
  return ret;
}
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/glm_base.cuh | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "simple_mat.cuh"
#include <raft/linalg/add.cuh>
#include <raft/linalg/map.cuh>
#include <raft/linalg/map_then_reduce.cuh>
#include <raft/linalg/matrix_vector_op.cuh>
#include <raft/stats/mean.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <thrust/execution_policy.h>
#include <thrust/functional.h>
#include <thrust/reduce.h>
#include <vector>
namespace ML {
namespace GLM {
namespace detail {
/**
 * @brief Linear forward pass: Z <- W * X^T, broadcasting a bias column if present.
 *
 * @param handle raft handle providing the CUDA stream and BLAS handles
 * @param Z      [out] Score matrix, computed as W * X^T (one column per sample)
 * @param X      Input data with X.m samples and X.n features (dense or sparse)
 * @param W      Weight matrix; if W.n == X.n + 1, the last column is a bias
 */
template <typename T>
inline void linearFwd(const raft::handle_t& handle,
                      SimpleDenseMat<T>& Z,
                      const SimpleMat<T>& X,
                      const SimpleDenseMat<T>& W)
{
  cudaStream_t stream = handle.get_stream();
  // Forward pass:  compute Z <- W * X.T + bias
  // The bias is present iff W has one more column than X has features.
  const bool has_bias = X.n != W.n;
  const int D         = X.n;
  if (has_bias) {
    SimpleVec<T> bias;
    SimpleDenseMat<T> weights;
    col_ref(W, bias, D);
    col_slice(W, weights, 0, D);
    // We implement Z <- W * X^T + b by
    // - Z <- b (broadcast): TODO reads Z unnecessarily atm
    // - Z <- W * X^T + Z : TODO can be fused in CUTLASS?
    auto set_bias = [] __device__(const T z, const T b) { return b; };
    raft::linalg::matrixVectorOp(
      Z.data, Z.data, bias.data, Z.n, Z.m, false, false, set_bias, stream);

    Z.assign_gemm(handle, 1, weights, false, X, true, 1, stream);
  } else {
    Z.assign_gemm(handle, 1, W, false, X, true, 0, stream);
  }
}
/**
 * @brief Linear backward pass: G <- (1/N) * dZ * X, plus the bias gradient.
 *
 * @param handle  raft handle providing the CUDA stream and BLAS handles
 * @param G       [out] Gradient w.r.t. the weights; the bias column (if any)
 *                receives the mean of dZ over samples
 * @param X       Input data with X.m samples and X.n features
 * @param dZ      Gradient of the loss w.r.t. the scores Z
 * @param setZero If true, G is overwritten; otherwise the result is accumulated
 */
template <typename T>
inline void linearBwd(const raft::handle_t& handle,
                      SimpleDenseMat<T>& G,
                      const SimpleMat<T>& X,
                      const SimpleDenseMat<T>& dZ,
                      bool setZero)
{
  cudaStream_t stream = handle.get_stream();
  // Backward pass:
  // - compute G <- dZ * X.T
  // - for bias: Gb = mean(dZ, 1)
  const bool has_bias = X.n != G.n;
  const int D         = X.n;
  // beta selects overwrite (0) vs accumulate (1) in the GEMM below.
  const T beta = setZero ? T(0) : T(1);
  if (has_bias) {
    SimpleVec<T> Gbias;
    SimpleDenseMat<T> Gweights;
    col_ref(G, Gbias, D);
    col_slice(G, Gweights, 0, D);
    // TODO can this be fused somehow?
    Gweights.assign_gemm(handle, 1.0 / X.m, dZ, false, X, false, beta, stream);
    raft::stats::mean(Gbias.data, dZ.data, dZ.m, dZ.n, false, true, stream);
  } else {
    G.assign_gemm(handle, 1.0 / X.m, dZ, false, X, false, beta, stream);
  }
}
/**
 * @brief Dimensions of a generalized linear model's parameter matrix.
 */
struct GLMDims {
  bool fit_intercept;  // whether an intercept column is appended to the weights
  int C;               // number of outputs (e.g. classes; 1 for regression)
  int D;               // number of input features
  int dims;            // columns of the weight matrix: D, plus 1 if fit_intercept
  int n_param;         // total number of trainable parameters: dims * C
  // Initializers listed in declaration order (fixes a -Wreorder warning; the
  // dims/n_param computation is unchanged).
  GLMDims(int C, int D, bool fit_intercept)
    : fit_intercept(fit_intercept),
      C(C),
      D(D),
      dims(D + fit_intercept),
      n_param((D + fit_intercept) * C)
  {
  }
};
/**
 * @brief CRTP base for GLM loss functions.
 *
 * The derived `Loss` must provide two device functor fields `lz` (per-element
 * loss value) and `dlz` (its derivative w.r.t. the score z); this base wires
 * them into the forward/backward linear passes.
 */
template <typename T, class Loss>
struct GLMBase : GLMDims {
  typedef SimpleDenseMat<T> Mat;
  typedef SimpleVec<T> Vec;

  const raft::handle_t& handle;
  // Optional per-sample weights (device pointer, not owned); nullptr when unused.
  T* sample_weights;
  // Sum of all sample weights, used to normalize the weighted loss.
  T weights_sum;

  GLMBase(const raft::handle_t& handle, int D, int C, bool fit_intercept)
    : GLMDims(C, D, fit_intercept), handle(handle), sample_weights(nullptr), weights_sum(0)
  {
  }

  // Register per-sample weights and precompute their sum on the given stream.
  void add_sample_weights(T* sample_weights, int n_samples, cudaStream_t stream)
  {
    this->sample_weights = sample_weights;
    this->weights_sum = thrust::reduce(thrust::cuda::par.on(stream),
                                       sample_weights,
                                       sample_weights + n_samples,
                                       (T)0,
                                       thrust::plus<T>());
  }

  /*
   * Computes the following:
   * 1. Z <- dL/DZ
   * 2. loss_val <- sum loss(Z)
   *
   * Default: elementwise application of loss and its derivative
   *
   * NB: for this method to work, loss implementations must have two functor fields `lz` and `dlz`.
   * These two compute loss value and its derivative w.r.t. `z`.
   */
  inline void getLossAndDZ(T* loss_val,
                           SimpleDenseMat<T>& Z,
                           const SimpleVec<T>& y,
                           cudaStream_t stream)
  {
    // Base impl assumes simple case C = 1
    // TODO would be nice to have a kernel that fuses these two steps
    // This would be easy, if mapThenSumReduce allowed outputting the result of
    // map (supporting inplace)
    // Copy the functors by value so the device lambdas don't capture `this`.
    auto lz_copy = static_cast<Loss*>(this)->lz;
    auto dlz_copy = static_cast<Loss*>(this)->dlz;
    if (this->sample_weights) {  // Sample weights are in use
      T normalization = 1.0 / this->weights_sum;
      // loss_val <- sum_i lz(y_i, z_i) * w_i / sum(w)
      raft::linalg::mapThenSumReduce(
        loss_val,
        y.len,
        [lz_copy, normalization] __device__(const T y, const T z, const T weight) {
          return lz_copy(y, z) * (weight * normalization);
        },
        stream,
        y.data,
        Z.data,
        sample_weights);
      // Z <- w_i * dlz(y_i, z_i), in place
      raft::linalg::map_k(
        Z.data,
        y.len,
        [dlz_copy] __device__(const T y, const T z, const T weight) {
          return weight * dlz_copy(y, z);
        },
        stream,
        y.data,
        Z.data,
        sample_weights);
    } else {  // Sample weights are not used
      T normalization = 1.0 / y.len;
      // loss_val <- mean_i lz(y_i, z_i)
      raft::linalg::mapThenSumReduce(
        loss_val,
        y.len,
        [lz_copy, normalization] __device__(const T y, const T z) {
          return lz_copy(y, z) * normalization;
        },
        stream,
        y.data,
        Z.data);
      // Z <- dlz(y_i, z_i), in place
      raft::linalg::binaryOp(Z.data, y.data, Z.data, y.len, dlz_copy, stream);
    }
  }

  // Full objective evaluation: forward pass, elementwise loss/derivative,
  // then backward pass accumulating the gradient into G.
  inline void loss_grad(T* loss_val,
                        Mat& G,
                        const Mat& W,
                        const SimpleMat<T>& Xb,
                        const Vec& yb,
                        Mat& Zb,
                        cudaStream_t stream,
                        bool initGradZero = true)
  {
    Loss* loss = static_cast<Loss*>(this);  // static polymorphism

    linearFwd(handle, Zb, Xb, W);                  // linear part: forward pass
    loss->getLossAndDZ(loss_val, Zb, yb, stream);  // loss specific part
    linearBwd(handle, G, Xb, Zb, initGradZero);    // linear part: backward pass
  }
};
/**
 * @brief Binds a GLM objective to a fixed dataset, exposing the
 *        `operator()(w, grad, ...)` interface expected by the QN solvers.
 */
template <typename T, class GLMObjective>
struct GLMWithData : GLMDims {
  const SimpleMat<T>* X;   // not owned
  const SimpleVec<T>* y;   // not owned
  SimpleDenseMat<T>* Z;    // not owned; scratch for the scores
  GLMObjective* objective;  // not owned

  // Base listed first: bases are initialized before members regardless of the
  // order written, so this matches the true order (avoids -Wreorder).
  GLMWithData(GLMObjective* obj, const SimpleMat<T>& X, const SimpleVec<T>& y, SimpleDenseMat<T>& Z)
    : GLMDims(obj->C, obj->D, obj->fit_intercept), X(&X), y(&y), Z(&Z), objective(obj)
  {
  }

  // interface exposed to typical non-linear optimizers
  inline T operator()(const SimpleVec<T>& wFlat,
                      SimpleVec<T>& gradFlat,
                      T* dev_scalar,
                      cudaStream_t stream)
  {
    // Reinterpret the flat parameter/gradient vectors as C x dims matrices.
    SimpleDenseMat<T> W(wFlat.data, C, dims);
    SimpleDenseMat<T> G(gradFlat.data, C, dims);
    objective->loss_grad(dev_scalar, G, W, *X, *y, *Z, stream);
    // The loss value lives on device; copy it to the host for the solver.
    T loss_host;
    raft::update_host(&loss_host, dev_scalar, 1, stream);
    raft::interruptible::synchronize(stream);
    return loss_host;
  }

  /**
   * @brief Calculate a norm of the gradient computed using the given Loss instance.
   *
   * This function is intended to be used in `check_convergence`; it's output is supposed
   * to be proportional to the loss value w.r.t. the number of features (D).
   *
   * Different loss functions may scale differently with the number of features (D).
   * This has an effect on the convergence criteria. To account for that, we let a
   * loss function define its preferred metric. Normally, we differentiate between the
   * L2 norm (e.g. for Squared loss) and LInf norm (e.g. for Softmax loss).
   */
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return objective->gradNorm(grad, dev_scalar, stream);
  }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/simple_mat.cuh | /*
* Copyright (c) 2018-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "simple_mat/base.hpp"
#include "simple_mat/dense.hpp"
#include "simple_mat/sparse.hpp"
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/qn.cuh | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "glm_base.cuh"
#include "glm_linear.cuh"
#include "glm_logistic.cuh"
#include "glm_regularizer.cuh"
#include "glm_softmax.cuh"
#include "glm_svm.cuh"
#include "qn_solvers.cuh"
#include "qn_util.cuh"
#include <cuml/linear_model/qn.h>
#include <raft/matrix/math.cuh>
#include <rmm/device_uvector.hpp>
namespace ML {
namespace GLM {
namespace detail {
/**
 * @brief Fit a GLM by minimizing the (optionally L1/L2-regularized) loss
 * with the quasi-Newton solver.
 *
 * @param handle    raft handle (provides streams/cublas to the solver)
 * @param pams      public solver settings
 * @param loss      loss object; defines n_param (size of the flat weights)
 * @param X         data matrix [N, D]
 * @param y         targets [N]
 * @param Z         scratch matrix for the linear scores
 * @param w0_data   initial weights on input, fitted weights on output
 * @param fx        receives the final objective value
 * @param num_iters receives the iteration count
 * @return solver return code (see OPT_RETCODE)
 */
template <typename T, typename LossFunction>
int qn_fit(const raft::handle_t& handle,
           const qn_params& pams,
           LossFunction& loss,
           const SimpleMat<T>& X,
           const SimpleVec<T>& y,
           SimpleDenseMat<T>& Z,
           T* w0_data,  // initial value and result
           T* fx,
           int* num_iters)
{
  // NOTE: the previous revision fetched the stream here but never used it.
  LBFGSParam<T> opt_param(pams);
  SimpleVec<T> w0(w0_data, loss.n_param);

  // Scale the regularization strength with the number of samples.
  T l1 = pams.penalty_l1;
  T l2 = pams.penalty_l2;
  if (pams.penalty_normalized) {
    l1 /= X.m;  // X.m = number of data samples
    l2 /= X.m;
  }

  if (l2 == 0) {
    // No L2 term: minimize the bare loss (l1 is handled inside qn_minimize).
    GLMWithData<T, LossFunction> lossWith(&loss, X, y, Z);
    return qn_minimize(handle, w0, fx, num_iters, lossWith, l1, opt_param, pams.verbose);
  } else {
    // Fold the L2 penalty into the objective via a Tikhonov regularizer.
    Tikhonov<T> reg(l2);
    RegularizedGLM<T, LossFunction, decltype(reg)> obj(&loss, &reg);
    GLMWithData<T, decltype(obj)> lossWith(&obj, X, y, Z);
    return qn_minimize(handle, w0, fx, num_iters, lossWith, l1, opt_param, pams.verbose);
  }
}
/**
 * @brief Dispatch on the configured loss type and fit the model.
 *
 * The per-loss tail (attach optional sample weights, run qn_fit) is
 * identical for every loss, so it is factored into one generic lambda
 * instead of being repeated in each switch case.
 */
template <typename T>
inline void qn_fit_x(const raft::handle_t& handle,
                     const qn_params& pams,
                     SimpleMat<T>& X,
                     T* y_data,
                     int C,
                     T* w0_data,
                     T* f,
                     int* num_iters,
                     T* sample_weight = nullptr,
                     T svr_eps = 0)
{
  /*
    NB:
      N - number of data rows
      D - number of data columns (features)
      C - number of output classes

      X in R^[N, D]
      w in R^[D, C]
      y in {0, 1}^[N, C] or {cat}^N

    Dimensionality of w0 depends on loss, so we initialize it later.
   */
  cudaStream_t stream = handle.get_stream();
  int N               = X.m;
  int D               = X.n;
  int n_targets       = qn_is_classification(pams.loss) && C == 2 ? 1 : C;
  rmm::device_uvector<T> tmp(n_targets * N, stream);
  SimpleDenseMat<T> Z(tmp.data(), n_targets, N);
  SimpleVec<T> y(y_data, N);

  // Shared tail for every loss type: attach optional per-sample weights and
  // run the quasi-Newton solver on the constructed loss object.
  auto fit_loss = [&](auto&& loss) {
    if (sample_weight) loss.add_sample_weights(sample_weight, N, stream);
    qn_fit<T, std::decay_t<decltype(loss)>>(
      handle, pams, loss, X, y, Z, w0_data, f, num_iters);
  };

  switch (pams.loss) {
    case QN_LOSS_LOGISTIC: {
      ASSERT(C == 2, "qn.h: logistic loss invalid C");
      fit_loss(LogisticLoss<T>(handle, D, pams.fit_intercept));
    } break;
    case QN_LOSS_SQUARED: {
      ASSERT(C == 1, "qn.h: squared loss invalid C");
      fit_loss(SquaredLoss<T>(handle, D, pams.fit_intercept));
    } break;
    case QN_LOSS_SOFTMAX: {
      ASSERT(C > 2, "qn.h: softmax invalid C");
      fit_loss(Softmax<T>(handle, D, C, pams.fit_intercept));
    } break;
    case QN_LOSS_SVC_L1: {
      ASSERT(C == 2, "qn.h: SVC-L1 loss invalid C");
      fit_loss(SVCL1Loss<T>(handle, D, pams.fit_intercept));
    } break;
    case QN_LOSS_SVC_L2: {
      ASSERT(C == 2, "qn.h: SVC-L2 loss invalid C");
      fit_loss(SVCL2Loss<T>(handle, D, pams.fit_intercept));
    } break;
    case QN_LOSS_SVR_L1: {
      ASSERT(C == 1, "qn.h: SVR-L1 loss invalid C");
      fit_loss(SVRL1Loss<T>(handle, D, pams.fit_intercept, svr_eps));
    } break;
    case QN_LOSS_SVR_L2: {
      ASSERT(C == 1, "qn.h: SVR-L2 loss invalid C");
      fit_loss(SVRL2Loss<T>(handle, D, pams.fit_intercept, svr_eps));
    } break;
    case QN_LOSS_ABS: {
      ASSERT(C == 1, "qn.h: abs loss (L1) invalid C");
      fit_loss(AbsLoss<T>(handle, D, pams.fit_intercept));
    } break;
    default: {
      ASSERT(false, "qn.h: unknown loss function type (id = %d).", pams.loss);
    }
  }
}
/**
 * Dense-input entry point: wraps the raw pointer into a SimpleDenseMat view
 * and forwards everything to qn_fit_x.
 */
template <typename T>
void qnFit(const raft::handle_t& handle,
           const qn_params& pams,
           T* X_data,
           bool X_col_major,
           T* y_data,
           int N,
           int D,
           int C,
           T* w0_data,
           T* f,
           int* num_iters,
           T* sample_weight = nullptr,
           T svr_eps = 0)
{
  const STORAGE_ORDER order = X_col_major ? COL_MAJOR : ROW_MAJOR;
  SimpleDenseMat<T> X_mat(X_data, N, D, order);
  qn_fit_x(handle, pams, X_mat, y_data, C, w0_data, f, num_iters, sample_weight, svr_eps);
}
/**
 * Sparse-input entry point: wraps the sparse arrays into a SimpleSparseMat
 * view and forwards everything to qn_fit_x.
 */
template <typename T>
void qnFitSparse(const raft::handle_t& handle,
                 const qn_params& pams,
                 T* X_values,
                 int* X_cols,
                 int* X_row_ids,
                 int X_nnz,
                 T* y_data,
                 int N,
                 int D,
                 int C,
                 T* w0_data,
                 T* f,
                 int* num_iters,
                 T* sample_weight = nullptr,
                 T svr_eps = 0)
{
  SimpleSparseMat<T> X_mat(X_values, X_cols, X_row_ids, X_nnz, N, D);
  qn_fit_x(handle, pams, X_mat, y_data, C, w0_data, f, num_iters, sample_weight, svr_eps);
}
// Compute the raw decision scores Z for the given fitted parameters by a
// single forward pass Z = W * X (plus intercept when fitted).
template <typename T>
void qn_decision_function(
  const raft::handle_t& handle, const qn_params& pams, SimpleMat<T>& X, int C, T* params, T* scores)
{
  // NOTE: While gtests pass X as row-major, and python API passes X as
  // col-major, no extensive testing has been done to ensure that
  // this function works correctly for both input types
  const bool binary   = qn_is_classification(pams.loss) && C == 2;
  const int n_targets = binary ? 1 : C;
  GLMDims dims(n_targets, X.n, pams.fit_intercept);
  SimpleDenseMat<T> W(params, n_targets, dims.dims);
  SimpleDenseMat<T> Z(scores, n_targets, X.m);
  linearFwd(handle, Z, X, W);
}
// Dense-input wrapper around qn_decision_function.
template <typename T>
void qnDecisionFunction(const raft::handle_t& handle,
                        const qn_params& pams,
                        T* Xptr,
                        bool X_col_major,
                        int N,
                        int D,
                        int C,
                        T* params,
                        T* scores)
{
  const STORAGE_ORDER order = X_col_major ? COL_MAJOR : ROW_MAJOR;
  SimpleDenseMat<T> X_mat(Xptr, N, D, order);
  qn_decision_function(handle, pams, X_mat, C, params, scores);
}
// Sparse-input wrapper around qn_decision_function.
template <typename T>
void qnDecisionFunctionSparse(const raft::handle_t& handle,
                              const qn_params& pams,
                              T* X_values,
                              int* X_cols,
                              int* X_row_ids,
                              int X_nnz,
                              int N,
                              int D,
                              int C,
                              T* params,
                              T* scores)
{
  SimpleSparseMat<T> X_mat(X_values, X_cols, X_row_ids, X_nnz, N, D);
  qn_decision_function(handle, pams, X_mat, C, params, scores);
}
/**
 * @brief Predict targets/labels for X with a fitted model.
 *
 * Computes decision scores into a temporary buffer, then reduces them to one
 * prediction per sample: thresholding (binary), argmax (multiclass), or the
 * score itself (regression). `preds` receives a 1 x N row of predictions.
 */
template <typename T>
void qn_predict(
  const raft::handle_t& handle, const qn_params& pams, SimpleMat<T>& X, int C, T* params, T* preds)
{
  cudaStream_t stream = handle.get_stream();
  bool is_class = qn_is_classification(pams.loss);
  // Binary classification uses a single score column; otherwise C columns.
  int n_targets = is_class && C == 2 ? 1 : C;
  rmm::device_uvector<T> scores(n_targets * X.m, stream);
  qn_decision_function(handle, pams, X, C, params, scores.data());
  SimpleDenseMat<T> Z(scores.data(), n_targets, X.m);
  SimpleDenseMat<T> P(preds, 1, X.m);
  if (is_class) {
    if (C == 2) {
      // Binary: threshold the single decision score at zero.
      P.assign_unary(
        Z, [] __device__(const T z) { return z > 0.0 ? T(1) : T(0); }, stream);
    } else {
      // Multiclass: predicted label is the argmax over the C scores.
      raft::matrix::argmax(Z.data, C, X.m, preds, stream);
    }
  } else {
    // Regression: the decision score is the prediction itself.
    P.copy_async(Z, stream);
  }
}
// Dense-input wrapper around qn_predict.
template <typename T>
void qnPredict(const raft::handle_t& handle,
               const qn_params& pams,
               T* Xptr,
               bool X_col_major,
               int N,
               int D,
               int C,
               T* params,
               T* preds)
{
  const STORAGE_ORDER order = X_col_major ? COL_MAJOR : ROW_MAJOR;
  SimpleDenseMat<T> X_mat(Xptr, N, D, order);
  qn_predict(handle, pams, X_mat, C, params, preds);
}
// Sparse-input wrapper around qn_predict.
template <typename T>
void qnPredictSparse(const raft::handle_t& handle,
                     const qn_params& pams,
                     T* X_values,
                     int* X_cols,
                     int* X_row_ids,
                     int X_nnz,
                     int N,
                     int D,
                     int C,
                     T* params,
                     T* preds)
{
  SimpleSparseMat<T> X_mat(X_values, X_cols, X_row_ids, X_nnz, N, D);
  qn_predict(handle, pams, X_mat, C, params, preds);
}
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/glm_softmax.cuh | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "glm_base.cuh"
#include "simple_mat.cuh"
#include <raft/linalg/add.cuh>
#include <raft/util/cuda_utils.cuh>
namespace ML {
namespace GLM {
namespace detail {
using raft::ceildiv;
using raft::myExp;
using raft::myLog;
using raft::myMax;
// Input: matrix Z (dims: CxN)
// Computes softmax cross entropy loss across columns, i.e. normalization
// column-wise.
//
// This kernel performs best for small number of classes C.
// It's much faster than implementation based on ml-prims (up to ~2x - ~10x for
// small C <= BX). More importantly, it does not require another CxN scratch
// space. In that case the block covers the whole column and warp reduce is fast
// TODO for very large C, there should be maybe rather something along the lines
// of
// coalesced reduce, i.e. blocks should take care of columns
// TODO split into two kernels for small and large case?
// Kernel: each block processes BY columns (samples); the BX threads of a row
// cooperate over the C classes of one column via warp reductions.
// `out` accumulates the mean loss through atomicAdd (must be zeroed before
// launch); `dZ` receives the softmax probabilities minus the one-hot label
// (or just the probabilities if getDerivative=false). `dZ` may alias `in`.
template <typename T, int BX = 32, int BY = 8>
__global__ void logSoftmaxKernel(
  T* out, T* dZ, const T* in, const T* labels, int C, int N, bool getDerivative = true)
{
  typedef cub::WarpReduce<T, BX> WarpRed;
  typedef cub::BlockReduce<T, BX, cub::BLOCK_REDUCE_WARP_REDUCTIONS, BY> BlockRed;
  // Shared memory is reused across the phases below; it is never needed by
  // two phases concurrently (syncthreads separates the uses).
  __shared__ union {
    typename WarpRed::TempStorage warpStore[BY];
    typename BlockRed::TempStorage blockStore;
    T sh_val[BY];
  } shm;
  int y = threadIdx.y + blockIdx.x * BY;  // column (sample) index
  int len = C * N;
  bool delta = false;  // set on the thread that reads the true-label entry
  // Broadcast this column's label to all BX threads of the row via shmem.
  // TODO is there a better way to read this?
  if (getDerivative && threadIdx.x == 0) {
    if (y < N) {
      shm.sh_val[threadIdx.y] = labels[y];
    } else {
      // Sentinel for out-of-range columns: matches no class index.
      shm.sh_val[threadIdx.y] = std::numeric_limits<T>::lowest();
    }
  }
  __syncthreads();
  T label = shm.sh_val[threadIdx.y];
  __syncthreads();
  T eta_y = 0;      // score of the true class (meaningful only where delta)
  T myEta = 0;      // this thread's score; covers the column iff C <= BX
  T etaMax = -1e9;  // running max, for numerically stable log-sum-exp
  T lse = 0;
  /*
   * Phase 1: Find Maximum m over column
   */
  for (int x = threadIdx.x; x < C; x += BX) {
    int idx = x + y * C;
    if (x < C && idx < len) {
      myEta = in[idx];
      if (x == label) {
        delta = true;
        eta_y = myEta;
      }
      etaMax = myMax<T>(myEta, etaMax);
    }
  }
  // Reduce per-thread maxima to the column maximum, then broadcast it.
  T tmpMax = WarpRed(shm.warpStore[threadIdx.y]).Reduce(etaMax, cub::Max());
  if (threadIdx.x == 0) { shm.sh_val[threadIdx.y] = tmpMax; }
  __syncthreads();
  etaMax = shm.sh_val[threadIdx.y];
  __syncthreads();
  /*
   * Phase 2: Compute stabilized log-sum-exp over column
   * lse = m + log(sum(exp(eta - m)))
   */
  // TODO there must be a better way to do this...
  if (C <= BX) { // this means one block covers a column and myEta is valid
    int idx = threadIdx.x + y * C;
    if (threadIdx.x < C && idx < len) { lse = myExp<T>(myEta - etaMax); }
  } else {
    for (int x = threadIdx.x; x < C; x += BX) {
      int idx = x + y * C;
      if (x < C && idx < len) { lse += myExp<T>(in[idx] - etaMax); }
    }
  }
  // Sum the exp-terms across the row, add back the max, broadcast the lse.
  T tmpLse = WarpRed(shm.warpStore[threadIdx.y]).Sum(lse);
  if (threadIdx.x == 0) { shm.sh_val[threadIdx.y] = etaMax + myLog<T>(tmpLse); }
  __syncthreads();
  lse = shm.sh_val[threadIdx.y];
  __syncthreads();
  /*
   * Phase 3: Compute derivatives dL/dZ = P - delta_y
   * P is the softmax distribution, delta_y the kronecker delta for the class of
   * label y If we getDerivative=false, dZ will just contain P, which might be
   * useful
   */
  if (C <= BX) { // this means one block covers a column and myEta is valid
    int idx = threadIdx.x + y * C;
    if (threadIdx.x < C && idx < len) {
      dZ[idx] = (myExp<T>(myEta - lse) - (getDerivative ? (threadIdx.x == label) : T(0)));
    }
  } else {
    for (int x = threadIdx.x; x < C; x += BX) {
      int idx = x + y * C;
      if (x < C && idx < len) {
        T logP = in[idx] - lse;
        dZ[idx] = (myExp<T>(logP) - (getDerivative ? (x == label) : T(0)));
      }
    }
  }
  if (!getDerivative)  // no need to continue, lossval will be undefined
    return;
  // Per-column contribution to the mean loss; counted exactly once because
  // only the thread that owns the true-label entry has delta set.
  T lossVal = 0;
  if (delta) { lossVal = (lse - eta_y) / N; }
  /*
   * Phase 4: accumulate loss value
   */
  T blockSum = BlockRed(shm.blockStore).Sum(lossVal);
  if (threadIdx.x == 0 && threadIdx.y == 0) { raft::myAtomicAdd(out, blockSum); }
}
/**
 * @brief Launch logSoftmaxKernel with a block shape chosen from C.
 *
 * Zeroes the loss accumulator and picks BX >= C when possible so that one
 * warp-row covers a whole column (the kernel's fast path).
 */
template <typename T>
void launchLogsoftmax(
  T* loss_val, T* dldZ, const T* Z, const T* labels, int C, int N, cudaStream_t stream)
{
  // The kernel accumulates into loss_val with atomicAdd, so it must start at
  // zero. The memset is enqueued on the same stream as the kernel below, so
  // stream ordering already guarantees it completes first; the host-side
  // synchronize the previous revision did here was redundant overhead.
  RAFT_CUDA_TRY(cudaMemsetAsync(loss_val, 0, sizeof(T), stream));
  if (C <= 4) {
    dim3 bs(4, 64);
    dim3 gs(ceildiv(N, 64));
    logSoftmaxKernel<T, 4, 64><<<gs, bs, 0, stream>>>(loss_val, dldZ, Z, labels, C, N);
  } else if (C <= 8) {
    dim3 bs(8, 32);
    dim3 gs(ceildiv(N, 32));
    logSoftmaxKernel<T, 8, 32><<<gs, bs, 0, stream>>>(loss_val, dldZ, Z, labels, C, N);
  } else if (C <= 16) {
    dim3 bs(16, 16);
    dim3 gs(ceildiv(N, 16));
    logSoftmaxKernel<T, 16, 16><<<gs, bs, 0, stream>>>(loss_val, dldZ, Z, labels, C, N);
  } else {
    dim3 bs(32, 8);
    dim3 gs(ceildiv(N, 8));
    logSoftmaxKernel<T, 32, 8><<<gs, bs, 0, stream>>>(loss_val, dldZ, Z, labels, C, N);
  }
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
// Softmax (multinomial logistic) loss for multiclass classification.
template <typename T>
struct Softmax : GLMBase<T, Softmax<T>> {
  typedef GLMBase<T, Softmax<T>> Super;
  Softmax(const raft::handle_t& handle, int D, int C, bool has_bias) : Super(handle, D, C, has_bias)
  {
  }
  // Writes the cross-entropy loss into loss_val and overwrites Z in-place
  // with the derivative dL/dZ (Z holds the linear scores on entry; the
  // kernel supports dZ aliasing its input).
  inline void getLossAndDZ(T* loss_val,
                           SimpleDenseMat<T>& Z,
                           const SimpleVec<T>& y,
                           cudaStream_t stream)
  {
    launchLogsoftmax(loss_val, Z.data, Z.data, y.data, Z.m, Z.n, stream);
  }
  // Convergence metric preferred for this loss: the LInf norm of the
  // gradient (see the gradNorm note in glm_base.cuh).
  inline T gradNorm(const SimpleVec<T>& grad, T* dev_scalar, cudaStream_t stream)
  {
    return nrmMax(grad, dev_scalar, stream);
  }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm | rapidsai_public_repos/cuml/cpp/src/glm/qn/qn_util.cuh | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cuml/linear_model/qn.h>
#include <cuml/common/logger.hpp>
#include <limits>
#include <raft/util/cuda_utils.cuh>
namespace ML {
namespace GLM {
namespace detail {
// Backtracking line search variants.
enum LINE_SEARCH_ALGORITHM {
  LBFGS_LS_BT_ARMIJO = 1,
  LBFGS_LS_BT = 2,  // Default. Alias for Wolfe
  LBFGS_LS_BT_WOLFE = 2,
  LBFGS_LS_BT_STRONG_WOLFE = 3
};
// Line search outcome codes.
enum LINE_SEARCH_RETCODE {
  LS_SUCCESS = 0,
  LS_INVALID_STEP_MIN = 1,
  LS_INVALID_STEP_MAX = 2,
  LS_MAX_ITERS_REACHED = 3,
  LS_INVALID_DIR = 4,
  LS_INVALID_STEP = 5
};
// Overall optimizer outcome codes.
enum OPT_RETCODE {
  OPT_SUCCESS = 0,
  OPT_NUMERIC_ERROR = 1,
  OPT_LS_FAILED = 2,
  OPT_MAX_ITERS_REACHED = 3,
  OPT_INVALID_ARGS = 4
};
/**
 * @brief Hyper-parameters of the L-BFGS solver.
 *
 * Default-constructed values are set in the default constructor below; the
 * qn_params constructor translates the public cuML solver settings into
 * these internal fields.
 */
template <typename T = double>
class LBFGSParam {
 public:
  int m;          // lbfgs memory limit
  T epsilon;      // controls convergence
  int past;       // lookback for function value based convergence test
  T delta;        // controls fun val based conv test
  int max_iterations;
  int linesearch;  // see enum above
  int max_linesearch;
  T min_step;  // min. allowed step length
  T max_step;  // max. allowed step length
  T ftol;      // line search tolerance
  T wolfe;     // wolfe parameter
  T ls_dec;    // line search decrease factor
  T ls_inc;    // line search increase factor

 public:
  LBFGSParam()
  {
    m = 6;
    epsilon = T(1e-5);
    past = 0;
    delta = T(0);
    max_iterations = 0;
    linesearch = LBFGS_LS_BT_ARMIJO;
    max_linesearch = 20;
    min_step = T(1e-20);
    max_step = T(1e+20);
    ftol = T(1e-4);
    wolfe = T(0.9);
    ls_dec = T(0.5);
    ls_inc = T(2.1);
  }
  // Translate public qn_params into the internal solver settings; fields not
  // mentioned here keep their defaults from the delegated constructor.
  explicit LBFGSParam(const qn_params& pams) : LBFGSParam()
  {
    m = pams.lbfgs_memory;
    epsilon = T(pams.grad_tol);
    // sometimes even number works better - to detect zig-zags;
    past = pams.change_tol > 0 ? 10 : 0;
    delta = T(pams.change_tol);
    max_iterations = pams.max_iter;
    max_linesearch = pams.linesearch_max_iter;
    ftol = pams.change_tol > 0 ? T(pams.change_tol * 0.1) : T(1e-4);
  }
  // Validate all fields; returns 0 when everything is OK, otherwise a
  // 1-based code identifying the first offending parameter.
  inline int check_param() const
  { // TODO exceptions
    int ret = 1;
    if (m <= 0) return ret;
    ret++;
    if (epsilon <= 0) return ret;
    ret++;
    if (past < 0) return ret;
    ret++;
    if (delta < 0) return ret;
    ret++;
    if (max_iterations < 0) return ret;
    ret++;
    if (linesearch < LBFGS_LS_BT_ARMIJO || linesearch > LBFGS_LS_BT_STRONG_WOLFE) return ret;
    ret++;
    if (max_linesearch <= 0) return ret;
    ret++;
    if (min_step < 0) return ret;
    ret++;
    if (max_step < min_step) return ret;
    ret++;
    if (ftol <= 0 || ftol >= 0.5) return ret;
    ret++;
    if (wolfe <= ftol || wolfe >= 1) return ret;
    ret++;
    return 0;
  }
};
// True for loss types whose predictions are class labels (logistic, softmax,
// SVC); false for the regression losses.
inline bool qn_is_classification(qn_loss_type t)
{
  return t == QN_LOSS_LOGISTIC || t == QN_LOSS_SOFTMAX || t == QN_LOSS_SVC_L1 ||
         t == QN_LOSS_SVC_L2;
}
// Orthant projection: keep x when it has the same (strict) sign as y,
// otherwise return zero.
template <typename T>
HDI T project_orth(T x, T y)
{
  if (x * y <= T(0)) { return T(0); }
  return x;
}
/**
 * @brief Test the stopping criteria at iteration k.
 *
 * Two tests: (1) gradient norm small relative to max(f(x), epsilon), and
 * (2) when param.past > 0, insufficient change of f(x) compared to the value
 * `past` iterations ago, tracked in the fx_hist ring buffer.
 */
template <typename T>
inline bool check_convergence(
  const LBFGSParam<T>& param, const int k, const T fx, const T gnorm, std::vector<T>& fx_hist)
{
  // Positive scale factor for the stop condition
  T fmag = std::max(fx, param.epsilon);
  CUML_LOG_DEBUG(
    "%04d: f(x)=%.8f conv.crit=%.8f (gnorm=%.8f, fmag=%.8f)", k, fx, gnorm / fmag, gnorm, fmag);
  // Convergence test -- gradient
  if (gnorm <= param.epsilon * fmag) {
    CUML_LOG_DEBUG("Converged after %d iterations: f(x)=%.6f", k, fx);
    return true;
  }
  // Convergence test -- objective function value
  if (param.past > 0) {
    // Compare against the value recorded `past` iterations ago; only when
    // not converged is the ring-buffer slot overwritten with the current fx.
    if (k >= param.past && std::abs(fx_hist[k % param.past] - fx) <= param.delta * fmag) {
      CUML_LOG_DEBUG("Insufficient change in objective value");
      return true;
    }
    fx_hist[k % param.past] = fx;
  }
  return false;
}
/*
* Multiplies a vector g with the inverse hessian approximation, i.e.
* drt = - H * g,
* e.g. to compute the new search direction for g = \nabla f(x)
*/
// Computes drt = -H * g with the L-BFGS two-loop recursion over the stored
// correction pairs (columns of S and Y). Returns the new ring-buffer `end`
// index; increments *n_vec when the newest pair is accepted.
template <typename T>
inline int lbfgs_search_dir(const LBFGSParam<T>& param,
                            int* n_vec,
                            const int end_prev,
                            const SimpleDenseMat<T>& S,
                            const SimpleDenseMat<T>& Y,
                            const SimpleVec<T>& g,
                            const SimpleVec<T>& svec,
                            const SimpleVec<T>& yvec,
                            SimpleVec<T>& drt,
                            std::vector<T>& yhist,
                            std::vector<T>& alpha,
                            T* dev_scalar,
                            cudaStream_t stream)
{
  SimpleVec<T> sj, yj;  // mask vectors
  int end = end_prev;
  // note: update_state assigned svec, yvec to m_s[:,end], m_y[:,end]
  T ys = dot(svec, yvec, dev_scalar, stream);
  T yy = dot(yvec, yvec, dev_scalar, stream);
  CUML_LOG_TRACE("ys=%e, yy=%e", ys, yy);
  // Skipping test:
  if (ys <= std::numeric_limits<T>::epsilon() * yy) {
    // We can land here for example if yvec == 0 (no change in the gradient,
    // g_k == g_k+1). That means the Hessian is approximately zero. We cannot
    // use the QN model to update the search dir, we just continue along the
    // previous direction.
    //
    // See eq (3.9) and Section 6 in "A limited memory algorithm for bound
    // constrained optimization" Richard H. Byrd, Peihuang Lu, Jorge Nocedal and
    // Ciyou Zhu Technical Report NAM-08 (1994) NORTHWESTERN UNIVERSITY.
    //
    // Alternative condition to skip update is: ys / (-gs) <= epsmch,
    // (where epsmch = std::numeric_limits<T>::epsilon) given in Section 5 of
    // "L-BFGS-B Fortran subroutines for large-scale bound constrained
    // optimization" Ciyou Zhu, Richard H. Byrd, Peihuang Lu and Jorge Nocedal
    // (1994).
    CUML_LOG_DEBUG("L-BFGS WARNING: skipping update step ys=%f, yy=%f", ys, yy);
    return end;
  }
  (*n_vec)++;
  yhist[end] = ys;
  // Recursive formula to compute d = -H * g
  drt.ax(-1.0, g, stream);
  // First loop: newest to oldest stored pair, computing alpha[j] and
  // removing its component from drt.
  int bound = std::min(param.m, *n_vec);
  end = (end + 1) % param.m;
  int j = end;
  for (int i = 0; i < bound; i++) {
    j = (j + param.m - 1) % param.m;
    col_ref(S, sj, j);
    col_ref(Y, yj, j);
    alpha[j] = dot(sj, drt, dev_scalar, stream) / yhist[j];
    drt.axpy(-alpha[j], yj, drt, stream);
  }
  // Scale by the initial inverse-Hessian estimate ys / yy.
  drt.ax(ys / yy, drt, stream);
  // Second loop: oldest to newest, applying the beta corrections.
  for (int i = 0; i < bound; i++) {
    col_ref(S, sj, j);
    col_ref(Y, yj, j);
    T beta = dot(yj, drt, dev_scalar, stream) / yhist[j];
    drt.axpy((alpha[j] - beta), sj, drt, stream);
    j = (j + 1) % param.m;
  }
  return end;
}
// Pseudo-gradient of f(x) + C*|x| at a single coordinate. Away from zero it
// is the ordinary subgradient; at x == 0, pick the element of the
// subdifferential [dlossx - C, dlossx + C] closest to zero.
template <typename T>
HDI T get_pseudo_grad(T x, T dlossx, T C)
{
  if (x != 0) { return dlossx + raft::sgn(x) * C; }
  const T upper = dlossx + C;
  const T lower = dlossx - C;
  if (lower > T(0)) return lower;
  if (upper < T(0)) return upper;
  return T(0);
}
// Binary functor: project x onto the orthant given by the sign of scal * y
// (see project_orth).
template <typename T>
struct op_project {
  T scal;
  op_project(T s) : scal(s) {}
  HDI T operator()(const T x, const T y) const { return project_orth(x, scal * y); }
};
// Binary functor: coordinate-wise pseudo-gradient of the L1-regularized
// objective with penalty strength l1 (see get_pseudo_grad).
template <typename T>
struct op_pseudo_grad {
  T l1;
  op_pseudo_grad(const T lam) : l1(lam) {}
  HDI T operator()(const T x, const T dlossx) const { return get_pseudo_grad(x, dlossx, l1); }
};
}; // namespace detail
}; // namespace GLM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm/qn | rapidsai_public_repos/cuml/cpp/src/glm/qn/simple_mat/base.hpp | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <raft/core/handle.hpp>
#include <raft/core/interruptible.hpp>
#include <raft/util/cuda_utils.cuh>
namespace ML {
template <typename T>
struct SimpleDenseMat;
/**
 * @brief Abstract matrix interface: shape plus a GEMM hook, so that dense
 * and sparse implementations can be used interchangeably as the B operand.
 */
template <typename T>
struct SimpleMat {
  int m, n;  // rows, columns
  SimpleMat(int m, int n) : m(m), n(n) {}
  // This is a polymorphic base (pure virtuals below); the virtual destructor
  // makes deletion through a SimpleMat* well-defined.
  virtual ~SimpleMat() = default;
  void operator=(const SimpleMat<T>& other) = delete;
  // Debug-print the matrix contents to the given stream.
  virtual void print(std::ostream& oss) const = 0;
  /**
   * GEMM assigning to C where `this` refers to B.
   *
   * ```
   * C <- alpha * A^transA * (*this)^transB + beta * C
   * ```
   */
  virtual void gemmb(const raft::handle_t& handle,
                     const T alpha,
                     const SimpleDenseMat<T>& A,
                     const bool transA,
                     const bool transB,
                     const T beta,
                     SimpleDenseMat<T>& C,
                     cudaStream_t stream) const = 0;
};
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm/qn | rapidsai_public_repos/cuml/cpp/src/glm/qn/simple_mat/dense.hpp | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <iostream>
#include <vector>
#include "base.hpp"
#include <raft/core/handle.hpp>
#include <raft/linalg/add.cuh>
#include <raft/linalg/ternary_op.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
// #TODO: Replace with public header when ready
#include <raft/linalg/detail/cublas_wrappers.hpp>
#include <raft/linalg/map_then_reduce.cuh>
#include <raft/linalg/norm.cuh>
#include <raft/linalg/unary_op.cuh>
#include <rmm/device_uvector.hpp>
namespace ML {
enum STORAGE_ORDER { COL_MAJOR = 0, ROW_MAJOR = 1 };
/**
 * @brief Non-owning view of a dense device matrix (column- or row-major)
 * with the linear-algebra helpers used by the QN solvers. The view does not
 * manage the lifetime of `data`.
 */
template <typename T>
struct SimpleDenseMat : SimpleMat<T> {
  typedef SimpleMat<T> Super;
  int len;  // total number of elements, m * n
  T* data;  // non-owning pointer to device memory
  STORAGE_ORDER ord;  // storage order: runtime param for compile time sake
  SimpleDenseMat(STORAGE_ORDER order = COL_MAJOR) : Super(0, 0), data(nullptr), len(0), ord(order)
  {
  }
  SimpleDenseMat(T* data, int m, int n, STORAGE_ORDER order = COL_MAJOR)
    : Super(m, n), data(data), len(m * n), ord(order)
  {
  }
  // Re-point the view at a new buffer/shape (storage order is kept).
  void reset(T* data_, int m_, int n_)
  {
    this->m = m_;
    this->n = n_;
    data = data_;
    len = m_ * n_;
  }
  // Implemented GEMM as a static method here to improve readability
  // C <- alpha * A^transA * B^transB + beta * C. Only the all-col-major case
  // calls cublas directly; other storage orders are reduced to it by
  // recursion (see comments below).
  inline static void gemm(const raft::handle_t& handle,
                          const T alpha,
                          const SimpleDenseMat<T>& A,
                          const bool transA,
                          const SimpleDenseMat<T>& B,
                          const bool transB,
                          const T beta,
                          SimpleDenseMat<T>& C,
                          cudaStream_t stream)
  {
    // kA/kB: the shared inner dimension, as implied by the trans flags.
    int kA = A.n;
    int kB = B.m;
    if (transA) {
      ASSERT(A.n == C.m, "GEMM invalid dims: m");
      kA = A.m;
    } else {
      ASSERT(A.m == C.m, "GEMM invalid dims: m");
    }
    if (transB) {
      ASSERT(B.m == C.n, "GEMM invalid dims: n");
      kB = B.n;
    } else {
      ASSERT(B.n == C.n, "GEMM invalid dims: n");
    }
    ASSERT(kA == kB, "GEMM invalid dims: k");
    if (A.ord == COL_MAJOR && B.ord == COL_MAJOR && C.ord == COL_MAJOR) {
      // #TODO: Call from public API when ready
      raft::linalg::detail::cublasgemm(handle.get_cublas_handle(),  // handle
                                       transA ? CUBLAS_OP_T : CUBLAS_OP_N,  // transA
                                       transB ? CUBLAS_OP_T : CUBLAS_OP_N,  // transB
                                       C.m,
                                       C.n,
                                       kA,  // dimensions m,n,k
                                       &alpha,
                                       A.data,
                                       A.m,  // lda
                                       B.data,
                                       B.m,  // ldb
                                       &beta,
                                       C.data,
                                       C.m,  // ldc,
                                       stream);
      return;
    }
    // A row-major matrix viewed as col-major is its transpose, so flip the
    // corresponding trans flag and recurse.
    if (A.ord == ROW_MAJOR) {
      const SimpleDenseMat<T> Acm(A.data, A.n, A.m, COL_MAJOR);
      gemm(handle, alpha, Acm, !transA, B, transB, beta, C, stream);
      return;
    }
    if (B.ord == ROW_MAJOR) {
      const SimpleDenseMat<T> Bcm(B.data, B.n, B.m, COL_MAJOR);
      gemm(handle, alpha, A, transA, Bcm, !transB, beta, C, stream);
      return;
    }
    // Row-major C: compute the transposed product C^T = B^T * A^T into a
    // col-major view of C's buffer.
    if (C.ord == ROW_MAJOR) {
      SimpleDenseMat<T> Ccm(C.data, C.n, C.m, COL_MAJOR);
      gemm(handle, alpha, B, !transB, A, !transA, beta, Ccm, stream);
      return;
    }
  }
  // GEMM hook of the SimpleMat interface: `this` plays the role of B.
  inline void gemmb(const raft::handle_t& handle,
                    const T alpha,
                    const SimpleDenseMat<T>& A,
                    const bool transA,
                    const bool transB,
                    const T beta,
                    SimpleDenseMat<T>& C,
                    cudaStream_t stream) const override
  {
    SimpleDenseMat<T>::gemm(handle, alpha, A, transA, *this, transB, beta, C, stream);
  }
  /**
   * GEMM assigning to C where `this` refers to C.
   *
   * ```
   * *this <- alpha * A^transA * B^transB + beta * (*this)
   * ```
   */
  inline void assign_gemm(const raft::handle_t& handle,
                          const T alpha,
                          const SimpleDenseMat<T>& A,
                          const bool transA,
                          const SimpleMat<T>& B,
                          const bool transB,
                          const T beta,
                          cudaStream_t stream)
  {
    // Dispatch through B so that sparse B picks its own GEMM implementation.
    B.gemmb(handle, alpha, A, transA, transB, beta, *this, stream);
  }
  // this = a*x
  inline void ax(const T a, const SimpleDenseMat<T>& x, cudaStream_t stream)
  {
    ASSERT(ord == x.ord, "SimpleDenseMat::ax: Storage orders must match");
    auto scale = [a] __device__(const T x) { return a * x; };
    raft::linalg::unaryOp(data, x.data, len, scale, stream);
  }
  // this = a*x + y
  inline void axpy(const T a,
                   const SimpleDenseMat<T>& x,
                   const SimpleDenseMat<T>& y,
                   cudaStream_t stream)
  {
    ASSERT(ord == x.ord, "SimpleDenseMat::axpy: Storage orders must match");
    ASSERT(ord == y.ord, "SimpleDenseMat::axpy: Storage orders must match");
    auto axpy = [a] __device__(const T x, const T y) { return a * x + y; };
    raft::linalg::binaryOp(data, x.data, y.data, len, axpy, stream);
  }
  // this[i] = f(other[i]) element-wise.
  template <typename Lambda>
  inline void assign_unary(const SimpleDenseMat<T>& other, Lambda f, cudaStream_t stream)
  {
    ASSERT(ord == other.ord, "SimpleDenseMat::assign_unary: Storage orders must match");
    raft::linalg::unaryOp(data, other.data, len, f, stream);
  }
  // this[i] = f(other1[i], other2[i]) element-wise.
  template <typename Lambda>
  inline void assign_binary(const SimpleDenseMat<T>& other1,
                            const SimpleDenseMat<T>& other2,
                            Lambda& f,
                            cudaStream_t stream)
  {
    ASSERT(ord == other1.ord, "SimpleDenseMat::assign_binary: Storage orders must match");
    ASSERT(ord == other2.ord, "SimpleDenseMat::assign_binary: Storage orders must match");
    raft::linalg::binaryOp(data, other1.data, other2.data, len, f, stream);
  }
  // this[i] = f(other1[i], other2[i], other3[i]) element-wise.
  template <typename Lambda>
  inline void assign_ternary(const SimpleDenseMat<T>& other1,
                             const SimpleDenseMat<T>& other2,
                             const SimpleDenseMat<T>& other3,
                             Lambda& f,
                             cudaStream_t stream)
  {
    ASSERT(ord == other1.ord, "SimpleDenseMat::assign_ternary: Storage orders must match");
    ASSERT(ord == other2.ord, "SimpleDenseMat::assign_ternary: Storage orders must match");
    ASSERT(ord == other3.ord, "SimpleDenseMat::assign_ternary: Storage orders must match");
    raft::linalg::ternaryOp(data, other1.data, other2.data, other3.data, len, f, stream);
  }
  // Fill every element with val.
  inline void fill(const T val, cudaStream_t stream)
  {
    // TODO this reads data unnecessary, though it's mostly used for testing
    auto f = [val] __device__(const T x) { return val; };
    raft::linalg::unaryOp(data, data, len, f, stream);
  }
  // Device-to-device copy of a same-shape, same-order matrix.
  inline void copy_async(const SimpleDenseMat<T>& other, cudaStream_t stream)
  {
    ASSERT((ord == other.ord) && (this->m == other.m) && (this->n == other.n),
           "SimpleDenseMat::copy: matrices not compatible");
    RAFT_CUDA_TRY(
      cudaMemcpyAsync(data, other.data, len * sizeof(T), cudaMemcpyDeviceToDevice, stream));
  }
  void print(std::ostream& oss) const override { oss << (*this) << std::endl; }
  void operator=(const SimpleDenseMat<T>& other) = delete;
};
// A dense vector: an n x 1 column-major SimpleDenseMat view.
template <typename T>
struct SimpleVec : SimpleDenseMat<T> {
  typedef SimpleDenseMat<T> Super;
  SimpleVec(T* data, const int n) : Super(data, n, 1, COL_MAJOR) {}
  // this = alpha * A * x + beta * this
  void assign_gemv(const raft::handle_t& handle,
                   const T alpha,
                   const SimpleDenseMat<T>& A,
                   bool transA,
                   const SimpleVec<T>& x,
                   const T beta,
                   cudaStream_t stream)
  {
    // Implemented as a GEMM with an n x 1 right-hand side.
    Super::assign_gemm(handle, alpha, A, transA, x, false, beta, stream);
  }
  SimpleVec() : Super(COL_MAJOR) {}
  inline void reset(T* new_data, int n) { Super::reset(new_data, n, 1); }
};
// Point mask_vec at column c of a column-major matrix (no copy is made).
template <typename T>
inline void col_ref(const SimpleDenseMat<T>& mat, SimpleVec<T>& mask_vec, int c)
{
  ASSERT(mat.ord == COL_MAJOR, "col_ref only available for column major mats");
  T* col_begin = &mat.data[mat.m * c];
  mask_vec.reset(col_begin, mat.m);
}
// Point mask_mat at columns [c_from, c_to) of a column-major matrix (no copy
// is made).
template <typename T>
inline void col_slice(const SimpleDenseMat<T>& mat,
                      SimpleDenseMat<T>& mask_mat,
                      int c_from,
                      int c_to)
{
  ASSERT(c_from >= 0 && c_from < mat.n, "col_slice: invalid from");
  ASSERT(c_to >= 0 && c_to <= mat.n, "col_slice: invalid to");
  // The previous revision's messages said "col_ref" here (copy-paste error).
  ASSERT(mat.ord == COL_MAJOR, "col_slice only available for column major mats");
  ASSERT(mask_mat.ord == COL_MAJOR, "col_slice only available for column major mask");
  T* slice_begin = &mat.data[mat.m * c_from];
  mask_mat.reset(slice_begin, mat.m, c_to - c_from);
}
// Reductions such as dot or norm require an additional location in dev mem
// to hold the result. We don't want to deal with this in the SimpleVec class
// as it impedes thread safety and constness
// Dot product <u, v>, returned on the host. tmp_dev supplies one scalar of
// device scratch for the reduction; the stream is synchronized before the
// value is read back.
template <typename T>
inline T dot(const SimpleVec<T>& u, const SimpleVec<T>& v, T* tmp_dev, cudaStream_t stream)
{
  auto prod = [] __device__(const T x, const T y) { return x * y; };
  raft::linalg::mapThenSumReduce(tmp_dev, u.len, prod, stream, u.data, v.data);
  T result;
  raft::update_host(&result, tmp_dev, 1, stream);
  raft::interruptible::synchronize(stream);
  return result;
}
// Squared L2 norm of u, computed as dot(u, u). Synchronizes the stream.
template <typename T>
inline T squaredNorm(const SimpleVec<T>& u, T* tmp_dev, cudaStream_t stream)
{
  return dot(u, u, tmp_dev, stream);
}
// LInf norm: max_i |u_i|, computed on device and returned on the host
// (synchronizes the stream).
template <typename T>
inline T nrmMax(const SimpleVec<T>& u, T* tmp_dev, cudaStream_t stream)
{
  auto f = [] __device__(const T x) { return raft::myAbs<T>(x); };
  auto r = [] __device__(const T x, const T y) { return raft::myMax<T>(x, y); };
  raft::linalg::mapThenReduce(tmp_dev, u.len, T(0), f, r, stream, u.data);
  T tmp_host;
  raft::update_host(&tmp_host, tmp_dev, 1, stream);
  raft::interruptible::synchronize(stream);
  return tmp_host;
}
// L2 norm: sqrt(<u, u>). Synchronizes the stream (via squaredNorm).
template <typename T>
inline T nrm2(const SimpleVec<T>& u, T* tmp_dev, cudaStream_t stream)
{
  return raft::mySqrt<T>(squaredNorm(u, tmp_dev, stream));
}
/**
 * L1 norm ||u||_1 = sum_i |u_i| of a device vector.
 *
 * @param u        device vector
 * @param tmp_dev  caller-provided device scalar for the reduction result
 * @param stream   CUDA stream; this call synchronizes it before returning
 * @return the L1 norm, copied back to the host
 */
template <typename T>
inline T nrm1(const SimpleVec<T>& u, T* tmp_dev, cudaStream_t stream)
{
  // Treat the vector as a 1-column matrix and take a row-wise L1 norm.
  raft::linalg::rowNorm(
    tmp_dev, u.data, u.len, 1, raft::linalg::L1Norm, true, stream, raft::Nop<T>());
  T tmp_host;
  raft::update_host(&tmp_host, tmp_dev, 1, stream);
  raft::interruptible::synchronize(stream);
  return tmp_host;
}
/**
 * Stream a device vector to `os` as space-separated values (debug helper).
 * Copies the data to the host synchronously on the default stream.
 */
template <typename T>
std::ostream& operator<<(std::ostream& os, const SimpleVec<T>& v)
{
  // Guard: the original accessed out[0] / out[it] even for len == 0 (UB).
  if (v.len <= 0) { return os; }
  std::vector<T> out(v.len);
  // .data() is well-defined on any vector, unlike &out[0] on an empty one.
  raft::update_host(out.data(), v.data, v.len, 0);
  raft::interruptible::synchronize(rmm::cuda_stream_view());
  for (int it = 0; it < v.len - 1; it++) {
    os << out[it] << " ";
  }
  os << out[v.len - 1];  // last element without a trailing separator
  return os;
}
/**
 * Stream a device matrix to `os`, one row per line, comma-separated (debug helper).
 * Prints the storage order first, then the logical m x n contents for either layout.
 */
template <typename T>
std::ostream& operator<<(std::ostream& os, const SimpleDenseMat<T>& mat)
{
  os << "ord=" << (mat.ord == COL_MAJOR ? "CM" : "RM") << "\n";
  // Guard: the original dereferenced &out[0] and out[idx] even for len == 0 (UB).
  if (mat.len <= 0) { return os; }
  std::vector<T> out(mat.len);
  raft::update_host(out.data(), mat.data, mat.len, rmm::cuda_stream_default);
  raft::interruptible::synchronize(rmm::cuda_stream_view());
  if (mat.ord == COL_MAJOR) {
    // Column-major: element (r, c) lives at r + c * m, so step by m across a row.
    for (int r = 0; r < mat.m; r++) {
      int idx = r;
      for (int c = 0; c < mat.n - 1; c++) {
        os << out[idx] << ",";
        idx += mat.m;
      }
      os << out[idx] << std::endl;
    }
  } else {
    // Row-major: each of the m rows is contiguous with n elements.
    // (Renamed the original's swapped r/c loop variables for clarity.)
    for (int r = 0; r < mat.m; r++) {
      int idx = r * mat.n;
      for (int c = 0; c < mat.n - 1; c++) {
        os << out[idx] << ",";
        idx += 1;
      }
      os << out[idx] << std::endl;
    }
  }
  return os;
}
/**
 * A SimpleVec that owns its device memory via an rmm::device_uvector.
 * Construction allocates on the given stream; the view (data/len) is bound
 * to the owned buffer for the lifetime of the object.
 */
template <typename T>
struct SimpleVecOwning : SimpleVec<T> {
  typedef SimpleVec<T> Super;
  typedef rmm::device_uvector<T> Buffer;
  Buffer buf;  // owning device allocation backing the SimpleVec view

  SimpleVecOwning() = delete;

  /** Allocate n elements on `stream` and point the base-class view at them. */
  SimpleVecOwning(int n, cudaStream_t stream) : Super(), buf(n, stream)
  {
    Super::reset(buf.data(), n);
  }

  // Assignment would alias or copy the owned buffer; forbid it.
  void operator=(const SimpleVec<T>& other) = delete;
};
/**
 * A SimpleDenseMat that owns its device memory via an rmm::device_uvector.
 * Construction allocates m * n elements on the given stream; the view is bound
 * to the owned buffer for the lifetime of the object.
 */
template <typename T>
struct SimpleMatOwning : SimpleDenseMat<T> {
  typedef SimpleDenseMat<T> Super;
  typedef rmm::device_uvector<T> Buffer;
  Buffer buf;  // owning device allocation backing the matrix view
  using Super::m;
  using Super::n;
  using Super::ord;

  SimpleMatOwning() = delete;

  /** Allocate an m x n matrix on `stream` with the requested storage order. */
  SimpleMatOwning(int m, int n, cudaStream_t stream, STORAGE_ORDER order = COL_MAJOR)
    // Widen before multiplying: `m * n` in int could overflow for large matrices.
    : Super(order), buf(static_cast<std::size_t>(m) * n, stream)
  {
    Super::reset(buf.data(), m, n);
  }

  // Assignment would alias or copy the owned buffer; forbid it.
  void operator=(const SimpleVec<T>& other) = delete;
};
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm/qn | rapidsai_public_repos/cuml/cpp/src/glm/qn/simple_mat/sparse.hpp | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <iostream>
#include <vector>
#include "base.hpp"
#include <raft/core/handle.hpp>
#include <raft/linalg/ternary_op.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <raft/linalg/add.cuh>
#include <raft/linalg/map_then_reduce.cuh>
#include <raft/linalg/norm.cuh>
#include <raft/linalg/unary_op.cuh>
#include <raft/sparse/detail/cusparse_wrappers.h>
#include <rmm/device_uvector.hpp>
#include <raft/sparse/detail/cusparse_wrappers.h>
namespace ML {
/**
* Sparse matrix in CSR format.
*
 * Note, we use cuSPARSE to manipulate matrices, and it guarantees:
*
* 1. row_ids[m] == nnz
* 2. cols are sorted within rows.
*
* However, when the data comes from the outside, we cannot guarantee that.
*/
template <typename T>
struct SimpleSparseMat : SimpleMat<T> {
  typedef SimpleMat<T> Super;
  T* values;    // device: nnz stored non-zero values
  int* cols;    // device: nnz column indices (sorted within each row for cuSPARSE data)
  int* row_ids; // device: m + 1 CSR row offsets; row_ids[m] == nnz
  int nnz;      // number of stored non-zeros

  SimpleSparseMat() : Super(0, 0), values(nullptr), cols(nullptr), row_ids(nullptr), nnz(0) {}

  /** Wrap existing device CSR arrays (non-owning); validates row_ids[m] == nnz. */
  SimpleSparseMat(T* values, int* cols, int* row_ids, int nnz, int m, int n)
    : Super(m, n), values(values), cols(cols), row_ids(row_ids), nnz(nnz)
  {
    check_csr(*this, 0);
  }

  void print(std::ostream& oss) const override { oss << (*this) << std::endl; }

  void operator=(const SimpleSparseMat<T>& other) = delete;

  /**
   * Compute C = alpha * op(A) * op(B) + beta * C where B is this sparse matrix.
   *
   * cusparseSpMM only accepts the sparse operand on the left, so the product is
   * computed as C^T = op(B)^T * op(A)^T by swapping the arguments and describing
   * the dense matrices with flipped dimensions/order (see the table below).
   */
  inline void gemmb(const raft::handle_t& handle,
                    const T alpha,
                    const SimpleDenseMat<T>& A,
                    const bool transA,
                    const bool transB,
                    const T beta,
                    SimpleDenseMat<T>& C,
                    cudaStream_t stream) const override
  {
    const SimpleSparseMat<T>& B = *this;
    // Shape checks: the inner dimensions (kA, kB) of op(A) and op(B) must agree.
    int kA = A.n;
    int kB = B.m;

    if (transA) {
      ASSERT(A.n == C.m, "GEMM invalid dims: m");
      kA = A.m;
    } else {
      ASSERT(A.m == C.m, "GEMM invalid dims: m");
    }

    if (transB) {
      ASSERT(B.m == C.n, "GEMM invalid dims: n");
      kB = B.n;
    } else {
      ASSERT(B.n == C.n, "GEMM invalid dims: n");
    }
    ASSERT(kA == kB, "GEMM invalid dims: k");

    // matrix C must change the order and be transposed, because we need
    // to swap arguments A and B in cusparseSpMM.
    cusparseDnMatDescr_t descrC;
    auto order = C.ord == COL_MAJOR ? CUSPARSE_ORDER_ROW : CUSPARSE_ORDER_COL;
    RAFT_CUSPARSE_TRY(raft::sparse::detail::cusparsecreatednmat(
      &descrC, C.n, C.m, order == CUSPARSE_ORDER_COL ? C.n : C.m, C.data, order));

    /*
      The matrix A must have the same order as the matrix C in the input
      of function cusparseSpMM (i.e. swapped order w.r.t. original C).
      To account this requirement, I may need to flip transA (whether to transpose A).

         C   C'  rowsC' colsC' ldC'  A  A'  rowsA' colsA' ldA'  flipTransA
         c   r   n      m      m     c  r   n      m      m     x
         c   r   n      m      m     r  r   m      n      n     o
         r   c   n      m      n     c  c   m      n      m     o
         r   c   n      m      n     r  c   n      m      n     x

      where:
        c/r    - column/row major order
        A,C    - input to gemmb
        A', C' - input to cusparseSpMM
        ldX'   - leading dimension - m or n, depending on order and transX
     */
    cusparseDnMatDescr_t descrA;
    RAFT_CUSPARSE_TRY(raft::sparse::detail::cusparsecreatednmat(&descrA,
                                                                C.ord == A.ord ? A.n : A.m,
                                                                C.ord == A.ord ? A.m : A.n,
                                                                A.ord == COL_MAJOR ? A.m : A.n,
                                                                A.data,
                                                                order));
    // XOR: transpose A exactly when the requested transA disagrees with the
    // implicit transpose introduced by matching A's order to C's.
    auto opA =
      transA ^ (C.ord == A.ord) ? CUSPARSE_OPERATION_NON_TRANSPOSE : CUSPARSE_OPERATION_TRANSPOSE;

    cusparseSpMatDescr_t descrB;
    RAFT_CUSPARSE_TRY(raft::sparse::detail::cusparsecreatecsr(
      &descrB, B.m, B.n, B.nnz, B.row_ids, B.cols, B.values));
    // Inverted on purpose: computing C^T swaps the sense of transB as well.
    auto opB = transB ? CUSPARSE_OPERATION_NON_TRANSPOSE : CUSPARSE_OPERATION_TRANSPOSE;

    auto alg = order == CUSPARSE_ORDER_COL ? CUSPARSE_SPMM_CSR_ALG1 : CUSPARSE_SPMM_CSR_ALG2;

    // Two-phase cuSPARSE call: query the workspace size, then run the SpMM.
    size_t bufferSize;
    RAFT_CUSPARSE_TRY(raft::sparse::detail::cusparsespmm_bufferSize(handle.get_cusparse_handle(),
                                                                    opB,
                                                                    opA,
                                                                    &alpha,
                                                                    descrB,
                                                                    descrA,
                                                                    &beta,
                                                                    descrC,
                                                                    alg,
                                                                    &bufferSize,
                                                                    stream));

    raft::interruptible::synchronize(stream);

    rmm::device_uvector<T> tmp(bufferSize, stream);

    RAFT_CUSPARSE_TRY(raft::sparse::detail::cusparsespmm(handle.get_cusparse_handle(),
                                                         opB,
                                                         opA,
                                                         &alpha,
                                                         descrB,
                                                         descrA,
                                                         &beta,
                                                         descrC,
                                                         alg,
                                                         tmp.data(),
                                                         stream));

    RAFT_CUSPARSE_TRY(cusparseDestroyDnMat(descrA));
    RAFT_CUSPARSE_TRY(cusparseDestroySpMat(descrB));
    RAFT_CUSPARSE_TRY(cusparseDestroyDnMat(descrC));
  }
};
/**
 * Validate the CSR invariant row_ids[m] == nnz by reading the final row offset
 * from device memory. Fails with an assertion if the invariant is violated.
 */
template <typename T>
inline void check_csr(const SimpleSparseMat<T>& mat, cudaStream_t stream)
{
  // Pull the terminating element of the row-offsets array back to the host.
  int last_offset;
  raft::update_host(&last_offset, &mat.row_ids[mat.m], 1, stream);
  raft::interruptible::synchronize(stream);

  ASSERT(last_offset == mat.nnz,
         "SimpleSparseMat: the size of CSR row_ids array must be `m + 1`, and "
         "the last element must be equal nnz.");
}
/**
 * Stream a sparse CSR matrix to `os` in dense form, one row per line,
 * comma-separated, with implicit zeros printed as "0" (debug helper).
 */
template <typename T>
std::ostream& operator<<(std::ostream& os, const SimpleSparseMat<T>& mat)
{
  check_csr(mat, 0);
  os << "SimpleSparseMat (CSR)"
     << "\n";
  std::vector<T> values(mat.nnz);
  std::vector<int> cols(mat.nnz);
  std::vector<int> row_ids(mat.m + 1);
  // .data() is well-defined on empty vectors, unlike &v[0] when nnz == 0 (UB).
  raft::update_host(values.data(), mat.values, mat.nnz, rmm::cuda_stream_default);
  raft::update_host(cols.data(), mat.cols, mat.nnz, rmm::cuda_stream_default);
  raft::update_host(row_ids.data(), mat.row_ids, mat.m + 1, rmm::cuda_stream_default);
  raft::interruptible::synchronize(rmm::cuda_stream_view());

  // Walk each row's [row_ids[row], row_ids[row+1]) slice, printing stored
  // values at their column positions and zeros everywhere else.
  int i, row_end = 0;
  for (int row = 0; row < mat.m; row++) {
    i       = row_end;
    row_end = row_ids[row + 1];
    for (int col = 0; col < mat.n; col++) {
      if (i >= row_end || col < cols[i]) {
        os << "0";
      } else {
        os << values[i];
        i++;
      }
      if (col < mat.n - 1) os << ",";
    }
    os << std::endl;
  }

  return os;
}
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src/glm/qn | rapidsai_public_repos/cuml/cpp/src/glm/qn/mg/qn_mg.cuh | /*
* Copyright (c) 2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "glm_base_mg.cuh"
#include <glm/qn/glm_logistic.cuh>
#include <glm/qn/glm_regularizer.cuh>
#include <glm/qn/glm_softmax.cuh>
#include <glm/qn/glm_svm.cuh>
#include <glm/qn/qn_solvers.cuh>
#include <glm/qn/qn_util.cuh>
#include <cuml/linear_model/qn.h>
#include <rmm/device_uvector.hpp>
namespace ML {
namespace GLM {
namespace opg {
using namespace ML::GLM::detail;
/**
 * Fit a quasi-Newton GLM across multiple ranks.
 *
 * Wraps the loss in an L2 (Tikhonov) regularizer, builds the multi-GPU
 * objective (which allreduces loss and gradient across ranks), and runs
 * the L-BFGS/OWL-QN minimizer. L1 is handled inside the solver.
 *
 * @param[in]     handle     raft handle (stream, comms)
 * @param[in]     pams       solver hyper-parameters
 * @param[in]     loss       loss function object (provides n_param)
 * @param[in]     X          local partition of the input matrix
 * @param[in]     y          local partition of the labels
 * @param[in,out] Z          workspace for the linear predictions
 * @param[in,out] w0_data    initial coefficients on input; solution on output
 * @param[out]    fx         final objective value
 * @param[out]    num_iters  number of solver iterations performed
 * @param[in]     n_samples  global number of samples across all ranks
 * @param[in]     rank       this process' rank
 * @param[in]     n_ranks    total number of ranks
 * @return solver status code from qn_minimize
 */
template <typename T, typename LossFunction>
int qn_fit_mg(const raft::handle_t& handle,
              const qn_params& pams,
              LossFunction& loss,
              const SimpleMat<T>& X,
              const SimpleVec<T>& y,
              SimpleDenseMat<T>& Z,
              T* w0_data,  // initial value and result
              T* fx,
              int* num_iters,
              size_t n_samples,
              int rank,
              int n_ranks)
{
  // (removed an unused local `cudaStream_t stream` that was never read)
  LBFGSParam<T> opt_param(pams);
  SimpleVec<T> w0(w0_data, loss.n_param);

  // Scale the regularization strength with the number of samples.
  T l1 = pams.penalty_l1;
  T l2 = pams.penalty_l2;
  if (pams.penalty_normalized) {
    l1 /= n_samples;
    l2 /= n_samples;
  }

  ML::GLM::detail::Tikhonov<T> reg(l2);
  ML::GLM::detail::RegularizedGLM<T, LossFunction, decltype(reg)> regularizer_obj(&loss, &reg);

  // Multi-GPU objective: aggregates per-rank loss/gradient via allreduce.
  auto obj_function = GLMWithDataMG(handle, rank, n_ranks, n_samples, &regularizer_obj, X, y, Z);
  return ML::GLM::detail::qn_minimize(
    handle, w0, fx, num_iters, obj_function, l1, opt_param, pams.verbose);
}
/**
 * Dispatch a multi-GPU quasi-Newton fit based on the configured loss type.
 *
 * Allocates the prediction workspace Z, wraps the raw label pointer, and calls
 * qn_fit_mg with the concrete loss object. Currently only logistic (binary)
 * and softmax (multiclass) losses are supported in the multi-GPU path.
 *
 * @param[in]     handle        raft handle
 * @param[in]     pams          solver hyper-parameters (includes loss type)
 * @param[in]     X             local partition of the input matrix
 * @param[in]     y_data        local labels (length X.m)
 * @param[in]     C             number of classes
 * @param[in,out] w0_data       initial coefficients on input; solution on output
 * @param[out]    f             final objective value
 * @param[out]    num_iters     solver iteration count
 * @param[in]     n_samples     global number of samples across all ranks
 * @param[in]     rank          this process' rank
 * @param[in]     n_ranks       total number of ranks
 * @param[in]     sample_weight currently unused in the MG path — TODO confirm
 * @param[in]     svr_eps       currently unused (SVR not supported here)
 */
template <typename T>
inline void qn_fit_x_mg(const raft::handle_t& handle,
                        const qn_params& pams,
                        SimpleMat<T>& X,
                        T* y_data,
                        int C,
                        T* w0_data,
                        T* f,
                        int* num_iters,
                        int64_t n_samples,
                        int rank,
                        int n_ranks,
                        T* sample_weight = nullptr,
                        T svr_eps        = 0)
{
  /*
   NB:
    N - number of data rows
    D - number of data columns (features)
    C - number of output classes

    X in R^[N, D]
    w in R^[D, C]
    y in {0, 1}^[N, C] or {cat}^N

    Dimensionality of w0 depends on loss, so we initialize it later.
   */
  cudaStream_t stream = handle.get_stream();
  int N               = X.m;
  int D               = X.n;
  // Binary classification uses a single output column; multiclass uses C.
  int n_targets       = ML::GLM::detail::qn_is_classification(pams.loss) && C == 2 ? 1 : C;
  rmm::device_uvector<T> tmp(n_targets * N, stream);
  SimpleDenseMat<T> Z(tmp.data(), n_targets, N);
  SimpleVec<T> y(y_data, N);

  switch (pams.loss) {
    case QN_LOSS_LOGISTIC: {
      ASSERT(C == 2, "qn_mg.cuh: logistic loss invalid C");
      ML::GLM::detail::LogisticLoss<T> loss(handle, D, pams.fit_intercept);
      ML::GLM::opg::qn_fit_mg<T, decltype(loss)>(
        handle, pams, loss, X, y, Z, w0_data, f, num_iters, n_samples, rank, n_ranks);
    } break;
    case QN_LOSS_SOFTMAX: {
      ASSERT(C > 2, "qn_mg.cuh: softmax invalid C");
      ML::GLM::detail::Softmax<T> loss(handle, D, C, pams.fit_intercept);
      ML::GLM::opg::qn_fit_mg<T, decltype(loss)>(
        handle, pams, loss, X, y, Z, w0_data, f, num_iters, n_samples, rank, n_ranks);
    } break;
    default: {
      ASSERT(false, "qn_mg.cuh: unknown loss function type (id = %d).", pams.loss);
    }
  }
}
}; // namespace opg
}; // namespace GLM
}; // namespace ML | 0 |
rapidsai_public_repos/cuml/cpp/src/glm/qn | rapidsai_public_repos/cuml/cpp/src/glm/qn/mg/glm_base_mg.cuh | /*
* Copyright (c) 2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <raft/core/comms.hpp>
#include <raft/core/handle.hpp>
#include <raft/linalg/multiply.cuh>
#include <raft/util/cudart_utils.hpp>
#include <glm/qn/glm_base.cuh>
#include <glm/qn/glm_logistic.cuh>
#include <glm/qn/glm_regularizer.cuh>
#include <glm/qn/qn_solvers.cuh>
#include <glm/qn/qn_util.cuh>
namespace ML {
namespace GLM {
namespace opg {
/**
 * Multi-GPU version of linearBwd: compute the local contribution to the
 * gradient of the linear layer, normalized by the GLOBAL sample count so the
 * subsequent allreduce(SUM) over ranks yields the correct global gradient.
 *
 * @param[in]     handle    raft handle
 * @param[in,out] G         gradient matrix, G <- (1/n_samples) dZ * X (+ beta G)
 * @param[in]     X         local input partition
 * @param[in]     dZ        local loss gradient w.r.t. predictions
 * @param[in]     setZero   if true, overwrite G; otherwise accumulate into it
 * @param[in]     n_samples global number of samples across all ranks
 * @param[in]     n_ranks   number of ranks (beta is divided by it so that the
 *                          allreduce over ranks contributes beta * G exactly once)
 */
template <typename T>
// multi-gpu version of linearBwd
inline void linearBwdMG(const raft::handle_t& handle,
                        SimpleDenseMat<T>& G,
                        const SimpleMat<T>& X,
                        const SimpleDenseMat<T>& dZ,
                        bool setZero,
                        const int64_t n_samples,
                        const int n_ranks)
{
  cudaStream_t stream = handle.get_stream();
  // Backward pass:
  // - compute G <- dZ * X.T
  // - for bias: Gb = mean(dZ, 1)

  // If G has one more column than X has features, the extra column is the bias.
  const bool has_bias = X.n != G.n;
  const int D         = X.n;
  const T beta        = setZero ? T(0) : T(1);

  if (has_bias) {
    // Split G into the weight columns [0, D) and the bias column D.
    SimpleVec<T> Gbias;
    SimpleDenseMat<T> Gweights;

    col_ref(G, Gbias, D);

    col_slice(G, Gweights, 0, D);

    // TODO can this be fused somehow?
    Gweights.assign_gemm(handle, 1.0 / n_samples, dZ, false, X, false, beta / n_ranks, stream);

    // Bias gradient: row-wise mean of dZ over local columns, rescaled from the
    // local column count (dZ.n) to the global sample count.
    raft::stats::mean(Gbias.data, dZ.data, dZ.m, dZ.n, false, true, stream);
    T bias_factor = 1.0 * dZ.n / n_samples;
    raft::linalg::multiplyScalar(Gbias.data, Gbias.data, bias_factor, dZ.m, stream);

  } else {
    CUML_LOG_DEBUG("has bias not enabled");
    G.assign_gemm(handle, 1.0 / n_samples, dZ, false, X, false, beta / n_ranks, stream);
  }
}
/**
* @brief Aggregates local gradient vectors and loss values from local training data. This
* class is the multi-node-multi-gpu version of GLMWithData.
*
* The implementation overrides existing GLMWithData::() function. The purpose is to
* aggregate local gradient vectors and loss values from distributed X, y, where X represents the
* input vectors and y represents labels.
*
* GLMWithData::() currently invokes three functions: linearFwd, getLossAndDz and linearBwd.
* linearFwd multiplies local input vectors with the coefficient vector (i.e. coef_), so does not
* require communication. getLossAndDz calculates local loss so requires allreduce to obtain a
* global loss. linearBwd calculates local gradient vector so requires allreduce to obtain a
* global gradient vector. The global loss and the global gradient vector will be used in
* min_lbfgs to update coefficient. The update runs individually on every GPU and when finished,
* all GPUs have the same value of coefficient.
*/
template <typename T, class GLMObjective>
struct GLMWithDataMG : ML::GLM::detail::GLMWithData<T, GLMObjective> {
  const raft::handle_t* handle_p;  // non-owning; must outlive this object
  int rank;                        // this process' rank
  int64_t n_samples;               // global sample count across all ranks
  int n_ranks;                     // total number of ranks

  GLMWithDataMG(raft::handle_t const& handle,
                int rank,
                int n_ranks,
                int64_t n_samples,
                GLMObjective* obj,
                const SimpleMat<T>& X,
                const SimpleVec<T>& y,
                SimpleDenseMat<T>& Z)
    : ML::GLM::detail::GLMWithData<T, GLMObjective>(obj, X, y, Z)
  {
    this->handle_p  = &handle;
    this->rank      = rank;
    this->n_ranks   = n_ranks;
    this->n_samples = n_samples;
  }

  /**
   * Evaluate the global objective and gradient at `wFlat`.
   *
   * Local loss and gradient are computed on this rank's partition, normalized
   * by the global sample count, and allreduce-summed across ranks, so every
   * rank returns the same global loss and writes the same global gradient.
   *
   * @param[in]  wFlat      flattened coefficients
   * @param[out] gradFlat   flattened global gradient (written in place)
   * @param[in]  dev_scalar device scratch scalar (holds the loss on exit)
   * @param[in]  stream     CUDA stream
   * @return the global objective value (data loss + L2 regularization)
   */
  inline T operator()(const SimpleVec<T>& wFlat,
                      SimpleVec<T>& gradFlat,
                      T* dev_scalar,
                      cudaStream_t stream)
  {
    SimpleDenseMat<T> W(wFlat.data, this->C, this->dims);
    SimpleDenseMat<T> G(gradFlat.data, this->C, this->dims);
    SimpleVec<T> lossVal(dev_scalar, 1);

    // apply regularization
    auto regularizer_obj = this->objective;
    auto lossFunc        = regularizer_obj->loss;
    auto reg             = regularizer_obj->reg;
    G.fill(0, stream);
    // Use T (not float) so double-precision models neither lose precision nor
    // break the update_host(T*) template deduction.
    T reg_host = 0;
    if (reg->l2_penalty != 0) {
      reg->reg_grad(dev_scalar, G, W, lossFunc->fit_intercept, stream);
      raft::update_host(&reg_host, dev_scalar, 1, stream);
      // note: avoid syncing here because there's a sync before reg_host is used.
    }

    // apply linearFwd, getLossAndDz, linearBwd
    ML::GLM::detail::linearFwd(
      lossFunc->handle, *(this->Z), *(this->X), W);  // linear part: forward pass

    raft::comms::comms_t const& communicator = raft::resource::get_comms(*(this->handle_p));

    lossFunc->getLossAndDZ(dev_scalar, *(this->Z), *(this->y), stream);  // loss specific part

    // normalize local loss before allreduce sum
    T factor = 1.0 * (*this->y).len / this->n_samples;
    raft::linalg::multiplyScalar(dev_scalar, dev_scalar, factor, 1, stream);

    communicator.allreduce(dev_scalar, dev_scalar, 1, raft::comms::op_t::SUM, stream);
    communicator.sync_stream(stream);

    linearBwdMG(lossFunc->handle,
                G,
                *(this->X),
                *(this->Z),
                false,
                n_samples,
                n_ranks);  // linear part: backward pass

    communicator.allreduce(G.data, G.data, this->C * this->dims, raft::comms::op_t::SUM, stream);
    communicator.sync_stream(stream);

    T loss_host;
    raft::update_host(&loss_host, dev_scalar, 1, stream);
    raft::resource::sync_stream(*(this->handle_p));
    loss_host += reg_host;
    // Bug fix: the original filled lossVal with `loss_host + reg_host` AFTER
    // already adding reg_host to loss_host, double-counting the regularization
    // term on the device while returning the correct value to the caller.
    lossVal.fill(loss_host, stream);
    return loss_host;
  }
};
}; // namespace opg
}; // namespace GLM
}; // namespace ML | 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/randomforest/randomforest.cuh | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <decisiontree/batched-levelalgo/quantiles.cuh>
#include <decisiontree/decisiontree.cuh>
#include <decisiontree/treelite_util.h>
#include <raft/random/permute.cuh>
#include <raft/core/handle.hpp>
#include <raft/core/nvtx.hpp>
#include <raft/random/rng.cuh>
#include <raft/stats/accuracy.cuh>
#include <raft/stats/regression_metrics.cuh>
#include <raft/util/cudart_utils.hpp>
#include <thrust/execution_policy.h>
#include <thrust/sequence.h>
#ifdef _OPENMP
#include <omp.h>
#else
#define omp_get_thread_num() 0
#define omp_get_max_threads() 1
#endif
#include <map>
namespace ML {
/**
 * Random forest implementation shared by classification and regression.
 *
 * @tparam T data type of the input features (float or double)
 * @tparam L label type: int for classification, T for regression
 */
template <class T, class L>
class RandomForest {
 protected:
  RF_params rf_params;  // structure containing RF hyperparameters
  int rf_type;          // 0 for classification 1 for regression

  /**
   * Fill `selected_rows` with the row indices used to train one tree:
   * a bootstrap sample (with replacement) when bootstrapping is enabled,
   * otherwise simply 0..n_rows-1.
   */
  void get_row_sample(int tree_id,
                      int n_rows,
                      rmm::device_uvector<int>* selected_rows,
                      const cudaStream_t stream)
  {
    raft::common::nvtx::range fun_scope("bootstrapping row IDs @randomforest.cuh");
    // Hash these together so they are uncorrelated
    auto rs = DT::fnv1a32_basis;
    rs      = DT::fnv1a32(rs, rf_params.seed);
    rs      = DT::fnv1a32(rs, tree_id);
    raft::random::Rng rng(rs, raft::random::GenPhilox);
    if (rf_params.bootstrap) {
      // Use bootstrapped sample set
      rng.uniformInt<int>(selected_rows->data(), selected_rows->size(), 0, n_rows, stream);
    } else {
      // Use all the samples from the dataset
      thrust::sequence(thrust::cuda::par.on(stream), selected_rows->begin(), selected_rows->end());
    }
  }

  /**
   * Validate fit/predict inputs: positive dimensions, and (for predict) a
   * non-null output buffer; both input and labels/predictions must be device
   * pointers.
   */
  void error_checking(const T* input, L* predictions, int n_rows, int n_cols, bool predict) const
  {
    if (predict) {
      ASSERT(predictions != nullptr, "Error! User has not allocated memory for predictions.");
    }
    ASSERT((n_rows > 0), "Invalid n_rows %d", n_rows);
    ASSERT((n_cols > 0), "Invalid n_cols %d", n_cols);

    bool input_is_dev_ptr = DT::is_dev_ptr(input);
    bool preds_is_dev_ptr = DT::is_dev_ptr(predictions);

    if (!input_is_dev_ptr || (input_is_dev_ptr != preds_is_dev_ptr)) {
      ASSERT(false,
             "RF Error: Expected both input and labels/predictions to be GPU "
             "pointers");
    }
  }

 public:
  /**
   * @brief Construct RandomForest object.
   * @param[in] cfg_rf_params: Random forest hyper-parameter struct.
   * @param[in] cfg_rf_type: Task type: 0 for classification, 1 for regression
   */
  RandomForest(RF_params cfg_rf_params, int cfg_rf_type = RF_type::CLASSIFICATION)
    : rf_params(cfg_rf_params), rf_type(cfg_rf_type){};

  /**
   * @brief Build (i.e., fit, train) random forest for input data.
   * @param[in] user_handle: raft::handle_t
   * @param[in] input: train data (n_rows samples, n_cols features) in column major format,
   *   excluding labels. Device pointer.
   * @param[in] n_rows: number of training data samples.
   * @param[in] n_cols: number of features (i.e., columns) excluding target feature.
   * @param[in] labels: 1D array of target predictions/labels. Device Pointer.
   *   For classification task, only labels of type int are supported.
   *   Assumption: labels were preprocessed to map to ascending numbers from 0;
   *   needed for current gini impl in decision tree
   *   For regression task, the labels (predictions) can be float or double data type.
   * @param[in] n_unique_labels: (meaningful only for classification) #unique label values (known
   *   during preprocessing)
   * @param[in] forest: CPU point to RandomForestMetaData struct.
   */
  void fit(const raft::handle_t& user_handle,
           const T* input,
           int n_rows,
           int n_cols,
           L* labels,
           int n_unique_labels,
           RandomForestMetaData<T, L>*& forest)
  {
    raft::common::nvtx::range fun_scope("RandomForest::fit @randomforest.cuh");
    this->error_checking(input, labels, n_rows, n_cols, false);
    const raft::handle_t& handle = user_handle;
    int n_sampled_rows           = 0;
    if (this->rf_params.bootstrap) {
      n_sampled_rows = std::round(this->rf_params.max_samples * n_rows);
    } else {
      if (this->rf_params.max_samples != 1.0) {
        CUML_LOG_WARN(
          "If bootstrap sampling is disabled, max_samples value is ignored and "
          "whole dataset is used for building each tree");
        this->rf_params.max_samples = 1.0;
      }
      n_sampled_rows = n_rows;
    }
    int n_streams = this->rf_params.n_streams;
    ASSERT(static_cast<std::size_t>(n_streams) <= handle.get_stream_pool_size(),
           "rf_params.n_streams (=%d) should be <= raft::handle_t.n_streams (=%lu)",
           n_streams,
           handle.get_stream_pool_size());

    // computing the quantiles: last two return values are shared pointers to device memory
    // encapsulated by quantiles struct
    auto [quantiles, quantiles_array, n_bins_array] =
      DT::computeQuantiles(handle, input, this->rf_params.tree_params.max_n_bins, n_rows, n_cols);

    // n_streams should not be less than n_trees
    if (this->rf_params.n_trees < n_streams) n_streams = this->rf_params.n_trees;

    // Select n_sampled_rows (with replacement) numbers from [0, n_rows) per tree.
    // selected_rows: randomly generated IDs for bootstrapped samples (w/ replacement); a device
    // ptr.
    // Use a deque instead of vector because it can be used on objects with a deleted copy
    // constructor
    std::deque<rmm::device_uvector<int>> selected_rows;
    for (int i = 0; i < n_streams; i++) {
      selected_rows.emplace_back(n_sampled_rows, handle.get_stream_from_stream_pool(i));
    }
    // One tree per OpenMP thread; each thread reuses the stream (and row-id
    // buffer) matching its thread index.
#pragma omp parallel for num_threads(n_streams)
    for (int i = 0; i < this->rf_params.n_trees; i++) {
      int stream_id = omp_get_thread_num();
      auto s        = handle.get_stream_from_stream_pool(stream_id);

      this->get_row_sample(i, n_rows, &selected_rows[stream_id], s);

      /* Build individual tree in the forest.
         - input is a pointer to orig data that have n_cols features and n_rows rows.
         - n_sampled_rows: # rows sampled for tree's bootstrap sample.
         - sorted_selected_rows: points to a list of row #s (w/ n_sampled_rows elements)
           used to build the bootstrapped sample.
         Expectation: Each tree node will contain (a) # n_sampled_rows and
         (b) a pointer to a list of row numbers w.r.t original data.
      */
      forest->trees[i] = DT::DecisionTree::fit(handle,
                                               s,
                                               input,
                                               n_cols,
                                               n_rows,
                                               labels,
                                               &selected_rows[stream_id],
                                               n_unique_labels,
                                               this->rf_params.tree_params,
                                               this->rf_params.seed,
                                               quantiles,
                                               i);
    }
    // Cleanup
    handle.sync_stream_pool();
    handle.sync_stream();
  }

  /**
   * @brief Predict target feature for input data
   * @param[in] user_handle: raft::handle_t.
   * @param[in] input: test data (n_rows samples, n_cols features) in row major format. GPU
   *   pointer.
   * @param[in] n_rows: number of data samples.
   * @param[in] n_cols: number of features (excluding target feature).
   * @param[in, out] predictions: n_rows predicted labels. GPU pointer, user allocated.
   * @param[in] verbosity: verbosity level for logging messages during execution
   */
  void predict(const raft::handle_t& user_handle,
               const T* input,
               int n_rows,
               int n_cols,
               L* predictions,
               const RandomForestMetaData<T, L>* forest,
               int verbosity) const
  {
    ML::Logger::get().setLevel(verbosity);
    this->error_checking(input, predictions, n_rows, n_cols, true);
    std::vector<L> h_predictions(n_rows);
    cudaStream_t stream = user_handle.get_stream();

    // Prediction runs on the host: copy the whole input matrix over once.
    std::vector<T> h_input(std::size_t(n_rows) * n_cols);
    raft::update_host(h_input.data(), input, std::size_t(n_rows) * n_cols, stream);
    user_handle.sync_stream(stream);

    int row_size = n_cols;

    ML::PatternSetter _("%v");
    for (int row_id = 0; row_id < n_rows; row_id++) {
      std::vector<T> row_prediction(forest->trees[0]->num_outputs);
      // NOTE(review): this assumes DT::DecisionTree::predict ACCUMULATES into
      // row_prediction across trees (it is divided by n_trees below) — confirm
      // against the decision tree implementation.
      for (int i = 0; i < this->rf_params.n_trees; i++) {
        DT::DecisionTree::predict(user_handle,
                                  *forest->trees[i],
                                  &h_input[row_id * row_size],
                                  1,
                                  n_cols,
                                  row_prediction.data(),
                                  forest->trees[i]->num_outputs,
                                  verbosity);
      }
      for (int k = 0; k < forest->trees[0]->num_outputs; k++) {
        row_prediction[k] /= this->rf_params.n_trees;
      }
      if (rf_type == RF_type::CLASSIFICATION) {  // classification task: use 'majority' prediction
        L best_class = 0;
        T best_prob  = 0.0;
        for (int k = 0; k < forest->trees[0]->num_outputs; k++) {
          if (row_prediction[k] > best_prob) {
            best_class = k;
            best_prob  = row_prediction[k];
          }
        }
        h_predictions[row_id] = best_class;
      } else {
        h_predictions[row_id] = row_prediction[0];
      }
    }

    raft::update_device(predictions, h_predictions.data(), n_rows, stream);
    user_handle.sync_stream(stream);
  }

  /**
   * @brief Predict target feature for input data and score against ref_labels.
   * @param[in] user_handle: raft::handle_t.
   * @param[in] ref_labels: label values for cross validation (n_rows elements); GPU pointer.
   * @param[in] n_rows: number of data samples.
   * @param[in] predictions: n_rows predicted labels. GPU pointer, user allocated.
   * @param[in] verbosity: verbosity level for logging messages during execution
   * @param[in] rf_type: task type: 0 for classification, 1 for regression
   */
  static RF_metrics score(const raft::handle_t& user_handle,
                          const L* ref_labels,
                          int n_rows,
                          const L* predictions,
                          int verbosity,
                          int rf_type = RF_type::CLASSIFICATION)
  {
    ML::Logger::get().setLevel(verbosity);
    cudaStream_t stream = user_handle.get_stream();
    RF_metrics stats;
    if (rf_type == RF_type::CLASSIFICATION) {  // task classification: get classification metrics
      float accuracy = raft::stats::accuracy(predictions, ref_labels, n_rows, stream);
      stats          = set_rf_metrics_classification(accuracy);
      if (ML::Logger::get().shouldLogFor(CUML_LEVEL_DEBUG)) print(stats);

      /* TODO: Potentially augment RF_metrics w/ more metrics (e.g., precision, F1, etc.).
         For non binary classification problems (i.e., one target and > 2 labels), need avg.
         for each of these metrics */
    } else {  // regression task: get regression metrics
      double mean_abs_error, mean_squared_error, median_abs_error;
      raft::stats::regression_metrics(predictions,
                                      ref_labels,
                                      n_rows,
                                      stream,
                                      mean_abs_error,
                                      mean_squared_error,
                                      median_abs_error);
      stats = set_rf_metrics_regression(mean_abs_error, mean_squared_error, median_abs_error);
      if (ML::Logger::get().shouldLogFor(CUML_LEVEL_DEBUG)) print(stats);
    }

    return stats;
  }
};
// class specializations
template class RandomForest<float, int>;
template class RandomForest<float, float>;
template class RandomForest<double, int>;
template class RandomForest<double, double>;
} // End namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/randomforest/randomforest.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/common/logger.hpp>
#include <cuml/ensemble/randomforest.hpp>
#include <cuml/tree/flatnode.h>
#include <raft/core/handle.hpp>
#include <treelite/c_api.h>
#include <treelite/tree.h>
#include <raft/core/error.hpp>
#include <cstddef>
#include <cstdio>
#include <cstring>
#include <fstream>
#include <iostream>
#include <string>
#include <type_traits>
#include <vector>
#include "randomforest.cuh"
namespace ML {
using namespace MLCommon;
using namespace std;
namespace tl = treelite;
/**
 * @brief Set RF_metrics.
 * @param[in] rf_type: Random Forest type: classification or regression
 * @param[in] accuracy: accuracy (classification only; -1.0 for regression).
 * @param[in] mean_abs_error: mean absolute error.
 * @param[in] mean_squared_error: mean squared error.
 * @param[in] median_abs_error: median absolute error.
 * @return RF_metrics struct with classification or regression score.
 */
RF_metrics set_all_rf_metrics(RF_type rf_type,
                              float accuracy,
                              double mean_abs_error,
                              double mean_squared_error,
                              double median_abs_error)
{
  RF_metrics rf_metrics;
  rf_metrics.rf_type            = rf_type;
  rf_metrics.accuracy           = accuracy;
  rf_metrics.mean_abs_error     = mean_abs_error;
  rf_metrics.mean_squared_error = mean_squared_error;
  rf_metrics.median_abs_error   = median_abs_error;
  return rf_metrics;
}
/**
 * @brief Set RF_metrics for classification.
 * @param[in] accuracy: accuracy.
 * @return RF_metrics struct with classification score; regression fields are
 *   set to the sentinel -1.0.
 */
RF_metrics set_rf_metrics_classification(float accuracy)
{
  return set_all_rf_metrics(RF_type::CLASSIFICATION, accuracy, -1.0, -1.0, -1.0);
}
/**
 * @brief Set RF_metrics for regression.
 * @param[in] mean_abs_error: mean absolute error.
 * @param[in] mean_squared_error: mean squared error.
 * @param[in] median_abs_error: median absolute error.
 * @return RF_metrics struct with regression score; the classification accuracy
 *   field is set to the sentinel -1.0.
 */
RF_metrics set_rf_metrics_regression(double mean_abs_error,
                                     double mean_squared_error,
                                     double median_abs_error)
{
  return set_all_rf_metrics(
    RF_type::REGRESSION, -1.0, mean_abs_error, mean_squared_error, median_abs_error);
}
/**
 * @brief Print either accuracy metric for classification, or mean absolute error,
 *   mean squared error, and median absolute error metrics for regression.
 * @param[in] rf_metrics: random forest metrics to print.
 */
void print(const RF_metrics rf_metrics)
{
  // Classification carries a single accuracy value; regression carries three
  // error metrics. Any other rf_type prints nothing.
  if (rf_metrics.rf_type == RF_type::CLASSIFICATION) {
    CUML_LOG_DEBUG("Accuracy: %f", rf_metrics.accuracy);
    return;
  }
  if (rf_metrics.rf_type == RF_type::REGRESSION) {
    CUML_LOG_DEBUG("Mean Absolute Error: %f", rf_metrics.mean_abs_error);
    CUML_LOG_DEBUG("Mean Squared Error: %f", rf_metrics.mean_squared_error);
    CUML_LOG_DEBUG("Median Absolute Error: %f", rf_metrics.median_abs_error);
  }
}
/**
 * @brief Update labels so they are unique from 0 to n_unique_labels values.
 *   Create/update an old label to new label map per random forest.
 * @param[in] n_rows: number of rows (labels)
 * @param[in,out] labels: 1D labels array to be changed in-place.
 * @param[in,out] labels_map: map of old label values to new ones.
 * @param[in] verbosity: verbosity level for logging messages during execution
 */
void preprocess_labels(int n_rows,
                       std::vector<int>& labels,
                       std::map<int, int>& labels_map,
                       int verbosity)
{
  int n_unique_labels = 0;

  ML::Logger::get().setLevel(verbosity);
  CUML_LOG_DEBUG("Preprocessing labels");
  for (int i = 0; i < n_rows; i++) {
    // try_emplace inserts {label, next_id} only when the label is new;
    // `it` always points at the mapping for labels[i] afterwards.
    // (Replaces the original's loop-external `ret` pair + insert.)
    auto [it, inserted] = labels_map.try_emplace(labels[i], n_unique_labels);
    if (inserted) { n_unique_labels += 1; }
    auto prev = labels[i];
    labels[i] = it->second;  // Update labels **IN-PLACE**
    CUML_LOG_DEBUG("Mapping %d to %d", prev, labels[i]);
  }
  CUML_LOG_DEBUG("Finished preprocessing labels");
}
/**
 * @brief Revert label preprocessing effect, if needed.
 * @param[in] n_rows: number of rows (labels)
 * @param[in,out] labels: 1D labels array to be changed in-place.
 * @param[in] labels_map: map of old to new label values used during preprocessing.
 * @param[in] verbosity: verbosity level for logging messages during execution
 */
void postprocess_labels(int n_rows,
                        std::vector<int>& labels,
                        std::map<int, int>& labels_map,
                        int verbosity)
{
  ML::Logger::get().setLevel(verbosity);
  // Fixed log-message typo ("Postrocessing") and removed an unused
  // `std::map<int, int>::iterator it;` declaration that shadowed the loop's.
  CUML_LOG_DEBUG("Postprocessing labels");

  // Invert the old->new map into a dense new->old lookup table.
  int n_unique_cnt = labels_map.size();
  std::vector<int> reverse_map(n_unique_cnt);
  for (auto it = labels_map.begin(); it != labels_map.end(); it++) {
    reverse_map[it->second] = it->first;
  }

  for (int i = 0; i < n_rows; i++) {
    auto prev = labels[i];
    labels[i] = reverse_map[prev];
    CUML_LOG_DEBUG("Mapping %d back to %d", prev, labels[i]);
  }
  CUML_LOG_DEBUG("Finished postprocessing labels");
}
/**
 * @brief Deletes a RandomForestMetaData object and all trees it owns.
 * @tparam T: data type for input data (float or double).
 * @tparam L: data type for labels (int type for classification, T type for regression).
 * @param[in] forest: CPU pointer to RandomForestMetaData; deleting a nullptr is a no-op.
 */
template <class T, class L>
void delete_rf_metadata(RandomForestMetaData<T, L>* forest)
{
  delete forest;
}
/**
 * @brief Render a human-readable description of a forest: a header with the
 * forest hyper-parameters followed by, per tree, either a summary or the full
 * tree text depending on @p summary.
 */
template <class T, class L>
std::string _get_rf_text(const RandomForestMetaData<T, L>* forest, bool summary)
{
  ML::PatternSetter _("%v");
  if (forest == nullptr) { return "Empty forest"; }

  std::ostringstream stream;
  stream << "Forest has " << forest->rf_params.n_trees << " trees, "
         << "max_depth " << forest->rf_params.tree_params.max_depth << ", and max_leaves "
         << forest->rf_params.tree_params.max_leaves << "\n";
  for (int i = 0; i < forest->rf_params.n_trees; i++) {
    stream << "Tree #" << i << "\n";
    stream << (summary ? DT::get_tree_summary_text<T, L>(forest->trees[i].get())
                       : DT::get_tree_text<T, L>(forest->trees[i].get()))
           << "\n";
  }
  return stream.str();
}
/**
 * @brief Render the forest as a JSON array with one element per tree.
 * A null forest yields the empty array "[]".
 */
template <class T, class L>
std::string _get_rf_json(const RandomForestMetaData<T, L>* forest)
{
  if (forest == nullptr) { return "[]"; }
  std::ostringstream stream;
  stream << "[\n";
  const int n_trees = forest->rf_params.n_trees;
  for (int i = 0; i < n_trees; i++) {
    stream << DT::get_tree_json<T, L>(forest->trees[i].get());
    if (i + 1 < n_trees) { stream << ",\n"; }  // separator between (not after) elements
  }
  stream << "\n]";
  return stream.str();
}
/**
 * @brief Return a summary view (per-tree summaries only) of all trees in the random forest.
 * @tparam T: data type for input data (float or double).
 * @tparam L: data type for labels (int type for classification, T type for regression).
 * @param[in] forest: CPU pointer to RandomForestMetaData struct.
 * @return formatted text; "Empty forest" if @p forest is null.
 */
template <class T, class L>
std::string get_rf_summary_text(const RandomForestMetaData<T, L>* forest)
{
  return _get_rf_text(forest, true);
}
/**
 * @brief Return a detailed view (full node-by-node text) of all trees in the random forest.
 * @tparam T: data type for input data (float or double).
 * @tparam L: data type for labels (int type for classification, T type for regression).
 * @param[in] forest: CPU pointer to RandomForestMetaData struct.
 * @return formatted text; "Empty forest" if @p forest is null.
 */
template <class T, class L>
std::string get_rf_detailed_text(const RandomForestMetaData<T, L>* forest)
{
  return _get_rf_text(forest, false);
}
/**
 * @brief Return a JSON representation of all trees in the random forest.
 * @tparam T: data type for input data (float or double).
 * @tparam L: data type for labels (int type for classification, T type for regression).
 * @param[in] forest: CPU pointer to RandomForestMetaData struct.
 * @return JSON array string; "[]" if @p forest is null.
 */
template <class T, class L>
std::string get_rf_json(const RandomForestMetaData<T, L>* forest)
{
  return _get_rf_json(forest);
}
/**
 * @brief Export a trained random forest as a treelite model.
 * Ownership of the created model is transferred to the caller via @p model_handle.
 * @tparam T: data type for input data (float or double).
 * @tparam L: data type for labels (int type for classification, T type for regression).
 * @param[out] model_handle: receives the newly created treelite ModelHandle.
 * @param[in] forest: trained forest; must be non-empty and internally consistent.
 * @param[in] num_features: number of input features the model expects.
 */
template <class T, class L>
void build_treelite_forest(ModelHandle* model_handle,
                           const RandomForestMetaData<T, L>* forest,
                           int num_features)
{
  auto parent_model = tl::Model::Create<T, T>();
  tl::ModelImpl<T, T>* model = dynamic_cast<tl::ModelImpl<T, T>*>(parent_model.get());
  ASSERT(model != nullptr, "Invalid downcast to tl::ModelImpl");

  // Determine number of outputs (cast avoids a signed/unsigned comparison)
  ASSERT(forest->trees.size() == static_cast<std::size_t>(forest->rf_params.n_trees),
         "Inconsistent number of trees.");
  ASSERT(forest->trees.size() > 0, "Empty forest.");
  int num_outputs = forest->trees.front()->num_outputs;
  ASSERT(num_outputs > 0, "Invalid forest");
  for (const auto& tree : forest->trees) {
    ASSERT(num_outputs == tree->num_outputs, "Invalid forest");
  }

  if constexpr (std::is_integral_v<L>) {
    // Classification: leaves carry a per-class probability vector; prediction
    // is the argmax over that vector.
    ASSERT(num_outputs > 1, "More than one variable expected for classification problem.");
    model->task_type = tl::TaskType::kMultiClfProbDistLeaf;
    std::strncpy(model->param.pred_transform, "max_index", sizeof(model->param.pred_transform));
  } else {
    model->task_type = tl::TaskType::kBinaryClfRegr;
  }

  model->task_param = tl::TaskParam{
    tl::TaskParam::OutputType::kFloat, false, (unsigned int)num_outputs, (unsigned int)num_outputs};
  model->num_feature         = num_features;
  model->average_tree_output = true;  // forest prediction = average over trees
  model->SetTreeLimit(forest->rf_params.n_trees);

#pragma omp parallel for
  for (int i = 0; i < forest->rf_params.n_trees; i++) {
    // Reference (not copy) the shared_ptr: copying it from many OpenMP threads
    // would contend on the atomic reference count.
    const auto& rf_tree = forest->trees[i];
    if (rf_tree->sparsetree.size() != 0) {
      model->trees[i] = DT::build_treelite_tree<T, L>(*rf_tree, num_outputs);
    }
  }

  // Transfer ownership of the finished model to the caller.
  *model_handle = static_cast<ModelHandle>(parent_model.release());
}
/**
 * @brief Compares a tree from the concatenated treelite forest with the
 * corresponding tree from an individual worker's forest, node by node.
 * Aborts via ASSERT on the first mismatch.
 * @param[in] tree_from_concatenated_forest: Tree info from the concatenated forest.
 * @param[in] tree_from_individual_forest: Tree info from the forest present in each worker.
 */
template <class T, class L>
void compare_trees(tl::Tree<T, L>& tree_from_concatenated_forest,
                   tl::Tree<T, L>& tree_from_individual_forest)
{
  // Structural equality first: same node count is a precondition for the
  // per-node comparisons below.
  ASSERT(tree_from_concatenated_forest.num_nodes == tree_from_individual_forest.num_nodes,
         "Error! Mismatch the number of nodes present in a tree in the "
         "concatenated forest and"
         " the tree present in the individual forests");
  for (int each_node = 0; each_node < tree_from_concatenated_forest.num_nodes; each_node++) {
    ASSERT(tree_from_concatenated_forest.IsLeaf(each_node) ==
             tree_from_individual_forest.IsLeaf(each_node),
           "Error! mismatch in the position of a leaf between concatenated "
           "forest and the"
           " individual forests ");
    // NOTE(review): LeafValue() compares scalar leaf outputs; trees with
    // vector leaves may need LeafVector() instead — confirm against usage.
    ASSERT(tree_from_concatenated_forest.LeafValue(each_node) ==
             tree_from_individual_forest.LeafValue(each_node),
           "Error! leaf value mismatch between concatenated forest and the"
           " individual forests ");
    ASSERT(tree_from_concatenated_forest.RightChild(each_node) ==
             tree_from_individual_forest.RightChild(each_node),
           "Error! mismatch in the position of the node between concatenated "
           "forest and the"
           " individual forests ");
    ASSERT(tree_from_concatenated_forest.LeftChild(each_node) ==
             tree_from_individual_forest.LeftChild(each_node),
           "Error! mismatch in the position of the node between concatenated "
           "forest and the"
           " individual forests ");
    ASSERT(tree_from_concatenated_forest.SplitIndex(each_node) ==
             tree_from_individual_forest.SplitIndex(each_node),
           "Error! split index value mismatch between concatenated forest and the"
           " individual forests ");
  }
}
/**
 * @brief Compares the concatenated treelite model with the information of the forests
 * present in the different workers. If there is a difference between the two then an error
 * statement will be thrown (via ASSERT).
 * @param[in] concat_tree_handle: ModelHandle for the concatenated forest.
 * @param[in] treelite_handles: List containing ModelHandles for the forest present in
 *   each worker. Trees are expected in the same order as the handles.
 */
void compare_concat_forest_to_subforests(ModelHandle concat_tree_handle,
                                         std::vector<ModelHandle> treelite_handles)
{
  size_t concat_forest;
  size_t total_num_trees = 0;
  // Sum the tree counts of all worker forests.
  for (std::size_t forest_idx = 0; forest_idx < treelite_handles.size(); forest_idx++) {
    size_t num_trees_each_forest;
    TREELITE_CHECK_RET(TreeliteQueryNumTree(treelite_handles[forest_idx], &num_trees_each_forest));
    total_num_trees = total_num_trees + num_trees_each_forest;
  }
  TREELITE_CHECK_RET(TreeliteQueryNumTree(concat_tree_handle, &concat_forest));
  ASSERT(concat_forest == total_num_trees,
         "Error! the number of trees in the concatenated forest and the sum "
         "of the trees present in the forests present in each worker are not equal");
  // Running offset into the concatenated model's tree array.
  int concat_mod_tree_num = 0;
  tl::Model& concat_model = *(tl::Model*)(concat_tree_handle);
  for (std::size_t forest_idx = 0; forest_idx < treelite_handles.size(); forest_idx++) {
    tl::Model& model = *(tl::Model*)(treelite_handles[forest_idx]);
    // Model-level metadata must agree before comparing individual trees.
    ASSERT(concat_model.GetThresholdType() == model.GetThresholdType(),
           "Error! Concatenated forest does not have the same threshold type as "
           "the individual forests");
    ASSERT(concat_model.GetLeafOutputType() == model.GetLeafOutputType(),
           "Error! Concatenated forest does not have the same leaf output type as "
           "the individual forests");
    ASSERT(concat_model.num_feature == model.num_feature,
           "Error! number of features mismatch between concatenated forest and the"
           " individual forests");
    ASSERT(concat_model.task_param.num_class == model.task_param.num_class,
           "Error! number of classes mismatch between concatenated forest "
           "and the individual forests ");
    ASSERT(concat_model.average_tree_output == model.average_tree_output,
           "Error! average_tree_output flag value mismatch between "
           "concatenated forest and the individual forests");
    // Dispatch recovers the concrete ModelImpl<T, L> type so the trees of this
    // worker can be compared against the matching slice of the concatenated model.
    model.Dispatch([&concat_mod_tree_num, &concat_model](auto& model_inner) {
      // model_inner is of the concrete type tl::ModelImpl<T, L>
      using model_type        = std::remove_reference_t<decltype(model_inner)>;
      auto& concat_model_inner = dynamic_cast<model_type&>(concat_model);
      for (std::size_t indiv_trees = 0; indiv_trees < model_inner.trees.size(); indiv_trees++) {
        compare_trees(concat_model_inner.trees[concat_mod_tree_num + indiv_trees],
                      model_inner.trees[indiv_trees]);
      }
      // Advance the offset past this worker's trees.
      concat_mod_tree_num = concat_mod_tree_num + model_inner.trees.size();
    });
  }
}
/**
 * @brief Concatenates the forest information present in different workers to
 * create a single forest. This concatenated forest is stored in a new treelite model.
 * The model created is owned by and must be freed by the caller.
 * @param[in] treelite_handles: List containing ModelHandles for the forest present in
 *   each worker; all models must share the same threshold/leaf-output types.
 * @return ModelHandle for the newly allocated concatenated forest, or nullptr
 *   if @p treelite_handles is empty.
 */
ModelHandle concatenate_trees(std::vector<ModelHandle> treelite_handles)
{
  /* TODO(hcho3): Use treelite::ConcatenateModelObjects(),
     once https://github.com/dmlc/treelite/issues/474 is fixed. */
  if (treelite_handles.empty()) { return nullptr; }
  tl::Model& first_model = *static_cast<tl::Model*>(treelite_handles[0]);
  // Dispatch recovers the concrete ModelImpl<T, L> type of the first model;
  // every other model is cast to the same type (dynamic_cast throws on mismatch).
  tl::Model* concat_model = first_model.Dispatch([&treelite_handles](auto& first_model_inner) {
    // first_model_inner is of the concrete type tl::ModelImpl<T, L>
    using model_type = std::remove_reference_t<decltype(first_model_inner)>;
    auto* concat_model = dynamic_cast<model_type*>(
      tl::Model::Create(first_model_inner.GetThresholdType(), first_model_inner.GetLeafOutputType())
        .release());
    // Deep-copy every tree of every worker forest, in handle order.
    for (std::size_t forest_idx = 0; forest_idx < treelite_handles.size(); forest_idx++) {
      tl::Model& model = *static_cast<tl::Model*>(treelite_handles[forest_idx]);
      auto& model_inner = dynamic_cast<model_type&>(model);
      for (const auto& tree : model_inner.trees) {
        concat_model->trees.push_back(tree.Clone());
      }
    }
    // Model-level metadata is taken from the first forest.
    concat_model->num_feature         = first_model_inner.num_feature;
    concat_model->task_type           = first_model_inner.task_type;
    concat_model->task_param          = first_model_inner.task_param;
    concat_model->average_tree_output = first_model_inner.average_tree_output;
    concat_model->param               = first_model_inner.param;
    return static_cast<tl::Model*>(concat_model);
  });
  return concat_model;
}
/**
 * @defgroup RandomForestClassificationFit Random Forest Classification - Fit function
 * @brief Build (i.e., fit, train) random forest classifier for input data.
 * @param[in] user_handle: raft::handle_t
 * @param[in,out] forest: CPU pointer to RandomForestMetaData object. User allocated.
 * @param[in] input: train data (n_rows samples, n_cols features) in column major format,
 *   excluding labels. Device pointer.
 * @param[in] n_rows: number of training data samples.
 * @param[in] n_cols: number of features (i.e., columns) excluding target feature.
 * @param[in] labels: 1D array of target features (int only), with one label per
 *   training sample. Device pointer.
 *   Assumption: labels were preprocessed to map to ascending numbers from 0;
 *   needed for current gini impl. in decision tree
 * @param[in] n_unique_labels: #unique label values (known during preprocessing)
 * @param[in] rf_params: Random Forest training hyper parameter struct.
 * @param[in] verbosity: verbosity level for logging messages during execution
 * @{
 */
void fit(const raft::handle_t& user_handle,
         RandomForestClassifierF*& forest,
         float* input,
         int n_rows,
         int n_cols,
         int* labels,
         int n_unique_labels,
         RF_params rf_params,
         int verbosity)
{
  raft::common::nvtx::range fun_scope("RF::fit @randomforest.cu");
  ML::Logger::get().setLevel(verbosity);
  ASSERT(forest->trees.empty(), "Cannot fit an existing forest.");
  // Record the training configuration and reserve one slot per tree up front.
  forest->rf_params = rf_params;
  forest->trees.resize(rf_params.n_trees);
  auto classifier =
    std::make_shared<RandomForest<float, int>>(rf_params, RF_type::CLASSIFICATION);
  classifier->fit(user_handle, input, n_rows, n_cols, labels, n_unique_labels, forest);
}
void fit(const raft::handle_t& user_handle,
         RandomForestClassifierD*& forest,
         double* input,
         int n_rows,
         int n_cols,
         int* labels,
         int n_unique_labels,
         RF_params rf_params,
         int verbosity)
{
  raft::common::nvtx::range fun_scope("RF::fit @randomforest.cu");
  ML::Logger::get().setLevel(verbosity);
  ASSERT(forest->trees.empty(), "Cannot fit an existing forest.");
  // Record the training configuration and reserve one slot per tree up front.
  forest->rf_params = rf_params;
  forest->trees.resize(rf_params.n_trees);
  auto classifier =
    std::make_shared<RandomForest<double, int>>(rf_params, RF_type::CLASSIFICATION);
  classifier->fit(user_handle, input, n_rows, n_cols, labels, n_unique_labels, forest);
}
/** @} */
/**
 * @defgroup RandomForestClassificationPredict Random Forest Classification - Predict function
 * @brief Predict target feature for input data; n-ary classification for
     single feature supported.
 * @param[in] user_handle: raft::handle_t.
 * @param[in] forest: CPU pointer to RandomForestMetaData object.
 *   The user should have previously called fit to build the random forest.
 * @param[in] input: test data (n_rows samples, n_cols features) in row major format. GPU pointer.
 * @param[in] n_rows: number of data samples.
 * @param[in] n_cols: number of features (excluding target feature).
 * @param[in, out] predictions: n_rows predicted labels. GPU pointer, user allocated.
 * @param[in] verbosity: verbosity level for logging messages during execution
 * @{
 */
void predict(const raft::handle_t& user_handle,
             const RandomForestClassifierF* forest,
             const float* input,
             int n_rows,
             int n_cols,
             int* predictions,
             int verbosity)
{
  ASSERT(!forest->trees.empty(), "Cannot predict! No trees in the forest.");
  auto classifier =
    std::make_shared<RandomForest<float, int>>(forest->rf_params, RF_type::CLASSIFICATION);
  classifier->predict(user_handle, input, n_rows, n_cols, predictions, forest, verbosity);
}
void predict(const raft::handle_t& user_handle,
             const RandomForestClassifierD* forest,
             const double* input,
             int n_rows,
             int n_cols,
             int* predictions,
             int verbosity)
{
  ASSERT(!forest->trees.empty(), "Cannot predict! No trees in the forest.");
  auto classifier =
    std::make_shared<RandomForest<double, int>>(forest->rf_params, RF_type::CLASSIFICATION);
  classifier->predict(user_handle, input, n_rows, n_cols, predictions, forest, verbosity);
}
/**
 * @defgroup RandomForestClassificationScore Random Forest Classification - Score function
 * @brief Compare predicted features validate against ref_labels.
 * @param[in] user_handle: raft::handle_t.
 * @param[in] forest: CPU pointer to RandomForestMetaData object.
 *   The user should have previously called fit to build the random forest.
 * @param[in] ref_labels: label values for cross validation (n_rows elements); GPU pointer.
 * @param[in] n_rows: number of data samples.
 * @param[in] predictions: n_rows predicted labels. GPU pointer, user allocated.
 * @param[in] verbosity: verbosity level for logging messages during execution
 * @return RF_metrics struct with classification score (i.e., accuracy)
 * @{
 */
RF_metrics score(const raft::handle_t& user_handle,
                 const RandomForestClassifierF* forest,
                 const int* ref_labels,
                 int n_rows,
                 const int* predictions,
                 int verbosity)
{
  // Delegate directly to the static scorer; classification yields an accuracy metric.
  return RandomForest<float, int>::score(
    user_handle, ref_labels, n_rows, predictions, verbosity, RF_type::CLASSIFICATION);
}
RF_metrics score(const raft::handle_t& user_handle,
                 const RandomForestClassifierD* forest,
                 const int* ref_labels,
                 int n_rows,
                 const int* predictions,
                 int verbosity)
{
  // Delegate directly to the static scorer; classification yields an accuracy metric.
  return RandomForest<double, int>::score(
    user_handle, ref_labels, n_rows, predictions, verbosity, RF_type::CLASSIFICATION);
}
/**
 * @brief Sanity-check random forest hyper-parameters; aborts (ASSERT) on
 * invalid values.
 * @param[in] rf_params: random forest hyper-parameters
 */
void validity_check(const RF_params rf_params)
{
  ASSERT(rf_params.n_trees > 0, "Invalid n_trees %d", rf_params.n_trees);
  ASSERT(rf_params.max_samples > 0 && rf_params.max_samples <= 1.0,
         "max_samples value %f outside permitted (0, 1] range",
         rf_params.max_samples);
}
/**
 * @brief Assemble and validate an RF_params struct from individual
 * hyper-parameters (tree parameters are built via DT::set_tree_params).
 * @param[in] max_depth, max_leaves, max_features, max_n_bins, min_samples_leaf,
 *   min_samples_split, min_impurity_decrease, split_criterion, max_batch_size:
 *   per-tree hyper-parameters, forwarded to DT::set_tree_params.
 * @param[in] bootstrap: whether to bootstrap-sample rows per tree.
 * @param[in] n_trees: number of trees in the forest.
 * @param[in] max_samples: fraction of rows sampled per tree, in (0, 1].
 * @param[in] seed: RNG seed.
 * @param[in] cfg_n_streams: requested number of CUDA streams.
 * @return validated RF_params (validity_check aborts on invalid values).
 */
RF_params set_rf_params(int max_depth,
                        int max_leaves,
                        float max_features,
                        int max_n_bins,
                        int min_samples_leaf,
                        int min_samples_split,
                        float min_impurity_decrease,
                        bool bootstrap,
                        int n_trees,
                        float max_samples,
                        uint64_t seed,
                        CRITERION split_criterion,
                        int cfg_n_streams,
                        int max_batch_size)
{
  DT::DecisionTreeParams tree_params;
  DT::set_tree_params(tree_params,
                      max_depth,
                      max_leaves,
                      max_features,
                      max_n_bins,
                      min_samples_leaf,
                      min_samples_split,
                      min_impurity_decrease,
                      split_criterion,
                      max_batch_size);
  RF_params rf_params;
  rf_params.n_trees     = n_trees;
  rf_params.bootstrap   = bootstrap;
  rf_params.max_samples = max_samples;
  rf_params.seed        = seed;
  // Cap the stream count by the OpenMP thread budget and by the number of
  // trees (more streams than trees is pointless). std::min is used instead of
  // the unqualified `min`, which relied on a CUDA/global overload being in scope.
  rf_params.n_streams = std::min(cfg_n_streams, omp_get_max_threads());
  if (n_trees < rf_params.n_streams) rf_params.n_streams = n_trees;
  rf_params.tree_params = tree_params;
  validity_check(rf_params);
  return rf_params;
}
/** @} */
/**
 * @defgroup RandomForestRegressorFit Random Forest Regression - Fit function
 * @brief Build (i.e., fit, train) random forest regressor for input data.
 * @param[in] user_handle: raft::handle_t
 * @param[in,out] forest: CPU pointer to RandomForestMetaData object. User allocated.
 * @param[in] input: train data (n_rows samples, n_cols features) in column major format,
 *   excluding labels. Device pointer.
 * @param[in] n_rows: number of training data samples.
 * @param[in] n_cols: number of features (i.e., columns) excluding target feature.
 * @param[in] labels: 1D array of target features (float or double), with one label per
 *   training sample. Device pointer.
 * @param[in] rf_params: Random Forest training hyper parameter struct.
 * @param[in] verbosity: verbosity level for logging messages during execution
 * @{
 */
void fit(const raft::handle_t& user_handle,
         RandomForestRegressorF*& forest,
         float* input,
         int n_rows,
         int n_cols,
         float* labels,
         RF_params rf_params,
         int verbosity)
{
  raft::common::nvtx::range fun_scope("RF::fit @randomforest.cu");
  ML::Logger::get().setLevel(verbosity);
  ASSERT(forest->trees.empty(), "Cannot fit an existing forest.");
  // Record the training configuration and reserve one slot per tree up front.
  forest->rf_params = rf_params;
  forest->trees.resize(rf_params.n_trees);
  auto regressor = std::make_shared<RandomForest<float, float>>(rf_params, RF_type::REGRESSION);
  // Regression has a single continuous output, hence n_unique_labels = 1.
  regressor->fit(user_handle, input, n_rows, n_cols, labels, 1, forest);
}
void fit(const raft::handle_t& user_handle,
         RandomForestRegressorD*& forest,
         double* input,
         int n_rows,
         int n_cols,
         double* labels,
         RF_params rf_params,
         int verbosity)
{
  raft::common::nvtx::range fun_scope("RF::fit @randomforest.cu");
  ML::Logger::get().setLevel(verbosity);
  ASSERT(forest->trees.empty(), "Cannot fit an existing forest.");
  // Record the training configuration and reserve one slot per tree up front.
  forest->rf_params = rf_params;
  forest->trees.resize(rf_params.n_trees);
  auto regressor = std::make_shared<RandomForest<double, double>>(rf_params, RF_type::REGRESSION);
  // Regression has a single continuous output, hence n_unique_labels = 1.
  regressor->fit(user_handle, input, n_rows, n_cols, labels, 1, forest);
}
/** @} */
/**
 * @defgroup RandomForestRegressorPredict Random Forest Regression - Predict function
 * @brief Predict target feature for input data; regression for single feature supported.
 * @param[in] user_handle: raft::handle_t.
 * @param[in] forest: CPU pointer to RandomForestMetaData object.
 *   The user should have previously called fit to build the random forest.
 * @param[in] input: test data (n_rows samples, n_cols features) in row major format. GPU pointer.
 * @param[in] n_rows: number of data samples.
 * @param[in] n_cols: number of features (excluding target feature).
 * @param[in, out] predictions: n_rows predicted labels. GPU pointer, user allocated.
 * @param[in] verbosity: verbosity level for logging messages during execution
 * @{
 */
void predict(const raft::handle_t& user_handle,
             const RandomForestRegressorF* forest,
             const float* input,
             int n_rows,
             int n_cols,
             float* predictions,
             int verbosity)
{
  // Guard against an unfitted forest, mirroring the classification predict() overloads.
  ASSERT(!forest->trees.empty(), "Cannot predict! No trees in the forest.");
  std::shared_ptr<RandomForest<float, float>> rf_regressor =
    std::make_shared<RandomForest<float, float>>(forest->rf_params, RF_type::REGRESSION);
  rf_regressor->predict(user_handle, input, n_rows, n_cols, predictions, forest, verbosity);
}
void predict(const raft::handle_t& user_handle,
             const RandomForestRegressorD* forest,
             const double* input,
             int n_rows,
             int n_cols,
             double* predictions,
             int verbosity)
{
  // Guard against an unfitted forest, mirroring the classification predict() overloads.
  ASSERT(!forest->trees.empty(), "Cannot predict! No trees in the forest.");
  std::shared_ptr<RandomForest<double, double>> rf_regressor =
    std::make_shared<RandomForest<double, double>>(forest->rf_params, RF_type::REGRESSION);
  rf_regressor->predict(user_handle, input, n_rows, n_cols, predictions, forest, verbosity);
}
/** @} */
/**
 * @defgroup RandomForestRegressorScore Random Forest Regression - Score function
 * @brief Predict target feature for input data and validate against ref_labels.
 * @param[in] user_handle: raft::handle_t.
 * @param[in] forest: CPU pointer to RandomForestMetaData object.
 *   The user should have previously called fit to build the random forest.
 * @param[in] ref_labels: label values for cross validation (n_rows elements); GPU pointer.
 * @param[in] n_rows: number of data samples.
 * @param[in] predictions: n_rows predicted labels. GPU pointer, user allocated.
 * @param[in] verbosity: verbosity level for logging messages during execution
 * @return RF_metrics struct with regression score (i.e., mean absolute error,
 *   mean squared error, median absolute error)
 * @{
 */
RF_metrics score(const raft::handle_t& user_handle,
                 const RandomForestRegressorF* forest,
                 const float* ref_labels,
                 int n_rows,
                 const float* predictions,
                 int verbosity)
{
  // Delegate to the static scorer; regression yields MAE / MSE / median-AE metrics.
  return RandomForest<float, float>::score(
    user_handle, ref_labels, n_rows, predictions, verbosity, RF_type::REGRESSION);
}
RF_metrics score(const raft::handle_t& user_handle,
                 const RandomForestRegressorD* forest,
                 const double* ref_labels,
                 int n_rows,
                 const double* predictions,
                 int verbosity)
{
  // Delegate to the static scorer; regression yields MAE / MSE / median-AE metrics.
  return RandomForest<double, double>::score(
    user_handle, ref_labels, n_rows, predictions, verbosity, RF_type::REGRESSION);
}
/** @} */
// Functions' specializations: explicit template instantiations for the four
// supported (data type, label type) combinations — classification uses int
// labels, regression uses labels matching the data type.

// Text/JSON rendering
template std::string get_rf_summary_text<float, int>(const RandomForestClassifierF* forest);
template std::string get_rf_summary_text<double, int>(const RandomForestClassifierD* forest);
template std::string get_rf_summary_text<float, float>(const RandomForestRegressorF* forest);
template std::string get_rf_summary_text<double, double>(const RandomForestRegressorD* forest);
template std::string get_rf_detailed_text<float, int>(const RandomForestClassifierF* forest);
template std::string get_rf_detailed_text<double, int>(const RandomForestClassifierD* forest);
template std::string get_rf_detailed_text<float, float>(const RandomForestRegressorF* forest);
template std::string get_rf_detailed_text<double, double>(const RandomForestRegressorD* forest);
template std::string get_rf_json<float, int>(const RandomForestClassifierF* forest);
template std::string get_rf_json<double, int>(const RandomForestClassifierD* forest);
template std::string get_rf_json<float, float>(const RandomForestRegressorF* forest);
template std::string get_rf_json<double, double>(const RandomForestRegressorD* forest);

// Metadata lifetime
template void delete_rf_metadata<float, int>(RandomForestClassifierF* forest);
template void delete_rf_metadata<double, int>(RandomForestClassifierD* forest);
template void delete_rf_metadata<float, float>(RandomForestRegressorF* forest);
template void delete_rf_metadata<double, double>(RandomForestRegressorD* forest);

// Treelite export
template void build_treelite_forest<float, int>(ModelHandle* model,
                                                const RandomForestMetaData<float, int>* forest,
                                                int num_features);
template void build_treelite_forest<double, int>(ModelHandle* model,
                                                 const RandomForestMetaData<double, int>* forest,
                                                 int num_features);
template void build_treelite_forest<float, float>(ModelHandle* model,
                                                  const RandomForestMetaData<float, float>* forest,
                                                  int num_features);
template void build_treelite_forest<double, double>(
  ModelHandle* model, const RandomForestMetaData<double, double>* forest, int num_features);
} // End namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/fitness.cuh | /*
* Copyright (c) 2021-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <raft/linalg/eltwise.cuh>
#include <raft/linalg/matrix_vector_op.cuh>
#include <raft/linalg/strided_reduction.cuh>
#include <raft/linalg/unary_op.cuh>
#include <raft/matrix/math.cuh>
#include <raft/stats/mean.cuh>
#include <raft/stats/mean_center.cuh>
#include <raft/stats/stddev.cuh>
#include <raft/stats/sum.cuh>
#include <raft/util/cuda_utils.cuh>
#include <rmm/device_scalar.hpp>
#include <rmm/device_uvector.hpp>
#include <thrust/adjacent_difference.h>
#include <thrust/copy.h>
#include <thrust/device_ptr.h>
#include <thrust/device_vector.h>
#include <thrust/functional.h>
#include <thrust/iterator/permutation_iterator.h>
#include <thrust/memory.h>
#include <thrust/scan.h>
#include <thrust/sequence.h>
#include <thrust/sort.h>
#include <thrust/transform.h>
#include <raft/util/cudart_utils.hpp>
namespace cuml {
namespace genetic {
/**
 * @brief Weighted Pearson correlation between the true output Y and each
 * program's predicted output in X, written to out (one coefficient per program).
 * @param h        raft handle providing the CUDA stream.
 * @param n_samples number of samples per program.
 * @param n_progs   number of programs (rows of X / entries of out).
 * @param Y        device pointer, true output (n_samples values).
 * @param X        device pointer, predicted outputs (n_progs x n_samples).
 * @param W        device pointer, per-sample weights (n_samples values).
 * @param out      device pointer, one correlation coefficient per program.
 */
template <typename math_t = float>
void weightedPearson(const raft::handle_t& h,
                     const uint64_t n_samples,
                     const uint64_t n_progs,
                     const math_t* Y,
                     const math_t* X,
                     const math_t* W,
                     math_t* out)
{
  // Find Pearson's correlation coefficient
  cudaStream_t stream = h.get_stream();
  rmm::device_uvector<math_t> corr(n_samples * n_progs, stream);
  rmm::device_uvector<math_t> y_tmp(n_samples, stream);
  rmm::device_uvector<math_t> x_tmp(n_samples * n_progs, stream);
  rmm::device_scalar<math_t> y_mu(stream);            // output mean
  rmm::device_uvector<math_t> x_mu(n_progs, stream);  // predicted output mean
  rmm::device_uvector<math_t> y_diff(n_samples, stream);  // normalized output
  rmm::device_uvector<math_t> x_diff(n_samples * n_progs,
                                     stream);  // normalized predicted output
  rmm::device_uvector<math_t> y_std(1, stream);  // output stddev
  rmm::device_uvector<math_t> x_std(n_progs,
                                    stream);  // predicted output stddev
  rmm::device_scalar<math_t> dWS(stream);     // sample weight sum
  math_t N = (math_t)n_samples;

  // Sum of weights WS = sum(W); weights are normalized below as N * w / WS.
  raft::stats::sum(dWS.data(), W, (uint64_t)1, n_samples, false, stream);
  math_t WS = dWS.value(stream);

  // Find y_mu: weighted mean of Y, computed as mean(N * w * y / WS)
  raft::linalg::matrixVectorOp(
    y_tmp.data(),
    Y,
    W,
    (uint64_t)1,
    n_samples,
    false,
    false,
    [N, WS] __device__(math_t y, math_t w) { return N * w * y / WS; },
    stream);
  raft::stats::mean(y_mu.data(), y_tmp.data(), (uint64_t)1, n_samples, false, false, stream);
  // Find x_mu: weighted mean of each program's predictions
  raft::linalg::matrixVectorOp(
    x_tmp.data(),
    X,
    W,
    n_progs,
    n_samples,
    false,
    true,
    [N, WS] __device__(math_t x, math_t w) { return N * w * x / WS; },
    stream);
  raft::stats::mean(x_mu.data(), x_tmp.data(), n_progs, n_samples, false, false, stream);
  // Find y_diff: Y centered on its weighted mean
  raft::stats::meanCenter(
    y_diff.data(), Y, y_mu.data(), (uint64_t)1, n_samples, false, true, stream);
  // Find x_diff: X centered per-program on its weighted mean
  raft::stats::meanCenter(x_diff.data(), X, x_mu.data(), n_progs, n_samples, false, true, stream);
  // Find y_std = sqrt(sum(w * y_diff^2))
  raft::linalg::stridedReduction(
    y_std.data(),
    y_diff.data(),
    (uint64_t)1,
    n_samples,
    (math_t)0,
    stream,
    false,
    [W] __device__(math_t v, int i) { return v * v * W[i]; },
    raft::Sum<math_t>(),
    [] __device__(math_t in) { return raft::mySqrt(in); });
  math_t HYstd = y_std.element(0, stream);
  // Find x_std, per program, with the same weighted reduction
  raft::linalg::stridedReduction(
    x_std.data(),
    x_diff.data(),
    n_progs,
    n_samples,
    (math_t)0,
    stream,
    false,
    [W] __device__(math_t v, int i) { return v * v * W[i]; },
    raft::Sum<math_t>(),
    [] __device__(math_t in) { return raft::mySqrt(in); });
  // Cross covariance: N * w * x_diff * y_diff, pre-divided by y's stddev
  raft::linalg::matrixVectorOp(
    corr.data(),
    x_diff.data(),
    y_diff.data(),
    W,
    n_progs,
    n_samples,
    false,
    false,
    [N, HYstd] __device__(math_t xd, math_t yd, math_t w) { return N * w * xd * yd / HYstd; },
    stream);
  // Find Correlation coeff: divide by each program's stddev, then average over samples
  raft::linalg::matrixVectorOp(
    corr.data(),
    corr.data(),
    x_std.data(),
    n_progs,
    n_samples,
    false,
    true,
    [] __device__(math_t c, math_t xd) { return c / xd; },
    stream);
  raft::stats::mean(out, corr.data(), n_progs, n_samples, false, false, stream);
}
/**
 * @brief Unary functor mapping a value to 1 if it is non-zero and 0 otherwise.
 * Used on adjacent differences of sorted data to flag positions where the
 * value changes, which a prefix-sum then turns into dense ranks.
 */
struct rank_functor {
  template <typename math_t>
  __host__ __device__ math_t operator()(math_t data) const  // const: functor carries no state
  {
    // Collapse any non-zero difference to 1 (equivalent to the if/else form).
    return data == 0 ? math_t(0) : math_t(1);
  }
};
/**
 * @brief Weighted Spearman correlation: rank-transform Y and each program's
 * predictions, then compute the weighted Pearson coefficient on the ranks.
 * @param h        raft handle providing the CUDA stream.
 * @param n_samples number of samples per program.
 * @param n_progs   number of programs.
 * @param Y        device pointer, true output (n_samples values).
 * @param Y_pred   device pointer, predicted outputs (n_progs x n_samples).
 * @param W        device pointer, per-sample weights (n_samples values).
 * @param out      device pointer, one coefficient per program.
 */
template <typename math_t = float>
void weightedSpearman(const raft::handle_t& h,
                      const uint64_t n_samples,
                      const uint64_t n_progs,
                      const math_t* Y,
                      const math_t* Y_pred,
                      const math_t* W,
                      math_t* out)
{
  cudaStream_t stream = h.get_stream();

  // Get ranks for Y.
  // Dense ranking: sort a copy (tracking original positions in rank_idx), mark
  // where a sorted value differs from its predecessor, prefix-sum the marks,
  // then scatter the ranks back to the original order.
  // NOTE(review): ties receive the same dense rank rather than the averaged
  // rank used by e.g. scipy.stats.spearmanr — confirm this is intended.
  thrust::device_vector<math_t> Ycopy(Y, Y + n_samples);
  thrust::device_vector<math_t> rank_idx(n_samples, 0);
  thrust::device_vector<math_t> rank_diff(n_samples, 0);
  thrust::device_vector<math_t> Yrank(n_samples, 0);

  auto exec_policy = rmm::exec_policy(stream);
  thrust::sequence(exec_policy, rank_idx.begin(), rank_idx.end(), 0);
  thrust::sort_by_key(exec_policy, Ycopy.begin(), Ycopy.end(), rank_idx.begin())
;
  thrust::adjacent_difference(exec_policy, Ycopy.begin(), Ycopy.end(), rank_diff.begin());
  thrust::transform(
    exec_policy, rank_diff.begin(), rank_diff.end(), rank_diff.begin(), rank_functor());
  rank_diff[0] = 1;  // first sorted element always gets rank 1
  thrust::inclusive_scan(exec_policy, rank_diff.begin(), rank_diff.end(), rank_diff.begin());
  thrust::copy(rank_diff.begin(),
               rank_diff.end(),
               thrust::make_permutation_iterator(Yrank.begin(), rank_idx.begin()));

  // Get ranks for Y_pred, one program (row) at a time using the same scheme.
  // TODO: Find a better way to batch this
  thrust::device_vector<math_t> Ypredcopy(Y_pred, Y_pred + n_samples * n_progs);
  thrust::device_vector<math_t> Ypredrank(n_samples * n_progs, 0);
  thrust::device_ptr<math_t> Ypredptr = thrust::device_pointer_cast<math_t>(Ypredcopy.data());
  thrust::device_ptr<math_t> Ypredrankptr = thrust::device_pointer_cast<math_t>(Ypredrank.data());
  for (std::size_t i = 0; i < n_progs; ++i) {
    thrust::sequence(exec_policy, rank_idx.begin(), rank_idx.end(), 0);
    thrust::sort_by_key(
      exec_policy, Ypredptr + (i * n_samples), Ypredptr + ((i + 1) * n_samples), rank_idx.begin());
    thrust::adjacent_difference(
      exec_policy, Ypredptr + (i * n_samples), Ypredptr + ((i + 1) * n_samples), rank_diff.begin());
    thrust::transform(
      exec_policy, rank_diff.begin(), rank_diff.end(), rank_diff.begin(), rank_functor());
    rank_diff[0] = 1;
    thrust::inclusive_scan(exec_policy, rank_diff.begin(), rank_diff.end(), rank_diff.begin());
    thrust::copy(
      rank_diff.begin(),
      rank_diff.end(),
      thrust::make_permutation_iterator(Ypredrankptr + (i * n_samples), rank_idx.begin()));
  }

  // Compute pearson's coefficient on the rank-transformed data.
  weightedPearson(h,
                  n_samples,
                  n_progs,
                  thrust::raw_pointer_cast(Yrank.data()),
                  thrust::raw_pointer_cast(Ypredrank.data()),
                  W,
                  out);
}
/**
 * @brief Computes the weighted mean absolute error of each program's
 *        predictions against the labels, producing one score per program.
 *
 * Per-sample errors are scaled by n_samples * w / sum(w), so uniform unit
 * weights reduce this to the ordinary (unweighted) MAE.
 *
 * @param h         cuML handle (supplies the CUDA stream)
 * @param n_samples number of samples (rows)
 * @param n_progs   number of programs being scored
 * @param Y         device pointer to labels, length n_samples
 * @param Y_pred    device pointer to predictions; one contiguous block of
 *                  n_samples values per program (as produced by execute())
 * @param W         device pointer to per-sample weights, length n_samples
 * @param out       device pointer receiving n_progs scores
 */
template <typename math_t = float>
void meanAbsoluteError(const raft::handle_t& h,
                       const uint64_t n_samples,
                       const uint64_t n_progs,
                       const math_t* Y,
                       const math_t* Y_pred,
                       const math_t* W,
                       math_t* out)
{
  cudaStream_t stream = h.get_stream();
  // Per-element weighted absolute errors for all programs
  rmm::device_uvector<math_t> error(n_samples * n_progs, stream);
  rmm::device_scalar<math_t> dWS(stream);
  math_t N = (math_t)n_samples;
  // Weight Sum
  raft::stats::sum(dWS.data(), W, (uint64_t)1, n_samples, false, stream);
  math_t WS = dWS.value(stream);
  // Compute absolute differences
  raft::linalg::matrixVectorOp(
    error.data(),
    Y_pred,
    Y,
    W,
    n_progs,
    n_samples,
    false,
    false,
    [N, WS] __device__(math_t y_p, math_t y, math_t w) {
      // N * w / WS normalizes the weights so they average to 1
      return N * w * raft::myAbs(y - y_p) / WS;
    },
    stream);
  // Average along rows (one mean per program)
  raft::stats::mean(out, error.data(), n_progs, n_samples, false, false, stream);
}
/**
 * @brief Computes the weighted mean squared error of each program's
 *        predictions against the labels, producing one score per program.
 *
 * Per-sample squared errors are scaled by n_samples * w / sum(w), so uniform
 * unit weights reduce this to the ordinary (unweighted) MSE.
 *
 * @param h         cuML handle (supplies the CUDA stream)
 * @param n_samples number of samples (rows)
 * @param n_progs   number of programs being scored
 * @param Y         device pointer to labels, length n_samples
 * @param Y_pred    device pointer to predictions; one contiguous block of
 *                  n_samples values per program (as produced by execute())
 * @param W         device pointer to per-sample weights, length n_samples
 * @param out       device pointer receiving n_progs scores
 */
template <typename math_t = float>
void meanSquareError(const raft::handle_t& h,
                     const uint64_t n_samples,
                     const uint64_t n_progs,
                     const math_t* Y,
                     const math_t* Y_pred,
                     const math_t* W,
                     math_t* out)
{
  cudaStream_t stream = h.get_stream();
  // Per-element weighted squared errors for all programs
  rmm::device_uvector<math_t> error(n_samples * n_progs, stream);
  rmm::device_scalar<math_t> dWS(stream);
  math_t N = (math_t)n_samples;
  // Weight Sum
  raft::stats::sum(dWS.data(), W, (uint64_t)1, n_samples, false, stream);
  math_t WS = dWS.value(stream);
  // Compute square differences
  raft::linalg::matrixVectorOp(
    error.data(),
    Y_pred,
    Y,
    W,
    n_progs,
    n_samples,
    false,
    false,
    [N, WS] __device__(math_t y_p, math_t y, math_t w) {
      // N * w / WS normalizes the weights so they average to 1
      return N * w * (y_p - y) * (y_p - y) / WS;
    },
    stream);
  // Average along rows (one mean per program)
  raft::stats::mean(out, error.data(), n_progs, n_samples, false, false, stream);
}
/**
 * @brief Computes the weighted root-mean-squared error for each program.
 *
 * Delegates to meanSquareError and then applies an elementwise square root
 * to the n_progs scores already stored in `out`.
 *
 * @param h         cuML handle (supplies the CUDA stream)
 * @param n_samples number of samples (rows)
 * @param n_progs   number of programs being scored
 * @param Y         device pointer to labels, length n_samples
 * @param Y_pred    device pointer to predictions; one contiguous block of
 *                  n_samples values per program
 * @param W         device pointer to per-sample weights, length n_samples
 * @param out       device pointer receiving n_progs scores
 */
template <typename math_t = float>
void rootMeanSquareError(const raft::handle_t& h,
                         const uint64_t n_samples,
                         const uint64_t n_progs,
                         const math_t* Y,
                         const math_t* Y_pred,
                         const math_t* W,
                         math_t* out)
{
  cudaStream_t stream = h.get_stream();
  // Find MSE
  meanSquareError(h, n_samples, n_progs, Y, Y_pred, W, out);
  // Take sqrt on all entries
  raft::matrix::seqRoot(out, n_progs, stream);
}
/**
 * @brief Computes the weighted logistic loss (cross-entropy) of each
 *        program's raw outputs against binary labels, one score per program.
 *
 * @param h         cuML handle (supplies the CUDA stream)
 * @param n_samples number of samples (rows)
 * @param n_progs   number of programs being scored
 * @param Y         device pointer to labels, length n_samples
 * @param Y_pred    device pointer to raw (pre-sigmoid) predictions; one
 *                  contiguous block of n_samples values per program
 * @param W         device pointer to per-sample weights, length n_samples
 * @param out       device pointer receiving n_progs scores
 */
template <typename math_t = float>
void logLoss(const raft::handle_t& h,
             const uint64_t n_samples,
             const uint64_t n_progs,
             const math_t* Y,
             const math_t* Y_pred,
             const math_t* W,
             math_t* out)
{
  cudaStream_t stream = h.get_stream();
  // Logistic error per sample
  rmm::device_uvector<math_t> error(n_samples * n_progs, stream);
  rmm::device_scalar<math_t> dWS(stream);
  math_t N = (math_t)n_samples;
  // Weight Sum
  raft::stats::sum(dWS.data(), W, (uint64_t)1, n_samples, false, stream);
  math_t WS = dWS.value(stream);
  // Compute logistic loss as described in
  // http://fa.bianp.net/blog/2019/evaluate_logistic/
  // in an attempt to avoid encountering nan values. Modified for weighted logistic regression.
  // NOTE(review): expf/log1pf are single-precision intrinsics; if math_t is
  // double this silently degrades precision — confirm that is intended.
  raft::linalg::matrixVectorOp(
    error.data(),
    Y_pred,
    Y,
    W,
    n_progs,
    n_samples,
    false,
    false,
    [N, WS] __device__(math_t yp, math_t y, math_t w) {
      // Piecewise-stable evaluation of logsig = log(sigmoid(yp))
      math_t logsig;
      if (yp < -33.3)
        logsig = yp;  // exp(yp) underflows; log(sigmoid(yp)) ~ yp
      else if (yp <= -18)
        logsig = yp - expf(yp);
      else if (yp <= 37)
        logsig = -log1pf(expf(-yp));
      else
        logsig = -expf(-yp);  // exp(-yp) tiny; log1p(x) ~ x
      // Weighted cross-entropy term, weights normalized to average 1
      return ((1 - y) * yp - logsig) * (N * w / WS);
    },
    stream);
  // Take average along rows
  raft::stats::mean(out, error.data(), n_progs, n_samples, false, false, stream);
}
} // namespace genetic
} // namespace cuml
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/program.cu | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/common/logger.hpp>
#include <cuml/genetic/node.h>
#include <cuml/genetic/program.h>
#include <raft/linalg/unary_op.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <algorithm>
#include <numeric>
#include <random>
#include <stack>
#include "constants.h"
#include "fitness.cuh"
#include "node.cuh"
#include "reg_stack.cuh"
namespace cuml {
namespace genetic {
/**
 * Execution kernel for a single program. We assume that the input data
 * is stored in column major format.
 *
 * Grid layout: blockIdx.y selects the program, blockIdx.x/threadIdx.x select
 * the dataset row — each thread evaluates one (program, row) pair.
 *
 * @tparam MaxSize capacity of the per-thread evaluation stack; the program's
 *                 depth must fit within it
 */
template <int MaxSize = MAX_STACK_SIZE>
__global__ void execute_kernel(const program_t d_progs,
                               const float* data,
                               float* y_pred,
                               const uint64_t n_rows)
{
  uint64_t pid = blockIdx.y; // current program
  uint64_t row_id = blockIdx.x * blockDim.x + threadIdx.x; // current dataset row
  if (row_id >= n_rows) { return; }
  stack<float, MaxSize> eval_stack; // Maintain stack only for remaining threads
  program_t curr_p = d_progs + pid; // Current program
  int end = curr_p->len - 1;
  node* curr_node = curr_p->nodes + end;
  float res = 0.0f;
  float in[2] = {0.0f, 0.0f};
  // The AST is stored in prefix order (see build_program), so scanning it
  // right-to-left guarantees a node's operands are already on the stack when
  // that node is visited.
  while (end >= 0) {
    if (detail::is_nonterminal(curr_node->t)) {
      int ar = detail::arity(curr_node->t);
      in[0] = eval_stack.pop(); // Min arity of function is 1
      if (ar > 1) in[1] = eval_stack.pop();
    }
    res = detail::evaluate_node(*curr_node, data, n_rows, row_id, in);
    eval_stack.push(res);
    curr_node--;
    end--;
  }
  // Outputs stored in col-major format
  y_pred[pid * n_rows + row_id] = eval_stack.pop();
}
// Default-construct an empty program: no AST nodes, MSE metric, no mutation.
program::program()
  : len(0),
    depth(0),
    raw_fitness_(0.0f),
    metric(metric_t::mse),
    mut_type(mutation_t::none),
    nodes(nullptr)
{
}

// The program owns its node buffer.
program::~program() { delete[] nodes; }

// Copy constructor performs a deep copy: the node buffer is duplicated,
// never shared between programs.
program::program(const program& src)
  : len(src.len),
    depth(src.depth),
    raw_fitness_(src.raw_fitness_),
    metric(src.metric),
    mut_type(src.mut_type)
{
  nodes = new node[len];
  std::copy(src.nodes, src.nodes + src.len, nodes);
}
/**
 * @brief Copy-assignment performing a deep copy of the AST.
 *
 * Fixes two defects in the previous implementation:
 *  - no self-assignment guard: `delete[] nodes` ran before copying from
 *    `src`, so `p = p;` read freed memory;
 *  - the old buffer was released before the new allocation, leaving the
 *    object with a dangling pointer if `new` threw.
 */
program& program::operator=(const program& src)
{
  if (this == &src) { return *this; }
  // Allocate and copy the new AST first for basic exception safety.
  node* new_nodes = new node[src.len];
  std::copy(src.nodes, src.nodes + src.len, new_nodes);
  len          = src.len;
  depth        = src.depth;
  raw_fitness_ = src.raw_fitness_;
  metric       = src.metric;
  mut_type     = src.mut_type;
  delete[] nodes;
  nodes = new_nodes;
  return *this;
}
/**
 * @brief Scores predictions against labels with the metric selected in
 *        `params`, writing one score per program into `score`.
 *
 * @param h       cuML handle (supplies the CUDA stream)
 * @param n_rows  number of samples
 * @param n_progs number of programs being scored
 * @param y       device pointer to labels
 * @param y_pred  device pointer to predictions (n_rows per program)
 * @param w       device pointer to per-sample weights
 * @param score   device pointer receiving n_progs scores
 * @param params  training hyperparameters (selects the metric)
 */
void compute_metric(const raft::handle_t& h,
                    int n_rows,
                    int n_progs,
                    const float* y,
                    const float* y_pred,
                    const float* w,
                    float* score,
                    const param& params)
{
  // Dispatch on the metric chosen in the training parameters.
  switch (params.metric) {
    case metric_t::pearson: weightedPearson(h, n_rows, n_progs, y, y_pred, w, score); break;
    case metric_t::spearman: weightedSpearman(h, n_rows, n_progs, y, y_pred, w, score); break;
    case metric_t::mae: meanAbsoluteError(h, n_rows, n_progs, y, y_pred, w, score); break;
    case metric_t::mse: meanSquareError(h, n_rows, n_progs, y, y_pred, w, score); break;
    case metric_t::rmse: rootMeanSquareError(h, n_rows, n_progs, y, y_pred, w, score); break;
    case metric_t::logloss: logLoss(h, n_rows, n_progs, y, y_pred, w, score); break;
    default:
      // Unreachable: every supported metric is handled above.
      break;
  }
}
/**
 * @brief Evaluates all programs on the given dataset.
 *
 * Launches execute_kernel with a 2D grid: x covers dataset rows in chunks of
 * GENE_TPB threads, y indexes the n_progs programs.
 *
 * @param h       cuML handle (supplies the CUDA stream)
 * @param d_progs device pointer to the programs
 * @param n_rows  number of dataset rows
 * @param n_progs number of programs to run
 * @param data    device pointer to the input dataset (col-major)
 * @param y_pred  device output; n_rows predictions per program, contiguous
 */
void execute(const raft::handle_t& h,
             const program_t& d_progs,
             const int n_rows,
             const int n_progs,
             const float* data,
             float* y_pred)
{
  cudaStream_t stream = h.get_stream();
  dim3 blks(raft::ceildiv(n_rows, GENE_TPB), n_progs, 1);
  execute_kernel<<<blks, GENE_TPB, 0, stream>>>(d_progs, data, y_pred, (uint64_t)n_rows);
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
/**
 * @brief Computes the raw fitness score of a single program on a dataset.
 *
 * Runs the program over all rows, then scores the predictions with the
 * metric selected in `params`.
 *
 * @param h              cuML handle (supplies the CUDA stream)
 * @param d_prog         device pointer to the program
 * @param score          device pointer receiving one score
 * @param params         training hyperparameters (selects the metric)
 * @param n_rows         number of dataset rows
 * @param data           device pointer to the input dataset
 * @param y              device pointer to labels
 * @param sample_weights device pointer to per-sample weights
 */
void find_fitness(const raft::handle_t& h,
                  program_t& d_prog,
                  float* score,
                  const param& params,
                  const int n_rows,
                  const float* data,
                  const float* y,
                  const float* sample_weights)
{
  cudaStream_t stream = h.get_stream();
  // Compute predicted values
  rmm::device_uvector<float> y_pred(n_rows, stream);
  execute(h, d_prog, n_rows, 1, data, y_pred.data());
  // Compute error
  compute_metric(h, n_rows, 1, y, y_pred.data(), sample_weights, score, params);
}
/**
 * @brief Computes the raw fitness of a batch of programs on a dataset.
 *
 * Evaluates every program over all rows in one kernel launch, then scores
 * all predictions with the metric selected in `params`.
 *
 * @param h              cuML handle (supplies the CUDA stream)
 * @param n_progs        number of programs in the batch
 * @param d_progs        device pointer to the programs
 * @param score          device pointer receiving n_progs scores
 * @param params         training hyperparameters (selects the metric)
 * @param n_rows         number of dataset rows
 * @param data           device pointer to the input dataset
 * @param y              device pointer to labels
 * @param sample_weights device pointer to per-sample weights
 */
void find_batched_fitness(const raft::handle_t& h,
                          int n_progs,
                          program_t& d_progs,
                          float* score,
                          const param& params,
                          const int n_rows,
                          const float* data,
                          const float* y,
                          const float* sample_weights)
{
  cudaStream_t exec_stream = h.get_stream();
  // Scratch buffer for all predictions: one contiguous n_rows block per
  // program (the layout produced by execute()).
  const auto n_pred = static_cast<uint64_t>(n_rows) * static_cast<uint64_t>(n_progs);
  rmm::device_uvector<float> predictions(n_pred, exec_stream);
  // Run every program, then score the predictions.
  execute(h, d_progs, n_rows, n_progs, data, predictions.data());
  compute_metric(h, n_rows, n_progs, y, predictions.data(), sample_weights, score, params);
}
/**
 * @brief Computes and stores the raw fitness of one program on both its
 *        device copy (d_prog) and its host mirror (h_prog).
 *
 * @param h              cuML handle (supplies the CUDA stream)
 * @param d_prog         device pointer to the program (score written back)
 * @param h_prog         host mirror whose raw_fitness_ is updated
 * @param params         training hyperparameters (selects the metric)
 * @param n_rows         number of dataset rows
 * @param data           device pointer to the input dataset
 * @param y              device pointer to labels
 * @param sample_weights device pointer to per-sample weights
 */
void set_fitness(const raft::handle_t& h,
                 program_t& d_prog,
                 program& h_prog,
                 const param& params,
                 const int n_rows,
                 const float* data,
                 const float* y,
                 const float* sample_weights)
{
  cudaStream_t stream = h.get_stream();
  rmm::device_uvector<float> score(1, stream);
  find_fitness(h, d_prog, score.data(), params, n_rows, data, y, sample_weights);
  // Update host and device score for program.
  // &d_prog[0].raw_fitness_ is pointer arithmetic only (no host-side
  // dereference of device memory): it yields the device address of the member.
  RAFT_CUDA_TRY(cudaMemcpyAsync(
    &d_prog[0].raw_fitness_, score.data(), sizeof(float), cudaMemcpyDeviceToDevice, stream));
  h_prog.raw_fitness_ = score.front_element(stream);
}
/**
 * @brief Computes fitness for a batch of programs and mirrors the scores
 *        into both the device programs (d_progs) and the host programs
 *        (h_progs).
 *
 * @param h              cuML handle (supplies the CUDA stream)
 * @param n_progs        number of programs in the batch
 * @param d_progs        device pointer to the programs (scores written back)
 * @param h_progs        host mirrors whose raw_fitness_ fields are updated
 * @param params         training hyperparameters (selects the metric)
 * @param n_rows         number of dataset rows
 * @param data           device pointer to the input dataset
 * @param y              device pointer to labels
 * @param sample_weights device pointer to per-sample weights
 */
void set_batched_fitness(const raft::handle_t& h,
                         int n_progs,
                         program_t& d_progs,
                         std::vector<program>& h_progs,
                         const param& params,
                         const int n_rows,
                         const float* data,
                         const float* y,
                         const float* sample_weights)
{
  cudaStream_t stream = h.get_stream();
  rmm::device_uvector<float> score(n_progs, stream);
  find_batched_fitness(h, n_progs, d_progs, score.data(), params, n_rows, data, y, sample_weights);
  // Update scores on host and device
  // TODO: Find a way to reduce the number of implicit memory transfers
  for (auto i = 0; i < n_progs; ++i) {
    // &d_progs[i].raw_fitness_ is pointer arithmetic on the device pointer
    // (no host-side dereference); each iteration issues one D2D and one D2H copy.
    RAFT_CUDA_TRY(cudaMemcpyAsync(&d_progs[i].raw_fitness_,
                                  score.element_ptr(i),
                                  sizeof(float),
                                  cudaMemcpyDeviceToDevice,
                                  stream));
    h_progs[i].raw_fitness_ = score.element(i, stream);
  }
}
/**
 * @brief Returns the program's fitness adjusted for bloat: a penalty
 *        proportional to program length is applied against the raw fitness.
 *
 * params.criterion() selects the optimization direction (see the tournament
 * kernel); (2*criterion - 1) maps it to a +/-1 sign so the length penalty
 * always works against longer programs.
 */
float get_fitness(const program& prog, const param& params)
{
  const int sign      = 2 * params.criterion() - 1;
  const float penalty = params.parsimony_coefficient * prog.len * sign;
  return prog.raw_fitness_ - penalty;
}
/**
 * @brief Get a random subtree of the current program nodes (on CPU)
 *
 * The subtree root is sampled with Koza's 90/10 rule: function nodes get
 * weight 0.9 and terminals 0.1, biasing selection towards internal nodes.
 *
 * @param pnodes AST represented as a list of nodes (prefix order)
 * @param len The total number of nodes in the AST
 * @param rng Random number generator for subtree selection
 * @return A tuple [first,last) which contains the required subtree
 */
std::pair<int, int> get_subtree(node* pnodes, int len, std::mt19937& rng)
{
  int start, end;
  start = end = 0;
  // Specify RNG
  std::uniform_real_distribution<float> dist_uniform(0.0f, 1.0f);
  float bound = dist_uniform(rng);
  // Specify subtree start probs acc to Koza's selection approach
  std::vector<float> node_probs(len, 0.1);
  float sum = 0.1 * len;
  for (int i = 0; i < len; ++i) {
    if (pnodes[i].is_nonterminal()) {
      node_probs[i] = 0.9;
      // weight rises from 0.1 to 0.9 -> delta of 0.8
      sum += 0.8;
    }
  }
  // Normalize vector
  for (int i = 0; i < len; ++i) {
    node_probs[i] /= sum;
  }
  // Compute cumulative sum (CDF) and sample the subtree root by inversion
  std::partial_sum(node_probs.begin(), node_probs.end(), node_probs.begin());
  start = std::lower_bound(node_probs.begin(), node_probs.end(), bound) - node_probs.begin();
  end = start;
  // Iterate until all function arguments are satisfied in current subtree:
  // in prefix order the subtree ends once the node count covers the
  // accumulated arity requirements.
  int num_args = 1;
  while (num_args > end - start) {
    node curr;
    curr = pnodes[end];
    if (curr.is_nonterminal()) num_args += curr.arity();
    ++end;
  }
  return std::make_pair(start, end);
}
/**
 * @brief Computes the maximum depth of a program's AST (root at depth 0).
 *
 * Walks the prefix-ordered node list while tracking, on a stack, how many
 * children are still pending for each open non-terminal; the stack height
 * when a node is visited is that node's depth.
 */
int get_depth(const program& p_out)
{
  int depth = 0;
  std::stack<int> arity_stack;
  for (auto i = 0; i < p_out.len; ++i) {
    node curr(p_out.nodes[i]);
    // Update depth
    int sz = arity_stack.size();
    depth = std::max(depth, sz);
    // Update stack
    if (curr.is_nonterminal()) {
      // Open a new subtree: its children are still pending
      arity_stack.push(curr.arity());
    } else {
      // Only triggered for a depth 0 node
      if (arity_stack.empty()) break;
      // The terminal fills one pending child slot of its parent
      int e = arity_stack.top();
      arity_stack.pop();
      arity_stack.push(e - 1);
      // Pop every fully-satisfied ancestor, decrementing its parent in turn
      while (arity_stack.top() == 0) {
        arity_stack.pop();
        if (arity_stack.empty()) break;
        e = arity_stack.top();
        arity_stack.pop();
        arity_stack.push(e - 1);
      }
    }
  }
  return depth;
}
/**
 * @brief Builds a random program AST (prefix-ordered node list) according to
 *        the initialization method and depth limits in `params`.
 *
 * Supports `grow` (terminals may appear before max depth), `full` (functions
 * only until max depth) and `half_and_half` (per-tree coin toss between the
 * two). On return p_out owns a freshly allocated node buffer and its
 * len/depth/metric/raw_fitness_ fields are set.
 *
 * NOTE(review): p_out.depth is only max'ed against its incoming value —
 * assumes the caller passes a freshly default-constructed program
 * (depth == 0); confirm at call sites.
 */
void build_program(program& p_out, const param& params, std::mt19937& rng)
{
  // Define data structures needed for tree
  std::stack<int> arity_stack;
  std::vector<node> nodelist;
  nodelist.reserve(1 << (MAX_STACK_SIZE));
  // Specify Distributions with parameters
  std::uniform_int_distribution<int> dist_function(0, params.function_set.size() - 1);
  std::uniform_int_distribution<int> dist_initDepth(params.init_depth[0], params.init_depth[1]);
  // Drawing num_features itself encodes "emit a constant" (see below)
  std::uniform_int_distribution<int> dist_terminalChoice(0, params.num_features);
  std::uniform_real_distribution<float> dist_constVal(params.const_range[0], params.const_range[1]);
  std::bernoulli_distribution dist_nodeChoice(params.terminalRatio);
  std::bernoulli_distribution dist_coinToss(0.5);
  // Initialize nodes: the root is always a function node
  int max_depth = dist_initDepth(rng);
  node::type func = params.function_set[dist_function(rng)];
  node curr_node(func);
  nodelist.push_back(curr_node);
  arity_stack.push(curr_node.arity());
  init_method_t method = params.init_method;
  if (method == init_method_t::half_and_half) {
    // Choose either grow or full for this tree
    bool choice = dist_coinToss(rng);
    method = choice ? init_method_t::grow : init_method_t::full;
  }
  // Fill tree: the arity stack tracks pending children; its height is the
  // depth of the node about to be emitted.
  while (!arity_stack.empty()) {
    int depth = arity_stack.size();
    p_out.depth = std::max(depth, p_out.depth);
    bool node_choice = dist_nodeChoice(rng);
    if ((node_choice == false || method == init_method_t::full) && depth < max_depth) {
      // Add a function to node list
      curr_node = node(params.function_set[dist_function(rng)]);
      nodelist.push_back(curr_node);
      arity_stack.push(curr_node.arity());
    } else {
      // Add terminal
      int terminal_choice = dist_terminalChoice(rng);
      if (terminal_choice == params.num_features) {
        // Add constant
        float val = dist_constVal(rng);
        curr_node = node(val);
      } else {
        // Add variable
        int fid = terminal_choice;
        curr_node = node(fid);
      }
      // Modify nodelist
      nodelist.push_back(curr_node);
      // Modify stack: the terminal fills one pending child slot; pop every
      // ancestor whose children are now all satisfied
      int e = arity_stack.top();
      arity_stack.pop();
      arity_stack.push(e - 1);
      while (arity_stack.top() == 0) {
        arity_stack.pop();
        if (arity_stack.empty()) { break; }
        e = arity_stack.top();
        arity_stack.pop();
        arity_stack.push(e - 1);
      }
    }
  }
  // Set new program parameters - need to do a copy as
  // nodelist will be deleted using RAII semantics
  p_out.nodes = new node[nodelist.size()];
  std::copy(nodelist.begin(), nodelist.end(), p_out.nodes);
  p_out.len = nodelist.size();
  p_out.metric = params.metric;
  p_out.raw_fitness_ = 0.0f;
}
/**
 * @brief Performs point mutation on `prog`, writing the result to `p_out`.
 *
 * Each node is independently replaced with probability
 * params.p_point_replace: terminals become a random terminal (variable or
 * constant) and functions become a random function of the same arity, so the
 * tree shape is preserved.
 */
void point_mutation(const program& prog, program& p_out, const param& params, std::mt19937& rng)
{
  // deep-copy program
  p_out = prog;
  // Specify RNGs
  std::uniform_real_distribution<float> dist_uniform(0.0f, 1.0f);
  // Drawing num_features itself encodes "generate a constant"
  // (same convention as build_program)
  std::uniform_int_distribution<int> dist_terminalChoice(0, params.num_features);
  std::uniform_real_distribution<float> dist_constantVal(params.const_range[0],
                                                         params.const_range[1]);
  // Fill with uniform numbers: one replacement draw per node
  std::vector<float> node_probs(p_out.len);
  std::generate(
    node_probs.begin(), node_probs.end(), [&dist_uniform, &rng] { return dist_uniform(rng); });
  // Mutate nodes
  int len = p_out.len;
  for (int i = 0; i < len; ++i) {
    node curr(prog.nodes[i]);
    if (node_probs[i] < params.p_point_replace) {
      if (curr.is_terminal()) {
        int choice = dist_terminalChoice(rng);
        if (choice == params.num_features) {
          // Add a randomly generated constant
          curr = node(dist_constantVal(rng));
        } else {
          // Add a variable with fid=choice
          curr = node(choice);
        }
      } else if (curr.is_nonterminal()) {
        // Replace current function with another function of the same arity
        int ar = curr.arity();
        // CUML_LOG_DEBUG("Arity is %d, curr function is
        // %d",ar,static_cast<std::underlying_type<node::type>::type>(curr.t));
        std::vector<node::type> fset = params.arity_set.at(ar);
        std::uniform_int_distribution<> dist_fset(0, fset.size() - 1);
        int choice = dist_fset(rng);
        curr = node(fset[choice]);
      }
      // Update p_out with updated value
      p_out.nodes[i] = curr;
    }
  }
}
/**
 * @brief Performs crossover: a random subtree of `prog` is replaced by a
 *        random subtree of `donor`, with the offspring written to `p_out`.
 *
 * If the offspring is too deep for the device evaluation stack, a
 * sub-subtree of the current donor slice is repeatedly re-selected
 * ("hoisted") until the offspring depth fits below MAX_STACK_SIZE.
 */
void crossover(
  const program& prog, const program& donor, program& p_out, const param& params, std::mt19937& rng)
{
  // Get a random subtree of prog to replace
  std::pair<int, int> prog_slice = get_subtree(prog.nodes, prog.len, rng);
  int prog_start = prog_slice.first;
  int prog_end = prog_slice.second;
  // Set metric of output program
  p_out.metric = prog.metric;
  // MAX_STACK_SIZE can only handle tree of depth MAX_STACK_SIZE - max(func_arity=2) + 1
  // Thus we continuously hoist the donor subtree.
  // Actual indices in donor
  int donor_start = 0;
  int donor_end = donor.len;
  int output_depth = 0;
  int iter = 0;
  do {
    ++iter;
    // Get donor subtree relative to the current window [donor_start, donor_end)
    std::pair<int, int> donor_slice =
      get_subtree(donor.nodes + donor_start, donor_end - donor_start, rng);
    // Get indices w.r.t current subspace [donor_start,donor_end)
    int donor_substart = donor_slice.first;
    int donor_subend = donor_slice.second;
    // Update relative indices to global indices
    donor_substart += donor_start;
    donor_subend += donor_start;
    // Update to new subspace
    donor_start = donor_substart;
    donor_end = donor_subend;
    // Evolve on current subspace: offspring = prog prefix + donor slice + prog suffix
    p_out.len = (prog_start) + (donor_end - donor_start) + (prog.len - prog_end);
    // Frees the previous iteration's buffer; delete[] of a null pointer
    // (default-constructed p_out) is a no-op
    delete[] p_out.nodes;
    p_out.nodes = new node[p_out.len];
    // Copy slices using std::copy
    std::copy(prog.nodes, prog.nodes + prog_start, p_out.nodes);
    std::copy(donor.nodes + donor_start, donor.nodes + donor_end, p_out.nodes + prog_start);
    std::copy(prog.nodes + prog_end,
              prog.nodes + prog.len,
              p_out.nodes + (prog_start) + (donor_end - donor_start));
    output_depth = get_depth(p_out);
  } while (output_depth >= MAX_STACK_SIZE);
  // Set the depth of the final program
  p_out.depth = output_depth;
}
void subtree_mutation(const program& prog, program& p_out, const param& params, std::mt19937& rng)
{
// Generate a random program and perform crossover
program new_program;
build_program(new_program, params, rng);
crossover(prog, new_program, p_out, params, rng);
}
/**
 * @brief Performs hoist mutation: a random subtree of `prog` is replaced by
 *        a random sub-subtree of itself, shrinking the program.
 *
 * Fix: releases any node buffer `p_out` already owns before allocating the
 * new one (previously leaked; also makes this consistent with crossover()).
 * delete[] of a null pointer (default-constructed p_out) is a no-op.
 */
void hoist_mutation(const program& prog, program& p_out, const param& params, std::mt19937& rng)
{
  // Replace program subtree with a random sub-subtree
  std::pair<int, int> prog_slice = get_subtree(prog.nodes, prog.len, rng);
  int prog_start = prog_slice.first;
  int prog_end   = prog_slice.second;
  // Pick the sub-subtree ("hoisted" part) inside the chosen subtree
  std::pair<int, int> sub_slice = get_subtree(prog.nodes + prog_start, prog_end - prog_start, rng);
  int sub_start = sub_slice.first;
  int sub_end   = sub_slice.second;
  // Update subtree indices to global indices
  sub_start += prog_start;
  sub_end += prog_start;
  p_out.len = (prog_start) + (sub_end - sub_start) + (prog.len - prog_end);
  // Release any previously owned buffer to avoid a leak
  delete[] p_out.nodes;
  p_out.nodes  = new node[p_out.len];
  p_out.metric = prog.metric;
  // Offspring = prog prefix + hoisted slice + prog suffix
  std::copy(prog.nodes, prog.nodes + prog_start, p_out.nodes);
  std::copy(prog.nodes + sub_start, prog.nodes + sub_end, p_out.nodes + prog_start);
  std::copy(prog.nodes + prog_end,
            prog.nodes + prog.len,
            p_out.nodes + (prog_start) + (sub_end - sub_start));
  // Update depth
  p_out.depth = get_depth(p_out);
}
} // namespace genetic
} // namespace cuml | 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/constants.h | /*
* Copyright (c) 2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** @file constants.h Common GPU functionality + constants for all operations */
#pragma once
namespace cuml {
namespace genetic {

// Max number of threads per block to use with tournament and evaluation
// kernels. constexpr (rather than const) guarantees compile-time usability,
// e.g. in kernel launch configuration.
constexpr int GENE_TPB = 256;

// Max size of the per-thread stack used for AST evaluation; also the depth
// bound enforced by crossover(). Used as a template default in
// execute_kernel, hence must be a constant expression.
constexpr int MAX_STACK_SIZE = 20;

}  // namespace genetic
}  // namespace cuml
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/reg_stack.cuh | /*
* Copyright (c) 2020-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <raft/util/cuda_utils.cuh>
#ifndef CUDA_PRAGMA_UNROLL
#ifdef __CUDA_ARCH__
#define CUDA_PRAGMA_UNROLL _Pragma("unroll")
#else
#define CUDA_PRAGMA_UNROLL
#endif // __CUDA_ARCH__
#endif // CUDA_PRAGMA_UNROLL
namespace cuml {
namespace genetic {
/**
 * @brief A fixed capacity stack on device currently used for AST evaluation
 *
 * The idea is to use only the registers to store the elements of the stack,
 * thereby achieving the best performance.
 *
 * Note the unusual push/pop implementation: both iterate over all MaxSize
 * slots with a predicate on the element count instead of indexing
 * regs_[elements_] directly. Dynamic indexing would force the array out of
 * registers; the fully-unrolled compare-and-select form lets the compiler
 * keep every slot in a register.
 *
 * @tparam DataT data type of the stack elements
 * @tparam MaxSize max capacity of the stack
 */
template <typename DataT, int MaxSize>
struct stack {
  explicit HDI stack() : elements_(0)
  {
    CUDA_PRAGMA_UNROLL
    for (int i = 0; i < MaxSize; ++i) {
      regs_[i] = DataT(0);
    }
  }

  /** Checks if the stack is empty */
  HDI bool empty() const { return elements_ == 0; }

  /** Current number of elements in the stack */
  HDI int size() const { return elements_; }

  /** Checks if the number of elements in the stack equal its capacity */
  HDI bool full() const { return elements_ == MaxSize; }

  /**
   * @brief Pushes the input element to the top of the stack
   *
   * @param[in] val input element to be pushed
   *
   * @note If called when the stack is already full, then it is a no-op! To keep
   *       the device-side logic simpler, it has been designed this way. Trying
   *       to push more than `MaxSize` elements leads to all sorts of incorrect
   *       behavior.
   */
  HDI void push(DataT val)
  {
    CUDA_PRAGMA_UNROLL
    for (int i = MaxSize - 1; i >= 0; --i) {
      // Exactly one slot satisfies elements_ == i (the current top)
      if (elements_ == i) {
        ++elements_;
        regs_[i] = val;
      }
    }
  }

  /**
   * @brief Lazily pops the top element from the stack
   *
   * @return pops the element and returns it, if already reached bottom, then it
   *         returns zero.
   *
   * @note If called when the stack is already empty, then it just returns a
   *       value of zero! To keep the device-side logic simpler, it has been
   *       designed this way. Trying to pop beyond the bottom of the stack leads
   *       to all sorts of incorrect behavior.
   */
  HDI DataT pop()
  {
    CUDA_PRAGMA_UNROLL
    for (int i = 0; i < MaxSize; ++i) {
      // Exactly one slot satisfies elements_ == i + 1 (the current top)
      if (elements_ == (i + 1)) {
        elements_--;
        return regs_[i];
      }
    }
    // Empty stack: return the sentinel zero value
    return DataT(0);
  }

 private:
  int elements_;       // current number of elements held
  DataT regs_[MaxSize];  // storage; intended to live entirely in registers
};  // struct stack
} // namespace genetic
} // namespace cuml
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/node.cuh | /*
* Copyright (c) 2020-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cuml/genetic/node.h>
#include <raft/util/cuda_utils.cuh>
namespace cuml {
namespace genetic {
namespace detail {
// Magnitude threshold below which denominators / log arguments are treated
// as zero by the protected operators in evaluate_node (div, inv, log).
static constexpr float MIN_VAL = 0.001f;

// A node is terminal iff it carries a value (dataset variable or literal
// constant) rather than an operator.
HDI bool is_terminal(node::type t)
{
  return t == node::type::variable || t == node::type::constant;
}

// Operators (unary/binary functions) are the non-terminals of the AST.
HDI bool is_nonterminal(node::type t) { return !is_terminal(t); }

// Number of operands a node consumes: 1 for unary ops, 2 for binary ops,
// 0 for terminals. Relies on the node::type enum laying the unary and binary
// operator ranges contiguously between their *_begin/*_end markers.
HDI int arity(node::type t)
{
  if (node::type::unary_begin <= t && t <= node::type::unary_end) { return 1; }
  if (node::type::binary_begin <= t && t <= node::type::binary_end) { return 2; }
  return 0;
}
// `data` assumed to be stored in col-major format
//
// Evaluates a single AST node for one dataset row:
//  - constants return their literal value,
//  - variables read column n.u.fid of `data` at row `idx` (col-major, so the
//    element is data[stride * fid + idx] with stride == number of rows),
//  - operators apply their function to the operands supplied in `in`
//    (in[0] only for unary ops, in[0] and in[1] for binary ops).
// div, inv and log are "protected": when the (absolute) operand falls below
// MIN_VAL they return a finite fallback instead of inf/nan.
DI float evaluate_node(
  const node& n, const float* data, const uint64_t stride, const uint64_t idx, const float* in)
{
  if (n.t == node::type::constant) {
    return n.u.val;
  } else if (n.t == node::type::variable) {
    return data[(stride * n.u.fid) + idx];
  } else {
    auto abs_inval = fabsf(in[0]), abs_inval1 = fabsf(in[1]);
    // note: keep the case statements in alphabetical order under each category
    // of operators.
    switch (n.t) {
      // binary operators
      case node::type::add: return in[0] + in[1];
      case node::type::atan2: return atan2f(in[0], in[1]);
      case node::type::div: return abs_inval1 < MIN_VAL ? 1.0f : fdividef(in[0], in[1]);
      case node::type::fdim: return fdimf(in[0], in[1]);
      case node::type::max: return fmaxf(in[0], in[1]);
      case node::type::min: return fminf(in[0], in[1]);
      case node::type::mul: return in[0] * in[1];
      case node::type::pow: return powf(in[0], in[1]);
      case node::type::sub: return in[0] - in[1];
      // unary operators
      case node::type::abs: return abs_inval;
      case node::type::acos: return acosf(in[0]);
      case node::type::acosh: return acoshf(in[0]);
      case node::type::asin: return asinf(in[0]);
      case node::type::asinh: return asinhf(in[0]);
      case node::type::atan: return atanf(in[0]);
      case node::type::atanh: return atanhf(in[0]);
      case node::type::cbrt: return cbrtf(in[0]);
      case node::type::cos: return cosf(in[0]);
      case node::type::cosh: return coshf(in[0]);
      case node::type::cube: return in[0] * in[0] * in[0];
      case node::type::exp: return expf(in[0]);
      case node::type::inv: return abs_inval < MIN_VAL ? 0.f : 1.f / in[0];
      case node::type::log: return abs_inval < MIN_VAL ? 0.f : logf(abs_inval);
      case node::type::neg: return -in[0];
      case node::type::rcbrt: return rcbrtf(in[0]);
      case node::type::rsqrt: return rsqrtf(abs_inval);
      case node::type::sin: return sinf(in[0]);
      case node::type::sinh: return sinhf(in[0]);
      case node::type::sq: return in[0] * in[0];
      case node::type::sqrt: return sqrtf(abs_inval);
      case node::type::tan: return tanf(in[0]);
      case node::type::tanh: return tanhf(in[0]);
      // shouldn't reach here!
      default: return 0.f;
    };
  }
}
} // namespace detail
} // namespace genetic
} // namespace cuml
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/node.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "node.cuh"
#include <cuml/common/utils.hpp>
namespace cuml {
namespace genetic {
// Sentinel feature id used when a node does not refer to a dataset column
const int node::kInvalidFeatureId = -1;

// Default ctor leaves the node uninitialized; callers are expected to assign
// a fully-formed node before use.
node::node() {}

// Function (operator) node; only non-terminal types are accepted.
node::node(node::type ft) : t(ft)
{
  ASSERT(is_nonterminal(), "node: ctor with `type` argument expects functions type only!");
  u.fid = kInvalidFeatureId;
}

// Variable node referring to dataset feature column `fid`
node::node(int fid) : t(node::type::variable) { u.fid = fid; }

// Constant node carrying the literal value `val`
node::node(float val) : t(node::type::constant) { u.val = val; }

// Copy operations are plain member copies (the union is copied as a whole)
node::node(const node& src) : t(src.t), u(src.u) {}

node& node::operator=(const node& src)
{
  t = src.t;
  u = src.u;
  return *this;
}

// Thin wrappers over the shared host/device helpers in detail::
bool node::is_terminal() const { return detail::is_terminal(t); }

bool node::is_nonterminal() const { return detail::is_nonterminal(t); }

int node::arity() const { return detail::arity(t); }
// Maps an enumerator's spelling to the corresponding node::type: #val
// stringifies the token, which is compared against the input string.
#define CASE(str, val) \
  if (#val == str) return node::type::val

// Parses a node type name (e.g. "add", "variable") into node::type.
// Asserts on unknown names.
node::type node::from_str(const std::string& ntype)
{
  CASE(ntype, variable);
  CASE(ntype, constant);
  // note: keep the case statements in alphabetical order under each category of
  // operators.
  // binary operators
  CASE(ntype, add);
  CASE(ntype, atan2);
  CASE(ntype, div);
  CASE(ntype, fdim);
  CASE(ntype, max);
  CASE(ntype, min);
  CASE(ntype, mul);
  CASE(ntype, pow);
  CASE(ntype, sub);
  // unary operators
  CASE(ntype, abs);
  CASE(ntype, acos);
  CASE(ntype, asin);
  CASE(ntype, atan);
  CASE(ntype, acosh);
  CASE(ntype, asinh);
  CASE(ntype, atanh);
  CASE(ntype, cbrt);
  CASE(ntype, cos);
  CASE(ntype, cosh);
  CASE(ntype, cube);
  CASE(ntype, exp);
  CASE(ntype, inv);
  CASE(ntype, log);
  CASE(ntype, neg);
  CASE(ntype, rcbrt);
  CASE(ntype, rsqrt);
  CASE(ntype, sq);
  CASE(ntype, sqrt);
  CASE(ntype, sin);
  CASE(ntype, sinh);
  CASE(ntype, tan);
  CASE(ntype, tanh);
  ASSERT(false, "node::from_str: Bad type passed '%s'!", ntype.c_str());
}
#undef CASE
} // namespace genetic
} // namespace cuml
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/genetic/genetic.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "constants.h"
#include "node.cuh"
#include <cuml/common/logger.hpp>
#include <cuml/genetic/common.h>
#include <cuml/genetic/genetic.h>
#include <cuml/genetic/program.h>
#include <raft/linalg/add.cuh>
#include <raft/linalg/unary_op.cuh>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <algorithm>
#include <numeric>
#include <random>
#include <stack>
#include <device_launch_parameters.h>
#include <rmm/device_uvector.hpp>
#include <rmm/mr/device/per_device_resource.hpp>
namespace cuml {
namespace genetic {
/**
 * @brief Simultaneously execute tournaments for all programs.
 *        The fitness values being compared are adjusted for bloat (program length),
 *        using the given parsimony coefficient.
 *
 * @param progs Device pointer to programs
 * @param win_indices Winning indices for every tournament
 * @param seeds Init seeds for choice selection
 * @param n_progs Number of programs
 * @param n_tours No of tournaments to be conducted
 * @param tour_size No of programs considered per tournament (must be <= n_progs)
 * @param criterion Selection criterion for choices (0 = min is better, 1 = max is better)
 * @param parsimony Parsimony coefficient to account for bloat
 */
__global__ void batched_tournament_kernel(const program_t progs,
                                          int* win_indices,
                                          const int* seeds,
                                          const int n_progs,
                                          const int n_tours,
                                          const int tour_size,
                                          const int criterion,
                                          const float parsimony)
{
  // One thread per tournament
  int idx = blockIdx.x * blockDim.x + threadIdx.x;
  if (idx >= n_tours) return;
  // Counter-based RNG: deterministic for a given (seed, idx) pair
  raft::random::detail::PhiloxGenerator rng(seeds[idx], idx, 0);
  int r;
  rng.next(r);
  // Define optima values: start from one uniformly drawn contestant.
  // (2 * criterion - 1) maps criterion to a +/-1 sign so the length penalty
  // always works against longer programs.
  int opt = r % n_progs;
  float opt_penalty = parsimony * progs[opt].len * (2 * criterion - 1);
  float opt_score = progs[opt].raw_fitness_ - opt_penalty;
  for (int s = 1; s < tour_size; ++s) {
    rng.next(r);
    int curr = r % n_progs;
    float curr_penalty = parsimony * progs[curr].len * (2 * criterion - 1);
    float curr_score = progs[curr].raw_fitness_ - curr_penalty;
    // Eliminate thread divergence - b takes values in {0,1}
    // All threads have same criterion but mostly have different 'b'
    int b = (opt_score < curr_score);
    if (criterion) {
      // criterion != 0: keep the contestant with the larger adjusted score
      opt = (1 - b) * opt + b * curr;
      opt_penalty = (1 - b) * opt_penalty + b * curr_penalty;
      opt_score = (1 - b) * opt_score + b * curr_score;
    } else {
      // criterion == 0: keep the contestant with the smaller adjusted score
      opt = b * opt + (1 - b) * curr;
      opt_penalty = b * opt_penalty + (1 - b) * curr_penalty;
      opt_score = b * opt_score + (1 - b) * curr_score;
    }
  }
  // Set win index
  win_indices[idx] = opt;
}
/**
* @brief Driver function for evolving a generation of programs
*
* @param h cuML handle
* @param h_oldprogs previous generation host programs
* @param d_oldprogs previous generation device programs
* @param h_nextprogs next generation host programs
* @param d_nextprogs next generation device programs
* @param n_samples No of samples in input dataset
* @param data Device pointer to input dataset
* @param y Device pointer to input predictions
* @param sample_weights Device pointer to input weights
* @param params Training hyperparameters
* @param generation Current generation id
* @param seed Random seed for generators
*/
void parallel_evolve(const raft::handle_t& h,
                     const std::vector<program>& h_oldprogs,
                     const program_t& d_oldprogs,
                     std::vector<program>& h_nextprogs,
                     program_t& d_nextprogs,
                     const int n_samples,
                     const float* data,
                     const float* y,
                     const float* sample_weights,
                     const param& params,
                     const int generation,
                     const int seed)
{
  cudaStream_t stream = h.get_stream();
  auto n_progs   = params.population_size;
  auto tour_size = params.tournament_size;
  auto n_tours   = n_progs;  // at least num_progs tournaments

  // Seed engines
  std::mt19937 h_gen(seed);       // CPU rng
  raft::random::Rng d_gen(seed);  // GPU rng

  std::uniform_real_distribution<float> dist_U(0.0f, 1.0f);

  // Build, Mutate and Run Tournaments
  if (generation == 1) {
    // Build random programs for the first generation
    for (auto i = 0; i < n_progs; ++i) {
      build_program(h_nextprogs[i], params, h_gen);
    }
  } else {
    // Choose a mutation type for every offspring by sampling against the
    // cumulative mutation probabilities. A crossover needs a second (donor)
    // parent, so each one schedules an extra tournament.
    float mut_probs[4];
    mut_probs[0] = params.p_crossover;
    mut_probs[1] = params.p_subtree_mutation;
    mut_probs[2] = params.p_hoist_mutation;
    mut_probs[3] = params.p_point_mutation;
    std::partial_sum(mut_probs, mut_probs + 4, mut_probs);

    for (auto i = 0; i < n_progs; ++i) {
      float prob = dist_U(h_gen);

      if (prob < mut_probs[0]) {
        h_nextprogs[i].mut_type = mutation_t::crossover;
        n_tours++;
      } else if (prob < mut_probs[1]) {
        h_nextprogs[i].mut_type = mutation_t::subtree;
      } else if (prob < mut_probs[2]) {
        h_nextprogs[i].mut_type = mutation_t::hoist;
      } else if (prob < mut_probs[3]) {
        h_nextprogs[i].mut_type = mutation_t::point;
      } else {
        h_nextprogs[i].mut_type = mutation_t::reproduce;
      }
    }

    // Run tournaments
    rmm::device_uvector<int> tour_seeds(n_tours, stream);
    rmm::device_uvector<int> d_win_indices(n_tours, stream);
    d_gen.uniformInt(tour_seeds.data(), n_tours, 1, INT_MAX, stream);
    auto criterion = params.criterion();
    dim3 nblks(raft::ceildiv(n_tours, GENE_TPB), 1, 1);
    batched_tournament_kernel<<<nblks, GENE_TPB, 0, stream>>>(d_oldprogs,
                                                              d_win_indices.data(),
                                                              tour_seeds.data(),
                                                              n_progs,
                                                              n_tours,
                                                              tour_size,
                                                              criterion,
                                                              params.parsimony_coefficient);
    RAFT_CUDA_TRY(cudaPeekAtLastError());

    // Make sure tournaments have finished running before copying win indices
    h.sync_stream(stream);

    // Perform host mutations. Winners [0, n_progs) are the primary parents;
    // the extra winners from donor_pos onwards serve as crossover donors.
    auto donor_pos = n_progs;
    for (auto pos = 0; pos < n_progs; ++pos) {
      auto parent_index = d_win_indices.element(pos, stream);

      if (h_nextprogs[pos].mut_type == mutation_t::crossover) {
        // Get secondary index
        auto donor_index = d_win_indices.element(donor_pos, stream);
        donor_pos++;
        crossover(
          h_oldprogs[parent_index], h_oldprogs[donor_index], h_nextprogs[pos], params, h_gen);
      } else if (h_nextprogs[pos].mut_type == mutation_t::subtree) {
        subtree_mutation(h_oldprogs[parent_index], h_nextprogs[pos], params, h_gen);
      } else if (h_nextprogs[pos].mut_type == mutation_t::hoist) {
        hoist_mutation(h_oldprogs[parent_index], h_nextprogs[pos], params, h_gen);
      } else if (h_nextprogs[pos].mut_type == mutation_t::point) {
        point_mutation(h_oldprogs[parent_index], h_nextprogs[pos], params, h_gen);
      } else if (h_nextprogs[pos].mut_type == mutation_t::reproduce) {
        h_nextprogs[pos] = h_oldprogs[parent_index];
      } else {
        // Should not come here
      }
    }
  }

  /* Memcpy individual host nodes to device and destroy previous generation device nodes
     TODO: Find a better way to do this. */
  for (auto i = 0; i < n_progs; ++i) {
    program tmp(h_nextprogs[i]);
    delete[] tmp.nodes;

    // Set current generation device nodes
    tmp.nodes = (node*)rmm::mr::get_current_device_resource()->allocate(
      h_nextprogs[i].len * sizeof(node), stream);
    raft::copy(tmp.nodes, h_nextprogs[i].nodes, h_nextprogs[i].len, stream);
    raft::copy(d_nextprogs + i, &tmp, 1, stream);

    if (generation > 1) {
      // Free device memory allocated to program nodes in previous generation.
      // BUGFIX: that buffer was allocated with the *previous* generation's
      // program length (now h_oldprogs[i].len), not h_nextprogs[i].len --
      // passing the wrong byte count breaks size-tracking memory resources.
      // (The device-to-pageable-host copy below completes before returning,
      // so tmp.nodes is safe to read here.)
      raft::copy(&tmp, d_oldprogs + i, 1, stream);
      rmm::mr::get_current_device_resource()->deallocate(
        tmp.nodes, h_oldprogs[i].len * sizeof(node), stream);
    }

    tmp.nodes = nullptr;
  }

  // Make sure all copying is done
  h.sync_stream(stream);

  // Update raw fitness for all programs
  set_batched_fitness(
    h, n_progs, d_nextprogs, h_nextprogs, params, n_samples, data, y, sample_weights);
}
float param::p_reproduce() const
{
  // Reproduction gets whatever probability mass the four mutation
  // operators leave over, clamped into [0, 1].
  const float total_mutation =
    this->p_crossover + this->p_subtree_mutation + this->p_hoist_mutation + this->p_point_mutation;
  const float leftover = 1.f - total_mutation;
  return fmaxf(0.f, fminf(leftover, 1.f));
}
int param::max_programs() const
{
  // Worst case: every generation's top program is carried over by
  // reproduction, adding one extra program per generation.
  const int extra_from_reproduction = this->generations;
  return this->population_size + extra_from_reproduction;
}
int param::criterion() const
{
  // Returns 0 when a smaller metric value is preferred (error metrics),
  // 1 when a larger value is preferred (correlation metrics),
  // and -1 for an unrecognised metric.
  switch (this->metric) {
    case metric_t::mse:
    case metric_t::logloss:
    case metric_t::mae:
    case metric_t::rmse: return 0;
    case metric_t::pearson:
    case metric_t::spearman: return 1;
    default: return -1;
  }
}
std::string stringify(const program& prog)
{
  // Render a prefix-encoded program as a human-readable, parenthesized
  // expression (e.g. "( add( X0, 0.500000) ) "). prog.nodes is dereferenced
  // on the host here, so it must be host-accessible. The arity stack tracks
  // how many operands each open function call still needs; the initial 0
  // acts as a bottom-of-stack sentinel.
  std::string eqn   = "( ";
  std::string delim = "";
  std::stack<int> ar_stack;
  ar_stack.push(0);

  for (int i = 0; i < prog.len; ++i) {
    if (prog.nodes[i].is_terminal()) {
      eqn += delim;
      if (prog.nodes[i].t == node::type::variable) {
        // variable
        eqn += "X";
        eqn += std::to_string(prog.nodes[i].u.fid);
      } else {
        // const
        eqn += std::to_string(prog.nodes[i].u.val);
      }

      // A terminal consumes one pending operand of the innermost open call...
      int end_elem = ar_stack.top();
      ar_stack.pop();
      ar_stack.push(end_elem - 1);
      // ...then close every call whose operand list is now complete,
      // propagating the completed sub-expression as an operand of its parent.
      while (ar_stack.top() == 0) {
        ar_stack.pop();
        eqn += ") ";
        if (ar_stack.empty()) { break; }
        end_elem = ar_stack.top();
        ar_stack.pop();
        ar_stack.push(end_elem - 1);
      }
      delim = ", ";
    } else {
      // Function node: open a new call and record how many operands it needs.
      ar_stack.push(prog.nodes[i].arity());
      eqn += delim;
      switch (prog.nodes[i].t) {
        // binary operators
        case node::type::add: eqn += "add("; break;
        case node::type::atan2: eqn += "atan2("; break;
        case node::type::div: eqn += "div("; break;
        case node::type::fdim: eqn += "fdim("; break;
        case node::type::max: eqn += "max("; break;
        case node::type::min: eqn += "min("; break;
        case node::type::mul: eqn += "mult("; break;
        case node::type::pow: eqn += "pow("; break;
        case node::type::sub: eqn += "sub("; break;
        // unary operators
        case node::type::abs: eqn += "abs("; break;
        case node::type::acos: eqn += "acos("; break;
        case node::type::acosh: eqn += "acosh("; break;
        case node::type::asin: eqn += "asin("; break;
        case node::type::asinh: eqn += "asinh("; break;
        case node::type::atan: eqn += "atan("; break;
        case node::type::atanh: eqn += "atanh("; break;
        case node::type::cbrt: eqn += "cbrt("; break;
        case node::type::cos: eqn += "cos("; break;
        case node::type::cosh: eqn += "cosh("; break;
        case node::type::cube: eqn += "cube("; break;
        case node::type::exp: eqn += "exp("; break;
        case node::type::inv: eqn += "inv("; break;
        case node::type::log: eqn += "log("; break;
        case node::type::neg: eqn += "neg("; break;
        case node::type::rcbrt: eqn += "rcbrt("; break;
        case node::type::rsqrt: eqn += "rsqrt("; break;
        case node::type::sin: eqn += "sin("; break;
        case node::type::sinh: eqn += "sinh("; break;
        case node::type::sq: eqn += "sq("; break;
        case node::type::sqrt: eqn += "sqrt("; break;
        case node::type::tan: eqn += "tan("; break;
        case node::type::tanh: eqn += "tanh("; break;
        default: break;
      }
      eqn += " ";
      delim = "";
    }
  }
  eqn += ")";
  return eqn;
}
// Top-level symbolic-regression training driver: evolves programs for up to
// params.generations generations (with early stopping), records per-generation
// history, and leaves the final generation's device programs in final_progs.
void symFit(const raft::handle_t& handle,
            const float* input,
            const float* labels,
            const float* sample_weights,
            const int n_rows,
            const int n_cols,
            param& params,
            program_t& final_progs,
            std::vector<std::vector<program>>& history)
{
  cudaStream_t stream = handle.get_stream();

  // Update arity map in params - Need to do this only here, as all operations will call Fit at
  // least once
  for (auto f : params.function_set) {
    // Functions in [binary_begin, binary_end] take 2 operands; all others 1.
    int ar = 1;
    if (node::type::binary_begin <= f && f <= node::type::binary_end) { ar = 2; }

    if (params.arity_set.find(ar) == params.arity_set.end()) {
      // Create map entry for current arity
      std::vector<node::type> vec_f(1, f);
      params.arity_set.insert(std::make_pair(ar, vec_f));
    } else {
      // Insert into map (deduplicated)
      std::vector<node::type> vec_f = params.arity_set.at(ar);
      if (std::find(vec_f.begin(), vec_f.end(), f) == vec_f.end()) {
        params.arity_set.at(ar).push_back(f);
      }
    }
  }

  // Check terminalRatio to dynamically set it: 0 means "auto", in which case
  // terminals are drawn proportionally to feature count vs. function count.
  bool growAuto = (params.terminalRatio == 0.0f);
  if (growAuto) {
    params.terminalRatio =
      1.0f * params.num_features / (params.num_features + params.function_set.size());
  }

  /* Initializations */
  std::vector<program> h_currprogs(params.population_size);
  std::vector<program> h_nextprogs(params.population_size);
  std::vector<float> h_fitness(params.population_size, 0.0f);

  program_t d_currprogs;  // pointer to current programs
  d_currprogs = (program_t)rmm::mr::get_current_device_resource()->allocate(
    params.population_size * sizeof(program), stream);
  program_t d_nextprogs = final_progs;  // Reuse memory already allocated for final_progs
  final_progs           = nullptr;

  std::mt19937_64 h_gen_engine(params.random_state);
  std::uniform_int_distribution<int> seed_dist;

  /* Begin training */
  auto gen          = 0;
  params.num_epochs = 0;
  while (gen < params.generations) {
    // Generate an init seed
    auto init_seed = seed_dist(h_gen_engine);

    // Evolve current generation
    parallel_evolve(handle,
                    h_currprogs,
                    d_currprogs,
                    h_nextprogs,
                    d_nextprogs,
                    n_rows,
                    input,
                    labels,
                    sample_weights,
                    params,
                    (gen + 1),
                    init_seed);

    // Update epochs
    ++params.num_epochs;

    // Update h_currprogs (deepcopy)
    h_currprogs = h_nextprogs;

    // Update evolution history, depending on the low memory flag:
    // low_memory keeps only the latest generation (after the first).
    if (!params.low_memory || gen == 0) {
      history.push_back(h_currprogs);
    } else {
      history.back() = h_currprogs;
    }

    // Swap d_currprogs(to preserve device memory)
    program_t d_tmp = d_currprogs;
    d_currprogs     = d_nextprogs;
    d_nextprogs     = d_tmp;

    // Update fitness array [host] and compute stopping criterion
    // (crit == 0: minimize fitness, crit == 1: maximize).
    auto crit    = params.criterion();
    h_fitness[0] = h_currprogs[0].raw_fitness_;
    auto opt_fit = h_fitness[0];
    for (auto i = 1; i < params.population_size; ++i) {
      h_fitness[i] = h_currprogs[i].raw_fitness_;
      if (crit == 0) {
        opt_fit = std::min(opt_fit, h_fitness[i]);
      } else {
        opt_fit = std::max(opt_fit, h_fitness[i]);
      }
    }

    // Check for stop criterion
    if ((crit == 0 && opt_fit <= params.stopping_criteria) ||
        (crit == 1 && opt_fit >= params.stopping_criteria)) {
      CUML_LOG_DEBUG(
        "Early stopping criterion reached in Generation #%d, fitness=%f", (gen + 1), opt_fit);
      break;
    }

    // Update generation
    ++gen;
  }

  // Set final generation programs: ownership of d_currprogs transfers to the
  // caller via final_progs.
  final_progs = d_currprogs;

  // Reset automatic growth parameter
  if (growAuto) { params.terminalRatio = 0.0f; }

  // Deallocate the previous generation device memory (the buffer not handed
  // back through final_progs).
  rmm::mr::get_current_device_resource()->deallocate(
    d_nextprogs, params.population_size * sizeof(program), stream);
  d_currprogs = nullptr;
  d_nextprogs = nullptr;
}
// Regression inference: run the single best program over all rows of input,
// writing one prediction per row into output.
void symRegPredict(const raft::handle_t& handle,
                   const float* input,
                   const int n_rows,
                   const program_t& best_prog,
                   float* output)
{
  // Assume best_prog is on device
  execute(handle, best_prog, n_rows, 1, input, output);
}
// Binary-classification probabilities: run the best program, then map its raw
// outputs to class probabilities via the configured transformer.
void symClfPredictProbs(const raft::handle_t& handle,
                        const float* input,
                        const int n_rows,
                        const param& params,
                        const program_t& best_prog,
                        float* output)
{
  cudaStream_t stream = handle.get_stream();

  // Assume output is of shape [n_rows, 2] in colMajor format
  execute(handle, best_prog, n_rows, 1, input, output);

  // Apply 2 map operations to get probabilities!
  // TODO: Modification needed for n_classes
  if (params.transformer == transformer_t::sigmoid) {
    // Column 1 (positive class): sigmoid of the raw program output.
    raft::linalg::unaryOp(
      output + n_rows,
      output,
      n_rows,
      [] __device__(float in) { return 1.0f / (1.0f + expf(-in)); },
      stream);
    // Column 0 (negative class): complement of column 1.
    raft::linalg::unaryOp(
      output, output + n_rows, n_rows, [] __device__(float in) { return 1.0f - in; }, stream);
  } else {
    // Only sigmoid supported for now
  }
}
// Binary-classification labels: compute class probabilities, then take the
// argmax across the two probability columns (ties go to class 1).
void symClfPredict(const raft::handle_t& handle,
                   const float* input,
                   const int n_rows,
                   const param& params,
                   const program_t& best_prog,
                   float* output)
{
  cudaStream_t stream = handle.get_stream();

  // Memory for probabilities, [n_rows, 2] column-major scratch buffer.
  rmm::device_uvector<float> probs(2 * n_rows, stream);
  symClfPredictProbs(handle, input, n_rows, params, best_prog, probs.data());

  // Take argmax along columns
  // TODO: Further modification needed for n_classes
  raft::linalg::binaryOp(
    output,
    probs.data(),
    probs.data() + n_rows,
    n_rows,
    [] __device__(float p0, float p1) { return 1.0f * (p0 <= p1); },
    stream);
}
// Feature transform: evaluate the top params.n_components programs on input,
// producing one derived feature column per program.
// NOTE(review): n_cols is not referenced in this body -- presumably kept for
// API symmetry with symFit; confirm before removing.
void symTransform(const raft::handle_t& handle,
                  const float* input,
                  const param& params,
                  const program_t& final_progs,
                  const int n_rows,
                  const int n_cols,
                  float* output)
{
  cudaStream_t stream = handle.get_stream();
  // Execute final_progs(ordered by fitness) on input
  // output of size [n_rows,hall_of_fame]
  execute(handle, final_progs, n_rows, params.n_components, input, output);
}
} // namespace genetic
} // namespace cuml
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/common/cumlHandle.hpp | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cuml/cuml_api.h>
#include <raft/core/handle.hpp>
namespace ML {
/**
* Map from integral cumlHandle_t identifiers to cumlHandle pointer protected
* by a mutex for thread-safe access.
*/
class HandleMap {
 public:
  /**
   * @brief Creates new handle object with associated handle ID and insert into map.
   *
   * Thread-safe: map access is serialized on the internal mutex.
   *
   * @param[in] stream the stream to which cuML work should be ordered.
   * @return std::pair with handle and error code. If error code is not CUML_SUCCESS
   *                   the handle is INVALID_HANDLE.
   */
  std::pair<cumlHandle_t, cumlError_t> createAndInsertHandle(cudaStream_t stream);

  /**
   * @brief Lookup pointer to handle object for handle ID in map.
   *
   * Thread-safe: map access is serialized on the internal mutex. The returned
   * pointer remains owned by the map; do not delete it.
   *
   * @return std::pair with handle and error code. If error code is not CUML_SUCCESS
   *                   the handle is INVALID_HANDLE. Error code CUML_INAVLID_HANDLE
   *                   is returned if the provided `handle` is invalid.
   */
  std::pair<raft::handle_t*, cumlError_t> lookupHandlePointer(cumlHandle_t handle) const;

  /**
   * @brief Remove handle from map and destroy associated handle object.
   *
   * @return cumlError_t CUML_SUCCESS or CUML_INVALID_HANDLE.
   *                     Error code CUML_INAVLID_HANDLE is returned if the provided
   *                     `handle` is invalid.
   */
  cumlError_t removeAndDestroyHandle(cumlHandle_t handle);

  static const cumlHandle_t INVALID_HANDLE = -1;  //!< sentinel value for invalid ID

 private:
  std::unordered_map<cumlHandle_t, raft::handle_t*> _handleMap;  //!< map from ID to pointer
  mutable std::mutex _mapMutex;                                  //!< mutex protecting the map
  cumlHandle_t _nextHandle;                                      //!< value of next handle ID
};
/// Static handle map instance (see cumlHandle.cpp)
extern HandleMap handleMap;
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/common/cumlHandle.cpp | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cumlHandle.hpp"
#include <cuml/common/logger.hpp>
#include <raft/util/cudart_utils.hpp>
// #TODO: Replace with public header when ready
#include <raft/linalg/detail/cublas_wrappers.hpp>
// #TODO: Replace with public header when ready
#include <raft/linalg/detail/cusolver_wrappers.hpp>
namespace ML {
HandleMap handleMap;
std::pair<cumlHandle_t, cumlError_t> HandleMap::createAndInsertHandle(cudaStream_t stream)
{
  // Allocates a new raft::handle_t, registers it under the next free integer
  // id, and returns {id, status}. On any failure the returned id is
  // INVALID_HANDLE and the new handle (if created) is destroyed.
  cumlError_t status = CUML_SUCCESS;
  cumlHandle_t chosen_handle;
  try {
    auto handle_ptr = new raft::handle_t{stream};
    bool inserted;
    {
      std::lock_guard<std::mutex> guard(_mapMutex);
      cumlHandle_t initial_next = _nextHandle;
      do {
        // try to insert using next free handle identifier
        chosen_handle = _nextHandle;
        inserted      = _handleMap.insert({chosen_handle, handle_ptr}).second;
        _nextHandle += 1;
      } while (!inserted && _nextHandle != initial_next);
    }
    if (!inserted) {
      // no free handle identifier available; reclaim the handle we created
      // (it was never inserted, so it would otherwise leak)
      delete handle_ptr;
      chosen_handle = INVALID_HANDLE;
      status        = CUML_ERROR_UNKNOWN;
    }
  }
  // TODO: Implement this
  // catch (const MLCommon::Exception& e)
  //{
  //    //log e.what()?
  //    status =  e.getErrorCode();
  //}
  catch (...) {
    status = CUML_ERROR_UNKNOWN;
    // BUGFIX: previously assigned CUML_ERROR_UNKNOWN (an error enum) to the
    // handle id; the documented sentinel for a bad id is INVALID_HANDLE.
    chosen_handle = INVALID_HANDLE;
  }
  return std::pair<cumlHandle_t, cumlError_t>(chosen_handle, status);
}
std::pair<raft::handle_t*, cumlError_t> HandleMap::lookupHandlePointer(cumlHandle_t handle) const
{
  // Thread-safe read of the id -> handle map; the pointer stays owned by the map.
  std::lock_guard<std::mutex> guard(_mapMutex);
  auto entry = _handleMap.find(handle);
  if (entry != _handleMap.end()) {
    return std::pair<raft::handle_t*, cumlError_t>(entry->second, CUML_SUCCESS);
  }
  return std::pair<raft::handle_t*, cumlError_t>(nullptr, CUML_INVALID_HANDLE);
}
cumlError_t HandleMap::removeAndDestroyHandle(cumlHandle_t handle)
{
  // Detach the entry while holding the lock, but run the destructor outside
  // the critical section so a slow teardown cannot block other threads.
  raft::handle_t* handle_ptr = nullptr;
  {
    std::lock_guard<std::mutex> guard(_mapMutex);
    auto entry = _handleMap.find(handle);
    if (entry == _handleMap.end()) { return CUML_INVALID_HANDLE; }
    handle_ptr = entry->second;
    _handleMap.erase(entry);
  }
  try {
    delete handle_ptr;
  }
  // TODO: Implement this
  // catch (const MLCommon::Exception& e)
  //{
  //    //log e.what()?
  //    status =  e.getErrorCode();
  //}
  catch (...) {
    return CUML_ERROR_UNKNOWN;
  }
  return CUML_SUCCESS;
}
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/common/logger.cpp | /*
* Copyright (c) 2020-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define SPDLOG_HEADER_ONLY
#include <spdlog/sinks/stdout_color_sinks.h> // NOLINT
#include <spdlog/spdlog.h> // NOLINT
#include <algorithm>
#include <cuml/common/callbackSink.hpp>
#include <cuml/common/logger.hpp>
#include <memory>
namespace ML {
std::string format(const char* fmt, va_list& vl)
{
  // Render into a fixed-size stack buffer; anything longer than the buffer
  // is truncated (vsnprintf always NUL-terminates).
  char scratch[4096];
  vsnprintf(scratch, sizeof(scratch), fmt, vl);
  return std::string{scratch};
}
std::string format(const char* fmt, ...)
{
  // Variadic front-end: package the arguments into a va_list and delegate
  // to the va_list overload.
  va_list args;
  va_start(args, fmt);
  auto rendered = format(fmt, args);
  va_end(args);
  return rendered;
}
int convert_level_to_spdlog(int level)
{
  // cuML levels grow toward verbosity while spdlog's grow toward severity,
  // so clamp into the valid range and mirror around CUML_LEVEL_TRACE.
  const int clamped = std::clamp(level, CUML_LEVEL_OFF, CUML_LEVEL_TRACE);
  return CUML_LEVEL_TRACE - clamped;
}
const std::string Logger::DefaultPattern("[%L] [%H:%M:%S.%f] %v");
// Process-wide logger accessor (Meyers singleton; initialization is
// thread-safe since C++11).
Logger& Logger::get()
{
  static Logger logger;
  return logger;
}
// Wire a callback sink into a spdlog logger named "cuml" and apply the
// default pattern and INFO threshold.
Logger::Logger()
  : sink{std::make_shared<spdlog::sinks::callback_sink_mt>()},
    logger{std::make_shared<spdlog::logger>("cuml", sink)},
    currPattern()
{
  setPattern(DefaultPattern);
  setLevel(CUML_LEVEL_INFO);
}
void Logger::setLevel(int level)
{
level = convert_level_to_spdlog(level);
logger->set_level(static_cast<spdlog::level::level_enum>(level));
}
void Logger::setPattern(const std::string& pattern)
{
  // Cache the pattern so it can be queried and later restored (PatternSetter
  // relies on this).
  currPattern = pattern;
  logger->set_pattern(pattern);
}
// Route formatted log records to a user-supplied callback on the sink.
void Logger::setCallback(spdlog::sinks::LogCallback callback) { sink->set_callback(callback); }

// Register a user-supplied flush handler on the callback sink.
void Logger::setFlush(void (*flush)()) { sink->set_flush(flush); }
bool Logger::shouldLogFor(int level) const
{
  // A message at this cuML level is emitted only if its converted spdlog
  // severity passes the logger's current threshold.
  const auto spd_level = static_cast<spdlog::level::level_enum>(convert_level_to_spdlog(level));
  return logger->should_log(spd_level);
}
int Logger::getLevel() const
{
  // Invert the cuML <-> spdlog mapping applied in convert_level_to_spdlog.
  return CUML_LEVEL_TRACE - static_cast<int>(logger->level());
}
// printf-style logging entry point: expands the format string only when the
// record would actually be emitted at the current level.
void Logger::log(int level, const char* fmt, ...)
{
  level        = convert_level_to_spdlog(level);
  auto level_e = static_cast<spdlog::level::level_enum>(level);
  // explicit check to make sure that we only expand messages when required
  if (logger->should_log(level_e)) {
    va_list vl;
    va_start(vl, fmt);
    auto msg = format(fmt, vl);
    va_end(vl);
    logger->log(level_e, msg);
  }
}
// Flush any buffered records through the underlying spdlog logger.
void Logger::flush() { logger->flush(); }
// RAII helper: swap in a new log pattern for the current scope, restoring the
// previous pattern on destruction.
PatternSetter::PatternSetter(const std::string& pattern) : prevPattern()
{
  prevPattern = Logger::get().getPattern();
  Logger::get().setPattern(pattern);
}

PatternSetter::~PatternSetter() { Logger::get().setPattern(prevPattern); }
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/common/cuml_api.cpp | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cumlHandle.hpp"
#include <cuml/common/utils.hpp>
#include <cuml/cuml_api.h>
#include <raft/util/cudart_utils.hpp>
#include <cstddef>
#include <functional>
extern "C" const char* cumlGetErrorString(cumlError_t error)
{
  // Map a cuML status code to a static, human-readable string. Every code
  // other than CUML_SUCCESS currently reads "unknown".
  if (error == CUML_SUCCESS) { return "success"; }
  return "unknown";
}
extern "C" cumlError_t cumlCreate(cumlHandle_t* handle, cudaStream_t stream)
{
  // Allocate a new handle in the global map; on failure *handle receives the
  // map's invalid-handle sentinel.
  auto created = ML::handleMap.createAndInsertHandle(stream);
  *handle      = created.first;
  return created.second;
}
extern "C" cumlError_t cumlGetStream(cumlHandle_t handle, cudaStream_t* stream)
{
  // Resolve the handle first; only touch *stream when the lookup succeeded.
  auto lookup                  = ML::handleMap.lookupHandlePointer(handle);
  raft::handle_t* handle_ptr   = lookup.first;
  cumlError_t status           = lookup.second;
  if (status != CUML_SUCCESS) { return status; }
  try {
    *stream = handle_ptr->get_stream();
  }
  // TODO: Implement this
  // catch (const MLCommon::Exception& e)
  //{
  //    //log e.what()?
  //    status =  e.getErrorCode();
  //}
  catch (...) {
    status = CUML_ERROR_UNKNOWN;
  }
  return status;
}
// Remove the handle from the global map and destroy the underlying
// raft::handle_t. Returns CUML_INVALID_HANDLE for unknown ids.
extern "C" cumlError_t cumlDestroy(cumlHandle_t handle)
{
  return ML::handleMap.removeAndDestroyHandle(handle);
}
| 0 |
rapidsai_public_repos/cuml/cpp/src | rapidsai_public_repos/cuml/cpp/src/common/nvtx.hpp | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <raft/core/nvtx.hpp>
namespace ML {
/**
 * @brief Synchronize CUDA stream and push a named nvtx range
 * @param name range name
 * @param stream stream to synchronize
 *
 * NOTE(review): despite the brief, this wrapper does not synchronize --
 * `stream` is unused and only a range push is performed; confirm callers
 * don't rely on the old sync behavior before migrating.
 */
[[deprecated("Use new raft::common::nvtx::push_range from <raft/core/nvtx.hpp>")]] inline void
PUSH_RANGE(const char* name, cudaStream_t stream)
{
  raft::common::nvtx::push_range(name);
}

/**
 * @brief Synchronize CUDA stream and pop the latest nvtx range
 * @param stream stream to synchronize
 *
 * NOTE(review): `stream` is unused; no synchronization is performed here.
 */
[[deprecated("Use new raft::common::nvtx::pop_range from <raft/core/nvtx.hpp>")]] inline void
POP_RANGE(cudaStream_t stream)
{
  raft::common::nvtx::pop_range();
}

/**
 * @brief Push a named nvtx range
 * @param name range name
 */
[[deprecated("Use new raft::common::nvtx::push_range from <raft/core/nvtx.hpp>")]] inline void
PUSH_RANGE(const char* name)
{
  raft::common::nvtx::push_range(name);
}

/** Pop the latest range */
[[deprecated("Use new raft::common::nvtx::pop_range from <raft/core/nvtx.hpp>")]] inline void
POP_RANGE()
{
  raft::common::nvtx::pop_range();
}
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp | rapidsai_public_repos/cuml/cpp/test/CMakeLists.txt | #=============================================================================
# Copyright (c) 2018-2023, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#=============================================================================
# Register this directory tree with CTest.
enable_testing()

# We use rapids-cmake testing infrastructure to allow us to run multiple
# GPU tests concurrently without causing OOM issues.
# Use the `GPUS` and `PERCENT` options to control how 'much' of the GPUs
# you need for a test:
#
# GPUS 1 PERCENT 25 -> I need 25% of a single GPU
# GPUS 1 PERCENT 100 -> all of 1 GPU
# GPUS 2 PERCENT 200 -> all of 2 GPUs (will only run this test on 2 GPU machines)
include(rapids-test)
rapids_test_init()
# ConfigureTest(PREFIX <SG|MG|PRIMS> NAME <name> <sources...>
#               [GPUS n] [PERCENT p] [CUMLPRIMS] [MPI] [ML_INCLUDE] [RAFT_DISTRIBUTED])
# Builds one gtest executable named <PREFIX>_<NAME>, links the required cuML /
# CUDA / test dependencies (options gate the optional ones), and registers it
# with rapids_test_add using the requested GPU allocation.
function(ConfigureTest)

  set(options CUMLPRIMS MPI ML_INCLUDE RAFT_DISTRIBUTED)
  set(one_value PREFIX NAME GPUS PERCENT)
  set(multi_value TARGETS CONFIGURATIONS)
  cmake_parse_arguments(_CUML_TEST "${options}" "${one_value}" "${multi_value}" ${ARGN})
  # Default GPU budget when neither GPUS nor PERCENT was given: 15% of one GPU.
  if(NOT DEFINED _CUML_TEST_GPUS AND NOT DEFINED _CUML_TEST_PERCENT)
    set(_CUML_TEST_GPUS 1)
    set(_CUML_TEST_PERCENT 15)
  endif()
  if(NOT DEFINED _CUML_TEST_GPUS)
    set(_CUML_TEST_GPUS 1)
  endif()
  if(NOT DEFINED _CUML_TEST_PERCENT)
    set(_CUML_TEST_PERCENT 100)
  endif()

  string(PREPEND _CUML_TEST_NAME "${_CUML_TEST_PREFIX}_")
  add_executable(${_CUML_TEST_NAME} ${_CUML_TEST_UNPARSED_ARGUMENTS})
  target_link_libraries(${_CUML_TEST_NAME}
    PRIVATE
      ${CUML_CPP_TARGET}
      $<$<BOOL:BUILD_CUML_C_LIBRARY>:${CUML_C_TARGET}>
      CUDA::cublas${_ctk_static_suffix}
      CUDA::curand${_ctk_static_suffix}
      CUDA::cusolver${_ctk_static_suffix}
      CUDA::cudart${_ctk_static_suffix}
      CUDA::cusparse${_ctk_static_suffix}
      $<$<BOOL:${LINK_CUFFT}>:CUDA::cufft${_ctk_static_suffix_cufft}>
      rmm::rmm
      raft::raft
      $<$<BOOL:${CUML_RAFT_COMPILED}>:raft::compiled>
      GTest::gtest
      GTest::gtest_main
      GTest::gmock
      ${OpenMP_CXX_LIB_NAMES}
      Threads::Threads
      $<$<BOOL:${_CUML_TEST_CUMLPRIMS}>:cumlprims_mg::cumlprims_mg>
      $<$<BOOL:${_CUML_TEST_MPI}>:${MPI_CXX_LIBRARIES}>
      $<$<BOOL:${_CUML_TEST_RAFT_DISTRIBUTED}>:raft::distributed>
      ${TREELITE_LIBS}
      $<TARGET_NAME_IF_EXISTS:conda_env>
  )
  target_compile_options(${_CUML_TEST_NAME}
        PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:${CUML_CXX_FLAGS}>"
                "$<$<COMPILE_LANGUAGE:CUDA>:${CUML_CUDA_FLAGS}>"
  )
  target_include_directories(${_CUML_TEST_NAME}
    PRIVATE
      $<$<BOOL:${_CUML_TEST_ML_INCLUDE}>:$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../include>>
      $<$<BOOL:${_CUML_TEST_ML_INCLUDE}>:$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../src>>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../src_prims>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/prims>
  )
  set_target_properties(
    ${_CUML_TEST_NAME}
    PROPERTIES INSTALL_RPATH "\$ORIGIN/../../../lib"
  )
  # PRIMS tests install into their own component so they can ship separately.
  set(_CUML_TEST_COMPONENT_NAME testing)
  if(_CUML_TEST_PREFIX STREQUAL "PRIMS")
    set(_CUML_TEST_COMPONENT_NAME cumlprims_testing)
  endif()

  rapids_test_add(
    NAME ${_CUML_TEST_NAME}
    COMMAND ${_CUML_TEST_NAME}
    GPUS ${_CUML_TEST_GPUS}
    PERCENT ${_CUML_TEST_PERCENT}
    INSTALL_COMPONENT_SET ${_CUML_TEST_COMPONENT_NAME}
  )
endfunction()
##############################################################################
# - build ml_test executable -------------------------------------------------
# Each single-GPU algorithm gets its own gtest executable, gated on that
# algorithm's build flag (or `all_algo`).

if(all_algo)
  ConfigureTest(PREFIX SG NAME LOGGER_TEST sg/logger.cpp ML_INCLUDE)
endif()

if(all_algo OR dbscan_algo)
  ConfigureTest(PREFIX SG NAME DBSCAN_TEST sg/dbscan_test.cu ML_INCLUDE)
endif()

if(all_algo OR explainer_algo)
  ConfigureTest(PREFIX SG NAME SHAP_KERNEL_TEST sg/shap_kernel.cu ML_INCLUDE)
endif()

if(all_algo OR fil_algo)
  ConfigureTest(PREFIX SG NAME FIL_CHILD_INDEX_TEST sg/fil_child_index_test.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME FIL_TEST sg/fil_test.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME FNV_HASH_TEST sg/fnv_hash_test.cpp ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME MULTI_SUM_TEST sg/multi_sum_test.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME HOST_BUFFER_TEST sg/experimental/fil/raft_proto/buffer.cpp ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME DEVICE_BUFFER_TEST sg/experimental/fil/raft_proto/buffer.cu ML_INCLUDE)
endif()

# todo: organize linear models better
if(all_algo OR linearregression_algo OR ridge_algo OR lasso_algo OR logisticregression_algo)
  ConfigureTest(PREFIX SG NAME OLS_TEST sg/ols.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME RIDGE_TEST sg/ridge.cu ML_INCLUDE)
endif()

if(all_algo OR genetic_algo)
  ConfigureTest(PREFIX SG NAME GENETIC_NODE_TEST sg/genetic/node_test.cpp ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME GENETIC_PARAM_TEST sg/genetic/param_test.cu ML_INCLUDE)
endif()

if("${CMAKE_CUDA_COMPILER_VERSION}" VERSION_GREATER_EQUAL "11.2")
  # An HDBSCAN gtest is failing w/ CUDA 11.2 for some reason.
  if(all_algo OR hdbscan_algo)
    ConfigureTest(PREFIX SG NAME HDBSCAN_TEST sg/hdbscan_test.cu ML_INCLUDE)
  endif()
endif()

if(all_algo OR holtwinters_algo)
  ConfigureTest(PREFIX SG NAME HOLTWINTERS_TEST sg/holtwinters_test.cu ML_INCLUDE)
endif()

if(all_algo OR knn_algo)
  ConfigureTest(PREFIX SG NAME KNN_TEST sg/knn_test.cu ML_INCLUDE)
endif()

if(all_algo OR hierarchicalclustering_algo)
  ConfigureTest(PREFIX SG NAME LINKAGE_TEST sg/linkage_test.cu ML_INCLUDE)
endif()

if(all_algo OR metrics_algo)
  ConfigureTest(PREFIX SG NAME TRUSTWORTHINESS_TEST sg/trustworthiness_test.cu ML_INCLUDE)
endif()

if(all_algo OR pca_algo)
  ConfigureTest(PREFIX SG NAME PCA_TEST sg/pca_test.cu ML_INCLUDE)
endif()

if(all_algo OR randomforest_algo)
  ConfigureTest(PREFIX SG NAME RF_TEST sg/rf_test.cu ML_INCLUDE)
endif()

if(all_algo OR randomprojection_algo)
  ConfigureTest(PREFIX SG NAME RPROJ_TEST sg/rproj_test.cu ML_INCLUDE)
endif()

# todo: separate solvers better
if(all_algo OR solvers_algo)
  ConfigureTest(PREFIX SG NAME CD_TEST sg/cd_test.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME LARS_TEST sg/lars_test.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME QUASI_NEWTON sg/quasi_newton.cu ML_INCLUDE)
  ConfigureTest(PREFIX SG NAME SGD_TEST sg/sgd.cu ML_INCLUDE)
endif()

if(all_algo OR svm_algo)
  ConfigureTest(PREFIX SG NAME SVC_TEST sg/svc_test.cu ML_INCLUDE)
  # The SVC Test tries to verify it has no memory leaks by checking
  # how much free memory on the GPU exists after execution. This
  # check requires no other GPU tests to be running or it fails
  # since it thinks it has a memory leak
  set_tests_properties(SG_SVC_TEST PROPERTIES RUN_SERIAL ON)
endif()

if(all_algo OR tsne_algo)
  ConfigureTest(PREFIX SG NAME TSNE_TEST sg/tsne_test.cu ML_INCLUDE)
endif()

if(all_algo OR tsvd_algo)
  ConfigureTest(PREFIX SG NAME TSVD_TEST sg/tsvd_test.cu ML_INCLUDE)
endif()

if(all_algo OR umap_algo)
  ConfigureTest(PREFIX SG NAME UMAP_PARAMETRIZABLE_TEST sg/umap_parametrizable_test.cu ML_INCLUDE)
endif()

if(BUILD_CUML_C_LIBRARY)
  ConfigureTest(PREFIX SG NAME HANDLE_TEST sg/handle_test.cu ML_INCLUDE)
endif()
#############################################################################
# - build test_ml_mg executable ----------------------------------------------
# Multi-GPU tests require MPI; without it the whole section is skipped.

if(BUILD_CUML_MG_TESTS)

  # This test needs to be rewritten to use the MPI comms, not the std comms, and moved
  # to RAFT: https://github.com/rapidsai/cuml/issues/5058
  #ConfigureTest(PREFIX MG NAME KMEANS_TEST mg/kmeans_test.cu NCCL CUMLPRIMS ML_INCLUDE)

  if(MPI_CXX_FOUND)
    # (please keep the filenames in alphabetical order)
    ConfigureTest(PREFIX MG NAME KNN_TEST mg/knn.cu CUMLPRIMS MPI RAFT_DISTRIBUTED ML_INCLUDE)
    ConfigureTest(PREFIX MG NAME KNN_CLASSIFY_TEST mg/knn_classify.cu CUMLPRIMS MPI RAFT_DISTRIBUTED ML_INCLUDE)
    ConfigureTest(PREFIX MG NAME KNN_REGRESS_TEST mg/knn_regress.cu CUMLPRIMS MPI RAFT_DISTRIBUTED ML_INCLUDE)
    ConfigureTest(PREFIX MG NAME MAIN_TEST mg/main.cu CUMLPRIMS MPI RAFT_DISTRIBUTED ML_INCLUDE)
    ConfigureTest(PREFIX MG NAME PCA_TEST mg/pca.cu CUMLPRIMS MPI RAFT_DISTRIBUTED ML_INCLUDE)
  else(MPI_CXX_FOUND)
    message("OpenMPI not found. Skipping MultiGPU tests '${CUML_MG_TEST_TARGET}'")
  endif()
endif()
##############################################################################
# - build prims_test executable ----------------------------------------------
if(BUILD_PRIMS_TESTS)
# (please keep the filenames in alphabetical order)
ConfigureTest(PREFIX PRIMS NAME ADD_SUB_DEV_SCALAR_TEST prims/add_sub_dev_scalar.cu)
ConfigureTest(PREFIX PRIMS NAME BATCHED_CSR_TEST prims/batched/csr.cu)
ConfigureTest(PREFIX PRIMS NAME BATCHED_GEMV_TEST prims/batched/gemv.cu)
ConfigureTest(PREFIX PRIMS NAME BATCHED_MAKE_SYMM_TEST prims/batched/make_symm.cu)
ConfigureTest(PREFIX PRIMS NAME BATCHED_MATRIX_TEST prims/batched/matrix.cu)
ConfigureTest(PREFIX PRIMS NAME DECOUPLED_LOOKBACK_TEST prims/decoupled_lookback.cu)
ConfigureTest(PREFIX PRIMS NAME DEVICE_UTILS_TEST prims/device_utils.cu)
ConfigureTest(PREFIX PRIMS NAME ELTWISE2D_TEST prims/eltwise2d.cu)
ConfigureTest(PREFIX PRIMS NAME FAST_INT_DIV_TEST prims/fast_int_div.cu)
ConfigureTest(PREFIX PRIMS NAME FILLNA_TEST prims/fillna.cu)
ConfigureTest(PREFIX PRIMS NAME GRID_SYNC_TEST prims/grid_sync.cu)
ConfigureTest(PREFIX PRIMS NAME HINGE_TEST prims/hinge.cu)
ConfigureTest(PREFIX PRIMS NAME JONES_TRANSFORM_TEST prims/jones_transform.cu)
ConfigureTest(PREFIX PRIMS NAME KNN_CLASSIFY_TEST prims/knn_classify.cu)
ConfigureTest(PREFIX PRIMS NAME KNN_REGRESSION_TEST prims/knn_regression.cu)
ConfigureTest(PREFIX PRIMS NAME KSELECTION_TEST prims/kselection.cu)
ConfigureTest(PREFIX PRIMS NAME LINALG_BLOCK_TEST prims/linalg_block.cu)
ConfigureTest(PREFIX PRIMS NAME LINEARREG_TEST prims/linearReg.cu)
ConfigureTest(PREFIX PRIMS NAME LOG_TEST prims/log.cu)
ConfigureTest(PREFIX PRIMS NAME LOGISTICREG_TEST prims/logisticReg.cu)
ConfigureTest(PREFIX PRIMS NAME MAKE_ARIMA_TEST prims/make_arima.cu)
ConfigureTest(PREFIX PRIMS NAME PENALTY_TEST prims/penalty.cu)
ConfigureTest(PREFIX PRIMS NAME SIGMOID_TEST prims/sigmoid.cu)
rapids_test_install_relocatable(INSTALL_COMPONENT_SET cumlprims_testing DESTINATION bin/gtests/libcuml_prims)
endif()
rapids_test_install_relocatable(INSTALL_COMPONENT_SET testing DESTINATION bin/gtests/libcuml)
##############################################################################
# - build C-API test library -------------------------------------------------
if(BUILD_CUML_C_LIBRARY)
enable_language(C)
add_library(${CUML_C_TEST_TARGET} SHARED
c_api/dbscan_api_test.c
c_api/glm_api_test.c
c_api/holtwinters_api_test.c
c_api/knn_api_test.c
c_api/svm_api_test.c
)
target_link_libraries(${CUML_C_TEST_TARGET} PUBLIC ${CUML_C_TARGET})
endif()
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/knn_classify.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <gtest/gtest.h>
#include <iostream>
#include <raft/label/classlabels.cuh>
#include <raft/random/make_blobs.cuh>
#include <raft/spatial/knn/knn.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <selection/knn.cuh>
#include <vector>
namespace MLCommon {
namespace Selection {
/** Parameters for one kNN-classify round-trip test case. */
struct KNNClassifyInputs {
  int rows;           // number of samples in the generated blob dataset
  int cols;           // number of features per sample
  int n_labels;       // number of blob centers / classes
  float cluster_std;  // standard deviation of each generated cluster
  int k;              // number of neighbors used for classification
};
/**
 * Fixture: generates a labeled blob dataset, runs brute-force kNN of the
 * dataset against itself, then classifies each point from its neighbors.
 * With tight clusters (small cluster_std) every point should recover its
 * own training label, which is what the Fit test asserts.
 */
class KNNClassifyTest : public ::testing::TestWithParam<KNNClassifyInputs> {
 public:
  KNNClassifyTest()
    : params(::testing::TestWithParam<KNNClassifyInputs>::GetParam()),
      stream(handle.get_stream()),
      train_samples(params.rows * params.cols, stream),
      train_labels(params.rows, stream),
      pred_labels(params.rows, stream),
      knn_indices(params.rows * params.k, stream),
      knn_dists(params.rows * params.k, stream)
  {
    basicTest();
  }

 protected:
  void basicTest()
  {
    // Synthesize a clustered dataset; labels identify the generating blob.
    raft::random::make_blobs<float, int>(train_samples.data(),
                                         train_labels.data(),
                                         params.rows,
                                         params.cols,
                                         params.n_labels,
                                         stream,
                                         true,
                                         nullptr,
                                         nullptr,
                                         params.cluster_std);

    // Collect the distinct label values actually produced by make_blobs.
    rmm::device_uvector<int> unique_labels(0, stream);
    auto n_classes =
      raft::label::getUniquelabels(unique_labels, train_labels.data(), params.rows, stream);

    // Query the training set against itself (a single index partition).
    std::vector<float*> ptrs(1);
    std::vector<int> sizes(1);
    ptrs[0]  = train_samples.data();
    sizes[0] = params.rows;
    raft::spatial::knn::brute_force_knn(handle,
                                        ptrs,
                                        sizes,
                                        params.cols,
                                        train_samples.data(),
                                        params.rows,
                                        knn_indices.data(),
                                        knn_dists.data(),
                                        params.k);

    std::vector<int*> y;
    y.push_back(train_labels.data());
    std::vector<int*> uniq_labels;
    uniq_labels.push_back(unique_labels.data());
    std::vector<int> n_unique;
    n_unique.push_back(n_classes);

    // Predict each point's label from its k nearest neighbors.
    knn_classify(handle,
                 pred_labels.data(),
                 knn_indices.data(),
                 y,
                 params.rows,
                 params.rows,
                 params.k,
                 uniq_labels,
                 n_unique);
    handle.sync_stream(stream);
  }

 protected:
  KNNClassifyInputs params;
  raft::handle_t handle;
  cudaStream_t stream;
  rmm::device_uvector<float> train_samples;  // row-major [rows x cols]
  rmm::device_uvector<int> train_labels;     // ground-truth blob ids
  rmm::device_uvector<int> pred_labels;      // labels produced by knn_classify
  rmm::device_uvector<int64_t> knn_indices;  // [rows x k] neighbor indices
  rmm::device_uvector<float> knn_dists;      // [rows x k] neighbor distances
};
typedef KNNClassifyTest KNNClassifyTestF;

// With tight clusters, each point's predicted label must equal its own
// ground-truth label exactly.
TEST_P(KNNClassifyTestF, Fit)
{
  ASSERT_TRUE(
    devArrMatch(train_labels.data(), pred_labels.data(), params.rows, MLCommon::Compare<int>()));
}

// {rows, cols, n_labels, cluster_std, k}
const std::vector<KNNClassifyInputs> inputsf = {{100, 10, 2, 0.01f, 2},
                                                {1000, 10, 5, 0.01f, 2},
                                                {10000, 10, 5, 0.01f, 2},
                                                {100, 10, 2, 0.01f, 10},
                                                {1000, 10, 5, 0.01f, 10},
                                                {10000, 10, 5, 0.01f, 10},
                                                {100, 10, 2, 0.01f, 50},
                                                {1000, 10, 5, 0.01f, 50},
                                                {10000, 10, 5, 0.01f, 50}};

INSTANTIATE_TEST_CASE_P(KNNClassifyTest, KNNClassifyTestF, ::testing::ValuesIn(inputsf));
}; // end namespace Selection
}; // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/jones_transform.cu | /*
 * Copyright (c) 2019-2022, NVIDIA CORPORATION.
 *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <algorithm>
#include <gtest/gtest.h>
#include <iostream>
#include <raft/core/handle.hpp>
#include <raft/util/cudart_utils.hpp>
#include <random>
#include <rmm/device_uvector.hpp>
#include <timeSeries/jones_transform.cuh>
namespace MLCommon {
namespace TimeSeries {
// parameter structure definition
struct JonesTransParam {
  int batchSize;     // number of coefficient sets transformed at once
  int pValue;        // number of coefficients (AR/MA order) per set
  double tolerance;  // comparison tolerance vs the host golden values
};
// test fixture class
/**
 * Fixture validating the Jones parameter transforms used to map unconstrained
 * values into the stationary (AR) / invertible (MA) coefficient region.
 * Golden AR/MA outputs are computed on the host with the recursive formula
 * and compared against the CUDA implementation; the inverse transforms are
 * checked by round-tripping back to the original inputs.
 *
 * NOTE: the host reference is computed in double precision, and this fixture
 * is only instantiated with DataT = double below.
 */
template <typename DataT>
class JonesTransTest : public ::testing::TestWithParam<JonesTransParam> {
 public:
  // Initializer list reordered to match member declaration order (members are
  // initialized in declaration order regardless; this silences -Wreorder).
  JonesTransTest()
    : stream(handle.get_stream()),
      params(::testing::TestWithParam<JonesTransParam>::GetParam()),
      d_golden_ar_trans(0, stream),
      d_golden_ma_trans(0, stream),
      d_computed_ar_trans(0, stream),
      d_computed_ma_trans(0, stream),
      d_computed_ar_invtrans(0, stream),
      d_computed_ma_invtrans(0, stream),
      d_params(0, stream),
      nElements(params.batchSize * params.pValue)
  {
  }

 protected:
  void SetUp() override
  {
    // Generate random test input, stored in row major order.
    std::vector<double> arr1(nElements, 0);
    std::random_device rd;
    std::default_random_engine dre(rd());
    std::uniform_real_distribution<double> realGenerator(0, 1);
    std::generate(arr1.begin(), arr1.end(), [&]() { return realGenerator(dre); });

    //>>>>>>>>> AR transform golden output generation<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    // Fixed: allocation sizes previously used sizeof(double*) instead of
    // sizeof(double) (coincidentally equal on 64-bit platforms, but wrong in
    // principle), and the buffers were never freed (one leak per test case).
    double* newParams = (double*)malloc(nElements * sizeof(double));
    double* tmp       = (double*)malloc(params.pValue * sizeof(double));

    // For every model in the batch...
    for (int i = 0; i < params.batchSize; ++i) {
      // Store the partial autocorrelation of each AR coefficient of a given
      // batch member in newParams, and keep a temporary working copy.
      for (int j = 0; j < params.pValue; ++j) {
        newParams[i * params.pValue + j] = ((1 - exp(-1 * arr1[i * params.pValue + j])) /
                                            (1 + exp(-1 * arr1[i * params.pValue + j])));
        tmp[j] = newParams[i * params.pValue + j];
      }
      // Apply Jones' recursion: phi(j,k) = phi(j-1,k) - a(j)*phi(j-1,j-k)
      for (int j = 1; j < params.pValue; ++j) {
        // a is the partial autocorrelation for the jth coefficient.
        DataT a = newParams[i * params.pValue + j];
        /* Recursive transformation with:
           - lhs tmp[k]                        => phi(j,k)
           - rhs tmp[k]                        => phi(j-1,k)
           - a                                 => a(j)
           - newParams[i*params.pValue+j-k-1]  => phi(j-1, j-k)
        */
        for (int k = 0; k < j; ++k) {
          tmp[k] -= a * newParams[i * params.pValue + (j - k - 1)];
        }
        // Copy back for the next iteration.
        for (int iter = 0; iter < j; ++iter) {
          newParams[i * params.pValue + iter] = tmp[iter];
        }
      }
    }

    // Allocate and initialize device memory.
    d_golden_ar_trans.resize(nElements, stream);
    d_computed_ar_trans.resize(nElements, stream);
    d_params.resize(nElements, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(
      d_golden_ar_trans.data(), 0, d_golden_ar_trans.size() * sizeof(DataT), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(
      d_computed_ar_trans.data(), 0, d_computed_ar_trans.size() * sizeof(DataT), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(d_params.data(), 0, d_params.size() * sizeof(DataT), stream));
    raft::update_device(d_params.data(), &arr1[0], (size_t)nElements, stream);
    raft::update_device(d_golden_ar_trans.data(), newParams, (size_t)nElements, stream);

    // Run the AR transform CUDA implementation.
    MLCommon::TimeSeries::jones_transform(d_params.data(),
                                          params.batchSize,
                                          params.pValue,
                                          d_computed_ar_trans.data(),
                                          true,
                                          false,
                                          stream,
                                          false);

    //>>>>>>>>> MA transform golden output generation<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    // Same recursion as above but with '+=' (invertibility transform).
    for (int i = 0; i < params.batchSize; ++i) {
      for (int j = 0; j < params.pValue; ++j) {
        newParams[i * params.pValue + j] = ((1 - exp(-1 * arr1[i * params.pValue + j])) /
                                            (1 + exp(-1 * arr1[i * params.pValue + j])));
        tmp[j] = newParams[i * params.pValue + j];
      }
      for (int j = 1; j < params.pValue; ++j) {
        DataT a = newParams[i * params.pValue + j];
        for (int k = 0; k < j; ++k) {
          tmp[k] += a * newParams[i * params.pValue + (j - k - 1)];
        }
        for (int iter = 0; iter < j; ++iter) {
          newParams[i * params.pValue + iter] = tmp[iter];
        }
      }
    }
    d_golden_ma_trans.resize(nElements, stream);
    d_computed_ma_trans.resize(nElements, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(
      d_golden_ma_trans.data(), 0, d_golden_ma_trans.size() * sizeof(DataT), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(
      d_computed_ma_trans.data(), 0, d_computed_ma_trans.size() * sizeof(DataT), stream));
    raft::update_device(d_golden_ma_trans.data(), newParams, (size_t)nElements, stream);

    // Run the MA transform CUDA implementation.
    MLCommon::TimeSeries::jones_transform(d_params.data(),
                                          params.batchSize,
                                          params.pValue,
                                          d_computed_ma_trans.data(),
                                          false,
                                          false,
                                          stream,
                                          false);

    //>>>>>>>>> AR inverse transform <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    d_computed_ar_invtrans.resize(nElements, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(
      d_computed_ar_invtrans.data(), 0, d_computed_ar_invtrans.size() * sizeof(DataT), stream));
    MLCommon::TimeSeries::jones_transform(d_computed_ar_trans.data(),
                                          params.batchSize,
                                          params.pValue,
                                          d_computed_ar_invtrans.data(),
                                          true,
                                          true,
                                          stream);

    //>>>>>>>>> MA inverse transform <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    d_computed_ma_invtrans.resize(nElements, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(
      d_computed_ma_invtrans.data(), 0, d_computed_ma_invtrans.size() * sizeof(DataT), stream));
    MLCommon::TimeSeries::jones_transform(d_computed_ma_trans.data(),
                                          params.batchSize,
                                          params.pValue,
                                          d_computed_ma_invtrans.data(),
                                          false,
                                          true,
                                          stream);

    // The async H2D copies above read from newParams; wait for them to finish
    // before releasing the host buffers. (Fix: these were never freed.)
    handle.sync_stream(stream);
    free(newParams);
    free(tmp);
  }

  raft::handle_t handle;
  cudaStream_t stream = 0;
  // Test data.
  JonesTransParam params;
  rmm::device_uvector<DataT> d_golden_ar_trans, d_golden_ma_trans, d_computed_ar_trans,
    d_computed_ma_trans, d_computed_ar_invtrans, d_computed_ma_invtrans, d_params;
  int nElements = -1;  // batchSize * pValue, set in the constructor
};
// setting test parameter values
// {batchSize, pValue, tolerance} — the largest batches use a looser tolerance.
const std::vector<JonesTransParam> inputs = {{500, 4, 0.001},
                                             {500, 3, 0.001},
                                             {500, 2, 0.001},
                                             {500, 1, 0.001},
                                             {5000, 4, 0.001},
                                             {5000, 3, 0.001},
                                             {5000, 2, 0.001},
                                             {5000, 1, 0.001},
                                             {4, 4, 0.001},
                                             {4, 3, 0.001},
                                             {4, 2, 0.001},
                                             {4, 1, 0.001},
                                             {500000, 4, 0.0001},
                                             {500000, 3, 0.0001},
                                             {500000, 2, 0.0001},
                                             {500000, 1, 0.0001}};
// writing the test suite
typedef JonesTransTest<double> JonesTransTestClass;

TEST_P(JonesTransTestClass, Result)
{
  // Forward transforms must match the host golden values.
  ASSERT_TRUE(MLCommon::devArrMatch(d_golden_ar_trans.data(),
                                    d_computed_ar_trans.data(),
                                    nElements,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(d_golden_ma_trans.data(),
                                    d_computed_ma_trans.data(),
                                    nElements,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  /*
  Test verifying the inversion property:
  initially generated random coefficients -> ar_param_transform() / ma_param_transform() ->
  transformed coefficients -> ar_param_inverse_transform()/ma_param_inverse_transform() ->
  initially generated random coefficients
  */
  ASSERT_TRUE(MLCommon::devArrMatch(d_computed_ma_invtrans.data(),
                                    d_params.data(),
                                    nElements,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(d_computed_ar_invtrans.data(),
                                    d_params.data(),
                                    nElements,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
}

INSTANTIATE_TEST_CASE_P(JonesTrans, JonesTransTestClass, ::testing::ValuesIn(inputs));
} // end namespace TimeSeries
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/dist_adj.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <distance/distance.cuh>
#include <gtest/gtest.h>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace Distance {
// Reference kernel: one thread per output element (midx, nidx) accumulates
// the squared L2 distance between row midx of x and row nidx of y, then
// writes true iff it is within eps.
template <typename DataType>
__global__ void naiveDistanceAdjKernel(bool* dist,
                                       const DataType* x,
                                       const DataType* y,
                                       int m,
                                       int n,
                                       int k,
                                       DataType eps,
                                       bool isRowMajor)
{
  int midx = threadIdx.x + blockIdx.x * blockDim.x;
  int nidx = threadIdx.y + blockIdx.y * blockDim.y;
  if (midx >= m || nidx >= n) return;
  DataType acc = DataType(0);
  for (int i = 0; i < k; ++i) {
    // Index arithmetic supports both row-major and column-major layouts.
    int xidx  = isRowMajor ? i + midx * k : i * m + midx;
    int yidx  = isRowMajor ? i + nidx * k : i * n + nidx;
    auto diff = x[xidx] - y[yidx];
    acc += diff * diff;
  }
  int outidx   = isRowMajor ? midx * n + nidx : midx + m * nidx;
  dist[outidx] = acc <= eps;
}
/**
 * Host launcher for the reference adjacency kernel.
 *
 * @param dist       [out] boolean adjacency matrix of size m*n
 * @param x          [in] first input matrix, m rows x k cols
 * @param y          [in] second input matrix, n rows x k cols
 * @param eps        squared-L2 threshold for adjacency
 * @param isRowMajor storage layout of x, y and dist
 */
template <typename DataType>
void naiveDistanceAdj(bool* dist,
                      const DataType* x,
                      const DataType* y,
                      int m,
                      int n,
                      int k,
                      DataType eps,
                      bool isRowMajor)
{
  static const dim3 TPB(16, 32, 1);
  dim3 nblks(raft::ceildiv(m, (int)TPB.x), raft::ceildiv(n, (int)TPB.y), 1);
  // Fixed: kernel launch chevrons were malformed ("< <<nblks, TPB>>(...)").
  naiveDistanceAdjKernel<DataType><<<nblks, TPB>>>(dist, x, y, m, n, k, eps, isRowMajor);
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
// Parameters for one adjacency test: x is [m x k], y is [n x k],
// the output is [m x n]; eps is the squared-L2 adjacency threshold.
template <typename DataType>
struct DistanceAdjInputs {
  DataType eps;
  int m, n, k;
  bool isRowMajor;
  unsigned long long int seed;  // RNG seed for the uniform inputs
};
// gtest parameter-printer stub; intentionally prints nothing.
template <typename DataType>
::std::ostream& operator<<(::std::ostream& os, const DistanceAdjInputs<DataType>& dims)
{
  return os;
}
/**
 * Fixture: computes an epsilon-neighborhood adjacency matrix (squared L2
 * distance <= eps) with the naive reference kernel and with the fused
 * distance primitive (L2Expanded + boolean fin_op); the TEST_P bodies then
 * compare the two results elementwise.
 */
template <typename DataType>
class DistanceAdjTest : public ::testing::TestWithParam<DistanceAdjInputs<DataType>> {
 protected:
  // Declared before the uvector members so it is initialized first and can be
  // used by their constructors; the real stream is created in SetUp.
  cudaStream_t stream = 0;

 public:
  DistanceAdjTest() : x(0, stream), y(0, stream), dist_ref(0, stream), dist(0, stream) {}

  void SetUp() override
  {
    // Fixed: the GetParam() qualification was syntactically malformed
    // ("TestWithParam < Inputs<T>::GetParam()").
    params = ::testing::TestWithParam<DistanceAdjInputs<DataType>>::GetParam();
    raft::random::Rng r(params.seed);
    auto m          = params.m;
    auto n          = params.n;
    auto k          = params.k;
    bool isRowMajor = params.isRowMajor;
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));

    // Fixed: these buffers hold element arrays, so they must be
    // rmm::device_uvector (device_scalar stores one element; its
    // (value, stream) constructor would not allocate m*k elements).
    x.resize(m * k, stream);
    y.resize(n * k, stream);
    dist_ref.resize(m * n, stream);
    dist.resize(m * n, stream);
    r.uniform(x.data(), m * k, DataType(-1.0), DataType(1.0), stream);
    r.uniform(y.data(), n * k, DataType(-1.0), DataType(1.0), stream);

    DataType threshold = params.eps;

    // Reference result.
    naiveDistanceAdj(dist_ref.data(), x.data(), y.data(), m, n, k, threshold, isRowMajor);

    size_t worksize =
      getWorkspaceSize<raft::distance::DistanceType::L2Expanded, DataType, DataType, bool>(
        x.data(), y.data(), m, n, k);
    rmm::device_uvector<char> workspace(worksize, stream);

    // Fuse the threshold comparison into the distance epilogue.
    auto fin_op = [threshold] __device__(DataType d_val, int g_d_idx) {
      return d_val <= threshold;
    };
    distance<raft::distance::DistanceType::L2Expanded, DataType, DataType, bool>(x.data(),
                                                                                 y.data(),
                                                                                 dist.data(),
                                                                                 m,
                                                                                 n,
                                                                                 k,
                                                                                 workspace.data(),
                                                                                 worksize,
                                                                                 fin_op,
                                                                                 stream,
                                                                                 isRowMajor);
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }

 protected:
  DistanceAdjInputs<DataType> params;
  rmm::device_uvector<DataType> x, y;        // inputs: [m x k], [n x k]
  rmm::device_uvector<bool> dist_ref, dist;  // adjacency matrices: [m x n]
};
// {eps, m, n, k, isRowMajor, seed}
const std::vector<DistanceAdjInputs<float>> inputsf = {
  {0.01f, 1024, 1024, 32, true, 1234ULL},
  {0.1f, 1024, 1024, 32, true, 1234ULL},
  {1.0f, 1024, 1024, 32, true, 1234ULL},
  {10.0f, 1024, 1024, 32, true, 1234ULL},
  {0.01f, 1024, 1024, 32, false, 1234ULL},
  {0.1f, 1024, 1024, 32, false, 1234ULL},
  {1.0f, 1024, 1024, 32, false, 1234ULL},
  {10.0f, 1024, 1024, 32, false, 1234ULL},
};
typedef DistanceAdjTest<float> DistanceAdjTestF;
// Reference kernel and fused primitive must agree elementwise.
TEST_P(DistanceAdjTestF, Result)
{
  // devArrMatch iterates (rows, cols); swap dims for column-major storage.
  int m = params.isRowMajor ? params.m : params.n;
  int n = params.isRowMajor ? params.n : params.m;
  ASSERT_TRUE(devArrMatch(dist_ref.data(), dist.data(), m, n, MLCommon::Compare<bool>()));
}
INSTANTIATE_TEST_CASE_P(DistanceAdjTests, DistanceAdjTestF, ::testing::ValuesIn(inputsf));
// Same cases as inputsf, in double precision.
const std::vector<DistanceAdjInputs<double>> inputsd = {
  {0.01, 1024, 1024, 32, true, 1234ULL},
  {0.1, 1024, 1024, 32, true, 1234ULL},
  {1.0, 1024, 1024, 32, true, 1234ULL},
  {10.0, 1024, 1024, 32, true, 1234ULL},
  {0.01, 1024, 1024, 32, false, 1234ULL},
  {0.1, 1024, 1024, 32, false, 1234ULL},
  {1.0, 1024, 1024, 32, false, 1234ULL},
  {10.0, 1024, 1024, 32, false, 1234ULL},
};
typedef DistanceAdjTest<double> DistanceAdjTestD;
TEST_P(DistanceAdjTestD, Result)
{
  // devArrMatch iterates (rows, cols); swap dims for column-major storage.
  int m = params.isRowMajor ? params.m : params.n;
  int n = params.isRowMajor ? params.n : params.m;
  ASSERT_TRUE(devArrMatch(dist_ref.data(), dist.data(), m, n, MLCommon::Compare<bool>()));
}
INSTANTIATE_TEST_CASE_P(DistanceAdjTests, DistanceAdjTestD, ::testing::ValuesIn(inputsd));
} // namespace Distance
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/log.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <functions/log.cuh>
#include <gtest/gtest.h>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
namespace MLCommon {
namespace Functions {
// {tolerance, len} — len must be 4 to match the hard-coded fixtures in SetUp.
template <typename T>
struct LogInputs {
  T tolerance;
  int len;
};
// gtest parameter-printer stub; intentionally prints nothing.
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const LogInputs<T>& dims)
{
  return os;
}
/**
 * Fixture for the f_log primitive: uploads a fixed 4-element input, runs the
 * device log on it, and holds precomputed natural-log reference values for
 * the TEST_P bodies to compare against.
 */
template <typename T>
class LogTest : public ::testing::TestWithParam<LogInputs<T>> {
 protected:
  LogTest() : result(0, stream), result_ref(0, stream) {}

  void SetUp() override
  {
    params = ::testing::TestWithParam<LogInputs<T>>::GetParam();
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    int len = params.len;

    rmm::device_uvector<T> data(len, stream);
    // Fixed: was `T data_h[params.len] = {...}`, a variable-length array with
    // an initializer, which is not valid C++. All test inputs use len == 4 to
    // match these four fixture values.
    const T data_h[] = {2.1, 4.5, 0.34, 10.0};
    raft::update_device(data.data(), data_h, len, stream);

    result.resize(len, stream);
    result_ref.resize(len, stream);
    // Expected values: natural logs of the inputs above.
    const T result_ref_h[] = {0.74193734, 1.5040774, -1.07880966, 2.30258509};
    raft::update_device(result_ref.data(), result_ref_h, len, stream);

    // T(1) scale factor — presumably plain ln(x); see functions/log.cuh.
    f_log(result.data(), data.data(), T(1), len, stream);
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }

 protected:
  cudaStream_t stream = 0;  // created and destroyed inside SetUp
  LogInputs<T> params;
  rmm::device_uvector<T> result;      // device output of f_log
  rmm::device_uvector<T> result_ref;  // expected values
};
// len must be 4 to match the fixtures hard-coded in SetUp.
const std::vector<LogInputs<float>> inputsf2 = {{0.001f, 4}};
const std::vector<LogInputs<double>> inputsd2 = {{0.001, 4}};

typedef LogTest<float> LogTestValF;
// Device f_log output must match the precomputed natural logs.
TEST_P(LogTestValF, Result)
{
  ASSERT_TRUE(devArrMatch(result_ref.data(),
                          result.data(),
                          params.len,
                          MLCommon::CompareApproxAbs<float>(params.tolerance)));
}

typedef LogTest<double> LogTestValD;
TEST_P(LogTestValD, Result)
{
  ASSERT_TRUE(devArrMatch(result_ref.data(),
                          result.data(),
                          params.len,
                          MLCommon::CompareApproxAbs<double>(params.tolerance)));
}

INSTANTIATE_TEST_CASE_P(LogTests, LogTestValF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(LogTests, LogTestValD, ::testing::ValuesIn(inputsd2));
} // end namespace Functions
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/logisticReg.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <functions/logisticReg.cuh>
#include <gtest/gtest.h>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace Functions {
// Parameters for the logistic regression loss/gradient tests.
template <typename T>
struct LogRegLossInputs {
  T tolerance;  // comparison tolerance for loss and gradient values
  T n_rows;     // number of samples (NOTE(review): stored as T but used as an int count)
  T n_cols;     // number of features (same remark)
  int len;      // n_rows * n_cols, size of the input matrix
};
/**
 * Fixture for logisticRegLoss / logisticRegLossGrads: uploads a small fixed
 * problem (3 samples x 2 features), evaluates the loss and its gradient for
 * every penalty (NONE, L1, L2, ELASTICNET), and holds hand-computed reference
 * values for the TEST_P bodies to compare against.
 *
 * `in` is re-uploaded before most prim calls because the prims may use it as
 * scratch space.
 */
template <typename T>
class LogRegLossTest : public ::testing::TestWithParam<LogRegLossInputs<T>> {
 public:
  // Initializer list ordered to match member declaration order (members are
  // initialized in declaration order regardless; this silences -Wreorder).
  LogRegLossTest()
    : params(::testing::TestWithParam<LogRegLossInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      in(params.len, stream),
      out(1, stream),
      out_lasso(1, stream),
      out_ridge(1, stream),
      out_elasticnet(1, stream),
      out_ref(1, stream),
      out_lasso_ref(1, stream),
      out_ridge_ref(1, stream),
      out_elasticnet_ref(1, stream),
      out_grad(params.n_cols, stream),
      out_lasso_grad(params.n_cols, stream),
      out_ridge_grad(params.n_cols, stream),
      out_elasticnet_grad(params.n_cols, stream),
      out_grad_ref(params.n_cols, stream),
      out_lasso_grad_ref(params.n_cols, stream),
      out_ridge_grad_ref(params.n_cols, stream),
      out_elasticnet_grad_ref(params.n_cols, stream)
  {
  }

 protected:
  void SetUp() override
  {
    int len    = params.len;
    int n_rows = params.n_rows;
    int n_cols = params.n_cols;

    rmm::device_uvector<T> labels(params.n_rows, stream);
    rmm::device_uvector<T> coef(params.n_cols, stream);

    // Fixed: these host fixtures were variable-length arrays with
    // initializers (`T h_in[len] = {...}`), which is not valid C++. The
    // sizes are fixed by the test inputs (len == 6, n_rows == 3, n_cols == 2).
    const T h_in[] = {0.1, 0.35, -0.9, -1.4, 2.0, 3.1};
    raft::update_device(in.data(), h_in, len, stream);

    const T h_labels[] = {0.3, 2.0, -1.1};
    raft::update_device(labels.data(), h_labels, n_rows, stream);

    const T h_coef[] = {0.35, -0.24};
    raft::update_device(coef.data(), h_coef, n_cols, stream);

    // Hand-computed reference losses for each penalty.
    const T h_out_ref[] = {0.38752545};
    raft::update_device(out_ref.data(), h_out_ref, 1, stream);

    const T h_out_lasso_ref[] = {0.74152};
    raft::update_device(out_lasso_ref.data(), h_out_lasso_ref, 1, stream);

    const T h_out_ridge_ref[] = {0.4955854};
    raft::update_device(out_ridge_ref.data(), h_out_ridge_ref, 1, stream);

    const T h_out_elasticnet_ref[] = {0.618555};
    raft::update_device(out_elasticnet_ref.data(), h_out_elasticnet_ref, 1, stream);

    // Hand-computed reference gradients for each penalty.
    const T h_out_grad_ref[] = {-0.58284, 0.207666};
    raft::update_device(out_grad_ref.data(), h_out_grad_ref, n_cols, stream);

    const T h_out_lasso_grad_ref[] = {0.0171, -0.39233};
    raft::update_device(out_lasso_grad_ref.data(), h_out_lasso_grad_ref, n_cols, stream);

    const T h_out_ridge_grad_ref[] = {-0.16284, -0.080333};
    raft::update_device(out_ridge_grad_ref.data(), h_out_ridge_grad_ref, n_cols, stream);

    const T h_out_elasticnet_grad_ref[] = {-0.07284, -0.23633};
    raft::update_device(out_elasticnet_grad_ref.data(), h_out_elasticnet_grad_ref, n_cols, stream);

    T alpha    = 0.6;
    T l1_ratio = 0.5;

    // No penalty.
    logisticRegLoss(handle,
                    in.data(),
                    params.n_rows,
                    params.n_cols,
                    labels.data(),
                    coef.data(),
                    out.data(),
                    penalty::NONE,
                    alpha,
                    l1_ratio,
                    stream);
    raft::update_device(in.data(), h_in, len, stream);
    logisticRegLossGrads(handle,
                         in.data(),
                         params.n_rows,
                         params.n_cols,
                         labels.data(),
                         coef.data(),
                         out_grad.data(),
                         penalty::NONE,
                         alpha,
                         l1_ratio,
                         stream);
    raft::update_device(in.data(), h_in, len, stream);

    // L1 (lasso) penalty.
    logisticRegLoss(handle,
                    in.data(),
                    params.n_rows,
                    params.n_cols,
                    labels.data(),
                    coef.data(),
                    out_lasso.data(),
                    penalty::L1,
                    alpha,
                    l1_ratio,
                    stream);
    raft::update_device(in.data(), h_in, len, stream);
    logisticRegLossGrads(handle,
                         in.data(),
                         params.n_rows,
                         params.n_cols,
                         labels.data(),
                         coef.data(),
                         out_lasso_grad.data(),
                         penalty::L1,
                         alpha,
                         l1_ratio,
                         stream);
    raft::update_device(in.data(), h_in, len, stream);

    // L2 (ridge) penalty.
    // NOTE(review): unlike the other cases, `in` is not re-uploaded between
    // the ridge/elasticnet loss and grads calls — kept as in the original.
    logisticRegLoss(handle,
                    in.data(),
                    params.n_rows,
                    params.n_cols,
                    labels.data(),
                    coef.data(),
                    out_ridge.data(),
                    penalty::L2,
                    alpha,
                    l1_ratio,
                    stream);
    logisticRegLossGrads(handle,
                         in.data(),
                         params.n_rows,
                         params.n_cols,
                         labels.data(),
                         coef.data(),
                         out_ridge_grad.data(),
                         penalty::L2,
                         alpha,
                         l1_ratio,
                         stream);
    raft::update_device(in.data(), h_in, len, stream);

    // Elastic-net penalty (mix of L1 and L2 controlled by l1_ratio).
    logisticRegLoss(handle,
                    in.data(),
                    params.n_rows,
                    params.n_cols,
                    labels.data(),
                    coef.data(),
                    out_elasticnet.data(),
                    penalty::ELASTICNET,
                    alpha,
                    l1_ratio,
                    stream);
    logisticRegLossGrads(handle,
                         in.data(),
                         params.n_rows,
                         params.n_cols,
                         labels.data(),
                         coef.data(),
                         out_elasticnet_grad.data(),
                         penalty::ELASTICNET,
                         alpha,
                         l1_ratio,
                         stream);
    raft::update_device(in.data(), h_in, len, stream);
  }

 protected:
  LogRegLossInputs<T> params;
  raft::handle_t handle;
  cudaStream_t stream = 0;
  rmm::device_uvector<T> in, out, out_lasso, out_ridge, out_elasticnet;
  rmm::device_uvector<T> out_ref, out_lasso_ref, out_ridge_ref, out_elasticnet_ref;
  rmm::device_uvector<T> out_grad, out_lasso_grad, out_ridge_grad, out_elasticnet_grad;
  rmm::device_uvector<T> out_grad_ref, out_lasso_grad_ref, out_ridge_grad_ref,
    out_elasticnet_grad_ref;
};
// {tolerance, n_rows, n_cols, len} — must match the fixtures in SetUp.
const std::vector<LogRegLossInputs<float>> inputsf = {{0.01f, 3, 2, 6}};
const std::vector<LogRegLossInputs<double>> inputsd = {{0.01, 3, 2, 6}};

typedef LogRegLossTest<float> LogRegLossTestF;
// Losses and gradients for every penalty must match the hand-computed refs.
TEST_P(LogRegLossTestF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ref.data(), out.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_ref.data(),
                                    out_elasticnet.data(),
                                    1,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_grad_ref.data(),
                                    out_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_lasso_grad_ref.data(),
                                    out_lasso_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_ridge_grad_ref.data(),
                                    out_ridge_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_grad_ref.data(),
                                    out_elasticnet_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
}

typedef LogRegLossTest<double> LogRegLossTestD;
TEST_P(LogRegLossTestD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ref.data(), out.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_ref.data(),
                                    out_elasticnet.data(),
                                    1,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_grad_ref.data(),
                                    out_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_lasso_grad_ref.data(),
                                    out_lasso_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_ridge_grad_ref.data(),
                                    out_ridge_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_grad_ref.data(),
                                    out_elasticnet_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
}

INSTANTIATE_TEST_CASE_P(LogRegLossTests, LogRegLossTestF, ::testing::ValuesIn(inputsf));
INSTANTIATE_TEST_CASE_P(LogRegLossTests, LogRegLossTestD, ::testing::ValuesIn(inputsd));
} // end namespace Functions
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/fillna.cu | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <random>
#include <vector>
#include <raft/core/handle.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include "test_utils.h"
#include <timeSeries/fillna.cuh>
namespace MLCommon {
namespace TimeSeries {
using namespace std;
// Describes how NaNs are injected into one series of the batch.
struct SeriesDescriptor {
  int leading_nan;   // number of NaNs at the start of the series
  int random_nan;    // number of NaNs at random positions (positions may repeat)
  int trailing_nan;  // number of NaNs at the end of the series
};
// Test parameters for the fillna primitive.
template <typename T>
struct FillnaInputs {
  int batch_size;                             // number of series in the batch
  int n_obs;                                  // observations per series
  std::vector<SeriesDescriptor> descriptors;  // NaN layout, one entry per series
  unsigned long long int seed;                // RNG seed for data and NaN positions
  T tolerance;                                // tolerance for result comparison
};
// Stream operator required by gtest to print the test parameters
// (intentionally prints nothing).
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const FillnaInputs<T>& dims)
{
  return os;
}
// Tests the fillna time-series primitive against a CPU reference:
// leading/trailing NaN runs are filled with the nearest valid value and
// interior NaN runs are filled by linear interpolation between their valid
// neighbors.
template <typename T>
class FillnaTest : public ::testing::TestWithParam<FillnaInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;

    params = ::testing::TestWithParam<FillnaInputs<T>>::GetParam();

    rmm::device_uvector<T> y(params.n_obs * params.batch_size, handle.get_stream());
    std::vector<T> h_y(params.n_obs * params.batch_size);

    /* Generate random data */
    std::default_random_engine generator(params.seed);
    std::uniform_real_distribution<T> real_distribution(-2.0, 2.0);
    std::uniform_int_distribution<int> int_distribution(0, params.n_obs - 1);
    for (int i = 0; i < params.n_obs * params.batch_size; i++)
      h_y[i] = real_distribution(generator);

    /* Inject NaNs per each series' descriptor: a leading run, a trailing
     * run, then randomly placed NaNs (which may overlap each other). */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.descriptors[bid].leading_nan; i++)
        h_y[bid * params.n_obs + i] = nan("");
      for (int i = 0; i < params.descriptors[bid].trailing_nan; i++)
        h_y[(bid + 1) * params.n_obs - 1 - i] = nan("");
      for (int i = 0; i < params.descriptors[bid].random_nan; i++) {
        h_y[bid * params.n_obs + int_distribution(generator)] = nan("");
      }
    }

    /* Copy to device */
    raft::update_device(
      y.data(), h_y.data(), params.n_obs * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());

    /* Compute using tested prims */
    fillna(y.data(), params.batch_size, params.n_obs, handle.get_stream());

    /* Compute reference results.
     * Note: this is done with a sliding window: we find ranges of missing
     * values bordered by valid values at indices `start` and `end`.
     * Special cases on extremities are also handled with the special values
     * -1 for `start` and `n_obs` for `end`.
     */
    for (int bid = 0; bid < params.batch_size; bid++) {
      int start = -1;
      int end   = 0;
      while (start < params.n_obs - 1) {
        if (!std::isnan(h_y[bid * params.n_obs + start + 1])) {
          // Next value is valid: advance the window past it.
          start++;
          end = start + 1;
        } else if (end < params.n_obs && std::isnan(h_y[bid * params.n_obs + end])) {
          // Grow the current NaN run.
          end++;
        } else {
          if (start == -1) {
            // Leading NaN run: back-fill with the first valid value.
            T value = h_y[bid * params.n_obs + end];
            for (int j = 0; j < end; j++) {
              h_y[bid * params.n_obs + j] = value;
            }
          } else if (end == params.n_obs) {
            // Trailing NaN run: forward-fill with the last valid value.
            T value = h_y[bid * params.n_obs + start];
            for (int j = start + 1; j < params.n_obs; j++) {
              h_y[bid * params.n_obs + j] = value;
            }
          } else {
            // Interior run: linear interpolation between the two borders.
            T value0 = h_y[bid * params.n_obs + start];
            T value1 = h_y[bid * params.n_obs + end];
            for (int j = start + 1; j < end; j++) {
              T coef                      = (T)(j - start) / (T)(end - start);
              h_y[bid * params.n_obs + j] = ((T)1 - coef) * value0 + coef * value1;
            }
          }
          start = end;
          end++;
        }
      }
    }

    /* Check results */
    match = devArrMatchHost(h_y.data(),
                            y.data(),
                            params.n_obs * params.batch_size,
                            MLCommon::CompareApprox<T>(params.tolerance),
                            handle.get_stream());
  }

  void SetUp() override { basicTest(); }

  void TearDown() override {}

 protected:
  FillnaInputs<T> params;
  // Result of the device/host comparison, checked by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Test cases: {batch_size, n_obs, descriptors, seed, tolerance}.
// Fix: the third case lists 5 series descriptors, so batch_size must be 5
// (it was 4, which silently left the last descriptor unused).
const std::vector<FillnaInputs<float>> inputsf = {
  {1, 20, {{1, 5, 1}}, 12345U, 1e-6},
  {3, 42, {{10, 0, 0}, {0, 10, 0}, {0, 0, 10}}, 12345U, 1e-6},
  {5, 100, {{70, 0, 0}, {0, 20, 0}, {0, 0, 63}, {31, 25, 33}, {20, 15, 42}}, 12345U, 1e-6},
};
// Double-precision test cases; same fix as the float list: the third case
// has 5 descriptors, so batch_size must be 5 (was 4).
const std::vector<FillnaInputs<double>> inputsd = {
  {1, 20, {{1, 5, 1}}, 12345U, 1e-6},
  {3, 42, {{10, 0, 0}, {0, 10, 0}, {0, 0, 10}}, 12345U, 1e-6},
  {5, 100, {{70, 0, 0}, {0, 20, 0}, {0, 0, 63}, {31, 25, 33}, {20, 15, 42}}, 12345U, 1e-6},
};
// Instantiate the fillna tests for float and double precision.
typedef FillnaTest<float> FillnaTestF;
TEST_P(FillnaTestF, Result) { EXPECT_TRUE(match); }

typedef FillnaTest<double> FillnaTestD;
TEST_P(FillnaTestD, Result) { EXPECT_TRUE(match); }

INSTANTIATE_TEST_CASE_P(FillnaTests, FillnaTestF, ::testing::ValuesIn(inputsf));
INSTANTIATE_TEST_CASE_P(FillnaTests, FillnaTestD, ::testing::ValuesIn(inputsd));
} // namespace TimeSeries
} // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/linearReg.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <functions/linearReg.cuh>
#include <gtest/gtest.h>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace Functions {
/**
 * @brief Parameters for the linear-regression loss/gradient tests.
 *
 * Fix: n_rows and n_cols are element counts and are used as integer sizes
 * everywhere (array lengths, buffer sizes), so they are declared as int
 * instead of the floating-point type T, avoiding implicit float->int
 * conversions at every use site. Aggregate initialization of the existing
 * cases ({0.01f, 3, 2, 6}) is unaffected.
 */
template <typename T>
struct LinRegLossInputs {
  T tolerance;  // comparison tolerance for losses and gradients
  int n_rows;   // number of samples
  int n_cols;   // number of features
  int len;      // total number of input elements (n_rows * n_cols)
};
/**
 * Typed fixture for the linearRegLoss / linearRegLossGrads primitives.
 * SetUp runs both primitives on a small fixed problem (3 samples, 2
 * features) for each penalty (NONE, L1, L2, ELASTICNET) and uploads
 * hand-computed reference values; the TEST_P bodies do the comparisons.
 */
template <typename T>
class LinRegLossTest : public ::testing::TestWithParam<LinRegLossInputs<T>> {
 public:
  // All device buffers are allocated up front on the handle's stream.
  LinRegLossTest()
    : params(::testing::TestWithParam<LinRegLossInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      in(params.len, stream),
      out(1, stream),
      out_lasso(1, stream),
      out_ridge(1, stream),
      out_elasticnet(1, stream),
      out_grad(params.n_cols, stream),
      out_lasso_grad(params.n_cols, stream),
      out_ridge_grad(params.n_cols, stream),
      out_elasticnet_grad(params.n_cols, stream),
      out_ref(1, stream),
      out_lasso_ref(1, stream),
      out_ridge_ref(1, stream),
      out_elasticnet_ref(1, stream),
      out_grad_ref(params.n_cols, stream),
      out_lasso_grad_ref(params.n_cols, stream),
      out_ridge_grad_ref(params.n_cols, stream),
      out_elasticnet_grad_ref(params.n_cols, stream)
  {
  }

 protected:
  void SetUp() override
  {
    int len    = params.len;
    int n_rows = params.n_rows;
    int n_cols = params.n_cols;

    rmm::device_uvector<T> labels(params.n_rows, stream);
    rmm::device_uvector<T> coef(params.n_cols, stream);

    // Fixed input data, labels and coefficients.
    // Note: h_in/h_labels/h_coef are variable-length arrays (a compiler
    // extension, not standard C++).
    T h_in[len] = {0.1, 0.35, -0.9, -1.4, 2.0, 3.1};
    raft::update_device(in.data(), h_in, len, stream);

    T h_labels[n_rows] = {0.3, 2.0, -1.1};
    raft::update_device(labels.data(), h_labels, n_rows, stream);

    T h_coef[n_cols] = {0.35, -0.24};
    raft::update_device(coef.data(), h_coef, n_cols, stream);

    // Hand-computed reference losses (scalars) for each penalty type.
    T h_out_ref[1] = {1.854842};
    raft::update_device(out_ref.data(), h_out_ref, 1, stream);

    T h_out_lasso_ref[1] = {2.2088};
    raft::update_device(out_lasso_ref.data(), h_out_lasso_ref, 1, stream);

    T h_out_ridge_ref[1] = {1.9629};
    raft::update_device(out_ridge_ref.data(), h_out_ridge_ref, 1, stream);

    T h_out_elasticnet_ref[1] = {2.0858};
    raft::update_device(out_elasticnet_ref.data(), h_out_elasticnet_ref, 1, stream);

    // Hand-computed reference gradients (n_cols values) per penalty type.
    T h_out_grad_ref[n_cols] = {-0.56995, -3.12486};
    raft::update_device(out_grad_ref.data(), h_out_grad_ref, n_cols, stream);

    T h_out_lasso_grad_ref[n_cols] = {0.03005, -3.724866};
    raft::update_device(out_lasso_grad_ref.data(), h_out_lasso_grad_ref, n_cols, stream);

    T h_out_ridge_grad_ref[n_cols] = {-0.14995, -3.412866};
    raft::update_device(out_ridge_grad_ref.data(), h_out_ridge_grad_ref, n_cols, stream);

    T h_out_elasticnet_grad_ref[n_cols] = {-0.05995, -3.568866};
    raft::update_device(out_elasticnet_grad_ref.data(), h_out_elasticnet_grad_ref, n_cols, stream);

    T alpha    = 0.6;
    T l1_ratio = 0.5;

    // NOTE(review): `in` is re-uploaded after most primitive calls below,
    // presumably in case a call mutates the input buffer; the L2 and
    // ELASTICNET loss->grads pairs skip the refresh in between — confirm the
    // loss kernels leave `in` untouched.

    // No penalty: loss, then gradients.
    linearRegLoss(handle,
                  in.data(),
                  params.n_rows,
                  params.n_cols,
                  labels.data(),
                  coef.data(),
                  out.data(),
                  penalty::NONE,
                  alpha,
                  l1_ratio,
                  stream);

    raft::update_device(in.data(), h_in, len, stream);

    linearRegLossGrads(handle,
                       in.data(),
                       params.n_rows,
                       params.n_cols,
                       labels.data(),
                       coef.data(),
                       out_grad.data(),
                       penalty::NONE,
                       alpha,
                       l1_ratio,
                       stream);

    raft::update_device(in.data(), h_in, len, stream);

    // L1 (lasso) penalty.
    linearRegLoss(handle,
                  in.data(),
                  params.n_rows,
                  params.n_cols,
                  labels.data(),
                  coef.data(),
                  out_lasso.data(),
                  penalty::L1,
                  alpha,
                  l1_ratio,
                  stream);

    raft::update_device(in.data(), h_in, len, stream);

    linearRegLossGrads(handle,
                       in.data(),
                       params.n_rows,
                       params.n_cols,
                       labels.data(),
                       coef.data(),
                       out_lasso_grad.data(),
                       penalty::L1,
                       alpha,
                       l1_ratio,
                       stream);

    raft::update_device(in.data(), h_in, len, stream);

    // L2 (ridge) penalty.
    linearRegLoss(handle,
                  in.data(),
                  params.n_rows,
                  params.n_cols,
                  labels.data(),
                  coef.data(),
                  out_ridge.data(),
                  penalty::L2,
                  alpha,
                  l1_ratio,
                  stream);

    linearRegLossGrads(handle,
                       in.data(),
                       params.n_rows,
                       params.n_cols,
                       labels.data(),
                       coef.data(),
                       out_ridge_grad.data(),
                       penalty::L2,
                       alpha,
                       l1_ratio,
                       stream);

    raft::update_device(in.data(), h_in, len, stream);

    // Elastic-net penalty (mix of L1 and L2 via l1_ratio).
    linearRegLoss(handle,
                  in.data(),
                  params.n_rows,
                  params.n_cols,
                  labels.data(),
                  coef.data(),
                  out_elasticnet.data(),
                  penalty::ELASTICNET,
                  alpha,
                  l1_ratio,
                  stream);

    linearRegLossGrads(handle,
                       in.data(),
                       params.n_rows,
                       params.n_cols,
                       labels.data(),
                       coef.data(),
                       out_elasticnet_grad.data(),
                       penalty::ELASTICNET,
                       alpha,
                       l1_ratio,
                       stream);

    raft::update_device(in.data(), h_in, len, stream);
  }

 protected:
  LinRegLossInputs<T> params;
  raft::handle_t handle;
  cudaStream_t stream;

  // Computed losses and their references.
  rmm::device_uvector<T> in, out, out_lasso, out_ridge, out_elasticnet;
  rmm::device_uvector<T> out_ref, out_lasso_ref, out_ridge_ref, out_elasticnet_ref;
  // Computed gradients and their references.
  rmm::device_uvector<T> out_grad, out_lasso_grad, out_ridge_grad, out_elasticnet_grad;
  rmm::device_uvector<T> out_grad_ref, out_lasso_grad_ref, out_ridge_grad_ref,
    out_elasticnet_grad_ref;
};
// Single test case: tolerance 0.01, 3 samples, 2 features, 6 input elements.
const std::vector<LinRegLossInputs<float>> inputsf = {{0.01f, 3, 2, 6}};
const std::vector<LinRegLossInputs<double>> inputsd = {{0.01, 3, 2, 6}};
// Float precision: each computed loss (scalar) and gradient (n_cols values)
// must match its precomputed reference within params.tolerance.
typedef LinRegLossTest<float> LinRegLossTestF;
TEST_P(LinRegLossTestF, Result)
{
  ASSERT_TRUE(
    devArrMatch(out_ref.data(), out.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_ref.data(),
                          out_elasticnet.data(),
                          1,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_grad_ref.data(),
                          out_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_lasso_grad_ref.data(),
                          out_lasso_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_ridge_grad_ref.data(),
                          out_ridge_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_grad_ref.data(),
                          out_elasticnet_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<float>(params.tolerance)));
}
// Double precision: same checks as the float variant.
typedef LinRegLossTest<double> LinRegLossTestD;
TEST_P(LinRegLossTestD, Result)
{
  ASSERT_TRUE(
    devArrMatch(out_ref.data(), out.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_ref.data(),
                          out_elasticnet.data(),
                          1,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_grad_ref.data(),
                          out_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_lasso_grad_ref.data(),
                          out_lasso_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_ridge_grad_ref.data(),
                          out_ridge_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_grad_ref.data(),
                          out_elasticnet_grad.data(),
                          params.n_cols,
                          MLCommon::CompareApprox<double>(params.tolerance)));
}

// Run the parameterized tests over the input lists above.
INSTANTIATE_TEST_CASE_P(LinRegLossTests, LinRegLossTestF, ::testing::ValuesIn(inputsf));
INSTANTIATE_TEST_CASE_P(LinRegLossTests, LinRegLossTestD, ::testing::ValuesIn(inputsd));
} // end namespace Functions
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/distance_base.cuh | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <distance/distance.cuh>
#include <gtest/gtest.h>
#include <raft/core/resource/cuda_stream.hpp>
#include <raft/core/resources.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace Distance {
// Naive reference kernel for (squared) L2 distances: one thread per output
// element (midx, nidx). Supports both row- and column-major layouts and
// applies a final sqrt for the "Sqrt" distance types.
template <typename DataType>
__global__ void naiveDistanceKernel(DataType* dist,
                                    const DataType* x,
                                    const DataType* y,
                                    int m,
                                    int n,
                                    int k,
                                    raft::distance::DistanceType type,
                                    bool isRowMajor)
{
  int midx = threadIdx.x + blockIdx.x * blockDim.x;
  int nidx = threadIdx.y + blockIdx.y * blockDim.y;
  if (midx >= m || nidx >= n) return;
  DataType acc = DataType(0);
  for (int i = 0; i < k; ++i) {
    int xidx  = isRowMajor ? i + midx * k : i * m + midx;
    int yidx  = isRowMajor ? i + nidx * k : i * n + nidx;
    auto diff = x[xidx] - y[yidx];
    acc += diff * diff;
  }
  if (type == raft::distance::DistanceType::L2SqrtExpanded ||
      type == raft::distance::DistanceType::L2SqrtUnexpanded)
    acc = raft::mySqrt(acc);
  int outidx   = isRowMajor ? midx * n + nidx : midx + m * nidx;
  dist[outidx] = acc;
}
// Naive reference kernel for L1 (Manhattan) distance: one thread per output
// element, accumulating absolute differences over the k features.
template <typename DataType>
__global__ void naiveL1DistanceKernel(
  DataType* dist, const DataType* x, const DataType* y, int m, int n, int k, bool isRowMajor)
{
  int midx = threadIdx.x + blockIdx.x * blockDim.x;
  int nidx = threadIdx.y + blockIdx.y * blockDim.y;
  if (midx >= m || nidx >= n) { return; }

  DataType acc = DataType(0);
  for (int i = 0; i < k; ++i) {
    int xidx = isRowMajor ? i + midx * k : i * m + midx;
    int yidx = isRowMajor ? i + nidx * k : i * n + nidx;

    auto a    = x[xidx];
    auto b    = y[yidx];
    auto diff = (a > b) ? (a - b) : (b - a);
    acc += diff;
  }

  int outidx   = isRowMajor ? midx * n + nidx : midx + m * nidx;
  dist[outidx] = acc;
}
// Naive reference kernel for cosine distance: accumulates the squared norms
// of both vectors and their dot product, then writes 1 - cosine similarity.
template <typename DataType>
__global__ void naiveCosineDistanceKernel(
  DataType* dist, const DataType* x, const DataType* y, int m, int n, int k, bool isRowMajor)
{
  int midx = threadIdx.x + blockIdx.x * blockDim.x;
  int nidx = threadIdx.y + blockIdx.y * blockDim.y;
  if (midx >= m || nidx >= n) { return; }

  DataType acc_a  = DataType(0);
  DataType acc_b  = DataType(0);
  DataType acc_ab = DataType(0);

  for (int i = 0; i < k; ++i) {
    int xidx = isRowMajor ? i + midx * k : i * m + midx;
    int yidx = isRowMajor ? i + nidx * k : i * n + nidx;

    auto a = x[xidx];
    auto b = y[yidx];
    acc_a += a * a;
    acc_b += b * b;
    acc_ab += a * b;
  }

  int outidx = isRowMajor ? midx * n + nidx : midx + m * nidx;

  // Use 1.0 - (cosine similarity) to calc the distance
  dist[outidx] = (DataType)1.0 - acc_ab / (raft::mySqrt(acc_a) * raft::mySqrt(acc_b));
}
/**
 * Dispatches the appropriate naive reference kernel for the requested
 * distance type over an m x n output grid.
 *
 * Fixes: the kernel launches used malformed "< <<nblks, TPB>>" chevrons,
 * which is not valid CUDA execution-configuration syntax ("<<<...>>>" is);
 * also corrected the inverted FAIL message for unsupported distance types.
 */
template <typename DataType>
void naiveDistance(DataType* dist,
                   const DataType* x,
                   const DataType* y,
                   int m,
                   int n,
                   int k,
                   raft::distance::DistanceType type,
                   bool isRowMajor)
{
  static const dim3 TPB(16, 32, 1);
  dim3 nblks(raft::ceildiv(m, (int)TPB.x), raft::ceildiv(n, (int)TPB.y), 1);

  switch (type) {
    case raft::distance::DistanceType::L1:
      naiveL1DistanceKernel<DataType><<<nblks, TPB>>>(dist, x, y, m, n, k, isRowMajor);
      break;
    case raft::distance::DistanceType::L2SqrtUnexpanded:
    case raft::distance::DistanceType::L2Unexpanded:
    case raft::distance::DistanceType::L2SqrtExpanded:
    case raft::distance::DistanceType::L2Expanded:
      naiveDistanceKernel<DataType><<<nblks, TPB>>>(dist, x, y, m, n, k, type, isRowMajor);
      break;
    case raft::distance::DistanceType::CosineExpanded:
      naiveCosineDistanceKernel<DataType><<<nblks, TPB>>>(dist, x, y, m, n, k, isRowMajor);
      break;
    default: FAIL() << "should not be here\n";
  }
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
// Parameters for one distance test case.
template <typename DataType>
struct DistanceInputs {
  DataType tolerance;           // comparison tolerance vs the naive reference
  int m, n, k;                  // x is m*k, y is n*k, output is m*n
  bool isRowMajor;              // layout of the input/output matrices
  unsigned long long int seed;  // RNG seed
};
// Stream operator required by gtest to print the test parameters
// (intentionally prints nothing).
template <typename DataType>
::std::ostream& operator<<(::std::ostream& os, const DistanceInputs<DataType>& dims)
{
  return os;
}
// Wraps the distance primitive call with a final epilogue that additionally
// writes a thresholded copy of each computed distance into dist2.
template <raft::distance::DistanceType distanceType, typename DataType>
void distanceLauncher(raft::resources const& handle,
                      DataType* x,
                      DataType* y,
                      DataType* dist,
                      DataType* dist2,
                      int m,
                      int n,
                      int k,
                      DistanceInputs<DataType>& params,
                      DataType threshold,
                      char* workspace,
                      size_t worksize,
                      cudaStream_t stream,
                      bool isRowMajor)
{
  // Epilogue lambda applied to each distance value; g_d_idx is the global
  // output index. Values below the threshold are zeroed in dist2 only.
  auto fin_op = [dist2, threshold] __device__(DataType d_val, int g_d_idx) {
    dist2[g_d_idx] = (d_val < threshold) ? 0.f : d_val;
    return d_val;
  };
  distance<distanceType, DataType, DataType, DataType>(
    handle, x, y, dist, m, n, k, workspace, worksize, fin_op, isRowMajor);
}
/**
 * Typed fixture: generates random inputs, computes a reference distance
 * matrix with the naive kernels, then runs the tested distance primitive
 * (whose epilogue also writes a thresholded copy into dist2).
 *
 * Fixes: the constructor referenced an undeclared `stream` (now a member
 * initialized from a member `handle`); the GetParam() call had malformed
 * template syntax; getWorkspaceSize received device_uvector objects instead
 * of raw pointers; the workspace uvector was constructed without the
 * mandatory stream argument; and the distanceLauncher call was missing its
 * `stream` argument.
 */
template <raft::distance::DistanceType distanceType, typename DataType>
class DistanceTest : public ::testing::TestWithParam<DistanceInputs<DataType>> {
 public:
  DistanceTest()
    : stream(raft::resource::get_cuda_stream(handle)),
      x(0, stream),
      y(0, stream),
      dist_ref(0, stream),
      dist(0, stream),
      dist2(0, stream)
  {
  }

  void SetUp() override
  {
    params = ::testing::TestWithParam<DistanceInputs<DataType>>::GetParam();
    raft::random::Rng r(params.seed);
    int m           = params.m;
    int n           = params.n;
    int k           = params.k;
    bool isRowMajor = params.isRowMajor;

    x.resize(m * k, stream);
    y.resize(n * k, stream);
    dist_ref.resize(m * n, stream);
    dist.resize(m * n, stream);
    dist2.resize(m * n, stream);

    r.uniform(x.data(), m * k, DataType(-1.0), DataType(1.0), stream);
    r.uniform(y.data(), n * k, DataType(-1.0), DataType(1.0), stream);

    // CPU-verifiable reference result.
    naiveDistance(dist_ref.data(), x.data(), y.data(), m, n, k, distanceType, isRowMajor);

    // Tested primitive, with its required scratch workspace.
    size_t worksize =
      getWorkspaceSize<distanceType, DataType, DataType, DataType>(x.data(), y.data(), m, n, k);
    rmm::device_uvector<char> workspace(worksize, stream);

    DataType threshold = -10000.f;
    distanceLauncher<distanceType, DataType>(handle,
                                             x.data(),
                                             y.data(),
                                             dist.data(),
                                             dist2.data(),
                                             m,
                                             n,
                                             k,
                                             params,
                                             threshold,
                                             workspace.data(),
                                             worksize,
                                             stream,
                                             isRowMajor);
  }

 protected:
  // Declaration order matters: handle must be initialized before stream.
  raft::resources handle;
  cudaStream_t stream;
  DistanceInputs<DataType> params;
  rmm::device_uvector<DataType> x, y, dist_ref, dist, dist2;
};
} // end namespace Distance
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/linalg_naive.h | /*
* Copyright (c) 2018-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
namespace MLCommon {
namespace LinAlg {
namespace Naive {
/**
 * @brief Sequential CPU reference for the Kronecker product K = A (x) B
 *
 * @note All matrices are stored in column-major order. A is m x n, B is
 *       p x q, and K must have room for (m*p) x (n*q) elements.
 *
 * @tparam DataT Type of the data
 * @param[out] K Result buffer for A (x) B
 * @param[in]  A Left operand (m x n)
 * @param[in]  B Right operand (p x q)
 * @param[in]  m Rows of A
 * @param[in]  n Columns of A
 * @param[in]  p Rows of B
 * @param[in]  q Columns of B
 */
template <typename DataT>
void kronecker(DataT* K, const DataT* A, const DataT* B, int m, int n, int p, int q)
{
  const int out_rows = m * p;
#pragma omp parallel for collapse(2)
  for (int ai = 0; ai < m; ai++) {
    for (int aj = 0; aj < n; aj++) {
      // Each element of A scales a full copy of B placed at block (ai, aj).
      const DataT scale = A[ai + m * aj];
      const int row0    = ai * p;
      const int col0    = aj * q;
      for (int bi = 0; bi < p; bi++) {
        for (int bj = 0; bj < q; bj++) {
          K[(row0 + bi) + (col0 + bj) * out_rows] = scale * B[bi + p * bj];
        }
      }
    }
  }
}
/**
 * @brief Sequential CPU reference matrix multiply:
 *        out = alpha * A*B + beta * out
 *
 * @note All matrices are stored in column-major order.
 *
 * @tparam DataT Type of the data
 * @param[inout] out Result buffer (m x n); read when beta != 0
 * @param[in]    A   Left operand (m x k)
 * @param[in]    B   Right operand (k x n)
 * @param[in]    m   Rows of A
 * @param[in]    k   Columns of A / rows of B
 * @param[in]    n   Columns of B
 * @param[in]    alpha Scale applied to A*B
 * @param[in]    beta  Scale applied to the existing contents of out
 */
template <typename DataT>
void matMul(
  DataT* out, const DataT* A, const DataT* B, int m, int k, int n, DataT alpha = 1, DataT beta = 0)
{
#pragma omp parallel for collapse(2)
  for (int col = 0; col < n; col++) {
    for (int row = 0; row < m; row++) {
      DataT acc = DataT(0);
      for (int t = 0; t < k; t++) {
        acc += A[row + t * m] * B[t + col * k];
      }
      const int idx = row + col * m;
      out[idx]      = alpha * acc + beta * out[idx];
    }
  }
}
/**
 * @brief Sequential CPU reference element-wise sum: out = u + alpha * v
 *
 * @tparam DataT Type of the data
 * @param[out] out   Result buffer
 * @param[in]  u     First vector
 * @param[in]  v     Second vector
 * @param[in]  len   Number of elements
 * @param[in]  alpha Scale applied to the elements of v
 */
template <typename DataT>
void add(DataT* out, const DataT* u, const DataT* v, int len, DataT alpha = 1.0)
{
#pragma omp parallel for
  for (int idx = 0; idx < len; idx++) {
    out[idx] = alpha * v[idx] + u[idx];
  }
}
/**
 * @brief Sequential CPU construction of a lagged matrix: column `lag-1`
 *        holds the series shifted by `lag` (column-major, len-lags rows).
 *
 * @tparam DataT Type of the data
 * @param[out] out  Result buffer of (len - lags) * lags elements
 * @param[in]  in   Input series
 * @param[in]  len  Length of the input series
 * @param[in]  lags Number of lags (= number of output columns)
 */
template <typename DataT>
void laggedMat(DataT* out, const DataT* in, int len, int lags)
{
  const int n_rows = len - lags;
#pragma omp parallel for
  for (int lag = 1; lag <= lags; lag++) {
    // Column (lag - 1) copies the series starting at offset (lags - lag).
    DataT* dst       = out + (lag - 1) * n_rows;
    const DataT* src = in + (lags - lag);
    for (int r = 0; r < n_rows; r++) {
      dst[r] = src[r];
    }
  }
}
/**
 * @brief Sequential CPU 2D sub-matrix copy (column-major): copies an
 *        out_rows x out_cols window of `in` starting at
 *        (starting_row, starting_col) into `out`.
 *
 * @tparam DataT Type of the data
 * @param[out] out          Result buffer (out_rows x out_cols)
 * @param[in]  in           Input matrix
 * @param[in]  starting_row First row of the window in `in`
 * @param[in]  starting_col First column of the window in `in`
 * @param[in]  in_rows      Number of rows in `in` (its leading dimension)
 * @param[in]  out_rows     Number of rows to copy
 * @param[in]  out_cols     Number of columns to copy
 */
template <typename DataT>
void copy2D(DataT* out,
            const DataT* in,
            int starting_row,
            int starting_col,
            int in_rows,
            int out_rows,
            int out_cols)
{
#pragma omp parallel for collapse(2)
  for (int r = 0; r < out_rows; r++) {
    for (int c = 0; c < out_cols; c++) {
      const int src         = (starting_row + r) + (starting_col + c) * in_rows;
      out[r + c * out_rows] = in[src];
    }
  }
}
/**
 * @brief Sequential CPU first difference: out[i] = in[i+1] - in[i]
 *
 * @tparam DataT Type of the data
 * @param[out] out Result buffer of (len - 1) elements
 * @param[in]  in  Input vector
 * @param[in]  len Length of the input vector
 */
template <typename DataT>
void diff(DataT* out, const DataT* in, int len)
{
#pragma omp parallel for
  for (int idx = 0; idx + 1 < len; idx++) {
    out[idx] = in[idx + 1] - in[idx];
  }
}
} // namespace Naive
} // namespace LinAlg
} // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/add_sub_dev_scalar.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <gtest/gtest.h>
#include <raft/linalg/add.cuh>
#include <raft/linalg/subtract.cuh>
#include <raft/linalg/unary_op.cuh>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_scalar.hpp>
#include <rmm/device_uvector.hpp>
namespace raft {
namespace linalg {
// Parameters for the device-scalar add/subtract tests.
template <typename T, typename IdxType = int>
struct DevScalarInputs {
  T tolerance;                  // comparison tolerance
  IdxType len;                  // number of elements
  T scalar;                     // scalar operand (uploaded to device memory)
  bool add;                     // true: test addDevScalar; false: subtractDevScalar
  unsigned long long int seed;  // RNG seed
};
// Or else, we get the following compilation error
// for an extended __device__ lambda cannot have private or protected access
// within its class
// Computes the reference result with a host-captured scalar: element-wise
// in + scalar (add == true) or in - scalar (add == false).
template <typename T, typename IdxType = int>
void unaryOpLaunch(T* out, const T* in, T scalar, IdxType len, bool add, cudaStream_t stream)
{
  raft::linalg::unaryOp(
    out,
    in,
    len,
    [scalar, add] __device__(T in) { return add ? in + scalar : in - scalar; },
    stream);
}
// Fixture: computes a reference with the scalar captured on the host
// (unaryOpLaunch), then runs addDevScalar/subtractDevScalar with the scalar
// residing in device memory; the TEST_P bodies compare the two outputs.
template <typename T, typename IdxType>
class DevScalarTest : public ::testing::TestWithParam<DevScalarInputs<T, IdxType>> {
 protected:
  DevScalarTest() : in(0, stream), out_ref(0, stream), out(0, stream), scalar(stream) {}

  void SetUp() override
  {
    params = ::testing::TestWithParam<DevScalarInputs<T, IdxType>>::GetParam();
    raft::random::Rng r(params.seed);
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    auto len = params.len;
    in.resize(len, stream);
    out_ref.resize(len, stream);
    out.resize(len, stream);
    raft::update_device(scalar.data(), &params.scalar, 1, stream);
    r.uniform(in.data(), len, T(-1.0), T(1.0), stream);
    unaryOpLaunch(out_ref.data(), in.data(), params.scalar, len, params.add, stream);
    if (params.add) {
      addDevScalar(out.data(), in.data(), scalar.data(), len, stream);
    } else {
      subtractDevScalar(out.data(), in.data(), scalar.data(), len, stream);
    }
    // NOTE(review): the stream is destroyed here while the device buffers are
    // read later by the TEST_P bodies — presumably relying on the
    // synchronization implied by stream destruction; verify against the
    // rmm/raft stream-lifetime rules.
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }

 protected:
  cudaStream_t stream = 0;
  DevScalarInputs<T, IdxType> params;
  rmm::device_uvector<T> in, out_ref, out;
  rmm::device_scalar<T> scalar;
};
// Instantiations covering {float, double} x {int, size_t} index types, each
// with one add case and one subtract case.
const std::vector<DevScalarInputs<float, int>> inputsf_i32 = {
  {0.000001f, 1024 * 1024, 2.f, true, 1234ULL}, {0.000001f, 1024 * 1024, 2.f, false, 1234ULL}};
typedef DevScalarTest<float, int> DevScalarTestF_i32;
TEST_P(DevScalarTestF_i32, Result)
{
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), params.len, MLCommon::CompareApprox<float>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(DevScalarTests, DevScalarTestF_i32, ::testing::ValuesIn(inputsf_i32));

const std::vector<DevScalarInputs<float, size_t>> inputsf_i64 = {
  {0.000001f, 1024 * 1024, 2.f, true, 1234ULL}, {0.000001f, 1024 * 1024, 2.f, false, 1234ULL}};
typedef DevScalarTest<float, size_t> DevScalarTestF_i64;
TEST_P(DevScalarTestF_i64, Result)
{
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), params.len, MLCommon::CompareApprox<float>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(DevScalarTests, DevScalarTestF_i64, ::testing::ValuesIn(inputsf_i64));

const std::vector<DevScalarInputs<double, int>> inputsd_i32 = {
  {0.00000001, 1024 * 1024, 2.0, true, 1234ULL}, {0.00000001, 1024 * 1024, 2.0, false, 1234ULL}};
typedef DevScalarTest<double, int> DevScalarTestD_i32;
TEST_P(DevScalarTestD_i32, Result)
{
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), params.len, MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(DevScalarTests, DevScalarTestD_i32, ::testing::ValuesIn(inputsd_i32));

const std::vector<DevScalarInputs<double, size_t>> inputsd_i64 = {
  {0.00000001, 1024 * 1024, 2.0, true, 1234ULL}, {0.00000001, 1024 * 1024, 2.0, false, 1234ULL}};
typedef DevScalarTest<double, size_t> DevScalarTestD_i64;
TEST_P(DevScalarTestD_i64, Result)
{
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), params.len, MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(DevScalarTests, DevScalarTestD_i64, ::testing::ValuesIn(inputsd_i64));
} // end namespace linalg
} // end namespace raft
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/kselection.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <gtest/gtest.h>
#include <limits>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
#include <selection/kselection.cuh>
#include <stdlib.h>
namespace MLCommon {
namespace Selection {
// Device kernel driving KVArray::sort(): every thread fills an N-element
// key/value array with its lane id, sorts it, then stores the resulting keys
// to global memory (strided by TPB) for host-side inspection.
template <typename TypeV, typename TypeK, int N, int TPB, bool Greater>
__global__ void sortTestKernel(TypeK* key)
{
  KVArray<TypeV, TypeK, N, Greater> arr;
#pragma unroll
  for (int i = 0; i < N; ++i) {
    arr.arr[i].val = (TypeV)raft::laneId();
    arr.arr[i].key = (TypeK)raft::laneId();
  }
  raft::warpFence();
  arr.sort();
  raft::warpFence();
#pragma unroll
  for (int i = 0; i < N; ++i)
    arr.arr[i].store(nullptr, key + threadIdx.x + i * TPB);
}
// Runs sortTestKernel with one block of TPB threads and copies the sorted
// keys back to the host buffer `key` (TPB * N elements).
// Fix: rmm::device_uvector has no size-only constructor — a stream is
// required, so the default stream is supplied (consistent with checkResult
// below, which also uses rmm::cuda_stream_default).
template <typename TypeV, typename TypeK, int N, int TPB, bool Greater>
void sortTest(TypeK* key)
{
  rmm::device_uvector<TypeK> dkey(TPB * N, rmm::cuda_stream_default);
  sortTestKernel<TypeV, TypeK, N, TPB, Greater><<<1, TPB>>>(dkey.data());
  RAFT_CUDA_TRY(cudaPeekAtLastError());
  raft::update_host<TypeK>(key, dkey.data(), TPB * N, 0);
}
/************************************************************************/
/********************** Add the function for CPU test *******************/
/************************************************************************/
// Comparison predicate for std::sort / std::partial_sort over KVPair:
// orders by ascending value when Greater is true, descending otherwise.
template <typename TypeV, typename TypeK, bool Greater>
int cmp(KVPair<TypeV, TypeK> a, KVPair<TypeV, TypeK> b)
{
  return Greater ? (a.val < b.val) : (a.val > b.val);
}
// CPU reference: partially sorts the first k of N pairs in place, with the
// ordering selected by the Greater template flag (see cmp above).
template <typename TypeV, typename TypeK, bool Greater>
void partSortKVPair(KVPair<TypeV, TypeK>* arr, int N, int k)
{
  std::partial_sort(arr, arr + k, arr + N, cmp<TypeV, TypeK, Greater>);
}
// CPU reference: fully sorts a KVArray's backing storage with std::sort,
// using the same ordering as partSortKVPair.
template <typename TypeV, typename TypeK, int N, bool Greater>
void sortKVArray(KVArray<TypeV, TypeK, N, Greater>& arr)
{
  std::sort(arr.arr, arr.arr + N, cmp<TypeV, TypeK, Greater>);
}
/**
 * Compares the GPU top-k output against a CPU partial sort, row by row.
 *
 * Fix: the temporary host buffers (h_arr, topk, h_outv, h_outk) were leaked
 * whenever a mismatch triggered the early AssertionFailure return; the first
 * mismatch is now recorded and all buffers are released on every path before
 * returning.
 */
template <typename TypeV, typename TypeK, bool Greater>
::testing::AssertionResult checkResult(
  TypeV* d_arr, TypeV* d_outv, TypeK* d_outk, int rows, int N, int k, TypeV tolerance)
{
  for (int rIndex = 0; rIndex < rows; rIndex++) {
    // input data
    TypeV* h_arr = new TypeV[N];
    raft::update_host(h_arr, d_arr + rIndex * N, N, rmm::cuda_stream_default);
    KVPair<TypeV, TypeK>* topk = new KVPair<TypeV, TypeK>[N];
    for (int j = 0; j < N; j++) {
      topk[j].val = h_arr[j];
      topk[j].key = j;
    }

    // GPU result for this row
    TypeV* h_outv = new TypeV[k];
    raft::update_host(h_outv, d_outv + rIndex * k, k, rmm::cuda_stream_default);
    TypeK* h_outk = new TypeK[k];
    raft::update_host(h_outk, d_outk + rIndex * k, k, rmm::cuda_stream_default);

    // CPU reference
    partSortKVPair<TypeV, TypeK, Greater>(topk, N, k);

    // compare, remembering the first mismatch (if any)
    int mismatch   = -1;
    TypeV expected = TypeV(0);
    TypeV actual   = TypeV(0);
    for (int j = 0; j < k; j++) {
      if (abs(h_outv[j] - topk[j].val) > tolerance) {
        mismatch = j;
        expected = h_outv[j];
        actual   = topk[j].val;
        break;
      }
    }

    // release resources on every path (previously leaked on failure)
    delete[] h_arr;
    delete[] h_outv;
    delete[] h_outk;
    delete[] topk;

    if (mismatch >= 0) {
      return ::testing::AssertionFailure() << "actual=" << actual << " != expected=" << expected;
    }
  }
  return ::testing::AssertionSuccess();
}
// Structure WarpTopKInputs: parameters for one warp top-k test case.
template <typename T>
struct WarpTopKInputs {
  T tolerance;                  // comparison tolerance for values
  int rows;                     // batch size
  int cols;                     // N the length of variables
  int k;                        // the top-k value
  unsigned long long int seed;  // seed to generate data
};
// Stream operator required by gtest to print the test parameters
// (intentionally prints nothing).
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const WarpTopKInputs<T>& dims)
{
  return os;
}
// Define functions WarpTopKTest: fills a rows x cols matrix with uniform
// random values, then runs warpTopK on it (Greater = true, unsorted output);
// verification happens in the TEST_P bodies via checkResult.
template <typename T>
class WarpTopKTest : public ::testing::TestWithParam<WarpTopKInputs<T>> {
 protected:
  WarpTopKTest() : arr(0, stream), outv(0, stream), outk(0, stream) {}

  void SetUp() override
  {
    params = ::testing::TestWithParam<WarpTopKInputs<T>>::GetParam();
    raft::random::Rng r(params.seed);
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    arr.resize(params.rows * params.cols, stream);
    outk.resize(params.rows * params.k, stream);
    outv.resize(params.rows * params.k, stream);
    r.uniform(arr.data(), params.rows * params.cols, T(-1.0), T(1.0), stream);
    static const bool Sort    = false;
    static const bool Greater = true;
    warpTopK<T, int, Greater, Sort>(
      outv.data(), outk.data(), arr.data(), params.k, params.rows, params.cols, stream);
    // NOTE(review): the stream is destroyed here while checkResult later
    // reads the device buffers on the default stream — presumably relying on
    // the synchronization implied by stream destruction; verify.
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }

 protected:
  cudaStream_t stream = 0;
  WarpTopKInputs<T> params;
  rmm::device_uvector<T> arr;   // input matrix
  rmm::device_uvector<T> outv;  // top-k values per row
  rmm::device_uvector<int> outk;  // top-k indices per row
};
// Parameters
// Milestone 1: Verify the result of current implementation
// Milestone 2: Support all the values of k between 1 and 1024; both inclusive
// Milestone 2.1: Using the POC code to Support all the values
// Field order: {tolerance, rows, cols, k, seed}.
const std::vector<WarpTopKInputs<float>> inputs2_0 = {{0.00000001, 2, 1024, 256, 1234ULL}};
const std::vector<WarpTopKInputs<float>> inputs2_1 = {{0.00000001, 4, 2048, 1024, 1234ULL}};
const std::vector<WarpTopKInputs<float>> inputs2_2 = {{0.00000001, 4, 2048, 1, 1234ULL}};
// Milestone 2.2: Using the full thread queue and warp queue code to support
// all the values
// @TODO: Milestone 3: Support not sorted
// @TODO: Milestone 4: Support multi-gpu
// Define the function TEST_P
// One fixture alias per input set so each k regime is reported separately.
typedef WarpTopKTest<float> TestD2_0;
typedef WarpTopKTest<float> TestD2_1;
typedef WarpTopKTest<float> TestD2_2;
TEST_P(TestD2_0, Result)
{
  const static bool Greater = true;
  ASSERT_TRUE((checkResult<float, int, Greater>(
    arr.data(), outv.data(), outk.data(), params.rows, params.cols, params.k, params.tolerance)));
}
TEST_P(TestD2_1, Result)
{
  const static bool Greater = true;
  ASSERT_TRUE((checkResult<float, int, Greater>(
    arr.data(), outv.data(), outk.data(), params.rows, params.cols, params.k, params.tolerance)));
}
TEST_P(TestD2_2, Result)
{
  const static bool Greater = true;
  ASSERT_TRUE((checkResult<float, int, Greater>(
    arr.data(), outv.data(), outk.data(), params.rows, params.cols, params.k, params.tolerance)));
}
// Instantiate
INSTANTIATE_TEST_CASE_P(WarpTopKTests, TestD2_0, ::testing::ValuesIn(inputs2_0));
INSTANTIATE_TEST_CASE_P(WarpTopKTests, TestD2_1, ::testing::ValuesIn(inputs2_1));
INSTANTIATE_TEST_CASE_P(WarpTopKTests, TestD2_2, ::testing::ValuesIn(inputs2_2));
} // end namespace Selection
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/linalg_block.cu | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <random>
#include <vector>
#include <raft/core/handle.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include "test_utils.h"
#include <cuml/common/logger.hpp>
#include <linalg/block.cuh>
namespace MLCommon {
namespace LinAlg {
using namespace std;
/* GEMM */
/** Parameter set for the block GEMM tests. */
template <typename T>
struct BlockGemmInputs {
  int m, k, n;                 // C is m x n, A is m x k, B is k x n
  bool transa, transb;         // whether A / B are read transposed
  int batch_size;              // number of independent GEMMs (one per CUDA block)
  int vec_len;                 // vector length the policy under test assumes
  T eps;                       // tolerance for the approximate comparison
  unsigned long long int seed; // RNG seed
};

/**
 * gtest failure printer. The previous version wrote nothing, which made it
 * impossible to tell which parameter combination failed; now all fields are
 * printed.
 */
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const BlockGemmInputs<T>& dims)
{
  os << "{m=" << dims.m << ", k=" << dims.k << ", n=" << dims.n << ", transa=" << dims.transa
     << ", transb=" << dims.transb << ", batch_size=" << dims.batch_size
     << ", vec_len=" << dims.vec_len << ", eps=" << dims.eps << ", seed=" << dims.seed << "}";
  return os;
}
// Thin device wrapper around the _block_gemm prim: each CUDA block computes
// one batch member, so the pointers are offset by blockIdx.x times the
// per-batch matrix sizes. The shared-memory storage type required by the
// policy is allocated here and passed through.
template <typename Policy, typename T>
__global__ void block_gemm_test_kernel(
  bool transa, bool transb, int m, int n, int k, T alpha, const T* a, const T* b, T* c)
{
  __shared__ MLCommon::LinAlg::GemmStorage<Policy, T> gemm_storage;
  _block_gemm<Policy>(transa,
                      transb,
                      m,
                      n,
                      k,
                      alpha,
                      a + m * k * blockIdx.x,
                      b + k * n * blockIdx.x,
                      c + m * n * blockIdx.x,
                      gemm_storage);
}
// Fixture: runs the batched block GEMM prim on random data and compares it
// against a straightforward host reference (column-major layout).
template <typename Policy, typename T>
class BlockGemmTest : public ::testing::TestWithParam<BlockGemmInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<BlockGemmInputs<T>>::GetParam();
    rmm::device_uvector<T> a(params.m * params.k * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> b(params.k * params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> c(params.m * params.n * params.batch_size, handle.get_stream());
    std::vector<T> h_a(params.m * params.k * params.batch_size);
    std::vector<T> h_b(params.k * params.n * params.batch_size);
    std::vector<T> h_c_ref(params.m * params.n * params.batch_size);
    /* Generate random data on device */
    raft::random::Rng r(params.seed);
    r.uniform(a.data(), params.m * params.k * params.batch_size, (T)-2, (T)2, handle.get_stream());
    r.uniform(b.data(), params.k * params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    /* Generate random alpha (host-side RNG, independent of the device RNG) */
    std::default_random_engine generator(params.seed);
    std::uniform_real_distribution<T> distribution(-2.0, 2.0);
    T alpha = distribution(generator);
    /* Copy to host */
    raft::update_host(
      h_a.data(), a.data(), params.m * params.k * params.batch_size, handle.get_stream());
    raft::update_host(
      h_b.data(), b.data(), params.k * params.n * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    /* Compute using tested prims: one CUDA block per batch member */
    block_gemm_test_kernel<Policy>
      <<<params.batch_size, Policy::BlockSize, 0, handle.get_stream()>>>(params.transa,
                                                                         params.transb,
                                                                         params.m,
                                                                         params.n,
                                                                         params.k,
                                                                         alpha,
                                                                         a.data(),
                                                                         b.data(),
                                                                         c.data());
    /* Compute reference results. Indexing treats the matrices as column-major;
     * the trans flags switch the read pattern accordingly. */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.m; i++) {
        for (int j = 0; j < params.n; j++) {
          T acc = (T)0;
          for (int h = 0; h < params.k; h++) {
            T _a = params.transa ? h_a[bid * params.m * params.k + i * params.k + h]
                                 : h_a[bid * params.m * params.k + h * params.m + i];
            T _b = params.transb ? h_b[bid * params.k * params.n + h * params.n + j]
                                 : h_b[bid * params.k * params.n + j * params.k + h];
            acc += _a * _b;
          }
          h_c_ref[bid * params.m * params.n + j * params.m + i] = alpha * acc;
        }
      }
    }
    /* Check results (devArrMatchHost presumably synchronizes on the stream;
     * no explicit sync after the kernel launch here — confirm) */
    match = devArrMatchHost(h_c_ref.data(),
                            c.data(),
                            params.m * params.n * params.batch_size,
                            MLCommon::CompareApprox<T>(params.eps),
                            handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  BlockGemmInputs<T> params;
  // Result of the device/host comparison, consumed by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Field order: {m, k, n, transa, transb, batch_size, vec_len, eps, seed}.
// vec_len=1 inputs exercise the scalar-load policies; the *_vec2 sets use
// dimensions compatible with vectorized (vec_len=2) loads.
const std::vector<BlockGemmInputs<float>> gemm_inputsf = {
  {42, 42, 42, false, false, 20, 1, 1e-4, 12345U},
  {65, 10, 20, false, true, 50, 1, 1e-4, 12345U},
  {5, 80, 31, true, false, 80, 1, 1e-4, 12345U},
  {11, 50, 41, true, true, 100, 1, 1e-4, 12345U},
};
const std::vector<BlockGemmInputs<double>> gemm_inputsd = {
  {42, 42, 42, false, false, 20, 1, 1e-4, 12345U},
  {65, 10, 20, false, true, 50, 1, 1e-4, 12345U},
  {5, 80, 31, true, false, 80, 1, 1e-4, 12345U},
  {11, 50, 41, true, true, 100, 1, 1e-4, 12345U},
};
const std::vector<BlockGemmInputs<float>> gemm_inputsf_vec2 = {
  {30, 34, 16, false, false, 20, 2, 1e-4, 12345U},
  {10, 42, 20, false, true, 20, 2, 1e-4, 12345U},
  {14, 8, 22, true, false, 20, 2, 1e-4, 12345U},
  {56, 72, 28, true, true, 20, 2, 1e-4, 12345U},
};
const std::vector<BlockGemmInputs<double>> gemm_inputsd_vec2 = {
  {30, 34, 16, false, false, 20, 2, 1e-4, 12345U},
  {10, 42, 20, false, true, 20, 2, 1e-4, 12345U},
  {14, 8, 22, true, false, 20, 2, 1e-4, 12345U},
  {56, 72, 28, true, true, 20, 2, 1e-4, 12345U},
};
// One typedef + TEST_P + instantiation per (policy, precision) combination.
// The suffix encodes the BlockGemmPolicy template arguments.
typedef BlockGemmTest<BlockGemmPolicy<1, 16, 1, 4, 16, 4>, float> BlockGemmTestF_1_16_1_4_16_4;
TEST_P(BlockGemmTestF_1_16_1_4_16_4, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 16, 1, 4, 16, 4>, double> BlockGemmTestD_1_16_1_4_16_4;
TEST_P(BlockGemmTestD_1_16_1_4_16_4, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 32, 1, 4, 32, 8>, float> BlockGemmTestF_1_32_1_4_32_8;
TEST_P(BlockGemmTestF_1_32_1_4_32_8, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 32, 1, 4, 32, 8>, double> BlockGemmTestD_1_32_1_4_32_8;
TEST_P(BlockGemmTestD_1_32_1_4_32_8, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 32, 1, 16, 64, 4>, float> BlockGemmTestF_1_32_1_16_64_4;
TEST_P(BlockGemmTestF_1_32_1_16_64_4, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 32, 1, 16, 64, 4>, double> BlockGemmTestD_1_32_1_16_64_4;
TEST_P(BlockGemmTestD_1_32_1_16_64_4, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 16, 1, 16, 128, 2>, float> BlockGemmTestF_1_16_1_16_128_2;
TEST_P(BlockGemmTestF_1_16_1_16_128_2, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<1, 16, 1, 16, 128, 2>, double> BlockGemmTestD_1_16_1_16_128_2;
TEST_P(BlockGemmTestD_1_16_1_16_128_2, Result) { EXPECT_TRUE(match); }
// Vectorized (vec_len=2) policy uses the *_vec2 input sets below.
typedef BlockGemmTest<BlockGemmPolicy<2, 32, 2, 2, 16, 16>, float> BlockGemmTestF_2_32_2_2_16_16;
TEST_P(BlockGemmTestF_2_32_2_2_16_16, Result) { EXPECT_TRUE(match); }
typedef BlockGemmTest<BlockGemmPolicy<2, 32, 2, 2, 16, 16>, double> BlockGemmTestD_2_32_2_2_16_16;
TEST_P(BlockGemmTestD_2_32_2_2_16_16, Result) { EXPECT_TRUE(match); }
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestF_1_16_1_4_16_4,
                        ::testing::ValuesIn(gemm_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestD_1_16_1_4_16_4,
                        ::testing::ValuesIn(gemm_inputsd));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestF_1_32_1_4_32_8,
                        ::testing::ValuesIn(gemm_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestD_1_32_1_4_32_8,
                        ::testing::ValuesIn(gemm_inputsd));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestF_1_32_1_16_64_4,
                        ::testing::ValuesIn(gemm_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestD_1_32_1_16_64_4,
                        ::testing::ValuesIn(gemm_inputsd));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestF_1_16_1_16_128_2,
                        ::testing::ValuesIn(gemm_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestD_1_16_1_16_128_2,
                        ::testing::ValuesIn(gemm_inputsd));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestF_2_32_2_2_16_16,
                        ::testing::ValuesIn(gemm_inputsf_vec2));
INSTANTIATE_TEST_CASE_P(BlockGemmTests,
                        BlockGemmTestD_2_32_2_2_16_16,
                        ::testing::ValuesIn(gemm_inputsd_vec2));
/* GEMV */
/** Parameter set for the block GEMV tests. */
template <typename T>
struct BlockGemvInputs {
  bool preload;                // let the prim preload x into shared memory itself
  int m, n;                    // A is m x n, x has n elements, y has m
  int batch_size;              // one GEMV per CUDA block
  T eps;                       // tolerance for the approximate comparison
  unsigned long long int seed; // RNG seed
};

/**
 * gtest failure printer. Previously wrote nothing (useless failure logs);
 * now prints every field.
 */
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const BlockGemvInputs<T>& dims)
{
  os << "{preload=" << dims.preload << ", m=" << dims.m << ", n=" << dims.n
     << ", batch_size=" << dims.batch_size << ", eps=" << dims.eps << ", seed=" << dims.seed
     << "}";
  return os;
}
// Device wrapper for _block_gemv, one batch member per CUDA block. Exercises
// both entry points: preload=true lets the prim stage x into shared memory
// itself; preload=false stages x manually first and calls the non-preloading
// variant. The dynamic shared memory buffer holds the n-element vector.
template <typename Policy, typename T>
__global__ void block_gemv_test_kernel(
  int m, int n, T alpha, const T* a, const T* x, T* y, bool preload)
{
  __shared__ MLCommon::LinAlg::GemvStorage<Policy, T> gemv_storage;
  extern __shared__ char dyna_shared_mem[];
  T* shared_vec = (T*)dyna_shared_mem;
  if (preload) {
    _block_gemv<Policy, true>(m,
                              n,
                              alpha,
                              a + m * n * blockIdx.x,
                              x + n * blockIdx.x,
                              y + m * blockIdx.x,
                              gemv_storage,
                              shared_vec);
  } else {
    // Block-strided copy of this batch member's x into shared memory.
    for (int i = threadIdx.x; i < n; i += Policy::BlockSize) {
      shared_vec[i] = x[n * blockIdx.x + i];
    }
    __syncthreads();
    _block_gemv<Policy, false>(
      m, n, alpha, a + m * n * blockIdx.x, shared_vec, y + m * blockIdx.x, gemv_storage);
  }
}
// Fixture: runs the batched block GEMV prim on random data and compares it
// against a host reference (A read column-major).
template <typename Policy, typename T>
class BlockGemvTest : public ::testing::TestWithParam<BlockGemvInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<BlockGemvInputs<T>>::GetParam();
    rmm::device_uvector<T> a(params.m * params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> x(params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> y(params.m * params.batch_size, handle.get_stream());
    std::vector<T> h_a(params.m * params.n * params.batch_size);
    std::vector<T> h_x(params.n * params.batch_size);
    std::vector<T> h_y_ref(params.m * params.batch_size);
    /* Generate random data on device */
    raft::random::Rng r(params.seed);
    r.uniform(a.data(), params.m * params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    r.uniform(x.data(), params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    /* Generate random alpha (host-side RNG) */
    std::default_random_engine generator(params.seed);
    std::uniform_real_distribution<T> distribution(-2.0, 2.0);
    T alpha = distribution(generator);
    /* Copy to host */
    raft::update_host(
      h_a.data(), a.data(), params.m * params.n * params.batch_size, handle.get_stream());
    raft::update_host(h_x.data(), x.data(), params.n * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    /* Compute using tested prims; dynamic shared memory holds the n-vector */
    int shared_mem_size = params.n * sizeof(T);
    block_gemv_test_kernel<Policy>
      <<<params.batch_size, Policy::BlockSize, shared_mem_size, handle.get_stream()>>>(
        params.m, params.n, alpha, a.data(), x.data(), y.data(), params.preload);
    /* Compute reference results (column-major A) */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.m; i++) {
        T acc = (T)0;
        for (int j = 0; j < params.n; j++) {
          acc += h_a[bid * params.m * params.n + j * params.m + i] * h_x[bid * params.n + j];
        }
        h_y_ref[bid * params.m + i] = alpha * acc;
      }
    }
    /* Check results */
    match = devArrMatchHost(h_y_ref.data(),
                            y.data(),
                            params.m * params.batch_size,
                            MLCommon::CompareApprox<T>(params.eps),
                            handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  BlockGemvInputs<T> params;
  // Comparison outcome consumed by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Field order: {preload, m, n, batch_size, eps, seed}. The last entry in each
// set exercises the non-preloading path.
const std::vector<BlockGemvInputs<float>> gemv_inputsf = {{true, 42, 42, 20, 1e-4, 12345U},
                                                          {true, 65, 10, 50, 1e-4, 12345U},
                                                          {false, 5, 80, 100, 1e-4, 12345U}};
const std::vector<BlockGemvInputs<double>> gemv_inputsd = {{true, 42, 42, 20, 1e-4, 12345U},
                                                           {true, 65, 10, 50, 1e-4, 12345U},
                                                           {false, 5, 80, 100, 1e-4, 12345U}};
// One typedef + TEST_P + instantiation per (BlockGemvPolicy, precision).
typedef BlockGemvTest<BlockGemvPolicy<16, 4>, float> BlockGemvTestF_16_4;
TEST_P(BlockGemvTestF_16_4, Result) { EXPECT_TRUE(match); }
typedef BlockGemvTest<BlockGemvPolicy<16, 4>, double> BlockGemvTestD_16_4;
TEST_P(BlockGemvTestD_16_4, Result) { EXPECT_TRUE(match); }
typedef BlockGemvTest<BlockGemvPolicy<32, 8>, float> BlockGemvTestF_32_8;
TEST_P(BlockGemvTestF_32_8, Result) { EXPECT_TRUE(match); }
typedef BlockGemvTest<BlockGemvPolicy<32, 8>, double> BlockGemvTestD_32_8;
TEST_P(BlockGemvTestD_32_8, Result) { EXPECT_TRUE(match); }
typedef BlockGemvTest<BlockGemvPolicy<128, 2>, float> BlockGemvTestF_128_2;
TEST_P(BlockGemvTestF_128_2, Result) { EXPECT_TRUE(match); }
typedef BlockGemvTest<BlockGemvPolicy<128, 2>, double> BlockGemvTestD_128_2;
TEST_P(BlockGemvTestD_128_2, Result) { EXPECT_TRUE(match); }
INSTANTIATE_TEST_CASE_P(BlockGemvTests, BlockGemvTestF_16_4, ::testing::ValuesIn(gemv_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemvTests, BlockGemvTestD_16_4, ::testing::ValuesIn(gemv_inputsd));
INSTANTIATE_TEST_CASE_P(BlockGemvTests, BlockGemvTestF_32_8, ::testing::ValuesIn(gemv_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemvTests, BlockGemvTestD_32_8, ::testing::ValuesIn(gemv_inputsd));
INSTANTIATE_TEST_CASE_P(BlockGemvTests, BlockGemvTestF_128_2, ::testing::ValuesIn(gemv_inputsf));
INSTANTIATE_TEST_CASE_P(BlockGemvTests, BlockGemvTestD_128_2, ::testing::ValuesIn(gemv_inputsd));
/* DOT */
/** Parameter set for the block dot-product tests. */
template <typename T>
struct BlockDotInputs {
  bool broadcast;              // whether the prim broadcasts the result to all threads
  int n;                       // vector length
  int batch_size;              // one dot product per CUDA block
  T eps;                       // tolerance for the approximate comparison
  unsigned long long int seed; // RNG seed
};

/**
 * gtest failure printer. Previously wrote nothing (useless failure logs);
 * now prints every field.
 */
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const BlockDotInputs<T>& dims)
{
  os << "{broadcast=" << dims.broadcast << ", n=" << dims.n << ", batch_size=" << dims.batch_size
     << ", eps=" << dims.eps << ", seed=" << dims.seed << "}";
  return os;
}
// Device wrapper for _block_dot, one batch member per CUDA block. In
// non-broadcast mode only thread 0 holds the result; in broadcast mode the
// last thread writes it, to verify the value really reached all threads.
template <int BlockSize, bool Broadcast, typename T>
__global__ void block_dot_test_kernel(int n, const T* x, const T* y, T* d_dot)
{
  __shared__ ReductionStorage<BlockSize, T> reduction_storage;
  T dot_ =
    _block_dot<BlockSize, Broadcast>(n, x + n * blockIdx.x, y + n * blockIdx.x, reduction_storage);
  if (!Broadcast && threadIdx.x == 0)
    d_dot[blockIdx.x] = dot_;
  else if (Broadcast && threadIdx.x == BlockSize - 1)
    d_dot[blockIdx.x] = dot_;
}
// Fixture: runs the batched block dot-product prim on random vectors and
// compares against a host accumulation.
template <typename T>
class BlockDotTest : public ::testing::TestWithParam<BlockDotInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<BlockDotInputs<T>>::GetParam();
    rmm::device_uvector<T> x(params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> y(params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> dot_dev(params.batch_size, handle.get_stream());
    std::vector<T> h_x(params.n * params.batch_size);
    std::vector<T> h_y(params.n * params.batch_size);
    std::vector<T> h_dot_ref(params.batch_size, (T)0);
    /* Generate random data on device */
    raft::random::Rng r(params.seed);
    r.uniform(x.data(), params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    r.uniform(y.data(), params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    /* Copy to host */
    raft::update_host(h_x.data(), x.data(), params.n * params.batch_size, handle.get_stream());
    raft::update_host(h_y.data(), y.data(), params.n * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    /* Compute using tested prims; Broadcast is a template parameter, hence
     * the runtime branch over two instantiations */
    constexpr int BlockSize = 64;
    if (params.broadcast)
      block_dot_test_kernel<BlockSize, true>
        <<<params.batch_size, BlockSize, 0, handle.get_stream()>>>(
          params.n, x.data(), y.data(), dot_dev.data());
    else
      block_dot_test_kernel<BlockSize, false>
        <<<params.batch_size, BlockSize, 0, handle.get_stream()>>>(
          params.n, x.data(), y.data(), dot_dev.data());
    /* Compute reference results */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.n; i++) {
        h_dot_ref[bid] += h_x[bid * params.n + i] * h_y[bid * params.n + i];
      }
    }
    /* Check results */
    match = devArrMatchHost(h_dot_ref.data(),
                            dot_dev.data(),
                            params.batch_size,
                            MLCommon::CompareApprox<T>(params.eps),
                            handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  BlockDotInputs<T> params;
  // Comparison outcome consumed by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Field order: {broadcast, n, batch_size, eps, seed}; the last entry in each
// set covers the non-broadcast path.
const std::vector<BlockDotInputs<float>> dot_inputsf = {{true, 9, 20, 1e-4, 12345U},
                                                        {true, 65, 50, 1e-4, 12345U},
                                                        {true, 200, 100, 1e-4, 12345U},
                                                        {false, 200, 100, 1e-4, 12345U}};
const std::vector<BlockDotInputs<double>> dot_inputsd = {{true, 9, 20, 1e-4, 12345U},
                                                         {true, 65, 50, 1e-4, 12345U},
                                                         {true, 200, 100, 1e-4, 12345U},
                                                         {false, 200, 100, 1e-4, 12345U}};
typedef BlockDotTest<float> BlockDotTestF;
TEST_P(BlockDotTestF, Result) { EXPECT_TRUE(match); }
typedef BlockDotTest<double> BlockDotTestD;
TEST_P(BlockDotTestD, Result) { EXPECT_TRUE(match); }
INSTANTIATE_TEST_CASE_P(BlockDotTests, BlockDotTestF, ::testing::ValuesIn(dot_inputsf));
INSTANTIATE_TEST_CASE_P(BlockDotTests, BlockDotTestD, ::testing::ValuesIn(dot_inputsd));
/* x*A*x' */
/** Parameter set for the block x*A*x' quadratic-form tests. */
template <typename T>
struct BlockXaxtInputs {
  bool broadcast;              // whether the prim broadcasts the result to all threads
  bool preload;                // let the prim preload x into shared memory itself
  int n;                       // A is n x n, x has n elements
  int batch_size;              // one quadratic form per CUDA block
  T eps;                       // tolerance for the approximate comparison
  unsigned long long int seed; // RNG seed
};

/**
 * gtest failure printer. Previously wrote nothing (useless failure logs);
 * now prints every field.
 */
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const BlockXaxtInputs<T>& dims)
{
  os << "{broadcast=" << dims.broadcast << ", preload=" << dims.preload << ", n=" << dims.n
     << ", batch_size=" << dims.batch_size << ", eps=" << dims.eps << ", seed=" << dims.seed
     << "}";
  return os;
}
// Device wrapper for _block_xAxt (quadratic form x*A*x'), one batch member
// per CUDA block. Exercises both the preloading and the manually-staged
// shared-memory paths; the result is written by thread 0 (non-broadcast) or
// by the last thread (broadcast) to prove the broadcast reached everyone.
template <int BlockSize, bool Broadcast, typename T>
__global__ void block_xAxt_test_kernel(int n, const T* x, const T* A, T* d_res, bool preload)
{
  extern __shared__ char dyna_shared_mem[];
  T* shared_vec = (T*)dyna_shared_mem;
  __shared__ ReductionStorage<BlockSize, T> reduction_storage;
  T res_;
  if (preload) {
    res_ = _block_xAxt<BlockSize, Broadcast, true>(
      n, x + n * blockIdx.x, A + n * n * blockIdx.x, reduction_storage, shared_vec);
  } else {
    // Block-strided copy of this batch member's x into shared memory.
    for (int i = threadIdx.x; i < n; i += BlockSize) {
      shared_vec[i] = x[n * blockIdx.x + i];
    }
    __syncthreads();
    res_ = _block_xAxt<BlockSize, Broadcast, false>(
      n, shared_vec, A + n * n * blockIdx.x, reduction_storage);
  }
  if (!Broadcast && threadIdx.x == 0)
    d_res[blockIdx.x] = res_;
  else if (Broadcast && threadIdx.x == BlockSize - 1)
    d_res[blockIdx.x] = res_;
}
// Fixture: runs the batched block x*A*x' prim on random data and compares
// against a host reference (A read column-major).
template <typename T>
class BlockXaxtTest : public ::testing::TestWithParam<BlockXaxtInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<BlockXaxtInputs<T>>::GetParam();
    rmm::device_uvector<T> x(params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> A(params.n * params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> res_dev(params.batch_size, handle.get_stream());
    std::vector<T> h_x(params.n * params.batch_size);
    std::vector<T> h_A(params.n * params.n * params.batch_size);
    std::vector<T> h_res_ref(params.batch_size, (T)0);
    /* Generate random data on device */
    raft::random::Rng r(params.seed);
    r.uniform(x.data(), params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    r.uniform(A.data(), params.n * params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    /* Copy to host */
    raft::update_host(h_x.data(), x.data(), params.n * params.batch_size, handle.get_stream());
    raft::update_host(
      h_A.data(), A.data(), params.n * params.n * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    /* Compute using tested prims; dynamic shared memory holds the n-vector,
     * and Broadcast (a template parameter) is dispatched at runtime */
    constexpr int BlockSize = 64;
    int shared_mem_size     = params.n * sizeof(T);
    if (params.broadcast)
      block_xAxt_test_kernel<BlockSize, true>
        <<<params.batch_size, BlockSize, shared_mem_size, handle.get_stream()>>>(
          params.n, x.data(), A.data(), res_dev.data(), params.preload);
    else
      block_xAxt_test_kernel<BlockSize, false>
        <<<params.batch_size, BlockSize, shared_mem_size, handle.get_stream()>>>(
          params.n, x.data(), A.data(), res_dev.data(), params.preload);
    /* Compute reference results: res = sum_i (A x)_i * x_i */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.n; i++) {
        T acc = 0;
        for (int j = 0; j < params.n; j++) {
          acc += h_A[bid * params.n * params.n + j * params.n + i] * h_x[bid * params.n + j];
        }
        h_res_ref[bid] += acc * h_x[bid * params.n + i];
      }
    }
    /* Check results */
    match = devArrMatchHost(h_res_ref.data(),
                            res_dev.data(),
                            params.batch_size,
                            MLCommon::CompareApprox<T>(params.eps),
                            handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  BlockXaxtInputs<T> params;
  // Comparison outcome consumed by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Field order: {broadcast, preload, n, batch_size, eps, seed}. Note the looser
// float tolerance (1e-2): the quadratic form accumulates more rounding error.
const std::vector<BlockXaxtInputs<float>> xAxt_inputsf = {{true, true, 9, 20, 1e-2, 12345U},
                                                          {true, true, 65, 50, 1e-2, 12345U},
                                                          {true, true, 200, 100, 1e-2, 12345U},
                                                          {false, true, 200, 100, 1e-2, 12345U},
                                                          {true, false, 200, 100, 1e-2, 12345U}};
const std::vector<BlockXaxtInputs<double>> xAxt_inputsd = {{true, true, 9, 20, 1e-4, 12345U},
                                                           {true, true, 65, 50, 1e-4, 12345U},
                                                           {true, true, 200, 100, 1e-4, 12345U},
                                                           {false, true, 200, 100, 1e-4, 12345U},
                                                           {true, false, 200, 100, 1e-2, 12345U}};
typedef BlockXaxtTest<float> BlockXaxtTestF;
TEST_P(BlockXaxtTestF, Result) { EXPECT_TRUE(match); }
typedef BlockXaxtTest<double> BlockXaxtTestD;
TEST_P(BlockXaxtTestD, Result) { EXPECT_TRUE(match); }
INSTANTIATE_TEST_CASE_P(BlockXaxtTests, BlockXaxtTestF, ::testing::ValuesIn(xAxt_inputsf));
INSTANTIATE_TEST_CASE_P(BlockXaxtTests, BlockXaxtTestD, ::testing::ValuesIn(xAxt_inputsd));
/* y=alpha*x */
/** Parameter set for the block y = alpha * x scaling tests. */
template <typename T>
struct BlockAxInputs {
  int n;                       // vector length
  int batch_size;              // one scaled vector per CUDA block
  T eps;                       // tolerance for the approximate comparison
  unsigned long long int seed; // RNG seed
};

/**
 * gtest failure printer. Previously wrote nothing (useless failure logs);
 * now prints every field.
 */
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const BlockAxInputs<T>& dims)
{
  os << "{n=" << dims.n << ", batch_size=" << dims.batch_size << ", eps=" << dims.eps
     << ", seed=" << dims.seed << "}";
  return os;
}
// Device wrapper for _block_ax (y = alpha * x), one batch member per block.
template <typename T>
__global__ void block_ax_test_kernel(int n, T alpha, const T* x, T* y)
{
  _block_ax(n, alpha, x + n * blockIdx.x, y + n * blockIdx.x);
}
// Fixture: runs the batched block y = alpha * x prim on random vectors and
// compares against a trivial host reference.
template <typename T>
class BlockAxTest : public ::testing::TestWithParam<BlockAxInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<BlockAxInputs<T>>::GetParam();
    rmm::device_uvector<T> x(params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> y(params.n * params.batch_size, handle.get_stream());
    std::vector<T> h_x(params.n * params.batch_size);
    std::vector<T> h_y_ref(params.n * params.batch_size, (T)0);
    /* Generate random data on device */
    raft::random::Rng r(params.seed);
    r.uniform(x.data(), params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    /* Generate random alpha (host-side RNG) */
    std::default_random_engine generator(params.seed);
    std::uniform_real_distribution<T> distribution(-2.0, 2.0);
    T alpha = distribution(generator);
    /* Copy to host */
    raft::update_host(h_x.data(), x.data(), params.n * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    /* Compute using tested prims */
    constexpr int BlockSize = 64;
    block_ax_test_kernel<<<params.batch_size, BlockSize, 0, handle.get_stream()>>>(
      params.n, alpha, x.data(), y.data());
    /* Compute reference results */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.n; i++) {
        h_y_ref[bid * params.n + i] = alpha * h_x[bid * params.n + i];
      }
    }
    /* Check results */
    match = devArrMatchHost(h_y_ref.data(),
                            y.data(),
                            params.n * params.batch_size,
                            MLCommon::CompareApprox<T>(params.eps),
                            handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  BlockAxInputs<T> params;
  // Comparison outcome consumed by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Field order: {n, batch_size, eps, seed}.
const std::vector<BlockAxInputs<float>> ax_inputsf = {
  {9, 20, 1e-4, 12345U}, {65, 50, 1e-4, 12345U}, {200, 100, 1e-4, 12345U}};
const std::vector<BlockAxInputs<double>> ax_inputsd = {
  {9, 20, 1e-4, 12345U}, {65, 50, 1e-4, 12345U}, {200, 100, 1e-4, 12345U}};
typedef BlockAxTest<float> BlockAxTestF;
TEST_P(BlockAxTestF, Result) { EXPECT_TRUE(match); }
typedef BlockAxTest<double> BlockAxTestD;
TEST_P(BlockAxTestD, Result) { EXPECT_TRUE(match); }
INSTANTIATE_TEST_CASE_P(BlockAxTests, BlockAxTestF, ::testing::ValuesIn(ax_inputsf));
INSTANTIATE_TEST_CASE_P(BlockAxTests, BlockAxTestD, ::testing::ValuesIn(ax_inputsd));
/* Covariance stability */
/** Parameter set for the block covariance-stabilization tests. */
template <typename T>
struct BlockCovStabilityInputs {
  int n;                       // matrix side (matrices are n x n)
  int batch_size;              // one matrix per CUDA block
  T eps;                       // tolerance for the approximate comparison
  unsigned long long int seed; // RNG seed
};

/**
 * gtest failure printer. Previously wrote nothing (useless failure logs);
 * now prints every field.
 */
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const BlockCovStabilityInputs<T>& dims)
{
  os << "{n=" << dims.n << ", batch_size=" << dims.batch_size << ", eps=" << dims.eps
     << ", seed=" << dims.seed << "}";
  return os;
}
// Device wrapper for _block_covariance_stability, one n x n matrix per block.
template <typename CovPolicy, typename T>
__global__ void block_cov_stability_test_kernel(int n, const T* in, T* out)
{
  __shared__ CovStabilityStorage<CovPolicy, T> cov_stability_storage;
  _block_covariance_stability<CovPolicy>(
    n, in + n * n * blockIdx.x, out + n * n * blockIdx.x, cov_stability_storage);
}
// Fixture: runs the batched covariance-stabilization prim and compares
// against a host reference that symmetrizes the off-diagonal entries
// (averaging (i,j) and (j,i)) and takes the absolute value of the diagonal.
template <typename CovPolicy, typename T>
class BlockCovStabilityTest : public ::testing::TestWithParam<BlockCovStabilityInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<BlockCovStabilityInputs<T>>::GetParam();
    rmm::device_uvector<T> d_in(params.n * params.n * params.batch_size, handle.get_stream());
    rmm::device_uvector<T> d_out(params.n * params.n * params.batch_size, handle.get_stream());
    std::vector<T> h_in(params.n * params.n * params.batch_size);
    std::vector<T> h_out(params.n * params.n * params.batch_size);
    /* Generate random data on device */
    raft::random::Rng r(params.seed);
    r.uniform(
      d_in.data(), params.n * params.n * params.batch_size, (T)-2, (T)2, handle.get_stream());
    /* Copy to host */
    raft::update_host(
      h_in.data(), d_in.data(), params.n * params.n * params.batch_size, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    /* Compute using tested prims */
    block_cov_stability_test_kernel<CovPolicy>
      <<<params.batch_size, CovPolicy::BlockSize, 0, handle.get_stream()>>>(
        params.n, d_in.data(), d_out.data());
    /* Compute reference results: symmetrize off-diagonals, abs() on diagonal */
    for (int bid = 0; bid < params.batch_size; bid++) {
      for (int i = 0; i < params.n - 1; i++) {
        for (int j = i + 1; j < params.n; j++) {
          T val = 0.5 * (h_in[bid * params.n * params.n + j * params.n + i] +
                         h_in[bid * params.n * params.n + i * params.n + j]);
          h_out[bid * params.n * params.n + j * params.n + i] = val;
          h_out[bid * params.n * params.n + i * params.n + j] = val;
        }
      }
      for (int i = 0; i < params.n; i++) {
        h_out[bid * params.n * params.n + i * params.n + i] =
          abs(h_in[bid * params.n * params.n + i * params.n + i]);
      }
    }
    /* Check results */
    match = devArrMatchHost(h_out.data(),
                            d_out.data(),
                            params.n * params.n * params.batch_size,
                            MLCommon::CompareApprox<T>(params.eps),
                            handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  BlockCovStabilityInputs<T> params;
  // Comparison outcome consumed by the TEST_P bodies.
  testing::AssertionResult match = testing::AssertionFailure();
};
// Field order: {n, batch_size, eps, seed}; one typedef + TEST_P +
// instantiation per (BlockPolicy, precision).
const std::vector<BlockCovStabilityInputs<float>> cs_inputsf = {
  {15, 4, 1e-4, 12345U},
  {33, 10, 1e-4, 12345U},
  {220, 130, 1e-4, 12345U},
};
const std::vector<BlockCovStabilityInputs<double>> cs_inputsd = {
  {15, 4, 1e-4, 12345U},
  {33, 10, 1e-4, 12345U},
  {220, 130, 1e-4, 12345U},
};
typedef BlockCovStabilityTest<BlockPolicy<1, 1, 8, 4>, float> BlockCovStabilityTestF_1_1_8_4;
TEST_P(BlockCovStabilityTestF_1_1_8_4, Result) { EXPECT_TRUE(match); }
typedef BlockCovStabilityTest<BlockPolicy<1, 1, 8, 4>, double> BlockCovStabilityTestD_1_1_8_4;
TEST_P(BlockCovStabilityTestD_1_1_8_4, Result) { EXPECT_TRUE(match); }
typedef BlockCovStabilityTest<BlockPolicy<1, 4, 32, 8>, float> BlockCovStabilityTestF_1_4_32_8;
TEST_P(BlockCovStabilityTestF_1_4_32_8, Result) { EXPECT_TRUE(match); }
typedef BlockCovStabilityTest<BlockPolicy<1, 4, 32, 8>, double> BlockCovStabilityTestD_1_4_32_8;
TEST_P(BlockCovStabilityTestD_1_4_32_8, Result) { EXPECT_TRUE(match); }
INSTANTIATE_TEST_CASE_P(BlockCovStabilityTests,
                        BlockCovStabilityTestF_1_1_8_4,
                        ::testing::ValuesIn(cs_inputsf));
INSTANTIATE_TEST_CASE_P(BlockCovStabilityTests,
                        BlockCovStabilityTestD_1_1_8_4,
                        ::testing::ValuesIn(cs_inputsd));
INSTANTIATE_TEST_CASE_P(BlockCovStabilityTests,
                        BlockCovStabilityTestF_1_4_32_8,
                        ::testing::ValuesIn(cs_inputsf));
INSTANTIATE_TEST_CASE_P(BlockCovStabilityTests,
                        BlockCovStabilityTestD_1_4_32_8,
                        ::testing::ValuesIn(cs_inputsd));
} // namespace LinAlg
} // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/make_arima.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <thrust/count.h>
#include <thrust/device_vector.h>
#include "test_utils.h"
#include <raft/core/interruptible.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <random/make_arima.cuh>
namespace MLCommon {
namespace Random {
/* This test only proves that the generator runs without errors, not
* correctness! */
// Parameter set for the ARIMA dataset generator smoke test.
struct MakeArimaInputs {
  int batch_size, n_obs;               // number of series / observations per series
  int p, d, q, P, D, Q, s, k;          // ARIMA order: AR/diff/MA, seasonal AR/diff/MA, period, intercept flag
  raft::random::GeneratorType gtype;   // device RNG backend to use
  uint64_t seed;                       // RNG seed
};
// Fixture: only proves make_arima runs without CUDA errors (the TEST_P body
// just synchronizes); it does not validate the generated values.
template <typename T>
class MakeArimaTest : public ::testing::TestWithParam<MakeArimaInputs> {
 protected:
  // `data` is constructed against the default-initialized stream (0); the
  // real stream is created in SetUp.
  MakeArimaTest() : data(0, stream) {}

  void SetUp() override
  {
    params = ::testing::TestWithParam<MakeArimaInputs>::GetParam();
    // Scales of the different random components. The intercept scale shrinks
    // as the total differencing order (d + D) grows.
    T scale = 1.0, noise_scale = 0.2;
    T intercept_scale = params.d + params.D == 0 ? 1.0 : (params.d + params.D == 1 ? 0.2 : 0.01);
    ML::ARIMAOrder order = {
      params.p, params.d, params.q, params.P, params.D, params.Q, params.s, params.k};
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    data.resize(params.batch_size * params.n_obs, stream);
    // Create the time series dataset
    make_arima(data.data(),
               params.batch_size,
               params.n_obs,
               order,
               stream,
               scale,
               noise_scale,
               intercept_scale,
               params.seed,
               params.gtype);
  }

  // NOTE(review): the stream is destroyed in TearDown, but `data` still holds
  // the handle for its deallocation in the fixture destructor (which runs
  // after TearDown) — confirm this is safe with rmm's stream semantics.
  void TearDown() override { RAFT_CUDA_TRY(cudaStreamDestroy(stream)); }

 protected:
  MakeArimaInputs params;
  rmm::device_uvector<T> data;  // generated series, batch_size * n_obs values
  cudaStream_t stream = 0;
};
// Parameter sets: {batch_size, n_obs, p, d, q, P, D, Q, s, k, gtype, seed}
const std::vector<MakeArimaInputs> make_arima_inputs = {
  {100, 200, 1, 1, 2, 0, 0, 0, 0, 1, raft::random::GenPhilox, 1234ULL},
  {1000, 100, 3, 0, 0, 1, 1, 0, 4, 1, raft::random::GenPhilox, 1234ULL},
  {10000, 150, 2, 1, 2, 0, 1, 2, 4, 0, raft::random::GenPhilox, 1234ULL}};
typedef MakeArimaTest<float> MakeArimaTestF;
// Success criterion: generation and the subsequent synchronize complete
// without raising a CUDA error. No numerical check is performed.
TEST_P(MakeArimaTestF, Result) { raft::interruptible::synchronize(stream); }
INSTANTIATE_TEST_CASE_P(MakeArimaTests, MakeArimaTestF, ::testing::ValuesIn(make_arima_inputs));
typedef MakeArimaTest<double> MakeArimaTestD;
TEST_P(MakeArimaTestD, Result) { raft::interruptible::synchronize(stream); }
INSTANTIATE_TEST_CASE_P(MakeArimaTests, MakeArimaTestD, ::testing::ValuesIn(make_arima_inputs));
} // end namespace Random
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/test_utils.h | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <gtest/gtest.h>
#include <iostream>
#include <memory>
#include <raft/core/interruptible.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
/** Exact-equality comparator for devArrMatch and friends. */
template <typename T>
struct Compare {
  bool operator()(const T& lhs, const T& rhs) const
  {
    return lhs == rhs;
  }
};
/**
 * Relative-tolerance comparator: values match when their relative difference
 * is within eps (the absolute difference is used when it is already below
 * eps, which avoids dividing by a near-zero magnitude).
 */
template <typename T>
struct CompareApprox {
  CompareApprox(T eps_) : eps(eps_) {}
  bool operator()(const T& a, const T& b) const
  {
    T difference = abs(a - b);
    T magnitude  = std::max(abs(a), abs(b));
    T measure    = (difference < eps) ? difference : difference / magnitude;
    return measure <= eps;
  }

 private:
  T eps;
};
/**
 * Like CompareApprox but compares magnitudes only: |a| vs |b|, so values
 * that differ only in sign are considered equal.
 */
template <typename T>
struct CompareApproxAbs {
  CompareApproxAbs(T eps_) : eps(eps_) {}
  bool operator()(const T& a, const T& b) const
  {
    T difference = abs(abs(a) - abs(b));
    T magnitude  = std::max(abs(a), abs(b));
    T measure    = (difference < eps) ? difference : difference / magnitude;
    return measure <= eps;
  }

 private:
  T eps;
};
/** Generic absolute value; HDI (raft macro) makes it usable on host and device. */
template <typename T>
HDI T abs(const T& a)
{
  return a > T(0) ? a : -a;
}
/**
* @brief Helper function to compare 2 device n-D arrays with custom comparison
* @tparam T the data type of the arrays
* @tparam L the comparator lambda or object function
* @param expected expected value(s)
* @param actual actual values
* @param eq_compare the comparator
* @param stream cuda stream
* @return the testing assertion to be later used by ASSERT_TRUE/EXPECT_TRUE
* @{
*/
/** Element-wise comparison of two device arrays via a host round-trip. */
template <typename T, typename L>
testing::AssertionResult devArrMatch(
  const T* expected, const T* actual, size_t size, L eq_compare, cudaStream_t stream = 0)
{
  auto expected_host = std::make_unique<T[]>(size);
  auto actual_host   = std::make_unique<T[]>(size);
  raft::update_host<T>(expected_host.get(), expected, size, stream);
  raft::update_host<T>(actual_host.get(), actual, size, stream);
  raft::interruptible::synchronize(stream);
  for (size_t idx = 0; idx < size; ++idx) {
    const T exp_val = expected_host[idx];
    const T act_val = actual_host[idx];
    if (eq_compare(exp_val, act_val)) continue;
    return testing::AssertionFailure()
           << "actual=" << act_val << " != expected=" << exp_val << " @" << idx;
  }
  return testing::AssertionSuccess();
}
/** Compares every element of a device array against a single expected value. */
template <typename T, typename L>
testing::AssertionResult devArrMatch(
  T expected, const T* actual, size_t size, L eq_compare, cudaStream_t stream = 0)
{
  auto actual_host = std::make_unique<T[]>(size);
  raft::update_host<T>(actual_host.get(), actual, size, stream);
  raft::interruptible::synchronize(stream);
  for (size_t idx = 0; idx < size; ++idx) {
    const T act_val = actual_host[idx];
    if (eq_compare(expected, act_val)) continue;
    return testing::AssertionFailure()
           << "actual=" << act_val << " != expected=" << expected << " @" << idx;
  }
  return testing::AssertionSuccess();
}
/** 2D (row-major) element-wise comparison of two device matrices. */
template <typename T, typename L>
testing::AssertionResult devArrMatch(const T* expected,
                                     const T* actual,
                                     size_t rows,
                                     size_t cols,
                                     L eq_compare,
                                     cudaStream_t stream = 0)
{
  const size_t size  = rows * cols;
  auto expected_host = std::make_unique<T[]>(size);
  auto actual_host   = std::make_unique<T[]>(size);
  raft::update_host<T>(expected_host.get(), expected, size, stream);
  raft::update_host<T>(actual_host.get(), actual, size, stream);
  raft::interruptible::synchronize(stream);
  for (size_t r = 0; r < rows; ++r) {
    for (size_t c = 0; c < cols; ++c) {
      const size_t idx = r * cols + c;  // row major assumption!
      const T exp_val  = expected_host[idx];
      const T act_val  = actual_host[idx];
      if (eq_compare(exp_val, act_val)) continue;
      return testing::AssertionFailure()
             << "actual=" << act_val << " != expected=" << exp_val << " @" << r << "," << c;
    }
  }
  return testing::AssertionSuccess();
}
/** 2D (row-major) comparison of a device matrix against one expected value. */
template <typename T, typename L>
testing::AssertionResult devArrMatch(
  T expected, const T* actual, size_t rows, size_t cols, L eq_compare, cudaStream_t stream = 0)
{
  const size_t size = rows * cols;
  auto actual_host  = std::make_unique<T[]>(size);
  raft::update_host<T>(actual_host.get(), actual, size, stream);
  raft::interruptible::synchronize(stream);
  for (size_t r = 0; r < rows; ++r) {
    for (size_t c = 0; c < cols; ++c) {
      const size_t idx = r * cols + c;  // row major assumption!
      const T act_val  = actual_host[idx];
      if (eq_compare(expected, act_val)) continue;
      return testing::AssertionFailure()
             << "actual=" << act_val << " != expected=" << expected << " @" << r << "," << c;
    }
  }
  return testing::AssertionSuccess();
}
/**
* @brief Helper function to compare a device n-D arrays with an expected array
* on the host, using a custom comparison
* @tparam T the data type of the arrays
* @tparam L the comparator lambda or object function
* @param expected_h host array of expected value(s)
* @param actual_d device array actual values
* @param eq_compare the comparator
* @param stream cuda stream
* @return the testing assertion to be later used by ASSERT_TRUE/EXPECT_TRUE
*/
/**
 * Compares a device array against a host-resident expected array, reporting
 * ALL mismatching indices in one failure message (unlike devArrMatch, which
 * stops at the first mismatch).
 */
template <typename T, typename L>
testing::AssertionResult devArrMatchHost(
  const T* expected_h, const T* actual_d, size_t size, L eq_compare, cudaStream_t stream = 0)
{
  auto actual_host = std::make_unique<T[]>(size);
  raft::update_host<T>(actual_host.get(), actual_d, size, stream);
  raft::interruptible::synchronize(stream);
  bool mismatched = false;
  auto failure    = testing::AssertionFailure();
  for (size_t idx = 0; idx < size; ++idx) {
    const T exp_val = expected_h[idx];
    const T act_val = actual_host[idx];
    if (eq_compare(exp_val, act_val)) continue;
    mismatched = true;
    failure << "actual=" << act_val << " != expected=" << exp_val << " @" << idx << "; ";
  }
  if (mismatched) return failure;
  return testing::AssertionSuccess();
}
/**
* @brief Helper function to compare diagonal values of a 2D matrix
* @tparam T the data type of the arrays
* @tparam L the comparator lambda or object function
* @param expected expected value along diagonal
* @param actual actual matrix
* @param eq_compare the comparator
* @param stream cuda stream
* @return the testing assertion to be later used by ASSERT_TRUE/EXPECT_TRUE
*/
/**
 * Compares the diagonal entries of a row-major device matrix against a
 * single expected value.
 */
template <typename T, typename L>
testing::AssertionResult diagonalMatch(
  T expected, const T* actual, size_t rows, size_t cols, L eq_compare, cudaStream_t stream = 0)
{
  size_t size = rows * cols;
  std::unique_ptr<T[]> act_h(new T[size]);
  raft::update_host<T>(act_h.get(), actual, size, stream);
  raft::interruptible::synchronize(stream);
  // Walk only the min(rows, cols) diagonal entries. The previous version
  // iterated over the full rows*cols index space and skipped every
  // off-diagonal cell — an accidental O(rows*cols) loop.
  for (size_t i(0); i < rows && i < cols; ++i) {
    auto idx = i * cols + i;  // row major assumption!
    auto act = act_h.get()[idx];
    if (!eq_compare(expected, act)) {
      return testing::AssertionFailure()
             << "actual=" << act << " != expected=" << expected << " @" << i << "," << i;
    }
  }
  return testing::AssertionSuccess();
}
/** Scalar comparison helper returning a gtest assertion result. */
template <typename T, typename L>
testing::AssertionResult match(const T expected, T actual, L eq_compare)
{
  if (eq_compare(expected, actual)) { return testing::AssertionSuccess(); }
  return testing::AssertionFailure() << "actual=" << actual << " != expected=" << expected;
}
/** @} */
/**
 * Time `count` invocations of `func` using CUDA events and store the average
 * per-iteration time in milliseconds in `ms`.
 * Fixes over the previous version: the average is divided by the macro's own
 * `count` argument (it used to read `args.runs` from the caller's scope — a
 * hidden coupling that broke callers without an `args` variable and gave a
 * wrong average when `args.runs != count`), and both events are destroyed
 * instead of leaked.
 */
#define TIMEIT_LOOP(ms, count, func)                        \
  do {                                                      \
    cudaEvent_t start, stop;                                \
    RAFT_CUDA_TRY(cudaEventCreate(&start));                 \
    RAFT_CUDA_TRY(cudaEventCreate(&stop));                  \
    RAFT_CUDA_TRY(cudaEventRecord(start));                  \
    for (int i = 0; i < (count); ++i) {                     \
      func;                                                 \
    }                                                       \
    RAFT_CUDA_TRY(cudaEventRecord(stop));                   \
    RAFT_CUDA_TRY(cudaEventSynchronize(stop));              \
    ms = 0.f;                                               \
    RAFT_CUDA_TRY(cudaEventElapsedTime(&ms, start, stop));  \
    ms /= (count);                                          \
    RAFT_CUDA_TRY(cudaEventDestroy(start));                 \
    RAFT_CUDA_TRY(cudaEventDestroy(stop));                  \
  } while (0)
}; // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/sigmoid.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <functions/sigmoid.cuh>
#include <gtest/gtest.h>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
namespace MLCommon {
namespace Functions {
// Test parameters for the sigmoid prim.
template <typename T>
struct SigmoidInputs {
  T tolerance;  // max deviation accepted by CompareApproxAbs
  int len;      // element count; the fixture's host arrays hold exactly 4 values
};
// gtest parameter printer; intentionally prints nothing for these inputs.
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const SigmoidInputs<T>& inputs)
{
  return os;
}
/**
 * Fixture that runs the sigmoid prim on a fixed 4-element input and stores
 * the result next to a precomputed reference for the TEST_P bodies.
 *
 * All work runs on the default stream. The previous version created a
 * stream in SetUp() and destroyed it at the end of SetUp(), which left the
 * device_uvector members holding a destroyed stream handle for their later
 * deallocation in the fixture destructor.
 */
template <typename T>
class SigmoidTest : public ::testing::TestWithParam<SigmoidInputs<T>> {
 protected:
  SigmoidTest() : data(0, stream), result(0, stream), result_ref(0, stream) {}

  void SetUp() override
  {
    params  = ::testing::TestWithParam<SigmoidInputs<T>>::GetParam();
    int len = params.len;

    data.resize(len, stream);
    // Fixed-size host fixtures: every parameterization uses len == 4.
    // (The previous `T data_h[params.len] = {...}` initialized a
    // variable-length array, which is not standard C++.)
    T data_h[] = {2.1, -4.5, -0.34, 10.0};
    raft::update_device(data.data(), data_h, len, stream);

    result.resize(len, stream);
    result_ref.resize(len, stream);
    T result_ref_h[] = {0.89090318, 0.01098694, 0.41580948, 0.9999546};
    raft::update_device(result_ref.data(), result_ref_h, len, stream);

    sigmoid(result.data(), data.data(), len, stream);
  }

 protected:
  cudaStream_t stream = 0;  // default stream; see class comment
  SigmoidInputs<T> params;
  rmm::device_uvector<T> data, result, result_ref;
};
// {tolerance, len} — len must stay 4 to match the fixture's host arrays.
const std::vector<SigmoidInputs<float>> inputsf2 = {{0.001f, 4}};
const std::vector<SigmoidInputs<double>> inputsd2 = {{0.001, 4}};
typedef SigmoidTest<float> SigmoidTestValF;
// Compares sigmoid output against the precomputed reference values.
TEST_P(SigmoidTestValF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(result_ref.data(),
                                    result.data(),
                                    params.len,
                                    MLCommon::CompareApproxAbs<float>(params.tolerance)));
}
typedef SigmoidTest<double> SigmoidTestValD;
TEST_P(SigmoidTestValD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(result_ref.data(),
                                    result.data(),
                                    params.len,
                                    MLCommon::CompareApproxAbs<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(SigmoidTests, SigmoidTestValF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(SigmoidTests, SigmoidTestValD, ::testing::ValuesIn(inputsd2));
} // end namespace Functions
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/penalty.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <functions/penalty.cuh>
#include <gtest/gtest.h>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace Functions {
// Test parameters for the lasso/ridge/elasticnet penalty prims.
template <typename T>
struct PenaltyInputs {
  T tolerance;  // comparison tolerance for penalty values and gradients
  int len;      // input element count; the fixture's host arrays hold 4 values
};
/**
 * Fixture computing the lasso/ridge/elasticnet penalty values and their
 * gradients for a fixed 4-element input, alongside precomputed references
 * used by the TEST_P bodies.
 */
template <typename T>
class PenaltyTest : public ::testing::TestWithParam<PenaltyInputs<T>> {
 public:
  PenaltyTest()
    : params(::testing::TestWithParam<PenaltyInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      in(params.len, stream),
      out_lasso(1, stream),
      out_ridge(1, stream),
      out_elasticnet(1, stream),
      out_lasso_grad(params.len, stream),
      out_ridge_grad(params.len, stream),
      out_elasticnet_grad(params.len, stream),
      out_lasso_ref(1, stream),
      out_ridge_ref(1, stream),
      out_elasticnet_ref(1, stream),
      out_lasso_grad_ref(params.len, stream),
      out_ridge_grad_ref(params.len, stream),
      out_elasticnet_grad_ref(params.len, stream)
  {
  }

 protected:
  void SetUp() override
  {
    int len = params.len;

    // Fixed-size host fixtures: every parameterization uses len == 4.
    // (The previous `T h_in[len] = {...}` initialized variable-length
    // arrays, which is not standard C++.)
    T h_in[] = {0.1, 0.35, -0.9, -1.4};
    raft::update_device(in.data(), h_in, len, stream);

    T h_out_lasso_ref[1] = {1.65};
    raft::update_device(out_lasso_ref.data(), h_out_lasso_ref, 1, stream);
    T h_out_ridge_ref[1] = {1.741499};
    raft::update_device(out_ridge_ref.data(), h_out_ridge_ref, 1, stream);
    T h_out_elasticnet_ref[1] = {1.695749};
    raft::update_device(out_elasticnet_ref.data(), h_out_elasticnet_ref, 1, stream);

    T h_out_lasso_grad_ref[] = {0.6, 0.6, -0.6, -0.6};
    raft::update_device(out_lasso_grad_ref.data(), h_out_lasso_grad_ref, len, stream);
    T h_out_ridge_grad_ref[] = {0.12, 0.42, -1.08, -1.68};
    raft::update_device(out_ridge_grad_ref.data(), h_out_ridge_grad_ref, len, stream);
    T h_out_elasticnet_grad_ref[] = {0.36, 0.51, -0.84, -1.14};
    raft::update_device(out_elasticnet_grad_ref.data(), h_out_elasticnet_grad_ref, len, stream);

    T alpha    = 0.6;
    T l1_ratio = 0.5;

    // Penalty values...
    lasso(out_lasso.data(), in.data(), len, alpha, stream);
    ridge(out_ridge.data(), in.data(), len, alpha, stream);
    elasticnet(out_elasticnet.data(), in.data(), len, alpha, l1_ratio, stream);
    // ...and their gradients with respect to the input.
    lassoGrad(out_lasso_grad.data(), in.data(), len, alpha, stream);
    ridgeGrad(out_ridge_grad.data(), in.data(), len, alpha, stream);
    elasticnetGrad(out_elasticnet_grad.data(), in.data(), len, alpha, l1_ratio, stream);
  }

 protected:
  PenaltyInputs<T> params;
  raft::handle_t handle;  // owns the stream used for all device work
  cudaStream_t stream;
  rmm::device_uvector<T> in, out_lasso, out_ridge, out_elasticnet;
  rmm::device_uvector<T> out_lasso_ref, out_ridge_ref, out_elasticnet_ref;
  rmm::device_uvector<T> out_lasso_grad, out_ridge_grad, out_elasticnet_grad;
  rmm::device_uvector<T> out_lasso_grad_ref, out_ridge_grad_ref, out_elasticnet_grad_ref;
};
// {tolerance, len} — len must stay 4 to match the fixture's host arrays.
const std::vector<PenaltyInputs<float>> inputsf = {{0.01f, 4}};
const std::vector<PenaltyInputs<double>> inputsd = {{0.01, 4}};
typedef PenaltyTest<float> PenaltyTestF;
// Checks each penalty value and its gradient against the precomputed references.
TEST_P(PenaltyTestF, Result)
{
  ASSERT_TRUE(devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_lasso_grad_ref.data(),
                          out_lasso_grad.data(),
                          params.len,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_ridge_grad_ref.data(),
                          out_ridge_grad.data(),
                          params.len,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_ref.data(),
                          out_elasticnet.data(),
                          1,
                          MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_grad_ref.data(),
                          out_elasticnet_grad.data(),
                          params.len,
                          MLCommon::CompareApprox<float>(params.tolerance)));
}
typedef PenaltyTest<double> PenaltyTestD;
TEST_P(PenaltyTestD, Result)
{
  ASSERT_TRUE(devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_lasso_grad_ref.data(),
                          out_lasso_grad.data(),
                          params.len,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_ridge_grad_ref.data(),
                          out_ridge_grad.data(),
                          params.len,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_ref.data(),
                          out_elasticnet.data(),
                          1,
                          MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(devArrMatch(out_elasticnet_grad_ref.data(),
                          out_elasticnet_grad.data(),
                          params.len,
                          MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(PenaltyTests, PenaltyTestF, ::testing::ValuesIn(inputsf));
INSTANTIATE_TEST_CASE_P(PenaltyTests, PenaltyTestD, ::testing::ValuesIn(inputsd));
} // end namespace Functions
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/hinge.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <functions/hinge.cuh>
#include <gtest/gtest.h>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace Functions {
// Test parameters for the hinge-loss prims.
// n_rows/n_cols were previously declared as T (a floating-point type)
// although they are element counts: the fixture assigns them to `int`
// locals and passes them as integer dimension arguments. Declaring them as
// int keeps all existing initializers ({0.01f, 3, 2, 6}) valid.
template <typename T>
struct HingeLossInputs {
  T tolerance;  // comparison tolerance for loss values and gradients
  int n_rows;   // number of samples
  int n_cols;   // number of features / coefficients
  int len;      // total input elements (n_rows * n_cols)
};
/**
 * Fixture computing the hinge loss and its gradient for every penalty mode
 * (NONE, L1, L2, ELASTICNET) on a fixed 3x2 problem, alongside precomputed
 * reference values used by the TEST_P bodies.
 *
 * The input matrix `in` is re-uploaded between most calls — presumably
 * hingeLoss/hingeLossGrads modify it in place (TODO confirm against the
 * prim's implementation).
 */
template <typename T>
class HingeLossTest : public ::testing::TestWithParam<HingeLossInputs<T>> {
 public:
  HingeLossTest()
    : params(::testing::TestWithParam<HingeLossInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      in(params.len, stream),
      out(1, stream),
      out_lasso(1, stream),
      out_ridge(1, stream),
      out_elasticnet(1, stream),
      out_grad(params.n_cols, stream),
      out_lasso_grad(params.n_cols, stream),
      out_ridge_grad(params.n_cols, stream),
      out_elasticnet_grad(params.n_cols, stream),
      out_ref(1, stream),
      out_lasso_ref(1, stream),
      out_ridge_ref(1, stream),
      out_elasticnet_ref(1, stream),
      out_grad_ref(params.n_cols, stream),
      out_lasso_grad_ref(params.n_cols, stream),
      out_ridge_grad_ref(params.n_cols, stream),
      out_elasticnet_grad_ref(params.n_cols, stream)
  {
  }

 protected:
  void SetUp() override
  {
    int len = params.len;
    int n_rows = params.n_rows;
    int n_cols = params.n_cols;
    rmm::device_uvector<T> labels(params.n_rows, stream);
    rmm::device_uvector<T> coef(params.n_cols, stream);
    // Fixed 3x2 problem: input matrix, per-sample labels, coefficients.
    T h_in[len] = {0.1, 0.35, -0.9, -1.4, 2.0, 3.1};
    raft::update_device(in.data(), h_in, len, stream);
    T h_labels[n_rows] = {0.3, 2.0, -1.1};
    raft::update_device(labels.data(), h_labels, n_rows, stream);
    T h_coef[n_cols] = {0.35, -0.24};
    raft::update_device(coef.data(), h_coef, n_cols, stream);
    // Precomputed reference loss values for each penalty mode.
    T h_out_ref[1] = {2.6037};
    raft::update_device(out_ref.data(), h_out_ref, 1, stream);
    T h_out_lasso_ref[1] = {2.9577};
    raft::update_device(out_lasso_ref.data(), h_out_lasso_ref, 1, stream);
    T h_out_ridge_ref[1] = {2.71176};
    raft::update_device(out_ridge_ref.data(), h_out_ridge_ref, 1, stream);
    T h_out_elasticnet_ref[1] = {2.83473};
    raft::update_device(out_elasticnet_ref.data(), h_out_elasticnet_ref, 1, stream);
    // Precomputed reference gradients for each penalty mode.
    T h_out_grad_ref[n_cols] = {-0.24333, -1.1933};
    raft::update_device(out_grad_ref.data(), h_out_grad_ref, n_cols, stream);
    T h_out_lasso_grad_ref[n_cols] = {0.3566, -1.7933};
    raft::update_device(out_lasso_grad_ref.data(), h_out_lasso_grad_ref, n_cols, stream);
    T h_out_ridge_grad_ref[n_cols] = {0.1766, -1.4813};
    raft::update_device(out_ridge_grad_ref.data(), h_out_ridge_grad_ref, n_cols, stream);
    T h_out_elasticnet_grad_ref[n_cols] = {0.2666, -1.63733};
    raft::update_device(out_elasticnet_grad_ref.data(), h_out_elasticnet_grad_ref, n_cols, stream);
    T alpha = 0.6;
    T l1_ratio = 0.5;
    // No penalty: loss, re-upload input, gradient, re-upload.
    hingeLoss(handle,
              in.data(),
              params.n_rows,
              params.n_cols,
              labels.data(),
              coef.data(),
              out.data(),
              penalty::NONE,
              alpha,
              l1_ratio,
              stream);
    raft::update_device(in.data(), h_in, len, stream);
    hingeLossGrads(handle,
                   in.data(),
                   params.n_rows,
                   params.n_cols,
                   labels.data(),
                   coef.data(),
                   out_grad.data(),
                   penalty::NONE,
                   alpha,
                   l1_ratio,
                   stream);
    raft::update_device(in.data(), h_in, len, stream);
    // L1 penalty: loss, re-upload, gradient, re-upload.
    hingeLoss(handle,
              in.data(),
              params.n_rows,
              params.n_cols,
              labels.data(),
              coef.data(),
              out_lasso.data(),
              penalty::L1,
              alpha,
              l1_ratio,
              stream);
    raft::update_device(in.data(), h_in, len, stream);
    hingeLossGrads(handle,
                   in.data(),
                   params.n_rows,
                   params.n_cols,
                   labels.data(),
                   coef.data(),
                   out_lasso_grad.data(),
                   penalty::L1,
                   alpha,
                   l1_ratio,
                   stream);
    raft::update_device(in.data(), h_in, len, stream);
    // L2 penalty.
    // NOTE(review): unlike the NONE/L1 pairs above, `in` is NOT re-uploaded
    // between the L2 loss and L2 gradient calls (same for ELASTICNET below)
    // — confirm this asymmetry is intentional.
    hingeLoss(handle,
              in.data(),
              params.n_rows,
              params.n_cols,
              labels.data(),
              coef.data(),
              out_ridge.data(),
              penalty::L2,
              alpha,
              l1_ratio,
              stream);
    hingeLossGrads(handle,
                   in.data(),
                   params.n_rows,
                   params.n_cols,
                   labels.data(),
                   coef.data(),
                   out_ridge_grad.data(),
                   penalty::L2,
                   alpha,
                   l1_ratio,
                   stream);
    raft::update_device(in.data(), h_in, len, stream);
    // Elastic-net penalty.
    hingeLoss(handle,
              in.data(),
              params.n_rows,
              params.n_cols,
              labels.data(),
              coef.data(),
              out_elasticnet.data(),
              penalty::ELASTICNET,
              alpha,
              l1_ratio,
              stream);
    hingeLossGrads(handle,
                   in.data(),
                   params.n_rows,
                   params.n_cols,
                   labels.data(),
                   coef.data(),
                   out_elasticnet_grad.data(),
                   penalty::ELASTICNET,
                   alpha,
                   l1_ratio,
                   stream);
    raft::update_device(in.data(), h_in, len, stream);
  }

 protected:
  HingeLossInputs<T> params;
  raft::handle_t handle;  // owns the stream used for all device work
  cudaStream_t stream;
  rmm::device_uvector<T> in, out, out_lasso, out_ridge, out_elasticnet;
  rmm::device_uvector<T> out_ref, out_lasso_ref, out_ridge_ref, out_elasticnet_ref;
  rmm::device_uvector<T> out_grad, out_lasso_grad, out_ridge_grad, out_elasticnet_grad;
  rmm::device_uvector<T> out_grad_ref, out_lasso_grad_ref, out_ridge_grad_ref,
    out_elasticnet_grad_ref;
};
// {tolerance, n_rows, n_cols, len} — must stay 3x2 (len 6) to match the fixture.
const std::vector<HingeLossInputs<float>> inputsf = {{0.01f, 3, 2, 6}};
const std::vector<HingeLossInputs<double>> inputsd = {{0.01, 3, 2, 6}};
typedef HingeLossTest<float> HingeLossTestF;
// Checks loss values and gradients for every penalty mode against references.
TEST_P(HingeLossTestF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ref.data(), out.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_ref.data(),
                                    out_elasticnet.data(),
                                    1,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_grad_ref.data(),
                                    out_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_lasso_grad_ref.data(),
                                    out_lasso_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_ridge_grad_ref.data(),
                                    out_ridge_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_grad_ref.data(),
                                    out_elasticnet_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
}
typedef HingeLossTest<double> HingeLossTestD;
TEST_P(HingeLossTestD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ref.data(), out.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_lasso_ref.data(), out_lasso.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    out_ridge_ref.data(), out_ridge.data(), 1, MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_ref.data(),
                                    out_elasticnet.data(),
                                    1,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_grad_ref.data(),
                                    out_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_lasso_grad_ref.data(),
                                    out_lasso_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_ridge_grad_ref.data(),
                                    out_ridge_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
  ASSERT_TRUE(MLCommon::devArrMatch(out_elasticnet_grad_ref.data(),
                                    out_elasticnet_grad.data(),
                                    params.n_cols,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(HingeLossTests, HingeLossTestF, ::testing::ValuesIn(inputsf));
INSTANTIATE_TEST_CASE_P(HingeLossTests, HingeLossTestD, ::testing::ValuesIn(inputsd));
} // end namespace Functions
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/decoupled_lookback.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <decoupled_lookback.cuh>
#include <gtest/gtest.h>
#include <raft/core/interruptible.hpp>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
namespace MLCommon {
/**
 * Each block contributes 1 (from its last thread) to the decoupled look-back
 * scan, so block i's prefix should equal i; the last thread of each block
 * writes that prefix to out[blockIdx.x].
 * Note: the `len` parameter is currently unused.
 */
template <int TPB>
__global__ void dlbTestKernel(void* workspace, int len, int* out)
{
  DecoupledLookBack<int> dlb(workspace);
  int count = threadIdx.x == blockDim.x - 1 ? 1 : 0;
  auto prefix = dlb(count);
  if (threadIdx.x == blockDim.x - 1) out[blockIdx.x] = prefix;
}
void dlbTest(int len, int* out, cudaStream_t stream)
{
constexpr int TPB = 256;
int nblks = len;
size_t workspaceSize = DecoupledLookBack<int>::computeWorkspaceSize(nblks);
rmm::device_uvector<char> workspace(workspaceSize, stream);
RAFT_CUDA_TRY(cudaMemset(workspace.data(), 0, workspace.size()));
dlbTestKernel<TPB><<<nblks, TPB>>>(workspace.data(), len, out);
RAFT_CUDA_TRY(cudaPeekAtLastError());
}
// Test parameter: number of blocks (== number of scanned elements).
struct DlbInputs {
  int len;
};
// gtest parameter printer; intentionally prints nothing.
::std::ostream& operator<<(::std::ostream& os, const DlbInputs& dims) { return os; }
/** Fixture that runs the decoupled look-back scan on a dedicated stream. */
class DlbTest : public ::testing::TestWithParam<DlbInputs> {
 protected:
  DlbTest() : out(0, stream) {}

  void SetUp() override
  {
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    params  = ::testing::TestWithParam<DlbInputs>::GetParam();
    int len = params.len;
    out.resize(len, stream);
    dlbTest(len, out.data(), stream);
  }

  void TearDown() override
  {
    // Release the device buffer while its stream is still valid, then
    // destroy the stream. The previous version never destroyed the stream
    // created in SetUp(), leaking one stream per test case.
    out.resize(0, stream);
    out.shrink_to_fit(stream);
    RAFT_CUDA_TRY(cudaStreamSynchronize(stream));
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
    stream = 0;
  }

 protected:
  cudaStream_t stream = 0;
  DlbInputs params;
  rmm::device_uvector<int> out;
};
/** Checks that the device array equals the identity sequence 0, 1, ..., size-1. */
template <typename T, typename L>
::testing::AssertionResult devArrMatchCustom(const T* actual,
                                             size_t size,
                                             L eq_compare,
                                             cudaStream_t stream = 0)
{
  std::vector<T> host_copy(size);
  raft::update_host<T>(host_copy.data(), actual, size, stream);
  raft::interruptible::synchronize(stream);
  for (size_t idx = 0; idx < size; ++idx) {
    const T expected = (T)idx;
    if (eq_compare(expected, host_copy[idx])) continue;
    return ::testing::AssertionFailure()
           << "actual=" << host_copy[idx] << " != expected=" << expected << " @" << idx;
  }
  return ::testing::AssertionSuccess();
}
// Block counts to scan; each block must observe a prefix equal to its index.
const std::vector<DlbInputs> inputs = {{4}, {16}, {64}, {256}, {2048}};
TEST_P(DlbTest, Result)
{
  ASSERT_TRUE(devArrMatchCustom(out.data(), params.len, MLCommon::Compare<int>()));
}
INSTANTIATE_TEST_CASE_P(DlbTests, DlbTest, ::testing::ValuesIn(inputs));
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/knn_regression.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <gtest/gtest.h>
#include <raft/label/classlabels.cuh>
#include <raft/linalg/reduce.cuh>
#include <raft/random/rng.cuh>
#include <raft/spatial/knn/knn.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <selection/knn.cuh>
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
#include <thrust/extrema.h>
#include <iostream>
#include <vector>
namespace MLCommon {
namespace Selection {
// Test parameters for kNN regression.
struct KNNRegressionInputs {
  int rows;  // number of samples
  int cols;  // number of features
  // NOTE(review): n_labels and cluster_std are not referenced anywhere in
  // this test's logic — possibly leftovers from a blobs-based variant.
  int n_labels;
  float cluster_std;
  int k;  // number of neighbors
};
/**
 * Generate a synthetic regression dataset: uniform samples in [-1, 1] and,
 * as labels, the L2 norm of each sample row normalized to [0, 1].
 */
void generate_data(
  float* out_samples, float* out_labels, int n_rows, int n_cols, cudaStream_t stream)
{
  raft::random::Rng r(0ULL, raft::random::GenPC);

  r.uniform(out_samples, n_rows * n_cols, 0.0f, 1.0f, stream);

  // Map every sample coordinate from [0, 1] to [-1, 1]. The previous
  // version passed `n_rows` as the element count here, so only the first
  // n_rows of the n_rows * n_cols values were rescaled.
  raft::linalg::unaryOp<float>(
    out_samples,
    out_samples,
    n_rows * n_cols,
    [=] __device__(float input) { return 2 * input - 1; },
    stream);

  // Per-row L2 norm: sum of squares along each row, then sqrt.
  raft::linalg::reduce(
    out_labels,
    out_samples,
    n_cols,
    n_rows,
    0.0f,
    true,
    true,
    stream,
    false,
    [=] __device__(float in, int n) { return in * in; },
    raft::Sum<float>(),
    [=] __device__(float in) { return sqrt(in); });

  // Normalize labels into [0, 1] by the maximum norm.
  thrust::device_ptr<float> d_ptr = thrust::device_pointer_cast(out_labels);
  float max = *(thrust::max_element(thrust::cuda::par.on(stream), d_ptr, d_ptr + n_rows));

  raft::linalg::unaryOp<float>(
    out_labels, out_labels, n_rows, [=] __device__(float input) { return input / max; }, stream);
}
/**
 * Fixture: generates synthetic data, finds each training point's k nearest
 * neighbors within the training set itself, and predicts each point's label
 * as the aggregate of its neighbors' labels via knn_regress.
 */
class KNNRegressionTest : public ::testing::TestWithParam<KNNRegressionInputs> {
 public:
  KNNRegressionTest()
    : params(::testing::TestWithParam<KNNRegressionInputs>::GetParam()),
      stream(handle.get_stream()),
      train_samples(params.rows * params.cols, stream),
      train_labels(params.rows, stream),
      pred_labels(params.rows, stream),
      knn_indices(params.rows * params.k, stream),
      knn_dists(params.rows * params.k, stream)
  {
  }

 protected:
  void basicTest()
  {
    generate_data(train_samples.data(), train_labels.data(), params.rows, params.cols, stream);
    // Single-partition index over the training samples; queries are the
    // training samples themselves.
    std::vector<float*> ptrs(1);
    std::vector<int> sizes(1);
    ptrs[0] = train_samples.data();
    sizes[0] = params.rows;
    raft::spatial::knn::brute_force_knn(handle,
                                        ptrs,
                                        sizes,
                                        params.cols,
                                        train_samples.data(),
                                        params.rows,
                                        knn_indices.data(),
                                        knn_dists.data(),
                                        params.k);
    std::vector<float*> y;
    y.push_back(train_labels.data());
    knn_regress(
      handle, pred_labels.data(), knn_indices.data(), y, params.rows, params.rows, params.k);
    handle.sync_stream(stream);
  }

  void SetUp() override { basicTest(); }

 protected:
  raft::handle_t handle;  // owns the stream used for all device work
  cudaStream_t stream;
  KNNRegressionInputs params;
  rmm::device_uvector<float> train_samples;
  rmm::device_uvector<float> train_labels;
  rmm::device_uvector<float> pred_labels;
  rmm::device_uvector<int64_t> knn_indices;
  rmm::device_uvector<float> knn_dists;
};
typedef KNNRegressionTest KNNRegressionTestF;
// Predicted labels should approximate the true labels within a loose 0.3
// relative tolerance, since predictions aggregate neighboring labels.
TEST_P(KNNRegressionTestF, Fit)
{
  ASSERT_TRUE(devArrMatch(
    train_labels.data(), pred_labels.data(), params.rows, MLCommon::CompareApprox<float>(0.3)));
}
// {rows, cols, n_labels, cluster_std, k}
const std::vector<KNNRegressionInputs> inputsf = {{100, 10, 2, 0.01f, 2},
                                                  {1000, 10, 5, 0.01f, 2},
                                                  {10000, 10, 5, 0.01f, 2},
                                                  {100, 10, 2, 0.01f, 10},
                                                  {1000, 10, 5, 0.01f, 10},
                                                  {10000, 10, 5, 0.01f, 10},
                                                  {100, 10, 2, 0.01f, 15},
                                                  {1000, 10, 5, 0.01f, 15},
                                                  {10000, 10, 5, 0.01f, 15}};
INSTANTIATE_TEST_CASE_P(KNNRegressionTest, KNNRegressionTestF, ::testing::ValuesIn(inputsf));
}; // end namespace Selection
}; // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/device_utils.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <common/device_utils.cuh>
#include <gtest/gtest.h>
#include <raft/core/interruptible.hpp>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
namespace MLCommon {
/*
* Testing Methodology:
* 0. Testing with a kernel of only one block is enough to verify this prim
* 1. Assume that the threads in the block contain the following values:
* 0 1 2 .... NThreads - 1
* NThreads ......................
* ................................
* ...................... blockDim.x - 1
* 2. This means, the resulting output of batchedBlockReduce<int, NThreads>
* will be NThreads values and each of them is just a column-wise sum of
* the above matrix
* 3. Repeat this for different block dimensions
* 4. Repeat this for different values of NThreads
*/
/**
 * Test kernel for batchedBlockReduce.
 *
 * Each thread contributes its threadIdx.x; after the reduction, group 0
 * (the first NThreads threads) holds the column-wise sums described in the
 * methodology comment at the top of this file and writes them to `out`.
 *
 * @param out device buffer of NThreads reduced values
 */
template <int NThreads>
__global__ void batchedBlockReduceTestKernel(int* out)
{
  // Dynamic shared memory scratch space required by batchedBlockReduce.
  extern __shared__ char smem[];
  int val = threadIdx.x;
  // smem is already a char*; the previous reinterpret_cast<char*> was a no-op.
  val     = batchedBlockReduce<int, NThreads>(val, smem);
  int gid = threadIdx.x / NThreads;  // group id within the block
  int lid = threadIdx.x % NThreads;  // lane id within the group
  if (gid == 0) { out[lid] = val; }
}
/** Test parameters: the block dimension to launch the reduce kernel with. */
struct BatchedBlockReduceInputs {
  int blkDim;  // number of threads per block; must be a multiple of NThreads
};
/**
 * Launches the batched block-reduce test kernel on a single block and checks
 * the launch for errors. `out` receives the NThreads reduced values.
 */
template <int NThreads>
void batchedBlockReduceTest(int* out, const BatchedBlockReduceInputs& param, cudaStream_t stream)
{
  // One int of scratch per warp, per batch element of the reduction.
  const size_t sharedBytes = sizeof(int) * NThreads * (param.blkDim / raft::WarpSize);
  batchedBlockReduceTestKernel<NThreads><<<1, param.blkDim, sharedBytes, stream>>>(out);
  RAFT_CUDA_TRY(cudaGetLastError());
}
::std::ostream& operator<<(::std::ostream& os, const BatchedBlockReduceInputs& dims) { return os; }
/**
 * Fixture for the batched block-reduce test: allocates the device output and a
 * host-computed reference, runs the kernel in SetUp, and lets the TEST_P body
 * compare the two arrays.
 */
template <int NThreads>
class BatchedBlockReduceTest : public ::testing::TestWithParam<BatchedBlockReduceInputs> {
 protected:
  BatchedBlockReduceTest() : out(0, stream), refOut(0, stream) {}
  void SetUp() override
  {
    params = ::testing::TestWithParam<BatchedBlockReduceInputs>::GetParam();
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    out.resize(NThreads, stream);
    refOut.resize(NThreads, stream);
    // Zero both buffers before the kernel/reference write their NThreads values.
    RAFT_CUDA_TRY(cudaMemset(out.data(), 0, out.size() * sizeof(int)));
    RAFT_CUDA_TRY(cudaMemset(refOut.data(), 0, refOut.size() * sizeof(int)));
    computeRef();
    batchedBlockReduceTest<NThreads>(out.data(), params, stream);
  }
  void TearDown() override { RAFT_CUDA_TRY(cudaStreamDestroy(stream)); }
  // Computes the expected column-wise sums on the host (see the methodology
  // comment at the top of this file) and copies them into refOut.
  void computeRef()
  {
    int* ref    = new int[NThreads];
    int nGroups = params.blkDim / NThreads;
    for (int i = 0; i < NThreads; ++i) {
      ref[i] = 0;
      for (int j = 0; j < nGroups; ++j) {
        ref[i] += j * NThreads + i;
      }
    }
    raft::update_device(refOut.data(), ref, NThreads, stream);
    raft::interruptible::synchronize(stream);
    delete[] ref;
  }
 protected:
  BatchedBlockReduceInputs params;
  // Device result and host-derived reference, each of length NThreads.
  rmm::device_uvector<int> out, refOut;
  cudaStream_t stream = 0;
};
typedef BatchedBlockReduceTest<8> BBTest8;
typedef BatchedBlockReduceTest<16> BBTest16;
typedef BatchedBlockReduceTest<32> BBTest32;
// Block dimensions to test; each is a multiple of every NThreads used above.
const std::vector<BatchedBlockReduceInputs> inputs = {
  {32},
  {64},
  {128},
  {256},
  {512},
};
TEST_P(BBTest8, Result)
{
  ASSERT_TRUE(devArrMatch(refOut.data(), out.data(), 8, MLCommon::Compare<int>()));
}
// BBTest16 and BBTest32 were typedef'd but never registered, so NThreads = 16
// and 32 were silently untested; register and instantiate them as well.
TEST_P(BBTest16, Result)
{
  ASSERT_TRUE(devArrMatch(refOut.data(), out.data(), 16, MLCommon::Compare<int>()));
}
TEST_P(BBTest32, Result)
{
  ASSERT_TRUE(devArrMatch(refOut.data(), out.data(), 32, MLCommon::Compare<int>()));
}
INSTANTIATE_TEST_CASE_P(BatchedBlockReduceTests, BBTest8, ::testing::ValuesIn(inputs));
INSTANTIATE_TEST_CASE_P(BatchedBlockReduceTests, BBTest16, ::testing::ValuesIn(inputs));
INSTANTIATE_TEST_CASE_P(BatchedBlockReduceTests, BBTest32, ::testing::ValuesIn(inputs));
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/grid_sync.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <common/grid_sync.cuh>
#include <gtest/gtest.h>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
namespace MLCommon {
// Exercises GridSync: a designated master thread writes a flag into its output
// slot, all participating blocks synchronize, every thread reads the flag back,
// they synchronize again, and each thread atomically accumulates the value it
// read. The test body checks the resulting sum.
__global__ void gridSyncTestKernel(void* workspace, int* out, SyncType type)
{
  GridSync gs(workspace, type, true);
  bool master;
  int updatePosition;
  if (type == ACROSS_ALL) {
    // One master thread for the whole grid; a single shared output slot.
    master = threadIdx.x == 0 && threadIdx.y == 0 && threadIdx.z == 0 && blockIdx.x == 0 &&
             blockIdx.y == 0 && blockIdx.z == 0;
    updatePosition = 0;
  } else {
    // ACROSS_X: one master (and one output slot) per (blockIdx.y, blockIdx.z).
    master         = threadIdx.x == 0 && threadIdx.y == 0 && threadIdx.z == 0 && blockIdx.x == 0;
    updatePosition = blockIdx.y + blockIdx.z * gridDim.y;
  }
  if (master) {
    out[updatePosition] = 1;
    __threadfence();  // make the write visible to other blocks before syncing
  }
  gs.sync();
  int val = out[updatePosition];
  // make sure everybody has read the updated value!
  gs.sync();
  raft::myAtomicAdd(out + updatePosition, val);
}
/** Test parameters for the grid synchronization tests. */
struct GridSyncInputs {
  dim3 gridDim, blockDim;    // launch configuration for the test kernel
  bool checkWorkspaceReuse;  // if true, run the kernel twice on the same workspace
  SyncType type;             // ACROSS_ALL or ACROSS_X
};
void gridSyncTest(int* out, int* out1, const GridSyncInputs& params, cudaStream_t stream)
{
size_t workspaceSize = GridSync::computeWorkspaceSize(params.gridDim, params.type, true);
rmm::device_uvector<char> workspace(workspaceSize, stream);
RAFT_CUDA_TRY(cudaMemset(workspace.data(), 0, workspace.size()));
gridSyncTestKernel<<<params.gridDim, params.blockDim>>>(workspace.data(), out, params.type);
RAFT_CUDA_TRY(cudaPeekAtLastError());
if (params.checkWorkspaceReuse) {
RAFT_CUDA_TRY(cudaDeviceSynchronize());
gridSyncTestKernel<<<params.gridDim, params.blockDim>>>(workspace.data(), out1, params.type);
RAFT_CUDA_TRY(cudaPeekAtLastError());
}
}
::std::ostream& operator<<(::std::ostream& os, const GridSyncInputs& dims) { return os; }
/**
 * Fixture: runs gridSyncTest in SetUp; the TEST_P body checks that every
 * output slot ends up at (#blocks syncing on it) * (threads per block) + 1.
 *
 * NOTE(review): the stream created in SetUp is never destroyed (no TearDown);
 * presumably harmless for a test binary, but verify whether a TearDown should
 * synchronize and release it.
 */
class GridSyncTest : public ::testing::TestWithParam<GridSyncInputs> {
 protected:
  GridSyncTest() : out(0, stream), out1(0, stream) {}
  void SetUp() override
  {
    params     = ::testing::TestWithParam<GridSyncInputs>::GetParam();
    size_t len = computeOutLen();
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    out.resize(len, stream);
    out1.resize(len, stream);
    gridSyncTest(out.data(), out1.data(), params, stream);
  }
  // Number of independent output slots: 1 for ACROSS_ALL, one per
  // (gridDim.y, gridDim.z) pair for ACROSS_X.
  size_t computeOutLen() const
  {
    size_t len;
    if (params.type == ACROSS_ALL) {
      len = 1;
    } else {
      len = params.gridDim.y * params.gridDim.z;
    }
    return len;
  }
 protected:
  cudaStream_t stream = 0;
  GridSyncInputs params;
  // Results of the first and (optional) second kernel launch.
  rmm::device_uvector<int> out, out1;
};
// Covers both sync types, several grid/block shapes, and workspace reuse.
const std::vector<GridSyncInputs> inputs = {
  {{2, 1, 1}, {32, 1, 1}, false, ACROSS_ALL}, {{2, 1, 1}, {32, 2, 1}, false, ACROSS_ALL},
  {{2, 1, 1}, {32, 2, 4}, false, ACROSS_ALL}, {{2, 1, 1}, {32, 1, 1}, true, ACROSS_ALL},
  {{2, 1, 1}, {32, 2, 1}, true, ACROSS_ALL},  {{2, 1, 1}, {32, 2, 4}, true, ACROSS_ALL},
  {{2, 1, 1}, {32, 1, 1}, false, ACROSS_X},   {{2, 2, 1}, {32, 1, 1}, false, ACROSS_X},
  {{2, 2, 2}, {32, 1, 1}, false, ACROSS_X},   {{2, 1, 1}, {32, 2, 1}, false, ACROSS_X},
  {{2, 2, 1}, {32, 2, 1}, false, ACROSS_X},   {{2, 2, 2}, {32, 2, 1}, false, ACROSS_X},
  {{2, 1, 1}, {32, 2, 4}, false, ACROSS_X},   {{2, 2, 1}, {32, 2, 4}, false, ACROSS_X},
  {{2, 2, 2}, {32, 2, 4}, false, ACROSS_X},   {{32, 256, 1}, {1, 1, 1}, false, ACROSS_X},
  {{2, 1, 1}, {32, 1, 1}, true, ACROSS_X},    {{2, 2, 1}, {32, 1, 1}, true, ACROSS_X},
  {{2, 2, 2}, {32, 1, 1}, true, ACROSS_X},    {{2, 1, 1}, {32, 2, 1}, true, ACROSS_X},
  {{2, 2, 1}, {32, 2, 1}, true, ACROSS_X},    {{2, 2, 2}, {32, 2, 1}, true, ACROSS_X},
  {{2, 1, 1}, {32, 2, 4}, true, ACROSS_X},    {{2, 2, 1}, {32, 2, 4}, true, ACROSS_X},
  {{2, 2, 2}, {32, 2, 4}, true, ACROSS_X},    {{32, 256, 1}, {1, 1, 1}, true, ACROSS_X}};
TEST_P(GridSyncTest, Result)
{
  size_t len = computeOutLen();
  // number of blocks raft::myAtomicAdd'ing the same location
  int nblks    = params.type == ACROSS_X ? params.gridDim.x
                                         : params.gridDim.x * params.gridDim.y * params.gridDim.z;
  int nthreads = params.blockDim.x * params.blockDim.y * params.blockDim.z;
  // Every participating thread adds the flag value (1); the +1 is the
  // master's initial write of the flag itself.
  int expected = (nblks * nthreads) + 1;
  ASSERT_TRUE(MLCommon::devArrMatch(expected, out.data(), len, MLCommon::Compare<int>()));
  if (params.checkWorkspaceReuse) {
    ASSERT_TRUE(MLCommon::devArrMatch(expected, out1.data(), len, MLCommon::Compare<int>()));
  }
}
INSTANTIATE_TEST_CASE_P(GridSyncTests, GridSyncTest, ::testing::ValuesIn(inputs));
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/eltwise2d.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <gtest/gtest.h>
#include <linalg/eltwise2d.cuh>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
namespace MLCommon {
namespace LinAlg {
/**
 * Reference kernel: d[i,j] = alpha * (a[i] + b[j] + d[i,j]) + beta * c[i,j],
 * with the beta term skipped when beta == 0. One thread per element of the
 * rows x cols output, row-major with `cols` as the fast dimension.
 */
template <typename Type>
__global__ void naiveEltwise2DAddKernel(int rows,
                                        int cols,
                                        const Type* aPtr,
                                        const Type* bPtr,
                                        const Type* cPtr,
                                        Type* dPtr,
                                        Type alpha,
                                        Type beta)
{
  const auto idx = blockIdx.x * blockDim.x + threadIdx.x;
  if (idx >= cols * rows) { return; }
  const auto col = idx % cols;
  const auto row = idx / cols;
  Type result = alpha * (aPtr[row] + bPtr[col] + dPtr[idx]);
  if (beta) { result += beta * cPtr[idx]; }
  dPtr[idx] = result;
}
/// Host-side launcher for the reference element-wise 2D add kernel.
template <typename Type>
void naiveEltwise2DAdd(int rows,
                       int cols,
                       const Type* aPtr,
                       const Type* bPtr,
                       const Type* cPtr,
                       Type* dPtr,
                       Type alpha,
                       Type beta,
                       cudaStream_t stream)
{
  constexpr int TPB   = 64;  // threads per block
  const int numBlocks = raft::ceildiv(rows * cols, TPB);
  naiveEltwise2DAddKernel<Type>
    <<<numBlocks, TPB, 0, stream>>>(rows, cols, aPtr, bPtr, cPtr, dPtr, alpha, beta);
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
/** Test parameters for the element-wise 2D add tests. */
template <typename T>
struct Eltwise2dInputs {
  T tolerance;  // allowed absolute/relative error in the comparison
  int w;        // output width (columns)
  int h;        // output height (rows)
  unsigned long long int seed;  // RNG seed for reproducible inputs
};
// gtest requires a stream operator for parameter printing; nothing useful to print.
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const Eltwise2dInputs<T>& dims)
{
  return os;
}
template <typename Type>
void WrapperEltwise2d(int rows,
int cols,
const Type* aPtr,
const Type* bPtr,
const Type* cPtr,
Type* dPtr,
Type alpha,
Type beta)
{
auto op_ = [] __device__(Type a, Type b, Type c) { return a + b + c; };
eltwise2D<Type>(rows, cols, aPtr, bPtr, cPtr, dPtr, alpha, beta, op_, 0);
}
/**
 * Fixture: computes the element-wise 2D add with both the naive reference
 * kernel and the eltwise2D prim, storing both results for comparison.
 *
 * NOTE(review): the reference runs on `stream` while the prim under test runs
 * on the default stream (WrapperEltwise2d hard-codes 0); the stream is then
 * destroyed at the end of SetUp even though the member device_uvectors outlive
 * it. Presumably deallocation falls back to a synchronous free — verify
 * against the rmm memory resource in use.
 */
template <typename T>
class Eltwise2dTest : public ::testing::TestWithParam<Eltwise2dInputs<T>> {
 protected:
  Eltwise2dTest() : out_ref(0, stream), out(0, stream) {}
  void SetUp() override
  {
    params = ::testing::TestWithParam<Eltwise2dInputs<T>>::GetParam();
    raft::random::Rng r(params.seed);
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    auto w   = params.w;
    auto h   = params.h;
    auto len = w * h;
    // Row vector (length h) and column vector (length w) inputs.
    rmm::device_uvector<T> in1(h, stream);
    rmm::device_uvector<T> in2(w, stream);
    out_ref.resize(len, stream);
    out.resize(len, stream);
    r.uniform(in1.data(), h, T(-1.0), T(1.0), stream);
    r.uniform(in2.data(), w, T(-1.0), T(1.0), stream);
    // Both calls use the output buffer as c and d (in-place accumulate).
    naiveEltwise2DAdd(
      h, w, in1.data(), in2.data(), out_ref.data(), out_ref.data(), (T)1, (T)1, stream);
    WrapperEltwise2d<T>(h, w, in1.data(), in2.data(), out.data(), out.data(), (T)1, (T)1);
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }
 protected:
  cudaStream_t stream = 0;
  Eltwise2dInputs<T> params;
  rmm::device_uvector<T> out_ref, out;  // reference and tested results
};
// One 1024x1024 case per precision; double gets a tighter tolerance.
const std::vector<Eltwise2dInputs<float>> inputsf2 = {{0.000001f, 1024, 1024, 1234ULL}};
const std::vector<Eltwise2dInputs<double>> inputsd2 = {{0.00000001, 1024, 1024, 1234ULL}};
typedef Eltwise2dTest<float> Eltwise2dTestF;
TEST_P(Eltwise2dTestF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(out_ref.data(),
                                    out.data(),
                                    params.w * params.h,
                                    MLCommon::CompareApprox<float>(params.tolerance)));
}
typedef Eltwise2dTest<double> Eltwise2dTestD;
TEST_P(Eltwise2dTestD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(out_ref.data(),
                                    out.data(),
                                    params.w * params.h,
                                    MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(Eltwise2dTests, Eltwise2dTestF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(Eltwise2dTests, Eltwise2dTestD, ::testing::ValuesIn(inputsd2));
} // end namespace LinAlg
} // end namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/prims/fast_int_div.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "test_utils.h"
#include <common/fast_int_div.cuh>
#include <gtest/gtest.h>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
namespace MLCommon {
/**
 * Validates FastIntDiv against the native / and % operators on the host for
 * random positive divisors, covering both positive and negative numerators.
 *
 * Fixes: the inner loop shadowed the outer loop variable `i`, and the fourth
 * check repeated the positive-modulo test instead of exercising modulo with a
 * negative numerator (mirroring the negative-division check).
 */
TEST(FastIntDiv, CpuTest)
{
  for (int iter = 0; iter < 100; ++iter) {
    // get a positive divisor
    int divisor;
    do {
      divisor = rand();
    } while (divisor <= 0);
    FastIntDiv fid(divisor);
    // run it against a few random numbers and compare the outputs
    for (int rep = 0; rep < 10000; ++rep) {
      auto num      = rand();
      auto correct  = num / divisor;
      auto computed = num / fid;
      ASSERT_EQ(correct, computed) << " divisor=" << divisor << " num=" << num;
      num      = rand();
      correct  = num % divisor;
      computed = num % fid;
      ASSERT_EQ(correct, computed) << " divisor=" << divisor << " num=" << num;
      // division with a negative numerator
      num      = -num;
      correct  = num / divisor;
      computed = num / fid;
      ASSERT_EQ(correct, computed) << " divisor=" << divisor << " num=" << num;
      // modulo with a negative numerator
      num      = -rand();
      correct  = num % divisor;
      computed = num % fid;
      ASSERT_EQ(correct, computed) << " divisor=" << divisor << " num=" << num;
    }
  }
}
// Computes num % divisor for both +in[tid] and -in[tid], once with FastIntDiv
// (`computed`) and once with the native operator (`correct`), so the host can
// compare the two arrays element-wise.
__global__ void fastIntDivTestKernel(
  int* computed, int* correct, const int* in, FastIntDiv fid, int divisor, int len)
{
  auto tid = threadIdx.x + blockIdx.x * blockDim.x;
  if (tid < len) {
    computed[tid] = in[tid] % fid;
    correct[tid] = in[tid] % divisor;
    computed[len + tid] = -in[tid] % fid;  // negative numerators in the upper half
    correct[len + tid] = -in[tid] % divisor;
  }
}
/**
 * Validates FastIntDiv's device-side modulo against the native operator for
 * random positive divisors and both numerator signs.
 *
 * Fixes: the created stream was leaked (never destroyed), and the kernel was
 * launched and synchronized on the default stream while the input copy used
 * the created stream; everything now runs on the same stream.
 */
TEST(FastIntDiv, GpuTest)
{
  cudaStream_t stream = 0;
  RAFT_CUDA_TRY(cudaStreamCreate(&stream));
  static const int len = 100000;
  static const int TPB = 128;
  {
    // Scoped so the device buffers are released before the stream is destroyed.
    rmm::device_uvector<int> computed(len * 2, stream);
    rmm::device_uvector<int> correct(len * 2, stream);
    rmm::device_uvector<int> in(len, stream);
    for (int iter = 0; iter < 100; ++iter) {
      // get a positive divisor
      int divisor;
      do {
        divisor = rand();
      } while (divisor <= 0);
      FastIntDiv fid(divisor);
      // run it against a few random numbers and compare the outputs
      std::vector<int> h_in(len);
      for (int i = 0; i < len; ++i) {
        h_in[i] = rand();
      }
      raft::update_device(in.data(), h_in.data(), len, stream);
      int nblks = raft::ceildiv(len, TPB);
      fastIntDivTestKernel<<<nblks, TPB, 0, stream>>>(
        computed.data(), correct.data(), in.data(), fid, divisor, len);
      RAFT_CUDA_TRY(cudaStreamSynchronize(stream));
      ASSERT_TRUE(devArrMatch(correct.data(), computed.data(), len * 2, MLCommon::Compare<int>()))
        << " divisor=" << divisor;
    }
  }
  RAFT_CUDA_TRY(cudaStreamDestroy(stream));
}
/// Helper that exercises the FastIntDiv constructor with a runtime value
/// (used below to check that invalid divisors throw).
FastIntDiv dummyFunc(int num) { return FastIntDiv(num); }
// Non-positive divisors are invalid: construction must throw raft::exception.
TEST(FastIntDiv, IncorrectUsage)
{
  ASSERT_THROW(dummyFunc(-1), raft::exception);
  ASSERT_THROW(dummyFunc(0), raft::exception);
}
} // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test/prims | rapidsai_public_repos/cuml/cpp/test/prims/batched/csr.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <linalg_naive.h>
#include <test_utils.h>
#include <linalg/batched/matrix.cuh>
#include <sparse/batched/csr.cuh>
#include <raft/util/cudart_utils.hpp>
#include <gtest/gtest.h>
#include <cstddef>
#include <random>
#include <vector>
namespace MLCommon {
namespace Sparse {
namespace Batched {
enum CSROperation { SpMV_op, SpMM_op };
/** Test parameters for the batched CSR SpMV/SpMM tests. */
template <typename T>
struct CSRInputs {
  CSROperation operation;  // SpMV_op or SpMM_op
  int batch_size;
  int m;  // Dimensions of A
  int n;
  int nnz;  // Number of non-zero elements in A
  int p;    // Dimensions of B or x
  int q;
  T alpha;  // Scalars
  T beta;
  T tolerance;  // comparison tolerance against the dense reference
};
/**
 * Fixture for batched CSR operations (SpMV / SpMM).
 *
 * SetUp generates a random batched sparse matrix A (one sparsity pattern
 * shared across the batch), random dense operands, runs the batched sparse
 * primitive under test, and computes a dense reference result on the host
 * with LinAlg::Naive::matMul for comparison in the TEST_P body.
 *
 * Fix: the non-zero positions were drawn from a std::uniform_real_distribution
 * and truncated on assignment to int, which biased the selection and made the
 * last position (m * n - 1) essentially unreachable; an integer distribution
 * is used instead.
 */
template <typename T>
class CSRTest : public ::testing::TestWithParam<CSRInputs<T>> {
 protected:
  void SetUp() override
  {
    using std::vector;
    params = ::testing::TestWithParam<CSRInputs<T>>::GetParam();

    // Check if the dimensions are valid and compute the output dimensions
    int m_r{};
    int n_r{};
    switch (params.operation) {
      case SpMV_op:
        ASSERT_TRUE(params.n == params.p);
        ASSERT_TRUE(params.q == 1);
        m_r = params.m;
        n_r = 1;
        break;
      case SpMM_op:
        ASSERT_TRUE(params.n == params.p);
        m_r = params.m;
        n_r = params.q;
        break;
    }

    // Create test matrices/vectors
    std::vector<T> A;
    std::vector<T> Bx;
    A.resize(params.batch_size * params.m * params.n, (T)0.0);
    Bx.resize(params.batch_size * params.p * params.q);
    std::random_device rd;
    std::mt19937 gen(rd());
    // Integer distribution over flat positions of A (see class comment).
    std::uniform_int_distribution<int> idis(0, params.m * params.n - 1);
    std::uniform_real_distribution<T> udis(-1.0, 3.0);

    // Generate a random sparse matrix (with dense representation): draw nnz
    // distinct positions, then fill those positions in every batch member.
    std::vector<bool> mask = std::vector<bool>(params.m * params.n, false);
    for (int idx = 0; idx < params.nnz; idx++) {
      int k;
      do {
        k = idis(gen);
      } while (mask[k]);
      mask[k] = true;
      int i   = k % params.m;  // column-major flattening: row index
      int j   = k / params.m;  // column index
      for (int bid = 0; bid < params.batch_size; bid++) {
        A[bid * params.m * params.n + j * params.m + i] = udis(gen);
      }
    }

    // Generate random dense matrices/vectors; res_h is also randomized so the
    // beta term of the GEMM-like update is exercised.
    for (std::size_t i = 0; i < Bx.size(); i++)
      Bx[i] = udis(gen);
    res_h.resize(params.batch_size * m_r * n_r);
    for (std::size_t i = 0; i < res_h.size(); i++)
      res_h[i] = udis(gen);

    // Create handles, stream
    RAFT_CUBLAS_TRY(cublasCreate(&handle));
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    RAFT_CUSOLVER_TRY(cusolverSpCreate(&cusolverSpHandle));

    // Created batched dense matrices
    LinAlg::Batched::Matrix<T> AbM(params.m, params.n, params.batch_size, handle, stream);
    LinAlg::Batched::Matrix<T> BxbM(params.p, params.q, params.batch_size, handle, stream);

    // Create matrix that will hold the results
    res_bM = new LinAlg::Batched::Matrix<T>(m_r, n_r, params.batch_size, handle, stream);

    // Copy the data to the device
    raft::update_device(AbM.raw_data(), A.data(), A.size(), stream);
    raft::update_device(BxbM.raw_data(), Bx.data(), Bx.size(), stream);
    raft::update_device(res_bM->raw_data(), res_h.data(), res_h.size(), stream);

    // Create sparse matrix A from the dense A and the mask
    CSR<T> AbS = CSR<T>::from_dense(AbM, mask, cusolverSpHandle);

    // Compute the tested results
    switch (params.operation) {
      case SpMV_op: b_spmv(params.alpha, AbS, BxbM, params.beta, *res_bM); break;
      case SpMM_op: b_spmm(params.alpha, AbS, BxbM, params.beta, *res_bM); break;
    }

    // Compute the expected results on the host from the dense representation
    switch (params.operation) {
      case SpMV_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          LinAlg::Naive::matMul(res_h.data() + bid * m_r,
                                A.data() + bid * params.m * params.n,
                                Bx.data() + bid * params.p,
                                params.m,
                                params.n,
                                1,
                                params.alpha,
                                params.beta);
        }
        break;
      case SpMM_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          LinAlg::Naive::matMul(res_h.data() + bid * m_r * n_r,
                                A.data() + bid * params.m * params.n,
                                Bx.data() + bid * params.p * params.q,
                                params.m,
                                params.n,
                                params.q,
                                params.alpha,
                                params.beta);
        }
        break;
    }
    raft::interruptible::synchronize(stream);
  }

  void TearDown() override
  {
    delete res_bM;
    RAFT_CUBLAS_TRY(cublasDestroy(handle));
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
    RAFT_CUSOLVER_TRY(cusolverSpDestroy(cusolverSpHandle));
  }

 protected:
  CSRInputs<T> params;
  LinAlg::Batched::Matrix<T>* res_bM;  // device result of the tested primitive
  std::vector<T> res_h;                // host reference result
  cublasHandle_t handle;
  cusolverSpHandle_t cusolverSpHandle;
  cudaStream_t stream = 0;
};
// Test parameters (op, batch_size, m, n, nnz, p, q, alpha, beta, tolerance)
const std::vector<CSRInputs<double>> inputsd = {{SpMV_op, 1, 90, 150, 440, 150, 1, 1.0, 0.0, 1e-6},
                                                {SpMV_op, 5, 13, 12, 75, 12, 1, -1.0, 1.0, 1e-6},
                                                {SpMV_op, 15, 8, 4, 6, 4, 1, 0.5, 0.5, 1e-6},
                                                {SpMV_op, 33, 7, 7, 23, 7, 1, -0.5, -0.5, 1e-6},
                                                {SpMM_op, 1, 20, 15, 55, 15, 30, 1.0, 0.0, 1e-6},
                                                {SpMM_op, 9, 10, 9, 31, 9, 11, -1.0, 0.5, 1e-6},
                                                {SpMM_op, 20, 7, 12, 11, 12, 13, 0.5, 0.5, 1e-6}};
// Test parameters (op, batch_size, m, n, nnz, p, q, alpha, beta, tolerance)
const std::vector<CSRInputs<float>> inputsf = {{SpMV_op, 1, 90, 150, 440, 150, 1, 1.0f, 0.0f, 1e-2},
                                               {SpMV_op, 5, 13, 12, 75, 12, 1, -1.0f, 1.0f, 1e-2},
                                               {SpMV_op, 15, 8, 4, 6, 4, 1, 0.5f, 0.5f, 1e-2},
                                               {SpMV_op, 33, 7, 7, 23, 7, 1, -0.5f, -0.5f, 1e-2},
                                               {SpMM_op, 1, 20, 15, 55, 15, 30, 1.0f, 0.0f, 1e-2},
                                               {SpMM_op, 9, 10, 9, 31, 9, 11, -1.0f, 0.5f, 1e-2},
                                               {SpMM_op, 20, 7, 12, 11, 12, 13, 0.5f, 0.5f, 1e-2}};
using BatchedCSRTestD = CSRTest<double>;
using BatchedCSRTestF = CSRTest<float>;
TEST_P(BatchedCSRTestD, Result)
{
  ASSERT_TRUE(devArrMatchHost(res_h.data(),
                              res_bM->raw_data(),
                              res_h.size(),
                              MLCommon::CompareApprox<double>(params.tolerance),
                              stream));
}
TEST_P(BatchedCSRTestF, Result)
{
  ASSERT_TRUE(devArrMatchHost(res_h.data(),
                              res_bM->raw_data(),
                              res_h.size(),
                              MLCommon::CompareApprox<float>(params.tolerance),
                              stream));
}
INSTANTIATE_TEST_CASE_P(BatchedCSRTests, BatchedCSRTestD, ::testing::ValuesIn(inputsd));
INSTANTIATE_TEST_CASE_P(BatchedCSRTests, BatchedCSRTestF, ::testing::ValuesIn(inputsf));
} // namespace Batched
} // namespace Sparse
} // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test/prims | rapidsai_public_repos/cuml/cpp/test/prims/batched/gemv.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "../test_utils.h"
#include <gtest/gtest.h>
#include <linalg/batched/gemv.cuh>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
#include <test_utils.h>
namespace MLCommon {
namespace LinAlg {
namespace Batched {
/** Test parameters for the batched GEMV tests. */
template <typename T>
struct BatchGemvInputs {
  T tolerance;          // comparison tolerance against the naive reference
  int m, n, batchSize;  // A is m x n per batch member; y = A * x
  unsigned long long int seed;  // RNG seed for reproducible inputs
};
// gtest requires a stream operator for parameter printing; nothing useful to print.
template <typename T, typename IdxType = int>
::std::ostream& operator<<(::std::ostream& os, const BatchGemvInputs<T>& dims)
{
  return os;
}
// Reference batched GEMV kernel: one block row per (output row, batch member),
// one thread per column; each thread atomically accumulates its partial
// product into y[batch * m + row]. Assumes blockDim.x >= n.
template <typename Type>
__global__ void naiveBatchGemvKernel(Type* y, const Type* A, const Type* x, int m, int n)
{
  int batch = blockIdx.y;
  int row = blockIdx.x;
  int col = threadIdx.x;
  if (row < m && col < n) {
    auto prod = A[batch * m * n + row * n + col] * x[batch * n + col];
    raft::myAtomicAdd(y + batch * m + row, prod);
  }
}
/**
 * Launches the reference batched GEMV kernel: y[b] = A[b] * x[b] per batch.
 * y must be zero-initialized by the caller (the kernel accumulates).
 *
 * Fix: the block size was declared `static`, freezing the value computed from
 * the first `n` ever passed in; it is now recomputed per call. Note this also
 * requires n (rounded up to a warp multiple) to fit in one CUDA block.
 */
template <typename Type>
void naiveBatchGemv(
  Type* y, const Type* A, const Type* x, int m, int n, int batchSize, cudaStream_t stream)
{
  // Round n up to a full warp for the block size; recomputed on every call.
  int TPB = raft::ceildiv(n, raft::WarpSize) * raft::WarpSize;
  dim3 nblks(m, batchSize);
  naiveBatchGemvKernel<Type><<<nblks, TPB, 0, stream>>>(y, A, x, m, n);
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
/**
 * Fixture: fills A and x with uniform random values, computes y = A * x with
 * both the naive reference and the batched gemv prim (alpha = 1, beta = 0),
 * and stores both results for comparison in the TEST_P body.
 */
template <typename T>
class BatchGemvTest : public ::testing::TestWithParam<BatchGemvInputs<T>> {
 protected:
  BatchGemvTest() : out_ref(0, stream), out(0, stream) {}
  void SetUp() override
  {
    params = ::testing::TestWithParam<BatchGemvInputs<T>>::GetParam();
    raft::random::Rng r(params.seed);
    int len = params.batchSize * params.m * params.n;      // total elements of A
    int vecleny = params.batchSize * params.m;             // total elements of y
    int veclenx = params.batchSize * params.n;             // total elements of x
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    rmm::device_uvector<T> A(len, stream);
    rmm::device_uvector<T> x(veclenx, stream);
    out_ref.resize(vecleny, stream);
    out.resize(vecleny, stream);
    r.uniform(A.data(), len, T(-1.0), T(1.0), stream);
    r.uniform(x.data(), veclenx, T(-1.0), T(1.0), stream);
    // The naive kernel accumulates, so the reference must start from zero.
    RAFT_CUDA_TRY(cudaMemsetAsync(out_ref.data(), 0, sizeof(T) * vecleny, stream));
    naiveBatchGemv(
      out_ref.data(), A.data(), x.data(), params.m, params.n, params.batchSize, stream);
    gemv<T, int>(out.data(),
                 A.data(),
                 x.data(),
                 nullptr,
                 T(1.0),
                 T(0.0),
                 params.m,
                 params.n,
                 params.batchSize,
                 stream);
  }
  void TearDown() override { RAFT_CUDA_TRY(cudaStreamDestroy(stream)); }
 protected:
  cudaStream_t stream = 0;
  BatchGemvInputs<T> params;
  rmm::device_uvector<T> out_ref;  // naive reference result
  rmm::device_uvector<T> out;      // result of the prim under test
};
// Dimensions straddle warp multiples (125/126/128) to cover padding edge cases.
const std::vector<BatchGemvInputs<float>> inputsf = {
  {0.005f, 128, 128, 32, 1234ULL},
  {0.005f, 128, 126, 32, 1234ULL},
  {0.005f, 128, 125, 32, 1234ULL},
  {0.005f, 126, 128, 32, 1234ULL},
  {0.005f, 126, 126, 32, 1234ULL},
  {0.005f, 126, 125, 32, 1234ULL},
  {0.005f, 125, 128, 32, 1234ULL},
  {0.005f, 125, 126, 32, 1234ULL},
  {0.005f, 125, 125, 32, 1234ULL},
};
typedef BatchGemvTest<float> BatchGemvTestF;
TEST_P(BatchGemvTestF, Result)
{
  int vecleny = params.batchSize * params.m;
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), vecleny, MLCommon::CompareApprox<float>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(BatchGemvTests, BatchGemvTestF, ::testing::ValuesIn(inputsf));
typedef BatchGemvTest<double> BatchGemvTestD;
const std::vector<BatchGemvInputs<double>> inputsd = {
  {0.0000001, 128, 128, 32, 1234ULL},
  {0.0000001, 128, 126, 32, 1234ULL},
  {0.0000001, 128, 125, 32, 1234ULL},
  {0.0000001, 126, 128, 32, 1234ULL},
  {0.0000001, 126, 126, 32, 1234ULL},
  {0.0000001, 126, 125, 32, 1234ULL},
  {0.0000001, 125, 128, 32, 1234ULL},
  {0.0000001, 125, 126, 32, 1234ULL},
  {0.0000001, 125, 125, 32, 1234ULL},
};
TEST_P(BatchGemvTestD, Result)
{
  int vecleny = params.batchSize * params.m;
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), vecleny, MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(BatchGemvTests, BatchGemvTestD, ::testing::ValuesIn(inputsd));
| 0 |
rapidsai_public_repos/cuml/cpp/test/prims | rapidsai_public_repos/cuml/cpp/test/prims/batched/make_symm.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "../test_utils.h"
#include <gtest/gtest.h>
#include <linalg/batched/make_symm.cuh>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <test_utils.h>
namespace MLCommon {
namespace LinAlg {
namespace Batched {
/** Test parameters for the batched matrix symmetrization tests. */
template <typename T>
struct BatchMakeSymmInputs {
  T tolerance;       // comparison tolerance against the naive reference
  int n, batchSize;  // each batch member is an n x n matrix
  unsigned long long int seed;  // RNG seed for reproducible inputs
};
// gtest requires a stream operator for parameter printing; nothing useful to print.
template <typename T, typename IdxType = int>
::std::ostream& operator<<(::std::ostream& os, const BatchMakeSymmInputs<T>& dims)
{
  return os;
}
/**
 * Reference kernel: symmetrizes each n x n batch member as
 * y = (x + x^T) / 2, one thread per output element.
 */
template <typename Type>
__global__ void naiveBatchMakeSymmKernel(Type* y, const Type* x, int n)
{
  const int bid = blockIdx.z;
  const int r   = threadIdx.y + blockDim.y * blockIdx.y;
  const int c   = threadIdx.x + blockDim.x * blockIdx.x;
  if (r >= n || c >= n) { return; }
  const int base      = bid * n * n;
  y[base + r * n + c] = (x[base + r * n + c] + x[base + c * n + r]) * Type(0.5);
}
template <typename Type>
void naiveBatchMakeSymm(Type* y, const Type* x, int batchSize, int n, cudaStream_t stream)
{
dim3 blk(16, 16);
int nblks = raft::ceildiv<int>(n, blk.x);
dim3 grid(nblks, nblks, batchSize);
naiveBatchMakeSymmKernel<Type><<<grid, blk, 0, stream>>>(y, x, n);
RAFT_CUDA_TRY(cudaPeekAtLastError());
}
/**
 * Fixture: symmetrizes random batched matrices with both the naive reference
 * kernel and the make_symm prim, storing both results for comparison.
 *
 * NOTE(review): the stream is destroyed at the end of SetUp while the member
 * device_uvectors (read later by devArrMatch on the default stream) outlive
 * it; presumably deallocation falls back to a synchronous free — verify
 * against the rmm memory resource in use.
 */
template <typename T>
class BatchMakeSymmTest : public ::testing::TestWithParam<BatchMakeSymmInputs<T>> {
 protected:
  BatchMakeSymmTest() : x(0, stream), out_ref(0, stream), out(0, stream) {}
  void SetUp() override
  {
    params = ::testing::TestWithParam<BatchMakeSymmInputs<T>>::GetParam();
    raft::random::Rng r(params.seed);
    int len = params.batchSize * params.n * params.n;
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));
    x.resize(len, stream);
    out_ref.resize(len, stream);
    out.resize(len, stream);
    r.uniform(x.data(), len, T(-1.0), T(1.0), stream);
    naiveBatchMakeSymm(out_ref.data(), x.data(), params.batchSize, params.n, stream);
    make_symm<T, int>(out.data(), x.data(), params.batchSize, params.n, stream);
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }
 protected:
  cudaStream_t stream = 0;
  BatchMakeSymmInputs<T> params;
  rmm::device_uvector<T> x;        // random input matrices
  rmm::device_uvector<T> out_ref;  // naive reference result
  rmm::device_uvector<T> out;      // result of the prim under test
};
// Sizes straddle the 16-wide tile (125/126/128) to cover partial-tile edges.
const std::vector<BatchMakeSymmInputs<float>> inputsf = {
  {0.000001f, 128, 32, 1234ULL},
  {0.000001f, 126, 32, 1234ULL},
  {0.000001f, 125, 32, 1234ULL},
};
typedef BatchMakeSymmTest<float> BatchMakeSymmTestF;
TEST_P(BatchMakeSymmTestF, Result)
{
  int len = params.batchSize * params.n * params.n;
  ASSERT_TRUE(
    devArrMatch(out_ref.data(), out.data(), len, MLCommon::CompareApprox<float>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(BatchMakeSymmTests, BatchMakeSymmTestF, ::testing::ValuesIn(inputsf));
typedef BatchMakeSymmTest<double> BatchMakeSymmTestD;
const std::vector<BatchMakeSymmInputs<double>> inputsd = {
  {0.0000001, 128, 32, 1234ULL},
  {0.0000001, 126, 32, 1234ULL},
  {0.0000001, 125, 32, 1234ULL},
};
TEST_P(BatchMakeSymmTestD, Result)
{
  int len = params.batchSize * params.n * params.n;
  ASSERT_TRUE(devArrMatch(
    out_ref.data(), out.data(), len, MLCommon::CompareApprox<double>(params.tolerance)));
}
INSTANTIATE_TEST_CASE_P(BatchMakeSymmTests, BatchMakeSymmTestD, ::testing::ValuesIn(inputsd));
| 0 |
rapidsai_public_repos/cuml/cpp/test/prims | rapidsai_public_repos/cuml/cpp/test/prims/batched/matrix.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <linalg_naive.h>
#include <test_utils.h>
#include <linalg/batched/matrix.cuh>
#include <raft/linalg/add.cuh>
#include <raft/util/cudart_utils.hpp>
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <gtest/gtest.h>
#include <raft/core/math.hpp>
#include <random>
#include <vector>
namespace MLCommon {
namespace LinAlg {
namespace Batched {
/** Operations exercised by the batched Matrix tests. */
enum MatrixOperation {
  AB_op,          // Matrix-matrix product (with GEMM)
  AZT_op,         // Matrix-vector product (with GEMM)
  ZA_op,          // Vector-matrix product (with GEMM)
  ApB_op,         // Addition
  AmB_op,         // Subtraction
  AkB_op,         // Kronecker product
  AsolveZ_op,     // Linear equation solver Ax=b
  LaggedZ_op,     // Lag matrix
  CopyA2D_op,     // 2D copy
  DiffA_op,       // Vector first difference
  Hessenberg_op,  // Hessenberg decomposition A=UHU'
  Schur_op,       // Schur decomposition A=USU'
  Lyapunov_op,    // Lyapunov equation solver AXA'-X+B=0
};
/** Test parameters for the batched Matrix operation tests. */
template <typename T>
struct MatrixInputs {
  MatrixOperation operation;
  int batch_size;
  int m;  // Usually the dimensions of A and/or Z
  int n;
  int p;  // Usually the dimensions of B or other parameters
  int q;
  int s;  // Additional parameters for operations that need more than 4
  int t;
  T tolerance;  // comparison tolerance against the host reference
};
/**
 * Test fixture for MLCommon::LinAlg::Batched::Matrix operations.
 *
 * SetUp() generates random host inputs, runs the operation under test on the
 * device (result written to res_bM), and computes a naive host reference
 * (res_h). The TEST_P bodies then compare the two.
 *
 * Fix: res_bM is now initialized to nullptr. gtest still runs TearDown() when
 * an ASSERT_* aborts SetUp() before `res_bM = new Matrix<T>(...)` executes;
 * previously that made TearDown() delete an indeterminate pointer (UB).
 * `delete nullptr` is well-defined and a no-op.
 */
template <typename T>
class MatrixTest : public ::testing::TestWithParam<MatrixInputs<T>> {
 protected:
  void SetUp() override
  {
    using std::vector;
    params = ::testing::TestWithParam<MatrixInputs<T>>::GetParam();

    // Find out whether A, B and Z will be used (depending on the operation)
    bool use_A = (params.operation != LaggedZ_op);
    bool use_B = (params.operation == AB_op) || (params.operation == ApB_op) ||
                 (params.operation == AmB_op) || (params.operation == AkB_op) ||
                 (params.operation == Lyapunov_op);
    bool use_Z = (params.operation == AZT_op) || (params.operation == ZA_op) ||
                 (params.operation == AsolveZ_op) || (params.operation == LaggedZ_op);
    bool Z_col = (params.operation == AsolveZ_op);
    int r      = params.operation == AZT_op ? params.n : params.m;

    // Check if the dimensions are valid and compute the output dimensions
    int m_r{};
    int n_r{};
    switch (params.operation) {
      case AB_op:
        ASSERT_TRUE(params.n == params.p);
        m_r = params.m;
        n_r = params.q;
        break;
      case ApB_op:
      case AmB_op:
        ASSERT_TRUE(params.m == params.p && params.n == params.q);
        m_r = params.m;
        n_r = params.n;
        break;
      case AkB_op:
        m_r = params.m * params.p;
        n_r = params.n * params.q;
        break;
      case AZT_op:
        m_r = params.m;
        n_r = 1;
        break;
      case ZA_op:
        m_r = 1;
        n_r = params.n;
        break;
      case AsolveZ_op:
        ASSERT_TRUE(params.n == params.m);
        // For this test we multiply A by the solution and check against Z
        m_r = params.m;
        n_r = 1;
        break;
      case LaggedZ_op:
        // For this operation params.n holds the number of lags
        m_r = params.m - params.n;
        n_r = params.n;
        break;
      case CopyA2D_op:
        // For this operation p and q are the dimensions of the copy window
        m_r = params.p;
        n_r = params.q;
        break;
      case DiffA_op:
        // Note: A can represent either a row or column vector
        ASSERT_TRUE(params.m == 1 || params.n == 1);
        m_r = std::max(1, params.m - 1);
        n_r = std::max(1, params.n - 1);
        break;
      case Hessenberg_op:
      case Schur_op:
      case Lyapunov_op:
        ASSERT_TRUE(params.m == params.n && params.m == params.p && params.m == params.q);
        m_r = params.m;
        n_r = params.m;
        break;
    }

    // Create test matrices and vector
    std::vector<T> A;
    std::vector<T> B;
    std::vector<T> Z;
    if (use_A) A.resize(params.batch_size * params.m * params.n);
    if (use_B) B.resize(params.batch_size * params.p * params.q);
    if (use_Z) Z.resize(params.batch_size * r);

    // Generate random data
    std::random_device rd;
    std::mt19937 gen(rd());
    std::uniform_real_distribution<T> udis(-1.0, 3.0);
    for (std::size_t i = 0; i < A.size(); i++)
      A[i] = udis(gen);
    for (std::size_t i = 0; i < B.size(); i++)
      B[i] = udis(gen);
    for (std::size_t i = 0; i < Z.size(); i++)
      Z[i] = udis(gen);

    // Create handles, stream
    RAFT_CUBLAS_TRY(cublasCreate(&handle));
    RAFT_CUDA_TRY(cudaStreamCreate(&stream));

    // Created batched matrices
    Matrix<T> AbM(params.m, params.n, params.batch_size, handle, stream);
    Matrix<T> BbM(params.p, params.q, params.batch_size, handle, stream);
    Matrix<T> ZbM(Z_col ? r : 1, Z_col ? 1 : r, params.batch_size, handle, stream);

    // Copy the data to the device
    if (use_A) raft::update_device(AbM.raw_data(), A.data(), A.size(), stream);
    if (use_B) raft::update_device(BbM.raw_data(), B.data(), B.size(), stream);
    if (use_Z) raft::update_device(ZbM.raw_data(), Z.data(), Z.size(), stream);

    // Create fake batched matrices to be overwritten by results
    res_bM = new Matrix<T>(1, 1, 1, handle, stream);

    // Compute the tested results
    switch (params.operation) {
      case AB_op: *res_bM = AbM * BbM; break;
      case ApB_op: *res_bM = AbM + BbM; break;
      case AmB_op: *res_bM = AbM - BbM; break;
      case AkB_op: *res_bM = b_kron(AbM, BbM); break;
      case AZT_op: *res_bM = b_gemm(AbM, ZbM, false, true); break;
      case ZA_op: *res_bM = ZbM * AbM; break;
      case AsolveZ_op:
        // A * A\Z -> should be Z
        *res_bM = AbM * b_solve(AbM, ZbM);
        break;
      case LaggedZ_op: *res_bM = b_lagged_mat(ZbM, params.n); break;
      case CopyA2D_op: *res_bM = b_2dcopy(AbM, params.s, params.t, params.p, params.q); break;
      case DiffA_op: *res_bM = AbM.difference(); break;
      case Hessenberg_op: {
        constexpr T zero_tolerance = std::is_same<T, double>::value ? 1e-7 : 1e-3f;
        int n = params.m;
        Matrix<T> HbM(n, n, params.batch_size, handle, stream);
        Matrix<T> UbM(n, n, params.batch_size, handle, stream);
        b_hessenberg(AbM, UbM, HbM);

        // Check that H is in Hessenberg form (zero below the first subdiagonal)
        std::vector<T> H = std::vector<T>(n * n * params.batch_size);
        raft::update_host(H.data(), HbM.raw_data(), H.size(), stream);
        raft::interruptible::synchronize(stream);
        for (int ib = 0; ib < params.batch_size; ib++) {
          for (int j = 0; j < n - 2; j++) {
            for (int i = j + 2; i < n; i++) {
              ASSERT_TRUE(raft::abs(H[n * n * ib + n * j + i]) < zero_tolerance);
            }
          }
        }

        // Check that U is unitary (UU'=I)
        std::vector<T> UUt = std::vector<T>(n * n * params.batch_size);
        raft::update_host(UUt.data(), b_gemm(UbM, UbM, false, true).raw_data(), UUt.size(), stream);
        raft::interruptible::synchronize(stream);
        for (int ib = 0; ib < params.batch_size; ib++) {
          for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
              ASSERT_TRUE(raft::abs(UUt[n * n * ib + n * j + i] - (i == j ? (T)1 : (T)0)) <
                          zero_tolerance);
            }
          }
        }

        // Write UHU' in the result (will be compared against A)
        *res_bM = UbM * b_gemm(HbM, UbM, false, true);
        break;
      }
      case Schur_op: {
        constexpr T zero_tolerance = std::is_same<T, double>::value ? 1e-7 : 1e-3f;
        int n = params.m;
        Matrix<T> SbM(n, n, params.batch_size, handle, stream);
        Matrix<T> UbM(n, n, params.batch_size, handle, stream);
        b_schur(AbM, UbM, SbM);

        // Check that S is in Schur form
        std::vector<T> S = std::vector<T>(n * n * params.batch_size);
        raft::update_host(S.data(), SbM.raw_data(), S.size(), stream);
        raft::interruptible::synchronize(stream);
        for (int ib = 0; ib < params.batch_size; ib++) {
          for (int j = 0; j < n - 2; j++) {
            for (int i = j + 2; i < n; i++) {
              ASSERT_TRUE(raft::abs(S[n * n * ib + n * j + i]) < zero_tolerance);
            }
          }
        }
        // No two consecutive 2x2 blocks on the quasi-triangular diagonal
        for (int ib = 0; ib < params.batch_size; ib++) {
          for (int k = 0; k < n - 3; k++) {
            ASSERT_FALSE(raft::abs(S[n * n * ib + n * k + k + 1]) > zero_tolerance &&
                         raft::abs(S[n * n * ib + n * (k + 1) + k + 2]) > zero_tolerance &&
                         raft::abs(S[n * n * ib + n * (k + 2) + k + 3]) > zero_tolerance);
          }
        }

        // Check that U is unitary (UU'=I)
        std::vector<T> UUt = std::vector<T>(n * n * params.batch_size);
        raft::update_host(UUt.data(), b_gemm(UbM, UbM, false, true).raw_data(), UUt.size(), stream);
        raft::interruptible::synchronize(stream);
        for (int ib = 0; ib < params.batch_size; ib++) {
          for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
              ASSERT_TRUE(raft::abs(UUt[n * n * ib + n * j + i] - (i == j ? (T)1 : (T)0)) <
                          zero_tolerance);
            }
          }
        }

        // Write USU' in the result (will be compared against A)
        *res_bM = UbM * b_gemm(SbM, UbM, false, true);
        break;
      }
      case Lyapunov_op: {
        Matrix<T> XbM = b_lyapunov(AbM, BbM);
        // Write AXA'-X in the result (will be compared against -B)
        *res_bM = AbM * b_gemm(XbM, AbM, false, true) - XbM;
        break;
      }
    }

    // Compute the expected results
    res_h.resize(params.batch_size * m_r * n_r);
    switch (params.operation) {
      case AB_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::matMul(res_h.data() + bid * m_r * n_r,
                        A.data() + bid * params.m * params.n,
                        B.data() + bid * params.p * params.q,
                        params.m,
                        params.n,
                        params.q);
        }
        break;
      case ApB_op: Naive::add(res_h.data(), A.data(), B.data(), A.size()); break;
      case AmB_op: Naive::add(res_h.data(), A.data(), B.data(), A.size(), T(-1.0)); break;
      case AkB_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::kronecker(res_h.data() + bid * m_r * n_r,
                           A.data() + bid * params.m * params.n,
                           B.data() + bid * params.p * params.q,
                           params.m,
                           params.n,
                           params.p,
                           params.q);
        }
        break;
      case AZT_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::matMul(res_h.data() + bid * m_r * n_r,
                        A.data() + bid * params.m * params.n,
                        Z.data() + bid * r,
                        params.m,
                        params.n,
                        1);
        }
        break;
      case ZA_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::matMul(res_h.data() + bid * m_r * n_r,
                        Z.data() + bid * r,
                        A.data() + bid * params.m * params.n,
                        1,
                        params.m,
                        params.n);
        }
        break;
      case AsolveZ_op:
        // Simply copy Z in the result
        memcpy(res_h.data(), Z.data(), r * params.batch_size * sizeof(T));
        break;
      case LaggedZ_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::laggedMat(
            res_h.data() + bid * m_r * n_r, Z.data() + bid * params.m, params.m, params.n);
        }
        break;
      case CopyA2D_op:
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::copy2D(res_h.data() + bid * m_r * n_r,
                        A.data() + bid * params.m * params.n,
                        params.s,
                        params.t,
                        params.m,
                        m_r,
                        n_r);
        }
        break;
      case DiffA_op: {
        int len = params.m * params.n;
        for (int bid = 0; bid < params.batch_size; bid++) {
          Naive::diff(res_h.data() + bid * (len - 1), A.data() + bid * len, len);
        }
        break;
      }
      case Hessenberg_op:
      case Schur_op:
        // Simply copy A (will be compared against UHU')
        memcpy(res_h.data(), A.data(), params.m * params.m * params.batch_size * sizeof(T));
        break;
      case Lyapunov_op:
        // Simply copy -B (will be compared against AXA'-X)
        for (int i = 0; i < params.m * params.m * params.batch_size; i++) {
          res_h[i] = -B[i];
        }
        break;
    }

    raft::interruptible::synchronize(stream);
  }

  void TearDown() override
  {
    // Safe even if SetUp() aborted before allocating res_bM (nullptr delete is a no-op)
    delete res_bM;
    RAFT_CUBLAS_TRY(cublasDestroy(handle));
    RAFT_CUDA_TRY(cudaStreamDestroy(stream));
  }

 protected:
  MatrixInputs<T> params;
  // Owned device result; nullptr until SetUp() allocates it, so TearDown()
  // never deletes an indeterminate pointer if SetUp() fails early.
  Matrix<T>* res_bM = nullptr;
  std::vector<T> res_h;
  cublasHandle_t handle;
  cudaStream_t stream = 0;
};
// Test parameters (op, batch_size, m, n, p, q, s, t, tolerance)
// Double-precision cases; tolerance is the max relative error accepted by
// CompareApprox in the TEST_P bodies below.
const std::vector<MatrixInputs<double>> inputsd = {
  {AB_op, 7, 15, 37, 37, 11, 0, 0, 1e-6},
  {AZT_op, 5, 33, 65, 1, 1, 0, 0, 1e-6},
  {ZA_op, 8, 12, 41, 1, 1, 0, 0, 1e-6},
  {ApB_op, 4, 16, 48, 16, 48, 0, 0, 1e-6},
  {AmB_op, 17, 9, 3, 9, 3, 0, 0, 1e-6},
  {AkB_op, 5, 3, 13, 31, 8, 0, 0, 1e-6},
  {AkB_op, 3, 7, 12, 31, 15, 0, 0, 1e-6},
  {AkB_op, 2, 11, 2, 8, 46, 0, 0, 1e-6},
  {AsolveZ_op, 6, 17, 17, 1, 1, 0, 0, 1e-6},
  {LaggedZ_op, 5, 31, 9, 1, 1, 0, 0, 1e-6},
  {LaggedZ_op, 7, 129, 3, 1, 1, 0, 0, 1e-6},
  {CopyA2D_op, 11, 31, 63, 17, 14, 5, 9, 1e-6},
  {CopyA2D_op, 4, 33, 7, 30, 4, 3, 0, 1e-6},
  {DiffA_op, 5, 11, 1, 1, 1, 0, 0, 1e-6},
  {DiffA_op, 15, 1, 37, 1, 1, 0, 0, 1e-6},
  {Hessenberg_op, 10, 15, 15, 15, 15, 0, 0, 1e-6},
  {Hessenberg_op, 30, 61, 61, 61, 61, 0, 0, 1e-6},
  // {Schur_op, 7, 12, 12, 12, 12, 0, 0, 1e-3},
  // {Schur_op, 17, 77, 77, 77, 77, 0, 0, 1e-3},
  // {Lyapunov_op, 5, 14, 14, 14, 14, 0, 0, 1e-2},
  // {Lyapunov_op, 13, 100, 100, 100, 100, 0, 0, 1e-2}
};
// Note: Schur and Lyapunov tests have had stability issues on CI so
// they are disabled temporarily. See issue:
// https://github.com/rapidsai/cuml/issues/1949

// Test parameters (op, batch_size, m, n, p, q, s, t, tolerance)
// Single-precision cases; tolerances are looser than the double cases above.
const std::vector<MatrixInputs<float>> inputsf = {
  {AB_op, 7, 15, 37, 37, 11, 0, 0, 1e-2},
  {AZT_op, 5, 33, 65, 1, 1, 0, 0, 1e-2},
  {ZA_op, 8, 12, 41, 1, 1, 0, 0, 1e-2},
  {ApB_op, 4, 16, 48, 16, 48, 0, 0, 1e-2},
  {AmB_op, 17, 9, 3, 9, 3, 0, 0, 1e-2},
  {AkB_op, 5, 3, 13, 31, 8, 0, 0, 1e-2},
  {AkB_op, 3, 7, 12, 31, 15, 0, 0, 1e-2},
  {AkB_op, 2, 11, 2, 8, 46, 0, 0, 1e-2},
  {AsolveZ_op, 6, 17, 17, 1, 1, 0, 0, 1e-2},
  {LaggedZ_op, 5, 31, 9, 1, 1, 0, 0, 1e-5},
  {LaggedZ_op, 7, 129, 3, 1, 1, 0, 0, 1e-5},
  {CopyA2D_op, 11, 31, 63, 17, 14, 5, 9, 1e-5},
  {CopyA2D_op, 4, 33, 7, 30, 4, 3, 0, 1e-5},
  {DiffA_op, 5, 11, 1, 1, 1, 0, 0, 1e-2},
  {DiffA_op, 15, 1, 37, 1, 1, 0, 0, 1e-2},
  {Hessenberg_op, 10, 15, 15, 15, 15, 0, 0, 1e-2},
  {Hessenberg_op, 30, 61, 61, 61, 61, 0, 0, 1e-2},
  // {Schur_op, 7, 12, 12, 12, 12, 0, 0, 1e-2},
  // {Schur_op, 17, 77, 77, 77, 77, 0, 0, 1e-2},
  // {Lyapunov_op, 5, 14, 14, 14, 14, 0, 0, 1e-2},
  // {Lyapunov_op, 13, 100, 100, 100, 100, 0, 0, 1e-2}
};
// Note: Schur and Lyapunov operations don't give good precision for
// single-precision floating-point numbers yet...
using BatchedMatrixTestD = MatrixTest<double>;
using BatchedMatrixTestF = MatrixTest<float>;

// Compare the device result computed in SetUp() against the naive host
// reference, element-wise within the per-case tolerance.
TEST_P(BatchedMatrixTestD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatchHost(res_h.data(),
                                        res_bM->raw_data(),
                                        res_h.size(),
                                        MLCommon::CompareApprox<double>(params.tolerance),
                                        stream));
}

TEST_P(BatchedMatrixTestF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatchHost(res_h.data(),
                                        res_bM->raw_data(),
                                        res_h.size(),
                                        MLCommon::CompareApprox<float>(params.tolerance),
                                        stream));
}

INSTANTIATE_TEST_CASE_P(BatchedMatrixTests, BatchedMatrixTestD, ::testing::ValuesIn(inputsd));
INSTANTIATE_TEST_CASE_P(BatchedMatrixTests, BatchedMatrixTestF, ::testing::ValuesIn(inputsf));
} // namespace Batched
} // namespace LinAlg
} // namespace MLCommon
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/ridge.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/glm.hpp>
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <test_utils.h>
#include <vector>
namespace ML {
namespace GLM {
// Parameters for one Ridge regression test case.
template <typename T>
struct RidgeInputs {
  T tol;           // absolute tolerance for coefficient/prediction comparison
  size_t n_row;    // rows in the training set
  size_t n_col;    // feature columns
  size_t n_row_2;  // rows in the prediction set
  int algo;        // solver algorithm selector (0: SVD, 1: eigendecomposition)
  T alpha;         // L2 regularization strength
};
/**
 * Test fixture for ML::GLM::ridgeFit / gemmPredict.
 *
 * The constructor runs three scenarios whose expected values were produced
 * with scikit-learn (see the comment inside basicTest()):
 *  - basicTest:        fit with/without intercept and with normalization,
 *                      then predict on a small hold-out set;
 *  - basicTest2:       single-feature fit with intercept;
 *  - testSampleWeight: single-feature fit with per-sample weights.
 *
 * Fix: basicTest() previously used runtime-sized stack arrays with
 * initializers (e.g. `T data_h[len] = {...}` where `len` is not a constant).
 * That is not standard C++ (VLAs are a compiler extension and cannot carry
 * initializers); it is replaced by std::vector, matching the style already
 * used in basicTest2() and testSampleWeight().
 */
template <typename T>
class RidgeTest : public ::testing::TestWithParam<RidgeInputs<T>> {
 public:
  RidgeTest()
    : params(::testing::TestWithParam<RidgeInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      coef(params.n_col, stream),
      coef2(params.n_col, stream),
      coef3(params.n_col, stream),
      coef_ref(params.n_col, stream),
      coef2_ref(params.n_col, stream),
      coef3_ref(params.n_col, stream),
      pred(params.n_row_2, stream),
      pred_ref(params.n_row_2, stream),
      pred2(params.n_row_2, stream),
      pred2_ref(params.n_row_2, stream),
      pred3(params.n_row_2, stream),
      pred3_ref(params.n_row_2, stream),
      coef_sc(1, stream),
      coef_sc_ref(1, stream),
      coef_sw(1, stream),
      coef_sw_ref(1, stream)
  {
    basicTest();
    basicTest2();
    testSampleWeight();
  }

 protected:
  void basicTest()
  {
    int len  = params.n_row * params.n_col;
    int len2 = params.n_row_2 * params.n_col;

    rmm::device_uvector<T> data(len, stream);
    rmm::device_uvector<T> pred_data(len2, stream);
    rmm::device_uvector<T> labels(params.n_row, stream);
    T alpha = params.alpha;

    /* How to reproduce the coefficients for this test:

    from sklearn.preprocessing import StandardScaler
    from sklearn.linear_model import Ridge

    scaler = StandardScaler(with_mean=True, with_std=True)
    x_norm = scaler.fit_transform(x_train)

    m = Ridge(
        fit_intercept=False, normalize=False, alpha=0.5)
    m.fit(x_train, y)
    print(m.coef_, m.predict(x_test))

    m = Ridge(
        fit_intercept=True, normalize=False, alpha=0.5)
    m.fit(x_train, y)
    print(m.coef_, m.predict(x_test))

    m = Ridge(
        fit_intercept=True, normalize=False, alpha=0.5)
    m.fit(x_norm, y)
    print(m.coef_ / scaler.scale_, m.predict(scaler.transform(x_test)))
    */

    // Host-side fixtures (std::vector instead of non-standard VLAs)
    std::vector<T> data_h = {0.0, 0.0, 1.0, 0.0, 0.0, 1.0};
    data_h.resize(len);
    raft::update_device(data.data(), data_h.data(), len, stream);

    std::vector<T> labels_h = {0.0, 0.1, 1.0};
    labels_h.resize(params.n_row);
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);

    std::vector<T> coef_ref_h = {0.4, 0.4};
    coef_ref_h.resize(params.n_col);
    raft::update_device(coef_ref.data(), coef_ref_h.data(), params.n_col, stream);

    std::vector<T> coef2_ref_h = {0.3454546, 0.34545454};
    coef2_ref_h.resize(params.n_col);
    raft::update_device(coef2_ref.data(), coef2_ref_h.data(), params.n_col, stream);

    std::vector<T> coef3_ref_h = {0.43846154, 0.43846154};
    coef3_ref_h.resize(params.n_col);
    raft::update_device(coef3_ref.data(), coef3_ref_h.data(), params.n_col, stream);

    std::vector<T> pred_data_h = {0.5, 2.0, 0.2, 1.0};
    pred_data_h.resize(len2);
    raft::update_device(pred_data.data(), pred_data_h.data(), len2, stream);

    std::vector<T> pred_ref_h = {0.28, 1.2};
    pred_ref_h.resize(params.n_row_2);
    raft::update_device(pred_ref.data(), pred_ref_h.data(), params.n_row_2, stream);

    std::vector<T> pred2_ref_h = {0.37818182, 1.17272727};
    pred2_ref_h.resize(params.n_row_2);
    raft::update_device(pred2_ref.data(), pred2_ref_h.data(), params.n_row_2, stream);

    std::vector<T> pred3_ref_h = {0.38128205, 1.38974359};
    pred3_ref_h.resize(params.n_row_2);
    raft::update_device(pred3_ref.data(), pred3_ref_h.data(), params.n_row_2, stream);

    // Scenario 1: no intercept, no normalization
    intercept = T(0);
    ridgeFit(handle,
             data.data(),
             params.n_row,
             params.n_col,
             labels.data(),
             &alpha,
             1,
             coef.data(),
             &intercept,
             false,
             false,
             params.algo);
    gemmPredict(
      handle, pred_data.data(), params.n_row_2, params.n_col, coef.data(), intercept, pred.data());

    // ridgeFit may modify its inputs: restore them before the next fit
    raft::update_device(data.data(), data_h.data(), len, stream);
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);

    // Scenario 2: with intercept, no normalization
    intercept2 = T(0);
    ridgeFit(handle,
             data.data(),
             params.n_row,
             params.n_col,
             labels.data(),
             &alpha,
             1,
             coef2.data(),
             &intercept2,
             true,
             false,
             params.algo);
    gemmPredict(handle,
                pred_data.data(),
                params.n_row_2,
                params.n_col,
                coef2.data(),
                intercept2,
                pred2.data());

    raft::update_device(data.data(), data_h.data(), len, stream);
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);

    // Scenario 3: with intercept and normalization
    intercept3 = T(0);
    ridgeFit(handle,
             data.data(),
             params.n_row,
             params.n_col,
             labels.data(),
             &alpha,
             1,
             coef3.data(),
             &intercept3,
             true,
             true,
             params.algo);
    gemmPredict(handle,
                pred_data.data(),
                params.n_row_2,
                params.n_col,
                coef3.data(),
                intercept3,
                pred3.data());
  }

  void basicTest2()
  {
    int len = params.n_row * params.n_col;

    rmm::device_uvector<T> data_sc(len, stream);
    rmm::device_uvector<T> labels_sc(len, stream);

    std::vector<T> data_h = {1.0, 1.0, 2.0, 2.0, 1.0, 2.0};
    data_h.resize(len);
    raft::update_device(data_sc.data(), data_h.data(), len, stream);

    std::vector<T> labels_h = {6.0, 8.0, 9.0, 11.0, -1.0, 2.0};
    labels_h.resize(len);
    raft::update_device(labels_sc.data(), labels_h.data(), len, stream);

    std::vector<T> coef_sc_ref_h = {1.8};
    coef_sc_ref_h.resize(1);
    raft::update_device(coef_sc_ref.data(), coef_sc_ref_h.data(), 1, stream);

    T intercept_sc = T(0);
    T alpha_sc     = T(1.0);
    ridgeFit(handle,
             data_sc.data(),
             len,
             1,
             labels_sc.data(),
             &alpha_sc,
             1,
             coef_sc.data(),
             &intercept_sc,
             true,
             false,
             params.algo);
  }

  void testSampleWeight()
  {
    int len = params.n_row * params.n_col;

    rmm::device_uvector<T> data_sw(len, stream);
    rmm::device_uvector<T> labels_sw(len, stream);
    rmm::device_uvector<T> sample_weight(len, stream);

    std::vector<T> data_h = {1.0, 1.0, 2.0, 2.0, 1.0, 2.0};
    data_h.resize(len);
    raft::update_device(data_sw.data(), data_h.data(), len, stream);

    std::vector<T> labels_h = {6.0, 8.0, 9.0, 11.0, -1.0, 2.0};
    labels_h.resize(len);
    raft::update_device(labels_sw.data(), labels_h.data(), len, stream);

    std::vector<T> coef_sw_ref_h = {0.26052};
    coef_sw_ref_h.resize(1);
    raft::update_device(coef_sw_ref.data(), coef_sw_ref_h.data(), 1, stream);

    std::vector<T> sample_weight_h = {0.2, 0.3, 0.09, 0.15, 0.11, 0.15};
    sample_weight_h.resize(len);
    raft::update_device(sample_weight.data(), sample_weight_h.data(), len, stream);

    T intercept_sw = T(0);
    T alpha_sw     = T(1.0);
    ridgeFit(handle,
             data_sw.data(),
             len,
             1,
             labels_sw.data(),
             &alpha_sw,
             1,
             coef_sw.data(),
             &intercept_sw,
             true,
             false,
             params.algo,
             sample_weight.data());
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;

  RidgeInputs<T> params;
  rmm::device_uvector<T> coef, coef_ref, pred, pred_ref;
  rmm::device_uvector<T> coef2, coef2_ref, pred2, pred2_ref;
  rmm::device_uvector<T> coef3, coef3_ref, pred3, pred3_ref;
  rmm::device_uvector<T> coef_sc, coef_sc_ref;
  rmm::device_uvector<T> coef_sw, coef_sw_ref;
  T intercept, intercept2, intercept3;
};
// Test parameters: {tol, n_row, n_col, n_row_2, algo, alpha}
const std::vector<RidgeInputs<float>> inputsf2 = {{0.001f, 3, 2, 2, 0, 0.5f},
                                                  {0.001f, 3, 2, 2, 1, 0.5f}};

const std::vector<RidgeInputs<double>> inputsd2 = {{0.001, 3, 2, 2, 0, 0.5},
                                                   {0.001, 3, 2, 2, 1, 0.5}};

typedef RidgeTest<float> RidgeTestF;
// Checks fitted coefficients and predictions for all three scenarios plus
// the single-column and sample-weighted fits against sklearn references.
TEST_P(RidgeTestF, Fit)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_ref.data(), coef.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef2_ref.data(), coef2.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef3_ref.data(), coef3.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    pred_ref.data(), pred.data(), params.n_row_2, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    pred2_ref.data(), pred2.data(), params.n_row_2, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    pred3_ref.data(), pred3.data(), params.n_row_2, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_sc_ref.data(), coef_sc.data(), 1, MLCommon::CompareApproxAbs<float>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_sw_ref.data(), coef_sw.data(), 1, MLCommon::CompareApproxAbs<float>(params.tol)));
}

typedef RidgeTest<double> RidgeTestD;
TEST_P(RidgeTestD, Fit)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_ref.data(), coef.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef2_ref.data(), coef2.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef3_ref.data(), coef3.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    pred_ref.data(), pred.data(), params.n_row_2, MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(pred2_ref.data(),
                                    pred2.data(),
                                    params.n_row_2,
                                    MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(pred3_ref.data(),
                                    pred3.data(),
                                    params.n_row_2,
                                    MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_sc_ref.data(), coef_sc.data(), 1, MLCommon::CompareApproxAbs<double>(params.tol)));

  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_sw_ref.data(), coef_sw.data(), 1, MLCommon::CompareApproxAbs<double>(params.tol)));
}

INSTANTIATE_TEST_CASE_P(RidgeTests, RidgeTestF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(RidgeTests, RidgeTestD, ::testing::ValuesIn(inputsd2));
} // namespace GLM
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/pca_test.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/decomposition/params.hpp>
#include <gtest/gtest.h>
#include <pca/pca.cuh>
#include <raft/core/handle.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <test_utils.h>
#include <vector>
namespace ML {
// Parameters for one PCA test case. The first (len, n_row, n_col) triple
// describes the small hand-checked matrix in basicTest(); the second
// (len2, n_row2, n_col2) triple the random matrix in advancedTest().
template <typename T>
struct PcaInputs {
  T tolerance;                  // absolute tolerance for device/host matches
  int len;                      // n_row * n_col
  int n_row;
  int n_col;
  int len2;                     // n_row2 * n_col2
  int n_row2;
  int n_col2;
  unsigned long long int seed;  // RNG seed for the random matrix
  int algo;                     // 0: COV_EIG_DQ, 1: COV_EIG_JACOBI
};
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const PcaInputs<T>& dims)
{
return os;
}
/**
 * Test fixture for ML::pcaFit / pcaTransform / pcaInverseTransform.
 *
 * basicTest() runs PCA on a small hand-checked 3x2 matrix and round-trips it
 * through transform + inverse-transform; advancedTest() does a fit-transform +
 * inverse-transform round trip on a larger uniform-random matrix.
 *
 * Fix: basicTest() constructed a raft::random::Rng that was never used
 * (the data there is a fixed host vector); the dead local is removed.
 */
template <typename T>
class PcaTest : public ::testing::TestWithParam<PcaInputs<T>> {
 public:
  PcaTest()
    : params(::testing::TestWithParam<PcaInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      explained_vars(params.n_col, stream),
      explained_vars_ref(params.n_col, stream),
      components(params.n_col * params.n_col, stream),
      components_ref(params.n_col * params.n_col, stream),
      trans_data(params.len, stream),
      trans_data_ref(params.len, stream),
      data(params.len, stream),
      data_back(params.len, stream),
      data2(params.len2, stream),
      data2_back(params.len2, stream)
  {
    basicTest();
    advancedTest();
  }

 protected:
  void basicTest()
  {
    int len = params.len;

    // Fixed input matrix with known PCA decomposition (column-major, 3x2)
    std::vector<T> data_h = {1.0, 2.0, 5.0, 4.0, 2.0, 1.0};
    data_h.resize(len);
    raft::update_device(data.data(), data_h.data(), len, stream);

    std::vector<T> trans_data_ref_h = {-2.3231, -0.3517, 2.6748, -0.3979, 0.6571, -0.2592};
    trans_data_ref_h.resize(len);
    raft::update_device(trans_data_ref.data(), trans_data_ref_h.data(), len, stream);

    int len_comp = params.n_col * params.n_col;
    rmm::device_uvector<T> explained_var_ratio(params.n_col, stream);
    rmm::device_uvector<T> singular_vals(params.n_col, stream);
    rmm::device_uvector<T> mean(params.n_col, stream);
    rmm::device_uvector<T> noise_vars(1, stream);

    std::vector<T> components_ref_h = {0.8163, 0.5776, -0.5776, 0.8163};
    components_ref_h.resize(len_comp);
    std::vector<T> explained_vars_ref_h = {6.338, 0.3287};
    explained_vars_ref_h.resize(params.n_col);

    raft::update_device(components_ref.data(), components_ref_h.data(), len_comp, stream);
    raft::update_device(
      explained_vars_ref.data(), explained_vars_ref_h.data(), params.n_col, stream);

    paramsPCA prms;
    prms.n_cols       = params.n_col;
    prms.n_rows       = params.n_row;
    prms.n_components = params.n_col;
    prms.whiten       = false;
    if (params.algo == 0)
      prms.algorithm = solver::COV_EIG_DQ;
    else
      prms.algorithm = solver::COV_EIG_JACOBI;

    pcaFit(handle,
           data.data(),
           components.data(),
           explained_vars.data(),
           explained_var_ratio.data(),
           singular_vals.data(),
           mean.data(),
           noise_vars.data(),
           prms,
           stream);
    pcaTransform(handle,
                 data.data(),
                 components.data(),
                 trans_data.data(),
                 singular_vals.data(),
                 mean.data(),
                 prms,
                 stream);
    pcaInverseTransform(handle,
                        trans_data.data(),
                        components.data(),
                        singular_vals.data(),
                        mean.data(),
                        data_back.data(),
                        prms,
                        stream);
  }

  void advancedTest()
  {
    raft::random::Rng r(params.seed, raft::random::GenPC);
    int len = params.len2;

    paramsPCA prms;
    prms.n_cols       = params.n_col2;
    prms.n_rows       = params.n_row2;
    prms.n_components = params.n_col2;
    prms.whiten       = false;
    if (params.algo == 0)
      prms.algorithm = solver::COV_EIG_DQ;
    else if (params.algo == 1)
      prms.algorithm = solver::COV_EIG_JACOBI;

    r.uniform(data2.data(), len, T(-1.0), T(1.0), stream);
    rmm::device_uvector<T> data2_trans(prms.n_rows * prms.n_components, stream);

    int len_comp = params.n_col2 * prms.n_components;
    rmm::device_uvector<T> components2(len_comp, stream);
    rmm::device_uvector<T> explained_vars2(prms.n_components, stream);
    rmm::device_uvector<T> explained_var_ratio2(prms.n_components, stream);
    rmm::device_uvector<T> singular_vals2(prms.n_components, stream);
    rmm::device_uvector<T> mean2(prms.n_cols, stream);
    rmm::device_uvector<T> noise_vars2(1, stream);

    pcaFitTransform(handle,
                    data2.data(),
                    data2_trans.data(),
                    components2.data(),
                    explained_vars2.data(),
                    explained_var_ratio2.data(),
                    singular_vals2.data(),
                    mean2.data(),
                    noise_vars2.data(),
                    prms,
                    stream);
    pcaInverseTransform(handle,
                        data2_trans.data(),
                        components2.data(),
                        singular_vals2.data(),
                        mean2.data(),
                        data2_back.data(),
                        prms,
                        stream);
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;

  PcaInputs<T> params;
  rmm::device_uvector<T> explained_vars, explained_vars_ref, components, components_ref, trans_data,
    trans_data_ref, data, data_back, data2, data2_back;
};
// Test parameters: {tolerance, len, n_row, n_col, len2, n_row2, n_col2, seed, algo}
const std::vector<PcaInputs<float>> inputsf2 = {
  {0.01f, 3 * 2, 3, 2, 1024 * 128, 1024, 128, 1234ULL, 0},
  {0.01f, 3 * 2, 3, 2, 256 * 32, 256, 32, 1234ULL, 1}};

const std::vector<PcaInputs<double>> inputsd2 = {
  {0.01, 3 * 2, 3, 2, 1024 * 128, 1024, 128, 1234ULL, 0},
  {0.01, 3 * 2, 3, 2, 256 * 32, 256, 32, 1234ULL, 1}};

// Explained variances against the sklearn reference
typedef PcaTest<float> PcaTestValF;
TEST_P(PcaTestValF, Result)
{
  ASSERT_TRUE(devArrMatch(explained_vars.data(),
                          explained_vars_ref.data(),
                          params.n_col,
                          MLCommon::CompareApproxAbs<float>(params.tolerance),
                          handle.get_stream()));
}

typedef PcaTest<double> PcaTestValD;
TEST_P(PcaTestValD, Result)
{
  ASSERT_TRUE(devArrMatch(explained_vars.data(),
                          explained_vars_ref.data(),
                          params.n_col,
                          MLCommon::CompareApproxAbs<double>(params.tolerance),
                          handle.get_stream()));
}

// Principal components (left vectors) against the sklearn reference
typedef PcaTest<float> PcaTestLeftVecF;
TEST_P(PcaTestLeftVecF, Result)
{
  ASSERT_TRUE(devArrMatch(components.data(),
                          components_ref.data(),
                          (params.n_col * params.n_col),
                          MLCommon::CompareApproxAbs<float>(params.tolerance),
                          handle.get_stream()));
}

typedef PcaTest<double> PcaTestLeftVecD;
TEST_P(PcaTestLeftVecD, Result)
{
  ASSERT_TRUE(devArrMatch(components.data(),
                          components_ref.data(),
                          (params.n_col * params.n_col),
                          MLCommon::CompareApproxAbs<double>(params.tolerance),
                          handle.get_stream()));
}

// Transformed data against the sklearn reference
typedef PcaTest<float> PcaTestTransDataF;
TEST_P(PcaTestTransDataF, Result)
{
  ASSERT_TRUE(devArrMatch(trans_data.data(),
                          trans_data_ref.data(),
                          (params.n_row * params.n_col),
                          MLCommon::CompareApproxAbs<float>(params.tolerance),
                          handle.get_stream()));
}

typedef PcaTest<double> PcaTestTransDataD;
TEST_P(PcaTestTransDataD, Result)
{
  ASSERT_TRUE(devArrMatch(trans_data.data(),
                          trans_data_ref.data(),
                          (params.n_row * params.n_col),
                          MLCommon::CompareApproxAbs<double>(params.tolerance),
                          handle.get_stream()));
}

// Round trip: inverse_transform(transform(data)) should recover data
typedef PcaTest<float> PcaTestDataVecSmallF;
TEST_P(PcaTestDataVecSmallF, Result)
{
  ASSERT_TRUE(devArrMatch(data.data(),
                          data_back.data(),
                          (params.n_col * params.n_col),
                          MLCommon::CompareApproxAbs<float>(params.tolerance),
                          handle.get_stream()));
}

typedef PcaTest<double> PcaTestDataVecSmallD;
TEST_P(PcaTestDataVecSmallD, Result)
{
  ASSERT_TRUE(devArrMatch(data.data(),
                          data_back.data(),
                          (params.n_col * params.n_col),
                          MLCommon::CompareApproxAbs<double>(params.tolerance),
                          handle.get_stream()));
}

// FIXME: These tests are disabled due to driver 418+ making them fail:
// https://github.com/rapidsai/cuml/issues/379
// Note: only the first n_col2*n_col2 elements of the round-tripped random
// matrix are compared, not the full len2 — presumably intentional given the
// driver issue above; verify before re-enabling.
typedef PcaTest<float> PcaTestDataVecF;
TEST_P(PcaTestDataVecF, Result)
{
  ASSERT_TRUE(devArrMatch(data2.data(),
                          data2_back.data(),
                          (params.n_col2 * params.n_col2),
                          MLCommon::CompareApproxAbs<float>(params.tolerance),
                          handle.get_stream()));
}

typedef PcaTest<double> PcaTestDataVecD;
TEST_P(PcaTestDataVecD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(data2.data(),
                                    data2_back.data(),
                                    (params.n_col2 * params.n_col2),
                                    MLCommon::CompareApproxAbs<double>(params.tolerance),
                                    handle.get_stream()));
}

INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestValF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestValD, ::testing::ValuesIn(inputsd2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestLeftVecF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestLeftVecD, ::testing::ValuesIn(inputsd2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestDataVecSmallF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestDataVecSmallD, ::testing::ValuesIn(inputsd2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestTransDataF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestTransDataD, ::testing::ValuesIn(inputsd2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestDataVecF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(PcaTests, PcaTestDataVecD, ::testing::ValuesIn(inputsd2));
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/handle_test.cu | /*
* Copyright (c) 2019-2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <cuml/cuml_api.h>
// A cuML handle can be created on a user stream and destroyed once.
// Fix: the test-owned CUDA stream is now checked at creation and destroyed at
// the end — it was previously leaked on every run.
TEST(HandleTest, CreateHandleAndDestroy)
{
  cumlHandle_t handle;
  cudaStream_t stream;
  EXPECT_EQ(cudaSuccess, cudaStreamCreate(&stream));
  cumlError_t status = cumlCreate(&handle, stream);
  EXPECT_EQ(CUML_SUCCESS, status);
  status = cumlDestroy(handle);
  EXPECT_EQ(CUML_SUCCESS, status);
  // The stream belongs to the test, not the handle: release it.
  EXPECT_EQ(cudaSuccess, cudaStreamDestroy(stream));
}
// Destroying an already-destroyed handle must report CUML_INVALID_HANDLE.
// (Test name keeps the historical "Destory" typo to avoid breaking CI
// --gtest_filter patterns.)
// Fix: the test-owned CUDA stream is now destroyed — it was previously leaked.
TEST(HandleTest, DoubleDestoryFails)
{
  cumlHandle_t handle;
  cudaStream_t stream;
  EXPECT_EQ(cudaSuccess, cudaStreamCreate(&stream));
  cumlError_t status = cumlCreate(&handle, stream);
  EXPECT_EQ(CUML_SUCCESS, status);
  status = cumlDestroy(handle);
  EXPECT_EQ(CUML_SUCCESS, status);
  // handle is destroyed: a second destroy must fail cleanly
  status = cumlDestroy(handle);
  EXPECT_EQ(CUML_INVALID_HANDLE, status);
  EXPECT_EQ(cudaSuccess, cudaStreamDestroy(stream));
}
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/cd_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/solvers/params.hpp>
#include <cuml/solvers/solver.hpp>

#include <raft/core/handle.hpp>
#include <raft/stats/mean.cuh>
#include <raft/stats/meanvar.cuh>
#include <raft/stats/stddev.cuh>
#include <raft/util/cudart_utils.hpp>

#include <rmm/device_uvector.hpp>

#include <gtest/gtest.h>
#include <test_utils.h>

#include <vector>
namespace ML {
namespace Solver {
// Parameters for one coordinate-descent (CD) test case.
template <typename T>
struct CdInputs {
  T tol;      // absolute tolerance for coefficient comparisons
  int n_row;  // number of samples
  int n_col;  // number of features
};
// Fixture for the coordinate-descent solver tests. Allocates device buffers
// for a small fixed regression problem and, in lasso(), runs five
// lasso/elastic-net configurations whose reference coefficients were computed
// with scikit-learn (see the comment inside lasso()).
template <typename T>
class CdTest : public ::testing::TestWithParam<CdInputs<T>> {
 public:
  // Allocates every device buffer for the configured problem size and
  // zero-initializes all coefficient buffers.
  CdTest()
    : params(::testing::TestWithParam<CdInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      data(params.n_row * params.n_col, stream),
      labels(params.n_row, stream),
      sample_weight(params.n_row, stream),
      coef(params.n_col, stream),
      coef2(params.n_col, stream),
      coef3(params.n_col, stream),
      coef4(params.n_col, stream),
      coef5(params.n_col, stream),
      coef_ref(params.n_col, stream),
      coef2_ref(params.n_col, stream),
      coef3_ref(params.n_col, stream),
      coef4_ref(params.n_col, stream),
      coef5_ref(params.n_col, stream)
  {
    RAFT_CUDA_TRY(cudaMemsetAsync(coef.data(), 0, coef.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef2.data(), 0, coef2.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef3.data(), 0, coef3.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef4.data(), 0, coef4.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef5.data(), 0, coef5.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef_ref.data(), 0, coef_ref.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef2_ref.data(), 0, coef2_ref.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef3_ref.data(), 0, coef3_ref.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef4_ref.data(), 0, coef4_ref.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef5_ref.data(), 0, coef5_ref.size() * sizeof(T), stream));
  }

 protected:
  // Runs five cdFit() configurations on a fixed problem and stores the
  // resulting coefficients in coef..coef5. The hard-coded fixtures assume
  // the 4x2 (n_row x n_col) shape from the test inputs.
  void lasso()
  {
    int len = params.n_row * params.n_col;

    // Host-side fixtures. std::vector replaces the original runtime-length
    // arrays (`T data_h[len]` is a VLA — a non-standard C++ extension).
    std::vector<T> data_h = {1.0, 1.2, 2.0, 2.0, 4.5, 2.0, 2.0, 3.0};
    raft::update_device(data.data(), data_h.data(), len, stream);

    std::vector<T> labels_h = {6.0, 8.3, 9.8, 11.2};
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);

    std::vector<T> sample_weight_h = {1.0, 0.1, 1.81, 3.2};
    raft::update_device(sample_weight.data(), sample_weight_h.data(), params.n_row, stream);

    /* How to reproduce the coefficients for this test:

    from sklearn.preprocessing import StandardScaler
    scaler = StandardScaler(with_mean=True, with_std=True)
    x_norm = scaler.fit_transform(data_h)

    m = ElasticNet(fit_intercept=, normalize=, alpha=, l1_ratio=)
    m.fit(x_norm, y)
    print(m.coef_ / scaler.scale_ if normalize else m.coef_)
    */
    std::vector<T> coef_ref_h = {4.90832, 0.35031};
    raft::update_device(coef_ref.data(), coef_ref_h.data(), params.n_col, stream);

    std::vector<T> coef2_ref_h = {2.53530, -0.36832};
    raft::update_device(coef2_ref.data(), coef2_ref_h.data(), params.n_col, stream);

    std::vector<T> coef3_ref_h = {2.932841, 1.15248};
    raft::update_device(coef3_ref.data(), coef3_ref_h.data(), params.n_col, stream);

    std::vector<T> coef4_ref_h = {1.75420431, -0.16215289};
    raft::update_device(coef4_ref.data(), coef4_ref_h.data(), params.n_col, stream);

    std::vector<T> coef5_ref_h = {0.12381484, -0.31647292};
    raft::update_device(coef5_ref.data(), coef5_ref_h.data(), params.n_col, stream);

    bool fit_intercept = false;
    bool normalize     = false;
    int epochs         = 200;
    T alpha            = T(0.2);
    T l1_ratio         = T(1.0);
    bool shuffle       = false;
    T tol              = T(1e-4);

    ML::loss_funct loss = ML::loss_funct::SQRD_LOSS;

    T* sample_weight_ptr = nullptr;

    // 1) Pure lasso (l1_ratio = 1), no intercept, no normalization -> coef.
    intercept = T(0);
    cdFit(handle,
          data.data(),
          params.n_row,
          params.n_col,
          labels.data(),
          coef.data(),
          &intercept,
          fit_intercept,
          normalize,
          epochs,
          loss,
          alpha,
          l1_ratio,
          shuffle,
          tol,
          sample_weight_ptr);

    // 2) Same, but with an intercept -> coef2.
    fit_intercept = true;
    intercept2    = T(0);
    cdFit(handle,
          data.data(),
          params.n_row,
          params.n_col,
          labels.data(),
          coef2.data(),
          &intercept2,
          fit_intercept,
          normalize,
          epochs,
          loss,
          alpha,
          l1_ratio,
          shuffle,
          tol,
          sample_weight_ptr);

    // 3) Elastic net (alpha = 1, l1_ratio = 0.5), no intercept -> coef3.
    alpha         = T(1.0);
    l1_ratio      = T(0.5);
    fit_intercept = false;
    intercept     = T(0);
    cdFit(handle,
          data.data(),
          params.n_row,
          params.n_col,
          labels.data(),
          coef3.data(),
          &intercept,
          fit_intercept,
          normalize,
          epochs,
          loss,
          alpha,
          l1_ratio,
          shuffle,
          tol,
          sample_weight_ptr);

    // 4) Elastic net with intercept and input normalization -> coef4.
    fit_intercept = true;
    normalize     = true;
    intercept2    = T(0);
    cdFit(handle,
          data.data(),
          params.n_row,
          params.n_col,
          labels.data(),
          coef4.data(),
          &intercept2,
          fit_intercept,
          normalize,
          epochs,
          loss,
          alpha,
          l1_ratio,
          shuffle,
          tol,
          sample_weight_ptr);

    // 5) Elastic net with intercept and per-sample weights -> coef5.
    fit_intercept     = true;
    normalize         = false;
    intercept2        = T(0);
    sample_weight_ptr = sample_weight.data();
    cdFit(handle,
          data.data(),
          params.n_row,
          params.n_col,
          labels.data(),
          coef5.data(),
          &intercept2,
          fit_intercept,
          normalize,
          epochs,
          loss,
          alpha,
          l1_ratio,
          shuffle,
          tol,
          sample_weight_ptr);
  }

  void SetUp() override { lasso(); }

 protected:
  CdInputs<T> params;
  raft::handle_t handle;
  cudaStream_t stream = 0;

  // Device buffers: input matrix, targets, weights, and fitted vs. reference
  // coefficients for each of the five configurations above.
  rmm::device_uvector<T> data, labels, sample_weight, coef, coef_ref;
  rmm::device_uvector<T> coef2, coef2_ref;
  rmm::device_uvector<T> coef3, coef3_ref;
  rmm::device_uvector<T> coef4, coef4_ref;
  rmm::device_uvector<T> coef5, coef5_ref;

  // NOTE(review): intercept/intercept2 are overwritten by successive fits;
  // only the values from the last fits that used them remain after lasso().
  T intercept, intercept2;
};
// Test configurations: {tolerance, n_row, n_col}. The hard-coded fixtures in
// CdTest::lasso() assume the 4x2 shape used here.
const std::vector<CdInputs<float>> inputsf2 = {{0.01f, 4, 2}};
const std::vector<CdInputs<double>> inputsd2 = {{0.01, 4, 2}};
typedef CdTest<float> CdTestF;

// Single precision: check each fitted coefficient vector against its
// scikit-learn reference, and verify that the fused meanvar() kernel agrees
// with separate mean()/vars() computations on the same data.
TEST_P(CdTestF, Fit)
{
  MLCommon::CompareApproxAbs<float> coef_cmp(params.tol);
  ASSERT_TRUE(MLCommon::devArrMatch(coef_ref.data(), coef.data(), params.n_col, coef_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(coef2_ref.data(), coef2.data(), params.n_col, coef_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(coef3_ref.data(), coef3.data(), params.n_col, coef_cmp));

  // Compute column means/variances twice: separately and via the fused kernel.
  rmm::device_uvector<float> mu_separate(params.n_col, stream);
  rmm::device_uvector<float> mu_fused(params.n_col, stream);
  rmm::device_uvector<float> var_separate(params.n_col, stream);
  rmm::device_uvector<float> var_fused(params.n_col, stream);
  raft::stats::mean(
    mu_separate.data(), data.data(), params.n_col, params.n_row, false, false, stream);
  raft::stats::vars(var_separate.data(),
                    data.data(),
                    mu_separate.data(),
                    params.n_col,
                    params.n_row,
                    false,
                    false,
                    stream);
  raft::stats::meanvar(
    mu_fused.data(), var_fused.data(), data.data(), params.n_col, params.n_row, false, false, stream);

  MLCommon::CompareApprox<float> stats_cmp(0.0001);
  ASSERT_TRUE(MLCommon::devArrMatch(mu_separate.data(), mu_fused.data(), params.n_col, stats_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(var_separate.data(), var_fused.data(), params.n_col, stats_cmp));

  ASSERT_TRUE(MLCommon::devArrMatch(coef4_ref.data(), coef4.data(), params.n_col, coef_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(coef5_ref.data(), coef5.data(), params.n_col, coef_cmp));
}
typedef CdTest<double> CdTestD;

// Double precision: same checks as the float variant — fitted coefficients
// against references, plus meanvar() vs. separate mean()/vars().
TEST_P(CdTestD, Fit)
{
  MLCommon::CompareApproxAbs<double> coef_cmp(params.tol);
  ASSERT_TRUE(MLCommon::devArrMatch(coef_ref.data(), coef.data(), params.n_col, coef_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(coef2_ref.data(), coef2.data(), params.n_col, coef_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(coef3_ref.data(), coef3.data(), params.n_col, coef_cmp));

  // Compute column means/variances twice: separately and via the fused kernel.
  rmm::device_uvector<double> mu_separate(params.n_col, stream);
  rmm::device_uvector<double> mu_fused(params.n_col, stream);
  rmm::device_uvector<double> var_separate(params.n_col, stream);
  rmm::device_uvector<double> var_fused(params.n_col, stream);
  raft::stats::mean(
    mu_separate.data(), data.data(), params.n_col, params.n_row, false, false, stream);
  raft::stats::vars(var_separate.data(),
                    data.data(),
                    mu_separate.data(),
                    params.n_col,
                    params.n_row,
                    false,
                    false,
                    stream);
  raft::stats::meanvar(
    mu_fused.data(), var_fused.data(), data.data(), params.n_col, params.n_row, false, false, stream);

  MLCommon::CompareApprox<double> stats_cmp(0.0001);
  ASSERT_TRUE(MLCommon::devArrMatch(mu_separate.data(), mu_fused.data(), params.n_col, stats_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(var_separate.data(), var_fused.data(), params.n_col, stats_cmp));

  ASSERT_TRUE(MLCommon::devArrMatch(coef4_ref.data(), coef4.data(), params.n_col, coef_cmp));
  ASSERT_TRUE(MLCommon::devArrMatch(coef5_ref.data(), coef5.data(), params.n_col, coef_cmp));
}
// Run each Fit test over the single 4x2 input configuration defined above.
INSTANTIATE_TEST_CASE_P(CdTests, CdTestF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(CdTests, CdTestD, ::testing::ValuesIn(inputsd2));
} // namespace Solver
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/trustworthiness_test.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/metrics/metrics.hpp>
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <vector>
using namespace ML::Metrics;
class TrustworthinessScoreTest : public ::testing::Test {
protected:
void basicTest()
{
std::vector<float> X = {
5.6142087, 8.59787, -4.382763, -3.6452143, -5.8816037, -0.6330313, 4.6920023,
-0.79210913, 0.6106314, 2.1210914, 5.919943, -8.43784, -6.4819884, 0.41001374,
-6.1052523, -4.0825715, -5.314755, -2.834671, 5.751696, -6.5012555, -0.4719201,
-7.53353, 7.6789393, -1.4959852, -5.5977287, -9.564147, 1.2902534, 3.559834,
-6.7659483, 8.265964, 4.595404, 9.133477, -6.1553917, -6.319754, -2.9039452,
4.4150834, -3.094395, -4.426273, 9.584571, -5.64133, 6.6209483, 7.4044604,
3.9620576, 5.639907, 10.33007, -0.8792053, 5.143776, -7.464049, 1.2448754,
-5.6300974, 5.4518576, 4.119535, 6.749645, 7.627064, -7.2298336, 1.9681473,
-6.9083176, 6.404673, 0.07186685, 9.0994835, 8.51037, -8.986389, 0.40534487,
2.115397, 4.086756, 1.2284287, -2.6272132, 0.06527536, -9.587425, -7.206078,
7.864875, 7.4397306, -6.9233336, -2.6643622, 3.3466153, 7.0408177, -3.6069896,
-9.971769, 4.4075623, 7.9063697, 2.559074, 4.323717, 1.6867131, -1.1576937,
-9.893141, -3.251416, -7.4889135, -4.0588717, -2.73338, -7.4852257, 3.4460473,
9.759119, -5.4680476, -4.722435, -8.032619, -1.4598992, 4.227361, 3.135568,
1.1950601, 1.1982028, 6.998856, -6.131138, -6.6921015, 0.5361224, -7.1213965,
-5.6104236, -7.2212887, -2.2710054, 8.544764, -6.0254574, 1.4582269, -5.5587835,
8.031556, -0.26328218, -5.2591386, -9.262641, 2.8691363, 5.299787, -9.209455,
8.523085, 5.180329, 10.655528, -5.7171874, -6.7739563, -3.6306462, 4.067106,
-1.5912259, -3.2345476, 8.042973, -3.6364832, 4.1242137, 9.886953, 5.4743724,
6.3058076, 9.369645, -0.5175337, 4.9859877, -7.879498, 1.358422, -4.147944,
3.8984218, 5.894656, 6.4903927, 8.702036, -8.023722, 2.802145, -7.748032,
5.8461113, -0.34215945, 11.298865, 1.4107164, -9.949621, -1.6257563, -10.655836,
2.4528909, 1.1570255, 5.170669, 2.8398793, 7.1838694, 9.088459, 2.631155,
3.964414, 2.8769252, 0.04198391, -0.16993195, 3.6747139, -2.8377378, 6.1782537,
10.759618, -4.5642614, -8.522967, 0.8614642, 6.623416, -1.029324, 5.5488334,
-7.804511, 2.128833, 7.9042315, 7.789576, -2.7944536, 0.72271067, -10.511495,
-0.78634536, -10.661714, 2.9376361, 1.9148129, 6.22859, 0.26264945, 8.028384,
6.8743043, 0.9351067, 7.0690722, 4.2846055, 1.4134506, -0.18144785, 5.2778087,
-1.7140163, 9.217541, 8.602799, -2.6537218, -7.8377395, 1.1244944, 5.4540544,
-0.38506773, 3.9885726, -10.76455, 1.4440702, 9.136163, 6.664117, -5.7046547,
8.038592, -9.229767, -0.2799413, 3.6064725, 4.187257, 1.0516582, -2.0707326,
-0.7615968, -8.561018, -3.7831352, 10.300297, 5.332594, -6.5880876, -4.2508664,
1.7985519, 5.7226253, -4.1223383, -9.6697855, 1.4885283, 7.524974, 1.7206005,
4.890457, 3.7264557, 0.4428284, -9.922455, -4.250455, -6.4410596, -2.107994,
-1.4109765, -6.1325397, 0.32883006, 6.0489736, 7.7257385, -8.281174, 1.0129383,
-10.792166, 8.378851, 10.802716, 9.848448, -9.188757, 1.3151443, 1.9971865,
-2.521849, 4.3268294, -7.775683, -2.2902298, 3.0824065, -7.17559, 9.6100855,
7.3965735, -10.476525, 5.895973, -3.6974669, -7.6688933, 1.7354839, -7.4045196,
-1.7992063, -4.0394845, 5.2471714, -2.250571, 2.528036, -8.343515, -2.2374575,
-10.019771, 0.73371273, 3.1853926, 2.7994921, 2.6637669, 7.620401, 7.515571,
0.68636256, 5.834537, 4.650282, -1.0362619, 0.4461701, 3.7870514, -4.1340904,
7.202998, 9.736904, -3.005512, -8.920467, 1.1228397, 6.2598724, 1.2812365,
4.5442104, -8.791537, 0.92113096, 8.464749, 8.359035, -4.3923397, 1.2252625,
-10.1986475, -1.4409319, -10.013967, 3.9071581, 1.683064, 4.877419, 1.6570637,
9.559105, 7.3546534, 0.36635467, 5.220211, 4.6303267, 0.6601065, 0.16149978,
3.8818731, -3.4438233, 8.42085, 8.659159, -3.0935583, -8.039611, 2.3060374,
5.134666, 1.0458113, 6.0190983, -9.143728, 0.99048865, 9.210842, 6.670241,
-5.9614363, 0.8747396, 7.078824, 8.067469, -10.314754, 0.45977542, -9.28306,
9.1838665, 9.318644, 7.189082, -11.092555, 1.0320464, 3.882163, 0.10953151,
7.9029684, -6.9068265, -1.3526366, 5.3996363, -8.430931, 11.452577, 6.39663,
-11.090514, 4.6662245, -3.1268113, -8.357452, 2.2276728, -10.357126, -0.9291848,
-3.4193344, 3.1289792, -2.5030103, 6.772719, 11.457757, -4.2125936, -6.684548,
-4.7611327, 3.6960156, -2.3030636, -3.0591488, 10.452471, -4.1267314, 5.66614,
7.501461, 5.072407, 6.636537, 8.990381, -0.2559256, 4.737867, -6.2149944,
2.535682, -5.5484023, 5.7113924, 3.4742818, 7.9915137, 7.0052586, -7.156467,
1.4354781, -8.286235, 5.7523417, -2.4175215, 9.678009, 0.05066403, -9.645226,
-2.2658763, -9.518178, 4.493372, 2.3232365, 2.1659086, 0.42507997, 8.360246,
8.23535, 2.6878164, 5.236947, 3.4924245, -0.6089895, 0.8884741, 4.359464,
-4.6073823, 7.83441, 8.958755, -3.4690795, -9.182282, 1.2478025, 5.6311107,
-1.2408862, 3.6316886, -8.684654, 2.1078515, 7.2813864, 7.9265943, -3.6135032,
0.4571511, 8.493568, 10.496853, -7.432897, 0.8625995, -9.607528, 7.2899456,
8.83158, 8.908199, -10.300263, 1.1451302, 3.7871468, -0.97040755, 5.7664757,
-8.9688, -2.146672, 5.9641485, -6.2908535, 10.126465, 6.1553903, -12.066902,
6.301596, -5.0419583, -8.228695, 2.4879954, -8.918582, -3.7434099, -4.1593685,
3.7431836, -1.1704745, 0.5524103, 9.109399, 9.571567, -11.209955, 1.2462777,
-9.554555, 9.091726, 11.477966, 7.630937, -10.450911, 1.9205878, 5.358983,
-0.44546837, 6.7611346, -9.74753, -0.5939732, 3.8892255, -6.437991, 10.294727,
5.6723895, -10.7883, 6.192348, -5.293862, -10.811491, 1.0194173, -7.074576,
-3.192368, -2.5231771, 4.2791643, -0.53309685, 0.501366, 9.636625, 7.710316,
-6.4219728, 1.0975566, -8.218886, 6.9011984, 9.873679, 8.903804, -9.316832,
1.2404599, 4.9039655, 1.2272617, 4.541515, -5.2753224, -3.2196746, 3.1303136,
-7.285681, 9.041425, 5.6417427, -9.93667, 5.7548947, -5.113397, -8.544622,
4.182665, -7.7709813, -3.2810235, -3.312072, 3.8900535, -2.0604856, 6.709082,
-8.461194, 1.2666026, 4.8770437, 2.6955879, 3.0340345, -1.1614609, -3.536341,
-7.090382, -5.36146, 9.072544, 6.4554095, -4.4728956, -1.88395, 3.1095037,
8.782348, -3.316743, -8.65248, 1.6802986, 8.186188, 2.1783829, 4.931278,
4.158475, 1.4033595, -11.320101, -3.7084908, -6.740436, -2.5555193, -1.0451177,
-6.5569925, 0.82810307, 8.505919, 8.332857, -9.488569, -0.21588463, -8.056692,
8.493993, 7.6401625, 8.812983, -9.377281, 2.4369764, 3.1766508, 0.6300803,
5.6666765, -7.913654, -0.42301777, 4.506412, -7.8954244, 10.904591, 5.042256,
-9.626183, 8.347351, -3.605006, -7.923387, 1.1024277, -8.705793, -2.5151258,
-2.5066147, 4.0515003, -2.060757, 6.2635093, 8.286584, -6.0509276, -6.76452,
-3.1158175, 1.6578803, -1.4608748, -1.24211, 8.151246, -4.2970877, 6.093071,
7.4911637, 4.51018, 4.8425875, 9.211085, -2.4386222, 4.5830803, -5.6079445,
2.3713675, -4.0707507, 3.1787417, 5.462342, 6.915912, 6.3928423, -7.2970796,
5.0112796, -9.140893, 4.9990606, 0.38391754, 7.7088532, 1.9340848, 8.18833,
8.16617, -9.42086, -0.3388326, -9.659727, 8.243045, 8.099073, 8.439428,
-7.038694, 2.1077902, 3.3866816, -1.9975324, 7.4972878, -7.2525196, -1.553731,
4.08758, -6.6922374, 9.50525, 4.026735, -9.243538, 7.2740564, -3.9319072,
-6.3228955, 1.6693478, -7.923119, -3.7423058, -2.2813146, 5.3469067, -1.8285407,
3.3118162, 8.826356, -4.4641976, -6.4751124, -9.200089, -2.519147, 4.225298,
2.4105988, -0.4344186, 0.53441775, 5.2836394, -8.2816105, -4.996147, -1.6870759,
-7.8543897, -3.9788852, -7.0346904, -3.1289773, 7.4567637, -5.6227813, 1.0709786,
-8.866012, 8.427324, -1.1755563, -5.789216, -8.197835, 5.3342214, 6.0646234,
-6.8975716, 7.717031, 3.480355, 8.312151, -3.6645212, -3.0976524, -8.090359,
-1.9176173, 2.4257212, 1.9700835, 0.4098958, 2.1341088, 7.652741, -9.9595585,
-5.989757, 0.10119354, -7.935407, -5.792786, -5.22783, -4.318978, 5.414037,
-6.4621663, 1.670883, -6.9224787, 8.696932, -2.0214002, -6.6681314, -8.326418,
4.9049683, 5.4442496, -6.403739, 7.5822453, 7.0972915, -9.072851, -0.23897195,
1.7662339, 5.3096304, 1.983179, -2.222645, -0.34700772, -9.094717, -6.107907,
9.525174, 8.1550665, -5.6940084, -4.1636486, 1.7360662, 8.528821, -3.7299833,
-9.341266, 2.608542, 9.108706, 0.7978509, 4.2488184, 2.454484, 0.9446999,
-10.106636, -3.8973773, -6.6566644, -4.5647273, -0.99837756, -6.568582, 9.324853,
-7.9020953, 2.0910501, 2.2896829, 1.6790711, 1.3159255, -3.5258796, 1.8898442,
-8.105812, -4.924962, 8.771129, 7.1202874, -5.991957, -3.4106019, 2.4450088,
7.796387, -3.055946, -7.8971434, 1.9856719, 9.001636, 1.8511922, 3.019749,
3.1227696, 0.4822102, -10.021213, -3.530504, -6.225959, -3.0029628, -1.7881511,
-7.3879776, 1.3925704, 9.499782, -3.7318087, -3.7074296, -7.7466836, -1.5284524,
4.0535855, 3.112011, 0.10340207, -0.5429599, 6.67026, -9.155924, -4.924038,
0.64248866, -10.0103655, -3.2742946, -4.850029, -3.6707063, 8.586258, -5.855605,
4.906918, -6.7813993, 7.9938135, -2.5473144, -5.688948, -7.822478, 2.1421318,
4.66659, -9.701272, 9.549149, 0.8998125, -8.651497, -0.56899565, -8.639817,
2.3088377, 2.1264515, 3.2764478, 2.341989, 8.594338, 8.630639, 2.8440373,
6.2043204, 4.433932, 0.6320018, -1.8179281, 5.09452, -1.5741565, 8.153934,
8.744339, -3.6945698, -8.883078, 1.5329908, 5.2745943, 0.44716078, 4.8809066,
-7.9594903, 1.134374, 9.233994, 6.5528665, -4.520542, 9.477355, -8.622195,
-0.23191702, 2.0485356, 3.9379985, 1.5916302, -1.4516805, -0.0843819, -7.8554378,
-5.88308, 7.999766, 6.2572145, -5.585321, -4.0097756, 0.42382592, 6.160884,
-3.631315, -8.333449, 2.770595, 7.8495173, 3.3331623, 4.940415, 3.6207345,
-0.037517, -11.034698, -3.185103, -6.614664, -3.2177854, -2.0792234, -6.8879867,
7.821685, -8.455084, 1.0784642, 4.0033927, 2.7343264, 2.6052725, -4.1224284,
-0.89305353, -6.8267674, -4.9715133, 8.880253, 5.6994023, -5.9695024, -4.9181266,
1.3017995, 7.972617, -3.9452884, -10.424556, 2.4504194, 6.21529, 0.93840516,
4.2070026, 6.159839, 0.91979957, -8.706724, -4.317946, -6.6823545, -3.0388,
-2.464262, -7.3716645, 1.3926703, 6.544412, -5.6251183, -5.122411, -8.622049,
-2.3905911, 3.9138813, 1.9779967, -0.05011125, 0.13310997, 7.229751, -9.742043,
-8.08724, 1.2426697, -7.9230795, -3.3162494, -7.129571, -3.5488048, 7.4701195,
-5.2357526, 0.5917681, -6.272206, 6.342328, -2.909731, -4.991607, -8.845513,
3.3228495, 7.033246, -7.8180246, 8.214469, 6.3910093, 9.185153, -6.20472,
-7.713809, -3.8481297, 3.5579286, 0.7078448, -3.2893546, 7.384514, -4.448121,
3.0104196, 9.492943, 8.024847, 4.9114385, 9.965594, -3.014036, 5.182494,
-5.8806014, 2.5312455, -5.9926524, 4.474469, 6.3717875, 6.993105, 6.493093,
-8.935534, 3.004074, -8.055647, 8.315765, -1.3026813, 8.250377, 0.02606229,
6.8508425, 9.655665, -7.0116496, -0.41060972, -10.049198, 7.897801, 6.7791023,
8.3362, -9.821014, 2.491157, 3.5160472, -1.6228812, 7.398063, -8.769123,
-3.1743705, 3.2827861, -6.497855, 10.831924, 5.2761307, -9.704417, 4.3817043,
-3.9841619, -8.111647, 1.1883026, -8.115312, -2.9240117, -5.8879666, 4.20928,
-0.3587938, 6.935672, -10.177582, 0.48819053, 3.1250648, 2.9306343, 3.082544,
-3.477687, -1.3768549, -7.4922366, -3.756631, 10.039836, 3.6670392, -5.9761434,
-4.4728765, 3.244255, 7.027899, -2.3806512, -10.4100685, 1.605716, 7.7953773,
0.5408159, 1.7156523, 3.824097, -1.0604783, -10.142124, -5.246805, -6.5283823,
-4.579547, -2.42714, -6.709197, 2.7782338, 7.33353, -6.454507, -2.9929368,
-7.8362985, -2.695445, 2.4900775, 1.6682367, 0.4641757, -1.0495365, 6.9631333,
-9.291356, -8.23837, -0.34263706, -8.275113, -2.8454232, -5.0864096, -2.681942,
7.5450225, -6.2517986, 0.06810654, -6.470652, 4.9042645, -1.8369255, -6.6937943,
-7.9625087, 2.8510258, 6.180508, -8.282598, 7.919079, 1.4897474, 6.7217417,
-4.2459426, -4.114431, -8.375707, -2.143264, 5.6972933, 1.5574739, 0.39375135,
1.7930849, 5.1737595, -7.826241, -5.160268, -0.80433255, -7.839536, -5.2620406,
-5.4643164, -3.185536, 6.620315, -7.065227, 1.0524757, -6.125088, 5.7126627,
-1.6161644, -3.852159, -9.164279, 2.7005782, 5.946544, -8.468236, 8.2145405,
1.1035942, 6.590157, -4.0461283, -4.8090615, -7.6702685, -2.1121511, 5.1147075,
1.6128504, 2.0064135, 1.0544407, 6.0038295, -7.8282537, -4.801278, 0.32349443,
-8.0649805, -4.372714, -5.61336, -5.21394, 8.176595, -5.4753284, 1.7800134,
-8.267283, 7.2133374, -0.16594432, -6.317046, -9.490406, 4.1261597, 5.473317,
-7.7551675, 7.007468, 7.478628, -8.801905, 0.10975724, 3.5478222, 4.797803,
1.3825226, -3.357369, 0.99262005, -6.94877, -5.4781394, 9.632604, 5.7492557,
-5.9014316, -3.1632116, 2.340859, 8.708098, -3.1255999, -8.848661, 4.5612836,
8.455157, 0.73460823, 4.112301, 4.392744, -0.30759293, -6.8036823, -3.0331545,
-8.269506, -2.82415, -0.9411246, -5.993506, 2.1618164, -8.716055, -0.7432543,
-10.255819, 3.095418, 2.5131428, 4.752442, 0.9907621, 7.8279433, 7.85814,
0.50430876, 5.2840405, 4.457291, 0.03330028, -0.40692952, 3.9244103, -2.117118,
7.6977615, 8.759009, -4.2157164, -9.136053, 3.247858, 4.668686, 0.76162136,
5.3833632, -9.231471, 0.44309422, 8.380872, 6.7211227, -3.091507, 2.173508,
-9.038242, -1.3666698, -9.819077, 0.37825826, 2.3898845, 4.2440815, 1.9161536,
7.24787, 6.9124637, 1.6238527, 5.1140285, 3.1935842, 1.02845, -1.1273454,
5.638998, -2.497932, 8.342559, 8.586319, -2.9069402, -7.6387944, 3.5975037,
4.4115705, 0.41506064, 4.9078383, -9.68327, 1.8159529, 9.744613, 8.40622,
-4.495336, 9.244892, -8.789869, 1.3158468, 4.018167, 3.3922846, 2.652022,
-2.7495477, 0.2528986, -8.268324, -6.004913, 10.428784, 6.6580734, -5.537176,
-1.7177434, 2.7504628, 6.7735, -2.4454272, -9.998361, 2.9483433, 6.8266654,
2.3787718, 4.472637, 2.5871701, 0.7355365, -7.7027745, -4.1879907, -7.172832,
-4.1843605, -0.03646783, -5.419406, 6.958486, 11.011111, -7.1821184, -7.956423,
-3.408451, 4.6850276, -2.348787, -4.398289, 6.9787564, -3.8324208, 5.967827,
8.433518, 4.660108, 5.5657144, 9.964243, -1.3515275, 6.404833, -6.4805903,
2.4379845, -6.0816774, 1.752272, 5.3771873, 6.9613523, 6.9788294, -6.3894596,
3.7521114, -6.8034263, 6.4458385, -0.7233525, 10.512529, 4.362273, 9.231461,
-6.3382263, -7.659, -3.461823, 4.71463, 0.17817476, -3.685746, 7.2962036,
-4.6489477, 5.218017, 11.546999, 4.7218375, 6.8498397, 9.281103, -3.900459,
6.844054, -7.0886965, -0.05019227, -8.233724, 5.5808983, 6.374517, 8.321048,
7.969449, -7.3478637, 1.4917561, -8.003144, 4.780668, -1.1981848, 7.753739,
2.0260844, -8.880096, -3.4258451, -7.141975, 1.9637157, 1.814725, 5.311151,
1.4831505, 7.8483663, 7.257948, 1.395786, 6.417756, 5.376912, 0.59505713,
0.00062552, 3.6634305, -4.159713, 7.3571978, 10.966816, -2.5419605, -8.466229,
1.904205, 5.6338267, -0.52567476, 5.59736, -8.361799, 0.5009981, 8.460681,
7.3891273, -3.5272243, 5.0552278, 9.921456, -7.69693, -7.286378, -1.9198836,
3.1666567, -2.5832257, -2.2445817, 9.888111, -5.076563, 5.677401, 7.497946,
5.662994, 5.414262, 8.566503, -2.5530663, 7.1032815, -6.0612082, 1.3419591,
-4.9595256, 4.3377542, 4.3790717, 6.793512, 8.383502, -7.1278043, 3.3240774,
-9.379446, 6.838661, -0.81241214, 8.694813, 0.79141915, 7.632467, 8.575382,
-8.533798, 0.28954387, -7.5675836, 5.8653326, 8.97235, 7.1649346, -10.575289,
0.9359381, 5.02381, -0.5609511, 5.543464, -7.69131, -2.1792977, 2.4729247,
-6.1917787, 10.373678, 7.6549597, -8.809486, 5.5657206, -3.3169382, -8.042887,
2.0874746, -7.079005, -3.33398, -3.6843317, 4.0172358, -2.0754814, 1.1726758,
7.4618697, 6.9483604, -8.469206, 0.7401797, -10.318176, 8.384557, 10.5476265,
9.146971, -9.250223, 0.6290606, 4.4941425, -0.7514017, 7.2271705, -8.309598,
-1.4761636, 4.0140634, -6.021102, 9.132852, 5.6610966, -11.249811, 8.359293,
-1.9445792, -7.7393436, -0.3931331, -8.824441, -2.5995944, -2.5714035, 4.140213,
-3.6863053, 5.517265, 9.020411, -4.9286127, -7.871219, -3.7446704, 2.5179656,
-1.4543481, -2.2703636, 7.010597, -3.6436229, 6.753862, 7.4129915, 7.1406755,
5.653706, 9.5445175, 0.15698843, 4.761813, -7.698002, 1.6870106, -4.5410123,
4.171763, 5.3747005, 6.341021, 7.456738, -8.231657, 2.763487, -9.208167,
6.676799, -1.1957736, 10.062605, 4.0975976, 7.312957, -2.4981596, -2.9658387,
-8.150425, -2.1075552, 2.64375, 1.6636052, 1.1483809, 0.09276015, 5.8556347,
-7.8481026, -5.9913163, -0.02840613, -9.937289, -1.0486673, -5.2340155, -3.83912,
7.7165728, -8.409944, 0.80863273, -6.9119215, 7.5712357, 0.36031485, -6.056131,
-8.470033, 1.8678337, 3.0121377, -7.3096333, 8.205484, 5.262654, 8.774514,
-4.7603083, -7.2096143, -4.437014, 3.6080024, -1.624254, -4.2787876, 8.880863,
-4.8984556, 5.1782074, 9.944454, 3.911282, 3.5396595, 8.867042, -1.2006199,
5.393288, -5.6455317, 0.7829499, -4.0338907, 2.479272, 6.5080743, 8.582535,
7.0097537, -6.9823785, 3.984318, -7.225381, 5.3135114, -1.0391048, 8.951443,
-0.70119005, -8.510742, -0.42949116, -10.9224825, 2.8176029, 1.6800792, 5.778404,
1.7269998, 7.1975236, 7.7258267, 2.7632928, 5.3399253, 3.4650044, 0.01971426,
-1.6468811, 4.114996, -1.5110453, 6.8689218, 8.269899, -3.1568048, -7.0344677,
1.2911975, 5.950357, 0.19028673, 4.657226, -8.199647, 2.246055, 8.989509,
5.3101015, -4.2400866};
std::vector<float> X_embedded = {
-0.41849962, -0.53906363, 0.46958843, -0.35832694, -0.23779503, -0.29751351, -0.01072748,
-0.21353109, -0.54769957, -0.55086273, 0.37093949, -0.12714292, -0.06639574, -0.36098689,
-0.13060696, -0.07362658, -1.01205945, -0.39285606, 0.2864089, -0.32031146, -0.19595343,
0.08900568, -0.04813879, -0.06563424, -0.42655188, -0.69014251, 0.51459783, -0.1942696,
-0.07767916, -0.6119386, 0.04813685, -0.22557008, -0.56890118, -0.60293794, 0.43429622,
-0.09240723, -0.00624062, -0.25800395, -0.1886092, 0.01655941, -0.01961523, -0.14147359,
0.41414487, -0.8512944, -0.61199242, -0.18586016, 0.14024924, -0.41635606, -0.02890144,
0.1065347, 0.39700791, -1.14060664, -0.95313865, 0.14416681, 0.17306046, -0.53189689,
-0.98987544, -0.67918193, 0.41787854, -0.20878236, -0.06612862, 0.03502904, -0.03765266,
-0.0980606, -0.00971657, 0.29432917, 0.36575687, -1.1645509, -0.89094597, 0.03718805,
0.2310573, -0.38345811, -0.10401925, -0.10653082, 0.38469055, -0.88302094, -0.80197543,
0.03548668, 0.02775662, -0.54374295, 0.03379983, 0.00923623, 0.29320273, -1.05263519,
-0.93360096, 0.03778313, 0.12360487, -0.56437284, 0.0644429, 0.33432651, 0.36450726,
-1.22978747, -0.83822101, -0.18796451, 0.34888434, -0.3801491, -0.45327303, -0.59747899,
0.39697698, -0.15616602, -0.06159166, -0.40301991, -0.11725303, -0.11913263, -0.12406619,
-0.11227967, 0.43083835, -0.90535849, -0.81646025, 0.10012121, -0.0141237, -0.63747931,
0.04805023, 0.34190539, 0.50725192, -1.17861414, -0.74641538, -0.09333111, 0.27992678,
-0.56214809, 0.04970971, 0.36249384, 0.57705611, -1.16913795, -0.69849908, 0.10957897,
0.27983218, -0.62088525, 0.0410459, 0.23973398, 0.40960434, -1.14183664, -0.83321381,
0.02149482, 0.21720445, -0.49869928, -0.95655465, -0.51680422, 0.45761383, -0.08351214,
-0.12151554, 0.00819737, -0.20813803, -0.01055793, 0.25319234, 0.36154974, 0.1822421,
-1.15837133, -0.92209691, -0.0501582, 0.08535917, -0.54003763, -1.08675635, -1.04009593,
0.09408128, 0.07009826, -0.01762833, -0.19180447, -0.18029785, -0.20342001, 0.04034991,
0.1814747, 0.36906669, -1.13532007, -0.8852452, 0.0782818, 0.16825101, -0.50301319,
-0.29128098, -0.65341312, 0.51484352, -0.38758236, -0.22531103, -0.55021971, 0.10804344,
-0.3521522, -0.38849035, -0.74110794, 0.53761131, -0.25142813, -0.1118066, -0.47453368,
0.06347904, -0.23796193, -1.02682328, -0.47594091, 0.39515916, -0.2782529, -0.16566519,
0.08063579, 0.00810116, -0.06213913, -1.059654, -0.62496334, 0.53698546, -0.11806234,
0.00356161, 0.11513405, -0.14213292, 0.04102662, -0.36622161, -0.73686272, 0.48323864,
-0.27338892, -0.14203401, -0.41736352, 0.03332564, -0.21907479, -0.06396769, 0.01831361,
0.46263444, -1.01878166, -0.86486858, 0.17622118, -0.01249686, -0.74530888, -0.9354887,
-0.5027945, 0.38170099, -0.15547098, 0.00677824, -0.04677663, -0.13541745, 0.07253501,
-0.97933143, -0.58001202, 0.48235369, -0.18836913, -0.02430783, 0.07572441, -0.08101331,
0.00630076, -0.16881248, -0.67989182, 0.46083611, -0.43910736, -0.29321918, -0.38735861,
0.07669903, -0.29749861, -0.40047669, -0.56722462, 0.33168188, -0.13118173, -0.06672747,
-0.56856316, -0.26269144, -0.14236671, 0.10651901, 0.4962585, 0.38848072, -1.06653547,
-0.64079332, -0.47378591, 0.43195483, -0.04856951, -0.9840439, -0.70610428, 0.34028092,
-0.2089237, -0.05382041, 0.01625874, -0.02080803, -0.12535211, -0.04146428, -1.24533033,
0.48944879, 0.0578458, 0.26708388, -0.90321028, 0.35377088, -0.36791429, -0.35382384,
-0.52748734, 0.42854419, -0.31744713, -0.19174226, -0.39073724, -0.03258846, -0.19978228,
-0.36185205, -0.57412046, 0.43681973, -0.25414538, -0.12904905, -0.46334973, -0.03123853,
-0.11303604, -0.87073672, -0.45441297, 0.41825858, -0.25303507, -0.21845073, 0.10248682,
-0.11045569, -0.10002795, -0.00572806, 0.16519061, 0.42651513, -1.11417019, -0.83789682,
0.02995787, 0.16843079, -0.53874511, 0.03056994, 0.17877036, 0.49632853, -1.03276777,
-0.74778616, -0.03971953, 0.10907949, -0.67385727, -0.9523471, -0.56550741, 0.40409449,
-0.2703723, -0.10175014, 0.13605487, -0.06306008, -0.01768126, -0.4749442, -0.56964815,
0.39389887, -0.19248079, -0.04161081, -0.38728487, -0.20341556, -0.12656988, -0.35949609,
-0.46137866, 0.28798422, -0.06603147, -0.04363992, -0.60343552, -0.23565227, -0.10242701,
-0.06792886, 0.09689897, 0.33259571, -0.98854214, -0.84444433, 0.00673901, 0.13457057,
-0.43145794, -0.51500046, -0.50821936, 0.38000089, 0.0132636, 0.0580942, -0.40157595,
-0.11967677, 0.02549113, -0.10350953, 0.22918226, 0.40411913, -1.05619383, -0.71218503,
-0.02197581, 0.26422262, -0.34765676, 0.06601537, 0.21712676, 0.34723559, -1.20982027,
-0.95646334, 0.00793948, 0.27620381, -0.43475035, -0.67326003, -0.6137197, 0.43724492,
-0.17666136, -0.06591748, -0.18937394, -0.07400128, -0.06881691, -0.5201112, -0.61088628,
0.4225319, -0.18969463, -0.06921366, -0.33993208, -0.06990873, -0.10288513, -0.70659858,
-0.56003648, 0.46628812, -0.16090363, -0.0185108, -0.1431348, -0.1128775, -0.0078648,
-0.02323332, 0.04292452, 0.39291084, -0.94897962, -0.63863206, -0.16546988, 0.23698957,
-0.30633628};
raft::handle_t h;
cudaStream_t stream = h.get_stream();
rmm::device_uvector<float> d_X(X.size(), stream);
rmm::device_uvector<float> d_X_embedded(X_embedded.size(), stream);
raft::update_device(d_X.data(), X.data(), X.size(), stream);
raft::update_device(d_X_embedded.data(), X_embedded.data(), X_embedded.size(), stream);
// euclidean test
score = trustworthiness_score<float, raft::distance::DistanceType::L2SqrtUnexpanded>(
h, d_X.data(), d_X_embedded.data(), 50, 30, 8, 5);
}
  // Run the whole scenario once per test: generate data and compute the score.
  void SetUp() override { basicTest(); }
  // Nothing to release explicitly: all buffers are RAII-managed.
  void TearDown() override {}
protected:
double score;
};
typedef TrustworthinessScoreTest TrustworthinessScoreTestF;
// The expected score band was established empirically for this fixed dataset.
TEST_F(TrustworthinessScoreTestF, Result) { ASSERT_TRUE(0.9375 < score && score < 0.9379); }
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/fnv_hash_test.cpp | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/fil/fnv_hash.h>
#include <gtest/gtest.h>
#include <raft/core/error.hpp>
// One FNV test vector: an input byte sequence plus its expected 64-bit
// fingerprint and the expected 32-bit xor-folded variant.
struct fnv_vec_t {
  std::vector<char> input;
  unsigned long long correct_64bit;  // reference 64-bit fingerprint
  uint32_t correct_32bit;            // reference xor-folded 32-bit hash
};
/// Verifies the 64-bit FNV fingerprint and its 32-bit xor-folded variant
/// against precomputed reference values for each parameterized input.
class FNVHashTest : public testing::TestWithParam<fnv_vec_t> {
 protected:
  void SetUp() override { param = GetParam(); }

  void check()
  {
    const auto first = param.input.begin();
    const auto last  = param.input.end();
    // The full 64-bit fingerprint must match the reference exactly.
    const unsigned long long full_hash = fowler_noll_vo_fingerprint64(first, last);
    ASSERT(full_hash == param.correct_64bit, "Wrong hash computed");
    // The 32-bit variant xor-folds the two halves of the 64-bit hash.
    const unsigned long folded_hash = fowler_noll_vo_fingerprint64_32(first, last);
    ASSERT(folded_hash == param.correct_32bit, "Wrong xor-folded hash computed");
  }

  fnv_vec_t param;
};
// Reference vectors: the 64-bit values come from the canonical FNV-1a algorithm;
// each 32-bit value is the xor-fold of the high and low halves of the 64-bit hash.
std::vector<fnv_vec_t> fnv_vecs = {
  {{}, 14695981039346656037ull, 0xcbf29ce4 ^ 0x84222325},  // test #0
  // 32-bit output is xor-folded 64-bit output. The format below makes this obvious.
  {{0}, 0xaf63bd4c8601b7df, 0xaf63bd4c ^ 0x8601b7df},
  {{1}, 0xaf63bd4c8601b7de, 0xaf63bd4c ^ 0x8601b7de},
  {{2}, 0xaf63bd4c8601b7dd, 0xaf63bd4c ^ 0x8601b7dd},
  {{3}, 0xaf63bd4c8601b7dc, 0xaf63bd4c ^ 0x8601b7dc},
  {{1, 2}, 0x08328707b4eb6e38, 0x08328707 ^ 0xb4eb6e38},  // test #5
  {{2, 1}, 0x08328607b4eb6c86, 0x08328607 ^ 0xb4eb6c86},
  {{1, 2, 3}, 0xd949aa186c0c492b, 0xd949aa18 ^ 0x6c0c492b},
  {{1, 3, 2}, 0xd949ab186c0c4ad9, 0xd949ab18 ^ 0x6c0c4ad9},
  {{2, 1, 3}, 0xd94645186c0967b1, 0xd9464518 ^ 0x6c0967b1},
  {{2, 3, 1}, 0xd94643186c09644d, 0xd9464318 ^ 0x6c09644d},  // test #10
  {{3, 1, 2}, 0xd942e1186c0687ed, 0xd942e118 ^ 0x6c0687ed},
  {{3, 2, 1}, 0xd942e2186c0689a3, 0xd942e218 ^ 0x6c0689a3},
};
// Run check() once for every vector in fnv_vecs.
TEST_P(FNVHashTest, Import) { check(); }
INSTANTIATE_TEST_CASE_P(FilTests, FNVHashTest, testing::ValuesIn(fnv_vecs));
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/multi_sum_test.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <test_utils.h>
#include <cuml/fil/multi_sum.cuh>
#include <raft/core/handle.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <thrust/device_vector.h>
#include <thrust/fill.h>
#include <thrust/host_vector.h>
#include <gtest/gtest.h>
#include <cstddef>
// Reference implementation: thread t (for t < n_groups) serially sums the
// n_values elements of group t, reading them with stride n_groups.
// The surrounding __syncthreads() make the input written by all threads
// visible before the sum, and the result visible to all threads after it.
template <typename T>
__device__ void serial_multi_sum(const T* in, T* out, int n_groups, int n_values)
{
  __syncthreads();
  if (threadIdx.x < n_groups) {
    int reduction_id = threadIdx.x;
    T sum = 0;
    for (int i = 0; i < n_values; ++i)
      sum += in[reduction_id + i * n_groups];
    out[reduction_id] = sum;
  }
  __syncthreads();
}
// the most threads a block can have (hardware upper bound on blockDim.x)
const int MAX_THREADS = 1024;
// Configuration for one kernel block's worth of testing.
struct MultiSumTestParams {
  int radix;     // number of elements summed to 1 at each stage of the sum
  int n_groups;  // number of independent sums
  int n_values;  // number of elements to add in each sum
};
// Shared-memory scratch for the kernel: `work` is handed to multi_sum (which
// may overwrite it), so the reference result lives in a separate buffer.
template <typename T>
struct multi_sum_test_shmem {
  T work[MAX_THREADS];
  T correct_result[MAX_THREADS];
};
// Computes both the serial reference sums and multi_sum<R> over the same data,
// bumping the per-block error counter on any mismatch (with a small absolute
// tolerance to absorb floating-point rounding differences).
template <int R, typename T>
__device__ void test_single_radix(multi_sum_test_shmem<T>& s,
                                  T thread_value,
                                  MultiSumTestParams p,
                                  int* block_error_flag)
{
  s.work[threadIdx.x] = thread_value;
  serial_multi_sum(s.work, s.correct_result, p.n_groups, p.n_values);
  T sum = multi_sum<R>(s.work, p.n_groups, p.n_values);
  if (threadIdx.x < p.n_groups && 1e-4 < fabsf(sum - s.correct_result[threadIdx.x])) {
    atomicAdd(block_error_flag, 1);
  }
}
// One CUDA block per parameter set; dispatches the runtime radix value to the
// matching compile-time-radix helper. Radix values outside [2, 6] are never
// generated by the host side, so the switch needs no default case.
template <typename T>
__global__ void test_multi_sum_k(T* data, MultiSumTestParams* params, int* error_flags)
{
  __shared__ multi_sum_test_shmem<T> s;
  MultiSumTestParams p = params[blockIdx.x];
  switch (p.radix) {
    case 2: test_single_radix<2>(s, data[threadIdx.x], p, &error_flags[blockIdx.x]); break;
    case 3: test_single_radix<3>(s, data[threadIdx.x], p, &error_flags[blockIdx.x]); break;
    case 4: test_single_radix<4>(s, data[threadIdx.x], p, &error_flags[blockIdx.x]); break;
    case 5: test_single_radix<5>(s, data[threadIdx.x], p, &error_flags[blockIdx.x]); break;
    case 6: test_single_radix<6>(s, data[threadIdx.x], p, &error_flags[blockIdx.x]); break;
  }
}
// Host-side driver: generates random data for one thread block, enumerates
// (radix, n_groups, n_values) combinations, launches one verification block
// per combination, and asserts that no block reported a mismatch.
template <typename T>
class MultiSumTest : public testing::TestWithParam<int> {
 protected:
  void SetUp() override
  {
    block_dim_x = GetParam();
    data_d.resize(block_dim_x);
    this->generate_data();
    for (int radix = 2; radix <= 6; ++radix) {
      for (int n_groups = 1; n_groups < 15; ++n_groups) {  // >2x the max radix
        // 1..50 (if block_dim_x permits)
        for (int n_values = 1; n_values <= std::min(block_dim_x, 50) / n_groups; ++n_values)
          params_h.push_back({.radix = radix, .n_groups = n_groups, .n_values = n_values});
        // block_dim_x - 50 .. block_dim_x (if positive)
        // up until 50 would be included in previous loop
        for (int n_values = std::max(block_dim_x - 50, 51) / n_groups;
             n_values <= block_dim_x / n_groups;
             ++n_values)
          params_h.push_back({.radix = radix, .n_groups = n_groups, .n_values = n_values});
      }
    }
    params_d = params_h;
    error_d.resize(params_h.size());
    thrust::fill_n(error_d.begin(), params_h.size(), 0);
  }
  // Launches the kernel (one block per parameter set) and checks the error flags.
  void check()
  {
    T* data_p               = data_d.data().get();
    MultiSumTestParams* p_p = params_d.data().get();
    int* error_p            = error_d.data().get();
    test_multi_sum_k<<<params_h.size(), block_dim_x>>>(data_p, p_p, error_p);
    RAFT_CUDA_TRY(cudaPeekAtLastError());
    error = error_d;
    RAFT_CUDA_TRY(cudaDeviceSynchronize());
    for (std::size_t i = 0; i < params_h.size(); ++i) {
      // Fixed: the message reads "on <n_groups> sets sized <n_values>", so
      // n_groups must be passed before n_values (they used to be swapped).
      ASSERT(error[i] == 0,
             "test # %lu: block_dim_x %d multi_sum<%d>(on %d sets sized"
             " %d) gave wrong result",
             i,
             block_dim_x,
             params_h[i].radix,
             params_h[i].n_groups,
             params_h[i].n_values);
    }
  }
  // Implemented by the typed subclasses to fill data_d with random values.
  virtual void generate_data() = 0;
  // parameters
  raft::handle_t handle;
  int block_dim_x;
  thrust::host_vector<MultiSumTestParams> params_h;
  thrust::device_vector<MultiSumTestParams> params_d;
  thrust::host_vector<int> error;
  thrust::device_vector<int> error_d;
  thrust::device_vector<T> data_d;
};
// Block sizes under test: every small size in [2, 50) plus the 51 largest
// sizes in [MAX_THREADS - 50, MAX_THREADS].
std::vector<int> block_sizes = []() {
  std::vector<int> sizes;
  sizes.reserve(48 + 51);
  for (int bs = 2; bs < 50; ++bs) {
    sizes.push_back(bs);
  }
  for (int bs = MAX_THREADS - 50; bs <= MAX_THREADS; ++bs) {
    sizes.push_back(bs);
  }
  return sizes;
}();
// float32 specialization: uniform random data in [-1, 1].
class MultiSumTestFloat32 : public MultiSumTest<float> {
 public:
  void generate_data()
  {
    raft::random::Rng r(4321);
    r.uniform(data_d.data().get(), data_d.size(), -1.0f, 1.0f, cudaStreamDefault);
  }
};
TEST_P(MultiSumTestFloat32, Import) { check(); }
INSTANTIATE_TEST_CASE_P(FilTests, MultiSumTestFloat32, testing::ValuesIn(block_sizes));
// float64 specialization: uniform random data in [-1, 1].
class MultiSumTestFloat64 : public MultiSumTest<double> {
 public:
  void generate_data()
  {
    raft::random::Rng r(4321);
    r.uniform(data_d.data().get(), data_d.size(), -1.0, 1.0, cudaStreamDefault);
  }
};
TEST_P(MultiSumTestFloat64, Import) { check(); }
INSTANTIATE_TEST_CASE_P(FilTests, MultiSumTestFloat64, testing::ValuesIn(block_sizes));
// int specialization: uniform random integers; sums are exact, so the
// kernel's floating-point tolerance is trivially satisfied.
class MultiSumTestInt : public MultiSumTest<int> {
 public:
  void generate_data()
  {
    raft::random::Rng r(4321);
    r.uniformInt(data_d.data().get(), data_d.size(), -123'456, 123'456, cudaStreamDefault);
  }
};
TEST_P(MultiSumTestInt, Import) { check(); }
INSTANTIATE_TEST_CASE_P(FilTests, MultiSumTestInt, testing::ValuesIn(block_sizes));
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/linear_svm_test.cu | /*
* Copyright (c) 2021-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cuml/datasets/make_blobs.hpp>
#include <cuml/datasets/make_regression.hpp>
#include <cuml/svm/linear.hpp>
#include <raft/core/handle.hpp>
#include <gtest/gtest.h>
#include <raft/linalg/map_then_reduce.cuh>
#include <raft/linalg/reduce.cuh>
#include <raft/linalg/transpose.cuh>
#include <raft/linalg/unary_op.cuh>
#include <raft/random/rng.cuh>
#include <rmm/device_scalar.hpp>
#include <rmm/device_uvector.hpp>
#include <test_utils.h>
namespace ML {
namespace SVM {
// Dataset + model configuration for a single linear SVM test case.
struct LinearSVMTestParams {
  int nRowsTrain;  // rows in the training split
  int nRowsTest;   // rows in the test split
  int nCols;       // feature count
  /** nClasses == 1 implies regression. */
  int nClasses;
  /** Standard deviation of clusters or noise. */
  double errStd;
  double bias;       // intercept added to the generated data
  double tolerance;  // maximum acceptable error rate / deviation
  uint64_t seed;
  LinearSVMParams modelParams;
};
/**
 * Typed fixture: generates a dataset according to `params`, fits a
 * LinearSVMModel and exposes several quality checks for the TEST_SVM macro.
 * ParamsReader converts the raw gtest parameter into LinearSVMTestParams.
 */
template <typename T, typename ParamsReader>
struct LinearSVMTest : public ::testing::TestWithParam<typename ParamsReader::Params> {
  const LinearSVMTestParams params;
  const raft::handle_t handle;
  cudaStream_t stream;

  LinearSVMTest()
    : testing::TestWithParam<typename ParamsReader::Params>(),
      params(
        ParamsReader::read(::testing::TestWithParam<typename ParamsReader::Params>::GetParam())),
      handle(rmm::cuda_stream_per_thread, std::make_shared<rmm::cuda_stream_pool>(8)),
      stream(handle.get_stream())
  {
  }

  /** Whether the parameter combination is fittable at all; unfittable ones are skipped. */
  bool isInputValid() const
  {
    /* Fail to fit data with bias. */
    if (params.nClasses == 1 && params.bias != 0 && !params.modelParams.fit_intercept) return false;
    /* This means we don't have enough dimensions to linearly separate every cluster
       from the rest.
       In such case, the error is always huge (fitting is impossible).
     */
    if (params.nClasses > 1 && params.nClasses > (1 << min(30, params.nCols))) return false;
    return true;
  }

  /** Fit on the train split and measure the error on the test split: normalized
      squared error for regression, misclassification rate for classification. */
  testing::AssertionResult errorRate()
  {
    auto [XBuf, yBuf]    = genData(params.nRowsTrain + params.nRowsTest);
    auto [XTrain, XTest] = splitData(XBuf, params.nRowsTrain, params.nCols);
    auto [yTrain, yTest] = splitData(yBuf, params.nRowsTrain, 1);
    auto model           = LinearSVMModel<T>::fit(handle,
                                        params.modelParams,
                                        XTrain.data(),
                                        params.nRowsTrain,
                                        params.nCols,
                                        yTrain.data(),
                                        (const T*)nullptr);

    rmm::device_uvector<T> yOut(yTest.size(), stream);
    LinearSVMModel<T>::predict(
      handle, params.modelParams, model, XTest.data(), params.nRowsTest, params.nCols, yOut.data());

    rmm::device_scalar<T> errorBuf(stream);
    if (params.nClasses == 1)  // regression
      raft::linalg::mapThenSumReduce(
        errorBuf.data(),
        params.nRowsTest,
        [] __device__(const T yRef, const T yOut) {
          // squared difference normalized by the magnitudes of both values
          T s = yRef * yRef + yOut * yOut;
          T d = yRef - yOut;
          return d * d / s;
        },
        stream,
        yTest.data(),
        yOut.data());
    else  // classification
      raft::linalg::mapThenSumReduce(
        errorBuf.data(),
        params.nRowsTest,
        [] __device__(const T yRef, const T yOut) { return T(yRef != yOut); },
        stream,
        yTest.data(),
        yOut.data());

    // getting the error value forces the stream synchronization
    T error = errorBuf.value(stream) / T(params.nRowsTest);
    LinearSVMModel<T>::free(handle, model);
    if (error <= params.tolerance)
      return testing::AssertionSuccess();
    else
      return testing::AssertionFailure()
             << "Error rate = " << error << " > tolerance = " << params.tolerance;
  }

  /** Check that the predicted class probabilities sum to one for every row. */
  testing::AssertionResult probabilitySumsToOne()
  {
    if (!params.modelParams.probability)
      // Fixed typo in the failure message ("probabolistic" -> "probabilistic").
      return testing::AssertionFailure() << "Non-probabilistic model does not support this test.";
    if (params.nClasses < 2)
      return testing::AssertionFailure() << "Regression model does not support this test.";
    auto [XBuf, yBuf]    = genData(params.nRowsTrain + params.nRowsTest);
    auto [XTrain, XTest] = splitData(XBuf, params.nRowsTrain, params.nCols);
    auto [yTrain, yTest] = splitData(yBuf, params.nRowsTrain, 1);
    auto model           = LinearSVMModel<T>::fit(handle,
                                        params.modelParams,
                                        XTrain.data(),
                                        params.nRowsTrain,
                                        params.nCols,
                                        yTrain.data(),
                                        (const T*)nullptr);
    rmm::device_scalar<T> errorBuf(stream);
    rmm::device_uvector<T> yProbs(yTest.size() * params.nClasses, stream);
    LinearSVMModel<T>::predictProba(handle,
                                    params.modelParams,
                                    model,
                                    XTest.data(),
                                    params.nRowsTest,
                                    params.nCols,
                                    false,
                                    yProbs.data());
    // Sum the probabilities across classes for every row...
    rmm::device_uvector<T> yOut(yTest.size(), stream);
    raft::linalg::reduce<T, T, int>(
      yOut.data(), yProbs.data(), params.nClasses, params.nRowsTest, 0, true, true, stream);
    // ...and take the worst absolute deviation from one.
    raft::linalg::mapThenReduce(
      errorBuf.data(),
      params.nRowsTest,
      T(0),
      [] __device__(const T yOut) { return raft::myAbs<T>(1.0 - yOut); },
      cub::Max(),
      stream,
      yOut.data());
    T error = errorBuf.value(stream);
    LinearSVMModel<T>::free(handle, model);
    if (error <= params.tolerance)
      return testing::AssertionSuccess();
    else
      // Fixed message: the check is |1 - sum|, i.e. deviation from one, not zero.
      return testing::AssertionFailure()
             << "Sum of probabilities deviated from one (error = " << error << ")";
  }

  /** Same as errorRate for classification, but the predicted label is taken
      as the argmax over the per-class probabilities from predictProba. */
  testing::AssertionResult probabilityErrorRate()
  {
    if (!params.modelParams.probability)
      // Fixed typo in the failure message ("probabolistic" -> "probabilistic").
      return testing::AssertionFailure() << "Non-probabilistic model does not support this test.";
    if (params.nClasses < 2)
      return testing::AssertionFailure() << "Regression model does not support this test.";
    auto [XBuf, yBuf]    = genData(params.nRowsTrain + params.nRowsTest);
    auto [XTrain, XTest] = splitData(XBuf, params.nRowsTrain, params.nCols);
    auto [yTrain, yTest] = splitData(yBuf, params.nRowsTrain, 1);
    auto model           = LinearSVMModel<T>::fit(handle,
                                        params.modelParams,
                                        XTrain.data(),
                                        params.nRowsTrain,
                                        params.nCols,
                                        yTrain.data(),
                                        (const T*)nullptr);
    rmm::device_scalar<T> errorBuf(stream);
    rmm::device_uvector<T> yProbs(yTest.size() * params.nClasses, stream);
    rmm::device_uvector<T> yOut(yTest.size(), stream);
    LinearSVMModel<T>::predictProba(handle,
                                    params.modelParams,
                                    model,
                                    XTest.data(),
                                    params.nRowsTest,
                                    params.nCols,
                                    false,
                                    yProbs.data());
    // Argmax encoded as `2 * classIndex + probability + 0.5`: the reduction
    // keeps the element with the largest fractional part (the probability),
    // and the integer part then recovers the winning class index.
    raft::linalg::reduce<T, T, int>(
      yOut.data(),
      yProbs.data(),
      params.nClasses,
      params.nRowsTest,
      0,
      true,
      true,
      stream,
      false,
      [] __device__(const T p, const int i) { return T(i * 2) + p + 0.5; },
      [] __device__(const T a, const T b) { return fmod(a, 2) >= fmod(b, 2) ? a : b; });
    raft::linalg::mapThenSumReduce(
      errorBuf.data(),
      params.nRowsTest,
      [] __device__(const T yRef, const T yOut) {
        T p = yOut - 2 * yRef;
        return T(p <= 0 || p >= 2);
      },
      stream,
      yTest.data(),
      yOut.data());
    // getting the error value forces the stream synchronization
    T error = errorBuf.value(stream) / T(params.nRowsTest);
    LinearSVMModel<T>::free(handle, model);
    if (error <= params.tolerance)
      return testing::AssertionSuccess();
    else
      return testing::AssertionFailure()
             << "Error rate = " << error << " > tolerance = " << params.tolerance;
  }

  /** Generate a required amount of (X, y) data at once. */
  std::tuple<rmm::device_uvector<T>, rmm::device_uvector<T>> genData(const int nRows)
  {
    rmm::device_uvector<T> X(nRows * params.nCols, stream);
    rmm::device_uvector<T> y(nRows * params.nClasses, stream);
    if (params.nClasses == 1)  // regression
    {
      int nInformative = max(params.nCols / 3, min(params.nCols, 5));
      rmm::device_uvector<T> Xt(nRows * params.nCols, stream);
      ML::Datasets::make_regression(handle,
                                    Xt.data(),
                                    y.data(),
                                    nRows,
                                    params.nCols,
                                    nInformative,
                                    nullptr,
                                    1,
                                    params.bias,
                                    -1,
                                    T(0),
                                    T(params.errStd),
                                    true,
                                    params.seed);
      // make_regression produces row-major data; the SVM expects column-major.
      raft::linalg::transpose(handle, Xt.data(), X.data(), params.nCols, nRows, stream);
    } else  // classification
    {
      rmm::device_uvector<int> labels(nRows * params.nClasses, stream);
      raft::random::Rng r(params.seed);
      rmm::device_uvector<T> centers(params.nCols * params.nClasses, stream);
      r.uniform(centers.data(), params.nCols * params.nClasses, T(0), T(1), stream);
      // override manually some of the cluster coordinates to ensure
      // the distance between any of them is large enough.
      int d = max(2, int(std::ceil(std::pow(double(params.nClasses), 1.0 / double(params.nCols)))));
      int modCols = int(std::ceil(std::log2(double(params.nClasses)) / std::log2(double(d))));
      for (int i = 0; i < params.nClasses; i++) {
        // renamed from `r` to avoid shadowing the Rng declared above
        int rem = i;
        for (int j = 0; j < modCols; j++) {
          T value = T((rem % d) * params.nClasses) + T(params.bias);
          centers.set_element_async(j * params.nClasses + i, value, stream);
          rem /= d;
        }
      }
      ML::Datasets::make_blobs(handle,
                               X.data(),
                               labels.data(),
                               nRows,
                               params.nCols,
                               params.nClasses,
                               false,
                               centers.data(),
                               nullptr,
                               T(params.errStd),
                               true,
                               0,
                               0,
                               params.seed);
      // Labels come back as int; the SVM API consumes them as T.
      raft::linalg::unaryOp(
        y.data(), labels.data(), labels.size(), [] __device__(int x) { return T(x); }, stream);
    }
    return std::make_tuple(std::move(X), std::move(y));
  }

  /** Split a column-major matrix in two along the rows. */
  std::tuple<rmm::device_uvector<T>, rmm::device_uvector<T>> splitData(rmm::device_uvector<T>& x,
                                                                       const int takeNRows,
                                                                       const int nCols)
  {
    const int nRows     = x.size() / nCols;
    const int dropNRows = nRows - takeNRows;
    rmm::device_uvector<T> x1(takeNRows * nCols, stream);
    rmm::device_uvector<T> x2(dropNRows * nCols, stream);
    // Strided 2D copies: each column of x contributes its first takeNRows
    // elements to x1 and the remaining dropNRows elements to x2.
    RAFT_CUDA_TRY(cudaMemcpy2DAsync(x1.data(),
                                    sizeof(T) * takeNRows,
                                    x.data(),
                                    sizeof(T) * nRows,
                                    sizeof(T) * takeNRows,
                                    nCols,
                                    cudaMemcpyDeviceToDevice,
                                    stream));
    RAFT_CUDA_TRY(cudaMemcpy2DAsync(x2.data(),
                                    sizeof(T) * dropNRows,
                                    x.data() + takeNRows,
                                    sizeof(T) * nRows,
                                    sizeof(T) * dropNRows,
                                    nCols,
                                    cudaMemcpyDeviceToDevice,
                                    stream));
    return std::make_tuple(std::move(x1), std::move(x2));
  }
};
/** Instantiates a typed LinearSVMTest for `TestClass`/`ElemType` and registers
    a parameterized test that runs the check function `fun`, skipping parameter
    combinations that isInputValid() reports as unfittable. */
#define TEST_SVM(fun, TestClass, ElemType)                           \
  typedef LinearSVMTest<ElemType, TestClass> TestClass##_##ElemType; \
  TEST_P(TestClass##_##ElemType, fun)                                \
  {                                                                  \
    if (!isInputValid()) GTEST_SKIP();                               \
    ASSERT_TRUE(fun());                                              \
  }                                                                  \
  INSTANTIATE_TEST_SUITE_P(LinearSVM, TestClass##_##ElemType, TestClass##Params)
// Classification sweep over loss x penalty x nClasses x nCols.
auto TestClasTargetsParams =
  ::testing::Combine(::testing::Values(LinearSVMParams::HINGE, LinearSVMParams::SQUARED_HINGE),
                     ::testing::Values(LinearSVMParams::L1, LinearSVMParams::L2),
                     ::testing::Values(2, 3, 8),
                     ::testing::Values(1, 50));
struct TestClasTargets {
  typedef std::tuple<LinearSVMParams::Loss, LinearSVMParams::Penalty, int, int> Params;
  // Maps one gtest parameter tuple onto a concrete test configuration.
  static LinearSVMTestParams read(Params ps)
  {
    LinearSVMParams mp;
    mp.penalty = std::get<1>(ps);
    mp.loss = std::get<0>(ps);
    return {/* .nRowsTrain */ 100,
            /* .nRowsTest */ 100,
            /* .nCols */ std::get<3>(ps),
            /* .nClasses */ std::get<2>(ps),
            /* .errStd */ 0.4,
            /* .bias */ 0.0,
            /* .tolerance */ 0.05,
            /* .seed */ 42ULL,
            /* .modelParams */ mp};
  }
};
// Classification sweep over intercept handling and a non-zero data bias.
auto TestClasBiasParams = ::testing::Combine(::testing::Bool(),
                                             ::testing::Bool(),
                                             ::testing::Values(2, 3),
                                             ::testing::Values(10, 50),
                                             ::testing::Values(0.0, -10.0));
struct TestClasBias {
  typedef std::tuple<bool, bool, int, int, double> Params;
  // Maps one gtest parameter tuple onto a concrete test configuration.
  static LinearSVMTestParams read(Params ps)
  {
    LinearSVMParams mp;
    mp.fit_intercept = std::get<0>(ps);
    mp.penalized_intercept = std::get<1>(ps);
    return {/* .nRowsTrain */ 1000,
            /* .nRowsTest */ 100,
            /* .nCols */ std::get<3>(ps),
            /* .nClasses */ std::get<2>(ps),
            /* .errStd */ 0.2,
            /* .bias */ std::get<4>(ps),
            /* .tolerance */ 0.05,
            /* .seed */ 42ULL,
            /* .modelParams */ mp};
  }
};
// Classification with default model parameters and a varying class count.
auto TestClasManyClassesParams = ::testing::Values(2, 3, 16, 31, 32, 33, 67);
struct TestClasManyClasses {
  typedef int Params;
  // The single parameter is the number of classes.
  static LinearSVMTestParams read(Params ps)
  {
    LinearSVMParams mp;
    return {/* .nRowsTrain */ 1000,
            /* .nRowsTest */ 1000,
            /* .nCols */ 200,
            /* .nClasses */ ps,
            /* .errStd */ 1.0,
            /* .bias */ 0,
            /* .tolerance */ 0.01,
            /* .seed */ 42ULL,
            /* .modelParams */ mp};
  }
};
// Probabilistic classification; tight tolerance since probabilitySumsToOne
// checks a normalization invariant rather than model quality.
auto TestClasProbsSumParams = ::testing::Values(2, 3, 16, 31, 32, 33, 67);
struct TestClasProbsSum {
  typedef int Params;
  // The single parameter is the number of classes.
  static LinearSVMTestParams read(Params ps)
  {
    LinearSVMParams mp;
    mp.probability = true;
    mp.max_iter = 100;
    return {/* .nRowsTrain */ 100,
            /* .nRowsTest */ 100,
            /* .nCols */ 80,
            /* .nClasses */ ps,
            /* .errStd */ 1.0,
            /* .bias */ 0,
            /* .tolerance */ 1e-5,
            /* .seed */ 42ULL,
            /* .modelParams */ mp};
  }
};
// Probabilistic classification quality check (argmax over probabilities).
auto TestClasProbsParams = ::testing::Values(2, 3, 16, 31, 32, 33, 67);
struct TestClasProbs {
  typedef int Params;
  // The single parameter is the number of classes.
  static LinearSVMTestParams read(Params ps)
  {
    LinearSVMParams mp;
    mp.probability = true;
    return {/* .nRowsTrain */ 1000,
            /* .nRowsTest */ 1000,
            /* .nCols */ 200,
            /* .nClasses */ ps,
            /* .errStd */ 0.9,
            /* .bias */ 0,
            /* .tolerance */ 0.01,
            /* .seed */ 42ULL,
            /* .modelParams */ mp};
  }
};
// Regression sweep over loss x penalty x intercept x nCols x bias x epsilon.
auto TestRegTargetsParams =
  ::testing::Combine(::testing::Values(LinearSVMParams::EPSILON_INSENSITIVE,
                                       LinearSVMParams::SQUARED_EPSILON_INSENSITIVE),
                     ::testing::Values(LinearSVMParams::L1, LinearSVMParams::L2),
                     ::testing::Bool(),
                     ::testing::Values(1, 50),
                     ::testing::Values(0.0, -10.0),
                     ::testing::Values(0.0, 0.01));
struct TestRegTargets {
  typedef std::tuple<LinearSVMParams::Loss, LinearSVMParams::Penalty, bool, int, double, double>
    Params;
  // Maps one gtest parameter tuple onto a concrete test configuration.
  static LinearSVMTestParams read(Params ps)
  {
    LinearSVMParams mp;
    mp.loss = std::get<0>(ps);
    mp.penalty = std::get<1>(ps);
    mp.fit_intercept = std::get<2>(ps);
    // The regularization parameter strongly affects the model performance in some cases,
    // a larger-than-default value of C seems to always yield better scores on this generated
    // dataset.
    mp.C = 100.0;
    mp.epsilon = std::get<5>(ps);
    mp.verbose = 2;
    return {/* .nRowsTrain */ 1000,
            /* .nRowsTest */ 100,
            /* .nCols */ std::get<3>(ps),
            /* .nClasses */ 1,
            /* .errStd */ 0.02,
            /* .bias */ std::get<4>(ps),
            /* .tolerance */ 0.05,
            /* .seed */ 42ULL,
            /* .modelParams */ mp};
  }
};
// Instantiate each check for the relevant parameter grids and element types.
TEST_SVM(errorRate, TestClasTargets, float);
TEST_SVM(errorRate, TestClasTargets, double);
TEST_SVM(errorRate, TestClasBias, float);
TEST_SVM(errorRate, TestClasManyClasses, float);
TEST_SVM(errorRate, TestClasManyClasses, double);
TEST_SVM(errorRate, TestRegTargets, float);
TEST_SVM(errorRate, TestRegTargets, double);
TEST_SVM(probabilitySumsToOne, TestClasProbsSum, float);
TEST_SVM(probabilitySumsToOne, TestClasProbsSum, double);
TEST_SVM(probabilityErrorRate, TestClasProbs, float);
TEST_SVM(probabilityErrorRate, TestClasProbs, double);
} // namespace SVM
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/dbscan_test.cu | /*
* Copyright (c) 2018-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <vector>
#include <cuml/cluster/dbscan.hpp>
#include <cuml/datasets/make_blobs.hpp>
#include <cuml/metrics/metrics.hpp>
#include <raft/core/handle.hpp>
#include <raft/distance/distance.cuh>
#include <raft/distance/distance_types.hpp>
#include <raft/linalg/transpose.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <test_utils.h>
#include <cuml/common/logger.hpp>
namespace ML {
using namespace Datasets;
using namespace Metrics;
using namespace std;
// Note: false negatives are theoretically possible, given that border
// points are ambiguous.
// If test failures are observed, these tests might need to be re-written
// (cf how the Python tests work).
// Configuration for one generated-blobs DBSCAN test case.
template <typename T, typename IdxT>
struct DbscanInputs {
  IdxT n_row;
  IdxT n_col;
  IdxT n_centers;                // number of blob clusters to generate
  T cluster_std;
  T eps;                         // DBSCAN neighborhood radius
  int min_pts;                   // DBSCAN core-point threshold
  size_t max_bytes_per_batch;    // memory cap driving the batched implementation
  unsigned long long int seed;
  raft::distance::DistanceType metric;
};
template <typename T, typename IdxT>
::std::ostream& operator<<(::std::ostream& os, const DbscanInputs<T, IdxT>& dims)
{
return os;
}
// Generates well-separated blobs, clusters them with DBSCAN, and scores the
// result against the generator's labels with the adjusted Rand index.
template <typename T, typename IdxT>
class DbscanTest : public ::testing::TestWithParam<DbscanInputs<T, IdxT>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    auto stream = handle.get_stream();
    params = ::testing::TestWithParam<DbscanInputs<T, IdxT>>::GetParam();
    rmm::device_uvector<T> out(params.n_row * params.n_col, stream);
    rmm::device_uvector<IdxT> l(params.n_row, stream);
    // Only allocated when the test exercises the precomputed-distances path.
    rmm::device_uvector<T> dist(
      params.metric == raft::distance::Precomputed ? params.n_row * params.n_row : 0, stream);
    make_blobs(handle,
               out.data(),
               l.data(),
               params.n_row,
               params.n_col,
               params.n_centers,
               true,
               nullptr,
               nullptr,
               params.cluster_std,
               true,
               -10.0f,
               10.0f,
               params.seed);
    if (params.metric == raft::distance::Precomputed) {
      // Precompute the full pairwise distance matrix to feed DBSCAN directly.
      ML::Metrics::pairwise_distance(handle,
                                     out.data(),
                                     out.data(),
                                     dist.data(),
                                     params.n_row,
                                     params.n_row,
                                     params.n_col,
                                     raft::distance::L2SqrtUnexpanded);
    }
    rmm::device_uvector<IdxT> labels(params.n_row, stream);
    rmm::device_uvector<IdxT> labels_ref(params.n_row, stream);
    raft::copy(labels_ref.data(), l.data(), params.n_row, stream);
    handle.sync_stream(stream);
    Dbscan::fit(handle,
                params.metric == raft::distance::Precomputed ? dist.data() : out.data(),
                params.n_row,
                params.n_col,
                params.eps,
                params.min_pts,
                params.metric,
                labels.data(),
                nullptr,
                nullptr,
                params.max_bytes_per_batch);
    handle.sync_stream(stream);
    // ARI is permutation-invariant, so label renumbering does not matter.
    score = adjusted_rand_index(handle, labels_ref.data(), labels.data(), params.n_row);
    if (score < 1.0) {
      // Dump both labelings to help diagnose the mismatch.
      auto str = raft::arr2Str(labels_ref.data(), params.n_row, "labels_ref", handle.get_stream());
      CUML_LOG_DEBUG("y: %s", str.c_str());
      str = raft::arr2Str(labels.data(), params.n_row, "labels", handle.get_stream());
      CUML_LOG_DEBUG("y_hat: %s", str.c_str());
      CUML_LOG_DEBUG("Score = %lf", score);
    }
  }
  void SetUp() override { basicTest(); }

 protected:
  DbscanInputs<T, IdxT> params;
  double score;
};
// Test configurations: {n_row, n_col, n_centers, cluster_std, eps, min_pts,
// max_bytes_per_batch, seed, metric}. Small max_bytes_per_batch values force
// the batched code path.
const std::vector<DbscanInputs<float, int>> inputsf2 = {
  {500, 16, 5, 0.01, 2, 2, (size_t)100, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {500, 16, 5, 0.01, 2, 2, (size_t)100, 1234ULL, raft::distance::Precomputed},
  {1000, 1000, 10, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 10000, 10, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 100, 5000, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded}};
const std::vector<DbscanInputs<float, int64_t>> inputsf3 = {
  {500, 16, 5, 0.01, 2, 2, (size_t)100, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {500, 16, 5, 0.01, 2, 2, (size_t)100, 1234ULL, raft::distance::Precomputed},
  {1000, 1000, 10, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {50000, 16, 5, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 10000, 10, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 100, 5000, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded}};
const std::vector<DbscanInputs<double, int>> inputsd2 = {
  {50000, 16, 5, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {10000, 16, 5, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::Precomputed},
  {500, 16, 5, 0.01, 2, 2, (size_t)100, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {1000, 1000, 10, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {100, 10000, 10, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 10000, 10, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 100, 5000, 0.01, 2, 2, (size_t)13e3, 1234ULL, raft::distance::L2SqrtUnexpanded}};
const std::vector<DbscanInputs<double, int64_t>> inputsd3 = {
  {50000, 16, 5, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {10000, 16, 5, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::Precomputed},
  {500, 16, 5, 0.01, 2, 2, (size_t)100, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {1000, 1000, 10, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {100, 10000, 10, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 10000, 10, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded},
  {20000, 100, 5000, 0.01, 2, 2, (size_t)9e3, 1234ULL, raft::distance::L2SqrtUnexpanded}};
// One fixture per (element type, index type) combination; a perfect ARI of 1.0
// is expected since the generated blobs are well separated.
typedef DbscanTest<float, int> DbscanTestF_Int;
TEST_P(DbscanTestF_Int, Result) { ASSERT_TRUE(score == 1.0); }
typedef DbscanTest<float, int64_t> DbscanTestF_Int64;
TEST_P(DbscanTestF_Int64, Result) { ASSERT_TRUE(score == 1.0); }
typedef DbscanTest<double, int> DbscanTestD_Int;
TEST_P(DbscanTestD_Int, Result) { ASSERT_TRUE(score == 1.0); }
typedef DbscanTest<double, int64_t> DbscanTestD_Int64;
TEST_P(DbscanTestD_Int64, Result) { ASSERT_TRUE(score == 1.0); }
INSTANTIATE_TEST_CASE_P(DbscanTests, DbscanTestF_Int, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(DbscanTests, DbscanTestF_Int64, ::testing::ValuesIn(inputsf3));
INSTANTIATE_TEST_CASE_P(DbscanTests, DbscanTestD_Int, ::testing::ValuesIn(inputsd2));
INSTANTIATE_TEST_CASE_P(DbscanTests, DbscanTestD_Int64, ::testing::ValuesIn(inputsd3));
template <typename T>
struct DBScan2DArrayInputs {
const T* points;
const int* out;
size_t n_row;
// n_out allows to compare less labels than we have inputs
// (some output labels can be ambiguous)
size_t n_out;
T eps;
int min_pts;
const int* core_indices; // Expected core_indices
const T* sample_weight = nullptr;
};
// Runs DBSCAN on a small hand-crafted 2D dataset and checks both the cluster
// labels (via ARI) and the reported core-point indices, optionally with
// per-sample weights.
template <typename T>
class Dbscan2DSimple : public ::testing::TestWithParam<DBScan2DArrayInputs<T>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    auto stream = handle.get_stream();
    params = ::testing::TestWithParam<DBScan2DArrayInputs<T>>::GetParam();
    rmm::device_uvector<T> inputs(params.n_row * 2, stream);
    rmm::device_uvector<int> labels(params.n_row, stream);
    rmm::device_uvector<int> labels_ref(params.n_out, stream);
    rmm::device_uvector<int> core_sample_indices_d(params.n_row, stream);
    rmm::device_uvector<T> sample_weight_d(params.n_row, stream);
    raft::copy(inputs.data(), params.points, params.n_row * 2, stream);
    raft::copy(labels_ref.data(), params.out, params.n_out, stream);
    // Weights are optional: pass nullptr when the test case provides none.
    T* sample_weight = nullptr;
    if (params.sample_weight != nullptr) {
      raft::copy(sample_weight_d.data(), params.sample_weight, params.n_row, stream);
      sample_weight = sample_weight_d.data();
    }
    handle.sync_stream(stream);
    Dbscan::fit(handle,
                inputs.data(),
                (int)params.n_row,
                2,
                params.eps,
                params.min_pts,
                raft::distance::L2SqrtUnexpanded,
                labels.data(),
                core_sample_indices_d.data(),
                sample_weight);
    handle.sync_stream(handle.get_stream());
    // Only the first n_out labels are compared (trailing ones may be ambiguous).
    score = adjusted_rand_index(handle, labels_ref.data(), labels.data(), (int)params.n_out);
    if (score < 1.0) {
      auto str = raft::arr2Str(labels_ref.data(), params.n_out, "labels_ref", stream);
      CUML_LOG_DEBUG("y: %s", str.c_str());
      str = raft::arr2Str(labels.data(), params.n_row, "labels", stream);
      CUML_LOG_DEBUG("y_hat: %s", str.c_str());
      CUML_LOG_DEBUG("Score = %lf", score);
    }
    // Core-point indices must match the expectation exactly (order included).
    EXPECT_TRUE(MLCommon::devArrMatchHost(params.core_indices,
                                          core_sample_indices_d.data(),
                                          params.n_row,
                                          MLCommon::Compare<int>(),
                                          stream));
  }
  void SetUp() override { basicTest(); }

 protected:
  DBScan2DArrayInputs<T> params;
  double score;
};
// The input looks like a latin cross or a star with a chain:
// .
// . . . . .
// .
// There is 1 core-point (intersection of the bars)
// and the two points to the very right are not reachable from it
// So there should be one cluster (the plus/star on the left)
// and two noise points
const std::vector<float> test2d1_f = {0, 0, 1, 0, 1, 1, 1, -1, 2, 0, 3, 0, 4, 0};
const std::vector<double> test2d1_d(test2d1_f.begin(), test2d1_f.end());
const std::vector<int> test2d1_l = {0, 0, 0, 0, 0, -1, -1};
const std::vector<int> test2d1c_l = {1, -1, -1, -1, -1, -1, -1};
// modified for weighted samples --> weights are shifted so that
// the rightmost point will be a core point as well
const std::vector<float> test2d1w_f = {1, 2, 1, 1, -1, 1, 3};
const std::vector<double> test2d1w_d(test2d1w_f.begin(), test2d1w_f.end());
const std::vector<int> test2d1w_l = {0, 0, 0, 0, 0, 1, 1};
const std::vector<int> test2d1wc_l = {1, 6, -1, -1, -1, -1, -1};
// The input looks like a long two-barred (orthodox) cross or
// two stars next to each other:
// . .
// . . . . . .
// . .
// There are 2 core-points but they are not reachable from each other
// So there should be two clusters, both in the form of a plus/star
const std::vector<float> test2d2_f = {0, 0, 1, 0, 1, 1, 1, -1, 2, 0, 3, 0, 4, 0, 4, 1, 4, -1, 5, 0};
const std::vector<double> test2d2_d(test2d2_f.begin(), test2d2_f.end());
const std::vector<int> test2d2_l = {0, 0, 0, 0, 0, 1, 1, 1, 1, 1};
const std::vector<int> test2d2c_l = {1, 6, -1, -1, -1, -1, -1, -1, -1, -1};
// modified for weighted samples --> the weight for the right center
// is negative so that the whole right star is noise
const std::vector<float> test2d2w_f = {1, 1, 1, 1, 1, 1, -2, 1, 1, 1};
const std::vector<double> test2d2w_d(test2d2w_f.begin(), test2d2w_f.end());
const std::vector<int> test2d2w_l = {0, 0, 0, 0, 0, -1, -1, -1, -1, -1};
const std::vector<int> test2d2wc_l = {1, -1, -1, -1, -1, -1, -1, -1, -1, -1};
// The input looks like a two-barred (orthodox) cross or
// two stars sharing a link:
// . .
// . . . . .
// . .
// There are 2 core-points but they are not reachable from each other
// So there should be two clusters.
// However, the link that is shared between the stars
// actually has an ambiguous label (to the best of my knowledge)
// as it will depend on the order in which we process the core-points.
// Note that there are 9 input points, but only 8 labels for this reason
const std::vector<float> test2d3_f = {
0,
0,
1,
0,
1,
1,
1,
-1,
3,
0,
3,
1,
3,
-1,
4,
0,
2,
0,
};
const std::vector<double> test2d3_d(test2d3_f.begin(), test2d3_f.end());
const std::vector<int> test2d3_l = {0, 0, 0, 0, 1, 1, 1, 1};
const std::vector<int> test2d3c_l = {1, 4, -1, -1, -1, -1, -1, -1, -1};
// ones for functional sample_weight testing
const std::vector<float> test2d_ones_f = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
const std::vector<double> test2d_ones_d(test2d_ones_f.begin(), test2d_ones_f.end());
const std::vector<DBScan2DArrayInputs<float>> inputs2d_f = {
{test2d1_f.data(),
test2d1_l.data(),
test2d1_f.size() / 2,
test2d1_l.size(),
1.1f,
4,
test2d1c_l.data()},
{test2d2_f.data(),
test2d2_l.data(),
test2d2_f.size() / 2,
test2d2_l.size(),
1.1f,
4,
test2d2c_l.data()},
{test2d3_f.data(),
test2d3_l.data(),
test2d3_f.size() / 2,
test2d3_l.size(),
1.1f,
4,
test2d3c_l.data()},
// add dummy sample weights
{test2d1_f.data(),
test2d1_l.data(),
test2d1_f.size() / 2,
test2d1_l.size(),
1.1f,
4,
test2d1c_l.data(),
test2d_ones_f.data()},
{test2d2_f.data(),
test2d2_l.data(),
test2d2_f.size() / 2,
test2d2_l.size(),
1.1f,
4,
test2d2c_l.data(),
test2d_ones_f.data()},
{test2d3_f.data(),
test2d3_l.data(),
test2d3_f.size() / 2,
test2d3_l.size(),
1.1f,
4,
test2d3c_l.data(),
test2d_ones_f.data()},
// special sample_weight cases
{test2d1_f.data(),
test2d1w_l.data(),
test2d1_f.size() / 2,
test2d1w_l.size(),
1.1f,
4,
test2d2wc_l.data(),
test2d2w_f.data()},
{test2d2_f.data(),
test2d2w_l.data(),
test2d2_f.size() / 2,
test2d2w_l.size(),
1.1f,
4,
test2d2wc_l.data(),
test2d2w_f.data()},
};
// Parameter table for the double-precision 2-D DBSCAN tests.
// Entry layout: points, expected labels, n_row, n_out, eps, min_pts,
// expected core-sample indices, [optional sample weights].
const std::vector<DBScan2DArrayInputs<double>> inputs2d_d = {
  {test2d1_d.data(),
   test2d1_l.data(),
   test2d1_d.size() / 2,
   test2d1_l.size(),
   1.1,
   4,
   test2d1c_l.data()},
  {test2d2_d.data(),
   test2d2_l.data(),
   test2d2_d.size() / 2,
   test2d2_l.size(),
   1.1,
   4,
   test2d2c_l.data()},
  {test2d3_d.data(),
   test2d3_l.data(),
   test2d3_d.size() / 2,
   test2d3_l.size(),
   1.1,
   4,
   test2d3c_l.data()},
  // add dummy sample weights
  {test2d1_d.data(),
   test2d1_l.data(),
   test2d1_d.size() / 2,
   test2d1_l.size(),
   1.1,
   4,
   test2d1c_l.data(),
   test2d_ones_d.data()},
  {test2d2_d.data(),
   test2d2_l.data(),
   test2d2_d.size() / 2,
   test2d2_l.size(),
   1.1,
   4,
   test2d2c_l.data(),
   test2d_ones_d.data()},
  {test2d3_d.data(),
   test2d3_l.data(),
   test2d3_d.size() / 2,
   test2d3_l.size(),
   1.1,
   4,
   test2d3c_l.data(),
   test2d_ones_d.data()},
  // special sample_weight cases
  // CONSISTENCY FIX: eps was written as the float literal 1.1f in this
  // double-precision table; use 1.1 like every other entry.
  {test2d1_d.data(),
   test2d1w_l.data(),
   test2d1_d.size() / 2,
   test2d1w_l.size(),
   1.1,
   4,
   test2d1wc_l.data(),
   test2d1w_d.data()},
  {test2d2_d.data(),
   test2d2w_l.data(),
   test2d2_d.size() / 2,
   test2d2w_l.size(),
   1.1,
   4,
   test2d2wc_l.data(),
   test2d2w_d.data()},
};
// Instantiate the 2-D DBSCAN fixture for float and double inputs.
// A perfect adjusted-rand-index (score == 1.0) against the hand-written
// reference labels is required for every parameter set above.
typedef Dbscan2DSimple<float> Dbscan2DSimple_F;
TEST_P(Dbscan2DSimple_F, Result) { ASSERT_TRUE(score == 1.0); }

typedef Dbscan2DSimple<double> Dbscan2DSimple_D;
TEST_P(Dbscan2DSimple_D, Result) { ASSERT_TRUE(score == 1.0); }

INSTANTIATE_TEST_CASE_P(DbscanTests, Dbscan2DSimple_F, ::testing::ValuesIn(inputs2d_f));
INSTANTIATE_TEST_CASE_P(DbscanTests, Dbscan2DSimple_D, ::testing::ValuesIn(inputs2d_d));
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/rproj_test.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/metrics/metrics.hpp>
#include <cuml/random_projection/rproj_c.h>
#include <gtest/gtest.h>
#include <iostream>
#include <raft/core/handle.hpp>
#include <raft/distance/distance.cuh>
#include <raft/linalg/transpose.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <random>
#include <test_utils.h>
#include <vector>
namespace ML {
// Test fixture for random projection (Johnson-Lindenstrauss): fits both a dense
// (gaussian) and a sparse projection of an N x M uniform-random matrix, then checks
// matrix properties and the JL distance-preservation guarantee.
template <typename T, int N, int M>
class RPROJTest : public ::testing::Test {
 public:
  RPROJTest()
    : stream(handle.get_stream()),
      random_matrix1(stream),
      random_matrix2(stream),
      d_input(0, stream),
      d_output1(0, stream),
      d_output2(0, stream)
  {
  }

 protected:
  // Fill d_input with N*M uniform [0, 1) values generated on the host.
  void generate_data()
  {
    std::random_device rd;
    std::mt19937 rng(rd());
    std::uniform_real_distribution<T> dist(0, 1);
    h_input.resize(N * M);
    for (auto& i : h_input) {
      i = dist(rng);
    }
    d_input.resize(h_input.size(), stream);
    raft::update_device(d_input.data(), h_input.data(), h_input.size(), stream);
    // transpose(d_input, d_input, N, M);
    // From row major to column major (this operation is only useful for non-random datasets)
  }

  // Fit and apply the dense (gaussian) projection; d_output1 holds the row-major result.
  void gaussianTest()
  {
    params1 = {
      N,        // number of samples
      M,        // number of features
      -1,       // number of components (-1: auto-deduction)
      epsilon,  // error tolerance
      true,     // gaussian or sparse method
      -1.0,     // auto density
      false,    // not used
      42        // random seed
    };

    RPROJfit(handle, &random_matrix1, &params1);
    d_output1.resize(N * params1.n_components, stream);
    rmm::device_uvector<T> tmp(d_output1.size(), stream);
    RPROJtransform(handle, d_input.data(), &random_matrix1, tmp.data(), &params1);

    raft::linalg::transpose(handle,
                            tmp.data(),
                            d_output1.data(),
                            N,
                            params1.n_components,
                            stream);  // From column major to row major

    handle.sync_stream(stream);
  }

  // Fit and apply the sparse projection; d_output2 holds the row-major result.
  void sparseTest()
  {
    params2 = {
      N,        // number of samples
      M,        // number of features
      -1,       // number of components (-1: auto-deduction)
      epsilon,  // error tolerance
      false,    // gaussian or sparse method
      -1.0,     // auto density (-1: auto-deduction)
      false,    // not used
      42        // random seed
    };

    RPROJfit(handle, &random_matrix2, &params2);
    d_output2.resize(N * params2.n_components, stream);
    rmm::device_uvector<T> tmp(d_output2.size(), stream);
    RPROJtransform(handle, d_input.data(), &random_matrix2, tmp.data(), &params2);

    raft::linalg::transpose(handle,
                            tmp.data(),
                            d_output2.data(),
                            N,
                            params2.n_components,
                            stream);  // From column major to row major

    handle.sync_stream(stream);
  }

  void SetUp() override
  {
    epsilon = 0.2;
    generate_data();
    gaussianTest();
    sparseTest();
  }

  // Structural checks: auto-deduced dimensions and storage of both matrices.
  void random_matrix_check()
  {
    int D = johnson_lindenstrauss_min_dim(N, epsilon);
    ASSERT_TRUE(params1.n_components == D);
    ASSERT_TRUE(random_matrix1.dense_data.size() > 0);
    ASSERT_TRUE(random_matrix1.type == dense);

    ASSERT_TRUE(params2.n_components == D);
    ASSERT_TRUE(params2.density == 1 / sqrt(M));
    ASSERT_TRUE(random_matrix2.indices.size() > 0);
    ASSERT_TRUE(random_matrix2.indptr.size() > 0);
    ASSERT_TRUE(random_matrix2.sparse_data.size() > 0);
    ASSERT_TRUE(random_matrix2.type == sparse);
  }

  // Verify the JL lemma: pairwise distances in both projected spaces must stay
  // within a (1 +/- epsilon) factor of the original-space distances.
  void epsilon_check()
  {
    int D = johnson_lindenstrauss_min_dim(N, epsilon);
    constexpr auto distance_type = raft::distance::DistanceType::L2SqrtUnexpanded;

    // Pairwise distances in the original space.
    rmm::device_uvector<T> d_pdist(N * N, stream);
    ML::Metrics::pairwise_distance(
      handle, d_input.data(), d_input.data(), d_pdist.data(), N, N, M, distance_type);
    RAFT_CUDA_TRY(cudaPeekAtLastError());
    // std::vector instead of new[]/delete[]: no leak when a fatal ASSERT_TRUE
    // returns from this function early.
    std::vector<T> h_pdist(N * N);
    raft::update_host(h_pdist.data(), d_pdist.data(), N * N, stream);

    // Pairwise distances in the gaussian-projected space.
    rmm::device_uvector<T> d_pdist1(N * N, stream);
    ML::Metrics::pairwise_distance(
      handle, d_output1.data(), d_output1.data(), d_pdist1.data(), N, N, D, distance_type);
    RAFT_CUDA_TRY(cudaPeekAtLastError());
    std::vector<T> h_pdist1(N * N);
    raft::update_host(h_pdist1.data(), d_pdist1.data(), N * N, stream);

    // Pairwise distances in the sparse-projected space.
    rmm::device_uvector<T> d_pdist2(N * N, stream);
    ML::Metrics::pairwise_distance(
      handle, d_output2.data(), d_output2.data(), d_pdist2.data(), N, N, D, distance_type);
    RAFT_CUDA_TRY(cudaPeekAtLastError());
    std::vector<T> h_pdist2(N * N);
    raft::update_host(h_pdist2.data(), d_pdist2.data(), N * N, stream);

    // raft::update_host is asynchronous on `stream`: synchronize before the
    // host-side reads below (previously missing -- a host/device race).
    handle.sync_stream(stream);

    // Only the lower triangle is checked; the distance matrices are symmetric.
    for (int i = 0; i < N; i++) {
      for (int j = 0; j <= i; j++) {
        T pdist  = h_pdist[i * N + j];
        T pdist1 = h_pdist1[i * N + j];
        T pdist2 = h_pdist2[i * N + j];

        T lower_bound = (1.0 - epsilon) * pdist;
        T upper_bound = (1.0 + epsilon) * pdist;

        ASSERT_TRUE(lower_bound <= pdist1 && pdist1 <= upper_bound);
        ASSERT_TRUE(lower_bound <= pdist2 && pdist2 <= upper_bound);
      }
    }
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;

  T epsilon;  // JL error tolerance

  std::vector<T> h_input;
  rmm::device_uvector<T> d_input;

  paramsRPROJ params1;
  rand_mat<T> random_matrix1;
  rmm::device_uvector<T> d_output1;

  paramsRPROJ params2;
  rand_mat<T> random_matrix2;
  rmm::device_uvector<T> d_output2;
};
// Instantiate the fixture for two dataset shapes and both precisions.
typedef RPROJTest<float, 500, 2000> RPROJTestF1;
TEST_F(RPROJTestF1, RandomMatrixCheck) { random_matrix_check(); }
TEST_F(RPROJTestF1, EpsilonCheck) { epsilon_check(); }

typedef RPROJTest<double, 500, 2000> RPROJTestD1;
TEST_F(RPROJTestD1, RandomMatrixCheck) { random_matrix_check(); }
TEST_F(RPROJTestD1, EpsilonCheck) { epsilon_check(); }

typedef RPROJTest<float, 5000, 3500> RPROJTestF2;
TEST_F(RPROJTestF2, RandomMatrixCheck) { random_matrix_check(); }
TEST_F(RPROJTestF2, EpsilonCheck) { epsilon_check(); }

typedef RPROJTest<double, 5000, 3500> RPROJTestD2;
TEST_F(RPROJTestD2, RandomMatrixCheck) { random_matrix_check(); }
TEST_F(RPROJTestD2, EpsilonCheck) { epsilon_check(); }
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/fil_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "../../src/fil/internal.cuh"
#include <test_utils.h>
#include <cuml/fil/fil.h>
#include <raft/core/handle.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <test_utils.h>
#include <thrust/execution_policy.h>
#include <thrust/functional.h>
#include <thrust/iterator/counting_iterator.h>
#include <thrust/transform.h>
#include <treelite/c_api.h>
#include <treelite/frontend.h>
#include <treelite/tree.h>
#include <gtest/gtest.h>
#include <cmath>
#include <cstdio>
#include <limits>
#include <memory>
#include <numeric>
#include <ostream>
#include <utility>
// Wrap a treelite C API call; a negative return code indicates failure.
#define TL_CPP_CHECK(call) ASSERT(int(call) >= 0, "treelite call error")
namespace ML {
namespace tl = treelite;
namespace tlf = treelite::frontend;
using namespace fil;
// One parameter set for a FIL inference test: controls the synthetic dataset,
// the randomly generated forest, the output transform and the runtime knobs.
struct FilTestParams {
  // input data parameters
  int num_rows   = 20'000;
  int num_cols   = 50;
  float nan_prob = 0.05;  // fraction of input values replaced with NaN
  // forest parameters
  int depth       = 8;
  int num_trees   = 50;
  float leaf_prob = 0.05;  // probability that an inner-level node is forced to be a leaf
  // below, categorical nodes means categorical inner nodes
  // probability that a node is categorical (given that its feature is categorical)
  float node_categorical_prob = 0.0f;
  // probability that a feature is categorical (pertains to data generation, can
  // still be interpreted as numerical by a node)
  float feature_categorical_prob = 0.0f;
  // during model creation, how often categories < fid_num_cats are marked as matching?
  float cat_match_prob = 0.5f;
  // Order Of Magnitude for maximum matching category for categorical nodes
  float max_magnitude_of_matching_cat = 1.0f;
  // output parameters
  output_t output    = output_t::RAW;
  double threshold   = 0.0f;
  double global_bias = 0.0f;
  // runtime parameters
  int blocks_per_sm    = 0;
  int threads_per_tree = 1;
  int n_items          = 0;
  algo_t algo          = algo_t::NAIVE;
  int seed             = 42;   // RNG seed for both forest and data generation
  float tolerance      = 2e-3f;
  bool print_forest_shape = false;
  // treelite parameters, only used for treelite tests
  tl::Operator op       = tl::Operator::kLT;
  leaf_algo_t leaf_algo = leaf_algo_t::FLOAT_UNARY_BINARY;
  // when FLOAT_UNARY_BINARY == leaf_algo:
  // num_classes = 1 means it's regression
  // num_classes = 2 means it's binary classification
  // (complement probabilities, then use threshold)
  // when GROVE_PER_CLASS == leaf_algo:
  // it's multiclass classification (num_classes must be > 2),
  // done by splitting the forest in num_classes groups,
  // each of which computes one-vs-all probability for its class.
  // when CATEGORICAL_LEAF == leaf_algo:
  // num_classes must be > 1 and it's multiclass classification.
  // done by storing the class label in each leaf and voting.
  // it's used in treelite ModelBuilder initialization
  int num_classes = 1;

  // predict_proba emits at least two columns (binary case complements to 2)
  size_t num_proba_outputs() { return num_rows * std::max(num_classes, 2); }
  size_t num_preds_outputs() { return num_rows; }
};
// Render an output_t bitmask as a human-readable string for test logs.
std::string output2str(fil::output_t output)
{
  // RAW means no transform bits are set, so report it directly.
  if (output == fil::RAW) return "RAW";

  // Append one tag per transform flag present in the bitmask.
  const std::pair<fil::output_t, const char*> flag_names[] = {
    {fil::AVG, "| AVG"},
    {fil::CLASS, "| CLASS"},
    {fil::SIGMOID, "| SIGMOID"},
    {fil::SOFTMAX, "| SOFTMAX"},
  };
  std::string repr;
  for (const auto& [flag, name] : flag_names) {
    if (output & flag) repr += name;
  }
  return repr;
}
// Stream a full parameter dump; gtest prints this when a parameterized case fails.
std::ostream& operator<<(std::ostream& os, const FilTestParams& ps)
{
  os << "num_rows = " << ps.num_rows;
  os << ", num_cols = " << ps.num_cols;
  os << ", nan_prob = " << ps.nan_prob;
  os << ", depth = " << ps.depth;
  os << ", num_trees = " << ps.num_trees;
  os << ", leaf_prob = " << ps.leaf_prob;
  os << ", output = " << output2str(ps.output);
  os << ", threshold = " << ps.threshold;
  os << ", threads_per_tree = " << ps.threads_per_tree;
  os << ", n_items = " << ps.n_items;
  os << ", blocks_per_sm = " << ps.blocks_per_sm;
  os << ", algo = " << ps.algo;
  os << ", seed = " << ps.seed;
  os << ", tolerance = " << ps.tolerance;
  os << ", op = " << tl::OpName(ps.op);
  os << ", global_bias = " << ps.global_bias;
  os << ", leaf_algo = " << ps.leaf_algo;
  os << ", num_classes = " << ps.num_classes;
  os << ", node_categorical_prob = " << ps.node_categorical_prob;
  os << ", feature_categorical_prob = " << ps.feature_categorical_prob;
  os << ", cat_match_prob = " << ps.cat_match_prob;
  os << ", max_magnitude_of_matching_cat = " << ps.max_magnitude_of_matching_cat;
  return os;
}
// Overwrite every element whose mask entry is false with the given NaN payload.
template <typename real_t>
__global__ void nan_kernel(real_t* data, const bool* mask, int len, real_t nan)
{
  int idx = blockDim.x * blockIdx.x + threadIdx.x;
  if (idx < len && !mask[idx]) data[idx] = nan;
}
// Standard logistic function: 1 / (1 + e^(-x)).
template <typename real_t>
real_t sigmoid(real_t x)
{
  const real_t one(1);
  return one / (one + std::exp(-x));
}
// Fill d with n_vals Bernoulli samples (exactly 0.0f or 1.0f), where a value is
// zero with probability prob_of_zero. Implemented by thresholding a uniform
// [0, 1] draw; the extremes prob_of_zero == 0 and == 1 are handled exactly so
// the test never generates a "forbidden" value.
void hard_clipped_bernoulli(
  raft::random::Rng rng, float* d, std::size_t n_vals, float prob_of_zero, cudaStream_t stream)
{
  rng.uniform(d, n_vals, 0.0f, 1.0f, stream);
  thrust::transform(
    thrust::cuda::par.on(stream), d, d + n_vals, d, [=] __device__(float uniform_0_1) -> float {
      // if prob_of_zero == 0.0f, we should never generate a zero
      if (prob_of_zero == 0.0f) return 1.0f;
      // clamp defensively: the RNG's endpoints are not guaranteed exact
      float truly_0_1 = fmax(fmin(uniform_0_1, 1.0f), 0.0f);
      // if prob_of_zero == 1.0f, we should never generate a one, hence ">"
      return truly_0_1 > prob_of_zero ? 1.0f : 0.0f;
    });
}
// Device functor: for columns marked categorical (fid_num_cats != 0), map a
// uniform [-1, 1] sample onto a mix of valid category values and deliberately
// invalid ones (negative, huge, and infinite) to exercise FIL's category
// validation paths. Numerical columns (fid_num_cats == 0) pass through.
template <typename real_t>
struct replace_some_floating_with_categorical {
  float* fid_num_cats_d;  // per-column category count; 0 marks a numerical column
  int num_cols;
  __device__ real_t operator()(real_t data, int data_idx)
  {
    // data is laid out row-major, so column id is data_idx % num_cols
    auto fid_num_cats = static_cast<real_t>(fid_num_cats_d[data_idx % num_cols]);
    if (fid_num_cats == real_t(0)) return data;
    // Transform `data` from (uniform on) [-1.0, 1.0] into [-fid_num_cats-3, fid_num_cats+3].
    real_t tmp = data * (fid_num_cats + real_t(3));
    // Also test invalid (negative and above fid_num_cats) categories: samples within
    // [fid_num_cats+2.5, fid_num_cats+3) and opposite will test infinite floats as categorical.
    if (tmp + fid_num_cats < real_t(-2.5f)) return -std::numeric_limits<real_t>::infinity();
    if (tmp - fid_num_cats > real_t(+2.5f)) return +std::numeric_limits<real_t>::infinity();
    // Samples within [fid_num_cats+2, fid_num_cats+2.5) (and their negative counterparts) will
    // test huge invalid categories.
    if (tmp + fid_num_cats < real_t(-2.0f)) tmp -= real_t(MAX_FIL_INT_FLOAT);
    if (tmp - fid_num_cats > real_t(+2.0f)) tmp += real_t(MAX_FIL_INT_FLOAT);
    // Samples within [0, fid_num_cats+2) will be valid categories, rounded towards 0 with a cast.
    // Negative categories are always invalid. For correct interpretation, see
    // cpp/src/fil/internal.cuh `int category_matches(node_t node, float category)`
    return tmp;
  }
};
// Pack BITS_PER_BYTE consecutive 0.0/1.0 floats from src into one byte of dst;
// bit i of dst[k] comes from src[k * BITS_PER_BYTE + i]. One thread per output byte.
template <typename real_t>
__global__ void floats_to_bit_stream_k(uint8_t* dst, real_t* src, std::size_t size)
{
  std::size_t out_idx = blockDim.x * std::size_t(blockIdx.x) + threadIdx.x;
  if (out_idx < size) {
    int packed = 0;
#pragma unroll
    for (int bit = 0; bit < BITS_PER_BYTE; ++bit) {
      packed |= static_cast<int>(src[out_idx * BITS_PER_BYTE + bit]) << bit;
    }
    dst[out_idx] = packed;
  }
}
// Convert a FIL-style split (go left if val >= threshold) into treelite's
// convention (go left if val [op] threshold), adjusting the threshold and/or
// swapping the children as needed for the given comparison operator.
template <typename real_t>
void adjust_threshold_to_treelite(
  real_t* pthreshold, int* tl_left, int* tl_right, bool* default_left, tl::Operator comparison_op)
{
  // in treelite (take left node if val [op] threshold),
  // the meaning of the condition is reversed compared to FIL;
  // thus, "<" in treelite corresponds to comparison ">=" used by FIL
  // https://github.com/dmlc/treelite/blob/master/include/treelite/tree.h#L243
  // TODO(levsnv): remove workaround once confirmed to work with empty category lists in Treelite
  if (isnan(*pthreshold)) {
    std::swap(*tl_left, *tl_right);
    *default_left = !*default_left;
    return;
  }
  switch (comparison_op) {
    case tl::Operator::kLT: break;
    case tl::Operator::kLE:
      // x <= y is equivalent to x < y', where y' is the next representable value.
      // BUGFIX: use std::nextafter, not std::nextafterf -- the latter truncates
      // double thresholds to float, losing precision for real_t == double.
      *pthreshold = std::nextafter(*pthreshold, -std::numeric_limits<real_t>::infinity());
      break;
    case tl::Operator::kGT:
      // x > y is equivalent to x >= y', where y' is the next representable value
      // left and right still need to be swapped
      *pthreshold = std::nextafter(*pthreshold, -std::numeric_limits<real_t>::infinity());
      [[fallthrough]];  // kGT additionally needs the kGE child swap below
    case tl::Operator::kGE:
      // swap left and right
      std::swap(*tl_left, *tl_right);
      *default_left = !*default_left;
      break;
    default: ASSERT(false, "only <, >, <= and >= comparisons are supported");
  }
}
template <typename real_t>
class BaseFilTest : public testing::TestWithParam<FilTestParams> {
public:
BaseFilTest()
: ps(::testing::TestWithParam<FilTestParams>::GetParam()),
stream(handle.get_stream()),
preds_d(0, stream),
want_preds_d(0, stream),
want_proba_d(0, stream),
data_d(ps.num_rows * ps.num_cols, stream),
proba_d(0, stream)
{
}
protected:
void setup_helper()
{
generate_forest();
generate_data();
predict_on_cpu();
predict_on_gpu();
}
void SetUp() override { setup_helper(); }
void generate_forest()
{
auto stream = handle.get_stream();
size_t num_nodes = forest_num_nodes();
// helper data
rmm::device_uvector<int> weights_int_d(num_nodes, stream);
rmm::device_uvector<real_t> weights_real_d(num_nodes, stream);
rmm::device_uvector<real_t> thresholds_d(num_nodes, stream);
rmm::device_uvector<bool> def_lefts_d(num_nodes, stream);
rmm::device_uvector<bool> is_leafs_d(num_nodes, stream);
rmm::device_uvector<float> is_categoricals_d(num_nodes, stream);
fids_d.resize(num_nodes, stream);
fid_num_cats_d.resize(ps.num_cols, stream);
// generate on-GPU random data
raft::random::Rng r(ps.seed);
if (ps.leaf_algo == fil::leaf_algo_t::CATEGORICAL_LEAF) {
// [0..num_classes)
r.uniformInt(weights_int_d.data(), num_nodes, 0, ps.num_classes, stream);
} else if (ps.leaf_algo == fil::leaf_algo_t::VECTOR_LEAF) {
std::mt19937 gen(3);
std::uniform_real_distribution<real_t> dist(0, 1);
vector_leaf.resize(num_nodes * ps.num_classes);
for (size_t i = 0; i < vector_leaf.size(); i++) {
vector_leaf[i] = dist(gen);
}
// Normalise probabilities to 1
for (size_t i = 0; i < vector_leaf.size(); i += ps.num_classes) {
auto sum = std::accumulate(&vector_leaf[i], &vector_leaf[i + ps.num_classes], real_t(0));
for (size_t j = i; j < i + ps.num_classes; j++) {
vector_leaf[j] /= sum;
}
}
} else {
r.uniform(weights_real_d.data(), num_nodes, real_t(-1), real_t(1), stream);
}
r.uniform(thresholds_d.data(), num_nodes, real_t(-1), real_t(1), stream);
r.uniformInt(fids_d.data(), num_nodes, 0, ps.num_cols, stream);
r.bernoulli(def_lefts_d.data(), num_nodes, 0.5f, stream);
r.bernoulli(is_leafs_d.data(), num_nodes, ps.leaf_prob, stream);
hard_clipped_bernoulli(
r, is_categoricals_d.data(), num_nodes, 1.0f - ps.node_categorical_prob, stream);
// copy data to host
std::vector<real_t> thresholds_h(num_nodes), weights_real_h(num_nodes);
std::vector<float> is_categoricals_h(num_nodes);
std::vector<int> weights_int_h(num_nodes), fids_h(num_nodes), node_cat_set(num_nodes);
std::vector<float> fid_num_cats_h(ps.num_cols);
std::vector<bool> feature_categorical(ps.num_cols);
// bool vectors are not guaranteed to be stored byte-per-value
bool* def_lefts_h = new bool[num_nodes];
bool* is_leafs_h = new bool[num_nodes];
// uniformily distributed in orders of magnitude: smaller models which
// still stress large bitfields.
// up to 10**ps.max_magnitude_of_matching_cat (only if feature is categorical, else -1)
std::mt19937 gen(ps.seed);
std::uniform_real_distribution mmc(-1.0f, ps.max_magnitude_of_matching_cat);
std::bernoulli_distribution fc(ps.feature_categorical_prob);
cat_sets_h.fid_num_cats.resize(ps.num_cols);
for (int fid = 0; fid < ps.num_cols; ++fid) {
feature_categorical[fid] = fc(gen);
if (feature_categorical[fid]) {
// categorical features will never have fid_num_cats == 0
float mm = ceil(pow(10, mmc(gen)));
ASSERT(mm < float(MAX_FIL_INT_FLOAT),
"internal error: max_magnitude_of_matching_cat %f is too large",
ps.max_magnitude_of_matching_cat);
cat_sets_h.fid_num_cats[fid] = mm;
} else {
cat_sets_h.fid_num_cats[fid] = 0.0f;
}
}
raft::update_host(weights_int_h.data(), weights_int_d.data(), num_nodes, stream);
raft::update_host(weights_real_h.data(), weights_real_d.data(), num_nodes, stream);
raft::update_host(thresholds_h.data(), thresholds_d.data(), num_nodes, stream);
raft::update_host(fids_h.data(), fids_d.data(), num_nodes, stream);
raft::update_host(def_lefts_h, def_lefts_d.data(), num_nodes, stream);
raft::update_host(is_leafs_h, is_leafs_d.data(), num_nodes, stream);
raft::update_host(is_categoricals_h.data(), is_categoricals_d.data(), num_nodes, stream);
handle.sync_stream();
// mark leaves
for (int i = 0; i < ps.num_trees; ++i) {
int num_tree_nodes = tree_num_nodes();
size_t leaf_start = num_tree_nodes * i + num_tree_nodes / 2;
size_t leaf_end = num_tree_nodes * (i + 1);
for (size_t j = leaf_start; j < leaf_end; ++j) {
is_leafs_h[j] = true;
}
}
// count nodes for each feature id, while splitting the sets between nodes
std::size_t bit_pool_size = 0;
cat_sets_h.n_nodes = std::vector<std::size_t>(ps.num_cols, 0);
for (std::size_t node_id = 0; node_id < num_nodes; ++node_id) {
int fid = fids_h[node_id];
if (!feature_categorical[fid] || is_leafs_h[node_id]) is_categoricals_h[node_id] = 0.0f;
if (is_categoricals_h[node_id] == 1.0f) {
// might allocate a categorical set for an unreachable inner node. That's OK.
++cat_sets_h.n_nodes[fid];
node_cat_set[node_id] = bit_pool_size;
bit_pool_size += cat_sets_h.accessor().sizeof_mask(fid);
}
}
cat_sets_h.bits.resize(bit_pool_size);
raft::update_device(fid_num_cats_d.data(), cat_sets_h.fid_num_cats.data(), ps.num_cols, stream);
// calculate sizes and allocate arrays for category sets
// fill category sets
// there is a faster trick with a 256-byte LUT, but we can implement it later if the tests
// become too slow
rmm::device_uvector<float> bits_precursor_d(cat_sets_h.bits.size() * BITS_PER_BYTE, stream);
rmm::device_uvector<uint8_t> bits_d(cat_sets_h.bits.size(), stream);
if (cat_sets_h.bits.size() != 0) {
hard_clipped_bernoulli(r,
bits_precursor_d.data(),
cat_sets_h.bits.size() * BITS_PER_BYTE,
1.0f - ps.cat_match_prob,
stream);
floats_to_bit_stream_k<<<raft::ceildiv(cat_sets_h.bits.size(), (std::size_t)FIL_TPB),
FIL_TPB,
0,
stream>>>(
bits_d.data(), bits_precursor_d.data(), cat_sets_h.bits.size());
raft::update_host(cat_sets_h.bits.data(), bits_d.data(), cat_sets_h.bits.size(), stream);
}
// initialize nodes
nodes.resize(num_nodes);
for (size_t i = 0; i < num_nodes; ++i) {
fil::val_t<real_t> w;
switch (ps.leaf_algo) {
case fil::leaf_algo_t::CATEGORICAL_LEAF: w.idx = weights_int_h[i]; break;
case fil::leaf_algo_t::FLOAT_UNARY_BINARY:
case fil::leaf_algo_t::GROVE_PER_CLASS:
// not relying on fil::val_t<float> internals
// merely that we copied floats into weights_h earlier
w.f = weights_real_h[i];
break;
case fil::leaf_algo_t::VECTOR_LEAF: w.idx = i; break;
default: ASSERT(false, "internal error: invalid ps.leaf_algo");
}
// make sure nodes are categorical only when their feature ID is categorical
bool is_categorical = is_categoricals_h[i] == 1.0f;
val_t<real_t> split;
if (is_categorical)
split.idx = node_cat_set[i];
else
split.f = thresholds_h[i];
nodes[i] =
fil::dense_node<real_t>(w, split, fids_h[i], def_lefts_h[i], is_leafs_h[i], is_categorical);
}
// clean up
delete[] def_lefts_h;
delete[] is_leafs_h;
// cat_sets_h.bits and fid_num_cats_d are now visible to host
}
void generate_data()
{
auto stream = handle.get_stream();
// allocate arrays
size_t num_data = ps.num_rows * ps.num_cols;
rmm::device_uvector<bool> mask_d(num_data, stream);
// generate random data
raft::random::Rng r(ps.seed);
r.uniform(data_d.data(), num_data, real_t(-1), real_t(1), stream);
thrust::transform(
thrust::cuda::par.on(stream),
data_d.data(),
data_d.data() + num_data,
thrust::counting_iterator(0),
data_d.data(),
replace_some_floating_with_categorical<real_t>{fid_num_cats_d.data(), ps.num_cols});
r.bernoulli(mask_d.data(), num_data, 1 - ps.nan_prob, stream);
int tpb = 256;
nan_kernel<<<raft::ceildiv(int(num_data), tpb), tpb, 0, stream>>>(
data_d.data(), mask_d.data(), num_data, std::numeric_limits<real_t>::quiet_NaN());
RAFT_CUDA_TRY(cudaPeekAtLastError());
// copy to host
data_h.resize(num_data);
raft::update_host(data_h.data(), data_d.data(), num_data, stream);
handle.sync_stream();
}
void apply_softmax(real_t* class_scores)
{
real_t max = *std::max_element(class_scores, &class_scores[ps.num_classes]);
for (int i = 0; i < ps.num_classes; ++i)
class_scores[i] = exp(class_scores[i] - max);
real_t sum = std::accumulate(class_scores, &class_scores[ps.num_classes], real_t(0));
for (int i = 0; i < ps.num_classes; ++i)
class_scores[i] /= sum;
}
void transform(real_t f, real_t& proba, real_t& output)
{
if ((ps.output & fil::output_t::AVG) != 0) {
if (ps.leaf_algo == fil::leaf_algo_t::GROVE_PER_CLASS) {
f /= ps.num_trees / ps.num_classes;
} else {
f *= real_t(1) / ps.num_trees;
}
}
f += ps.global_bias;
if ((ps.output & fil::output_t::SIGMOID) != 0) { f = sigmoid(f); }
proba = f;
if ((ps.output & fil::output_t::CLASS) != 0) { f = f > ps.threshold ? real_t(1) : real_t(0); }
output = f;
}
void complement(real_t* proba) { proba[0] = real_t(1) - proba[1]; }
void predict_on_cpu()
{
auto stream = handle.get_stream();
// predict on host
std::vector<real_t> want_preds_h(ps.num_preds_outputs());
want_proba_h.resize(ps.num_proba_outputs());
int num_nodes = tree_num_nodes();
std::vector<real_t> class_scores(ps.num_classes);
// we use tree_base::child_index() on CPU
tree_base base{cat_sets_h.accessor()};
switch (ps.leaf_algo) {
case fil::leaf_algo_t::FLOAT_UNARY_BINARY:
for (int i = 0; i < ps.num_rows; ++i) {
real_t pred = 0;
for (int j = 0; j < ps.num_trees; ++j) {
pred += infer_one_tree(&nodes[j * num_nodes], &data_h[i * ps.num_cols], base).f;
}
transform(pred, want_proba_h[i * 2 + 1], want_preds_h[i]);
complement(&(want_proba_h[i * 2]));
}
break;
case fil::leaf_algo_t::GROVE_PER_CLASS:
for (int row = 0; row < ps.num_rows; ++row) {
std::fill(class_scores.begin(), class_scores.end(), real_t(0));
for (int tree = 0; tree < ps.num_trees; ++tree) {
class_scores[tree % ps.num_classes] +=
infer_one_tree(&nodes[tree * num_nodes], &data_h[row * ps.num_cols], base).f;
}
want_preds_h[row] =
std::max_element(class_scores.begin(), class_scores.end()) - class_scores.begin();
for (int c = 0; c < ps.num_classes; ++c) {
real_t thresholded_proba; // not used;
transform(class_scores[c], want_proba_h[row * ps.num_classes + c], thresholded_proba);
}
if ((ps.output & fil::output_t::SOFTMAX) != 0)
apply_softmax(&want_proba_h[row * ps.num_classes]);
}
break;
case fil::leaf_algo_t::CATEGORICAL_LEAF: {
std::vector<int> class_votes(ps.num_classes);
for (int r = 0; r < ps.num_rows; ++r) {
std::fill(class_votes.begin(), class_votes.end(), 0);
for (int j = 0; j < ps.num_trees; ++j) {
int class_label =
infer_one_tree(&nodes[j * num_nodes], &data_h[r * ps.num_cols], base).idx;
++class_votes[class_label];
}
for (int c = 0; c < ps.num_classes; ++c) {
real_t thresholded_proba; // not used; do argmax instead
transform(class_votes[c], want_proba_h[r * ps.num_classes + c], thresholded_proba);
}
want_preds_h[r] =
std::max_element(class_votes.begin(), class_votes.end()) - class_votes.begin();
}
break;
}
case fil::leaf_algo_t::VECTOR_LEAF:
for (int r = 0; r < ps.num_rows; ++r) {
std::vector<real_t> class_probabilities(ps.num_classes);
for (int j = 0; j < ps.num_trees; ++j) {
int vector_index =
infer_one_tree(&nodes[j * num_nodes], &data_h[r * ps.num_cols], base).idx;
real_t sum = 0;
for (int k = 0; k < ps.num_classes; k++) {
class_probabilities[k] += vector_leaf[vector_index * ps.num_classes + k];
sum += vector_leaf[vector_index * ps.num_classes + k];
}
ASSERT_LE(std::abs(sum - real_t(1)), real_t(1e-5));
}
for (int c = 0; c < ps.num_classes; ++c) {
want_proba_h[r * ps.num_classes + c] = class_probabilities[c];
}
want_preds_h[r] =
std::max_element(class_probabilities.begin(), class_probabilities.end()) -
class_probabilities.begin();
}
break;
case fil::leaf_algo_t::GROVE_PER_CLASS_FEW_CLASSES:
case fil::leaf_algo_t::GROVE_PER_CLASS_MANY_CLASSES: break;
}
// copy to GPU
want_preds_d.resize(ps.num_preds_outputs(), stream);
want_proba_d.resize(ps.num_proba_outputs(), stream);
raft::update_device(want_preds_d.data(), want_preds_h.data(), ps.num_preds_outputs(), stream);
raft::update_device(want_proba_d.data(), want_proba_h.data(), ps.num_proba_outputs(), stream);
handle.sync_stream();
}
virtual void init_forest(fil::forest_t<real_t>* pforest) = 0;
void predict_on_gpu()
{
auto stream = handle.get_stream();
fil::forest_t<real_t> forest = nullptr;
init_forest(&forest);
// predict
preds_d.resize(ps.num_preds_outputs(), stream);
proba_d.resize(ps.num_proba_outputs(), stream);
fil::predict(handle, forest, preds_d.data(), data_d.data(), ps.num_rows);
fil::predict(handle, forest, proba_d.data(), data_d.data(), ps.num_rows, true);
handle.sync_stream();
// cleanup
fil::free(handle, forest);
}
void compare()
{
ASSERT_TRUE(MLCommon::devArrMatch(want_proba_d.data(),
proba_d.data(),
ps.num_proba_outputs(),
MLCommon::CompareApprox<real_t>(ps.tolerance),
stream));
float tolerance = ps.leaf_algo == fil::leaf_algo_t::FLOAT_UNARY_BINARY
? ps.tolerance
: std::numeric_limits<real_t>::epsilon();
// in multi-class prediction, floats represent the most likely class
// and would be generated by converting an int to float
ASSERT_TRUE(MLCommon::devArrMatch(want_preds_d.data(),
preds_d.data(),
ps.num_rows,
MLCommon::CompareApprox<real_t>(tolerance),
stream));
}
fil::val_t<real_t> infer_one_tree(fil::dense_node<real_t>* root,
real_t* data,
const tree_base& tree)
{
int curr = 0;
fil::val_t<real_t> output{.f = 0.0f};
for (;;) {
const fil::dense_node<real_t>& node = root[curr];
if (node.is_leaf()) return node.template output<val_t<real_t>>();
real_t val = data[node.fid()];
curr = tree.child_index<true>(node, curr, val);
}
return output;
}
  // number of nodes in one full binary tree of depth ps.depth
  // (NOTE(review): would overflow int for depth >= 31 — fine for test depths)
  int tree_num_nodes() { return (1 << (ps.depth + 1)) - 1; }
  // total node count across all trees in the forest
  int forest_num_nodes() { return tree_num_nodes() * ps.num_trees; }
  // parameters
  FilTestParams ps;
  raft::handle_t handle;
  cudaStream_t stream = 0;
  // predictions
  rmm::device_uvector<real_t> preds_d;
  rmm::device_uvector<real_t> proba_d;
  rmm::device_uvector<real_t> want_preds_d;
  rmm::device_uvector<real_t> want_proba_d;
  // input data
  rmm::device_uvector<real_t> data_d;
  std::vector<real_t> data_h;
  std::vector<real_t> want_proba_h;
  // forest data
  std::vector<fil::dense_node<real_t>> nodes;
  std::vector<real_t> vector_leaf;
  cat_sets_owner cat_sets_h;
  // feature ids / category counts, device-side (empty until initialized)
  rmm::device_uvector<int> fids_d = rmm::device_uvector<int>(0, cudaStream_t());
  rmm::device_uvector<float> fid_num_cats_d = rmm::device_uvector<float>(0, cudaStream_t());
};
/** Test fixture that initializes a FIL forest directly from node arrays.
    For sparse node types, converts the fixture's dense trees (implicit
    breadth-first layout) into explicit sparse trees first. */
template <typename fil_node_t>
class BasePredictFilTest : public BaseFilTest<typename fil_node_t::real_type> {
  using real_t = typename fil_node_t::real_type;

 protected:
  /** Recursively converts one dense node (and its subtree) into sparse form.
      @param dense_root base of the dense tree's node array
      @param i_dense index of the node to convert within the dense tree
      @param i_sparse_root index of this tree's root in sparse_nodes
      @param i_sparse destination slot in sparse_nodes (pre-reserved) */
  void dense2sparse_node(const fil::dense_node<real_t>* dense_root,
                         int i_dense,
                         int i_sparse_root,
                         int i_sparse)
  {
    const fil::dense_node<real_t>& node = dense_root[i_dense];
    if (node.is_leaf()) {
      // leaf sparse node
      sparse_nodes[i_sparse] = fil_node_t(node.template output<fil::val_t<real_t>>(),
                                          {},
                                          node.fid(),
                                          node.def_left(),
                                          node.is_leaf(),
                                          false,
                                          0);
      return;
    }
    // inner sparse node
    // reserve space for children (left and right are adjacent by convention)
    int left_index = sparse_nodes.size();
    sparse_nodes.push_back(fil_node_t());
    sparse_nodes.push_back(fil_node_t());
    // left child offset is stored relative to the tree's root
    sparse_nodes[i_sparse] = fil_node_t({},
                                        node.split(),
                                        node.fid(),
                                        node.def_left(),
                                        node.is_leaf(),
                                        node.is_categorical(),
                                        left_index - i_sparse_root);
    // dense children live at 2i+1 / 2i+2 in breadth-first layout
    dense2sparse_node(dense_root, 2 * i_dense + 1, i_sparse_root, left_index);
    dense2sparse_node(dense_root, 2 * i_dense + 2, i_sparse_root, left_index + 1);
  }

  /** Converts one whole dense tree, appending it to sparse_nodes and
      recording its root index in trees. */
  void dense2sparse_tree(const fil::dense_node<real_t>* dense_root)
  {
    int i_sparse_root = sparse_nodes.size();
    sparse_nodes.push_back(fil_node_t());
    dense2sparse_node(dense_root, 0, i_sparse_root, i_sparse_root);
    trees.push_back(i_sparse_root);
  }

  /// Converts every tree of the fixture's dense forest into sparse form.
  void dense2sparse()
  {
    for (int tree = 0; tree < this->ps.num_trees; ++tree) {
      dense2sparse_tree(&this->nodes[tree * this->tree_num_nodes()]);
    }
  }

  /** Builds the FIL forest from the (possibly converted) node arrays and the
      test parameters. */
  void init_forest(fil::forest_t<real_t>* pforest) override
  {
    constexpr bool IS_DENSE = node_traits<fil_node_t>::IS_DENSE;
    std::vector<fil_node_t> init_nodes;
    if constexpr (!IS_DENSE) {
      dense2sparse();
      init_nodes = sparse_nodes;
    } else {
      init_nodes = this->nodes;
    }
    // forest_params_t::num_nodes is an int; guard against overflow
    ASSERT(init_nodes.size() < std::size_t(INT_MAX), "generated too many nodes");
    // init FIL model
    fil::forest_params_t fil_params = {
      .num_nodes        = static_cast<int>(init_nodes.size()),
      .depth            = this->ps.depth,
      .num_trees        = this->ps.num_trees,
      .num_cols         = this->ps.num_cols,
      .leaf_algo        = this->ps.leaf_algo,
      .algo             = this->ps.algo,
      .output           = this->ps.output,
      .threshold        = this->ps.threshold,
      .global_bias      = this->ps.global_bias,
      .num_classes      = this->ps.num_classes,
      .blocks_per_sm    = this->ps.blocks_per_sm,
      .threads_per_tree = this->ps.threads_per_tree,
      .n_items          = this->ps.n_items,
    };
    fil::init(this->handle,
              pforest,
              this->cat_sets_h.accessor(),
              this->vector_leaf,
              trees.data(),
              init_nodes.data(),
              &fil_params);
  }

  // sparse representation built by dense2sparse() (unused for dense nodes)
  std::vector<fil_node_t> sparse_nodes;
  // per-tree root indices into sparse_nodes
  std::vector<int> trees;
};
// concrete fixture instantiations: node storage format x floating-point width
using PredictDenseFloat32FilTest    = BasePredictFilTest<fil::dense_node<float>>;
using PredictDenseFloat64FilTest    = BasePredictFilTest<fil::dense_node<double>>;
using PredictSparse16Float32FilTest = BasePredictFilTest<fil::sparse_node16<float>>;
using PredictSparse16Float64FilTest = BasePredictFilTest<fil::sparse_node16<double>>;
using PredictSparse8FilTest         = BasePredictFilTest<fil::sparse_node8>;
/** Test fixture that round-trips the generated forest through the treelite
    model-builder API and imports it back into FIL via from_treelite(). */
template <typename real_t>
class TreeliteFilTest : public BaseFilTest<real_t> {
 protected:
  /** adds nodes[node] of tree starting at index root to builder
      at index at *pkey, increments *pkey,
      and returns the treelite key of the node */
  int node_to_treelite(tlf::TreeBuilder* builder, int* pkey, int root, int node)
  {
    int key = (*pkey)++;
    builder->CreateNode(key);
    const fil::dense_node<real_t>& dense_node = this->nodes[node];
    std::vector<std::uint32_t> left_categories;
    if (dense_node.is_leaf()) {
      switch (this->ps.leaf_algo) {
        case fil::leaf_algo_t::FLOAT_UNARY_BINARY:
        case fil::leaf_algo_t::GROVE_PER_CLASS:
          // default is fil::FLOAT_UNARY_BINARY
          builder->SetLeafNode(key, tlf::Value::Create(dense_node.template output<real_t>()));
          break;
        case fil::leaf_algo_t::CATEGORICAL_LEAF: {
          // one-hot leaf vector: 1 for the stored class, 0 elsewhere
          std::vector<tlf::Value> vec(this->ps.num_classes);
          for (int i = 0; i < this->ps.num_classes; ++i) {
            vec[i] =
              tlf::Value::Create(i == dense_node.template output<int>() ? real_t(1) : real_t(0));
          }
          builder->SetLeafVectorNode(key, vec);
          break;
        }
        case fil::leaf_algo_t::VECTOR_LEAF: {
          // copy the leaf's probability vector out of the fixture's table
          std::vector<tlf::Value> vec(this->ps.num_classes);
          for (int i = 0; i < this->ps.num_classes; ++i) {
            auto idx = dense_node.template output<int>();
            vec[i]   = tlf::Value::Create(this->vector_leaf[idx * this->ps.num_classes + i]);
          }
          builder->SetLeafVectorNode(key, vec);
          break;
        }
        case fil::leaf_algo_t::GROVE_PER_CLASS_FEW_CLASSES:
        case fil::leaf_algo_t::GROVE_PER_CLASS_MANY_CLASSES: break;
      }
    } else {
      // dense breadth-first layout relative to this tree's root
      int left          = root + 2 * (node - root) + 1;
      int right         = root + 2 * (node - root) + 2;
      bool default_left = dense_node.def_left();
      // categorical splits carry no numeric threshold
      real_t threshold  = dense_node.is_categorical() ? std::numeric_limits<real_t>::quiet_NaN()
                                                      : dense_node.thresh();
      if (dense_node.is_categorical()) {
        // decode the category bit-set into an explicit list of matching categories
        uint8_t byte = 0;
        for (int category = 0;
             category < static_cast<int>(this->cat_sets_h.fid_num_cats[dense_node.fid()]);
             ++category) {
          if (category % BITS_PER_BYTE == 0) {
            byte = this->cat_sets_h.bits[dense_node.set() + category / BITS_PER_BYTE];
          }
          if ((byte & (1 << (category % BITS_PER_BYTE))) != 0) {
            left_categories.push_back(category);
          }
        }
      }
      int left_key  = node_to_treelite(builder, pkey, root, left);
      int right_key = node_to_treelite(builder, pkey, root, right);
      // TODO(levsnv): remove workaround once confirmed to work with empty category lists in
      // Treelite
      if (!left_categories.empty() && dense_node.is_categorical()) {
        // Treelite builder APIs don't allow to set categorical_split_right_child
        // (which child the categories pertain to). Only the Tree API allows that.
        // in FIL, categories always pertain to the right child, and the default in treelite
        // is left categories in SetCategoricalTestNode
        std::swap(left_key, right_key);
        default_left = !default_left;
        builder->SetCategoricalTestNode(
          key, dense_node.fid(), left_categories, default_left, left_key, right_key);
      } else {
        adjust_threshold_to_treelite(&threshold, &left_key, &right_key, &default_left, this->ps.op);
        builder->SetNumericalTestNode(key,
                                      dense_node.fid(),
                                      this->ps.op,
                                      tlf::Value::Create(threshold),
                                      default_left,
                                      left_key,
                                      right_key);
      }
    }
    return key;
  }

  /** Builds a treelite model from the fixture's forest, imports it into FIL
      with the given storage type, and returns the forest via pforest. */
  void init_forest_impl(fil::forest_t<real_t>* pforest, fil::storage_type_t storage_type)
  {
    auto stream             = this->handle.get_stream();
    bool random_forest_flag = (this->ps.output & fil::output_t::AVG) != 0;
    tl::TypeInfo tl_type_info =
      std::is_same_v<real_t, float> ? tl::TypeInfo::kFloat32 : tl::TypeInfo::kFloat64;
    int treelite_num_classes =
      this->ps.leaf_algo == fil::leaf_algo_t::FLOAT_UNARY_BINARY ? 1 : this->ps.num_classes;
    std::unique_ptr<tlf::ModelBuilder> model_builder(new tlf::ModelBuilder(
      this->ps.num_cols, treelite_num_classes, random_forest_flag, tl_type_info, tl_type_info));
    // prediction transform
    if ((this->ps.output & fil::output_t::SIGMOID) != 0) {
      if (this->ps.num_classes > 2)
        model_builder->SetModelParam("pred_transform", "multiclass_ova");
      else
        model_builder->SetModelParam("pred_transform", "sigmoid");
    } else if (this->ps.leaf_algo != fil::leaf_algo_t::FLOAT_UNARY_BINARY) {
      model_builder->SetModelParam("pred_transform", "max_index");
      this->ps.output = fil::output_t(this->ps.output | fil::output_t::CLASS);
    } else if (this->ps.leaf_algo == GROVE_PER_CLASS) {
      // NOTE(review): this branch appears unreachable — the preceding
      // `leaf_algo != FLOAT_UNARY_BINARY` test already covers GROVE_PER_CLASS;
      // confirm whether "identity_multiclass" was intended to take precedence
      model_builder->SetModelParam("pred_transform", "identity_multiclass");
    } else {
      model_builder->SetModelParam("pred_transform", "identity");
    }
    // global bias (asprintf is a GNU/BSD extension; allocates the string)
    char* global_bias_str = nullptr;
    ASSERT(asprintf(&global_bias_str, "%f", double(this->ps.global_bias)) > 0,
           "cannot convert global_bias into a string");
    model_builder->SetModelParam("global_bias", global_bias_str);
    ::free(global_bias_str);
    // build the trees
    for (int i_tree = 0; i_tree < this->ps.num_trees; ++i_tree) {
      tlf::TreeBuilder* tree_builder = new tlf::TreeBuilder(tl_type_info, tl_type_info);
      int key_counter                = 0;
      int root                       = i_tree * this->tree_num_nodes();
      int root_key                   = node_to_treelite(tree_builder, &key_counter, root, root);
      tree_builder->SetRootNode(root_key);
      // InsertTree() consumes tree_builder
      TL_CPP_CHECK(model_builder->InsertTree(tree_builder));
    }
    // commit the model
    std::unique_ptr<tl::Model> model = model_builder->CommitModel();
    // init FIL forest with the model
    char* forest_shape_str = nullptr;
    fil::treelite_params_t params;
    params.algo              = this->ps.algo;
    params.threshold         = this->ps.threshold;
    params.output_class      = (this->ps.output & fil::output_t::CLASS) != 0;
    params.storage_type      = storage_type;
    params.blocks_per_sm     = this->ps.blocks_per_sm;
    params.threads_per_tree  = this->ps.threads_per_tree;
    params.n_items           = this->ps.n_items;
    params.pforest_shape_str = this->ps.print_forest_shape ? &forest_shape_str : nullptr;
    params.precision         = fil::PRECISION_NATIVE;
    fil::forest_variant forest_variant;
    fil::from_treelite(this->handle, &forest_variant, (ModelHandle)model.get(), &params);
    *pforest = std::get<fil::forest_t<real_t>>(forest_variant);
    this->handle.sync_stream(stream);
    if (this->ps.print_forest_shape) {
      // sanity-check the human-readable forest summary for expected sections
      std::string str(forest_shape_str);
      for (const char* substr : {"model size",
                                 " MB",
                                 "Depth histogram:",
                                 "Avg nodes per tree",
                                 "Leaf depth",
                                 "Depth histogram fingerprint"}) {
        ASSERT(str.find(substr) != std::string::npos,
               "\"%s\" not found in forest shape :\n%s",
               substr,
               str.c_str());
      }
    }
    ::free(forest_shape_str);
  }
};
// Thin fixtures selecting the FIL storage type used on import.
template <typename real_t>
class TreeliteDenseFilTest : public TreeliteFilTest<real_t> {
 protected:
  void init_forest(fil::forest_t<real_t>* pforest) override
  {
    this->init_forest_impl(pforest, fil::storage_type_t::DENSE);
  }
};

template <typename real_t>
class TreeliteSparse16FilTest : public TreeliteFilTest<real_t> {
 protected:
  void init_forest(fil::forest_t<real_t>* pforest) override
  {
    this->init_forest_impl(pforest, fil::storage_type_t::SPARSE);
  }
};

// sparse8 nodes only exist in single precision
class TreeliteSparse8FilTest : public TreeliteFilTest<float> {
 protected:
  void init_forest(fil::forest_t<float>* pforest) override
  {
    this->init_forest_impl(pforest, fil::storage_type_t::SPARSE8);
  }
};

template <typename real_t>
class TreeliteAutoFilTest : public TreeliteFilTest<real_t> {
 protected:
  void init_forest(fil::forest_t<real_t>* pforest) override
  {
    this->init_forest_impl(pforest, fil::storage_type_t::AUTO);
  }
};
using TreeliteDenseFloat32FilTest = TreeliteDenseFilTest<float>;
using TreeliteDenseFloat64FilTest = TreeliteDenseFilTest<double>;
using TreeliteSparse16Float32FilTest = TreeliteDenseFilTest<float>;
using TreeliteSparse16Float64FilTest = TreeliteDenseFilTest<double>;
using TreeliteAutoFloat32FilTest = TreeliteAutoFilTest<float>;
using TreeliteAutoFloat64FilTest = TreeliteAutoFilTest<double>;
// test for failures; currently only supported for sparse8 nodes
class TreeliteThrowSparse8FilTest : public TreeliteSparse8FilTest {
 protected:
  // model import happens in check(), so this function is empty
  void SetUp() override {}
  // expects setup (model import) to throw because the forest exceeds
  // sparse8 format limits
  void check() { ASSERT_THROW(setup_helper(), raft::exception); }
};

/** mechanism to use named aggregate initialization before C++20, and also use
    the struct defaults. Using it directly only works if all defaulted
    members come after ones explicitly mentioned.
**/
// expands to an immediately-invoked lambda whose local struct runs the
// given member assignments in its constructor, then slices back to
// FilTestParams — effectively designated initializers with defaults
#define FIL_TEST_PARAMS(...)                                    \
  []() {                                                        \
    struct NonDefaultFilTestParams : public FilTestParams {     \
      NonDefaultFilTestParams() { __VA_ARGS__; }                \
    };                                                          \
    return FilTestParams(NonDefaultFilTestParams());            \
  }()
// kEQ is intentionally unused, and kLT is default
static const tl::Operator kLE = tl::Operator::kLE;
static const tl::Operator kGT = tl::Operator::kGT;
static const tl::Operator kGE = tl::Operator::kGE;

// parameter grid for dense-storage prediction tests: covers output transforms,
// reorg algorithms, every leaf algorithm, shared-memory limits, and
// threads-per-tree / n_items combinations
std::vector<FilTestParams> predict_dense_inputs = {
  FIL_TEST_PARAMS(),
  FIL_TEST_PARAMS(algo = TREE_REORG),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(output = SIGMOID),
  FIL_TEST_PARAMS(output = SIGMOID, algo = TREE_REORG),
  FIL_TEST_PARAMS(output = SIGMOID, algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, num_classes = 2),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, algo = TREE_REORG, num_classes = 2),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, algo = BATCH_TREE_REORG, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG),
  FIL_TEST_PARAMS(output = AVG, algo = TREE_REORG),
  FIL_TEST_PARAMS(output = AVG, algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(output = AVG_CLASS, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG_CLASS, algo = TREE_REORG, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG_CLASS, algo = BATCH_TREE_REORG, num_classes = 2),
  FIL_TEST_PARAMS(global_bias = 0.5, algo = TREE_REORG),
  FIL_TEST_PARAMS(output = SIGMOID, global_bias = 0.5, algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(output = AVG, global_bias = 0.5),
  FIL_TEST_PARAMS(
    output = AVG_CLASS, threshold = 1.0, global_bias = 0.5, algo = TREE_REORG, num_classes = 2),
  FIL_TEST_PARAMS(output = SIGMOID, algo = ALGO_AUTO),
  FIL_TEST_PARAMS(
    output = AVG_CLASS, algo = BATCH_TREE_REORG, leaf_algo = CATEGORICAL_LEAF, num_classes = 5),
  FIL_TEST_PARAMS(output = AVG_CLASS, num_classes = 2),
  FIL_TEST_PARAMS(algo = TREE_REORG, leaf_algo = CATEGORICAL_LEAF, num_classes = 5),
  FIL_TEST_PARAMS(output = SIGMOID, leaf_algo = CATEGORICAL_LEAF, num_classes = 7),
  FIL_TEST_PARAMS(
    global_bias = 0.5, algo = TREE_REORG, leaf_algo = CATEGORICAL_LEAF, num_classes = 4),
  FIL_TEST_PARAMS(output = AVG, global_bias = 0.5, leaf_algo = CATEGORICAL_LEAF, num_classes = 4),
  FIL_TEST_PARAMS(
    output = AVG_CLASS, algo = BATCH_TREE_REORG, leaf_algo = GROVE_PER_CLASS, num_classes = 5),
  FIL_TEST_PARAMS(algo = TREE_REORG, leaf_algo = GROVE_PER_CLASS, num_classes = 5),
  FIL_TEST_PARAMS(num_trees = 49, output = SIGMOID, leaf_algo = GROVE_PER_CLASS, num_classes = 7),
  FIL_TEST_PARAMS(num_trees   = 52,
                  global_bias = 0.5,
                  algo        = TREE_REORG,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 4),
  FIL_TEST_PARAMS(
    num_trees = 52, output = AVG, global_bias = 0.5, leaf_algo = GROVE_PER_CLASS, num_classes = 4),
  FIL_TEST_PARAMS(blocks_per_sm = 1),
  FIL_TEST_PARAMS(blocks_per_sm = 4),
  FIL_TEST_PARAMS(num_classes = 3, blocks_per_sm = 1, leaf_algo = CATEGORICAL_LEAF),
  FIL_TEST_PARAMS(num_classes = 3, blocks_per_sm = 4, leaf_algo = CATEGORICAL_LEAF),
  FIL_TEST_PARAMS(num_classes = 5, blocks_per_sm = 1, leaf_algo = GROVE_PER_CLASS),
  FIL_TEST_PARAMS(num_classes = 5, blocks_per_sm = 4, leaf_algo = GROVE_PER_CLASS),
  FIL_TEST_PARAMS(
    leaf_algo = GROVE_PER_CLASS, blocks_per_sm = 1, num_trees = 512, num_classes = 512),
  FIL_TEST_PARAMS(
    leaf_algo = GROVE_PER_CLASS, blocks_per_sm = 4, num_trees = 512, num_classes = 512),
  FIL_TEST_PARAMS(num_trees = 52, output = SOFTMAX, leaf_algo = GROVE_PER_CLASS, num_classes = 4),
  FIL_TEST_PARAMS(
    num_trees = 52, output = AVG_SOFTMAX, leaf_algo = GROVE_PER_CLASS, num_classes = 4),
  FIL_TEST_PARAMS(num_trees   = 3 * (FIL_TPB + 1),
                  output      = SOFTMAX,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = FIL_TPB + 1),
  FIL_TEST_PARAMS(num_trees   = 3 * (FIL_TPB + 1),
                  output      = AVG_SOFTMAX,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = FIL_TPB + 1),
  FIL_TEST_PARAMS(num_rows  = 10'000,
                  num_cols  = 100'000,
                  depth     = 5,
                  num_trees = 1,
                  leaf_algo = FLOAT_UNARY_BINARY),
  FIL_TEST_PARAMS(num_rows    = 101,
                  num_cols    = 100'000,
                  depth       = 5,
                  num_trees   = 9,
                  algo        = BATCH_TREE_REORG,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 3),
  FIL_TEST_PARAMS(num_rows    = 102,
                  num_cols    = 100'000,
                  depth       = 5,
                  num_trees   = 3 * (FIL_TPB + 1),
                  algo        = BATCH_TREE_REORG,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = FIL_TPB + 1),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 100'000,
                  depth       = 5,
                  num_trees   = 1,
                  algo        = BATCH_TREE_REORG,
                  leaf_algo   = CATEGORICAL_LEAF,
                  num_classes = 3),
  // use shared memory opt-in carveout if available, or infer out of L1 cache
  FIL_TEST_PARAMS(num_rows = 103, num_cols = MAX_SHM_STD / sizeof(float) + 1024, algo = NAIVE),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = MAX_SHM_STD / sizeof(float) + 1024,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 5),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = MAX_SHM_STD / sizeof(float) + 1024,
                  num_trees   = FIL_TPB + 1,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = FIL_TPB + 1),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = MAX_SHM_STD / sizeof(float) + 1024,
                  leaf_algo   = CATEGORICAL_LEAF,
                  num_classes = 3),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, threads_per_tree = 2),
  FIL_TEST_PARAMS(algo = NAIVE, threads_per_tree = 4),
  FIL_TEST_PARAMS(algo = TREE_REORG, threads_per_tree = 8),
  FIL_TEST_PARAMS(algo = ALGO_AUTO, threads_per_tree = 16),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, threads_per_tree = 32),
  FIL_TEST_PARAMS(algo = NAIVE, threads_per_tree = 64),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, threads_per_tree = 128, n_items = 3),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, threads_per_tree = 256),
  FIL_TEST_PARAMS(algo = TREE_REORG, threads_per_tree = 32, n_items = 1),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, threads_per_tree = 16, n_items = 4),
  FIL_TEST_PARAMS(algo = NAIVE, threads_per_tree = 32, n_items = 4),
  FIL_TEST_PARAMS(
    num_rows = 500, num_cols = 2000, algo = BATCH_TREE_REORG, threads_per_tree = 64, n_items = 4),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_classes = 2),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_trees = 9, num_classes = 20),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 100'000,
                  depth       = 5,
                  num_trees   = 1,
                  algo        = BATCH_TREE_REORG,
                  leaf_algo   = VECTOR_LEAF,
                  num_classes = 3),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 5,
                  depth       = 5,
                  num_trees   = 3,
                  leaf_algo   = VECTOR_LEAF,
                  num_classes = 4000),
  FIL_TEST_PARAMS(node_categorical_prob = 0.5, feature_categorical_prob = 0.5),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 1.0),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 0.0),
  FIL_TEST_PARAMS(depth                         = 3,
                  node_categorical_prob         = 0.5,
                  feature_categorical_prob      = 0.5,
                  max_magnitude_of_matching_cat = 5),
};

TEST_P(PredictDenseFloat32FilTest, Predict) { compare(); }
TEST_P(PredictDenseFloat64FilTest, Predict) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests,
                        PredictDenseFloat32FilTest,
                        testing::ValuesIn(predict_dense_inputs));
INSTANTIATE_TEST_CASE_P(FilTests,
                        PredictDenseFloat64FilTest,
                        testing::ValuesIn(predict_dense_inputs));
// parameter grid for sparse-storage prediction tests (sparse16 and sparse8)
std::vector<FilTestParams> predict_sparse_inputs = {
  FIL_TEST_PARAMS(),
  FIL_TEST_PARAMS(output = SIGMOID),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG),
  FIL_TEST_PARAMS(output = AVG_CLASS, global_bias = 0.5, num_classes = 2),
  FIL_TEST_PARAMS(global_bias = 0.5),
  FIL_TEST_PARAMS(output = SIGMOID, global_bias = 0.5),
  FIL_TEST_PARAMS(output = AVG, global_bias = 0.5),
  FIL_TEST_PARAMS(output = AVG_CLASS, threshold = 1.0, global_bias = 0.5, num_classes = 2),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, algo = ALGO_AUTO, num_classes = 2),
  FIL_TEST_PARAMS(output      = AVG_CLASS,
                  threshold   = 1.0,
                  global_bias = 0.5,
                  leaf_algo   = CATEGORICAL_LEAF,
                  num_classes = 5000),
  FIL_TEST_PARAMS(global_bias = 0.5, leaf_algo = CATEGORICAL_LEAF, num_classes = 6),
  FIL_TEST_PARAMS(output = CLASS, leaf_algo = CATEGORICAL_LEAF, num_classes = 3),
  FIL_TEST_PARAMS(leaf_algo = CATEGORICAL_LEAF, num_classes = 3),
  FIL_TEST_PARAMS(depth       = 2,
                  num_trees   = 5000,
                  output      = AVG_CLASS,
                  threshold   = 1.0,
                  global_bias = 0.5,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 5000),
  FIL_TEST_PARAMS(num_trees = 60, global_bias = 0.5, leaf_algo = GROVE_PER_CLASS, num_classes = 6),
  FIL_TEST_PARAMS(num_trees = 51, output = CLASS, leaf_algo = GROVE_PER_CLASS, num_classes = 3),
  FIL_TEST_PARAMS(num_trees = 51, leaf_algo = GROVE_PER_CLASS, num_classes = 3),
  FIL_TEST_PARAMS(algo = NAIVE, threads_per_tree = 2),
  FIL_TEST_PARAMS(algo = NAIVE, threads_per_tree = 8, n_items = 1),
  FIL_TEST_PARAMS(algo = ALGO_AUTO, threads_per_tree = 16, n_items = 1),
  FIL_TEST_PARAMS(algo = ALGO_AUTO, threads_per_tree = 32),
  FIL_TEST_PARAMS(num_cols = 1, num_trees = 1, algo = NAIVE, threads_per_tree = 64, n_items = 1),
  FIL_TEST_PARAMS(num_rows = 500, num_cols = 2000, algo = NAIVE, threads_per_tree = 64),
  FIL_TEST_PARAMS(
    num_rows = 500, num_cols = 2000, algo = ALGO_AUTO, threads_per_tree = 256, n_items = 1),
  FIL_TEST_PARAMS(num_trees = 51, leaf_algo = VECTOR_LEAF, num_classes = 15),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_trees = 9, num_classes = 20),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 1000,
                  depth       = 5,
                  num_trees   = 1,
                  leaf_algo   = VECTOR_LEAF,
                  num_classes = 3),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 5,
                  depth       = 5,
                  num_trees   = 3,
                  leaf_algo   = VECTOR_LEAF,
                  num_classes = 4000),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 5,
                  depth       = 5,
                  num_trees   = 530,
                  leaf_algo   = VECTOR_LEAF,
                  num_classes = 11),
  FIL_TEST_PARAMS(num_rows    = 103,
                  num_cols    = 5,
                  depth       = 5,
                  num_trees   = 530,
                  leaf_algo   = VECTOR_LEAF,
                  num_classes = 1111),
  FIL_TEST_PARAMS(node_categorical_prob = 0.5, feature_categorical_prob = 0.5),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 1.0),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 0.0),
  FIL_TEST_PARAMS(depth                         = 3,
                  node_categorical_prob         = 0.5,
                  feature_categorical_prob      = 0.5,
                  max_magnitude_of_matching_cat = 5),
};

TEST_P(PredictSparse16Float32FilTest, Predict) { compare(); }
TEST_P(PredictSparse16Float64FilTest, Predict) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests,
                        PredictSparse16Float32FilTest,
                        testing::ValuesIn(predict_sparse_inputs));
INSTANTIATE_TEST_CASE_P(FilTests,
                        PredictSparse16Float64FilTest,
                        testing::ValuesIn(predict_sparse_inputs));
TEST_P(PredictSparse8FilTest, Predict) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests, PredictSparse8FilTest, testing::ValuesIn(predict_sparse_inputs));
// parameter grid for treelite import tests with DENSE storage; also varies
// the treelite comparison operator (kLE/kGT/kGE vs default kLT)
std::vector<FilTestParams> import_dense_inputs = {
  FIL_TEST_PARAMS(),
  FIL_TEST_PARAMS(output = SIGMOID, op = kLE),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, op = kGT, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG, op = kGE),
  FIL_TEST_PARAMS(output = AVG_CLASS, num_classes = 2),
  FIL_TEST_PARAMS(algo = TREE_REORG, op = kLE),
  FIL_TEST_PARAMS(output = SIGMOID, algo = TREE_REORG, op = kGT),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, algo = TREE_REORG, op = kGE, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG, algo = TREE_REORG),
  FIL_TEST_PARAMS(output = AVG_CLASS, algo = TREE_REORG, op = kLE, num_classes = 2),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(output = SIGMOID, algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, op = kLE),
  FIL_TEST_PARAMS(output = SIGMOID, algo = BATCH_TREE_REORG, op = kLE),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, op = kGT),
  FIL_TEST_PARAMS(output = SIGMOID, algo = BATCH_TREE_REORG, op = kGT),
  FIL_TEST_PARAMS(algo = BATCH_TREE_REORG, op = kGE),
  FIL_TEST_PARAMS(output = SIGMOID, algo = BATCH_TREE_REORG, op = kGE),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, algo = BATCH_TREE_REORG, num_classes = 2),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, algo = BATCH_TREE_REORG, op = kLE, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG, algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(output = AVG, algo = BATCH_TREE_REORG, op = kLE),
  FIL_TEST_PARAMS(output = AVG_CLASS, algo = BATCH_TREE_REORG, op = kGT, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG_CLASS, algo = BATCH_TREE_REORG, op = kGE, num_classes = 2),
  FIL_TEST_PARAMS(global_bias = 0.5, algo = TREE_REORG),
  FIL_TEST_PARAMS(output = SIGMOID, global_bias = 0.5, algo = BATCH_TREE_REORG, op = kLE),
  FIL_TEST_PARAMS(output = AVG, global_bias = 0.5, op = kGT),
  FIL_TEST_PARAMS(output      = AVG_CLASS,
                  threshold   = 1.0,
                  global_bias = 0.5,
                  algo        = TREE_REORG,
                  op          = kGE,
                  num_classes = 2),
  FIL_TEST_PARAMS(output = SIGMOID, algo = ALGO_AUTO, op = kLE),
  FIL_TEST_PARAMS(output = SIGMOID, algo = ALGO_AUTO, op = kLE),
  FIL_TEST_PARAMS(
    output = AVG, algo = BATCH_TREE_REORG, op = kGE, leaf_algo = CATEGORICAL_LEAF, num_classes = 5),
  FIL_TEST_PARAMS(
    output = AVG, algo = BATCH_TREE_REORG, op = kGT, leaf_algo = CATEGORICAL_LEAF, num_classes = 6),
  FIL_TEST_PARAMS(
    output = AVG, algo = BATCH_TREE_REORG, op = kLE, leaf_algo = CATEGORICAL_LEAF, num_classes = 3),
  FIL_TEST_PARAMS(
    output = AVG, algo = BATCH_TREE_REORG, op = kLE, leaf_algo = CATEGORICAL_LEAF, num_classes = 5),
  FIL_TEST_PARAMS(
    output = AVG_CLASS, algo = TREE_REORG, op = kLE, leaf_algo = CATEGORICAL_LEAF, num_classes = 5),
  FIL_TEST_PARAMS(
    output = AVG, algo = TREE_REORG, op = kLE, leaf_algo = CATEGORICAL_LEAF, num_classes = 7),
  FIL_TEST_PARAMS(output = AVG, leaf_algo = CATEGORICAL_LEAF, num_classes = 6),
  FIL_TEST_PARAMS(output      = CLASS,
                  algo        = BATCH_TREE_REORG,
                  op          = kGE,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 5),
  FIL_TEST_PARAMS(num_trees   = 48,
                  output      = CLASS,
                  algo        = BATCH_TREE_REORG,
                  op          = kGT,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 6),
  FIL_TEST_PARAMS(num_trees   = 51,
                  output      = CLASS,
                  algo        = BATCH_TREE_REORG,
                  op          = kLE,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 3),
  FIL_TEST_PARAMS(output      = CLASS,
                  algo        = BATCH_TREE_REORG,
                  op          = kLE,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 5),
  FIL_TEST_PARAMS(
    output = CLASS, algo = TREE_REORG, op = kLE, leaf_algo = GROVE_PER_CLASS, num_classes = 5),
  FIL_TEST_PARAMS(num_trees   = 49,
                  output      = CLASS,
                  algo        = TREE_REORG,
                  op          = kLE,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 7),
  FIL_TEST_PARAMS(num_trees = 48, output = CLASS, leaf_algo = GROVE_PER_CLASS, num_classes = 6),
  FIL_TEST_PARAMS(print_forest_shape = true),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_classes = 2),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_trees = 19, num_classes = 20),
  FIL_TEST_PARAMS(node_categorical_prob = 0.5, feature_categorical_prob = 0.5),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 1.0),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 0.0),
  FIL_TEST_PARAMS(depth                         = 3,
                  node_categorical_prob         = 0.5,
                  feature_categorical_prob      = 0.5,
                  max_magnitude_of_matching_cat = 5),
};

TEST_P(TreeliteDenseFloat32FilTest, Import) { compare(); }
TEST_P(TreeliteDenseFloat64FilTest, Import) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteDenseFloat32FilTest,
                        testing::ValuesIn(import_dense_inputs));
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteDenseFloat64FilTest,
                        testing::ValuesIn(import_dense_inputs));
// parameter grid for treelite import tests with SPARSE / SPARSE8 storage
std::vector<FilTestParams> import_sparse_inputs = {
  FIL_TEST_PARAMS(),
  FIL_TEST_PARAMS(output = SIGMOID, op = kLE),
  FIL_TEST_PARAMS(output = SIGMOID_CLASS, op = kGT, num_classes = 2),
  FIL_TEST_PARAMS(output = AVG, op = kGE),
  FIL_TEST_PARAMS(output = AVG_CLASS, num_classes = 2),
  FIL_TEST_PARAMS(global_bias = 0.5),
  FIL_TEST_PARAMS(output = SIGMOID, global_bias = 0.5, op = kLE),
  FIL_TEST_PARAMS(output = AVG, global_bias = 0.5, op = kGT),
  FIL_TEST_PARAMS(
    output = AVG_CLASS, threshold = 1.0, global_bias = 0.5, op = kGE, num_classes = 2),
  FIL_TEST_PARAMS(algo = ALGO_AUTO),
  FIL_TEST_PARAMS(
    output = AVG_CLASS, threshold = 1.0, op = kGE, leaf_algo = CATEGORICAL_LEAF, num_classes = 10),
  FIL_TEST_PARAMS(output = AVG, algo = ALGO_AUTO, leaf_algo = CATEGORICAL_LEAF, num_classes = 4),
  FIL_TEST_PARAMS(output = AVG, op = kLE, leaf_algo = CATEGORICAL_LEAF, num_classes = 5),
  FIL_TEST_PARAMS(output = AVG, leaf_algo = CATEGORICAL_LEAF, num_classes = 3),
  FIL_TEST_PARAMS(output      = CLASS,
                  threshold   = 1.0,
                  global_bias = 0.5,
                  op          = kGE,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 10),
  FIL_TEST_PARAMS(
    num_trees = 52, output = CLASS, algo = ALGO_AUTO, leaf_algo = GROVE_PER_CLASS, num_classes = 4),
  FIL_TEST_PARAMS(output = CLASS, op = kLE, leaf_algo = GROVE_PER_CLASS, num_classes = 5),
  FIL_TEST_PARAMS(num_trees   = 51,
                  output      = CLASS,
                  global_bias = 0.5,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 3),
  FIL_TEST_PARAMS(num_trees   = 51,
                  output      = SIGMOID_CLASS,
                  global_bias = 0.5,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 3),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_classes = 2),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_trees = 19, num_classes = 20),
  FIL_TEST_PARAMS(node_categorical_prob = 0.5, feature_categorical_prob = 0.5),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 1.0),
  FIL_TEST_PARAMS(
    node_categorical_prob = 1.0, feature_categorical_prob = 1.0, cat_match_prob = 0.0),
  FIL_TEST_PARAMS(depth                         = 3,
                  node_categorical_prob         = 0.5,
                  feature_categorical_prob      = 0.5,
                  max_magnitude_of_matching_cat = 5),
};

TEST_P(TreeliteSparse16Float32FilTest, Import) { compare(); }
TEST_P(TreeliteSparse16Float64FilTest, Import) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteSparse16Float32FilTest,
                        testing::ValuesIn(import_sparse_inputs));
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteSparse16Float64FilTest,
                        testing::ValuesIn(import_sparse_inputs));
TEST_P(TreeliteSparse8FilTest, Import) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests, TreeliteSparse8FilTest, testing::ValuesIn(import_sparse_inputs));
// parameter grid for treelite import with AUTO storage selection; deeper trees
// force the sparse path, shallow ones the dense path
std::vector<FilTestParams> import_auto_inputs = {
  FIL_TEST_PARAMS(depth = 10, algo = ALGO_AUTO),
  FIL_TEST_PARAMS(depth = 15, algo = ALGO_AUTO),
  FIL_TEST_PARAMS(depth = 19, algo = ALGO_AUTO),
  FIL_TEST_PARAMS(depth = 19, algo = BATCH_TREE_REORG),
  FIL_TEST_PARAMS(
    depth = 10, output = AVG, algo = ALGO_AUTO, leaf_algo = CATEGORICAL_LEAF, num_classes = 3),
  FIL_TEST_PARAMS(depth       = 10,
                  num_trees   = 51,
                  output      = CLASS,
                  algo        = ALGO_AUTO,
                  leaf_algo   = GROVE_PER_CLASS,
                  num_classes = 3),
  FIL_TEST_PARAMS(leaf_algo = VECTOR_LEAF, num_classes = 3, algo = ALGO_AUTO),
#if 0
  FIL_TEST_PARAMS(depth = 19, output = AVG, algo = BATCH_TREE_REORG,
                  leaf_algo = CATEGORICAL_LEAF, num_classes = 6),
#endif
};

TEST_P(TreeliteAutoFloat32FilTest, Import) { compare(); }
TEST_P(TreeliteAutoFloat64FilTest, Import) { compare(); }
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteAutoFloat32FilTest,
                        testing::ValuesIn(import_auto_inputs));
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteAutoFloat64FilTest,
                        testing::ValuesIn(import_auto_inputs));
// adjust test parameters if the sparse8 format changes
// these forests exceed sparse8's feature-id / node-offset limits, so import
// must throw
std::vector<FilTestParams> import_throw_sparse8_inputs = {
  // too many features
  FIL_TEST_PARAMS(num_rows = 100, num_cols = 20000, depth = 10),
  // too many tree nodes
  FIL_TEST_PARAMS(depth = 16, num_trees = 5, leaf_prob = 0),
};

TEST_P(TreeliteThrowSparse8FilTest, Import) { check(); }
INSTANTIATE_TEST_CASE_P(FilTests,
                        TreeliteThrowSparse8FilTest,
                        testing::ValuesIn(import_throw_sparse8_inputs));
}  // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/holtwinters_test.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "time_series_datasets.h"
#include <algorithm>
#include <raft/core/handle.hpp>
#include <cuml/common/logger.hpp>
#include <cuml/tsa/holtwinters.h>
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/core/math.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <test_utils.h>
namespace ML {
/**
 * Parameter set for one HoltWinters fit/forecast test case. Field order
 * matters: the inputsf/inputsd tables below use aggregate initialization.
 */
template <typename T>
struct HoltWintersInputs {
  T* dataset_h;               // host pointer to training series (batch_size * n values)
  T* test;                    // host pointer to held-out ground truth for the horizon
  int n;                      // observations per batch member
  int h;                      // forecast horizon
  int batch_size;             // number of independent series fit in one call
  int frequency;              // seasonal period length
  ML::SeasonalType seasonal;  // additive vs. multiplicative seasonality
  int start_periods;          // seasons used to seed initial level/trend/season
  T epsilon;                  // optimizer convergence tolerance
  T mae_tolerance;            // max acceptable median absolute error vs. test data
};
/**
 * Fixture: fits a HoltWinters model on the training series from `params` and
 * produces an h-step-ahead forecast on the device. The TEST_P bodies then
 * pull the forecast back to the host and compare it against the held-out
 * test data via a median-absolute-error criterion.
 */
template <typename T>
class HoltWintersTest : public ::testing::TestWithParam<HoltWintersInputs<T>> {
 public:
  // Device buffers start empty on the fixture's stream; they are resized in
  // basicTest() once buffer_size() has reported the component lengths.
  HoltWintersTest()
    : params(::testing::TestWithParam<HoltWintersInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      level_ptr(0, stream),
      trend_ptr(0, stream),
      season_ptr(0, stream),
      SSE_error_ptr(0, stream),
      forecast_ptr(0, stream),
      data(0, stream)
  {
  }

  // Runs the whole fit + forecast pipeline; called from SetUp().
  void basicTest()
  {
    // Unpack the test parameters into fixture members.
    dataset_h = params.dataset_h;
    test = params.test;
    n = params.n;
    h = params.h;
    batch_size = params.batch_size;
    frequency = params.frequency;
    ML::SeasonalType seasonal = params.seasonal;
    start_periods = params.start_periods;
    epsilon = params.epsilon;
    mae_tolerance = params.mae_tolerance;
    // Query the buffer sizes the solver needs for this (n, batch, frequency).
    ML::HoltWinters::buffer_size(
      n,
      batch_size,
      frequency,
      &leveltrend_seed_len,     // = batch_size
      &season_seed_len,         // = frequency*batch_size
      &components_len,          // = (n-w_len)*batch_size
      &error_len,               // = batch_size
      &leveltrend_coef_offset,  // = (n-wlen-1)*batch_size (last row)
      &season_coef_offset);     // = (n-wlen-frequency)*batch_size(last freq rows)
    level_ptr.resize(components_len, stream);
    trend_ptr.resize(components_len, stream);
    season_ptr.resize(components_len, stream);
    SSE_error_ptr.resize(batch_size, stream);
    forecast_ptr.resize(batch_size * h, stream);
    data.resize(batch_size * n, stream);
    raft::update_device(data.data(), dataset_h, batch_size * n, stream);
    // NOTE(review): this local handle shadows the member `handle`; it is
    // constructed on the same stream, so all work stays ordered.
    raft::handle_t handle{stream};
    // Fit level/trend/season components and per-batch SSE.
    ML::HoltWinters::fit(handle,
                         n,
                         batch_size,
                         frequency,
                         start_periods,
                         seasonal,
                         epsilon,
                         data.data(),
                         level_ptr.data(),
                         trend_ptr.data(),
                         season_ptr.data(),
                         SSE_error_ptr.data());
    // Forecast h steps ahead from the fitted components.
    ML::HoltWinters::forecast(handle,
                              n,
                              batch_size,
                              frequency,
                              h,
                              seasonal,
                              level_ptr.data(),
                              trend_ptr.data(),
                              season_ptr.data(),
                              forecast_ptr.data());
    handle.sync_stream(stream);
  }

  void SetUp() override { basicTest(); }

 public:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  HoltWintersInputs<T> params;
  T *dataset_h, *test;  // host-side input pointers (owned by the test tables)
  rmm::device_uvector<T> data;
  int n, h;
  int leveltrend_seed_len, season_seed_len, components_len;
  int leveltrend_coef_offset, season_coef_offset;
  int error_len;
  int batch_size, frequency, start_periods;
  rmm::device_uvector<T> SSE_error_ptr, level_ptr, trend_ptr, season_ptr, forecast_ptr;
  T epsilon, mae_tolerance;
};
// Test cases, fields in aggregate-init order:
// {train, test, n, h, batch_size, frequency, seasonal, start_periods,
//  epsilon, mae_tolerance}. Covers additive and multiplicative seasonality on
// raw and normalized series; float table first, then the double table with a
// tighter epsilon.
const std::vector<HoltWintersInputs<float>> inputsf = {{additive_trainf.data(),
                                                        additive_testf.data(),
                                                        90,
                                                        10,
                                                        1,
                                                        25,
                                                        ML::SeasonalType::ADDITIVE,
                                                        2,
                                                        2.24e-3,
                                                        1e-6},
                                                       {multiplicative_trainf.data(),
                                                        multiplicative_testf.data(),
                                                        132,
                                                        12,
                                                        1,
                                                        12,
                                                        ML::SeasonalType::MULTIPLICATIVE,
                                                        2,
                                                        2.24e-3,
                                                        3e-2},
                                                       {additive_normalized_trainf.data(),
                                                        additive_normalized_testf.data(),
                                                        90,
                                                        10,
                                                        1,
                                                        25,
                                                        ML::SeasonalType::ADDITIVE,
                                                        2,
                                                        2.24e-3,
                                                        1e-6},
                                                       {multiplicative_normalized_trainf.data(),
                                                        multiplicative_normalized_testf.data(),
                                                        132,
                                                        12,
                                                        1,
                                                        12,
                                                        ML::SeasonalType::MULTIPLICATIVE,
                                                        2,
                                                        2.24e-3,
                                                        2.5e-1}};
const std::vector<HoltWintersInputs<double>> inputsd = {{additive_traind.data(),
                                                         additive_testd.data(),
                                                         90,
                                                         10,
                                                         1,
                                                         25,
                                                         ML::SeasonalType::ADDITIVE,
                                                         2,
                                                         2.24e-7,
                                                         1e-6},
                                                        {multiplicative_traind.data(),
                                                         multiplicative_testd.data(),
                                                         132,
                                                         12,
                                                         1,
                                                         12,
                                                         ML::SeasonalType::MULTIPLICATIVE,
                                                         2,
                                                         2.24e-7,
                                                         3e-2},
                                                        {additive_normalized_traind.data(),
                                                         additive_normalized_testd.data(),
                                                         90,
                                                         10,
                                                         1,
                                                         25,
                                                         ML::SeasonalType::ADDITIVE,
                                                         2,
                                                         2.24e-7,
                                                         1e-6},
                                                        {multiplicative_normalized_traind.data(),
                                                         multiplicative_normalized_testd.data(),
                                                         132,
                                                         12,
                                                         1,
                                                         12,
                                                         ML::SeasonalType::MULTIPLICATIVE,
                                                         2,
                                                         2.24e-7,
                                                         5e-2}};
/**
 * Min-max normalizes `data` in place: each element is mapped to
 * (x - min) / (max - min), i.e. into [0, 1].
 * If every element is equal, the range is zero; the data is set to 0 instead
 * of producing NaN/inf from a division by zero.
 *
 * @param data host pointer, modified in place
 * @param len  number of elements
 */
template <typename T>
void normalise(T* data, int len)
{
  T min = *std::min_element(data, data + len);
  T max = *std::max_element(data, data + len);
  if (max == min) {  // constant series: avoid 0/0 -> NaN
    std::fill(data, data + len, T(0));
    return;
  }
  for (int i = 0; i < len; i++) {
    data[i] = (data[i] - min) / (max - min);
  }
}

/**
 * Computes the median absolute error between `test` and `forecast` after
 * min-max normalizing both series. NOTE: both input arrays are modified in
 * place by the normalization.
 *
 * @param test       host pointer to batch_size * h ground-truth values
 * @param forecast   host pointer to batch_size * h forecast values
 * @param batch_size number of series in the batch
 * @param h          forecast horizon per series
 * @return the median of all batch_size * h absolute errors
 */
template <typename T>
T calculate_MAE(T* test, T* forecast, int batch_size, int h)
{
  const int len = batch_size * h;
  normalise(test, len);
  normalise(forecast, len);
  std::vector<T> ae(len);
  for (int i = 0; i < len; i++) {
    ae[i] = std::abs(test[i] - forecast[i]);
  }
  std::sort(ae.begin(), ae.end());
  // Median over all batch_size * h errors. The previous code indexed with h
  // alone, which was only correct when batch_size == 1.
  if (len % 2 == 0) { return (ae[len / 2 - 1] + ae[len / 2]) / 2; }
  return ae[len / 2];
}
// Float precision: pull the forecast to the host and require the median
// absolute error against the held-out test data to be under the tolerance.
typedef HoltWintersTest<float> HoltWintersTestF;
TEST_P(HoltWintersTestF, Fit)
{
  std::vector<float> forecast_h(batch_size * h);
  raft::update_host(forecast_h.data(), forecast_ptr.data(), batch_size * h, stream);
  raft::print_host_vector("forecast", forecast_h.data(), batch_size * h, std::cout);
  float mae = calculate_MAE<float>(test, forecast_h.data(), batch_size, h);
  CUML_LOG_DEBUG("MAE: %f", mae);
  ASSERT_TRUE(mae < mae_tolerance);
}
// Double precision variant of the same check.
typedef HoltWintersTest<double> HoltWintersTestD;
TEST_P(HoltWintersTestD, Fit)
{
  std::vector<double> forecast_h(batch_size * h);
  raft::update_host(forecast_h.data(), forecast_ptr.data(), batch_size * h, stream);
  raft::print_host_vector("forecast", forecast_h.data(), batch_size * h, std::cout);
  double mae = calculate_MAE<double>(test, forecast_h.data(), batch_size, h);
  CUML_LOG_DEBUG("MAE: %f", mae);
  ASSERT_TRUE(mae < mae_tolerance);
}
INSTANTIATE_TEST_CASE_P(HoltWintersTests, HoltWintersTestF, ::testing::ValuesIn(inputsf));
INSTANTIATE_TEST_CASE_P(HoltWintersTests, HoltWintersTestD, ::testing::ValuesIn(inputsd));
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/hdbscan_test.cu | /*
* Copyright (c) 2021-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "hdbscan_inputs.hpp"
#include <raft/core/handle.hpp>
#include <gtest/gtest.h>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <vector>
#include <cuml/cluster/hdbscan.hpp>
#include <hdbscan/detail/condense.cuh>
#include <hdbscan/detail/extract.cuh>
#include <hdbscan/detail/reachability.cuh>
#include <raft/stats/adjusted_rand_index.cuh>
#include <raft/cluster/detail/agglomerative.cuh>
#include <raft/distance/distance_types.hpp>
#include <raft/linalg/transpose.cuh>
#include <raft/sparse/coo.hpp>
#include <raft/sparse/op/sort.cuh>
#include <rmm/device_uvector.hpp>
#include <thrust/execution_policy.h>
#include <thrust/transform.h>
#include "../prims/test_utils.h"
namespace ML {
namespace HDBSCAN {
using namespace std;
// gtest requires parameterized-test inputs to be streamable. The payload
// (full point set + labels) is too large to be useful, so print nothing.
template <typename T, typename IdxT>
::std::ostream& operator<<(::std::ostream& os, const HDBSCANInputs<T, IdxT>& dims)
{
  return os;
}
/**
 * End-to-end HDBSCAN test: clusters the input points and scores the produced
 * labels against the expected labels with the adjusted Rand index; the TEST_P
 * body requires score >= 0.85.
 */
template <typename T, typename IdxT>
class HDBSCANTest : public ::testing::TestWithParam<HDBSCANInputs<T, IdxT>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<HDBSCANInputs<T, IdxT>>::GetParam();
    rmm::device_uvector<T> data(params.n_row * params.n_col, handle.get_stream());
    // Allocate result labels and expected labels on device
    rmm::device_uvector<IdxT> labels_ref(params.n_row, handle.get_stream());
    raft::copy(data.data(), params.data.data(), data.size(), handle.get_stream());
    raft::copy(labels_ref.data(), params.expected_labels.data(), params.n_row, handle.get_stream());
    // Output buffers backing the hdbscan_output view: dendrogram children /
    // deltas / sizes, flat labels, MST edges, and membership probabilities.
    rmm::device_uvector<IdxT> out_children(params.n_row * 2, handle.get_stream());
    rmm::device_uvector<T> out_deltas(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> out_sizes(params.n_row * 2, handle.get_stream());
    rmm::device_uvector<IdxT> out_labels(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> mst_src(params.n_row - 1, handle.get_stream());
    rmm::device_uvector<IdxT> mst_dst(params.n_row - 1, handle.get_stream());
    rmm::device_uvector<T> mst_weights(params.n_row - 1, handle.get_stream());
    rmm::device_uvector<T> core_dists(params.n_row, handle.get_stream());
    rmm::device_uvector<T> out_probabilities(params.n_row, handle.get_stream());
    Logger::get().setLevel(CUML_LEVEL_DEBUG);
    HDBSCAN::Common::hdbscan_output<IdxT, T> out(handle,
                                                 params.n_row,
                                                 out_labels.data(),
                                                 out_probabilities.data(),
                                                 out_children.data(),
                                                 out_sizes.data(),
                                                 out_deltas.data(),
                                                 mst_src.data(),
                                                 mst_dst.data(),
                                                 mst_weights.data());
    HDBSCAN::Common::HDBSCANParams hdbscan_params;
    hdbscan_params.min_cluster_size = params.min_cluster_size;
    hdbscan_params.min_samples = params.min_pts;
    hdbscan(handle,
            data.data(),
            params.n_row,
            params.n_col,
            raft::distance::DistanceType::L2SqrtExpanded,
            hdbscan_params,
            out,
            core_dists.data());
    handle.sync_stream(handle.get_stream());
    score = raft::stats::adjusted_rand_index(
      out.get_labels(), labels_ref.data(), params.n_row, handle.get_stream());
    // On failure, dump both label vectors to aid debugging.
    if (score < 0.85) {
      std::cout << "Test failed. score=" << score << std::endl;
      raft::print_device_vector("actual labels", out.get_labels(), params.n_row, std::cout);
      raft::print_device_vector("expected labels", labels_ref.data(), params.n_row, std::cout);
    }
  }
  void SetUp() override { basicTest(); }

 protected:
  HDBSCANInputs<T, IdxT> params;
  // NOTE(review): this member is never written — basicTest() uses a local
  // device vector of the same name; presumably dead.
  IdxT* labels_ref;
  double score;
};
typedef HDBSCANTest<float, int> HDBSCANTestF_Int;
TEST_P(HDBSCANTestF_Int, Result) { EXPECT_TRUE(score >= 0.85); }
INSTANTIATE_TEST_CASE_P(HDBSCANTest, HDBSCANTestF_Int, ::testing::ValuesIn(hdbscan_inputsf2));
/**
 * Exercises the condense + extract stages in isolation: sorts the MST edges
 * given in the params, builds the dendrogram on the host, condenses it, and
 * extracts clusters. Scoring is currently commented out, so `score` is never
 * assigned (see the disabled test case below).
 */
template <typename T, typename IdxT>
class ClusterCondensingTest : public ::testing::TestWithParam<ClusterCondensingInputs<T, IdxT>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<ClusterCondensingInputs<T, IdxT>>::GetParam();
    // Copy the MST edge list (src, dst, weight) to the device.
    rmm::device_uvector<IdxT> mst_src(params.n_row - 1, handle.get_stream());
    rmm::device_uvector<IdxT> mst_dst(params.n_row - 1, handle.get_stream());
    rmm::device_uvector<T> mst_data(params.n_row - 1, handle.get_stream());
    raft::copy(mst_src.data(), params.mst_src.data(), params.mst_src.size(), handle.get_stream());
    raft::copy(mst_dst.data(), params.mst_dst.data(), params.mst_dst.size(), handle.get_stream());
    raft::copy(
      mst_data.data(), params.mst_data.data(), params.mst_data.size(), handle.get_stream());
    rmm::device_uvector<IdxT> expected_device(params.expected.size(), handle.get_stream());
    raft::copy(
      expected_device.data(), params.expected.data(), params.expected.size(), handle.get_stream());
    rmm::device_uvector<IdxT> out_children(params.n_row * 2, handle.get_stream());
    rmm::device_uvector<IdxT> out_size(params.n_row, handle.get_stream());
    rmm::device_uvector<T> out_delta(params.n_row, handle.get_stream());
    Logger::get().setLevel(CUML_LEVEL_DEBUG);
    // Dendrogram construction expects edges sorted by weight.
    raft::sparse::op::coo_sort_by_weight(
      mst_src.data(), mst_dst.data(), mst_data.data(), (IdxT)mst_src.size(), handle.get_stream());
    /**
     * Build dendrogram of MST
     */
    raft::cluster::detail::build_dendrogram_host(handle,
                                                 mst_src.data(),
                                                 mst_dst.data(),
                                                 mst_data.data(),
                                                 params.n_row - 1,
                                                 out_children.data(),
                                                 out_delta.data(),
                                                 out_size.data());
    /**
     * Condense Hierarchy
     */
    HDBSCAN::Common::CondensedHierarchy<IdxT, T> condensed_tree(handle, params.n_row);
    HDBSCAN::detail::Condense::build_condensed_hierarchy(handle,
                                                         out_children.data(),
                                                         out_delta.data(),
                                                         out_size.data(),
                                                         params.min_cluster_size,
                                                         params.n_row,
                                                         condensed_tree);
    handle.sync_stream(handle.get_stream());
    // Extract flat clusters (EOM selection, no single-cluster allowance).
    rmm::device_uvector<IdxT> labels(params.n_row, handle.get_stream());
    rmm::device_uvector<T> stabilities(condensed_tree.get_n_clusters(), handle.get_stream());
    rmm::device_uvector<T> probabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> label_map(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> inverse_label_map(0, handle.get_stream());
    HDBSCAN::detail::Extract::extract_clusters(handle,
                                               condensed_tree,
                                               params.n_row,
                                               labels.data(),
                                               stabilities.data(),
                                               probabilities.data(),
                                               label_map.data(),
                                               HDBSCAN::Common::CLUSTER_SELECTION_METHOD::EOM,
                                               inverse_label_map,
                                               false);
    // CUML_LOG_DEBUG("Evaluating results");
    // if (params.expected.size() == params.n_row) {
    //   score = MLCommon::Metrics::compute_adjusted_rand_index(
    //     labels.data(), expected_device.data(), params.n_row,
    //     handle.get_stream());
    // } else {
    //   score = 1.0;
    // }
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  ClusterCondensingInputs<T, IdxT> params;
  double score;
};
#if 0
// gtest-1.11.0 makes it a runtime error to define and not instantiate this test case.
typedef ClusterCondensingTest<float, int> ClusterCondensingTestF_Int;
TEST_P(ClusterCondensingTestF_Int, Result) { EXPECT_TRUE(score == 1.0); }
// This will be reactivated in 21.08 with better, contrived examples to
// test Cluster Condensation correctly
// INSTANTIATE_TEST_CASE_P(ClusterCondensingTest, ClusterCondensingTestF_Int,
// ::testing::ValuesIn(cluster_condensing_inputs));
#endif
/**
 * Drives cluster extraction from a pre-built condensed hierarchy supplied in
 * the params, then checks the resulting membership probabilities against the
 * expected values and scores the labels with the adjusted Rand index.
 */
template <typename T, typename IdxT>
class ClusterSelectionTest : public ::testing::TestWithParam<ClusterSelectionInputs<T, IdxT>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<ClusterSelectionInputs<T, IdxT>>::GetParam();
    Logger::get().setLevel(CUML_LEVEL_DEBUG);
    // Device copies of the condensed-tree arrays from the test params.
    rmm::device_uvector<IdxT> condensed_parents(params.condensed_parents.size(),
                                                handle.get_stream());
    rmm::device_uvector<IdxT> condensed_children(params.condensed_children.size(),
                                                 handle.get_stream());
    rmm::device_uvector<T> condensed_lambdas(params.condensed_lambdas.size(), handle.get_stream());
    rmm::device_uvector<IdxT> condensed_sizes(params.condensed_sizes.size(), handle.get_stream());
    // outputs
    rmm::device_uvector<T> stabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<T> probabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> labels(params.n_row, handle.get_stream());
    raft::copy(condensed_parents.data(),
               params.condensed_parents.data(),
               condensed_parents.size(),
               handle.get_stream());
    raft::copy(condensed_children.data(),
               params.condensed_children.data(),
               condensed_children.size(),
               handle.get_stream());
    raft::copy(condensed_lambdas.data(),
               params.condensed_lambdas.data(),
               condensed_lambdas.size(),
               handle.get_stream());
    raft::copy(condensed_sizes.data(),
               params.condensed_sizes.data(),
               condensed_sizes.size(),
               handle.get_stream());
    ML::HDBSCAN::Common::CondensedHierarchy<IdxT, T> condensed_tree(handle,
                                                                    params.n_row,
                                                                    params.condensed_parents.size(),
                                                                    condensed_parents.data(),
                                                                    condensed_children.data(),
                                                                    condensed_lambdas.data(),
                                                                    condensed_sizes.data());
    rmm::device_uvector<IdxT> label_map(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> inverse_label_map(0, handle.get_stream());
    ML::HDBSCAN::detail::Extract::extract_clusters(handle,
                                                   condensed_tree,
                                                   params.n_row,
                                                   labels.data(),
                                                   stabilities.data(),
                                                   probabilities.data(),
                                                   label_map.data(),
                                                   params.cluster_selection_method,
                                                   inverse_label_map,
                                                   params.allow_single_cluster,
                                                   0,
                                                   params.cluster_selection_epsilon);
    handle.sync_stream(handle.get_stream());
    // Membership probabilities must match the reference values closely.
    ASSERT_TRUE(MLCommon::devArrMatch(probabilities.data(),
                                      params.probabilities.data(),
                                      params.n_row,
                                      MLCommon::CompareApprox<float>(1e-4),
                                      handle.get_stream()));
    rmm::device_uvector<IdxT> labels_ref(params.n_row, handle.get_stream());
    raft::update_device(labels_ref.data(), params.labels.data(), params.n_row, handle.get_stream());
    score = raft::stats::adjusted_rand_index(
      labels.data(), labels_ref.data(), params.n_row, handle.get_stream());
    handle.sync_stream(handle.get_stream());
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  ClusterSelectionInputs<T, IdxT> params;
  T score;
};
typedef ClusterSelectionTest<float, int> ClusterSelectionTestF_Int;
TEST_P(ClusterSelectionTestF_Int, Result) { EXPECT_TRUE(score == 1.0); }
INSTANTIATE_TEST_CASE_P(ClusterSelectionTest,
                        ClusterSelectionTestF_Int,
                        ::testing::ValuesIn(cluster_selection_inputs));
template <typename IdxT>
void transformLabels(const raft::handle_t& handle, IdxT* labels, IdxT* label_map, IdxT m)
{
thrust::transform(
handle.get_thrust_policy(), labels, labels + m, labels, [label_map] __device__(IdxT label) {
if (label != -1) return label_map[label];
return -1;
});
}
// This test was constructed in the following manner: The same condensed tree and set of selected
// clusters need to be passed to the reference implementation and then compare the results from
// cuML and the reference implementation for an approximate match of probabilities. To fetch the
// condensed hierarchy in the same format as required by the reference implementation, a simple
// python script can be written:
// 1. Print the parents, children, lambdas and sizes array of the condensed hierarchy.
// 2. Convert them into a list ``condensed_tree`` of tuples where each tuples is of the form.
// ``(parents[i], children[i], lambdas[i], sizes[i])``
// 3. Convert the list into a numpy array with the following command:
//    ``condensed_tree_array = np.array(condensed_tree, dtype=[('parent', np.intp), ('child',
// np.intp), ('lambda_val', float), ('child_size',
// np.intp)])``
// 4. Store it in a pickle file.
// The reference source code is modified in the following way: Edit the raw tree in the init
// function of the PredictionData object in prediction.py by loading it from the pickle file. Also
// edit the selected clusters array. Do the same in the all_points_membership_vectors function and
// the approximate_predict functions.
/**
 * Compares all-points membership vectors against probabilities produced by
 * the reference (CPU hdbscan) implementation, following the recipe in the
 * comment block above: the same condensed tree is fed to both and the
 * resulting soft-membership matrices must match.
 */
template <typename T, typename IdxT>
class AllPointsMembershipVectorsTest
  : public ::testing::TestWithParam<AllPointsMembershipVectorsInputs<T, IdxT>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<AllPointsMembershipVectorsInputs<T, IdxT>>::GetParam();
    // Device copies of the condensed-tree arrays from the test params.
    rmm::device_uvector<IdxT> condensed_parents(params.condensed_parents.size(),
                                                handle.get_stream());
    rmm::device_uvector<IdxT> condensed_children(params.condensed_children.size(),
                                                 handle.get_stream());
    rmm::device_uvector<T> condensed_lambdas(params.condensed_lambdas.size(), handle.get_stream());
    rmm::device_uvector<IdxT> condensed_sizes(params.condensed_sizes.size(), handle.get_stream());
    raft::copy(condensed_parents.data(),
               params.condensed_parents.data(),
               condensed_parents.size(),
               handle.get_stream());
    raft::copy(condensed_children.data(),
               params.condensed_children.data(),
               condensed_children.size(),
               handle.get_stream());
    raft::copy(condensed_lambdas.data(),
               params.condensed_lambdas.data(),
               condensed_lambdas.size(),
               handle.get_stream());
    raft::copy(condensed_sizes.data(),
               params.condensed_sizes.data(),
               condensed_sizes.size(),
               handle.get_stream());
    rmm::device_uvector<T> data(params.n_row * params.n_col, handle.get_stream());
    raft::copy(data.data(), params.data.data(), data.size(), handle.get_stream());
    ML::HDBSCAN::Common::CondensedHierarchy<IdxT, T> condensed_tree(handle,
                                                                    params.n_row,
                                                                    params.condensed_parents.size(),
                                                                    condensed_parents.data(),
                                                                    condensed_children.data(),
                                                                    condensed_lambdas.data(),
                                                                    condensed_sizes.data());
    rmm::device_uvector<IdxT> label_map(params.n_row, handle.get_stream());
    // intermediate outputs
    rmm::device_uvector<T> stabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<T> probabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> labels(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> inverse_label_map(0, handle.get_stream());
    int n_selected_clusters =
      ML::HDBSCAN::detail::Extract::extract_clusters(handle,
                                                     condensed_tree,
                                                     params.n_row,
                                                     labels.data(),
                                                     stabilities.data(),
                                                     probabilities.data(),
                                                     label_map.data(),
                                                     params.cluster_selection_method,
                                                     inverse_label_map,
                                                     params.allow_single_cluster,
                                                     0,
                                                     params.cluster_selection_epsilon);
    // One membership value per (point, selected cluster) pair.
    rmm::device_uvector<T> membership_vec(params.n_row * n_selected_clusters, handle.get_stream());
    // No core distances needed for the all-points variant, hence nullptr.
    ML::HDBSCAN::Common::PredictionData<IdxT, T> prediction_data_(
      handle, params.n_row, params.n_col, nullptr);
    transformLabels(handle, labels.data(), label_map.data(), params.n_row);
    ML::HDBSCAN::Common::generate_prediction_data(handle,
                                                  condensed_tree,
                                                  labels.data(),
                                                  inverse_label_map.data(),
                                                  n_selected_clusters,
                                                  prediction_data_);
    ML::compute_all_points_membership_vectors(handle,
                                              condensed_tree,
                                              prediction_data_,
                                              data.data(),
                                              raft::distance::DistanceType::L2SqrtExpanded,
                                              membership_vec.data());
    ASSERT_TRUE(MLCommon::devArrMatch(membership_vec.data(),
                                      params.expected_probabilities.data(),
                                      params.n_row * n_selected_clusters,
                                      MLCommon::CompareApprox<float>(1e-5),
                                      handle.get_stream()));
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  AllPointsMembershipVectorsInputs<T, IdxT> params;
  // T score;
};
typedef AllPointsMembershipVectorsTest<float, int> AllPointsMembershipVectorsTestF_Int;
TEST_P(AllPointsMembershipVectorsTestF_Int, Result) { EXPECT_TRUE(true); }
INSTANTIATE_TEST_CASE_P(AllPointsMembershipVectorsTest,
                        AllPointsMembershipVectorsTestF_Int,
                        ::testing::ValuesIn(all_points_membership_vectors_inputs));
/**
 * Tests out-of-sample prediction (approximate_predict): builds prediction
 * data from a given condensed tree, predicts labels/probabilities for new
 * points, and compares them to the reference implementation's output.
 * All assertions run inside basicTest(); the TEST_P body is a no-op.
 */
template <typename T, typename IdxT>
class ApproximatePredictTest : public ::testing::TestWithParam<ApproximatePredictInputs<T, IdxT>> {
 public:
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<ApproximatePredictInputs<T, IdxT>>::GetParam();
    // Device copies of the condensed-tree arrays from the test params.
    rmm::device_uvector<IdxT> condensed_parents(params.condensed_parents.size(),
                                                handle.get_stream());
    rmm::device_uvector<IdxT> condensed_children(params.condensed_children.size(),
                                                 handle.get_stream());
    rmm::device_uvector<T> condensed_lambdas(params.condensed_lambdas.size(), handle.get_stream());
    rmm::device_uvector<IdxT> condensed_sizes(params.condensed_sizes.size(), handle.get_stream());
    raft::copy(condensed_parents.data(),
               params.condensed_parents.data(),
               condensed_parents.size(),
               handle.get_stream());
    raft::copy(condensed_children.data(),
               params.condensed_children.data(),
               condensed_children.size(),
               handle.get_stream());
    raft::copy(condensed_lambdas.data(),
               params.condensed_lambdas.data(),
               condensed_lambdas.size(),
               handle.get_stream());
    raft::copy(condensed_sizes.data(),
               params.condensed_sizes.data(),
               condensed_sizes.size(),
               handle.get_stream());
    // Training data and the new points to be predicted.
    rmm::device_uvector<T> data(params.n_row * params.n_col, handle.get_stream());
    raft::copy(data.data(), params.data.data(), data.size(), handle.get_stream());
    rmm::device_uvector<T> points_to_predict(params.n_points_to_predict * params.n_col,
                                             handle.get_stream());
    raft::copy(points_to_predict.data(),
               params.points_to_predict.data(),
               points_to_predict.size(),
               handle.get_stream());
    ML::HDBSCAN::Common::CondensedHierarchy<IdxT, T> condensed_tree(handle,
                                                                    params.n_row,
                                                                    params.condensed_parents.size(),
                                                                    condensed_parents.data(),
                                                                    condensed_children.data(),
                                                                    condensed_lambdas.data(),
                                                                    condensed_sizes.data());
    rmm::device_uvector<IdxT> label_map(params.n_row, handle.get_stream());
    // intermediate outputs
    rmm::device_uvector<T> stabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<T> probabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> labels(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> inverse_label_map(0, handle.get_stream());
    int n_selected_clusters =
      ML::HDBSCAN::detail::Extract::extract_clusters(handle,
                                                     condensed_tree,
                                                     params.n_row,
                                                     labels.data(),
                                                     stabilities.data(),
                                                     probabilities.data(),
                                                     label_map.data(),
                                                     params.cluster_selection_method,
                                                     inverse_label_map,
                                                     params.allow_single_cluster,
                                                     0,
                                                     params.cluster_selection_epsilon);
    rmm::device_uvector<T> core_dists{static_cast<size_t>(params.n_row), handle.get_stream()};
    ML::HDBSCAN::Common::PredictionData<IdxT, T> pred_data(
      handle, params.n_row, params.n_col, core_dists.data());
    auto stream = handle.get_stream();
    // Recompute core distances via the mutual reachability graph so the
    // prediction data matches what a full hdbscan() run would have produced.
    rmm::device_uvector<IdxT> mutual_reachability_indptr(params.n_row + 1, stream);
    raft::sparse::COO<T, IdxT> mutual_reachability_coo(stream,
                                                       (params.min_samples + 1) * params.n_row * 2);
    ML::HDBSCAN::detail::Reachability::mutual_reachability_graph(
      handle,
      data.data(),
      (size_t)params.n_row,
      (size_t)params.n_col,
      raft::distance::DistanceType::L2SqrtExpanded,
      params.min_samples + 1,
      (float)1.0,
      mutual_reachability_indptr.data(),
      pred_data.get_core_dists(),
      mutual_reachability_coo);
    transformLabels(handle, labels.data(), label_map.data(), params.n_row);
    ML::HDBSCAN::Common::generate_prediction_data(handle,
                                                  condensed_tree,
                                                  labels.data(),
                                                  inverse_label_map.data(),
                                                  n_selected_clusters,
                                                  pred_data);
    // outputs
    rmm::device_uvector<IdxT> out_labels(params.n_points_to_predict, handle.get_stream());
    rmm::device_uvector<T> out_probabilities(params.n_points_to_predict, handle.get_stream());
    ML::out_of_sample_predict(handle,
                              condensed_tree,
                              pred_data,
                              const_cast<float*>(data.data()),
                              labels.data(),
                              const_cast<float*>(points_to_predict.data()),
                              (size_t)params.n_points_to_predict,
                              raft::distance::DistanceType::L2SqrtExpanded,
                              params.min_samples,
                              out_labels.data(),
                              out_probabilities.data());
    handle.sync_stream(handle.get_stream());
    // Full device sync in addition to the stream sync (belt-and-braces).
    cudaDeviceSynchronize();
    // Labels must match exactly; probabilities approximately.
    ASSERT_TRUE(MLCommon::devArrMatch(out_labels.data(),
                                      params.expected_labels.data(),
                                      params.n_points_to_predict,
                                      MLCommon::Compare<int>(),
                                      handle.get_stream()));
    ASSERT_TRUE(MLCommon::devArrMatch(out_probabilities.data(),
                                      params.expected_probabilities.data(),
                                      params.n_points_to_predict,
                                      MLCommon::CompareApprox<float>(1e-2),
                                      handle.get_stream()));
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  ApproximatePredictInputs<T, IdxT> params;
  // T score;
};
typedef ApproximatePredictTest<float, int> ApproximatePredictTestF_Int;
TEST_P(ApproximatePredictTestF_Int, Result) { EXPECT_TRUE(true); }
INSTANTIATE_TEST_CASE_P(ApproximatePredictTest,
                        ApproximatePredictTestF_Int,
                        ::testing::ValuesIn(approximate_predict_inputs));
/**
 * Tests compute_membership_vector for out-of-sample points: builds prediction
 * data from a given condensed tree and compares the soft-membership matrix of
 * the new points against the reference implementation's output. Assertions
 * run inside basicTest(); the TEST_P body is a no-op.
 */
template <typename T, typename IdxT>
class MembershipVectorTest : public ::testing::TestWithParam<MembershipVectorInputs<T, IdxT>> {
 protected:
  void basicTest()
  {
    raft::handle_t handle;
    params = ::testing::TestWithParam<MembershipVectorInputs<T, IdxT>>::GetParam();
    // Device copies of the condensed-tree arrays from the test params.
    rmm::device_uvector<IdxT> condensed_parents(params.condensed_parents.size(),
                                                handle.get_stream());
    rmm::device_uvector<IdxT> condensed_children(params.condensed_children.size(),
                                                 handle.get_stream());
    rmm::device_uvector<T> condensed_lambdas(params.condensed_lambdas.size(), handle.get_stream());
    rmm::device_uvector<IdxT> condensed_sizes(params.condensed_sizes.size(), handle.get_stream());
    raft::copy(condensed_parents.data(),
               params.condensed_parents.data(),
               condensed_parents.size(),
               handle.get_stream());
    raft::copy(condensed_children.data(),
               params.condensed_children.data(),
               condensed_children.size(),
               handle.get_stream());
    raft::copy(condensed_lambdas.data(),
               params.condensed_lambdas.data(),
               condensed_lambdas.size(),
               handle.get_stream());
    raft::copy(condensed_sizes.data(),
               params.condensed_sizes.data(),
               condensed_sizes.size(),
               handle.get_stream());
    // Training data and the new points to be scored.
    rmm::device_uvector<T> data(params.n_row * params.n_col, handle.get_stream());
    raft::copy(data.data(), params.data.data(), data.size(), handle.get_stream());
    rmm::device_uvector<T> points_to_predict(params.n_points_to_predict * params.n_col,
                                             handle.get_stream());
    raft::copy(points_to_predict.data(),
               params.points_to_predict.data(),
               points_to_predict.size(),
               handle.get_stream());
    ML::HDBSCAN::Common::CondensedHierarchy<IdxT, T> condensed_tree(handle,
                                                                    params.n_row,
                                                                    params.condensed_parents.size(),
                                                                    condensed_parents.data(),
                                                                    condensed_children.data(),
                                                                    condensed_lambdas.data(),
                                                                    condensed_sizes.data());
    rmm::device_uvector<IdxT> label_map(params.n_row, handle.get_stream());
    // intermediate outputs
    rmm::device_uvector<T> stabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<T> probabilities(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> labels(params.n_row, handle.get_stream());
    rmm::device_uvector<IdxT> inverse_label_map(0, handle.get_stream());
    int n_selected_clusters =
      ML::HDBSCAN::detail::Extract::extract_clusters(handle,
                                                     condensed_tree,
                                                     params.n_row,
                                                     labels.data(),
                                                     stabilities.data(),
                                                     probabilities.data(),
                                                     label_map.data(),
                                                     params.cluster_selection_method,
                                                     inverse_label_map,
                                                     params.allow_single_cluster,
                                                     0,
                                                     params.cluster_selection_epsilon);
    // One membership value per (predicted point, selected cluster) pair.
    rmm::device_uvector<T> membership_vec(params.n_points_to_predict * n_selected_clusters,
                                          handle.get_stream());
    rmm::device_uvector<T> core_dists{static_cast<size_t>(params.n_row), handle.get_stream()};
    ML::HDBSCAN::Common::PredictionData<IdxT, T> prediction_data_(
      handle, params.n_row, params.n_col, core_dists.data());
    auto stream = handle.get_stream();
    // Recompute core distances via the mutual reachability graph so the
    // prediction data matches what a full hdbscan() run would have produced.
    rmm::device_uvector<IdxT> mutual_reachability_indptr(params.n_row + 1, stream);
    raft::sparse::COO<T, IdxT> mutual_reachability_coo(stream,
                                                       (params.min_samples + 1) * params.n_row * 2);
    ML::HDBSCAN::detail::Reachability::mutual_reachability_graph(
      handle,
      data.data(),
      (size_t)params.n_row,
      (size_t)params.n_col,
      raft::distance::DistanceType::L2SqrtExpanded,
      params.min_samples + 1,
      (float)1.0,
      mutual_reachability_indptr.data(),
      prediction_data_.get_core_dists(),
      mutual_reachability_coo);
    transformLabels(handle, labels.data(), label_map.data(), params.n_row);
    ML::HDBSCAN::Common::generate_prediction_data(handle,
                                                  condensed_tree,
                                                  labels.data(),
                                                  inverse_label_map.data(),
                                                  n_selected_clusters,
                                                  prediction_data_);
    ML::compute_membership_vector(handle,
                                  condensed_tree,
                                  prediction_data_,
                                  data.data(),
                                  points_to_predict.data(),
                                  params.n_points_to_predict,
                                  params.min_samples,
                                  raft::distance::DistanceType::L2SqrtExpanded,
                                  membership_vec.data());
    ASSERT_TRUE(MLCommon::devArrMatch(membership_vec.data(),
                                      params.expected_probabilities.data(),
                                      params.n_points_to_predict * n_selected_clusters,
                                      MLCommon::CompareApprox<float>(1e-4),
                                      handle.get_stream()));
  }
  void SetUp() override { basicTest(); }
  void TearDown() override {}

 protected:
  MembershipVectorInputs<T, IdxT> params;
  // T score;
};
typedef MembershipVectorTest<float, int> MembershipVectorTestF_Int;
TEST_P(MembershipVectorTestF_Int, Result) { EXPECT_TRUE(true); }
INSTANTIATE_TEST_CASE_P(MembershipVectorTest,
                        MembershipVectorTestF_Int,
                        ::testing::ValuesIn(membership_vector_inputs));
} // namespace HDBSCAN
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/linkage_test.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <vector>
#include <cuml/cluster/linkage.hpp>
#include <cuml/datasets/make_blobs.hpp>
#include <raft/distance/distance_types.hpp>
#include <raft/linalg/transpose.cuh>
#include <raft/sparse/coo.hpp>
#include <cuml/common/logger.hpp>
#include <test_utils.h>
namespace ML {
using namespace Datasets;
using namespace std;
/**
 * Parameter set for one single-linkage clustering test case. Field order
 * matters: the input tables below use aggregate initialization.
 */
template <typename T, typename IdxT>
struct LinkageInputs {
  IdxT n_row;                         // number of points
  IdxT n_col;                         // number of features per point
  std::vector<T> data;                // row-major input points
  std::vector<IdxT> expected_labels;  // ground-truth cluster assignment
  int n_clusters;                     // number of flat clusters to cut into
  bool use_knn;  // true: knn-based connectivity; false: full pairwise distances
  int c;         // knn connectivity parameter (used only when use_knn is true)
};

// gtest requires parameterized-test inputs to be streamable. The payload is
// too large to be useful, so print nothing.
template <typename T, typename IdxT>
::std::ostream& operator<<(::std::ostream& os, const LinkageInputs<T, IdxT>& dims)
{
  return os;
}
/**
 * Runs single-linkage agglomerative clustering (knn-based or pairwise,
 * depending on params.use_knn) and stores the resulting labels alongside the
 * expected labels for the TEST_P body to compare.
 */
template <typename T, typename IdxT>
class LinkageTest : public ::testing::TestWithParam<LinkageInputs<T, IdxT>> {
 protected:
  // NOTE(review): `stream` is still the default 0 here; basicTest() resets it
  // from a freshly created handle before the buffers are used.
  LinkageTest() : labels(0, stream), labels_ref(0, stream) {}

  void basicTest()
  {
    raft::handle_t handle;
    stream = handle.get_stream();
    params = ::testing::TestWithParam<LinkageInputs<T, IdxT>>::GetParam();
    rmm::device_uvector<T> data(params.n_row * params.n_col, stream);
    // // Allocate result labels and expected labels on device
    labels.resize(params.n_row, stream);
    labels_ref.resize(params.n_row, stream);
    //
    raft::copy(data.data(), params.data.data(), data.size(), handle.get_stream());
    raft::copy(labels_ref.data(), params.expected_labels.data(), params.n_row, handle.get_stream());
    handle.sync_stream(handle.get_stream());
    // Output views: flat labels plus the (n_row - 1) x 2 dendrogram children.
    raft::hierarchy::linkage_output<IdxT> out_arrs;
    out_arrs.labels = labels.data();
    rmm::device_uvector<IdxT> out_children((params.n_row - 1) * 2, handle.get_stream());
    out_arrs.children = out_children.data();
    if (params.use_knn) {
      ML::single_linkage_neighbors(handle,
                                   data.data(),
                                   params.n_row,
                                   params.n_col,
                                   &out_arrs,
                                   raft::distance::DistanceType::L2Unexpanded,
                                   params.c,
                                   params.n_clusters);
    } else {
      ML::single_linkage_pairwise(handle,
                                  data.data(),
                                  params.n_row,
                                  params.n_col,
                                  &out_arrs,
                                  raft::distance::DistanceType::L2Expanded,
                                  params.n_clusters);
    }
    handle.sync_stream(handle.get_stream());
  }

  void SetUp() override { basicTest(); }

 protected:
  cudaStream_t stream = 0;
  LinkageInputs<T, IdxT> params;
  rmm::device_uvector<IdxT> labels, labels_ref;
  double score;
};
// Test-case table for the float/int instantiation. Each entry is
// {n_row, n_col, data (row-major), expected_labels, n_clusters, use_knn, c}.
const std::vector<LinkageInputs<float, int>> linkage_inputsf2 = {
  // Test n_clusters == n_points: every point becomes its own cluster.
  {10,
   5,
   {0.21390334, 0.50261639, 0.91036676, 0.59166485, 0.71162682, 0.10248392, 0.77782677, 0.43772379,
    0.4035871, 0.3282796, 0.47544681, 0.59862974, 0.12319357, 0.06239463, 0.28200272, 0.1345717,
    0.50498218, 0.5113505, 0.16233086, 0.62165332, 0.42281548, 0.933117, 0.41386077, 0.23264562,
    0.73325968, 0.37537541, 0.70719873, 0.14522645, 0.73279625, 0.9126674, 0.84854131, 0.28890216,
    0.85267903, 0.74703138, 0.83842071, 0.34942792, 0.27864171, 0.70911132, 0.21338564, 0.32035554,
    0.73788331, 0.46926692, 0.57570162, 0.42559178, 0.87120209, 0.22734951, 0.01847905, 0.75549396,
    0.76166195, 0.66613745},
   {9, 8, 7, 6, 5, 4, 3, 2, 1, 0},
   10,
   false,
   5},
  // Test outlier points: extreme-valued 2-D points must be isolated correctly.
  {9,
   2,
   {-1, -50, 3, 4, 5000, 10000, 1, 3, 4, 5, 0.000005, 0.00002, 2000000, 500000, 10, 50, 30, 5},
   {6, 0, 5, 0, 0, 4, 3, 2, 1},
   7,
   false,
   5},
  // Test n_clusters == (n_points / 2): same data as the first case, coarser cut.
  {10,
   5,
   {0.21390334, 0.50261639, 0.91036676, 0.59166485, 0.71162682, 0.10248392, 0.77782677, 0.43772379,
    0.4035871, 0.3282796, 0.47544681, 0.59862974, 0.12319357, 0.06239463, 0.28200272, 0.1345717,
    0.50498218, 0.5113505, 0.16233086, 0.62165332, 0.42281548, 0.933117, 0.41386077, 0.23264562,
    0.73325968, 0.37537541, 0.70719873, 0.14522645, 0.73279625, 0.9126674, 0.84854131, 0.28890216,
    0.85267903, 0.74703138, 0.83842071, 0.34942792, 0.27864171, 0.70911132, 0.21338564, 0.32035554,
    0.73788331, 0.46926692, 0.57570162, 0.42559178, 0.87120209, 0.22734951, 0.01847905, 0.75549396,
    0.76166195, 0.66613745},
   {1, 0, 4, 0, 0, 3, 2, 0, 2, 1},
   5,
   false,
   10},
  // Test n_points == 100: larger random data set, 10 features.
  {100,
   10,
   {6.26168372e-01, 9.30437651e-01, 6.02450208e-01, 2.73025296e-01, 9.53050619e-01, 3.32164396e-01,
    6.88942598e-01, 5.79163537e-01, 6.70341547e-01, 2.70140602e-02, 9.30429671e-01, 7.17721157e-01,
    9.89948537e-01, 7.75253347e-01, 1.34491522e-02, 2.48522428e-02, 3.51413378e-01, 7.64405834e-01,
    7.86373507e-01, 7.18748577e-01, 8.66998621e-01, 6.80316582e-01, 2.51288712e-01, 4.91078420e-01,
    3.76246281e-01, 4.86828710e-01, 5.67464772e-01, 5.30734742e-01, 8.99478296e-01, 7.66699088e-01,
    9.49339111e-01, 3.55248484e-01, 9.06046929e-01, 4.48407772e-01, 6.96395305e-01, 2.44277335e-01,
    7.74840000e-01, 5.21046603e-01, 4.66423971e-02, 5.12019638e-02, 8.95019614e-01, 5.28956953e-01,
    4.31536306e-01, 5.83857744e-01, 4.41787364e-01, 4.68656523e-01, 5.73971433e-01, 6.79989654e-01,
    3.19650588e-01, 6.12579596e-01, 6.49126442e-02, 8.39131142e-01, 2.85252117e-01, 5.84848929e-01,
    9.46507115e-01, 8.58440748e-01, 3.61528940e-01, 2.44215959e-01, 3.80101125e-01, 4.57128957e-02,
    8.82216988e-01, 8.31498633e-01, 7.23474381e-01, 7.75788607e-01, 1.40864146e-01, 6.62092382e-01,
    5.13985168e-01, 3.00686418e-01, 8.70109949e-01, 2.43187753e-01, 2.89391938e-01, 2.84214238e-01,
    8.70985521e-01, 8.77491176e-01, 6.72537226e-01, 3.30929686e-01, 1.85934324e-01, 9.16222614e-01,
    6.18239142e-01, 2.64768597e-01, 5.76145451e-01, 8.62961369e-01, 6.84757925e-01, 7.60549082e-01,
    1.27645356e-01, 4.51004673e-01, 3.92292980e-01, 4.63170803e-01, 4.35449330e-02, 2.17583404e-01,
    5.71832605e-02, 2.06763039e-01, 3.70116249e-01, 2.09750028e-01, 6.17283019e-01, 8.62549231e-01,
    9.84156240e-02, 2.66249156e-01, 3.87635103e-01, 2.85591012e-02, 4.24826068e-01, 4.45795088e-01,
    6.86227676e-01, 1.08848960e-01, 5.96731841e-02, 3.71770228e-01, 1.91548833e-01, 6.95136078e-01,
    9.00700636e-01, 8.76363105e-01, 2.67334632e-01, 1.80619709e-01, 7.94060419e-01, 1.42854171e-02,
    1.09372387e-01, 8.74028108e-01, 6.46403232e-01, 4.86588834e-01, 5.93446175e-02, 6.11886291e-01,
    8.83865057e-01, 3.15879821e-01, 2.27043992e-01, 9.76764951e-01, 6.15620336e-01, 9.76199360e-01,
    2.40548962e-01, 3.21795663e-01, 8.75087904e-02, 8.11234663e-01, 6.96070480e-01, 8.12062321e-01,
    1.21958818e-01, 3.44348628e-02, 8.72630414e-01, 3.06162776e-01, 1.76043529e-02, 9.45894971e-01,
    5.33896401e-01, 6.21642973e-01, 4.93062535e-01, 4.48984262e-01, 2.24560379e-01, 4.24052195e-02,
    4.43447610e-01, 8.95646149e-01, 6.05220676e-01, 1.81840491e-01, 9.70831206e-01, 2.12563586e-02,
    6.92582693e-01, 7.55946922e-01, 7.95086143e-01, 6.05328941e-01, 3.99350764e-01, 4.32846636e-01,
    9.81114529e-01, 4.98266428e-01, 6.37127930e-03, 1.59085889e-01, 6.34682067e-05, 5.59429440e-01,
    7.38827633e-01, 8.93214770e-01, 2.16494306e-01, 9.35430573e-02, 4.75665868e-02, 7.80503518e-01,
    7.86240041e-01, 7.06854594e-01, 2.13725879e-02, 7.68246091e-01, 4.50234808e-01, 5.21231104e-01,
    5.01989826e-03, 4.22081572e-02, 1.65337732e-01, 8.54134740e-01, 4.99430262e-01, 8.94525601e-01,
    1.14028379e-01, 3.69739861e-01, 1.32955599e-01, 2.65563824e-01, 2.52811151e-01, 1.44792843e-01,
    6.88449594e-01, 4.44921417e-01, 8.23296587e-01, 1.93266317e-01, 1.19033309e-01, 1.36368966e-01,
    3.42600285e-01, 5.64505195e-01, 5.57594559e-01, 7.44257892e-01, 8.38231569e-02, 4.11548847e-01,
    3.21010077e-01, 8.55081359e-01, 4.30105779e-01, 1.16229135e-01, 9.87731964e-02, 3.14712335e-01,
    4.50880592e-01, 2.72289598e-01, 6.31615256e-01, 8.97432958e-01, 4.44764250e-01, 8.03776440e-01,
    2.68767748e-02, 2.43374608e-01, 4.02141103e-01, 4.98881209e-01, 5.33173003e-01, 8.82890436e-01,
    7.16149148e-01, 4.19664401e-01, 2.29335357e-01, 2.88637806e-01, 3.44696803e-01, 6.78171906e-01,
    5.69849716e-01, 5.86454477e-01, 3.54474989e-01, 9.03876540e-01, 6.45980000e-01, 6.34887593e-01,
    7.88039746e-02, 2.04814126e-01, 7.82251754e-01, 2.43147074e-01, 7.50951808e-01, 1.72799092e-02,
    2.95349590e-01, 6.57991826e-01, 8.81214312e-01, 5.73970708e-01, 2.77610881e-01, 1.82155097e-01,
    7.69797417e-02, 6.44792402e-01, 9.46950998e-01, 7.73064845e-01, 6.04733624e-01, 5.80094567e-01,
    1.67498426e-01, 2.66514296e-01, 6.50140368e-01, 1.91170299e-01, 2.08752199e-01, 3.01664091e-01,
    9.85033484e-01, 2.92909152e-01, 8.65816607e-01, 1.85222119e-01, 2.28814559e-01, 1.34286382e-02,
    2.89234322e-01, 8.18668708e-01, 4.71706924e-01, 9.23199803e-01, 2.80879188e-01, 1.47319284e-01,
    4.13915748e-01, 9.31274932e-02, 6.66322195e-01, 9.66953974e-01, 3.19405786e-01, 6.69486551e-01,
    5.03096313e-02, 6.95225201e-01, 5.78469859e-01, 6.29481655e-01, 1.39252534e-01, 1.22564968e-01,
    6.80663678e-01, 6.34607157e-01, 6.42765834e-01, 1.57127410e-02, 2.92132086e-01, 5.24423878e-01,
    4.68676824e-01, 2.86003928e-01, 7.18608322e-01, 8.95617933e-01, 5.48844309e-01, 1.74517278e-01,
    5.24379196e-01, 2.13526524e-01, 5.88375435e-01, 9.88560185e-01, 4.17435771e-01, 6.14438688e-01,
    9.53760881e-01, 5.27151288e-01, 7.03017278e-01, 3.44448559e-01, 4.47059676e-01, 2.83414901e-01,
    1.98979011e-01, 4.24917361e-01, 5.73172761e-01, 2.32398853e-02, 1.65887230e-01, 4.05552785e-01,
    9.29665524e-01, 2.26135696e-01, 9.20563384e-01, 7.65259963e-01, 4.54820075e-01, 8.97710267e-01,
    3.78559302e-03, 9.15219382e-01, 3.55705698e-01, 6.94905124e-01, 8.58540202e-01, 3.89790666e-01,
    2.49478206e-01, 7.93679304e-01, 4.75830027e-01, 4.40425353e-01, 3.70579459e-01, 1.40578049e-01,
    1.70386675e-01, 7.04056121e-01, 4.85963102e-01, 9.68450060e-01, 6.77178001e-01, 2.65934654e-01,
    2.58915007e-01, 6.70052890e-01, 2.61945109e-01, 8.46207759e-01, 1.01928951e-01, 2.85611334e-01,
    2.45776933e-01, 2.66658783e-01, 3.71724077e-01, 4.34319025e-01, 4.24407347e-01, 7.15417683e-01,
    8.07997684e-01, 1.64296275e-01, 6.01638065e-01, 8.60606804e-02, 2.68719187e-01, 5.11764101e-01,
    9.75844338e-01, 7.81226782e-01, 2.20925515e-01, 7.18135040e-01, 9.82395577e-01, 8.39160243e-01,
    9.08058083e-01, 6.88010677e-01, 8.14271847e-01, 5.12460821e-01, 1.17311345e-01, 5.96075228e-01,
    9.17455497e-01, 2.12052706e-01, 7.04074603e-01, 8.72872565e-02, 8.76047818e-01, 6.96235046e-01,
    8.54801557e-01, 2.49729159e-01, 9.76594604e-01, 2.87386363e-01, 2.36461559e-02, 9.94075254e-01,
    4.25193986e-01, 7.61869994e-01, 5.13334255e-01, 6.44711165e-02, 8.92156689e-01, 3.55235167e-01,
    1.08154647e-01, 8.78446825e-01, 2.43833016e-01, 9.23071293e-01, 2.72724115e-01, 9.46631338e-01,
    3.74510294e-01, 4.08451278e-02, 9.78392777e-01, 3.65079221e-01, 6.37199516e-01, 5.51144906e-01,
    5.25978080e-01, 1.42803678e-01, 4.05451674e-01, 7.79788219e-01, 6.26009784e-01, 3.35249497e-01,
    1.43159543e-02, 1.80363779e-01, 5.05096904e-01, 2.82619947e-01, 5.83561392e-01, 3.10951324e-01,
    8.73223968e-01, 4.38545619e-01, 4.81348800e-01, 6.68497085e-01, 3.79345401e-01, 9.58832501e-01,
    1.89869550e-01, 2.34083070e-01, 2.94066207e-01, 5.74892667e-02, 6.92106828e-02, 9.61127686e-02,
    6.72650672e-02, 8.47345378e-01, 2.80916761e-01, 7.32177357e-03, 9.80785961e-01, 5.73192225e-02,
    8.48781331e-01, 8.83225408e-01, 7.34398275e-01, 7.70381941e-01, 6.20778343e-01, 8.96822048e-01,
    5.40732486e-01, 3.69704071e-01, 5.77305837e-01, 2.08221827e-01, 7.34275341e-01, 1.06110900e-01,
    3.49496706e-01, 8.34948910e-01, 1.56403291e-02, 6.78576376e-01, 8.96141268e-01, 5.94835119e-01,
    1.43943153e-01, 3.49618530e-01, 2.10440392e-01, 3.46585620e-01, 1.05153093e-01, 3.45446174e-01,
    2.72177079e-01, 7.07946300e-01, 4.33717726e-02, 3.31232203e-01, 3.91874320e-01, 4.76338141e-01,
    6.22777789e-01, 2.95989228e-02, 4.32855769e-01, 7.61049310e-01, 3.63279149e-01, 9.47210350e-01,
    6.43721247e-01, 6.58025802e-01, 1.05247633e-02, 5.29974442e-01, 7.30675767e-01, 4.30041079e-01,
    6.62634841e-01, 8.25936616e-01, 9.91253704e-01, 6.79399281e-01, 5.44177006e-01, 7.52876048e-01,
    3.32139049e-01, 7.98732398e-01, 7.38865223e-01, 9.16055132e-01, 6.11736493e-01, 9.63672879e-01,
    1.83778839e-01, 7.27558919e-02, 5.91602822e-01, 3.25235484e-01, 2.34741217e-01, 9.52346277e-01,
    9.18556407e-01, 9.35373324e-01, 6.89209070e-01, 2.56049054e-01, 6.17975395e-01, 7.82285691e-01,
    9.84983432e-01, 6.62322741e-01, 2.04144457e-01, 3.98446577e-01, 1.38918297e-01, 3.05919921e-01,
    3.14043787e-01, 5.91072666e-01, 7.44703771e-01, 8.92272567e-01, 9.78017873e-01, 9.01203161e-01,
    1.41526372e-01, 4.14878484e-01, 6.80683651e-01, 5.01733152e-02, 8.14635389e-01, 2.27926375e-01,
    9.03269815e-01, 8.68443745e-01, 9.86939190e-01, 7.40779486e-01, 2.61005311e-01, 3.19276232e-01,
    9.69509248e-01, 1.11908818e-01, 4.49198556e-01, 1.27056715e-01, 3.84064823e-01, 5.14591811e-01,
    2.10747488e-01, 9.53884090e-01, 8.43167950e-01, 4.51187972e-01, 3.75331782e-01, 6.23566461e-01,
    3.55290379e-01, 2.95705968e-01, 1.69622690e-01, 1.42981830e-01, 2.72180991e-01, 9.46468040e-01,
    3.70932500e-01, 9.94292830e-01, 4.62587505e-01, 7.14817405e-01, 2.45370540e-02, 3.00906377e-01,
    5.75768304e-01, 9.71448393e-01, 6.95574827e-02, 3.93693854e-01, 5.29306116e-01, 5.04694554e-01,
    6.73797120e-02, 6.76596969e-01, 5.50948898e-01, 3.24909641e-01, 7.70337719e-01, 6.51842631e-03,
    3.03264879e-01, 7.61037886e-03, 2.72289601e-01, 1.50502041e-01, 6.71103888e-02, 7.41503703e-01,
    1.92088941e-01, 2.19043977e-01, 9.09320161e-01, 2.37993569e-01, 6.18107973e-02, 8.31447852e-01,
    2.23355609e-01, 1.84789435e-01, 4.16104518e-01, 4.21573859e-01, 8.72446305e-02, 2.97294197e-01,
    4.50328256e-01, 8.72199917e-01, 2.51279916e-01, 4.86219272e-01, 7.57071329e-01, 4.85655942e-01,
    1.06187277e-01, 4.92341327e-01, 1.46017513e-01, 5.25421017e-01, 4.22637906e-01, 2.24685018e-01,
    8.72648431e-01, 5.54051490e-01, 1.80745062e-01, 2.12756336e-01, 5.20883169e-01, 7.60363654e-01,
    8.30254678e-01, 5.00003328e-01, 4.69017439e-01, 6.38105527e-01, 3.50638261e-02, 5.22217353e-02,
    9.06516882e-02, 8.52975842e-01, 1.19985883e-01, 3.74926753e-01, 6.50302066e-01, 1.98875727e-01,
    6.28362507e-02, 4.32693501e-01, 3.10500685e-01, 6.20732833e-01, 4.58503272e-01, 3.20790034e-01,
    7.91284868e-01, 7.93054570e-01, 2.93406765e-01, 8.95399023e-01, 1.06441034e-01, 7.53085241e-02,
    8.67523104e-01, 1.47963482e-01, 1.25584706e-01, 3.81545040e-02, 6.34338619e-01, 1.76368938e-02,
    5.75553531e-02, 5.31607516e-01, 2.63869588e-01, 9.41945823e-01, 9.24028838e-02, 5.21496463e-01,
    7.74866558e-01, 5.65210610e-01, 7.28015327e-02, 6.51963790e-01, 8.94727453e-01, 4.49571590e-01,
    1.29932405e-01, 8.64026259e-01, 9.92599934e-01, 7.43721560e-01, 8.87300215e-01, 1.06369925e-01,
    8.11335531e-01, 7.87734900e-01, 9.87344678e-01, 5.32502820e-01, 4.42612382e-01, 9.64041183e-01,
    1.66085871e-01, 1.12937664e-01, 5.24423470e-01, 6.54689333e-01, 4.59119726e-01, 5.22774091e-01,
    3.08722276e-02, 6.26979315e-01, 4.49754105e-01, 8.07495757e-01, 2.34199499e-01, 1.67765675e-01,
    9.22168418e-01, 3.73210378e-01, 8.04432575e-01, 5.61890354e-01, 4.47025593e-01, 6.43155678e-01,
    2.40407640e-01, 5.91631279e-01, 1.59369206e-01, 7.75799090e-01, 8.32067212e-01, 5.59791576e-02,
    6.39105224e-01, 4.85274738e-01, 2.12630838e-01, 2.81431312e-02, 7.16205363e-01, 6.83885011e-01,
    5.23869697e-01, 9.99418314e-01, 8.35331599e-01, 4.69877463e-02, 6.74712562e-01, 7.99273684e-01,
    2.77001890e-02, 5.75809742e-01, 2.78513031e-01, 8.36209905e-01, 7.25472379e-01, 4.87173943e-01,
    7.88311357e-01, 9.64676177e-01, 1.75752651e-01, 4.98112580e-01, 8.08850418e-02, 6.40981131e-01,
    4.06647450e-01, 8.46539387e-01, 2.12620694e-01, 9.11012851e-01, 8.25041445e-01, 8.90065575e-01,
    9.63626055e-01, 5.96689242e-01, 1.63372670e-01, 4.51640148e-01, 3.43026542e-01, 5.80658851e-01,
    2.82327625e-01, 4.75535418e-01, 6.27760926e-01, 8.46314115e-01, 9.61961932e-01, 3.19806094e-01,
    5.05508062e-01, 5.28102944e-01, 6.13045057e-01, 7.44714938e-01, 1.50586073e-01, 7.91878033e-01,
    4.89839179e-01, 3.10496849e-01, 8.82309038e-01, 2.86922314e-01, 4.84687559e-01, 5.20838630e-01,
    4.62955493e-01, 2.38185305e-01, 5.47259907e-02, 7.10916137e-01, 7.31887202e-01, 6.25602317e-01,
    8.77741168e-01, 4.19881322e-01, 4.81222328e-01, 1.28224501e-01, 2.46034010e-01, 3.34971854e-01,
    7.37216484e-01, 5.62134821e-02, 7.14089724e-01, 9.85549393e-01, 4.66295827e-01, 3.08722434e-03,
    4.70237690e-01, 2.66524167e-01, 7.93875484e-01, 4.54795911e-02, 8.09702944e-01, 1.47709735e-02,
    1.70082405e-01, 6.35905179e-01, 3.75379109e-01, 4.30315011e-01, 3.15788760e-01, 5.58065230e-01,
    2.24643800e-01, 2.42142981e-01, 6.57283636e-01, 3.34921891e-01, 1.26588975e-01, 7.68064155e-01,
    9.43856291e-01, 4.47518596e-01, 5.44453573e-01, 9.95764932e-01, 7.16444391e-01, 8.51019765e-01,
    1.01179183e-01, 4.45473958e-01, 4.60327322e-01, 4.96895844e-02, 4.72907738e-01, 5.58987444e-01,
    3.41027487e-01, 1.56175026e-01, 7.58283148e-01, 6.83600909e-01, 2.14623396e-01, 3.27348880e-01,
    3.92517893e-01, 6.70418431e-01, 5.16440832e-01, 8.63140348e-01, 5.73277464e-01, 3.46608058e-01,
    7.39396341e-01, 7.20852434e-01, 2.35653246e-02, 3.89935659e-01, 7.53783745e-01, 6.34563528e-01,
    8.79339335e-01, 7.41599159e-02, 5.62433904e-01, 6.15553852e-01, 4.56956324e-01, 5.20047447e-01,
    5.26845015e-02, 5.58471266e-01, 1.63632233e-01, 5.38936665e-02, 6.49593683e-01, 2.56838748e-01,
    8.99035326e-01, 7.20847756e-01, 5.68954684e-01, 7.43684755e-01, 5.70924238e-01, 3.82318724e-01,
    4.89328290e-01, 5.62208561e-01, 4.97540804e-02, 4.18011085e-01, 6.88041565e-01, 2.16234653e-01,
    7.89548214e-01, 8.46136387e-01, 8.46816189e-01, 1.73842353e-01, 6.11627842e-02, 8.44440559e-01,
    4.50646654e-01, 3.74785037e-01, 4.87196697e-01, 4.56276448e-01, 9.13284391e-01, 4.15715464e-01,
    7.13597697e-01, 1.23641270e-02, 5.10031271e-01, 4.74601930e-02, 2.55731159e-01, 3.22090006e-01,
    1.91165703e-01, 4.51170940e-01, 7.50843157e-01, 4.42420576e-01, 4.25380660e-01, 4.50667257e-01,
    6.55689206e-01, 9.68257670e-02, 1.96528793e-01, 8.97343028e-01, 4.99940904e-01, 6.65504083e-01,
    9.41828079e-01, 4.54397338e-01, 5.61893331e-01, 5.09839880e-01, 4.53117514e-01, 8.96804127e-02,
    1.74888861e-01, 6.65641378e-01, 2.81668336e-01, 1.89532742e-01, 5.61668382e-01, 8.68330157e-02,
    8.25092797e-01, 5.18106324e-01, 1.71904024e-01, 3.68385523e-01, 1.62005436e-01, 7.48507399e-01,
    9.30274827e-01, 2.38198517e-01, 9.52222901e-01, 5.23587800e-01, 6.94384557e-01, 1.09338652e-01,
    4.83356794e-01, 2.73050402e-01, 3.68027050e-01, 5.92366466e-01, 1.83192289e-01, 8.60376029e-01,
    7.13926203e-01, 8.16750052e-01, 1.57890291e-01, 6.25691951e-01, 5.24831646e-01, 1.73873797e-01,
    1.02429784e-01, 9.17488471e-01, 4.03584434e-01, 9.31170884e-01, 2.79386137e-01, 8.77745206e-01,
    2.45200576e-01, 1.28896951e-01, 3.15713052e-01, 5.27874291e-01, 2.16444335e-01, 7.03883817e-01,
    7.74738919e-02, 8.42422142e-01, 3.75598924e-01, 3.51002411e-01, 6.22752776e-01, 4.82407943e-01,
    7.43107867e-01, 9.46182666e-01, 9.44344819e-01, 3.28124763e-01, 1.06147431e-01, 1.65102684e-01,
    3.84060507e-01, 2.91057722e-01, 7.68173662e-02, 1.03543651e-01, 6.76698940e-01, 1.43141994e-01,
    7.21342202e-01, 6.69471294e-03, 9.07298311e-01, 5.57080171e-01, 8.10954489e-01, 4.11120526e-01,
    2.06407453e-01, 2.59590556e-01, 7.58512718e-01, 5.79873897e-01, 2.92875650e-01, 2.83686529e-01,
    2.42829343e-01, 9.19323719e-01, 3.46832864e-01, 3.58238858e-01, 7.42827585e-01, 2.05760059e-01,
    9.58438860e-01, 5.66326411e-01, 6.60292846e-01, 5.61095078e-02, 6.79465531e-01, 7.05118513e-01,
    4.44713264e-01, 2.09732933e-01, 5.22732436e-01, 1.74396512e-01, 5.29356748e-01, 4.38475687e-01,
    4.94036404e-01, 4.09785794e-01, 6.40025507e-01, 5.79371821e-01, 1.57726118e-01, 6.04572263e-01,
    5.41072639e-01, 5.18847173e-01, 1.97093284e-01, 8.91767002e-01, 4.29050835e-01, 8.25490570e-01,
    3.87699807e-01, 4.50705808e-01, 2.49371643e-01, 3.36074898e-01, 9.29925118e-01, 6.65393649e-01,
    9.07275994e-01, 3.73075859e-01, 4.14044139e-03, 2.37463702e-01, 2.25893784e-01, 2.46900245e-01,
    4.50350196e-01, 3.48618117e-01, 5.07193932e-01, 5.23435142e-01, 8.13611417e-01, 8.92715622e-01,
    1.02623450e-01, 3.06088345e-01, 7.80461650e-01, 2.21453645e-01, 2.01419652e-01, 2.84254457e-01,
    3.68286735e-01, 7.39358243e-01, 8.97879394e-01, 9.81599566e-01, 7.56526442e-01, 7.37645545e-01,
    4.23976657e-02, 8.25922012e-01, 2.60956996e-01, 2.90702065e-01, 8.98388344e-01, 3.03733299e-01,
    8.49071471e-01, 3.45835425e-01, 7.65458276e-01, 5.68094872e-01, 8.93770930e-01, 9.93161641e-01,
    5.63368667e-02, 4.26548945e-01, 5.46745780e-01, 5.75674571e-01, 7.94599487e-01, 7.18935553e-02,
    4.46492976e-01, 6.40240123e-01, 2.73246969e-01, 2.00465968e-01, 1.30718835e-01, 1.92492005e-01,
    1.96617189e-01, 6.61271644e-01, 8.12687657e-01, 8.66342445e-01
   },
   {0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 4, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
   10,
   false,
   5}};
// Instantiate the fixture for float data with int labels/indices.
typedef LinkageTest<float, int> LinkageTestF_Int;

// The computed labels must match the expected labels exactly.
TEST_P(LinkageTestF_Int, Result)
{
  EXPECT_TRUE(MLCommon::devArrMatch(
    labels.data(), labels_ref.data(), params.n_row, MLCommon::Compare<int>()));
}

INSTANTIATE_TEST_CASE_P(LinkageTest, LinkageTestF_Int, ::testing::ValuesIn(linkage_inputsf2));
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/lars_test.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <iomanip>
#include <raft/core/handle.hpp>
#include <raft/util/cudart_utils.hpp>
// #TODO: Replace with public header when ready
#include <raft/linalg/detail/cusolver_wrappers.hpp>
#include <raft/random/rng.cuh>
#include <rmm/device_uvector.hpp>
#include <solver/lars_impl.cuh>
#include <sstream>
#include <test_utils.h>
#include <vector>
namespace ML {
namespace Solver {
namespace Lars {
/**
 * Fixture exercising the internal building blocks of the LARS solver
 * (selectMostCorrelated, moveToActive, updateCholesky, calcW0, calcA,
 * calcEquiangularVec, calcMaxStep) on a small fixed 4x4 problem.
 * Expected results (the *_exp members) were precomputed offline.
 */
template <typename math_t>
class LarsTest : public ::testing::Test {
 protected:
  // Allocate device buffers and upload the fixed host-side problem data.
  LarsTest()
    : cor(n_cols, handle.get_stream()),
      X(n_cols * n_rows, handle.get_stream()),
      G(n_cols * n_cols, handle.get_stream()),
      sign(n_cols, handle.get_stream()),
      ws(n_cols, handle.get_stream()),
      A(1, handle.get_stream())
  {
    auto stream = handle.get_stream();
    raft::update_device(cor.data(), cor_host, n_cols, stream);
    raft::update_device(X.data(), X_host, n_cols * n_rows, stream);
    raft::update_device(G.data(), G_host, n_cols * n_cols, stream);
    raft::update_device(sign.data(), sign_host, n_cols, stream);
  }

  // Among the inactive columns (positions >= n_active) of cor_host = {0, 137, 4, 7},
  // the largest absolute correlation is 7 at index 3.
  void testSelectMostCorrelated()
  {
    auto stream = handle.get_stream();
    math_t cj;
    int idx;
    rmm::device_uvector<math_t> workspace(n_cols, stream);
    ML::Solver::Lars::selectMostCorrelated(
      n_active, n_cols, cor.data(), &cj, workspace, &idx, n_rows, indices, 1, stream);
    EXPECT_EQ(idx, 3);
    EXPECT_EQ(7, cj);
  }

  // Moving column 3 into the active set must swap the corresponding entries
  // of cor, the rows/columns of G, and set the sign of the new active column.
  void testMoveToActive()
  {
    auto stream = handle.get_stream();
    ML::Solver::Lars::moveToActive(handle.get_cublas_handle(),
                                   &n_active,
                                   3,
                                   X.data(),
                                   n_rows,
                                   n_cols,
                                   n_rows,
                                   cor.data(),
                                   indices,
                                   G.data(),
                                   n_cols,
                                   sign.data(),
                                   stream);
    EXPECT_EQ(n_active, 3);
    EXPECT_TRUE(
      MLCommon::devArrMatchHost(cor_exp, cor.data(), n_cols, MLCommon::Compare<math_t>(), stream));
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      G_exp, G.data(), n_cols * n_cols, MLCommon::Compare<math_t>(), stream));
    EXPECT_TRUE(MLCommon::devArrMatch(
      (math_t)1.0, sign.data() + n_active - 1, 1, MLCommon::Compare<math_t>(), stream));

    // Do it again with G == nullptr to test if X is properly changed
    n_active = 2;
    ML::Solver::Lars::moveToActive(handle.get_cublas_handle(),
                                   &n_active,
                                   3,
                                   X.data(),
                                   n_rows,
                                   n_cols,
                                   n_rows,
                                   cor.data(),
                                   indices,
                                   (math_t*)nullptr,
                                   n_cols,
                                   sign.data(),
                                   stream);
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      X_exp, X.data(), n_rows * n_cols, MLCommon::Compare<math_t>(), stream));
  }

  // Reference Cholesky factor of G computed with cuSOLVER potrf (upper
  // triangular); used as the expected value for the incremental updates.
  void calcUExp(math_t* G, int n_cols, math_t* U_dev_exp)
  {
    auto stream = handle.get_stream();
    rmm::device_scalar<int> devInfo(stream);
    rmm::device_uvector<math_t> workspace(0, stream);
    int n_work;
    const int ld_U = n_cols;
    // #TODO: Call from public API when ready
    RAFT_CUSOLVER_TRY(raft::linalg::detail::cusolverDnpotrf_bufferSize(
      handle.get_cusolver_dn_handle(), CUBLAS_FILL_MODE_UPPER, n_cols, U_dev_exp, ld_U, &n_work));
    workspace.resize(n_work, stream);
    // Expected solution using Cholesky factorization from scratch
    raft::copy(U_dev_exp, G, n_cols * ld_U, stream);
    // #TODO: Call from public API when ready
    RAFT_CUSOLVER_TRY(raft::linalg::detail::cusolverDnpotrf(handle.get_cusolver_dn_handle(),
                                                            CUBLAS_FILL_MODE_UPPER,
                                                            n_cols,
                                                            U_dev_exp,
                                                            ld_U,
                                                            workspace.data(),
                                                            n_work,
                                                            devInfo.data(),
                                                            stream));
  }

  // Initialize a mix of G and U matrices to test updateCholesky
  void initGU(math_t* GU, math_t* G, math_t* U, int n_active, bool copy_G)
  {
    auto stream = handle.get_stream();
    const int ld_U = n_cols;
    // First we copy over all elements, because the factorization only replaces
    // the upper triangular part. This way it will be easier to compare to the
    // reference solution.
    raft::copy(GU, G, n_cols * n_cols, stream);
    if (!copy_G) {
      // zero the new column of G
      RAFT_CUDA_TRY(
        cudaMemsetAsync(GU + (n_active - 1) * n_cols, 0, n_cols * sizeof(math_t), stream));
    }
    for (int i = 0; i < n_active - 1; i++) {
      raft::copy(GU + i * ld_U, U + i * ld_U, i + 1, stream);
    }
  }

  // The incremental Cholesky update must reproduce the from-scratch potrf
  // factor in three configurations: U aliased with G, separate U and G, and
  // no Gram matrix at all (recomputed from X).
  void testUpdateCholesky()
  {
    auto stream  = handle.get_stream();
    const int ld_X = n_rows;
    const int ld_G = n_cols;
    const int ld_U = ld_G;
    rmm::device_uvector<math_t> workspace(0, stream);
    rmm::device_uvector<math_t> U_dev_exp(n_cols * n_cols, stream);
    calcUExp(G.data(), n_cols, U_dev_exp.data());

    rmm::device_uvector<math_t> U(n_cols * n_cols, stream);
    n_active   = 4;
    math_t eps = -1;
    // First test with U already initialized
    initGU(U.data(), G.data(), U_dev_exp.data(), n_active, true);
    ML::Solver::Lars::updateCholesky(handle,
                                     n_active,
                                     X.data(),
                                     n_rows,
                                     n_cols,
                                     ld_X,
                                     U.data(),
                                     ld_U,
                                     U.data(),
                                     ld_G,
                                     workspace,
                                     eps,
                                     stream);
    EXPECT_TRUE(MLCommon::devArrMatch(
      U_dev_exp.data(), U.data(), n_cols * n_cols, MLCommon::CompareApprox<math_t>(1e-5), stream));

    // Next test where G and U are separate arrays
    initGU(U.data(), G.data(), U_dev_exp.data(), n_active, false);
    ML::Solver::Lars::updateCholesky(handle,
                                     n_active,
                                     X.data(),
                                     n_rows,
                                     n_cols,
                                     ld_X,
                                     U.data(),
                                     ld_U,
                                     G.data(),
                                     ld_G,
                                     workspace,
                                     eps,
                                     stream);
    EXPECT_TRUE(MLCommon::devArrMatch(
      U_dev_exp.data(), U.data(), n_cols * n_cols, MLCommon::CompareApprox<math_t>(1e-5), stream));

    // Third test without Gram matrix.
    initGU(U.data(), G.data(), U_dev_exp.data(), n_active, false);
    ML::Solver::Lars::updateCholesky(handle,
                                     n_active,
                                     X.data(),
                                     n_rows,
                                     n_cols,
                                     ld_X,
                                     U.data(),
                                     ld_U,
                                     (math_t*)nullptr,
                                     0,
                                     workspace,
                                     eps,
                                     stream);
    EXPECT_TRUE(MLCommon::devArrMatch(
      U_dev_exp.data(), U.data(), n_cols * n_cols, MLCommon::CompareApprox<math_t>(1e-4), stream));
  }

  // ws0 (the unnormalized equiangular direction weights) must match the
  // precomputed ws0_exp for the full active set.
  void testCalcW0()
  {
    auto stream = handle.get_stream();
    n_active    = 4;
    const int ld_U = n_cols;
    rmm::device_uvector<math_t> ws(n_active, stream);
    rmm::device_uvector<math_t> U(n_cols * ld_U, stream);
    calcUExp(G.data(), n_cols, U.data());
    ML::Solver::Lars::calcW0(
      handle, n_active, n_cols, sign.data(), U.data(), ld_U, ws.data(), stream);
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      ws0_exp, ws.data(), n_active, MLCommon::CompareApprox<math_t>(1e-3), stream));
  }

  // The normalization factor A computed from ws0_exp must match the
  // precomputed scalar value.
  void testCalcA()
  {
    auto stream = handle.get_stream();
    n_active    = 4;
    rmm::device_uvector<math_t> ws(n_active, stream);
    raft::update_device(ws.data(), ws0_exp, n_active, stream);
    ML::Solver::Lars::calcA(handle, A.data(), n_active, sign.data(), ws.data(), stream);
    EXPECT_TRUE(MLCommon::devArrMatch(
      (math_t)0.20070615686577709, A.data(), 1, MLCommon::CompareApprox<math_t>(1e-6), stream));
  }

  // Full equiangular-vector computation: with a Gram matrix only ws and A are
  // produced; without one, the vector u itself must also be computed.
  void testEquiangular()
  {
    auto stream = handle.get_stream();
    n_active    = 4;
    rmm::device_uvector<math_t> workspace(0, stream);
    rmm::device_uvector<math_t> u_eq(n_rows, stream);
    rmm::device_uvector<math_t> U(n_cols * n_cols, stream);
    calcUExp(G.data(), n_cols, U.data());
    initGU(G.data(), G.data(), U.data(), n_active, true);
    const int ld_X = n_rows;
    const int ld_U = n_cols;
    const int ld_G = n_cols;
    ML::Solver::Lars::calcEquiangularVec(handle,
                                         n_active,
                                         X.data(),
                                         n_rows,
                                         n_cols,
                                         ld_X,
                                         sign.data(),
                                         G.data(),
                                         ld_U,
                                         G.data(),
                                         ld_G,
                                         workspace,
                                         ws.data(),
                                         A.data(),
                                         u_eq.data(),
                                         (math_t)-1,
                                         stream);
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      ws_exp, ws.data(), n_active, MLCommon::CompareApprox<math_t>(1e-3), stream));
    EXPECT_TRUE(MLCommon::devArrMatch(
      (math_t)0.20070615686577709, A.data(), 1, MLCommon::CompareApprox<math_t>(1e-4), stream));

    // Now test without Gram matrix, u should be calculated in this case
    initGU(G.data(), G.data(), U.data(), n_active, false);
    ML::Solver::Lars::calcEquiangularVec(handle,
                                         n_active,
                                         X.data(),
                                         n_rows,
                                         n_cols,
                                         ld_X,
                                         sign.data(),
                                         G.data(),
                                         ld_U,
                                         (math_t*)nullptr,
                                         0,
                                         workspace,
                                         ws.data(),
                                         A.data(),
                                         u_eq.data(),
                                         (math_t)-1,
                                         stream);
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      u_eq_exp, u_eq.data(), 1, MLCommon::CompareApprox<math_t>(1e-3), stream));
  }

  // Step-size computation: gamma and the a_vec correlations must match the
  // precomputed values with a Gram matrix, without one (u supplied), and in
  // the final iteration (n_active == max_iter).
  void testCalcMaxStep()
  {
    auto stream      = handle.get_stream();
    n_active         = 2;
    math_t A_host    = 3.6534305290498055;
    math_t ws_host[2]  = {0.25662594, -0.01708941};
    math_t u_host[4]   = {0.10282127, -0.01595011, 0.07092104, -0.99204011};
    // Local correlations for this test; shadows the member cor_host on purpose.
    math_t cor_host[4] = {137, 42, 4.7, 13.2};
    const int ld_X = n_rows;
    const int ld_G = n_cols;
    rmm::device_uvector<math_t> u(n_rows, stream);
    rmm::device_uvector<math_t> ws(n_active, stream);
    rmm::device_scalar<math_t> gamma(stream);
    rmm::device_uvector<math_t> U(n_cols * n_cols, stream);
    rmm::device_uvector<math_t> a_vec(n_cols - n_active, stream);
    raft::update_device(A.data(), &A_host, 1, stream);
    raft::update_device(ws.data(), ws_host, n_active, stream);
    raft::update_device(u.data(), u_host, n_rows, stream);
    raft::update_device(cor.data(), cor_host, n_cols, stream);
    const int max_iter = n_cols;
    math_t cj          = 42;
    ML::Solver::Lars::calcMaxStep(handle,
                                  max_iter,
                                  n_rows,
                                  n_cols,
                                  n_active,
                                  cj,
                                  A.data(),
                                  cor.data(),
                                  G.data(),
                                  ld_G,
                                  X.data(),
                                  ld_X,
                                  (math_t*)nullptr,
                                  ws.data(),
                                  gamma.data(),
                                  a_vec.data(),
                                  stream);
    math_t gamma_exp = 0.20095407186830386;
    EXPECT_TRUE(MLCommon::devArrMatch(
      gamma_exp, gamma.data(), 1, MLCommon::CompareApprox<math_t>(1e-6), stream));

    math_t a_vec_exp[2] = {24.69447886, -139.66289908};
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      a_vec_exp, a_vec.data(), a_vec.size(), MLCommon::CompareApprox<math_t>(1e-4), stream));

    // test without G matrix, we use U as input in this case
    RAFT_CUDA_TRY(cudaMemsetAsync(gamma.data(), 0, sizeof(math_t), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(a_vec.data(), 0, a_vec.size() * sizeof(math_t), stream));
    ML::Solver::Lars::calcMaxStep(handle,
                                  max_iter,
                                  n_rows,
                                  n_cols,
                                  n_active,
                                  cj,
                                  A.data(),
                                  cor.data(),
                                  (math_t*)nullptr,
                                  0,
                                  X.data(),
                                  ld_X,
                                  u.data(),
                                  ws.data(),
                                  gamma.data(),
                                  a_vec.data(),
                                  stream);
    EXPECT_TRUE(MLCommon::devArrMatch(
      gamma_exp, gamma.data(), 1, MLCommon::CompareApprox<math_t>(1e-6), stream));
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      a_vec_exp, a_vec.data(), a_vec.size(), MLCommon::CompareApprox<math_t>(1e-4), stream));

    // Last iteration
    n_active = max_iter;
    RAFT_CUDA_TRY(cudaMemsetAsync(gamma.data(), 0, sizeof(math_t), stream));
    ML::Solver::Lars::calcMaxStep(handle,
                                  max_iter,
                                  n_rows,
                                  n_cols,
                                  n_active,
                                  cj,
                                  A.data(),
                                  cor.data(),
                                  (math_t*)nullptr,
                                  0,
                                  X.data(),
                                  ld_X,
                                  u.data(),
                                  ws.data(),
                                  gamma.data(),
                                  a_vec.data(),
                                  stream);
    gamma_exp = 11.496044516528272;
    EXPECT_TRUE(MLCommon::devArrMatch(
      gamma_exp, gamma.data(), 1, MLCommon::CompareApprox<math_t>(1e-6), stream));
  }

  raft::handle_t handle;
  const int n_rows = 4;
  const int n_cols = 4;
  int n_active     = 2;       // first two columns start in the active set

  math_t cor_host[4] = {0, 137, 4, 7};  // correlations before moveToActive
  math_t cor_exp[4]  = {0, 137, 7, 4};  // correlations after moving column 3
  // clang-format off
  // Keep in mind that we actually define column major matrices, so a row here
  // corresponds to a column of the matrix.
  math_t X_host[16] = { 1.,   4.,   9.,  -3.,
                        9.,  61., 131.,  13.,
                        3.,  22., 111., -17.,
                        0.,  40.,  40., 143.};
  math_t X_exp[16] = { 1.,   4.,   9.,  -3.,
                       9.,  61., 131.,  13.,
                       0.,  40.,  40., 143.,
                       3.,  22., 111., -17.};
  math_t G_host[16] = { 107.,  1393.,  1141.,    91.,
                       1393., 21132., 15689.,  9539.,
                       1141., 15689., 13103.,  2889.,
                         91.,  9539.,  2889., 23649.};
  math_t G_exp[16] = { 107.,  1393.,    91.,  1141.,
                      1393., 21132.,  9539., 15689.,
                        91.,  9539., 23649.,  2889.,
                      1141., 15689.,  2889., 13103.};
  // clang-format on
  int indices[4]     = {3, 2, 1, 0};
  // NOTE(review): indices_exp is not referenced by any test in this fixture —
  // presumably kept for symmetry with the fit/predict tests; confirm before removal.
  int indices_exp[4] = {3, 4, 0, 1};
  math_t sign_host[4] = {1, -1, 1, -1};
  math_t ws0_exp[4]  = {22.98636271, -2.15225918, 0.41474128, 0.72897179};
  math_t ws_exp[4]   = {4.61350452, -0.43197167, 0.08324113, 0.14630913};
  math_t u_eq_exp[4] = {0.97548288, -0.21258388, 0.02538227, 0.05096055};

  rmm::device_uvector<math_t> cor;   // correlation vector
  rmm::device_uvector<math_t> X;     // feature matrix (column major)
  rmm::device_uvector<math_t> G;     // Gram matrix X^T X
  rmm::device_uvector<math_t> sign;  // signs of the active correlations
  rmm::device_uvector<math_t> ws;    // equiangular direction weights
  rmm::device_uvector<math_t> A;     // normalization scalar
};
// Run every LarsTest case for both float and double.
typedef ::testing::Types<float, double> FloatTypes;
TYPED_TEST_CASE(LarsTest, FloatTypes);

TYPED_TEST(LarsTest, select) { this->testSelectMostCorrelated(); }
TYPED_TEST(LarsTest, moveToActive) { this->testMoveToActive(); }
TYPED_TEST(LarsTest, updateCholesky) { this->testUpdateCholesky(); }
TYPED_TEST(LarsTest, calcW0) { this->testCalcW0(); }
TYPED_TEST(LarsTest, calcA) { this->testCalcA(); }
TYPED_TEST(LarsTest, equiangular) { this->testEquiangular(); }
TYPED_TEST(LarsTest, maxStep) { this->testCalcMaxStep(); }
/**
 * End-to-end fixture for ML::Solver::Lars::larsFit / larsPredict.
 *
 * Holds a small column-major 10x5 regression problem with precomputed
 * reference coefficients, regularization path (alphas) and predictions.
 * Note: the unused `cor_host` / `cor_exp` members (leftovers from the
 * LarsTest fixture above, referenced by no method here) were removed.
 */
template <typename math_t>
class LarsTestFitPredict : public ::testing::Test {
 protected:
  LarsTestFitPredict()
    : X(n_cols * n_rows, handle.get_stream()),
      y(n_rows, handle.get_stream()),
      G(n_cols * n_cols, handle.get_stream()),
      beta(n_cols, handle.get_stream()),
      coef_path((n_cols + 1) * n_cols, handle.get_stream()),
      alphas(n_cols + 1, handle.get_stream()),
      active_idx(n_cols, handle.get_stream())
  {
    // Upload the fixed problem data (X, y and the Gram matrix G = X^T X).
    auto stream = handle.get_stream();
    raft::update_device(X.data(), X_host, n_cols * n_rows, stream);
    raft::update_device(y.data(), y_host, n_rows, stream);
    raft::update_device(G.data(), G_host, n_cols * n_cols, stream);
  }

  /// Fit using the user-supplied Gram matrix and compare coefficients,
  /// alphas and active-set indices against the reference solution.
  void testFitGram()
  {
    auto stream   = handle.get_stream();
    int max_iter  = 10;
    int verbosity = 0;
    int n_active;
    ML::Solver::Lars::larsFit(handle,
                              X.data(),
                              n_rows,
                              n_cols,
                              y.data(),
                              beta.data(),
                              active_idx.data(),
                              alphas.data(),
                              &n_active,
                              G.data(),
                              max_iter,
                              (math_t*)nullptr,  // coef_path.data(),
                              verbosity,
                              n_rows,
                              n_cols,
                              (math_t)-1);
    EXPECT_EQ(n_cols, n_active);
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      beta_exp, beta.data(), n_cols, MLCommon::CompareApprox<math_t>(1e-5), stream));
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      alphas_exp, alphas.data(), n_cols + 1, MLCommon::CompareApprox<math_t>(1e-4), stream));
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      indices_exp, active_idx.data(), n_cols, MLCommon::Compare<int>(), stream));
  }

  /// Fit letting larsFit compute the Gram matrix internally (G == nullptr);
  /// tolerances are looser because the internally computed G differs slightly.
  void testFitX()
  {
    auto stream   = handle.get_stream();
    int max_iter  = 10;
    int verbosity = 0;
    int n_active;
    ML::Solver::Lars::larsFit(handle,
                              X.data(),
                              n_rows,
                              n_cols,
                              y.data(),
                              beta.data(),
                              active_idx.data(),
                              alphas.data(),
                              &n_active,
                              (math_t*)nullptr,
                              max_iter,
                              (math_t*)nullptr,  // coef_path.data(),
                              verbosity,
                              n_rows,
                              n_cols,
                              (math_t)-1);
    EXPECT_EQ(n_cols, n_active);
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      beta_exp, beta.data(), n_cols, MLCommon::CompareApprox<math_t>(2e-4), stream));
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      alphas_exp, alphas.data(), n_cols + 1, MLCommon::CompareApprox<math_t>(1e-4), stream));
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      indices_exp, active_idx.data(), n_cols, MLCommon::Compare<int>(), stream));
  }

  /// Predict with all columns active (n_active == n_cols): the fast path
  /// that uses X directly without gathering columns.
  void testPredictV1()
  {
    auto stream  = handle.get_stream();
    int ld_X     = n_rows;
    int n_active = n_cols;
    raft::update_device(beta.data(), beta_exp, n_active, stream);
    raft::update_device(active_idx.data(), indices_exp, n_active, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(y.data(), 0, n_rows * sizeof(math_t), stream));
    math_t intercept = 0;
    ML::Solver::Lars::larsPredict(handle,
                                  X.data(),
                                  n_rows,
                                  n_cols,
                                  ld_X,
                                  beta.data(),
                                  n_active,
                                  active_idx.data(),
                                  intercept,
                                  y.data());
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      pred_exp, y.data(), n_rows, MLCommon::CompareApprox<math_t>(1e-5), stream));
  }

  /// Predict with n_active < n_cols to trigger the column-gather path.
  void testPredictV2()
  {
    auto stream  = handle.get_stream();
    int ld_X     = n_rows;
    int n_active = n_cols;
    // We set n_cols > n_active to trigger prediction path where columns of X
    // are copied.
    int n_cols_loc = n_cols + 1;
    raft::update_device(beta.data(), beta_exp, n_active, stream);
    raft::update_device(active_idx.data(), indices_exp, n_active, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(y.data(), 0, n_rows * sizeof(math_t), stream));
    math_t intercept = 0;
    ML::Solver::Lars::larsPredict(handle,
                                  X.data(),
                                  n_rows,
                                  n_cols_loc,
                                  ld_X,
                                  beta.data(),
                                  n_active,
                                  active_idx.data(),
                                  intercept,
                                  y.data());
    EXPECT_TRUE(MLCommon::devArrMatchHost(
      pred_exp, y.data(), n_rows, MLCommon::CompareApprox<math_t>(1e-5), stream));
  }

  /// Smoke test on a large random problem; only checks that all columns
  /// become active (no reference solution).
  void testFitLarge()
  {
    auto stream   = handle.get_stream();
    int n_rows    = 65536;
    int n_cols    = 10;
    int max_iter  = n_cols;
    int verbosity = 0;
    int n_active;
    rmm::device_uvector<math_t> X(n_rows * n_cols, stream);
    rmm::device_uvector<math_t> y(n_rows, stream);
    beta.resize(max_iter, stream);
    active_idx.resize(max_iter, stream);
    alphas.resize(max_iter + 1, stream);
    raft::random::Rng r(1234);
    r.uniform(X.data(), n_rows * n_cols, math_t(-1.0), math_t(1.0), stream);
    r.uniform(y.data(), n_rows, math_t(-1.0), math_t(1.0), stream);
    ML::Solver::Lars::larsFit(handle,
                              X.data(),
                              n_rows,
                              n_cols,
                              y.data(),
                              beta.data(),
                              active_idx.data(),
                              alphas.data(),
                              &n_active,
                              (math_t*)nullptr,
                              max_iter,
                              (math_t*)nullptr,
                              verbosity,
                              n_rows,
                              n_cols,
                              (math_t)-1);
    EXPECT_EQ(n_cols, n_active);
  }

  raft::handle_t handle;
  const int n_rows = 10;
  const int n_cols = 5;
  // clang-format off
  // We actually define column major matrices, so a row here corresponds to a
  // column of the matrix.
  math_t X_host[50] = {
    -1.59595376,  1.02675861,  0.45079426,  0.32621407,  0.29018821,
    -1.30640121,  0.67025452,  0.30196285,  1.28636261, -1.45018015,
    -1.39544855,  0.90533337, -0.36980987,  0.23706301,  1.33296593,
    -0.524911  , -0.86187751,  0.30764958, -1.24415885,  1.61319389,
    -0.01500442, -2.25985187, -0.11147508,  1.08410381,  0.59451579,
     0.62568849,  0.99811378, -1.09709453, -0.51940485,  0.70040887,
    -1.81995734, -0.24101756,  1.21308053,  0.87517302, -0.19806613,
     1.50733111,  0.06332581, -0.65824129,  0.45640974, -1.19803788,
     0.13838875, -1.01018604, -0.15828873, -1.26652781,  0.41229797,
    -0.00953721, -0.10602222, -0.51746536, -0.10397987,  2.62132051};
  math_t G_host[25] = {
    10.        , -0.28482905, -3.98401069,  3.63094793, -5.77295066,
    -0.28482905, 10.        , -0.68437245, -1.73251284,  3.49545153,
    -3.98401069, -0.68437245, 10.        ,  1.92006934,  3.51643227,
     3.63094793, -1.73251284,  1.92006934, 10.        , -4.25887055,
    -5.77295066,  3.49545153,  3.51643227, -4.25887055, 10.        };
  math_t y_host[10] = {
    -121.34354343, -170.25131089,   19.34173641,   89.75429795,  99.97210232,
      83.67110463,   40.65749808, -109.1490306 ,  -72.97243308, 140.31957861};
  // clang-format on
  // Only the first n_cols (5) entries are meaningful; array padded to 10.
  math_t beta_exp[10]   = {
    7.48589389e+01, 3.90513025e+01, 3.81912823e+01, 2.69095277e+01, -4.74545001e-02};
  math_t alphas_exp[6]  = {8.90008255e+01,
                           4.00677648e+01,
                           2.46147690e+01,
                           2.06052321e+01,
                           3.70155968e-02,
                           0.0740366429090};
  math_t pred_exp[10]   = {-121.32409183,
                           -170.25278892,
                           19.26177047,
                           89.73931476,
                           100.07545046,
                           83.71217894,
                           40.59397899,
                           -109.19137223,
                           -72.89633962,
                           140.28189898};
  int indices_exp[5]    = {2, 1, 3, 4, 0};
  rmm::device_uvector<math_t> X;
  rmm::device_uvector<math_t> G;
  rmm::device_uvector<math_t> y;
  rmm::device_uvector<math_t> beta;
  rmm::device_uvector<math_t> alphas;
  rmm::device_uvector<math_t> coef_path;
  rmm::device_uvector<int> active_idx;
};
TYPED_TEST_CASE(LarsTestFitPredict, FloatTypes);
// The Gram/X fit tests are skipped on CUDA >= 11.2 — presumably due to a
// numerical/toolkit regression; TODO confirm against the tracking issue.
TYPED_TEST(LarsTestFitPredict, fitGram)
{
#if CUDART_VERSION >= 11020
  GTEST_SKIP();
#else
  this->testFitGram();
#endif
}
TYPED_TEST(LarsTestFitPredict, fitX)
{
#if CUDART_VERSION >= 11020
  GTEST_SKIP();
#else
  this->testFitX();
#endif
}
TYPED_TEST(LarsTestFitPredict, fitLarge) { this->testFitLarge(); }
TYPED_TEST(LarsTestFitPredict, predictV1) { this->testPredictV1(); }
TYPED_TEST(LarsTestFitPredict, predictV2) { this->testPredictV2(); }
}; // namespace Lars
}; // namespace Solver
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/logger.cpp | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/common/logger.hpp>
#include <gtest/gtest.h>
#include <string>
namespace ML {
// Basic sanity test of the singleton logger: emit at each severity,
// round-trip the level setter/getter, and check level filtering.
TEST(Logger, Test)
{
  CUML_LOG_CRITICAL("This is a critical message");
  CUML_LOG_ERROR("This is an error message");
  CUML_LOG_WARN("This is a warning message");
  CUML_LOG_INFO("This is an info message");

  auto& logger = Logger::get();
  logger.setLevel(CUML_LEVEL_WARN);
  ASSERT_EQ(CUML_LEVEL_WARN, logger.getLevel());
  logger.setLevel(CUML_LEVEL_INFO);
  ASSERT_EQ(CUML_LEVEL_INFO, logger.getLevel());

  // At INFO, anything less severe than INFO must be filtered out.
  ASSERT_FALSE(logger.shouldLogFor(CUML_LEVEL_TRACE));
  ASSERT_FALSE(logger.shouldLogFor(CUML_LEVEL_DEBUG));
  ASSERT_TRUE(logger.shouldLogFor(CUML_LEVEL_INFO));
  ASSERT_TRUE(logger.shouldLogFor(CUML_LEVEL_WARN));
}
// Most recent message captured by exampleCallback (shared test state).
std::string logged = "";
// Test callback installed on the logger: stores the formatted message.
void exampleCallback(int lvl, const char* msg) { logged = msg; }
// Number of times exampleFlush has been invoked (shared test state).
int flushCount = 0;
// Test flush handler installed on the logger: just counts calls.
void exampleFlush() { flushCount++; }
// Fixture that resets the shared callback/flush state and opens up the
// logger to TRACE before each test, then restores the defaults afterwards.
class LoggerTest : public ::testing::Test {
 protected:
  void SetUp() override
  {
    logged     = "";
    flushCount = 0;
    Logger::get().setLevel(CUML_LEVEL_TRACE);
  }

  void TearDown() override
  {
    Logger::get().setLevel(CUML_LEVEL_INFO);
    Logger::get().setCallback(nullptr);
    Logger::get().setFlush(nullptr);
  }
};
// Verify that a user-installed callback receives the message text at every
// severity level (fixture sets the level to TRACE, so all of them fire).
TEST_F(LoggerTest, callback)
{
  Logger::get().setCallback(exampleCallback);

  const std::string critical = "This is a critical message";
  CUML_LOG_CRITICAL(critical.c_str());
  ASSERT_NE(logged.find(critical), std::string::npos);

  const std::string error = "This is an error message";
  CUML_LOG_ERROR(error.c_str());
  ASSERT_NE(logged.find(error), std::string::npos);

  const std::string warning = "This is a warning message";
  CUML_LOG_WARN(warning.c_str());
  ASSERT_NE(logged.find(warning), std::string::npos);

  const std::string info = "This is an info message";
  CUML_LOG_INFO(info.c_str());
  ASSERT_NE(logged.find(info), std::string::npos);

  const std::string debug = "This is a debug message";
  CUML_LOG_DEBUG(debug.c_str());
  ASSERT_NE(logged.find(debug), std::string::npos);
}
// Verify that Logger::flush invokes the user-installed flush handler once.
TEST_F(LoggerTest, flush)
{
  Logger::get().setFlush(exampleFlush);
  Logger::get().flush();
  ASSERT_EQ(flushCount, 1);
}
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/fil_child_index_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "../../src/fil/internal.cuh"
#include <test_utils.h>
#include <cuml/fil/fil.h>
#include <gtest/gtest.h>
#include <cmath>
#include <cstdio>
#include <limits>
#include <memory>
#include <numeric>
#include <ostream>
#include <utility>
namespace ML {
using namespace fil;
/**
 * Host-side prototype of a FIL inner (split) node; convertible to each of
 * the real node layouts (dense_node, sparse_node16, sparse_node8) so one
 * parameter table can drive tests for all of them.
 */
struct proto_inner_node {
  bool def_left = false; // default left, see base_node::def_left
  bool is_categorical = false; // see base_node::is_categorical
  int fid = 0; // feature id, see base_node::fid
  int set = 0; // which bit set represents the matching category list
  double thresh = 0.0; // threshold, see base_node::thresh
  int left = 1; // left child idx, see sparse_node*::left_index()
  /// Build the node's split payload: the bit-set index for categorical
  /// nodes, otherwise the threshold narrowed to real_t (NaN kept as NaN).
  template <typename real_t>
  val_t<real_t> split()
  {
    val_t<real_t> split;
    if (is_categorical)
      split.idx = set;
    else if (std::isnan(thresh))
      split.f = std::numeric_limits<real_t>::quiet_NaN();
    else
      split.f = static_cast<real_t>(thresh);
    return split;
  }
  // Conversions to the concrete node layouts; `left` is only meaningful
  // for the sparse variants (dense nodes derive children from the index).
  template <typename real_t>
  operator dense_node<real_t>()
  {
    return dense_node<real_t>({}, split<real_t>(), fid, def_left, false, is_categorical);
  }
  template <typename real_t>
  operator sparse_node16<real_t>()
  {
    return sparse_node16<real_t>({}, split<real_t>(), fid, def_left, false, is_categorical, left);
  }
  operator sparse_node8()
  {
    return sparse_node8({}, split<float>(), fid, def_left, false, is_categorical, left);
  }
};
/// Print all proto_inner_node fields (used by gtest parameter reporting).
std::ostream& operator<<(std::ostream& os, const proto_inner_node& node)
{
  return os << "def_left " << node.def_left << " is_categorical " << node.is_categorical
            << " fid " << node.fid << " set " << node.set << " thresh " << node.thresh
            << " left " << node.left;
}
/** mechanism to use named aggregate initialization before C++20, and also use
    the struct defaults. Using it directly only works if all defaulted
    members come after ones explicitly mentioned. C++ doesn't have reflection,
    so any non-macro alternative would need a separate list of member accessors.
**/
// proto inner node
// Usage: NODE(thresh = 1.0, def_left = true) — the arguments become
// assignments executed in a derived struct's constructor.
#define NODE(...)                                                \
  []() {                                                         \
    struct NonDefaultProtoInnerNode : public proto_inner_node {  \
      NonDefaultProtoInnerNode() { __VA_ARGS__; }                \
    };                                                           \
    return proto_inner_node(NonDefaultProtoInnerNode());         \
  }()
// proto category sets for one node
/**
 * Host-side description of per-feature category bit sets, convertible to
 * the cat_sets_owner layout FIL uses internally.
 */
struct ProtoCategorySets {
  // each bit set for each feature id is in a separate vector
  // read each uint8_t from right to left, and the vector(s) - from left to right
  std::vector<std::vector<uint8_t>> bits;
  std::vector<float> fid_num_cats;
  /// Flattens the per-feature bit sets into the single contiguous buffer
  /// that cat_sets_owner expects.
  operator cat_sets_owner()
  {
    ASSERT(bits.size() == fid_num_cats.size(),
           "internal error: ProtoCategorySets::bits.size() != "
           "ProtoCategorySets::fid_num_cats.size()");
    std::vector<uint8_t> flat;
    // iterate by const reference: the original copied every inner vector
    for (const std::vector<uint8_t>& v : bits) {
      flat.insert(flat.end(), v.begin(), v.end());
    }
    return {flat, fid_num_cats};
  }
};
/// One parameterized case for ChildIndexTest: a node, its tree position,
/// the category sets, the input feature value and the expected child index.
struct ChildIndexTestParams {
  proto_inner_node node;
  int parent_node_idx = 0; // index of the node itself within a dense tree
  cat_sets_owner cso;      // category bit sets referenced by categorical nodes
  double input   = 0.0;    // feature value fed to child_index()
  int correct    = INT_MAX; // expected child node index
  bool skip_f32 = false; // if true, the test only runs for float64
};
/// Print a full test case (used by gtest parameter reporting on failure).
std::ostream& operator<<(std::ostream& os, const ChildIndexTestParams& ps)
{
  return os << "node = {\n"
            << ps.node << "\n} parent_node_idx = " << ps.parent_node_idx
            << " cat_sets_owner = {\n"
            << ps.cso << "\n} input = " << ps.input << " correct = " << ps.correct;
}
/** mechanism to use named aggregate initialization before C++20, and also use
    the struct defaults. Using it directly only works if all defaulted
    members come after ones explicitly mentioned. C++ doesn't have reflection,
    so any non-macro alternative would need a separate list of member accessors.
**/
// Usage: CHILD_INDEX_TEST_PARAMS(node = NODE(...), input = 1.0, correct = 2)
#define CHILD_INDEX_TEST_PARAMS(...)                                     \
  []() {                                                                 \
    struct NonDefaultChildIndexTestParams : public ChildIndexTestParams { \
      NonDefaultChildIndexTestParams() { __VA_ARGS__; }                  \
    };                                                                   \
    return ChildIndexTestParams(NonDefaultChildIndexTestParams());       \
  }()
/**
 * Parameterized test for tree_base::child_index over every node layout.
 * Converts the prototype node into the concrete fil_node_t, feeds it the
 * case's input value and checks the returned child index.
 */
template <typename fil_node_t>
class ChildIndexTest : public testing::TestWithParam<ChildIndexTestParams> {
  using real_t = typename fil_node_t::real_type;

 protected:
  void check()
  {
    ChildIndexTestParams param = GetParam();
    // skip tests that require float64 to work correctly
    if (std::is_same_v<real_t, float> && param.skip_f32) return;
    tree_base tree{param.cso.accessor()};
    if constexpr (!std::is_same_v<fil_node_t, fil::dense_node<real_t>>) {
      // test that the logic uses node.left instead of parent_node_idx
      param.node.left       = param.parent_node_idx * 2 + 1;
      param.parent_node_idx = INT_MIN;
    }
    real_t input = isnan(param.input) ? std::numeric_limits<real_t>::quiet_NaN()
                                      : static_cast<real_t>(param.input);
    // nan -> !def_left, categorical -> if matches, numerical -> input >= threshold
    int test_idx = tree.child_index<true>((fil_node_t)param.node, param.parent_node_idx, input);
    // note: removed a stray printf-style "%d" that was left in this stream
    // message; gtest streams values directly.
    ASSERT_EQ(test_idx, param.correct)
      << "child index test: actual " << test_idx << " != correct " << param.correct;
  }
};
// Concrete test types: one per node layout / precision combination.
using ChildIndexTestDenseFloat32    = ChildIndexTest<fil::dense_node<float>>;
using ChildIndexTestDenseFloat64    = ChildIndexTest<fil::dense_node<double>>;
using ChildIndexTestSparse16Float32 = ChildIndexTest<fil::sparse_node16<float>>;
using ChildIndexTestSparse16Float64 = ChildIndexTest<fil::sparse_node16<double>>;
using ChildIndexTestSparse8         = ChildIndexTest<fil::sparse_node8>;
/* for dense nodes, left (false) == parent * 2 + 1, right (true) == parent * 2 + 2
E.g. see tree below:
0 -> 1, 2
1 -> 3, 4
2 -> 5, 6
3 -> 7, 8
4 -> 9, 10
*/
// Shorthand special values used throughout the parameter table below.
const double INF  = std::numeric_limits<double>::infinity();
const double QNAN = std::numeric_limits<double>::quiet_NaN();
// Test matrix: numerical thresholds (including NaN/inf edge cases and
// default-left handling), dense-tree child arithmetic for several parent
// indices, and categorical matching against the bit sets.
std::vector<ChildIndexTestParams> params = {
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = 0.0), input = -INF, correct = 1),  // val !>= thresh
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = 0.0), input = 0.0, correct = 2),   // val >= thresh
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = 0.0), input = +INF, correct = 2),  // val >= thresh
  // the following two tests only work for float64
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = 0.0), input = -1e-50, correct = 1, skip_f32 = true),
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = 1e-50), input = 0.0, correct = 1, skip_f32 = true),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 1.0), input = -3.141592, correct = 1),  // val !>= thresh
  CHILD_INDEX_TEST_PARAMS(                                       // val >= thresh (e**pi > pi**e)
    node   = NODE(thresh = 22.459158),
    input  = 23.140693,
    correct = 2),
  CHILD_INDEX_TEST_PARAMS(  // val >= thresh for both negative
    node    = NODE(thresh = -0.37),
    input   = -0.36,
    correct = 2),                                                                  // val >= thresh
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = -INF), input = 0.36, correct = 2),  // val >= thresh
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = 0.0f), input = QNAN, correct = 2),  // !def_left
  CHILD_INDEX_TEST_PARAMS(node = NODE(def_left = true), input = QNAN, correct = 1),  // !def_left
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = QNAN), input = QNAN, correct = 2),    // !def_left
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(def_left = true, thresh = QNAN), input = QNAN, correct = 1),  // !def_left
  CHILD_INDEX_TEST_PARAMS(node = NODE(thresh = QNAN), input = 0.0, correct = 1),  // val !>= thresh
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 1, input = -INF, correct = 3),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 1, input = 0.0f, correct = 4),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 2, input = -INF, correct = 5),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 2, input = 0.0f, correct = 6),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 3, input = -INF, correct = 7),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 3, input = 0.0f, correct = 8),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 4, input = -INF, correct = 9),
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(thresh = 0.0), parent_node_idx = 4, input = 0.0, correct = 10),
  CHILD_INDEX_TEST_PARAMS(parent_node_idx = 4, input = QNAN, correct = 10),  // !def_left
  CHILD_INDEX_TEST_PARAMS(
    node = NODE(def_left = true), input = QNAN, parent_node_idx = 4, correct = 9),  // !def_left
  // cannot match ( < 0 and realistic fid_num_cats)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {},
                          cso.fid_num_cats = {11.0f},
                          input            = -5,
                          correct          = 1),
  // Skipping category < 0 and dummy categorical node: fid_num_cats == 0. Prevented by FIL
  // import. cannot match ( > INT_MAX)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b1111'1111},
                          cso.fid_num_cats = {8.0f},
                          input            = (float)(1ll << 33ll),
                          correct          = 1),
  // cannot match ( >= fid_num_cats and integer)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b1111'1111},
                          cso.fid_num_cats = {2.0f},
                          input            = 2,
                          correct          = 1),
  // matches ( < fid_num_cats because comparison is floating-point and there's no rounding)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b1111'1111},
                          cso.fid_num_cats = {2.0f},
                          input            = 1.8f,
                          correct          = 2),
  // cannot match ( >= fid_num_cats)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b1111'1111},
                          cso.fid_num_cats = {2.0f},
                          input            = 2.1f,
                          correct          = 1),
  // does not match (bits[category] == 0, category == 0)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b0000'0000},
                          cso.fid_num_cats = {1.0f},
                          input            = 0,
                          correct          = 1),
  // matches (negative zero)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b0000'0001},
                          cso.fid_num_cats = {1.0f},
                          input            = -0.0f,
                          correct          = 2),
  // matches (positive zero)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b0000'0001},
                          cso.fid_num_cats = {1.0f},
                          input            = 0,
                          correct          = 2),
  // matches
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b0000'0101},
                          cso.fid_num_cats = {3.0f, 1.0f},
                          input            = 2,
                          correct          = 2),
  // does not match (bits[category] == 0, category > 0)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          cso.bits         = {0b0000'0101},
                          cso.fid_num_cats = {3.0f},
                          input            = 1,
                          correct          = 1),
  // cannot match (fid_num_cats[fid=1] <= input)
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true),
                          node.fid         = 1,
                          cso.bits         = {0b0000'0101},
                          cso.fid_num_cats = {3.0f, 1.0f},
                          input            = 2,
                          correct          = 1),
  // default left
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true, def_left = true),
                          cso.bits         = {0b0000'0101},
                          cso.fid_num_cats = {3.0f},
                          input            = QNAN,
                          correct          = 1),
  // default right
  CHILD_INDEX_TEST_PARAMS(node             = NODE(is_categorical = true, def_left = false),
                          cso.bits         = {0b0000'0101},
                          cso.fid_num_cats = {3.0f},
                          input            = QNAN,
                          correct          = 2),
};
// Instantiate the same parameter table for every node layout / precision.
TEST_P(ChildIndexTestDenseFloat32, Predict) { check(); }
TEST_P(ChildIndexTestDenseFloat64, Predict) { check(); }
TEST_P(ChildIndexTestSparse16Float32, Predict) { check(); }
TEST_P(ChildIndexTestSparse16Float64, Predict) { check(); }
TEST_P(ChildIndexTestSparse8, Predict) { check(); }
INSTANTIATE_TEST_CASE_P(FilTests, ChildIndexTestDenseFloat32, testing::ValuesIn(params));
INSTANTIATE_TEST_CASE_P(FilTests, ChildIndexTestDenseFloat64, testing::ValuesIn(params));
INSTANTIATE_TEST_CASE_P(FilTests, ChildIndexTestSparse16Float32, testing::ValuesIn(params));
INSTANTIATE_TEST_CASE_P(FilTests, ChildIndexTestSparse16Float64, testing::ValuesIn(params));
INSTANTIATE_TEST_CASE_P(FilTests, ChildIndexTestSparse8, testing::ValuesIn(params));
} // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/ols.cu | /*
* Copyright (c) 2019-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/glm.hpp>
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/cuda_stream_pool.hpp>
#include <rmm/device_uvector.hpp>
#include <test_utils.h>
#include <vector>
namespace ML {
namespace GLM {
enum class hconf { SINGLE, LEGACY_ONE, LEGACY_TWO, NON_BLOCKING_ONE, NON_BLOCKING_TWO };
/// Build a raft handle for the requested stream configuration.
/// hconf::SINGLE (and anything unrecognized) yields the default handle.
raft::handle_t create_handle(hconf type)
{
  if (type == hconf::LEGACY_ONE)
    return raft::handle_t(rmm::cuda_stream_legacy, std::make_shared<rmm::cuda_stream_pool>(1));
  if (type == hconf::LEGACY_TWO)
    return raft::handle_t(rmm::cuda_stream_legacy, std::make_shared<rmm::cuda_stream_pool>(2));
  if (type == hconf::NON_BLOCKING_ONE)
    return raft::handle_t(rmm::cuda_stream_per_thread, std::make_shared<rmm::cuda_stream_pool>(1));
  if (type == hconf::NON_BLOCKING_TWO)
    return raft::handle_t(rmm::cuda_stream_per_thread, std::make_shared<rmm::cuda_stream_pool>(2));
  return raft::handle_t();
}
/// Parameters for one OLS test case.
template <typename T>
struct OlsInputs {
  hconf hc;    // handle/stream configuration to fit with
  T tol;       // absolute tolerance for result comparisons
  int n_row;   // rows in the training matrix
  int n_col;   // columns (features) in the training matrix
  int n_row_2; // rows in the prediction matrix
  int algo;    // solver selector forwarded to olsFit (0/1/2) — see olsFit docs
};
/**
 * Fixture for GLM::olsFit / gemmPredict. Fits the same small problem three
 * ways (no intercept; with intercept; with intercept + normalization) and
 * additionally a single-column problem, storing results for the TEST_P
 * bodies to compare against the reference arrays.
 *
 * Note: removed four unused, never-initialized raw-pointer members
 * (`data`, `labels`, `data_sc`, `labels_sc`) — all methods use local
 * rmm::device_uvector buffers of the same names.
 */
template <typename T>
class OlsTest : public ::testing::TestWithParam<OlsInputs<T>> {
 public:
  OlsTest()
    : params(::testing::TestWithParam<OlsInputs<T>>::GetParam()),
      handle(create_handle(params.hc)),
      stream(handle.get_stream()),
      coef(params.n_col, stream),
      coef2(params.n_col, stream),
      coef3(params.n_col, stream),
      coef_ref(params.n_col, stream),
      coef2_ref(params.n_col, stream),
      coef3_ref(params.n_col, stream),
      pred(params.n_row_2, stream),
      pred_ref(params.n_row_2, stream),
      pred2(params.n_row_2, stream),
      pred2_ref(params.n_row_2, stream),
      pred3(params.n_row_2, stream),
      pred3_ref(params.n_row_2, stream),
      coef_sc(1, stream),
      coef_sc_ref(1, stream)
  {
    basicTest();
    basicTest2();
  }

 protected:
  /// Fit a 4x2 column-major problem three ways and predict on 2 new rows.
  void basicTest()
  {
    int len  = params.n_row * params.n_col;
    int len2 = params.n_row_2 * params.n_col;
    rmm::device_uvector<T> data(len, stream);
    rmm::device_uvector<T> labels(params.n_row, stream);
    rmm::device_uvector<T> pred_data(len2, stream);
    std::vector<T> data_h = {1.0, 1.0, 2.0, 2.0, 1.0, 2.0, 2.0, 3.0};
    data_h.resize(len);
    raft::update_device(data.data(), data_h.data(), len, stream);
    std::vector<T> labels_h = {6.0, 8.0, 9.0, 11.0};
    labels_h.resize(params.n_row);
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);
    // Reference results for the three fit configurations below.
    std::vector<T> coef_ref_h = {2.090908, 2.5454557};
    coef_ref_h.resize(params.n_col);
    raft::update_device(coef_ref.data(), coef_ref_h.data(), params.n_col, stream);
    std::vector<T> coef2_ref_h = {1.000001, 1.9999998};
    coef2_ref_h.resize(params.n_col);
    raft::update_device(coef2_ref.data(), coef2_ref_h.data(), params.n_col, stream);
    std::vector<T> coef3_ref_h = {0.99999, 2.00000};
    coef3_ref_h.resize(params.n_col);
    raft::update_device(coef3_ref.data(), coef3_ref_h.data(), params.n_col, stream);
    std::vector<T> pred_data_h = {3.0, 2.0, 5.0, 5.0};
    pred_data_h.resize(len2);
    raft::update_device(pred_data.data(), pred_data_h.data(), len2, stream);
    std::vector<T> pred_ref_h = {19.0, 16.9090};
    pred_ref_h.resize(params.n_row_2);
    raft::update_device(pred_ref.data(), pred_ref_h.data(), params.n_row_2, stream);
    std::vector<T> pred2_ref_h = {16.0, 15.0};
    pred2_ref_h.resize(params.n_row_2);
    raft::update_device(pred2_ref.data(), pred2_ref_h.data(), params.n_row_2, stream);
    std::vector<T> pred3_ref_h = {16.0, 15.0};
    pred3_ref_h.resize(params.n_row_2);
    raft::update_device(pred3_ref.data(), pred3_ref_h.data(), params.n_row_2, stream);

    // 1) fit_intercept = false, normalize = false
    intercept = T(0);
    olsFit(handle,
           data.data(),
           params.n_row,
           params.n_col,
           labels.data(),
           coef.data(),
           &intercept,
           false,
           false,
           params.algo);
    gemmPredict(
      handle, pred_data.data(), params.n_row_2, params.n_col, coef.data(), intercept, pred.data());

    // 2) fit_intercept = true, normalize = false
    // (olsFit may modify its inputs; re-upload before every fit)
    raft::update_device(data.data(), data_h.data(), len, stream);
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);
    intercept2 = T(0);
    olsFit(handle,
           data.data(),
           params.n_row,
           params.n_col,
           labels.data(),
           coef2.data(),
           &intercept2,
           true,
           false,
           params.algo);
    gemmPredict(handle,
                pred_data.data(),
                params.n_row_2,
                params.n_col,
                coef2.data(),
                intercept2,
                pred2.data());

    // 3) fit_intercept = true, normalize = true
    raft::update_device(data.data(), data_h.data(), len, stream);
    raft::update_device(labels.data(), labels_h.data(), params.n_row, stream);
    intercept3 = T(0);
    olsFit(handle,
           data.data(),
           params.n_row,
           params.n_col,
           labels.data(),
           coef3.data(),
           &intercept3,
           true,
           true,
           params.algo);
    gemmPredict(handle,
                pred_data.data(),
                params.n_row_2,
                params.n_col,
                coef3.data(),
                intercept3,
                pred3.data());
  }

  /// Fit a single-column problem (n_col == 1) with intercept.
  void basicTest2()
  {
    int len = params.n_row * params.n_col;
    rmm::device_uvector<T> data_sc(len, stream);
    rmm::device_uvector<T> labels_sc(len, stream);
    std::vector<T> data_h = {1.0, 1.0, 2.0, 2.0, 1.0, 2.0, 2.0, 3.0};
    data_h.resize(len);
    raft::update_device(data_sc.data(), data_h.data(), len, stream);
    std::vector<T> labels_h = {6.0, 8.0, 9.0, 11.0, -1.0, 2.0, -3.6, 3.3};
    labels_h.resize(len);
    raft::update_device(labels_sc.data(), labels_h.data(), len, stream);
    std::vector<T> coef_sc_ref_h = {-0.29285714};
    coef_sc_ref_h.resize(1);
    raft::update_device(coef_sc_ref.data(), coef_sc_ref_h.data(), 1, stream);
    T intercept_sc = T(0);
    olsFit(handle,
           data_sc.data(),
           len,
           1,
           labels_sc.data(),
           coef_sc.data(),
           &intercept_sc,
           true,
           false,
           params.algo);
  }

 protected:
  OlsInputs<T> params;
  raft::handle_t handle;
  cudaStream_t stream = 0;
  // Fitted results and their references for each configuration.
  rmm::device_uvector<T> coef, coef_ref, pred, pred_ref;
  rmm::device_uvector<T> coef2, coef2_ref, pred2, pred2_ref;
  rmm::device_uvector<T> coef3, coef3_ref, pred3, pred3_ref;
  rmm::device_uvector<T> coef_sc, coef_sc_ref;
  T intercept, intercept2, intercept3;
};
// Test cases: {handle config, tolerance, n_row, n_col, n_row_2, algo}.
const std::vector<OlsInputs<float>> inputsf2 = {{hconf::NON_BLOCKING_ONE, 0.001f, 4, 2, 2, 0},
                                                {hconf::NON_BLOCKING_TWO, 0.001f, 4, 2, 2, 1},
                                                {hconf::LEGACY_ONE, 0.001f, 4, 2, 2, 2},
                                                {hconf::LEGACY_TWO, 0.001f, 4, 2, 2, 2},
                                                {hconf::SINGLE, 0.001f, 4, 2, 2, 2}};
const std::vector<OlsInputs<double>> inputsd2 = {{hconf::SINGLE, 0.001, 4, 2, 2, 0},
                                                 {hconf::LEGACY_ONE, 0.001, 4, 2, 2, 1},
                                                 {hconf::LEGACY_TWO, 0.001, 4, 2, 2, 2}};
typedef OlsTest<float> OlsTestF;
// Compare every fitted result produced by the fixture against its reference.
TEST_P(OlsTestF, Fit)
{
  ASSERT_TRUE(devArrMatch(
    coef_ref.data(), coef.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(devArrMatch(
    coef2_ref.data(), coef2.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(devArrMatch(
    coef3_ref.data(), coef3.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(devArrMatch(
    pred_ref.data(), pred.data(), params.n_row_2, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(devArrMatch(
    pred2_ref.data(), pred2.data(), params.n_row_2, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(devArrMatch(
    pred3_ref.data(), pred3.data(), params.n_row_2, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(devArrMatch(
    coef_sc_ref.data(), coef_sc.data(), 1, MLCommon::CompareApproxAbs<float>(params.tol)));
}
typedef OlsTest<double> OlsTestD;
// Compare every fitted result produced by the fixture against its reference.
// All devArrMatch calls are now consistently qualified with MLCommon::
// (the original mixed qualified and ADL-dependent unqualified calls).
TEST_P(OlsTestD, Fit)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_ref.data(), coef.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef2_ref.data(), coef2.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef3_ref.data(), coef3.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    pred_ref.data(), pred.data(), params.n_row_2, MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(pred2_ref.data(),
                                    pred2.data(),
                                    params.n_row_2,
                                    MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(pred3_ref.data(),
                                    pred3.data(),
                                    params.n_row_2,
                                    MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_sc_ref.data(), coef_sc.data(), 1, MLCommon::CompareApproxAbs<double>(params.tol)));
}
// Instantiate the parameterized suites over the case tables above.
INSTANTIATE_TEST_CASE_P(OlsTests, OlsTestF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(OlsTests, OlsTestD, ::testing::ValuesIn(inputsd2));
} // namespace GLM
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/sgd.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <solver/sgd.cuh>
#include <test_utils.h>
namespace ML {
namespace Solver {
using namespace MLCommon;
/// Parameters for one SGD test case.
template <typename T>
struct SgdInputs {
  T tol;          // absolute tolerance for result comparisons
  int n_row;      // rows in the linear-regression training matrix
  int n_col;      // columns in the linear-regression training matrix
  int n_row2;     // rows in the logistic/SVM training matrix
  int n_col2;     // columns in the logistic/SVM training matrix
  int batch_size; // mini-batch size passed to sgdFit
};
template <typename T>
// Fixture that fits three small SGD models (linear regression without/with
// intercept, logistic regression, hinge-loss classification) in the
// constructor; the TEST_P bodies only compare the results against references.
class SgdTest : public ::testing::TestWithParam<SgdInputs<T>> {
 public:
  SgdTest()
    : params(::testing::TestWithParam<SgdInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      coef(params.n_col, stream),
      coef_ref(params.n_col, stream),
      coef2(params.n_col, stream),
      coef2_ref(params.n_col, stream),
      pred_log(0, stream),
      pred_log_ref(0, stream),
      pred_svm(0, stream),
      pred_svm_ref(0, stream)
  {
    // Coefficients must start at zero; device_uvector memory is uninitialized.
    RAFT_CUDA_TRY(cudaMemsetAsync(coef.data(), 0, coef.size() * sizeof(T), stream));
    RAFT_CUDA_TRY(cudaMemsetAsync(coef2.data(), 0, coef2.size() * sizeof(T), stream));
    linearRegressionTest();
    logisticRegressionTest();
    svmTest();
  }

 protected:
  // Fits squared-loss SGD twice on a 4x2 column-major problem: once without an
  // intercept (result in coef/intercept) and once with one (coef2/intercept2).
  void linearRegressionTest()
  {
    int len = params.n_row * params.n_col;
    rmm::device_uvector<T> data(len, stream);
    rmm::device_uvector<T> labels(params.n_row, stream);
    // NOTE(review): T data_h[len] is a runtime-sized array (a GCC/Clang VLA
    // extension, not standard C++); sizes here are tied to n_row=4, n_col=2.
    T data_h[len] = {1.0, 1.0, 2.0, 2.0, 1.0, 2.0, 2.0, 3.0};
    raft::update_device(data.data(), data_h, len, stream);
    T labels_h[params.n_row] = {6.0, 8.0, 9.0, 11.0};
    raft::update_device(labels.data(), labels_h, params.n_row, stream);
    // Expected coefficients for the no-intercept fit.
    T coef_ref_h[params.n_col] = {2.087, 2.5454557};
    raft::update_device(coef_ref.data(), coef_ref_h, params.n_col, stream);
    // Expected coefficients for the with-intercept fit.
    T coef2_ref_h[params.n_col] = {1.000001, 1.9999998};
    raft::update_device(coef2_ref.data(), coef2_ref_h, params.n_col, stream);
    bool fit_intercept = false;
    intercept = T(0);
    int epochs = 2000;
    T lr = T(0.01);
    ML::lr_type lr_type = ML::lr_type::ADAPTIVE;
    T power_t = T(0.5);
    T alpha = T(0.0001);
    T l1_ratio = T(0.15);
    bool shuffle = true;
    T tol = T(1e-10);
    ML::loss_funct loss = ML::loss_funct::SQRD_LOSS;
    MLCommon::Functions::penalty pen = MLCommon::Functions::penalty::NONE;
    int n_iter_no_change = 10;
    sgdFit(handle,
           data.data(),
           params.n_row,
           params.n_col,
           labels.data(),
           coef.data(),
           &intercept,
           fit_intercept,
           params.batch_size,
           epochs,
           lr_type,
           lr,
           power_t,
           loss,
           pen,
           alpha,
           l1_ratio,
           shuffle,
           tol,
           n_iter_no_change,
           stream);
    // Second fit: same data, but learn an intercept with a constant LR.
    fit_intercept = true;
    intercept2 = T(0);
    sgdFit(handle,
           data.data(),
           params.n_row,
           params.n_col,
           labels.data(),
           coef2.data(),
           &intercept2,
           fit_intercept,
           params.batch_size,
           epochs,
           ML::lr_type::CONSTANT,
           lr,
           power_t,
           loss,
           pen,
           alpha,
           l1_ratio,
           shuffle,
           tol,
           n_iter_no_change,
           stream);
  }

  // Fits a log-loss classifier and predicts class labels on a held-out 4x3
  // matrix; predictions land in pred_log (compared against pred_log_ref).
  void logisticRegressionTest()
  {
    int len = params.n_row2 * params.n_col2;
    rmm::device_uvector<T> data_logreg(len, stream);
    rmm::device_uvector<T> data_logreg_test(len, stream);
    rmm::device_uvector<T> labels_logreg(params.n_row2, stream);
    // NOTE(review): sized with n_row2 although only n_col2 coefficients are
    // written — harmless over-allocation as long as n_row2 >= n_col2; confirm.
    rmm::device_uvector<T> coef_class(params.n_row2, stream);
    pred_log.resize(params.n_row2, stream);
    pred_log_ref.resize(params.n_row2, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(coef_class.data(), 0, coef_class.size() * sizeof(T), stream));
    T data_h[len] = {0.1, -2.1, 5.4, 5.4, -1.5, -2.15, 2.65, 2.65, 3.25, -0.15, -7.35, -7.35};
    raft::update_device(data_logreg.data(), data_h, len, stream);
    T data_test_h[len] = {0.3, 1.1, 2.1, -10.1, 0.5, 2.5, -3.55, -20.5, -1.3, 3.0, -5.0, 15.0};
    raft::update_device(data_logreg_test.data(), data_test_h, len, stream);
    T labels_logreg_h[params.n_row2] = {0.0, 1.0, 1.0, 0.0};
    raft::update_device(labels_logreg.data(), labels_logreg_h, params.n_row2, stream);
    // Expected binary predictions on the test matrix.
    T pred_log_ref_h[params.n_row2] = {1.0, 0.0, 1.0, 1.0};
    raft::update_device(pred_log_ref.data(), pred_log_ref_h, params.n_row2, stream);
    bool fit_intercept = true;
    T intercept_class = T(0);
    int epochs = 1000;
    T lr = T(0.05);
    ML::lr_type lr_type = ML::lr_type::CONSTANT;
    T power_t = T(0.5);
    T alpha = T(0.0);
    T l1_ratio = T(0.0);
    bool shuffle = false;
    T tol = T(0.0);
    ML::loss_funct loss = ML::loss_funct::LOG;
    MLCommon::Functions::penalty pen = MLCommon::Functions::penalty::NONE;
    int n_iter_no_change = 10;
    sgdFit(handle,
           data_logreg.data(),
           params.n_row2,
           params.n_col2,
           labels_logreg.data(),
           coef_class.data(),
           &intercept_class,
           fit_intercept,
           params.batch_size,
           epochs,
           lr_type,
           lr,
           power_t,
           loss,
           pen,
           alpha,
           l1_ratio,
           shuffle,
           tol,
           n_iter_no_change,
           stream);
    sgdPredictBinaryClass(handle,
                          data_logreg_test.data(),
                          params.n_row2,
                          params.n_col2,
                          coef_class.data(),
                          intercept_class,
                          pred_log.data(),
                          loss,
                          stream);
  }

  // Same structure as logisticRegressionTest but with hinge loss and an L2
  // penalty (linear SVM style); predictions land in pred_svm.
  void svmTest()
  {
    int len = params.n_row2 * params.n_col2;
    rmm::device_uvector<T> data_svmreg(len, stream);
    rmm::device_uvector<T> data_svmreg_test(len, stream);
    rmm::device_uvector<T> labels_svmreg(params.n_row2, stream);
    // NOTE(review): same n_row2 sizing as in logisticRegressionTest — confirm.
    rmm::device_uvector<T> coef_class(params.n_row2, stream);
    pred_svm.resize(params.n_row2, stream);
    pred_svm_ref.resize(params.n_row2, stream);
    RAFT_CUDA_TRY(cudaMemsetAsync(coef_class.data(), 0, coef_class.size() * sizeof(T), stream));
    T data_h[len] = {0.1, -2.1, 5.4, 5.4, -1.5, -2.15, 2.65, 2.65, 3.25, -0.15, -7.35, -7.35};
    raft::update_device(data_svmreg.data(), data_h, len, stream);
    T data_test_h[len] = {0.3, 1.1, 2.1, -10.1, 0.5, 2.5, -3.55, -20.5, -1.3, 3.0, -5.0, 15.0};
    raft::update_device(data_svmreg_test.data(), data_test_h, len, stream);
    T labels_svmreg_h[params.n_row2] = {0.0, 1.0, 1.0, 0.0};
    raft::update_device(labels_svmreg.data(), labels_svmreg_h, params.n_row2, stream);
    T pred_svm_ref_h[params.n_row2] = {1.0, 0.0, 1.0, 1.0};
    raft::update_device(pred_svm_ref.data(), pred_svm_ref_h, params.n_row2, stream);
    bool fit_intercept = true;
    T intercept_class = T(0);
    int epochs = 1000;
    T lr = T(0.05);
    ML::lr_type lr_type = ML::lr_type::CONSTANT;
    T power_t = T(0.5);
    // Classic SVM-style regularization strength of 1/epochs.
    T alpha = T(1) / T(epochs);
    T l1_ratio = T(0.0);
    bool shuffle = false;
    T tol = T(0.0);
    ML::loss_funct loss = ML::loss_funct::HINGE;
    MLCommon::Functions::penalty pen = MLCommon::Functions::penalty::L2;
    int n_iter_no_change = 10;
    sgdFit(handle,
           data_svmreg.data(),
           params.n_row2,
           params.n_col2,
           labels_svmreg.data(),
           coef_class.data(),
           &intercept_class,
           fit_intercept,
           params.batch_size,
           epochs,
           lr_type,
           lr,
           power_t,
           loss,
           pen,
           alpha,
           l1_ratio,
           shuffle,
           tol,
           n_iter_no_change,
           stream);
    sgdPredictBinaryClass(handle,
                          data_svmreg_test.data(),
                          params.n_row2,
                          params.n_col2,
                          coef_class.data(),
                          intercept_class,
                          pred_svm.data(),
                          loss,
                          stream);
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  SgdInputs<T> params;
  rmm::device_uvector<T> coef, coef_ref;    // linear regression, no intercept
  rmm::device_uvector<T> coef2, coef2_ref;  // linear regression, with intercept
  rmm::device_uvector<T> pred_log, pred_log_ref;  // logistic predictions (n_row2)
  rmm::device_uvector<T> pred_svm, pred_svm_ref;  // hinge predictions (n_row2)
  T intercept, intercept2;
};
// {tol, n_row, n_col, n_row2, n_col2, batch_size}
const std::vector<SgdInputs<float>> inputsf2 = {{0.01f, 4, 2, 4, 3, 2}};
const std::vector<SgdInputs<double>> inputsd2 = {{0.01, 4, 2, 4, 3, 2}};
typedef SgdTest<float> SgdTestF;
// Checks the linear-regression coefficients and the logistic/hinge predictions
// computed in the fixture constructor against their reference values.
TEST_P(SgdTestF, Fit)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_ref.data(), coef.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef2_ref.data(), coef2.data(), params.n_col, MLCommon::CompareApproxAbs<float>(params.tol)));
  // BUGFIX: pred_log / pred_svm hold n_row2 elements (they are resized in
  // logisticRegressionTest / svmTest), not n_row. The two values coincide in
  // the current parameter sets, but compare the correct count so the test
  // stays valid if they ever differ.
  ASSERT_TRUE(MLCommon::devArrMatch(pred_log_ref.data(),
                                    pred_log.data(),
                                    params.n_row2,
                                    MLCommon::CompareApproxAbs<float>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(pred_svm_ref.data(),
                                    pred_svm.data(),
                                    params.n_row2,
                                    MLCommon::CompareApproxAbs<float>(params.tol)));
}
typedef SgdTest<double> SgdTestD;
// Double-precision variant of SgdTestF::Fit; see the float test for details.
TEST_P(SgdTestD, Fit)
{
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef_ref.data(), coef.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(
    coef2_ref.data(), coef2.data(), params.n_col, MLCommon::CompareApproxAbs<double>(params.tol)));
  // BUGFIX: the prediction buffers hold n_row2 elements, not n_row (identical
  // today, but n_row would silently under/over-compare if the inputs change).
  ASSERT_TRUE(MLCommon::devArrMatch(pred_log_ref.data(),
                                    pred_log.data(),
                                    params.n_row2,
                                    MLCommon::CompareApproxAbs<double>(params.tol)));
  ASSERT_TRUE(MLCommon::devArrMatch(pred_svm_ref.data(),
                                    pred_svm.data(),
                                    params.n_row2,
                                    MLCommon::CompareApproxAbs<double>(params.tol)));
}
// Instantiate the parameterized fixtures for both precisions.
INSTANTIATE_TEST_CASE_P(SgdTests, SgdTestF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(SgdTests, SgdTestD, ::testing::ValuesIn(inputsd2));
} // namespace Solver
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/time_series_datasets.h | /*
* Copyright (c) 2021, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <vector>
// Synthetic "additive" seasonal series: a pure sine wave (period 25 samples),
// 90 training points followed by a 10-point hold-out continuation.
// NOTE(review): this header defines non-inline globals at namespace scope;
// include it from exactly one translation unit, or multiple inclusion will
// cause duplicate-symbol / ODR problems — TODO confirm whether C++17 `inline`
// should be added.
std::vector<float> additive_trainf = {
  0.0, 0.248689887, 0.481753674, 0.684547106, 0.844327926, 0.951056516,
  0.998026728, 0.982287251, 0.904827052, 0.770513243, 0.587785252, 0.368124553,
  0.125333234, -0.125333234, -0.368124553, -0.587785252, -0.770513243, -0.904827052,
  -0.982287251, -0.998026728, -0.951056516, -0.844327926, -0.684547106, -0.481753674,
  -0.248689887, -2.4492936e-16, 0.248689887, 0.481753674, 0.684547106, 0.844327926,
  0.951056516, 0.998026728, 0.982287251, 0.904827052, 0.770513243, 0.587785252,
  0.368124553, 0.125333234, -0.125333234, -0.368124553, -0.587785252, -0.770513243,
  -0.904827052, -0.982287251, -0.998026728, -0.951056516, -0.844327926, -0.684547106,
  -0.481753674, -0.248689887, -4.8985872e-16, 0.248689887, 0.481753674, 0.684547106,
  0.844327926, 0.951056516, 0.998026728, 0.982287251, 0.904827052, 0.770513243,
  0.587785252, 0.368124553, 0.125333234, -0.125333234, -0.368124553, -0.587785252,
  -0.770513243, -0.904827052, -0.982287251, -0.998026728, -0.951056516, -0.844327926,
  -0.684547106, -0.481753674, -0.248689887, -7.34788079e-16, 0.248689887, 0.481753674,
  0.684547106, 0.844327926, 0.951056516, 0.998026728, 0.982287251, 0.904827052,
  0.770513243, 0.587785252, 0.368124553, 0.125333234, -0.125333234, -0.368124553};
// 10-point hold-out continuation of the sine wave (float).
std::vector<float> additive_testf = {-0.587785252,
                                     -0.770513243,
                                     -0.904827052,
                                     -0.982287251,
                                     -0.998026728,
                                     -0.951056516,
                                     -0.844327926,
                                     -0.684547106,
                                     -0.481753674,
                                     -0.248689887};
// Double-precision copy of additive_trainf.
std::vector<double> additive_traind = {
  0.0, 0.248689887, 0.481753674, 0.684547106, 0.844327926, 0.951056516,
  0.998026728, 0.982287251, 0.904827052, 0.770513243, 0.587785252, 0.368124553,
  0.125333234, -0.125333234, -0.368124553, -0.587785252, -0.770513243, -0.904827052,
  -0.982287251, -0.998026728, -0.951056516, -0.844327926, -0.684547106, -0.481753674,
  -0.248689887, -2.4492936e-16, 0.248689887, 0.481753674, 0.684547106, 0.844327926,
  0.951056516, 0.998026728, 0.982287251, 0.904827052, 0.770513243, 0.587785252,
  0.368124553, 0.125333234, -0.125333234, -0.368124553, -0.587785252, -0.770513243,
  -0.904827052, -0.982287251, -0.998026728, -0.951056516, -0.844327926, -0.684547106,
  -0.481753674, -0.248689887, -4.8985872e-16, 0.248689887, 0.481753674, 0.684547106,
  0.844327926, 0.951056516, 0.998026728, 0.982287251, 0.904827052, 0.770513243,
  0.587785252, 0.368124553, 0.125333234, -0.125333234, -0.368124553, -0.587785252,
  -0.770513243, -0.904827052, -0.982287251, -0.998026728, -0.951056516, -0.844327926,
  -0.684547106, -0.481753674, -0.248689887, -7.34788079e-16, 0.248689887, 0.481753674,
  0.684547106, 0.844327926, 0.951056516, 0.998026728, 0.982287251, 0.904827052,
  0.770513243, 0.587785252, 0.368124553, 0.125333234, -0.125333234, -0.368124553};
// Double-precision copy of additive_testf.
std::vector<double> additive_testd = {-0.587785252,
                                      -0.770513243,
                                      -0.904827052,
                                      -0.982287251,
                                      -0.998026728,
                                      -0.951056516,
                                      -0.844327926,
                                      -0.684547106,
                                      -0.481753674,
                                      -0.248689887};
// Min-max normalized version of the additive series, scaled to [0, 1]
// (0 corresponds to the series minimum -0.998026728, 1 to the maximum).
std::vector<float> additive_normalized_trainf = {
  0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865, 0.97646846, 1., 0.9921147,
  0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052, 0.43720948, 0.3155738, 0.2055263,
  0.11398166, 0.04669197, 0.0078853, 0., 0.02353154, 0.07700135, 0.15704971, 0.25864691,
  0.3754092, 0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865, 0.97646846, 1.,
  0.9921147, 0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052, 0.43720948, 0.3155738,
  0.2055263, 0.11398166, 0.04669197, 0.0078853, 0., 0.02353154, 0.07700135, 0.15704971,
  0.25864691, 0.3754092, 0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865, 0.97646846,
  1., 0.9921147, 0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052, 0.43720948,
  0.3155738, 0.2055263, 0.11398166, 0.04669197, 0.0078853, 0., 0.02353154, 0.07700135,
  0.15704971, 0.25864691, 0.3754092, 0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865,
  0.97646846, 1., 0.9921147, 0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052,
  0.43720948, 0.3155738};
// Normalized hold-out continuation (float).
std::vector<float> additive_normalized_testf = {0.2055263,
                                                0.11398166,
                                                0.04669197,
                                                0.0078853,
                                                0.,
                                                0.02353154,
                                                0.07700135,
                                                0.15704971,
                                                0.25864691,
                                                0.3754092};
// Double-precision copy of additive_normalized_trainf.
std::vector<double> additive_normalized_traind = {
  0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865, 0.97646846, 1., 0.9921147,
  0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052, 0.43720948, 0.3155738, 0.2055263,
  0.11398166, 0.04669197, 0.0078853, 0., 0.02353154, 0.07700135, 0.15704971, 0.25864691,
  0.3754092, 0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865, 0.97646846, 1.,
  0.9921147, 0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052, 0.43720948, 0.3155738,
  0.2055263, 0.11398166, 0.04669197, 0.0078853, 0., 0.02353154, 0.07700135, 0.15704971,
  0.25864691, 0.3754092, 0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865, 0.97646846,
  1., 0.9921147, 0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052, 0.43720948,
  0.3155738, 0.2055263, 0.11398166, 0.04669197, 0.0078853, 0., 0.02353154, 0.07700135,
  0.15704971, 0.25864691, 0.3754092, 0.5, 0.6245908, 0.74135309, 0.84295029, 0.92299865,
  0.97646846, 1., 0.9921147, 0.95330803, 0.88601834, 0.7944737, 0.6844262, 0.56279052,
  0.43720948, 0.3155738};
// Double-precision copy of additive_normalized_testf.
std::vector<double> additive_normalized_testd = {0.2055263,
                                                 0.11398166,
                                                 0.04669197,
                                                 0.0078853,
                                                 0.,
                                                 0.02353154,
                                                 0.07700135,
                                                 0.15704971,
                                                 0.25864691,
                                                 0.3754092};
// "Multiplicative" series: trend plus seasonality whose amplitude grows with
// the level. Values presumably are the classic monthly airline-passengers
// (Box–Jenkins) data — TODO confirm. 132 training points, 12 test points.
std::vector<float> multiplicative_trainf = {
  112, 118, 132, 129, 121, 135, 148, 148, 136, 119, 104, 118, 115, 126, 141, 135, 125, 149, 170,
  170, 158, 133, 114, 140, 145, 150, 178, 163, 172, 178, 199, 199, 184, 162, 146, 166, 171, 180,
  193, 181, 183, 218, 230, 242, 209, 191, 172, 194, 196, 196, 236, 235, 229, 243, 264, 272, 237,
  211, 180, 201, 204, 188, 235, 227, 234, 264, 302, 293, 259, 229, 203, 229, 242, 233, 267, 269,
  270, 315, 364, 347, 312, 274, 237, 278, 284, 277, 317, 313, 318, 374, 413, 405, 355, 306, 271,
  306, 315, 301, 356, 348, 355, 422, 465, 467, 404, 347, 305, 336, 340, 318, 362, 348, 363, 435,
  491, 505, 404, 359, 310, 337, 360, 342, 406, 396, 420, 472, 548, 559, 463, 407, 362, 405};
// 12-point (one season) hold-out continuation (float).
std::vector<float> multiplicative_testf = {
  417, 391, 419, 461, 472, 535, 622, 606, 508, 461, 390, 432};
// Double-precision copy of multiplicative_trainf.
std::vector<double> multiplicative_traind = {
  112, 118, 132, 129, 121, 135, 148, 148, 136, 119, 104, 118, 115, 126, 141, 135, 125, 149, 170,
  170, 158, 133, 114, 140, 145, 150, 178, 163, 172, 178, 199, 199, 184, 162, 146, 166, 171, 180,
  193, 181, 183, 218, 230, 242, 209, 191, 172, 194, 196, 196, 236, 235, 229, 243, 264, 272, 237,
  211, 180, 201, 204, 188, 235, 227, 234, 264, 302, 293, 259, 229, 203, 229, 242, 233, 267, 269,
  270, 315, 364, 347, 312, 274, 237, 278, 284, 277, 317, 313, 318, 374, 413, 405, 355, 306, 271,
  306, 315, 301, 356, 348, 355, 422, 465, 467, 404, 347, 305, 336, 340, 318, 362, 348, 363, 435,
  491, 505, 404, 359, 310, 337, 360, 342, 406, 396, 420, 472, 548, 559, 463, 407, 362, 405};
// Double-precision copy of multiplicative_testf.
std::vector<double> multiplicative_testd = {
  417, 391, 419, 461, 472, 535, 622, 606, 508, 461, 390, 432};
// Normalized version of the multiplicative series (scaled so the training
// minimum maps near 0.001 and the range spans roughly [0, 1]).
std::vector<float> multiplicative_normalized_trainf = {
  0.01644402, 0.02802703, 0.05505405, 0.04926255, 0.03381853, 0.06084556, 0.08594208, 0.08594208,
  0.06277606, 0.02995753, 0.001, 0.02802703, 0.02223552, 0.04347104, 0.07242857, 0.06084556,
  0.04154054, 0.08787259, 0.12841313, 0.12841313, 0.1052471, 0.05698456, 0.02030502, 0.07049807,
  0.08015058, 0.08980309, 0.14385714, 0.11489961, 0.13227413, 0.14385714, 0.18439768, 0.18439768,
  0.15544015, 0.11296911, 0.08208108, 0.12069112, 0.13034363, 0.14771815, 0.17281467, 0.14964865,
  0.15350965, 0.22107722, 0.24424324, 0.26740927, 0.2037027, 0.16895367, 0.13227413, 0.17474517,
  0.17860618, 0.17860618, 0.25582625, 0.25389575, 0.24231274, 0.26933977, 0.30988031, 0.32532432,
  0.25775676, 0.20756371, 0.14771815, 0.18825869, 0.19405019, 0.16316216, 0.25389575, 0.23845174,
  0.25196525, 0.30988031, 0.38323938, 0.36586486, 0.3002278, 0.24231274, 0.19211969, 0.24231274,
  0.26740927, 0.25003475, 0.31567181, 0.31953282, 0.32146332, 0.40833591, 0.5029305, 0.47011197,
  0.4025444, 0.32918533, 0.25775676, 0.33690734, 0.34849035, 0.33497683, 0.41219691, 0.4044749,
  0.41412741, 0.52223552, 0.5975251, 0.58208108, 0.48555598, 0.39096139, 0.32339382, 0.39096139,
  0.40833591, 0.38130888, 0.48748649, 0.47204247, 0.48555598, 0.61489961, 0.6979112, 0.7017722,
  0.58015058, 0.47011197, 0.38903089, 0.44887645, 0.45659846, 0.41412741, 0.4990695, 0.47204247,
  0.501, 0.63999614, 0.74810425, 0.77513127, 0.58015058, 0.49327799, 0.3986834, 0.45080695,
  0.49520849, 0.46045946, 0.58401158, 0.56470656, 0.61103861, 0.71142471, 0.85814286, 0.87937838,
  0.69405019, 0.58594208, 0.4990695, 0.58208108};
// Normalized hold-out continuation (float).
std::vector<float> multiplicative_normalized_testf = {0.6052471,
                                                      0.55505405,
                                                      0.60910811,
                                                      0.69018919,
                                                      0.71142471,
                                                      0.83304633,
                                                      1.001,
                                                      0.97011197,
                                                      0.78092278,
                                                      0.69018919,
                                                      0.55312355,
                                                      0.63420463};
// Double-precision copy of multiplicative_normalized_trainf.
std::vector<double> multiplicative_normalized_traind = {
  0.01644402, 0.02802703, 0.05505405, 0.04926255, 0.03381853, 0.06084556, 0.08594208, 0.08594208,
  0.06277606, 0.02995753, 0.001, 0.02802703, 0.02223552, 0.04347104, 0.07242857, 0.06084556,
  0.04154054, 0.08787259, 0.12841313, 0.12841313, 0.1052471, 0.05698456, 0.02030502, 0.07049807,
  0.08015058, 0.08980309, 0.14385714, 0.11489961, 0.13227413, 0.14385714, 0.18439768, 0.18439768,
  0.15544015, 0.11296911, 0.08208108, 0.12069112, 0.13034363, 0.14771815, 0.17281467, 0.14964865,
  0.15350965, 0.22107722, 0.24424324, 0.26740927, 0.2037027, 0.16895367, 0.13227413, 0.17474517,
  0.17860618, 0.17860618, 0.25582625, 0.25389575, 0.24231274, 0.26933977, 0.30988031, 0.32532432,
  0.25775676, 0.20756371, 0.14771815, 0.18825869, 0.19405019, 0.16316216, 0.25389575, 0.23845174,
  0.25196525, 0.30988031, 0.38323938, 0.36586486, 0.3002278, 0.24231274, 0.19211969, 0.24231274,
  0.26740927, 0.25003475, 0.31567181, 0.31953282, 0.32146332, 0.40833591, 0.5029305, 0.47011197,
  0.4025444, 0.32918533, 0.25775676, 0.33690734, 0.34849035, 0.33497683, 0.41219691, 0.4044749,
  0.41412741, 0.52223552, 0.5975251, 0.58208108, 0.48555598, 0.39096139, 0.32339382, 0.39096139,
  0.40833591, 0.38130888, 0.48748649, 0.47204247, 0.48555598, 0.61489961, 0.6979112, 0.7017722,
  0.58015058, 0.47011197, 0.38903089, 0.44887645, 0.45659846, 0.41412741, 0.4990695, 0.47204247,
  0.501, 0.63999614, 0.74810425, 0.77513127, 0.58015058, 0.49327799, 0.3986834, 0.45080695,
  0.49520849, 0.46045946, 0.58401158, 0.56470656, 0.61103861, 0.71142471, 0.85814286, 0.87937838,
  0.69405019, 0.58594208, 0.4990695, 0.58208108};
// Double-precision copy of multiplicative_normalized_testf.
std::vector<double> multiplicative_normalized_testd = {0.6052471,
                                                       0.55505405,
                                                       0.60910811,
                                                       0.69018919,
                                                       0.71142471,
                                                       0.83304633,
                                                       1.001,
                                                       0.97011197,
                                                       0.78092278,
                                                       0.69018919,
                                                       0.55312355,
                                                       0.63420463};
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/shap_kernel.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/explainer/kernel_shap.hpp>
#include <test_utils.h>
#include <raft/core/handle.hpp>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <thrust/count.h>
#include <thrust/device_ptr.h>
#include <thrust/device_vector.h>
#include <thrust/execution_policy.h>
#include <thrust/fill.h>
#include <test_utils.h>
// NOTE(review): empty namespace declaration — appears to be leftover
// scaffolding (nothing is declared here). TODO confirm it can be removed.
namespace MLCommon {
}
#include <gtest/gtest.h>
namespace ML {
namespace Explainer {
// Parameters for one kernel_dataset() run.
struct MakeKSHAPDatasetInputs {
  int nrows_exact;       // rows of X enumerated exactly (2 ones per row)
  int nrows_sampled;     // rows of X filled by random sampling (in pairs)
  int ncols;             // number of features / columns
  int nrows_background;  // rows in the background dataset
  int max_samples;       // upper bound on ones per sampled row
  uint64_t seed;         // RNG seed for the sampling
};
template <typename T>
// Builds the inputs for kernel_dataset(), runs it, and records three boolean
// verdicts (checked by the TEST_P bodies):
//  - test_sampled_X:       each sampled row of X holds nsamples[j] ones, and
//                          its paired row the complement (ncols - nsamples[j]);
//  - test_scatter_exact:   exact rows of the scattered dataset contain the
//                          observation (sentinel) exactly twice per row;
//  - test_scatter_sampled: sampled rows contain it nsamples[j] times, and the
//                          paired rows the complement.
class MakeKSHAPDatasetTest : public ::testing::TestWithParam<MakeKSHAPDatasetInputs> {
 protected:
  void SetUp() override
  {
    params = ::testing::TestWithParam<MakeKSHAPDatasetInputs>::GetParam();
    stream = handle.get_stream();
    int i, j;
    nrows_X = params.nrows_exact + params.nrows_sampled;
    rmm::device_uvector<T> background(params.nrows_background * params.ncols, stream);
    rmm::device_uvector<T> observation(params.ncols, stream);
    rmm::device_uvector<int> nsamples(params.nrows_sampled / 2, stream);
    rmm::device_uvector<float> X(nrows_X * params.ncols, stream);
    rmm::device_uvector<T> dataset(nrows_X * params.nrows_background * params.ncols, stream);
    thrust::device_ptr<T> b_ptr = thrust::device_pointer_cast(background.data());
    thrust::device_ptr<T> o_ptr = thrust::device_pointer_cast(observation.data());
    thrust::device_ptr<int> n_ptr = thrust::device_pointer_cast(nsamples.data());
    thrust::device_ptr<float> X_ptr = thrust::device_pointer_cast(X.data());
    thrust::device_ptr<T> d_ptr = thrust::device_pointer_cast(dataset.data());
    // Initialize arrays:
    // Assign a sentinel value to the observation to check easily later
    T sent_value = nrows_X * params.nrows_background * params.ncols * 100;
    for (i = 0; i < params.ncols; i++) {
      o_ptr[i] = sent_value;
    }
    // Initialize background array with different odd value per row, makes
    // it easier to debug if something goes wrong.
    for (i = 0; i < params.nrows_background; i++) {
      for (j = 0; j < params.ncols; j++) {
        b_ptr[i * params.ncols + j] = (i * 2) + 1;
      }
    }
    // Initialize the exact part of X. We create 2 `1` values per row for the test.
    // BUGFIX: thrust::fill fills the half-open range [first, last); the
    // previous end iterator (&X_ptr[len - 1]) excluded the final element,
    // leaving one value of X uninitialized (device_uvector does not zero its
    // memory on construction).
    thrust::fill(thrust::device, X_ptr, X_ptr + nrows_X * params.ncols, 0);
    for (i = 0; i < params.nrows_exact; i++) {
      for (j = i; j < i + 2; j++) {
        X_ptr[i * params.ncols + j] = (float)1.0;
      }
    }
    // Initialize the number of samples per row, we initialize each even row to
    // max samples and each odd row to max_samples - 1
    for (i = 0; i < params.nrows_sampled / 2; i++) {
      n_ptr[i] = params.max_samples - i % 2;
    }
    kernel_dataset(handle,
                   X.data(),
                   nrows_X,
                   params.ncols,
                   background.data(),
                   params.nrows_background,
                   dataset.data(),
                   observation.data(),
                   nsamples.data(),
                   params.nrows_sampled,
                   params.max_samples,
                   params.seed);
    handle.sync_stream(stream);
    int counter;
    // Check the generated part of X by sampling. The first nrows_exact
    // correspond to the exact part generated before, so we just test after that.
    // NOTE(review): the bound `nrows_X * params.ncols / 2` looks suspicious —
    // for several parameter sets it is <= the starting index, so this loop
    // never runs. Presumably `nrows_X * params.ncols` was intended; left
    // unchanged here pending confirmation.
    test_sampled_X = true;
    j = 0;
    for (i = params.nrows_exact * params.ncols; i < nrows_X * params.ncols / 2;
         i += 2 * params.ncols) {
      // check that number of samples is the number indicated by nsamples.
      counter = thrust::count(&X_ptr[i], &X_ptr[i + params.ncols], 1);
      test_sampled_X = (test_sampled_X && (counter == n_ptr[j]));
      // check that number of samples of the next line is the compliment,
      // i.e. ncols - nsamples[j]
      counter = thrust::count(&X_ptr[i + params.ncols], &X_ptr[i + 2 * params.ncols], 1);
      test_sampled_X = (test_sampled_X && (counter == (params.ncols - n_ptr[j])));
      j++;
    }
    // Check for the exact part of the generated dataset.
    test_scatter_exact = true;
    for (i = 0; i < params.nrows_exact; i++) {
      for (j = i * params.nrows_background * params.ncols;
           j < (i + 1) * params.nrows_background * params.ncols;
           j += params.ncols) {
        counter = thrust::count(&d_ptr[j], &d_ptr[j + params.ncols], sent_value);
        // Check that indeed we have two observation entries ber row
        test_scatter_exact = test_scatter_exact && (counter == 2);
        if (not test_scatter_exact) {
          std::cout << "test_scatter_exact counter failed with: " << counter
                    << ", expected value was 2." << std::endl;
          break;
        }
      }
      if (not test_scatter_exact) { break; }
    }
    // Check for the sampled part of the generated dataset
    test_scatter_sampled = true;
    // compliment_ctr is a helper counter to help check nrows_dataset per entry in
    // nsamples without complicating indexing since sampled part starts at nrows_sampled
    int compliment_ctr = 0;
    for (i = params.nrows_exact; i < params.nrows_exact + params.nrows_sampled / 2; i++) {
      // First set of dataset observations must correspond to nsamples[i]
      for (j = (i + compliment_ctr) * params.nrows_background * params.ncols;
           j < (i + compliment_ctr + 1) * params.nrows_background * params.ncols;
           j += params.ncols) {
        counter = thrust::count(&d_ptr[j], &d_ptr[j + params.ncols], sent_value);
        test_scatter_sampled = test_scatter_sampled && (counter == n_ptr[i - params.nrows_exact]);
      }
      // The next set of samples must correspond to the compliment: ncols - nsamples[i]
      compliment_ctr++;
      for (j = (i + compliment_ctr) * params.nrows_background * params.ncols;
           j < (i + compliment_ctr + 1) * params.nrows_background * params.ncols;
           j += params.ncols) {
        // Check that number of observation entries corresponds to nsamples.
        counter = thrust::count(&d_ptr[j], &d_ptr[j + params.ncols], sent_value);
        test_scatter_sampled =
          test_scatter_sampled && (counter == params.ncols - n_ptr[i - params.nrows_exact]);
      }
    }
  }

 protected:
  MakeKSHAPDatasetInputs params;
  int nrows_X;  // total rows of X (exact + sampled)
  bool test_sampled_X;
  bool test_scatter_exact;
  bool test_scatter_sampled;
  raft::handle_t handle;
  cudaStream_t stream = 0;
};
// {nrows_exact, nrows_sampled, ncols, nrows_background, max_samples, seed}
const std::vector<MakeKSHAPDatasetInputs> inputsf = {{10, 10, 12, 2, 3, 1234ULL},
                                                     {10, 0, 12, 2, 3, 1234ULL},
                                                     {100, 50, 200, 10, 10, 1234ULL},
                                                     {100, 0, 200, 10, 10, 1234ULL},
                                                     {0, 10, 12, 2, 3, 1234ULL},
                                                     {0, 50, 200, 10, 10, 1234ULL}
};
typedef MakeKSHAPDatasetTest<float> MakeKSHAPDatasetTestF;
// Only the sampled-X verdict is asserted; see the in-test TODO below.
TEST_P(MakeKSHAPDatasetTestF, Result)
{
  ASSERT_TRUE(test_sampled_X);
  // todo (dgd): re-enable assertions
  // disabled due to a sporadic cuda 10.1 fail (by one value in one case!)
  // will be re-enabled soon after 0.17 release
  // ASSERT_TRUE(test_scatter_exact);
  // ASSERT_TRUE(test_scatter_sampled);
}
INSTANTIATE_TEST_CASE_P(MakeKSHAPDatasetTests, MakeKSHAPDatasetTestF, ::testing::ValuesIn(inputsf));
// Same parameter grid as inputsf, reused for the double-precision fixture.
const std::vector<MakeKSHAPDatasetInputs> inputsd = {{10, 10, 12, 2, 3, 1234ULL},
                                                     {10, 0, 12, 2, 3, 1234ULL},
                                                     {100, 50, 200, 10, 10, 1234ULL},
                                                     {100, 0, 200, 10, 10, 1234ULL},
                                                     {0, 10, 12, 2, 3, 1234ULL},
                                                     {0, 50, 200, 10, 10, 1234ULL}};
typedef MakeKSHAPDatasetTest<double> MakeKSHAPDatasetTestD;
// Double-precision variant; same disabled scatter assertions as the float test.
TEST_P(MakeKSHAPDatasetTestD, Result)
{
  ASSERT_TRUE(test_sampled_X);
  // todo (dgd): re-enable assertions
  // disabled due to a sporadic cuda 10.1 fail (by one value in one case!)
  // will be re-enabled soon after 0.17 release
  // ASSERT_TRUE(test_scatter_exact);
  // ASSERT_TRUE(test_scatter_sampled);
}
INSTANTIATE_TEST_CASE_P(MakeKSHAPDatasetTests, MakeKSHAPDatasetTestD, ::testing::ValuesIn(inputsd));
} // end namespace Explainer
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/umap_parametrizable_test.cu | /*
* Copyright (c) 2020-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <test_utils.h>
#include <raft/core/handle.hpp>
#include <umap/runner.cuh>
#include <cuml/datasets/make_blobs.hpp>
#include <cuml/manifold/umap.hpp>
#include <cuml/manifold/umapparams.h>
#include <cuml/metrics/metrics.hpp>
#include <cuml/neighbors/knn.hpp>
#include <datasets/digits.h>
#include <test_utils.h>
#include <datasets/digits.h>
#include <raft/linalg/reduce_rows_by_key.cuh>
#include <raft/spatial/knn/knn.cuh>
#include <raft/core/handle.hpp>
#include <raft/distance/distance.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <umap/runner.cuh>
#include <gtest/gtest.h>
#include <cstddef>
#include <iostream>
#include <type_traits>
#include <vector>
using namespace ML;
using namespace ML::Metrics;
using namespace MLCommon;
using namespace MLCommon::Datasets::Digits;
template <typename T>
// Sets *answer to true if any of the `len` elements of `data` is NaN.
// One thread inspects one element; *answer is only ever written with `true`,
// so concurrent writes are benign.
__global__ void has_nan_kernel(T* data, size_t len, bool* answer)
{
  static_assert(std::is_floating_point<T>());
  const std::size_t idx = threadIdx.x + blockIdx.x * blockDim.x;
  if (idx >= len) { return; }
  if (isnan(data[idx])) { *answer = true; }
}
template <typename T>
bool has_nan(T* data, size_t len, cudaStream_t stream)
{
dim3 blk(256);
dim3 grid(raft::ceildiv(len, (size_t)blk.x));
bool h_answer = false;
rmm::device_scalar<bool> d_answer(stream);
raft::update_device(d_answer.data(), &h_answer, 1, stream);
has_nan_kernel<<<grid, blk, 0, stream>>>(data, len, d_answer.data());
h_answer = d_answer.value(stream);
return h_answer;
}
template <typename T>
// Accumulates into *diff the absolute difference of every mismatching pair of
// elements between the two embeddings (one thread per element).
__global__ void are_equal_kernel(T* embedding1, T* embedding2, size_t len, double* diff)
{
  // Use size_t like has_nan_kernel: the previous `int` index is inconsistent
  // with the size_t length and could overflow for very large grids.
  std::size_t tid = threadIdx.x + blockIdx.x * blockDim.x;
  if (tid >= len) return;
  if (embedding1[tid] != embedding2[tid]) {
    atomicAdd(diff, abs(embedding1[tid] - embedding2[tid]));
  }
}
template <typename T>
bool are_equal(T* embedding1, T* embedding2, size_t len, cudaStream_t stream)
{
double h_answer = 0.;
rmm::device_scalar<double> d_answer(stream);
raft::update_device(d_answer.data(), &h_answer, 1, stream);
are_equal_kernel<<<raft::ceildiv(len, (size_t)32), 32, 0, stream>>>(
embedding1, embedding2, len, d_answer.data());
h_answer = d_answer.value(stream);
double tolerance = 1.0;
if (h_answer > tolerance) {
std::cout << "Not equal, difference : " << h_answer << std::endl;
return false;
}
return true;
}
class UMAPParametrizableTest : public ::testing::Test {
protected:
struct TestParams {
bool fit_transform;
bool supervised;
bool knn_params;
bool refine;
int n_samples;
int n_features;
int n_clusters;
double min_trustworthiness;
};
  // Produces a UMAP embedding of X (optionally supervised by y) into
  // embedding_ptr, exercising the code paths selected by test_params:
  // precomputed knn, fit vs fit+transform, and the get_graph()/refine() API.
  void get_embedding(raft::handle_t& handle,
                     float* X,
                     float* y,
                     float* embedding_ptr,
                     TestParams& test_params,
                     UMAPParams& umap_params)
  {
    cudaStream_t stream = handle.get_stream();
    int& n_samples  = test_params.n_samples;
    int& n_features = test_params.n_features;

    // Optionally precompute the knn graph and pass it into fit().
    // NOTE(review): manual new/delete pairs below would leak if anything in
    // between throws; std::unique_ptr would be safer — TODO consider.
    rmm::device_uvector<int64_t>* knn_indices_b{};
    rmm::device_uvector<float>* knn_dists_b{};
    int64_t* knn_indices{};
    float* knn_dists{};
    if (test_params.knn_params) {
      knn_indices_b = new rmm::device_uvector<int64_t>(n_samples * umap_params.n_neighbors, stream);
      knn_dists_b   = new rmm::device_uvector<float>(n_samples * umap_params.n_neighbors, stream);
      knn_indices   = knn_indices_b->data();
      knn_dists     = knn_dists_b->data();

      std::vector<float*> ptrs(1);
      std::vector<int> sizes(1);
      ptrs[0]  = X;
      sizes[0] = n_samples;

      raft::spatial::knn::brute_force_knn<long, float, int>(handle,
                                                            ptrs,
                                                            sizes,
                                                            n_features,
                                                            X,
                                                            n_samples,
                                                            knn_indices,
                                                            knn_dists,
                                                            umap_params.n_neighbors);

      handle.sync_stream(stream);
    }

    // In fit_transform mode the model embedding IS the output buffer;
    // otherwise fit into a scratch buffer and transform() into the output.
    float* model_embedding = nullptr;
    rmm::device_uvector<float>* model_embedding_b{};

    if (test_params.fit_transform) {
      model_embedding = embedding_ptr;
    } else {
      model_embedding_b =
        new rmm::device_uvector<float>(n_samples * umap_params.n_components, stream);
      model_embedding = model_embedding_b->data();
    }

    RAFT_CUDA_TRY(cudaMemsetAsync(
      model_embedding, 0, n_samples * umap_params.n_components * sizeof(float), stream));

    handle.sync_stream(stream);

    auto graph = raft::sparse::COO<float, int>(stream);

    if (test_params.supervised) {
      ML::UMAP::fit(handle,
                    X,
                    y,
                    n_samples,
                    n_features,
                    knn_indices,
                    knn_dists,
                    &umap_params,
                    model_embedding,
                    &graph);
    } else {
      ML::UMAP::fit(handle,
                    X,
                    nullptr,
                    n_samples,
                    n_features,
                    knn_indices,
                    knn_dists,
                    &umap_params,
                    model_embedding,
                    &graph);
    }

    // Optionally rebuild the fuzzy-simplicial-set graph and refine the
    // embedding in place through the public get_graph()/refine() API.
    if (test_params.refine) {
      std::cout << "using refine";
      if (test_params.supervised) {
        auto cgraph_coo =
          ML::UMAP::get_graph(handle, X, y, n_samples, n_features, nullptr, nullptr, &umap_params);
        ML::UMAP::refine(
          handle, X, n_samples, n_features, cgraph_coo.get(), &umap_params, model_embedding);
      } else {
        auto cgraph_coo = ML::UMAP::get_graph(
          handle, X, nullptr, n_samples, n_features, nullptr, nullptr, &umap_params);
        ML::UMAP::refine(
          handle, X, n_samples, n_features, cgraph_coo.get(), &umap_params, model_embedding);
      }
    }
    handle.sync_stream(stream);

    if (!test_params.fit_transform) {
      RAFT_CUDA_TRY(cudaMemsetAsync(
        embedding_ptr, 0, n_samples * umap_params.n_components * sizeof(float), stream));

      handle.sync_stream(stream);

      // Transform the training data against the fitted model embedding.
      ML::UMAP::transform(handle,
                          X,
                          n_samples,
                          umap_params.n_components,
                          X,
                          n_samples,
                          model_embedding,
                          n_samples,
                          &umap_params,
                          embedding_ptr);

      handle.sync_stream(stream);

      delete model_embedding_b;
    }

    if (test_params.knn_params) {
      delete knn_indices_b;
      delete knn_dists_b;
    }
  }
void assertions(raft::handle_t& handle,
float* X,
float* embedding_ptr,
TestParams& test_params,
UMAPParams& umap_params)
{
cudaStream_t stream = handle.get_stream();
int& n_samples = test_params.n_samples;
int& n_features = test_params.n_features;
ASSERT_TRUE(!has_nan(embedding_ptr, n_samples * umap_params.n_components, stream));
double trustworthiness =
trustworthiness_score<float, raft::distance::DistanceType::L2SqrtUnexpanded>(
handle,
X,
embedding_ptr,
n_samples,
n_features,
umap_params.n_components,
umap_params.n_neighbors);
std::cout << "min. expected trustworthiness: " << test_params.min_trustworthiness << std::endl;
std::cout << "trustworthiness: " << trustworthiness << std::endl;
ASSERT_TRUE(trustworthiness > test_params.min_trustworthiness);
}
/**
 * Run one UMAP fit (and optionally transform/refine) on a synthetic blob
 * dataset, validate the embedding, then re-run with identical settings to
 * check reproducibility (where the CUDA toolkit version allows it).
 */
void test(TestParams& test_params, UMAPParams& umap_params)
{
  // Log the configuration under test. Fixed: the first line opened "[" but
  // never printed the closing "]" (the second line did).
  std::cout << "\numap_params : [" << std::boolalpha << umap_params.n_neighbors << "-"
            << umap_params.n_components << "-" << umap_params.n_epochs << "-"
            << umap_params.random_state << "]" << std::endl;
  std::cout << "test_params : [" << std::boolalpha << test_params.fit_transform << "-"
            << test_params.supervised << "-" << test_params.refine << "-"
            << test_params.knn_params << "-" << test_params.n_samples << "-"
            << test_params.n_features << "-" << test_params.n_clusters << "-"
            << test_params.min_trustworthiness << "]" << std::endl;

  raft::handle_t handle;
  cudaStream_t stream = handle.get_stream();
  int& n_samples      = test_params.n_samples;
  int& n_features     = test_params.n_features;

  // Derive the a/b curve parameters for the chosen UMAP configuration.
  UMAP::find_ab(handle, &umap_params);

  // Generate a labeled blob dataset directly on device.
  rmm::device_uvector<float> X_d(n_samples * n_features, stream);
  rmm::device_uvector<int> y_d(n_samples, stream);
  ML::Datasets::make_blobs(handle,
                           X_d.data(),
                           y_d.data(),
                           n_samples,
                           n_features,
                           test_params.n_clusters,
                           true,
                           nullptr,
                           nullptr,
                           1.f,
                           true,
                           -10.f,
                           10.f,
                           1234ULL);
  handle.sync_stream(stream);

  // Convert the int labels to float in place (the fit API takes float labels).
  raft::linalg::convert_array((float*)y_d.data(), y_d.data(), n_samples, stream);
  handle.sync_stream(stream);

  rmm::device_uvector<float> embeddings1(n_samples * umap_params.n_components, stream);
  float* e1 = embeddings1.data();

#if CUDART_VERSION >= 11020
  // Always use random init w/ CUDA 11.2. For some reason the
  // spectral solver doesn't always converge w/ this CUDA version.
  umap_params.init         = 0;
  umap_params.random_state = 43;
  umap_params.n_epochs     = 500;
#endif

  get_embedding(handle, X_d.data(), (float*)y_d.data(), e1, test_params, umap_params);
  assertions(handle, X_d.data(), e1, test_params, umap_params);

  // v21.08: Reproducibility looks to be busted for CTK 11.4. Need to figure out
  // why this is happening and re-enable this.
#if CUDART_VERSION == 11040
  return;
#else
  // Disable reproducibility tests after transformation
  if (!test_params.fit_transform) { return; }
#endif

  // Second run with identical settings must produce an identical embedding.
  rmm::device_uvector<float> embeddings2(n_samples * umap_params.n_components, stream);
  float* e2 = embeddings2.data();

  get_embedding(handle, X_d.data(), (float*)y_d.data(), e2, test_params, umap_params);

#if CUDART_VERSION >= 11020
  auto equal = are_equal(e1, e2, n_samples * umap_params.n_components, stream);
  if (!equal) {
    raft::print_device_vector("e1", e1, 25, std::cout);
    raft::print_device_vector("e2", e2, 25, std::cout);
  }
  ASSERT_TRUE(equal);
#else
  ASSERT_TRUE(MLCommon::devArrMatch(
    e1, e2, n_samples * umap_params.n_components, MLCommon::Compare<float>{}));
#endif
}
void SetUp() override
{
std::vector<TestParams> test_params_vec = {{false, false, false, true, 2000, 50, 20, 0.45},
{true, false, false, false, 2000, 50, 20, 0.45},
{false, true, false, true, 2000, 50, 20, 0.45},
{false, false, true, false, 2000, 50, 20, 0.45},
{true, true, false, true, 2000, 50, 20, 0.45},
{true, false, true, false, 2000, 50, 20, 0.45},
{false, true, true, true, 2000, 50, 20, 0.45},
{true, true, true, false, 2000, 50, 20, 0.45}};
std::vector<UMAPParams> umap_params_vec(4);
umap_params_vec[0].n_components = 2;
umap_params_vec[1].n_components = 10;
umap_params_vec[2].n_components = 21;
umap_params_vec[2].random_state = 43;
umap_params_vec[2].init = 0;
umap_params_vec[2].n_epochs = 500;
umap_params_vec[3].n_components = 25;
umap_params_vec[3].random_state = 43;
umap_params_vec[3].init = 0;
umap_params_vec[3].n_epochs = 500;
for (auto& umap_params : umap_params_vec) {
for (auto& test_params : test_params_vec) {
test(test_params, umap_params);
}
}
}
// No explicit teardown needed: all device buffers are RAII-managed.
void TearDown() override {}
};
// The original self-typedef (`typedef UMAPParametrizableTest
// UMAPParametrizableTest;`) was a no-op and has been removed.
// All work happens in SetUp(); the test body is intentionally empty.
TEST_F(UMAPParametrizableTest, Result) {}
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/knn_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <gtest/gtest.h>
#include <iostream>
#include <raft/core/handle.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <test_utils.h>
#include <vector>
#include <cuml/datasets/make_blobs.hpp>
#include <cuml/neighbors/knn.hpp>
namespace ML {
using namespace raft::random;
using namespace std;
// Parameter set for the multi-partition brute-force KNN tests.
struct KNNInputs {
  int n_rows;       // rows per index partition
  int n_cols;       // number of features
  int n_centers;    // number of blob centers (classes)
  int n_query_row;  // number of query rows
  int n_neighbors;  // k, neighbors per query
  int n_parts;      // number of index partitions
};
template <typename T, typename IdxT>
::std::ostream& operator<<(::std::ostream& os, const KNNInputs& dims)
{
return os;
}
/**
 * Generate a labeled blob dataset on device via cuML's make_blobs.
 *
 * @param out       device buffer receiving rows x cols samples
 * @param l         device buffer receiving one integer label per row
 * @param centroids optional blob centers forwarded to make_blobs (may be null)
 */
template <typename T>
void gen_blobs(
  raft::handle_t& handle, T* out, int* l, int rows, int cols, int centers, const T* centroids)
{
  Datasets::make_blobs(handle,
                       out,
                       l,
                       rows,
                       cols,
                       centers,
                       true,       // row-major output
                       centroids,
                       nullptr,    // default cluster std-dev
                       0.1f,
                       true,
                       -10.0f,
                       10.0f,
                       1234ULL);   // fixed seed for determinism
}
/**
 * Generate the index dataset for all partitions with a single make_blobs call
 * and record per-partition data/label pointers and sizes.
 *
 * Fixed: removed an unused local `cudaStream_t stream` (the stream is owned
 * by `handle`); the output vectors are now reserved up front.
 *
 * @param query_data   device buffer of n_rows * n_parts * n_cols floats (filled)
 * @param query_labels device buffer of n_rows * n_parts ints (filled)
 * @param part_inputs  out: pointer to each partition's data
 * @param part_labels  out: pointer to each partition's labels
 * @param part_sizes   out: row count of each partition
 * @param centers      blob centers shared by all partitions
 */
void create_index_parts(raft::handle_t& handle,
                        float* query_data,
                        int* query_labels,
                        vector<float*>& part_inputs,
                        vector<int*>& part_labels,
                        vector<int>& part_sizes,
                        const KNNInputs& params,
                        const float* centers)
{
  gen_blobs<float>(handle,
                   query_data,
                   query_labels,
                   params.n_rows * params.n_parts,
                   params.n_cols,
                   params.n_centers,
                   centers);

  part_inputs.reserve(part_inputs.size() + params.n_parts);
  part_labels.reserve(part_labels.size() + params.n_parts);
  part_sizes.reserve(part_sizes.size() + params.n_parts);
  for (int i = 0; i < params.n_parts; i++) {
    part_inputs.push_back(query_data + (i * params.n_rows * params.n_cols));
    part_labels.push_back(query_labels + (i * params.n_rows));
    part_sizes.push_back(params.n_rows);
  }
}
// Element-wise int -> float cast: out[i] = float(in[i]) for i < size.
__global__ void to_float(float* out, int* in, int size)
{
  const int idx = blockIdx.x * blockDim.x + threadIdx.x;
  if (idx < size) { out[idx] = static_cast<float>(in[idx]); }
}
// For each of the n_rows * k neighbor results, look up the label of the
// returned index: output[i] = idx_labels[indices[i]].
__global__ void build_actual_output(
  int* output, int n_rows, int k, const int* idx_labels, const int64_t* indices)
{
  const int tid = blockIdx.x * blockDim.x + threadIdx.x;
  if (tid < n_rows * k) { output[tid] = idx_labels[static_cast<int>(indices[tid])]; }
}
// Expected result: each query row's k neighbor slots all carry that row's
// own label.
__global__ void build_expected_output(int* output, int n_rows, int k, const int* labels)
{
  const int tid = blockIdx.x * blockDim.x + threadIdx.x;
  if (tid >= n_rows) { return; }
  const int expected = labels[tid];
  for (int j = 0; j < k; j++) {
    output[tid * k + j] = expected;
  }
}
/**
 * Parameterized tests for brute-force KNN search, classification and
 * regression over multiple index partitions built from blob data.
 */
template <typename T>
class KNNTest : public ::testing::TestWithParam<KNNInputs> {
 public:
  KNNTest()
    : params(::testing::TestWithParam<KNNInputs>::GetParam()),
      stream(handle.get_stream()),
      index_data(params.n_rows * params.n_cols * params.n_parts, stream),
      index_labels(params.n_rows * params.n_parts, stream),
      search_data(params.n_query_row * params.n_cols, stream),
      search_labels(params.n_query_row, stream),
      output_indices(params.n_query_row * params.n_neighbors * params.n_parts, stream),
      output_dists(params.n_query_row * params.n_neighbors * params.n_parts, stream)
  {
    // Zero all buffers. Byte counts must use each buffer's actual element
    // type: the original code used sizeof(T) (4 bytes for T=float) for the
    // int64_t output_indices buffer, which only cleared half of it.
    RAFT_CUDA_TRY(cudaMemsetAsync(index_data.data(), 0, index_data.size() * sizeof(float), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(index_labels.data(), 0, index_labels.size() * sizeof(int), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(search_data.data(), 0, search_data.size() * sizeof(float), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(search_labels.data(), 0, search_labels.size() * sizeof(int), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(output_indices.data(), 0, output_indices.size() * sizeof(int64_t), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(output_dists.data(), 0, output_dists.size() * sizeof(float), stream));
  }

 protected:
  // Run brute-force KNN over all partitions and verify every query's
  // neighbors carry the query's own blob label.
  void testBruteForce()
  {
    rmm::device_uvector<int> actual_labels(params.n_query_row * params.n_neighbors * params.n_parts,
                                           stream);
    rmm::device_uvector<int> expected_labels(
      params.n_query_row * params.n_neighbors * params.n_parts, stream);
    RAFT_CUDA_TRY(
      cudaMemsetAsync(actual_labels.data(), 0, actual_labels.size() * sizeof(int), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(expected_labels.data(), 0, expected_labels.size() * sizeof(int), stream));

    create_data();

    brute_force_knn(handle,
                    part_inputs,
                    part_sizes,
                    params.n_cols,
                    search_data.data(),
                    params.n_query_row,
                    output_indices.data(),
                    output_dists.data(),
                    params.n_neighbors,
                    true,
                    true);

    // Map the returned neighbor indices back to their index-set labels.
    build_actual_output<<<raft::ceildiv(params.n_query_row * params.n_neighbors, 32),
                          32,
                          0,
                          stream>>>(actual_labels.data(),
                                    params.n_query_row,
                                    params.n_neighbors,
                                    index_labels.data(),
                                    output_indices.data());

    build_expected_output<<<raft::ceildiv(params.n_query_row, 32), 32, 0, stream>>>(
      expected_labels.data(), params.n_query_row, params.n_neighbors, search_labels.data());

    ASSERT_TRUE(devArrMatch(expected_labels.data(),
                            actual_labels.data(),
                            params.n_query_row * params.n_neighbors,
                            MLCommon::Compare<int>()));
  }

  // KNN classification: the predicted class of each query row must equal
  // the row's blob label.
  void testClassification()
  {
    rmm::device_uvector<int> actual_labels(params.n_query_row, stream);
    rmm::device_uvector<int> expected_labels(params.n_query_row, stream);
    RAFT_CUDA_TRY(
      cudaMemsetAsync(actual_labels.data(), 0, actual_labels.size() * sizeof(int), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(expected_labels.data(), 0, expected_labels.size() * sizeof(int), stream));

    create_data();

    brute_force_knn(handle,
                    part_inputs,
                    part_sizes,
                    params.n_cols,
                    search_data.data(),
                    params.n_query_row,
                    output_indices.data(),
                    output_dists.data(),
                    params.n_neighbors,
                    true,
                    true);

    vector<int*> full_labels(1);
    full_labels[0] = index_labels.data();

    knn_classify(handle,
                 actual_labels.data(),
                 output_indices.data(),
                 full_labels,
                 params.n_rows * params.n_parts,
                 params.n_query_row,
                 params.n_neighbors);

    ASSERT_TRUE(devArrMatch(
      search_labels.data(), actual_labels.data(), params.n_query_row, MLCommon::Compare<int>()));
  }

  // KNN regression on float-cast labels: the prediction must reproduce the
  // query's own (float) label exactly.
  void testRegression()
  {
    rmm::device_uvector<int> actual_labels(params.n_query_row, stream);
    rmm::device_uvector<int> expected_labels(params.n_query_row, stream);
    RAFT_CUDA_TRY(
      cudaMemsetAsync(actual_labels.data(), 0, actual_labels.size() * sizeof(int), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(expected_labels.data(), 0, expected_labels.size() * sizeof(int), stream));

    create_data();

    brute_force_knn(handle,
                    part_inputs,
                    part_sizes,
                    params.n_cols,
                    search_data.data(),
                    params.n_query_row,
                    output_indices.data(),
                    output_dists.data(),
                    params.n_neighbors,
                    true,
                    true);

    // Cast integer labels to float for the regression API.
    rmm::device_uvector<float> index_labels_float(params.n_rows * params.n_parts, stream);
    rmm::device_uvector<float> query_labels_float(params.n_query_row, stream);
    to_float<<<raft::ceildiv((int)index_labels_float.size(), 32), 32, 0, stream>>>(
      index_labels_float.data(), index_labels.data(), index_labels_float.size());
    to_float<<<raft::ceildiv(params.n_query_row, 32), 32, 0, stream>>>(
      query_labels_float.data(), search_labels.data(), params.n_query_row);
    handle.sync_stream(stream);
    RAFT_CUDA_TRY(cudaPeekAtLastError());

    rmm::device_uvector<float> actual_labels_float(params.n_query_row, stream);

    vector<float*> full_labels(1);
    full_labels[0] = index_labels_float.data();

    // NOTE(review): testClassification passes n_rows * n_parts as the label
    // row count here, while this passes only n_rows — confirm intentional.
    knn_regress(handle,
                actual_labels_float.data(),
                output_indices.data(),
                full_labels,
                params.n_rows,
                params.n_query_row,
                params.n_neighbors);

    ASSERT_TRUE(MLCommon::devArrMatch(query_labels_float.data(),
                                      actual_labels_float.data(),
                                      params.n_query_row,
                                      MLCommon::Compare<float>()));
  }

 private:
  // Build the index partitions and the query set from the same random
  // centers so labels are comparable between index and queries.
  void create_data()
  {
    cudaStream_t stream = handle.get_stream();

    rmm::device_uvector<T> rand_centers(params.n_centers * params.n_cols, stream);
    Rng r(0, GeneratorType::GenPhilox);  // fixed seed for determinism
    r.uniform(rand_centers.data(), params.n_centers * params.n_cols, -10.0f, 10.0f, stream);

    // Create index parts
    create_index_parts(handle,
                       index_data.data(),
                       index_labels.data(),
                       part_inputs,
                       part_labels,
                       part_sizes,
                       params,
                       rand_centers.data());

    gen_blobs(handle,
              search_data.data(),
              search_labels.data(),
              params.n_query_row,
              params.n_cols,
              params.n_centers,
              rand_centers.data());
  }

  raft::handle_t handle;
  cudaStream_t stream = 0;

  KNNInputs params;

  rmm::device_uvector<float> index_data;
  rmm::device_uvector<int> index_labels;

  vector<float*> part_inputs;  // per-partition pointers into index_data
  vector<int*> part_labels;    // per-partition pointers into index_labels
  vector<int> part_sizes;      // rows per partition

  rmm::device_uvector<float> search_data;
  rmm::device_uvector<int> search_labels;

  rmm::device_uvector<float> output_dists;
  rmm::device_uvector<int64_t> output_indices;
};
// {n_rows, n_cols, n_centers, n_query_row, n_neighbors, n_parts}
const std::vector<KNNInputs> inputs = {{50, 5, 2, 25, 5, 2},
                                       {50, 5, 2, 25, 10, 2},
                                       {500, 5, 2, 25, 5, 7},
                                       {500, 50, 2, 25, 10, 7},
                                       {500, 50, 7, 25, 5, 7},
                                       {50, 5, 3, 15, 5, 7}};
// Instantiate the float specialization over all parameter sets above.
typedef KNNTest<float> KNNTestF;

TEST_P(KNNTestF, BruteForce) { this->testBruteForce(); }
TEST_P(KNNTestF, Classification) { this->testClassification(); }
TEST_P(KNNTestF, Regression) { this->testRegression(); }

INSTANTIATE_TEST_CASE_P(KNNTest, KNNTestF, ::testing::ValuesIn(inputs));
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/tsne_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/manifold/tsne.h>
#include <cuml/metrics/metrics.hpp>
#include <raft/distance/distance_types.hpp>
#include <raft/linalg/map.cuh>
#include <cuml/common/logger.hpp>
#include <datasets/boston.h>
#include <datasets/breast_cancer.h>
#include <datasets/diabetes.h>
#include <datasets/digits.h>
#include <gtest/gtest.h>
#include <iostream>
#include <raft/core/handle.hpp>
#include <raft/util/cudart_utils.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <thrust/reduce.h>
#include <tsne/distances.cuh>
#include <tsne/tsne_runner.cuh>
#include <tsne/utils.cuh>
#include <vector>
using namespace MLCommon;
using namespace MLCommon::Datasets;
using namespace ML;
using namespace ML::Metrics;
// One TSNE test case: a dense host dataset plus its acceptance threshold.
struct TSNEInput {
  int n, p;                             // samples, features
  std::vector<float> dataset;           // row-major n x p host data
  double trustworthiness_threshold;     // minimum acceptable trustworthiness
};
/**
 * Independently recompute the KL divergence between the input affinities P
 * (stored in the symmetrized KNN COO matrix) and the Student-t affinities Q
 * derived from pairwise embedding distances. Used as a reference value to
 * validate the KL divergence reported by the TSNE runner.
 *
 * @param params       TSNE parameters (n_neighbors, dim are read)
 * @param input_matrix symmetrized affinity matrix; vals() are normalized
 *                     in place here
 * @param emb_dists    n x n pairwise distances of the embedding (device)
 * @param n            number of samples
 * @return the KL divergence between P and Q
 */
float get_kl_div(TSNEParams& params,
                 raft::sparse::COO<float, int64_t>& input_matrix,
                 float* emb_dists,
                 size_t n,
                 cudaStream_t stream)
{
  // Symmetrized graph stores each neighbor edge twice.
  const size_t total_nn = 2 * n * params.n_neighbors;
  rmm::device_uvector<float> Qs_vec(total_nn, stream);
  float* Ps      = input_matrix.vals();
  float* Qs      = Qs_vec.data();
  float* KL_divs = Qs;  // Qs buffer is reused for the per-edge KL terms

  // Normalize Ps
  float P_sum = thrust::reduce(rmm::exec_policy(stream), Ps, Ps + total_nn);
  raft::linalg::scalarMultiply(Ps, Ps, 1.0f / P_sum, total_nn, stream);

  // Build Qs: gather each edge's embedding distance...
  auto get_emb_dist = [=] __device__(const int64_t i, const int64_t j) {
    return emb_dists[i * n + j];
  };
  raft::linalg::map_k(Qs, total_nn, get_emb_dist, stream, input_matrix.rows(), input_matrix.cols());

  // ...then apply the Student-t kernel (dof / (dof + d))^((dof+1)/2).
  const float dof      = fmaxf(params.dim - 1, 1);  // degree of freedom
  const float exponent = (dof + 1.0) / 2.0;
  raft::linalg::unaryOp(
    Qs,
    Qs,
    total_nn,
    [=] __device__(float dist) { return __powf(dof / (dof + dist), exponent); },
    stream);

  float kl_div = compute_kl_div(Ps, Qs, KL_divs, total_nn, stream);
  return kl_div;
}
/**
 * Parameterized TSNE tests: run each algorithm variant (with and without
 * precomputed KNN inputs), then check trustworthiness and the reported KL
 * divergence against an independently recomputed reference.
 */
class TSNETest : public ::testing::TestWithParam<TSNEInput> {
 protected:
  struct TSNEResults;

  // Assert a result set: trustworthiness above the dataset's threshold and
  // reported KL divergence within a fixed tolerance of the reference.
  void assert_results(const char* test, TSNEResults& results)
  {
    bool test_tw       = results.trustworthiness > trustworthiness_threshold;
    double kl_div_tol  = 0.2;
    bool test_kl_div   = results.kl_div_ref - kl_div_tol < results.kl_div &&
                       results.kl_div < results.kl_div_ref + kl_div_tol;
    if (!test_tw || !test_kl_div) {
      // Only print details when something is about to fail.
      std::cout << "Testing " << test << ":" << std::endl;
      std::cout << "\ttrustworthiness = " << results.trustworthiness << std::endl;
      std::cout << "\tkl_div = " << results.kl_div << std::endl;
      std::cout << "\tkl_div_ref = " << results.kl_div_ref << std::endl;
      std::cout << std::endl;
    }
    ASSERT_TRUE(test_tw);
    ASSERT_TRUE(test_kl_div);
  }

  // Run one TSNE configuration and gather its metrics.
  // When `knn` is true the KNN graph is precomputed and handed to the runner.
  TSNEResults runTest(TSNE_ALGORITHM algo, bool knn = false)
  {
    raft::handle_t handle;
    auto stream = handle.get_stream();
    TSNEResults results;
    auto DEFAULT_DISTANCE_METRIC = raft::distance::DistanceType::L2SqrtExpanded;
    float minkowski_p            = 2.0;

    // Setup parameters
    model_params.algorithm     = algo;
    model_params.dim           = 2;
    model_params.n_neighbors   = 90;
    model_params.min_grad_norm = 1e-12;
    model_params.verbosity     = CUML_LEVEL_DEBUG;
    model_params.metric        = DEFAULT_DISTANCE_METRIC;

    // Allocate memory
    rmm::device_uvector<float> X_d(n * p, stream);
    raft::update_device(X_d.data(), dataset.data(), n * p, stream);
    rmm::device_uvector<float> Y_d(n * model_params.dim, stream);
    rmm::device_uvector<int64_t> input_indices(0, stream);
    rmm::device_uvector<float> input_dists(0, stream);
    rmm::device_uvector<float> pw_emb_dists(n * n, stream);

    // Run TSNE
    manifold_dense_inputs_t<float> input(X_d.data(), Y_d.data(), n, p);
    knn_graph<int64_t, float> k_graph(n, model_params.n_neighbors, nullptr, nullptr);
    if (knn) {
      input_indices.resize(n * model_params.n_neighbors, stream);
      input_dists.resize(n * model_params.n_neighbors, stream);
      k_graph.knn_indices = input_indices.data();
      k_graph.knn_dists   = input_dists.data();
      TSNE::get_distances(handle, input, k_graph, stream, DEFAULT_DISTANCE_METRIC, minkowski_p);
    }
    handle.sync_stream(stream);
    TSNE_runner<manifold_dense_inputs_t<float>, knn_indices_dense_t, float> runner(
      handle, input, k_graph, model_params);
    results.kl_div = runner.run();

    // Compute embedding's pairwise distances
    pairwise_distance(handle,
                      Y_d.data(),
                      Y_d.data(),
                      pw_emb_dists.data(),
                      n,
                      n,
                      model_params.dim,
                      raft::distance::DistanceType::L2Expanded,
                      false);
    handle.sync_stream(stream);

    // Compute theoretical KL div
    results.kl_div_ref =
      get_kl_div(model_params, runner.COO_Matrix, pw_emb_dists.data(), n, stream);

    // Transfer embeddings to host. std::vector replaces the original
    // malloc/free pair and the non-standard variable-length array
    // (`float C_contiguous_embedding[n * dim]` is a compiler extension).
    std::vector<float> embeddings_h(n * model_params.dim);
    raft::update_host(embeddings_h.data(), Y_d.data(), n * model_params.dim, stream);
    handle.sync_stream(stream);

    // Transpose to C-contiguous layout, as trustworthiness requires it.
    std::vector<float> C_contiguous_embedding(n * model_params.dim);
    int k = 0;
    for (int i = 0; i < n; i++) {
      for (int j = 0; j < model_params.dim; j++)
        C_contiguous_embedding[k++] = embeddings_h[j * n + i];
    }

    // Move transposed embeddings back to device
    raft::update_device(Y_d.data(), C_contiguous_embedding.data(), n * model_params.dim, stream);
    handle.sync_stream(stream);

    // Produce trustworthiness score
    results.trustworthiness =
      trustworthiness_score<float, raft::distance::DistanceType::L2SqrtUnexpanded>(
        handle, X_d.data(), Y_d.data(), n, p, model_params.dim, 5);

    return results;
  }

  // Run every algorithm variant, with and without precomputed KNN inputs.
  void basicTest()
  {
    std::cout << "Running BH:" << std::endl;
    score_bh = runTest(TSNE_ALGORITHM::BARNES_HUT);
    std::cout << "Running EXACT:" << std::endl;
    score_exact = runTest(TSNE_ALGORITHM::EXACT);
    std::cout << "Running FFT:" << std::endl;
    score_fft = runTest(TSNE_ALGORITHM::FFT);
    std::cout << "Running KNN BH:" << std::endl;
    knn_score_bh = runTest(TSNE_ALGORITHM::BARNES_HUT, true);
    std::cout << "Running KNN EXACT:" << std::endl;
    knn_score_exact = runTest(TSNE_ALGORITHM::EXACT, true);
    std::cout << "Running KNN FFT:" << std::endl;
    knn_score_fft = runTest(TSNE_ALGORITHM::FFT, true);
  }

  void SetUp() override
  {
    params                    = ::testing::TestWithParam<TSNEInput>::GetParam();
    n                         = params.n;
    p                         = params.p;
    dataset                   = params.dataset;
    trustworthiness_threshold = params.trustworthiness_threshold;
    basicTest();
  }

  void TearDown() override {}

 protected:
  TSNEInput params;
  TSNEParams model_params;
  std::vector<float> dataset;  // host copy of the input data
  int n, p;                    // samples, features

  // Metrics collected for one configuration.
  struct TSNEResults {
    double trustworthiness;
    double kl_div_ref;  // independently recomputed KL divergence
    double kl_div;      // KL divergence reported by the runner
  };
  TSNEResults score_bh;
  TSNEResults score_exact;
  TSNEResults score_fft;
  TSNEResults knn_score_bh;
  TSNEResults knn_score_exact;
  TSNEResults knn_score_fft;
  double trustworthiness_threshold;
};
// {n_samples, n_features, dataset, trustworthiness_threshold}
const std::vector<TSNEInput> inputs = {
  {Digits::n_samples, Digits::n_features, Digits::digits, 0.98},
  {Boston::n_samples, Boston::n_features, Boston::boston, 0.98},
  {BreastCancer::n_samples, BreastCancer::n_features, BreastCancer::breast_cancer, 0.98},
  {Diabetes::n_samples, Diabetes::n_features, Diabetes::diabetes, 0.90}};
typedef TSNETest TSNETestF;

// All scores are computed in SetUp() (via basicTest); here we only assert.
TEST_P(TSNETestF, Result)
{
  assert_results("BH", score_bh);
  assert_results("EXACT", score_exact);
  assert_results("FFT", score_fft);
  assert_results("KNN BH", knn_score_bh);
  assert_results("KNN EXACT", knn_score_exact);
  assert_results("KNN FFT", knn_score_fft);
}

INSTANTIATE_TEST_CASE_P(TSNETests, TSNETestF, ::testing::ValuesIn(inputs));
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/hdbscan_inputs.hpp | /*
* Copyright (c) 2021-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <cuml/cluster/hdbscan.hpp>
#include <datasets/digits.h>
#include <vector>
namespace ML {
namespace HDBSCAN {
// Input fixture for end-to-end HDBSCAN tests.
template <typename T, typename IdxT>
struct HDBSCANInputs {
  IdxT n_row;  // number of samples
  IdxT n_col;  // number of features
  int k, min_pts, min_cluster_size;  // neighborhood size and HDBSCAN hyper-parameters
  std::vector<T> data;                // row-major n_row x n_col dataset
  std::vector<IdxT> expected_labels;  // expected cluster label per row
};
// Input fixture for condensing a cluster tree from an MST edge list.
template <typename T, typename IdxT>
struct ClusterCondensingInputs {
  IdxT n_row;
  int min_cluster_size;
  std::vector<IdxT> mst_src;   // MST edge source vertices
  std::vector<IdxT> mst_dst;   // MST edge destination vertices
  std::vector<T> mst_data;     // MST edge weights
  std::vector<IdxT> expected;  // expected result for the condensing test
};
// Input fixture for cluster selection from a precomputed condensed tree.
template <typename T, typename IdxT>
struct ClusterSelectionInputs {
  IdxT n_row;
  int min_samples;
  int min_cluster_size;
  // Condensed-tree arrays (parallel: one entry per tree edge).
  std::vector<IdxT> condensed_parents;
  std::vector<IdxT> condensed_children;
  std::vector<T> condensed_lambdas;
  std::vector<IdxT> condensed_sizes;
  Common::CLUSTER_SELECTION_METHOD cluster_selection_method;
  bool allow_single_cluster;
  T cluster_selection_epsilon;
  std::vector<T> probabilities;  // expected membership probabilities
  std::vector<IdxT> labels;      // expected cluster labels
};
// Input fixture for all-points membership-vector (soft clustering) tests.
template <typename T, typename IdxT>
struct AllPointsMembershipVectorsInputs {
  IdxT n_row;
  IdxT n_col;
  int min_samples;
  int min_cluster_size;
  std::vector<T> data;  // row-major n_row x n_col dataset
  // Condensed-tree arrays (parallel: one entry per tree edge).
  std::vector<IdxT> condensed_parents;
  std::vector<IdxT> condensed_children;
  std::vector<T> condensed_lambdas;
  std::vector<IdxT> condensed_sizes;
  Common::CLUSTER_SELECTION_METHOD cluster_selection_method;
  bool allow_single_cluster;
  T cluster_selection_epsilon;
  std::vector<T> expected_probabilities;
};
// Input fixture for approximate-predict (out-of-sample labeling) tests.
template <typename T, typename IdxT>
struct ApproximatePredictInputs {
  IdxT n_row;
  IdxT n_col;
  IdxT n_points_to_predict;
  int min_samples;
  int min_cluster_size;
  std::vector<T> data;                // training dataset (row-major)
  std::vector<T> points_to_predict;   // out-of-sample points (row-major)
  // Condensed-tree arrays (parallel: one entry per tree edge).
  std::vector<IdxT> condensed_parents;
  std::vector<IdxT> condensed_children;
  std::vector<T> condensed_lambdas;
  std::vector<IdxT> condensed_sizes;
  Common::CLUSTER_SELECTION_METHOD cluster_selection_method;
  bool allow_single_cluster;
  T cluster_selection_epsilon;
  std::vector<IdxT> expected_labels;         // per predicted point
  std::vector<T> expected_probabilities;     // per predicted point
};
// Input fixture for membership-vector tests on out-of-sample points.
template <typename T, typename IdxT>
struct MembershipVectorInputs {
  IdxT n_row;
  IdxT n_col;
  IdxT n_points_to_predict;
  int min_samples;
  int min_cluster_size;
  std::vector<T> data;                // training dataset (row-major)
  std::vector<T> points_to_predict;   // out-of-sample points (row-major)
  // Condensed-tree arrays (parallel: one entry per tree edge).
  std::vector<IdxT> condensed_parents;
  std::vector<IdxT> condensed_children;
  std::vector<T> condensed_lambdas;
  std::vector<IdxT> condensed_sizes;
  Common::CLUSTER_SELECTION_METHOD cluster_selection_method;
  bool allow_single_cluster;
  T cluster_selection_epsilon;
  std::vector<T> expected_probabilities;
};
// End-to-end HDBSCAN cases. Each entry:
// {n_row, n_col, k, min_pts, min_cluster_size, data, expected_labels}.
const std::vector<HDBSCANInputs<float, int>> hdbscan_inputsf2 = {
  // Test n_clusters == n_points
  {10,
   5,
   5,
   2,
   3,
   {0.21390334, 0.50261639, 0.91036676, 0.59166485, 0.71162682, 0.10248392, 0.77782677, 0.43772379,
    0.4035871,  0.3282796,  0.47544681, 0.59862974, 0.12319357, 0.06239463, 0.28200272, 0.1345717,
    0.50498218, 0.5113505,  0.16233086, 0.62165332, 0.42281548, 0.933117,   0.41386077, 0.23264562,
    0.73325968, 0.37537541, 0.70719873, 0.14522645, 0.73279625, 0.9126674,  0.84854131, 0.28890216,
    0.85267903, 0.74703138, 0.83842071, 0.34942792, 0.27864171, 0.70911132, 0.21338564, 0.32035554,
    0.73788331, 0.46926692, 0.57570162, 0.42559178, 0.87120209, 0.22734951, 0.01847905, 0.75549396,
    0.76166195, 0.66613745},
   {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}},

  // Test outlier points
  {9,
   2,
   3,
   3,
   3,
   {-1, -50, 3, 4, 5000, 10000, 1, 3, 4, 5, 0.000005, 0.00002, 2000000, 500000, 10, 50, 30, 5},
   {-1, -1, -1, -1, -1, -1, -1, -1, -1}},

  // Test n_clusters == (n_points / 2)
  {10,
   5,
   4,
   3,
   4,
   {0.21390334, 0.50261639, 0.91036676, 0.59166485, 0.71162682, 0.10248392, 0.77782677, 0.43772379,
    0.4035871,  0.3282796,  0.47544681, 0.59862974, 0.12319357, 0.06239463, 0.28200272, 0.1345717,
    0.50498218, 0.5113505,  0.16233086, 0.62165332, 0.42281548, 0.933117,   0.41386077, 0.23264562,
    0.73325968, 0.37537541, 0.70719873, 0.14522645, 0.73279625, 0.9126674,  0.84854131, 0.28890216,
    0.85267903, 0.74703138, 0.83842071, 0.34942792, 0.27864171, 0.70911132, 0.21338564, 0.32035554,
    0.73788331, 0.46926692, 0.57570162, 0.42559178, 0.87120209, 0.22734951, 0.01847905, 0.75549396,
    0.76166195, 0.66613745},
   {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}},

  // Digits dataset (expected labels per sample; -1 entries are unassigned)
  {MLCommon::Datasets::Digits::n_samples,
   MLCommon::Datasets::Digits::n_features,
   50,
   50,
   25,
   MLCommon::Datasets::Digits::digits,
   {5,  3,  -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1, -1, 6,  -1, -1, -1, -1, -1, -1, 5,  -1, 1,  -1,
    -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, 4,  -1, 5,  -1, -1, -1, -1, 2,  -1, -1, 0,  -1, -1, -1,
    5,  5,  -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, -1, -1, -1, -1, -1, 4,  4,  -1, -1, -1, -1, -1,
    5,  -1, -1, -1, -1, -1, -1, 5,  -1, 0,  -1, -1, 1,  -1, -1, -1, 4,  -1, -1, -1, -1, -1, 0,  -1,
    -1, -1, -1, 3,  -1, -1, -1, -1, -1, -1, -1, -1, 0,  -1, -1, -1, 0,  -1, -1, -1, -1, -1, 0,  -1,
    -1, -1, -1, -1, 2,  -1, 5,  -1, -1, -1, 5,  -1, 1,  -1, -1, -1, 4,  0,  -1, -1, 5,  -1, -1, -1,
    -1, -1, 4,  -1, -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1, -1, -1, 4,  -1, 5,  -1,
    -1, -1, -1, -1, -1, 0,  0,  6,  -1, -1, 5,  -1, 1,  1,  0,  -1, -1, 5,  -1, -1, 4,  -1, -1, -1,
    -1, -1, -1, 4,  4,  4,  -1, -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, 0,  -1, -1, -1, -1, -1, -1, 5,  -1, -1, 4,  -1, 4,  -1, 0,  -1, -1, -1,
    -1, -1, 3,  -1, -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, 5,  3,  1,  -1, -1, -1, 4,  -1,
    -1, -1, 5,  -1, 1,  6,  -1, -1, 4,  0,  -1, -1, 5,  -1, -1, 6,  -1, -1, 4,  -1, -1, -1, 5,  -1,
    -1, -1, 4,  -1, -1, -1, -1, -1, -1, 2,  -1, 0,  0,  -1, -1, -1, 5,  5,  -1, -1, 0,  -1, 1,  5,
    -1, -1, -1, -1, 6,  -1, -1, -1, -1, 4,  4,  4,  -1, -1, 3,  -1, 5,  -1, -1, 1,  -1, -1, 5,  5,
    -1, 0,  4,  6,  -1, -1, 0,  -1, 4,  6,  3,  -1, -1, 3,  -1, 4,  -1, 2,  -1, 3,  -1, 5,  -1, 6,
    4,  -1, -1, -1, -1, -1, -1, 2,  0,  -1, -1, -1, 1,  -1, 0,  -1, -1, -1, -1, -1, 2,  -1, 5,  -1,
    -1, -1, 5,  -1, 1,  -1, -1, -1, 4,  -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1,
    -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, -1, -1, 5,  -1, -1, 6,  -1, -1, -1, -1, -1, -1,
    -1, -1, 5,  5,  -1, 1,  -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, 4,  -1, -1,
    -1, -1, 5,  -1, -1, -1, -1, 1,  5,  5,  -1, -1, 4,  -1, 1,  -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, 5,  -1, -1, 4,  -1, -1, -1, -1, -1, -1, -1, -1, 1,  -1, 1,  -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, 5,  -1, -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1,
    -1, -1, -1, -1, 4,  0,  -1, -1, 5,  -1, -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1, -1, -1, 4,  -1,
    5,  -1, -1, -1, -1, -1, -1, 0,  0,  -1, -1, -1, 5,  5,  -1, -1, -1, -1, -1, 5,  -1, -1, -1, -1,
    -1, 0,  -1, -1, 2,  -1, 4,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6,  -1, -1, 0,  -1, -1,
    -1, -1, 0,  -1, -1, -1, -1, -1, -1, -1, 0,  4,  -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, -1, -1,
    0,  -1, -1, -1, -1, -1, -1, 1,  -1, -1, 0,  6,  -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, 5,  3,
    -1, -1, -1, -1, 4,  0,  -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1, 1,  -1, -1, -1,
    4,  -1, -1, -1, 5,  -1, -1, -1, 4,  -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5,  5,
    -1, -1, 0,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6,  -1, 4,  4,  -1, -1, -1, -1, -1, 5,  -1,
    -1, -1, -1, -1, 5,  5,  -1, -1, 4,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3,  -1, 4,  -1, -1,
    -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0,  -1, -1, -1,
    -1, -1, -1, -1, 5,  -1, -1, -1, -1, 3,  1,  -1, -1, -1, -1, 0,  -1, 6,  5,  -1, 1,  -1, 2,  -1,
    -1, 0,  -1, -1, -1, 3,  -1, 6,  -1, -1, -1, 0,  -1, -1, 5,  -1, -1, -1, -1, -1, 5,  -1, -1, -1,
    -1, 2,  -1, -1, 0,  -1, -1, 3,  -1, -1, 1,  -1, -1, -1, -1, 5,  -1, 1,  4,  -1, -1, 0,  -1, -1,
    2,  4,  -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, 5,  -1, 0,  4,  6,  -1, 3,  -1, -1,
    -1, -1, 3,  6,  -1, 3,  -1, 4,  -1, -1, -1, 3,  -1, 5,  -1, -1, -1, -1, 4,  -1, -1, -1, -1, -1,
    0,  -1, -1, -1, -1, -1, 0,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1,  -1, -1, -1, 4,
    -1, -1, -1, 5,  -1, -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1, 1,  -1, -1, -1, 4,  -1, -1, -1, 5,
    -1, -1, -1, 4,  -1, 5,  -1, -1, 6,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1, 1,
    4,  -1, -1, -1, -1, -1, -1, 4,  4,  4,  -1, -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, 0,
    4,  -1, 1,  -1, -1, 4,  -1, -1, -1, -1, -1, -1, 4,  -1, -1, -1, -1, -1, 5,  -1, -1, 4,  -1, 4,
    -1, -1, -1, -1, -1, 0,  -1, -1, -1, 1,  -1, 0,  -1, -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1, 1,
    -1, -1, -1, 4,  0,  -1, -1, 5,  3,  -1, -1, -1, -1, 4,  0,  -1, -1, -1, 3,  -1, -1, -1, -1, 4,
    -1, -1, -1, 5,  -1, -1, -1, 4,  -1, 5,  -1, -1, -1, -1, -1, 3,  -1, -1, -1, -1, 3,  -1, -1, -1,
    -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4,  4,  -1, -1, 3,  -1, 5,  -1, -1, -1, -1,
    -1, 5,  5,  3,  -1, -1, -1, 1,  3,  -1, -1, -1, -1, -1, -1, -1, 3,  -1, 4,  -1, -1, -1, 3,  -1,
    5,  -1, -1, -1, -1, 4,  3,  -1, -1, -1, -1, -1, 1,  -1, -1, -1, -1, -1, -1, -1, 2,  -1, -1, -1,
    -1, 5,  -1, -1, -1, 5,  -1, 1,  6,  2,  -1, 4,  -1, -1, -1, 5,  -1, -1, -1, -1, -1, 4,  0,  -1,
    -1, -1, -1, -1, -1, 2,  -1, 4,  -1, -1, -1, -1, -1, -1, -1, 4,  -1, -1, -1, -1, -1, -1, -1, 3,
    -1, 0,  -1, -1, -1, -1, -1, -1, 1,  0,  -1, -1, -1, -1, -1, 4,  -1, -1, -1, -1, -1, -1, 4,  4,
    4,  -1, -1, 3,  -1, -1, -1, -1, -1, -1, -1, -1, 5,  3,  -1, 4,  -1, -1, -1, -1, 2,  4,  -1, 3,
    -1, -1, 3,  0,  4,  -1, 2,  -1, -1, 2,  -1, -1, -1, 4,  -1, 4,  -1, -1, -1, -1, 2,  -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, 5,  -1, -1, -1, -1, -1, -1, 0,  -1, -1, 5,  -1, -1, -1, -1, -1, -1, 0,  -1, -1, 5,  -1, -1,
    -1, -1, -1, -1, -1, -1, -1, 3,  -1, -1, -1, -1, -1, -1, -1, 5,  5,  -1, -1, 0,  -1, -1, 5,  -1,
    -1, 4,  -1, 6,  -1, -1, -1, -1, 4,  -1, 4,  -1, -1, 3,  -1, 5,  -1, -1, -1, -1, -1, 5,  5,  -1,
    -1, 4,  6,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0,  -1, -1, -1, -1, 3,  2,  5,  -1, -1, -1,
    -1, -1, 3,  -1, -1, -1, -1, 0,  -1, -1, -1, -1, -1, 0,  -1, -1, -1, 3,  -1, -1, -1, -1, -1, 5,
    -1, 1,  -1, -1, -1, 4,  0,  -1, -1, 5,  -1, -1, -1, -1, -1, 4,  -1, -1, -1, 5,  -1, 1,  -1, 2,
    -1, 4,  -1, -1, 6,  5,  6,  -1, -1, 4,  -1, 5,  -1, -1, -1, -1, 2,  -1, 0,  -1, -1, -1, -1, 5,
    5,  1,  1,  -1, -1, -1, 5,  -1, -1, 4,  -1, -1, 0,  -1, 6,  -1, 4,  4,  4,  2,  -1, -1, -1, 5,
    -1, -1, 1,  -1, -1, 5,  5,  -1, 0,  4,  6,  -1, -1, 0,  2,  4,  6,  -1, -1, 6,  -1, 0,  4,  -1,
    -1, -1, -1, 2,  5,  -1, 6,  4,  -1, 4,  -1, -1, -1, 2,  -1, -1, 1,  -1, -1, -1, -1, -1, -1, -1,
    2,  -1, -1, 2,  -1, 5,  -1, -1, -1, 5,  3,  -1, -1, -1, -1, -1, -1, -1, -1, 5,  -1, -1, -1, 2,
    -1, -1, -1, 5,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, 5,  -1, -1, -1, -1,
    2,  -1, 0,  -1, -1, -1, 3,  -1, -1, -1, 1,  -1, -1, -1, -1, -1, 1,  -1, -1, -1, -1, -1, -1, -1,
    4,  -1, 4,  -1, -1, 3,  -1, -1, -1, -1, -1, -1, 5,  3,  0,  -1, -1, 1,  -1, -1, -1, -1, -1, -1,
    -1, -1, 3,  -1, -1, -1, -1, -1, -1, -1, 5,  -1, 6,  -1, -1, 2,  -1, 0,  -1, 2,  -1, -1, -1, -1,
    -1, -1, -1, -1, -1, 2,  -1, 5,  -1, -1, -1, 5,  -1, 1,  -1, -1, -1, -1, 0,  -1, -1, 5,  -1, -1,
    -1, -1, -1, 4,  -1, -1, 6,  5,  -1, -1, -1, 2,  -1, 4,  0,  -1, 6,  5,  6,  -1, -1, 4,  -1, 5,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3,  5,  5,  -1, 1,  0,  -1, -1, -1, -1, -1, 4,  -1, -1,
    -1, -1, -1, -1, -1, 4,  -1, -1, 6,  -1, -1, 5,  6,  -1, -1, -1, -1, 5,  5,  -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, 3,  0,  4,  -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4,  -1, -1,
    -1, -1, -1, -1, 1,  -1, 1,  -1, -1, -1, -1, -1, -1, -1, -1, 2,  6,  5,  -1, -1, -1}}};
const std::vector<ClusterCondensingInputs<float, int>> cluster_condensing_inputs = {
{9,
3,
{0, 2, 4, 6, 7, 1, 8, 8},
{1, 3, 5, 5, 8, 5, 3, 4},
{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0},
{1}},
// Iris
{150,
3,
{39, 17, 34, 1, 27, 7, 49, 30, 4, 28, 34, 40, 12, 47, 29, 45, 2, 26, 3,
21, 48, 37, 38, 11, 96, 25, 42, 19, 94, 6, 92, 92, 58, 89, 35, 46, 10, 82,
86, 31, 36, 8, 149, 101, 127, 95, 120, 20, 123, 145, 55, 78, 112, 67, 88, 61, 147,
54, 97, 111, 124, 115, 116, 128, 104, 143, 54, 74, 65, 23, 5, 91, 51, 16, 71, 83,
139, 111, 126, 43, 80, 77, 77, 76, 103, 66, 90, 72, 138, 81, 63, 53, 144, 24, 32,
73, 133, 137, 56, 70, 132, 79, 110, 44, 146, 33, 121, 136, 102, 13, 84, 85, 52, 18,
141, 50, 59, 22, 64, 130, 113, 107, 14, 62, 105, 100, 87, 148, 108, 114, 15, 125, 119,
134, 135, 122, 68, 129, 60, 93, 57, 41, 109, 98, 106, 118, 117, 131, 23},
{0, 0, 9, 34, 0, 39, 7, 34, 0, 27, 49, 17, 1, 29, 30, 1, 47, 7, 47,
17, 27, 4, 3, 29, 99, 34, 38, 21, 99, 47, 99, 69, 75, 69, 49, 19, 48, 92,
58, 28, 10, 38, 101, 142, 149, 96, 140, 31, 127, 112, 96, 55, 140, 92, 96, 96, 145,
58, 78, 147, 120, 145, 147, 111, 128, 120, 74, 97, 75, 26, 10, 78, 75, 10, 97, 101,
112, 123, 123, 26, 69, 147, 86, 58, 116, 55, 94, 123, 127, 80, 91, 89, 140, 11, 46,
63, 83, 116, 51, 138, 128, 81, 147, 46, 123, 32, 101, 115, 120, 38, 66, 56, 86, 5,
145, 52, 89, 6, 82, 102, 101, 130, 33, 92, 107, 136, 72, 136, 128, 121, 33, 102, 72,
103, 130, 105, 87, 125, 93, 81, 93, 8, 143, 57, 84, 122, 105, 117, 98},
{0.17320508, 0.17320508, 0.2, 0.2236068, 0.2236068, 0.2236068, 0.2236068, 0.2236068,
0.2236068, 0.2236068, 0.24494897, 0.24494897, 0.24494897, 0.24494897, 0.24494897, 0.26457513,
0.26457513, 0.26457513, 0.26457513, 0.28284271, 0.28284271, 0.3, 0.3, 0.3,
0.3, 0.3, 0.31622777, 0.31622777, 0.31622777, 0.31622777, 0.31622777, 0.31622777,
0.31622777, 0.33166248, 0.33166248, 0.33166248, 0.33166248, 0.34641016, 0.34641016, 0.34641016,
0.36055513, 0.36055513, 0.36055513, 0.36055513, 0.36055513, 0.36055513, 0.36055513, 0.36055513,
0.37416574, 0.37416574, 0.37416574, 0.37416574, 0.37416574, 0.37416574, 0.37416574, 0.37416574,
0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833,
0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833, 0.38729833,
0.38729833, 0.4, 0.41231056, 0.41231056, 0.41231056, 0.42426407, 0.42426407, 0.42426407,
0.42426407, 0.42426407, 0.42426407, 0.42426407, 0.42426407, 0.42426407, 0.42426407, 0.43588989,
0.43588989, 0.43588989, 0.43588989, 0.43588989, 0.43588989, 0.4472136, 0.45825757, 0.45825757,
0.45825757, 0.45825757, 0.45825757, 0.46904158, 0.46904158, 0.46904158, 0.46904158, 0.47958315,
0.47958315, 0.47958315, 0.48989795, 0.5, 0.5, 0.5, 0.50990195, 0.50990195,
0.50990195, 0.51961524, 0.51961524, 0.51961524, 0.53851648, 0.53851648, 0.53851648, 0.53851648,
0.54772256, 0.55677644, 0.58309519, 0.58309519, 0.60827625, 0.60827625, 0.6164414, 0.6244998,
0.6244998, 0.64031242, 0.64031242, 0.64807407, 0.65574385, 0.7, 0.7, 0.7,
0.70710678, 0.70710678, 0.72111026, 0.72111026, 0.78740079, 0.79372539, 0.80622577, 0.81853528,
0.88317609, 0.96436508, 1.0198039, 1.02469508, 1.64012195},
{1}},
// Digits
{1797,
150,
{0, 305, 434, 434, 434, 396, 396, 396, 79, 464, 79, 396, 79, 396, 512, 79,
434, 229, 396, 441, 434, 229, 79, 512, 305, 229, 305, 229, 166, 252, 79, 0,
0, 79, 0, 0, 434, 229, 0, 0, 305, 0, 79, 79, 166, 79, 0, 0,
0, 229, 79, 252, 79, 79, 0, 252, 79, 0, 252, 79, 202, 126, 126, 305,
0, 166, 130, 79, 0, 166, 0, 79, 126, 0, 130, 0, 0, 126, 0, 160,
79, 0, 276, 48, 0, 160, 126, 0, 229, 0, 79, 126, 126, 20, 305, 79,
79, 48, 79, 79, 79, 36, 36, 276, 36, 79, 0, 36, 79, 79, 0, 79,
49, 130, 160, 0, 20, 79, 20, 79, 0, 48, 20, 79, 276, 79, 0, 48,
0, 130, 20, 0, 36, 0, 20, 0, 0, 49, 48, 48, 0, 0, 48, 0,
49, 0, 166, 20, 36, 10, 72, 20, 79, 0, 20, 36, 0, 0, 48, 0,
0, 36, 0, 594, 1507, 1282, 455, 1446, 455, 1444, 1282, 1444, 944, 455, 425, 425,
1282, 425, 425, 425, 425, 455, 425, 425, 635, 455, 425, 39, 425, 39, 1424, 708,
316, 339, 259, 867, 339, 259, 259, 345, 316, 345, 259, 259, 259, 279, 13, 13,
279, 316, 13, 259, 13, 259, 13, 13, 279, 13, 279, 13, 13, 259, 13, 318,
13, 193, 259, 345, 13, 259, 259, 13, 13, 139, 13, 285, 279, 269, 425, 259,
259, 13, 259, 635, 39, 259, 279, 259, 359, 3, 269, 13, 13, 425, 3, 13,
425, 285, 13, 39, 139, 189, 13, 39, 285, 13, 13, 279, 139, 13, 425, 339,
13, 139, 3, 13, 39, 279, 39, 1454, 1461, 562, 938, 938, 1535, 562, 562, 562,
562, 562, 549, 562, 1075, 1568, 117, 117, 117, 117, 117, 330, 281, 281, 281, 330,
117, 13, 117, 39, 3, 13, 13, 318, 301, 117, 281, 175, 3, 425, 60, 45,
139, 39, 281, 405, 879, 945, 148, 1327, 242, 257, 326, 1050, 1237, 1050, 1050, 1050,
1050, 326, 326, 326, 875, 326, 326, 326, 777, 326, 326, 326, 777, 326, 326, 326,
349, 777, 326, 326, 349, 326, 326, 777, 326, 326, 326, 349, 349, 257, 349, 257,
257, 326, 257, 99, 99, 326, 257, 326, 257, 257, 257, 349, 99, 99, 242, 257,
647, 349, 99, 346, 326, 242, 1, 298, 99, 13, 285, 405, 279, 139, 285, 242,
117, 148, 165, 148, 358, 654, 358, 3, 3, 298, 281, 117, 281, 549, 117, 39,
139, 148, 1688, 187, 132, 181, 372, 1017, 463, 501, 501, 501, 463, 463, 463, 463,
181, 463, 372, 310, 181, 310, 463, 372, 181, 310, 181, 132, 181, 372, 463, 132,
181, 463, 258, 388, 388, 132, 132, 132, 463, 388, 132, 408, 388, 408, 132, 84,
84, 181, 437, 132, 388, 181, 408, 181, 180, 22, 22, 132, 388, 84, 132, 84,
117, 259, 285, 117, 180, 405, 180, 139, 39, 293, 3, 148, 1295, 13, 117, 162,
1295, 148, 654, 135, 39, 148, 293, 101, 358, 139, 657, 1647, 6, 58, 65, 195,
65, 360, 360, 262, 262, 262, 262, 262, 262, 262, 262, 262, 262, 262, 262, 65,
65, 146, 65, 262, 262, 232, 65, 65, 146, 234, 234, 234, 232, 234, 65, 65,
234, 65, 262, 65, 234, 322, 65, 196, 262, 196, 146, 234, 234, 136, 65, 65,
234, 232, 146, 195, 196, 65, 65, 65, 136, 146, 196, 65, 196, 232, 234, 65,
234, 146, 197, 58, 136, 58, 65, 136, 262, 58, 197, 58, 65, 262, 65, 136,
164, 58, 65, 146, 136, 88, 65, 65, 65, 65, 58, 146, 65, 58, 164, 146,
164, 146, 234, 58, 6, 34, 58, 146, 58, 58, 6, 6, 65, 164, 6, 146,
136, 34, 146, 6, 34, 34, 6, 146, 146, 6, 197, 146, 146, 65, 34, 234,
34, 34, 197, 164, 6, 6, 6, 262, 164, 34, 6, 65, 13, 65, 180, 705,
3, 20, 55, 242, 242, 180, 39, 122, 654, 13, 122, 59, 117, 405, 181, 189,
705, 6, 139, 132, 330, 288, 388, 114, 40, 59, 39, 13, 117, 39, 15, 22,
293, 39, 99, 6, 128, 248, 279, 40, 84, 59, 22, 67, 1, 146, 102, 339,
1, 257, 99, 84, 22, 84, 388, 6, 40, 122, 40, 45, 372, 48, 264, 148,
148, 13, 148, 264, 93, 1168, 21, 21, 21, 21, 21, 21, 11, 11, 102, 437,
40, 39, 310, 281, 32, 169, 169, 437, 285, 358, 13, 541, 117, 148, 11, 128,
40, 281, 39, 358, 135, 388, 549, 440, 388, 40, 298, 3, 15, 30, 39, 281,
11, 3, 189, 373, 102, 259, 264, 13, 21, 39, 195, 40, 15, 3, 117, 1,
11, 148, 138, 32, 114, 114, 15, 13, 13, 34, 180, 148, 34, 148, 269, 1647,
790, 41, 124, 353, 1161, 1387, 353, 353, 124, 353, 124, 367, 817, 1483, 124, 320,
124, 1483, 124, 1439, 41, 817, 41, 41, 817, 124, 41, 124, 41, 124, 41, 297,
41, 124, 580, 124, 353, 817, 580, 297, 41, 41, 41, 124, 270, 297, 450, 270,
353, 353, 450, 41, 41, 64, 41, 41, 124, 540, 64, 450, 41, 97, 540, 450,
41, 97, 297, 14, 540, 260, 297, 270, 64, 124, 377, 260, 450, 111, 97, 111,
41, 410, 14, 14, 111, 111, 520, 817, 22, 657, 138, 22, 39, 102, 635, 264,
260, 40, 132, 40, 128, 14, 6, 64, 114, 148, 21, 15, 146, 123, 285, 32,
441, 164, 169, 109, 1, 264, 301, 148, 6, 408, 3, 541, 102, 124, 25, 297,
25, 22, 104, 410, 34, 148, 21, 22, 1, 93, 3, 293, 330, 410, 279, 455,
181, 76, 405, 15, 142, 3, 104, 15, 1, 40, 39, 123, 281, 76, 39, 1,
269, 410, 15, 64, 199, 39, 358, 264, 3, 32, 15, 11, 59, 199, 264, 280,
40, 13, 32, 180, 32, 152, 13, 102, 242, 122, 146, 149, 39, 15, 11, 13,
14, 22, 92, 84, 25, 242, 293, 440, 14, 114, 40, 64, 11, 129, 261, 45,
21, 1189, 388, 199, 22, 11, 41, 20, 13, 148, 34, 251, 64, 114, 388, 139,
297, 260, 199, 76, 92, 15, 132, 139, 102, 64, 40, 152, 41, 102, 410, 39,
180, 32, 287, 1, 11, 257, 220, 301, 0, 11, 40, 324, 84, 68, 436, 146,
13, 111, 21, 97, 189, 297, 64, 388, 26, 132, 32, 32, 281, 21, 76, 109,
76, 293, 128, 138, 45, 15, 76, 102, 128, 305, 73, 32, 129, 123, 32, 3,
111, 138, 99, 330, 39, 3, 102, 152, 358, 73, 287, 400, 76, 388, 32, 6,
13, 11, 84, 152, 25, 117, 272, 40, 11, 180, 40, 296, 25, 14, 84, 220,
289, 6, 163, 3, 398, 373, 324, 117, 353, 258, 13, 40, 269, 264, 280, 128,
40, 59, 281, 281, 40, 264, 105, 84, 11, 293, 22, 105, 146, 117, 15, 1,
11, 76, 8, 40, 883, 1649, 81, 273, 597, 624, 597, 983, 94, 94, 94, 94,
273, 273, 94, 273, 94, 94, 94, 81, 81, 94, 81, 81, 81, 174, 273, 94,
559, 94, 94, 81, 273, 94, 81, 94, 94, 94, 222, 273, 81, 174, 174, 81,
81, 81, 94, 174, 273, 81, 94, 94, 94, 81, 81, 174, 94, 81, 81, 174,
182, 94, 174, 174, 81, 174, 44, 94, 81, 182, 81, 94, 174, 44, 81, 108,
44, 94, 44, 94, 81, 173, 182, 81, 81, 94, 44, 273, 337, 94, 174, 81,
273, 94, 94, 17, 94, 112, 81, 174, 94, 94, 44, 157, 44, 137, 52, 44,
94, 44, 157, 157, 44, 44, 337, 52, 174, 17, 300, 17, 52, 174, 22, 56,
14, 129, 189, 3, 3, 199, 76, 52, 443, 99, 11, 152, 148, 295, 583, 76,
254, 264, 84, 257, 3, 52, 324, 92, 15, 6, 154, 35, 64, 1, 25, 4,
232, 117, 40, 180, 99, 11, 40, 76, 64, 13, 3, 42, 264, 71, 43, 264,
84, 289, 40, 181, 137, 461, 22, 93, 32, 11, 102, 324, 15, 44, 15, 8,
174, 181, 44, 32, 10, 142, 47, 12, 25, 7, 274, 148, 25, 73, 22, 25,
148, 25, 43, 251, 29, 15, 41, 99, 15, 40, 417, 8, 94, 68, 114, 4,
309, 85, 93, 23, 13, 154, 337, 36, 4, 384, 148, 17, 81, 18, 91, 15,
257, 152, 84, 15, 236, 95, 545, 84, 570, 521, 22, 232, 258, 14, 40, 264,
180, 25, 293, 152, 11, 104, 84, 309, 8, 59, 40, 199, 14, 8, 18, 14,
18, 251, 102, 12, 45, 39, 114, 94, 94, 64, 29, 483, 128, 18, 21, 40,
6, 43, 11, 76, 70, 251, 281, 111, 5, 40, 4, 4, 384, 461, 21, 28,
459, 18, 7, 6, 141, 13, 23, 146, 52, 8, 104, 220, 48, 6, 147, 68,
52, 309, 148, 151, 21, 8, 40, 15, 11, 99, 17, 114, 12, 264, 8, 359,
101, 264, 147, 251, 293, 4, 180, 123, 293, 76, 154, 135, 353, 325, 105, 102,
114, 119, 526, 264, 325, 138, 59, 325, 359, 113, 59, 533, 116, 13, 69, 123,
325, 248, 35, 70, 59, 84, 309, 152, 45, 152, 86, 289, 40, 135, 15, 76,
3, 84, 7, 32, 157, 190, 69, 14, 325, 289, 84, 15, 117, 261, 18, 128,
116, 40, 254, 74, 173, 3, 74, 21, 8, 18, 288, 18, 8, 174, 33, 0,
1, 15, 11, 366, 23, 378, 358, 5, 28, 69, 5, 40, 11, 417, 18, 121,
4, 40, 88, 69, 52, 388, 518, 154, 289, 40, 157, 113, 2, 260, 8, 157,
116, 2, 12, 184, 482, 147, 113, 518, 1, 56, 17, 5, 384, 398, 39, 12,
518, 68, 88, 325, 461, 324, 76, 29, 11, 461, 11, 43, 144, 519, 12, 392,
76, 19, 74, 8, 275, 415, 121, 18, 375, 56, 10, 11, 6, 84, 25, 15,
174, 205, 87, 102, 37, 353, 8, 23, 375, 325, 116, 33, 125, 199, 18, 62,
527, 177, 18, 264, 325, 125, 380, 349, 148, 32, 69, 131, 82, 69, 2, 7,
3, 151, 527, 8, 325, 83, 74, 398, 184, 47, 24, 49, 43, 180, 157, 131,
184, 19, 8, 74, 375, 18, 190, 131, 5, 76, 4, 2, 76, 2, 25, 2,
2, 38, 23, 2, 12, 151, 325, 2, 86, 349, 156, 236, 477, 131, 39, 135,
131, 400, 11, 518, 40, 18, 2, 2, 69, 157, 156, 27, 325, 18, 121, 18,
117, 317, 101, 38, 353, 56, 69, 157, 131, 131, 265, 27, 4, 131, 516, 62,
8, 167, 135, 2, 121, 89, 31, 87, 210, 51, 2, 2, 442, 131, 251, 375,
27, 11, 24, 12, 184, 3, 4, 69, 37, 303, 317, 375, 16, 155, 27, 2,
38, 2, 157, 375},
{305, 434, 1039, 1463, 396, 464, 1541, 79, 441, 512, 1677, 1336, 682, 642, 812, 229,
925, 1697, 1545, 877, 1494, 1365, 1464, 1663, 806, 252, 1029, 166, 935, 1579, 406, 725,
1002, 266, 957, 276, 1099, 694, 130, 516, 1415, 311, 1451, 1667, 1563, 1153, 1157, 335,
328, 1620, 202, 1335, 382, 435, 458, 747, 1425, 334, 1388, 1516, 126, 1703, 160, 552,
1128, 546, 1307, 1445, 1342, 772, 1359, 178, 848, 646, 1065, 1167, 855, 1642, 422, 718,
1082, 941, 48, 915, 676, 724, 1555, 1739, 786, 256, 1470, 140, 20, 36, 974, 1059,
49, 304, 1746, 1487, 386, 666, 416, 526, 1793, 1493, 536, 831, 565, 1317, 1236, 1715,
1105, 564, 1745, 695, 10, 465, 185, 487, 571, 1435, 55, 981, 1297, 1687, 1716, 1106,
286, 594, 72, 30, 208, 1193, 179, 1206, 1212, 150, 357, 1366, 1177, 796, 1598, 776,
1049, 656, 595, 854, 1413, 902, 101, 1323, 78, 292, 588, 1258, 825, 980, 1768, 1187,
1229, 1722, 1235, 1507, 1282, 455, 1446, 1444, 1740, 1696, 1698, 944, 635, 425, 1686, 1676,
1452, 1736, 1188, 1792, 881, 1360, 785, 1318, 139, 936, 39, 285, 514, 1424, 708, 316,
339, 259, 867, 345, 859, 1498, 799, 1246, 13, 1478, 1518, 319, 279, 359, 1504, 175,
1370, 706, 219, 1428, 193, 1160, 385, 1639, 318, 63, 789, 709, 1074, 1438, 1644, 1090,
1346, 865, 961, 1460, 1170, 269, 431, 1376, 301, 1324, 62, 1027, 1110, 1347, 1534, 347,
928, 1300, 1474, 815, 1706, 469, 1032, 3, 1513, 1475, 1385, 59, 1624, 924, 1477, 1087,
1759, 1379, 189, 589, 405, 705, 1240, 1392, 993, 89, 1566, 1042, 1616, 60, 1196, 1390,
143, 159, 874, 879, 1484, 1116, 1454, 1461, 562, 938, 1535, 1448, 1075, 587, 1420, 1003,
1517, 549, 973, 330, 1568, 117, 885, 162, 201, 165, 1614, 281, 1447, 625, 1430, 358,
692, 1770, 822, 395, 1418, 213, 1052, 1378, 1332, 503, 1450, 749, 45, 849, 1255, 1089,
1226, 904, 288, 478, 945, 148, 1327, 242, 257, 326, 1050, 1237, 1634, 1097, 1386, 1334,
1621, 1076, 875, 1613, 777, 1112, 1134, 1357, 1329, 1120, 1247, 869, 1213, 349, 1250, 1394,
1380, 1377, 1107, 1590, 1640, 1227, 1199, 355, 1546, 797, 866, 1585, 1648, 823, 1631, 346,
1714, 1760, 99, 657, 1126, 1040, 647, 739, 1757, 1071, 861, 1626, 298, 667, 1325, 1678,
1766, 1372, 1556, 702, 1, 1409, 787, 1688, 716, 1290, 293, 415, 999, 169, 1194, 1774,
1144, 654, 809, 248, 976, 1026, 1769, 449, 962, 1737, 365, 1550, 910, 1018, 135, 149,
167, 1295, 187, 132, 181, 372, 1017, 463, 501, 1718, 1417, 1140, 1782, 1490, 833, 1466,
310, 986, 1437, 1669, 826, 1031, 470, 1528, 1159, 927, 331, 268, 1600, 1492, 1780, 1211,
258, 388, 631, 959, 1111, 180, 499, 788, 1531, 408, 907, 1472, 437, 1469, 84, 214,
1625, 22, 1362, 778, 1084, 1744, 1465, 340, 1214, 1169, 1594, 307, 1371, 313, 830, 1721,
896, 448, 1520, 1101, 1208, 942, 762, 199, 1306, 1276, 1310, 183, 1315, 91, 679, 102,
1286, 168, 264, 763, 807, 943, 1423, 824, 1292, 1356, 1647, 6, 58, 65, 195, 146,
360, 1005, 262, 1773, 1519, 272, 741, 711, 1482, 1480, 939, 1421, 1133, 672, 969, 1261,
323, 232, 322, 234, 882, 967, 351, 290, 1354, 1503, 1510, 344, 1683, 984, 1222, 1223,
1007, 841, 1055, 1521, 1441, 652, 196, 197, 931, 136, 490, 1431, 321, 1045, 1733, 871,
968, 1449, 1610, 834, 921, 620, 468, 1191, 542, 1239, 282, 712, 164, 1497, 1122, 522,
728, 1345, 1725, 611, 1163, 1224, 1093, 1481, 911, 188, 858, 88, 1063, 532, 989, 1701,
662, 156, 1263, 453, 1035, 1577, 34, 1252, 1183, 1094, 66, 451, 1245, 582, 680, 1352,
713, 1215, 1473, 1693, 1749, 338, 1173, 996, 212, 550, 82, 1673, 1092, 704, 1762, 802,
960, 1623, 1321, 1755, 1369, 1601, 223, 474, 1636, 1608, 392, 412, 1353, 752, 880, 314,
1382, 1303, 452, 810, 750, 1609, 26, 583, 67, 1629, 1085, 1115, 1756, 362, 892, 1350,
315, 1615, 209, 846, 122, 759, 1020, 40, 1453, 98, 114, 835, 376, 505, 668, 729,
1086, 104, 287, 333, 551, 1054, 1016, 1363, 138, 821, 1543, 1630, 15, 128, 176, 369,
541, 920, 1564, 1771, 845, 1030, 1125, 1383, 306, 83, 207, 1734, 93, 864, 109, 614,
726, 753, 1083, 1427, 593, 798, 979, 1109, 1069, 129, 255, 908, 953, 1287, 515, 768,
923, 1217, 426, 955, 1168, 21, 56, 210, 186, 476, 11, 1343, 428, 456, 1312, 1402,
1596, 868, 1530, 32, 460, 971, 975, 977, 1262, 618, 389, 636, 230, 615, 493, 644,
836, 1532, 1434, 1672, 163, 440, 940, 1051, 1102, 1340, 1723, 918, 1034, 1205, 1249, 373,
407, 1058, 1506, 937, 1333, 446, 978, 1220, 1436, 1704, 622, 1401, 71, 475, 801, 471,
47, 424, 913, 1319, 1410, 509, 850, 1130, 1260, 1561, 658, 674, 444, 544, 649, 790,
41, 124, 353, 1161, 1387, 1244, 1502, 1584, 367, 817, 1254, 1483, 1539, 1515, 320, 1661,
1791, 1439, 1127, 1536, 297, 1525, 1257, 1651, 840, 580, 1549, 1559, 1456, 1691, 1148, 886,
380, 260, 1512, 1181, 1788, 450, 1429, 1681, 270, 97, 1053, 1268, 377, 1267, 844, 1638,
1764, 1171, 540, 1225, 64, 1278, 887, 1641, 1137, 613, 1198, 733, 1011, 497, 1479, 410,
14, 454, 627, 909, 520, 1023, 1355, 1526, 863, 111, 800, 356, 557, 280, 486, 1114,
343, 483, 427, 1731, 1124, 1091, 616, 900, 570, 736, 818, 1547, 1006, 1320, 934, 1028,
1567, 123, 142, 404, 445, 919, 1131, 1777, 76, 370, 433, 1192, 1393, 1790, 641, 488,
617, 842, 930, 237, 466, 529, 619, 686, 1645, 1783, 484, 531, 25, 1151, 246, 897,
1266, 152, 581, 660, 1293, 332, 387, 1014, 1298, 1599, 744, 816, 1713, 400, 990, 1316,
278, 383, 417, 1396, 1679, 1758, 598, 893, 1158, 1284, 1488, 1709, 261, 699, 1010, 397,
669, 714, 1061, 1351, 220, 1772, 1524, 309, 399, 1189, 35, 90, 153, 254, 1433, 324,
352, 1548, 302, 761, 1190, 1207, 1658, 1682, 277, 336, 1313, 1786, 92, 1322, 42, 1219,
507, 1041, 233, 243, 245, 883, 1406, 1403, 1767, 296, 1185, 366, 496, 1279, 1787, 190,
485, 1358, 443, 251, 461, 479, 100, 1591, 23, 773, 16, 459, 1328, 170, 436, 1132,
1411, 1607, 423, 775, 895, 1277, 371, 914, 1231, 1408, 158, 1232, 68, 495, 650, 73,
697, 1700, 295, 85, 200, 693, 1230, 1349, 1592, 227, 249, 640, 917, 1138, 1724, 481,
839, 1281, 1505, 154, 1180, 1374, 1398, 398, 604, 1272, 1702, 1738, 289, 1367, 684, 878,
1720, 1326, 585, 1794, 735, 781, 742, 748, 901, 1025, 203, 411, 1178, 244, 548, 477,
946, 1747, 363, 521, 553, 1248, 419, 1751, 1784, 105, 519, 743, 760, 956, 1699, 1732,
1750, 70, 184, 1081, 1129, 1182, 1361, 1606, 80, 648, 1414, 1455, 755, 1033, 1270, 1285,
1136, 95, 1560, 950, 591, 1021, 603, 791, 873, 629, 1558, 1763, 659, 1443, 1652, 805,
1117, 1146, 1486, 1489, 8, 814, 1176, 1499, 141, 535, 573, 965, 1375, 1578, 811, 1752,
221, 274, 513, 1619, 1649, 81, 273, 597, 624, 983, 764, 94, 610, 1761, 820, 1501,
559, 299, 793, 1201, 1719, 1458, 1046, 112, 1674, 174, 1209, 634, 1399, 1476, 1775, 1381,
222, 888, 1509, 698, 182, 368, 653, 783, 1013, 1036, 1174, 1622, 533, 1422, 308, 602,
1368, 1694, 837, 1304, 1164, 560, 337, 1019, 1339, 44, 1711, 300, 342, 1269, 236, 577,
1785, 137, 932, 707, 1251, 1527, 118, 803, 894, 870, 108, 350, 1442, 568, 727, 374,
1238, 1330, 1586, 1496, 173, 543, 157, 1405, 1684, 857, 1314, 1331, 995, 147, 1009, 17,
1243, 948, 884, 1459, 1056, 1653, 52, 1533, 1073, 1395, 1218, 819, 1294, 240, 1779, 1348,
1139, 862, 1373, 628, 1108, 1135, 963, 740, 1432, 61, 1523, 1121, 43, 949, 1689, 683,
1012, 731, 1216, 354, 1670, 1795, 1175, 663, 1309, 225, 172, 205, 933, 545, 1203, 1305,
1544, 722, 1654, 745, 737, 1184, 1419, 1646, 1659, 106, 228, 401, 4, 303, 1228, 1778,
782, 271, 508, 723, 856, 177, 534, 804, 1221, 217, 378, 719, 997, 651, 384, 1537,
730, 1162, 1253, 1273, 216, 537, 12, 144, 1302, 107, 1776, 584, 145, 1200, 1650, 1705,
1072, 1179, 7, 391, 701, 1241, 1416, 567, 1147, 263, 547, 1637, 1656, 29, 566, 661,
379, 671, 754, 1166, 119, 1044, 121, 171, 1643, 18, 621, 1123, 954, 110, 206, 1291,
1529, 239, 250, 691, 192, 390, 912, 1573, 473, 1542, 402, 687, 746, 1280, 1605, 33,
688, 721, 1717, 847, 1570, 1668, 1096, 889, 1143, 590, 113, 420, 539, 970, 1047, 1538,
528, 681, 829, 696, 247, 574, 853, 500, 1015, 1632, 1726, 1066, 1708, 28, 96, 929,
1603, 491, 1172, 1142, 1004, 418, 852, 1753, 922, 1397, 5, 626, 795, 1602, 890, 1789,
563, 710, 151, 1210, 1553, 608, 1098, 1692, 161, 1583, 1754, 24, 325, 991, 1457, 462,
555, 1233, 1265, 734, 738, 133, 226, 1391, 317, 224, 780, 1068, 1077, 1569, 1748, 134,
283, 439, 903, 1508, 131, 294, 1743, 1582, 267, 1256, 86, 630, 518, 607, 1156, 561,
843, 579, 364, 525, 1511, 1735, 872, 612, 1491, 284, 510, 204, 767, 1633, 409, 717,
700, 964, 1412, 482, 438, 899, 1588, 504, 576, 116, 125, 69, 703, 838, 329, 556,
375, 394, 74, 194, 1155, 241, 898, 1234, 992, 1655, 1118, 1404, 1680, 155, 1064, 1141,
19, 851, 1079, 1407, 211, 489, 361, 1043, 472, 1440, 600, 1062, 1259, 457, 1664, 511,
605, 1730, 414, 1617, 1710, 779, 1060, 1426, 1796, 38, 1612, 1666, 253, 1467, 432, 1283,
1242, 1662, 235, 876, 231, 575, 1741, 120, 53, 275, 1038, 1675, 312, 1296, 127, 530,
1384, 1781, 784, 1145, 758, 916, 1104, 238, 291, 664, 1604, 2, 524, 1095, 1103, 1587,
1289, 554, 1008, 832, 1048, 828, 860, 1299, 1204, 1500, 1728, 37, 1468, 538, 1540, 632,
1344, 87, 596, 480, 527, 637, 769, 951, 670, 982, 218, 348, 198, 813, 1565, 678,
1037, 31, 720, 1695, 1665, 578, 770, 1080, 765, 774, 492, 794, 606, 827, 1389, 1575,
430, 1557, 327, 506, 952, 1301, 1067, 1337, 381, 429, 569, 808, 265, 1186, 103, 1618,
958, 1554, 1197, 1165, 393, 1100, 1070, 601, 905, 633, 1628, 1485, 586, 523, 643, 1275,
1729, 677, 517, 1057, 403, 1341, 1765, 994, 906, 1288, 966, 1078, 27, 1271, 191, 771,
987, 1119, 1150, 1690, 655, 1712, 1742, 1471, 9, 1707, 1001, 341, 1581, 57, 46, 638,
645, 1088, 891, 50, 442, 685, 498, 592, 690, 609, 572, 1635, 599, 1462, 421, 766,
1522, 756, 1308, 1400, 1685, 1202, 639, 115, 1552, 1657, 558, 665, 413, 1727, 1022, 1195,
1576, 494, 792, 1571, 988, 715, 1611, 1627, 675, 1514, 1580, 732, 1311, 1574, 1593, 1364,
1154, 985, 1589, 51, 757, 1338, 1152, 1264, 751, 1597, 54, 502, 972, 1495, 1024, 467,
689, 215, 998, 1000, 926, 947, 1671, 1660, 1149, 623, 447, 1562, 1551, 1274, 673, 75,
1113, 77, 1595, 1572},
{18.46618531, 18.16590212, 17.20465053, 17.20465053, 17.40689519, 17.40689519, 17.40689519,
17.49285568, 17.54992877, 17.60681686, 17.60681686, 17.69180601, 17.8605711, 17.88854382,
17.88854382, 17.94435844, 17.94435844, 18.02775638, 18.05547009, 18.08314132, 18.08314132,
18.16590212, 18.16590212, 18.1934054, 18.22086716, 18.35755975, 18.38477631, 18.46618531,
18.49324201, 18.60107524, 18.62793601, 18.68154169, 18.78829423, 18.89444363, 18.89444363,
18.92088793, 18.92088793, 19., 19.07878403, 19.07878403, 19.07878403, 19.10497317,
19.10497317, 19.10497317, 19.33907961, 19.36491673, 19.41648784, 19.5192213, 19.57038579,
19.57038579, 19.59591794, 19.62141687, 19.74841766, 19.77371993, 19.87460691, 19.87460691,
19.87460691, 20.0748599, 20.1246118, 20.1246118, 20.174241, 20.174241, 20.22374842,
20.22374842, 20.24845673, 20.27313493, 20.29778313, 20.29778313, 20.32240143, 20.4450483,
20.46948949, 20.51828453, 20.54263858, 20.63976744, 20.63976744, 20.68816087, 20.71231518,
20.76053949, 20.78460969, 20.80865205, 20.88061302, 20.92844954, 20.95232684, 20.95232684,
20.97617696, 20.97617696, 21.07130751, 21.16601049, 21.23676058, 21.26029163, 21.26029163,
21.33072901, 21.54065923, 21.54065923, 21.54065923, 21.54065923, 21.63330765, 21.70253441,
21.70253441, 21.77154106, 21.81742423, 21.84032967, 22., 22.02271555, 22.06807649,
22.11334439, 22.15851981, 22.15851981, 22.20360331, 22.20360331, 22.27105745, 22.27105745,
22.3159136, 22.3383079, 22.3383079, 22.44994432, 22.60530911, 22.69361144, 22.71563338,
22.737634, 22.737634, 22.75961335, 22.89104628, 23.02172887, 23.10844002, 23.10844002,
23.10844002, 23.17326045, 23.23790008, 23.2594067, 23.28089345, 23.34523506, 23.40939982,
23.40939982, 23.53720459, 23.62202362, 23.64318084, 23.70653918, 23.70653918, 23.8117618,
24., 24.06241883, 24.16609195, 24.35159132, 24.35159132, 24.41311123, 24.49489743,
24.49489743, 24.71841419, 24.73863375, 24.81934729, 25.03996805, 25.07987241, 25.07987241,
25.25866188, 25.3179778, 25.51470164, 25.53429067, 25.57342371, 25.61249695, 25.69046516,
25.70992026, 25.8069758, 26.05762844, 22.53885534, 22.38302929, 22.24859546, 22.24859546,
22.24859546, 22.3383079, 22.58317958, 22.737634, 22.82542442, 22.93468988, 22.97825059,
23.13006701, 23.17326045, 23.28089345, 23.32380758, 23.4520788, 23.47338919, 23.49468025,
23.64318084, 23.85372088, 23.89560629, 24.06241883, 24.08318916, 24.08318916, 24.12467616,
24.18677324, 24.18677324, 22.42766149, 22.13594362, 21.88606863, 21.86321111, 21.28379665,
21.9317122, 21.9544984, 22.09072203, 22.09072203, 22.24859546, 22.24859546, 22.4053565,
22.44994432, 22.47220505, 22.58317958, 22.58317958, 22.82542442, 22.89104628, 23.02172887,
23.06512519, 23.06512519, 23.15167381, 23.21637353, 23.32380758, 23.49468025, 23.51595203,
23.55843798, 23.55843798, 23.57965225, 23.76972865, 23.76972865, 23.76972865, 23.79075451,
23.8117618, 23.83275058, 23.85372088, 23.85372088, 23.91652149, 23.93741841, 23.97915762,
24., 24.18677324, 24.24871131, 24.2899156, 24.2899156, 24.35159132, 24.35159132,
24.35159132, 24.37211521, 24.37211521, 24.39262184, 24.41311123, 24.45403852, 24.45403852,
24.4744765, 24.4744765, 24.55605832, 24.55605832, 24.57641145, 24.59674775, 24.61706725,
24.61706725, 24.63736999, 24.67792536, 24.69817807, 24.75883681, 24.79919354, 24.81934729,
24.8394847, 24.85960579, 24.8997992, 24.8997992, 24.8997992, 24.91987159, 24.97999199,
24.97999199, 25.03996805, 25.07987241, 25.0998008, 25.11971337, 25.11971337, 25.13961018,
25.15949125, 25.17935662, 25.17935662, 25.17935662, 25.19920634, 25.19920634, 25.19920634,
23.93741841, 23.85372088, 23.57965225, 23.66431913, 23.72762104, 23.87467277, 23.87467277,
24., 24.10394159, 24.16609195, 24.41311123, 24.49489743, 24.65765601, 24.65765601,
23.19482701, 24.4744765, 24.4744765, 24.59674775, 24.59674775, 24.67792536, 24.75883681,
24.81934729, 25.01999201, 25.03996805, 25.17935662, 25.19920634, 25.21904043, 25.23885893,
25.23885893, 25.29822128, 25.3179778, 25.33771892, 25.35744467, 25.3968502, 25.43619468,
25.45584412, 25.49509757, 25.49509757, 25.49509757, 25.51470164, 25.51470164, 25.55386468,
25.61249695, 25.65151068, 25.65151068, 25.65151068, 25.15949125, 24.06241883, 23.89560629,
21.72556098, 17.88854382, 17., 16.76305461, 17.29161647, 17.34935157, 17.8325545,
17.88854382, 18.02775638, 18.13835715, 18.43908891, 18.49324201, 18.49324201, 18.49324201,
19.05255888, 19.23538406, 19.31320792, 19.31320792, 19.62141687, 19.62141687, 19.6977156,
19.6977156, 19.74841766, 19.94993734, 19.97498436, 20.174241, 20.24845673, 20.61552813,
20.63976744, 20.90454496, 21., 21.02379604, 21.09502311, 21.11871208, 21.14237451,
21.33072901, 21.72556098, 21.79449472, 22.02271555, 22.15851981, 22.69361144, 23.02172887,
23.13006701, 23.13006701, 23.17326045, 23.19482701, 23.43074903, 23.47338919, 23.72762104,
23.83275058, 24.08318916, 24.4744765, 24.65765601, 24.67792536, 24.67792536, 24.67792536,
24.69817807, 24.85960579, 24.87971061, 25., 25.05992817, 25.13961018, 25.25866188,
25.43619468, 25.65151068, 25.67099531, 25.67099531, 25.67099531, 25.69046516, 25.70992026,
25.72936066, 25.74878638, 25.76819745, 25.76819745, 25.78759392, 25.78759392, 25.78759392,
25.8069758, 25.82634314, 25.82634314, 25.82634314, 25.84569597, 25.84569597, 25.88435821,
25.88435821, 25.90366769, 25.90366769, 25.90366769, 25.92296279, 25.94224354, 25.94224354,
23.66431913, 22.627417, 22.627417, 22.58317958, 22.22611077, 21.56385865, 21.74856317,
21.84032967, 22.29349681, 22.36067977, 22.627417, 22.64950331, 22.95648057, 23.06512519,
23.08679276, 23.08679276, 23.51595203, 23.51595203, 23.57965225, 23.62202362, 23.64318084,
23.89560629, 24.08318916, 24.18677324, 24.20743687, 24.22808288, 24.22808288, 24.31049156,
24.33105012, 24.37211521, 24.39262184, 24.41311123, 24.51530134, 24.63736999, 24.69817807,
24.75883681, 24.81934729, 24.85960579, 24.8997992, 24.8997992, 24.91987159, 24.91987159,
24.95996795, 24.95996795, 24.95996795, 25.01999201, 25.01999201, 25.07987241, 25.13961018,
25.13961018, 25.15949125, 25.17935662, 25.3179778, 25.35744467, 25.41653005, 25.45584412,
25.65151068, 25.78759392, 25.90366769, 25.92296279, 25.94224354, 25.96150997, 25.96150997,
25.98076211, 25.98076211, 26., 26.01922366, 26.03843313, 26.03843313, 26.05762844,
26.05762844, 26.0959767, 26.0959767, 26.11512971, 26.11512971, 26.13426869, 26.13426869,
26.15339366, 26.15339366, 26.15339366, 26.15339366, 26.15339366, 26.15339366, 26.17250466,
26.17250466, 26.17250466, 26.17250466, 26.17250466, 23.28089345, 22.09072203, 19.87460691,
19.49358869, 19.87460691, 17.43559577, 18.11077028, 18.49324201, 18.89444363, 18.97366596,
19.15724406, 19.18332609, 19.18332609, 19.39071943, 19.54482029, 19.57038579, 19.59591794,
19.6468827, 19.72308292, 19.94993734, 20., 20.02498439, 20.0748599, 20.1246118,
20.1246118, 20.1246118, 20.14944168, 20.174241, 20.19900988, 20.22374842, 20.22374842,
20.27313493, 20.29778313, 20.32240143, 20.32240143, 20.54263858, 20.59126028, 20.63976744,
20.63976744, 20.63976744, 20.71231518, 20.95232684, 20.97617696, 21.07130751, 21.11871208,
21.16601049, 21.16601049, 21.1896201, 21.23676058, 21.23676058, 21.33072901, 21.47091055,
21.56385865, 21.56385865, 21.56385865, 21.58703314, 21.63330765, 21.65640783, 21.67948339,
21.70253441, 21.77154106, 21.81742423, 21.84032967, 21.88606863, 21.9317122, 21.97726098,
22., 22.06807649, 22.09072203, 22.09072203, 22.24859546, 22.27105745, 22.27105745,
22.27105745, 22.3159136, 22.3159136, 22.38302929, 22.4053565, 22.49444376, 22.5166605,
22.53885534, 22.60530911, 22.627417, 22.6715681, 22.71563338, 22.737634, 22.737634,
22.91287847, 22.93468988, 22.95648057, 22.97825059, 23.02172887, 23.10844002, 23.13006701,
23.19482701, 23.21637353, 23.23790008, 23.2594067, 23.38803113, 23.40939982, 23.47338919,
23.51595203, 23.68543856, 23.68543856, 23.74868417, 23.76972865, 23.85372088, 23.97915762,
24.0208243, 24.0208243, 24.04163056, 24.08318916, 24.12467616, 24.16609195, 24.20743687,
24.24871131, 24.24871131, 24.24871131, 24.2899156, 24.37211521, 24.39262184, 24.4744765,
24.53568829, 24.53568829, 24.53568829, 24.55605832, 24.69817807, 25., 25.03996805,
25.0998008, 25.11971337, 25.13961018, 25.17935662, 25.19920634, 25.3968502, 25.53429067,
25.55386468, 25.57342371, 25.65151068, 25.70992026, 25.74878638, 25.8069758, 25.86503431,
25.90366769, 26.17250466, 26.19160171, 26.21068484, 26.21068484, 26.2297541, 26.2297541,
26.26785107, 26.26785107, 26.28687886, 26.28687886, 26.30589288, 26.32489316, 26.32489316,
26.34387974, 26.34387974, 26.34387974, 26.36285265, 26.36285265, 26.36285265, 26.38181192,
26.38181192, 26.40075756, 26.40075756, 26.40075756, 26.40075756, 26.40075756, 26.41968963,
26.41968963, 26.43860813, 26.43860813, 26.43860813, 26.43860813, 26.45751311, 26.45751311,
26.45751311, 26.45751311, 26.45751311, 26.45751311, 26.45751311, 26.47640459, 26.4952826,
26.4952826, 26.4952826, 26.4952826, 26.53299832, 26.55183609, 26.55183609, 26.55183609,
26.57066051, 26.57066051, 26.5894716, 26.5894716, 26.5894716, 26.5894716, 26.5894716,
26.5894716, 26.60826939, 26.60826939, 26.60826939, 26.60826939, 26.62705391, 26.66458325,
26.66458325, 26.66458325, 26.66458325, 26.66458325, 26.70205985, 26.73948391, 26.73948391,
26.73948391, 26.75817632, 26.75817632, 26.75817632, 25.21904043, 25.15949125, 25.53429067,
25.78759392, 25.88435821, 26.38181192, 26.40075756, 26.51414717, 26.64582519, 26.75817632,
26.77685568, 26.77685568, 26.79552201, 26.79552201, 26.81417536, 26.83281573, 26.83281573,
26.83281573, 26.83281573, 26.83281573, 26.85144316, 26.87005769, 26.88865932, 26.92582404,
26.92582404, 26.94438717, 26.94438717, 26.94438717, 26.96293753, 26.98147513, 26.98147513,
27., 27., 27., 27., 27., 27., 27.,
27.01851217, 27.01851217, 27.03701167, 27.03701167, 27.05549852, 27.05549852, 27.05549852,
27.05549852, 27.07397274, 27.07397274, 27.09243437, 27.09243437, 27.09243437, 27.09243437,
27.09243437, 27.11088342, 27.11088342, 27.12931993, 27.12931993, 27.12931993, 27.14774392,
27.16615541, 27.16615541, 27.16615541, 27.16615541, 27.16615541, 27.18455444, 27.18455444,
27.18455444, 27.18455444, 27.18455444, 27.20294102, 27.20294102, 27.22131518, 27.22131518,
27.22131518, 27.22131518, 24.67792536, 23.34523506, 22.29349681, 22.22611077, 21.54065923,
21.54065923, 22.24859546, 22.27105745, 22.42766149, 22.49444376, 22.5166605, 22.56102835,
22.75961335, 22.82542442, 22.86919325, 22.86919325, 23.04343724, 23.10844002, 23.13006701,
23.19482701, 23.34523506, 23.43074903, 23.47338919, 23.47338919, 23.49468025, 23.62202362,
23.62202362, 23.68543856, 23.70653918, 23.70653918, 23.76972865, 23.79075451, 23.8117618,
23.85372088, 23.9582971, 24.16609195, 24.16609195, 24.33105012, 24.35159132, 24.35159132,
24.37211521, 24.51530134, 24.51530134, 24.51530134, 24.53568829, 24.67792536, 24.85960579,
24.8997992, 24.95996795, 25., 25.05992817, 25.11971337, 25.15949125, 25.23885893,
25.25866188, 25.27844932, 25.29822128, 25.57342371, 25.57342371, 25.59296778, 25.61249695,
25.67099531, 25.70992026, 25.78759392, 25.82634314, 25.88435821, 25.90366769, 25.92296279,
26.13426869, 26.15339366, 26.19160171, 26.19160171, 26.21068484, 26.30589288, 26.32489316,
26.34387974, 26.34387974, 26.45751311, 26.4952826, 26.55183609, 26.66458325, 26.70205985,
26.81417536, 26.83281573, 26.94438717, 27.07397274, 27.11088342, 27.20294102, 27.23967694,
27.25802634, 27.25802634, 27.25802634, 27.27636339, 27.27636339, 27.29468813, 27.31300057,
27.31300057, 27.33130074, 27.33130074, 27.33130074, 27.33130074, 27.33130074, 27.33130074,
27.33130074, 27.34958866, 27.34958866, 27.34958866, 27.34958866, 27.34958866, 27.38612788,
27.40437921, 27.4226184, 27.4226184, 27.4226184, 27.4226184, 27.44084547, 27.44084547,
27.44084547, 27.44084547, 27.44084547, 27.44084547, 27.44084547, 27.45906044, 27.45906044,
27.47726333, 27.47726333, 27.49545417, 27.49545417, 27.49545417, 27.51363298, 27.51363298,
27.51363298, 27.51363298, 27.5317998, 27.5317998, 27.54995463, 27.54995463, 27.54995463,
27.5680975, 27.5680975, 27.5680975, 27.58622845, 27.58622845, 27.58622845, 27.60434748,
27.60434748, 27.60434748, 27.60434748, 27.62245463, 27.62245463, 27.64054992, 27.64054992,
27.64054992, 27.64054992, 27.64054992, 27.64054992, 27.65863337, 27.65863337, 27.65863337,
27.67670501, 27.67670501, 27.67670501, 27.67670501, 27.67670501, 27.69476485, 27.69476485,
27.71281292, 27.73084925, 27.73084925, 27.73084925, 27.74887385, 27.74887385, 27.74887385,
27.74887385, 27.76688675, 27.78488798, 27.78488798, 27.78488798, 27.80287755, 27.82085549,
27.82085549, 27.82085549, 27.83882181, 27.83882181, 27.85677655, 27.85677655, 27.85677655,
27.85677655, 27.87471973, 27.87471973, 27.89265136, 27.89265136, 27.91057147, 27.91057147,
27.92848009, 27.92848009, 27.92848009, 27.92848009, 27.92848009, 27.94637722, 27.94637722,
27.96426291, 27.96426291, 27.98213716, 27.98213716, 27.98213716, 28., 28.01785145,
28.01785145, 28.01785145, 28.03569154, 28.05352028, 28.05352028, 28.05352028, 28.0713377,
28.0713377, 28.08914381, 28.08914381, 28.10693865, 28.10693865, 28.10693865, 28.14249456,
28.14249456, 28.17800561, 28.19574436, 28.19574436, 28.21347196, 28.21347196, 28.21347196,
28.21347196, 28.24889378, 28.24889378, 28.26658805, 28.26658805, 28.28427125, 28.28427125,
28.3019434, 28.3019434, 28.3019434, 28.31960452, 28.31960452, 28.31960452, 28.33725463,
28.35489376, 28.35489376, 28.35489376, 28.35489376, 28.35489376, 28.35489376, 28.37252192,
28.37252192, 28.37252192, 28.37252192, 28.37252192, 28.37252192, 28.39013913, 28.39013913,
28.39013913, 28.39013913, 28.40774542, 28.40774542, 28.40774542, 28.40774542, 28.42534081,
28.42534081, 28.42534081, 28.42534081, 28.42534081, 28.44292531, 28.44292531, 28.46049894,
28.46049894, 28.46049894, 28.47806173, 28.4956137, 28.4956137, 28.51315486, 28.51315486,
28.53068524, 28.53068524, 28.53068524, 28.53068524, 28.54820485, 28.54820485, 28.54820485,
28.56571371, 28.56571371, 28.58321186, 28.58321186, 28.58321186, 28.60069929, 28.61817604,
28.61817604, 28.61817604, 28.63564213, 28.63564213, 28.63564213, 28.65309756, 28.65309756,
28.67054237, 28.67054237, 28.67054237, 28.67054237, 28.67054237, 28.67054237, 28.68797658,
28.68797658, 28.68797658, 28.68797658, 28.68797658, 28.70540019, 28.70540019, 28.72281323,
28.74021573, 28.74021573, 28.74021573, 28.75760769, 28.75760769, 28.75760769, 28.77498914,
28.7923601, 28.82707061, 28.82707061, 28.8444102, 28.86173938, 28.86173938, 28.87905816,
28.89636655, 28.89636655, 28.91366459, 28.91366459, 28.91366459, 28.94822965, 28.94822965,
28.94822965, 28.96549672, 28.96549672, 28.96549672, 28.96549672, 28.96549672, 29.,
29., 29., 29., 29.01723626, 29.01723626, 29.01723626, 29.01723626,
29.01723626, 29.01723626, 29.03446228, 29.03446228, 29.05167809, 29.05167809, 29.05167809,
29.05167809, 29.05167809, 26.43860813, 23.17326045, 22.3159136, 21.16601049, 21.3541565,
21.54065923, 22., 22.13594362, 22.47220505, 22.5166605, 22.53885534, 22.627417,
22.69361144, 22.97825059, 23.13006701, 23.15167381, 23.17326045, 23.2594067, 23.28089345,
23.36664289, 23.40939982, 23.51595203, 23.66431913, 23.72762104, 23.76972865, 23.93741841,
23.97915762, 24.06241883, 24.06241883, 24.06241883, 24.08318916, 24.14539294, 24.14539294,
24.20743687, 24.37211521, 24.37211521, 24.59674775, 24.61706725, 24.65765601, 24.67792536,
24.69817807, 24.8997992, 24.8997992, 24.91987159, 24.93992783, 25., 25.,
25.03996805, 25.07987241, 25.0998008, 25.11971337, 25.11971337, 25.17935662, 25.25866188,
25.29822128, 25.3179778, 25.3968502, 25.41653005, 25.43619468, 25.43619468, 25.51470164,
25.55386468, 25.57342371, 25.78759392, 25.8069758, 25.82634314, 25.94224354, 26.01922366,
26.03843313, 26.2488095, 26.26785107, 26.32489316, 26.38181192, 26.43860813, 26.47640459,
26.4952826, 26.51414717, 26.55183609, 26.5894716, 26.70205985, 26.72077843, 26.75817632,
26.79552201, 26.79552201, 26.81417536, 26.83281573, 26.87005769, 26.96293753, 27.03701167,
27.05549852, 27.12931993, 27.12931993, 27.14774392, 27.16615541, 27.27636339, 27.31300057,
27.31300057, 27.36786437, 27.36786437, 27.38612788, 27.38612788, 27.49545417, 27.5317998,
27.64054992, 27.82085549, 27.96426291, 28.0713377, 28.19574436, 28.28427125, 28.28427125,
28.3019434, 28.3019434, 28.39013913, 28.40774542, 28.44292531, 28.44292531, 28.47806173,
28.53068524, 28.54820485, 28.77498914, 28.80972058, 29.05167809, 29.06888371, 29.06888371,
29.08607914, 29.10326442, 29.12043956, 29.13760457, 29.13760457, 29.15475947, 29.17190429,
29.17190429, 29.18903904, 29.20616373, 29.20616373, 29.20616373, 29.22327839, 29.22327839,
29.22327839, 29.22327839, 29.24038303, 29.24038303, 29.25747768, 29.27456234, 29.27456234,
29.27456234, 29.27456234, 29.27456234, 29.29163703, 29.29163703, 29.29163703, 29.30870178,
29.30870178, 29.30870178, 29.30870178, 29.3257566, 29.3428015, 29.3428015, 29.35983651,
29.35983651, 29.37686164, 29.37686164, 29.37686164, 29.39387691, 29.41088234, 29.41088234,
29.41088234, 29.41088234, 29.44486373, 29.46183973, 29.46183973, 29.47880595, 29.47880595,
29.47880595, 29.47880595, 29.49576241, 29.49576241, 29.51270913, 29.51270913, 29.52964612,
29.54657341, 29.54657341, 29.59729717, 29.61418579, 29.64793416, 29.64793416, 29.64793416,
29.66479395, 29.66479395, 29.68164416, 29.68164416, 29.68164416, 29.68164416, 29.68164416,
29.69848481, 29.69848481, 29.71531592, 29.71531592, 29.71531592, 29.73213749, 29.74894956,
29.74894956, 29.74894956, 29.76575213, 29.76575213, 29.76575213, 29.76575213, 29.78254522,
29.78254522, 29.79932885, 29.79932885, 29.81610303, 29.84962311, 29.84962311, 29.84962311,
29.86636905, 29.88310559, 29.9165506, 29.9165506, 29.9165506, 29.93325909, 29.93325909,
29.93325909, 29.94995826, 29.94995826, 29.94995826, 29.94995826, 29.9833287, 29.9833287,
30., 30., 30., 30., 30., 30.01666204, 30.01666204,
30.01666204, 30.01666204, 30.03331484, 30.03331484, 30.03331484, 30.0499584, 30.06659276,
30.06659276, 30.08321791, 30.11644069, 30.11644069, 30.11644069, 30.11644069, 30.11644069,
30.11644069, 30.13303835, 30.13303835, 30.13303835, 30.14962686, 30.16620626, 30.16620626,
30.16620626, 30.19933774, 30.19933774, 30.19933774, 30.19933774, 30.21588986, 30.23243292,
30.24896692, 30.2654919, 30.2654919, 30.2654919, 30.28200786, 30.28200786, 30.29851482,
30.31501278, 30.33150178, 30.33150178, 30.33150178, 30.34798181, 30.38091506, 30.41381265,
30.41381265, 30.43024811, 30.43024811, 30.4466747, 30.4466747, 30.46309242, 30.47950131,
30.49590136, 30.5122926, 30.5450487, 30.56141358, 30.56141358, 30.5777697, 30.59411708,
30.59411708, 30.59411708, 30.61045573, 30.62678566, 30.62678566, 30.62678566, 30.64310689,
30.64310689, 30.64310689, 30.64310689, 30.65941943, 30.65941943, 30.6757233, 30.6757233,
30.69201851, 30.70830507, 30.7408523, 30.7408523, 30.7408523, 30.7408523, 30.7408523,
30.7408523, 30.757113, 30.757113, 30.78960864, 30.78960864, 30.78960864, 30.8058436,
30.8058436, 30.8058436, 30.83828789, 30.85449724, 30.85449724, 30.87069808, 30.87069808,
30.88689042, 30.88689042, 30.88689042, 30.90307428, 30.91924967, 30.9354166, 30.95157508,
30.96772513, 30.96772513, 30.98386677, 31.01612484, 31.06444913, 31.06444913, 31.09662361,
31.09662361, 31.11269837, 31.144823, 31.144823, 31.17691454, 31.17691454, 31.19294792,
31.19294792, 31.19294792, 31.20897307, 31.22498999, 31.22498999, 31.22498999, 31.2409987,
31.2409987, 31.25699922, 31.28897569, 31.30495168, 31.30495168, 31.33687923, 31.36877428,
31.38470965, 31.40063694, 31.40063694, 31.43246729, 31.43246729, 31.43246729, 31.44837039,
31.44837039, 31.44837039, 31.46426545, 31.46426545, 31.48015248, 31.48015248, 31.48015248,
31.4960315, 31.52776554, 31.52776554, 31.54362059, 31.54362059, 31.54362059, 31.54362059,
31.55946768, 31.55946768, 31.57530681, 31.57530681, 31.60696126, 31.6227766, 31.63858404,
31.63858404, 31.63858404, 31.65438358, 31.65438358, 31.67017524, 31.70173497, 31.70173497,
31.71750305, 31.71750305, 31.78049716, 31.79622619, 31.82766093, 31.85906464, 31.85906464,
31.8747549, 31.8747549, 31.8747549, 31.89043744, 31.89043744, 31.9217794, 31.93743885,
31.95309062, 31.95309062, 31.98437118, 32., 32.01562119, 32.01562119, 32.01562119,
32.03123476, 32.06243908, 32.06243908, 32.06243908, 32.06243908, 32.07802986, 32.10918872,
32.12475681, 32.14031736, 32.14031736, 32.15587038, 32.17141588, 32.17141588, 32.24903099,
32.24903099, 32.24903099, 32.26453161, 32.26453161, 32.29551052, 32.29551052, 32.31098884,
32.40370349, 32.43454948, 32.43454948, 32.43454948, 32.48076354, 32.51153641, 32.51153641,
32.52691193, 32.52691193, 32.55764119, 32.55764119, 32.60368077, 32.61901286, 32.61901286,
32.61901286, 32.64965543, 32.64965543, 32.68026928, 32.69556545, 32.71085447, 32.71085447,
32.74141109, 32.74141109, 32.75667871, 32.80243893, 32.80243893, 32.80243893, 32.80243893,
32.83291032, 32.86335345, 32.87856445, 32.90896534, 32.92415527, 32.92415527, 32.92415527,
32.93933818, 32.93933818, 32.95451411, 32.95451411, 32.95451411, 32.96968304, 32.984845,
32.984845, 32.984845, 33.03028913, 33.03028913, 33.04542328, 33.04542328, 33.09078422,
33.10589071, 33.10589071, 33.10589071, 33.12099032, 33.12099032, 33.15116891, 33.19638535,
33.21144381, 33.21144381, 33.2565783, 33.28663395, 33.33166662, 33.33166662, 33.33166662,
33.33166662, 33.346664, 33.346664, 33.39161571, 33.46640106, 33.48133809, 33.48133809,
33.51119216, 33.52610923, 33.54101966, 33.55592347, 33.57082066, 33.61547263, 33.67491648,
33.68976106, 33.68976106, 33.7194306, 33.73425559, 33.74907406, 33.74907406, 33.76388603,
33.80828301, 33.83784863, 33.85262176, 33.88214869, 33.89690251, 33.91164992, 33.91164992,
33.92639091, 33.9411255, 33.98529094, 34., 34., 34.05877273, 34.07345007,
34.0881211, 34.10278581, 34.13209633, 34.13209633, 34.13209633, 34.16138171, 34.17601498,
34.17601498, 34.17601498, 34.20526275, 34.2636834, 34.33656943, 34.42382896, 34.45286635,
34.45286635, 34.49637662, 34.525353, 34.53983208, 34.55430509, 34.55430509, 34.59768778,
34.64101615, 34.64101615, 34.64101615, 34.81379037, 34.82814953, 34.82814953, 34.85685012,
34.88552709, 34.88552709, 34.89985673, 34.92849839, 35.02855978, 35.02855978, 35.05709629,
35.07135583, 35.07135583, 35.11409973, 35.12833614, 35.17101079, 35.19943181, 35.21363372,
35.27038418, 35.29872519, 35.32704347, 35.38361203, 35.41186242, 35.48239, 35.4964787,
35.51056181, 35.51056181, 35.52463934, 35.63705936, 35.70714214, 35.76310948, 35.83294573,
35.91656999, 35.98610843, 36.01388621, 36.01388621, 36.01388621, 36.06937759, 36.0970913,
36.20773398, 36.24913792, 36.29049462, 36.30426972, 36.31803959, 36.34556369, 36.35931793,
36.37306696, 36.44173432, 36.48287269, 36.57868232, 36.82390528, 36.86461718, 36.87817783,
36.87817783, 36.87817783, 36.89173349, 36.90528417, 36.97296309, 36.97296309, 37.01351105,
37.08099244, 37.13488926, 37.17526059, 37.28270376, 37.30951621, 37.36308338, 37.41657387,
37.76241518, 38.05259518, 38.13135193, 38.31448812, 38.45776905, 38.78143886, 39.79949748,
39.87480407, 40.47221269, 40.52159918, 42.11887938},
{-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}}};
// Expected clustering-hierarchy fixture data for the Iris dataset (150 samples).
// Each flat array below encodes one column of a condensed dendrogram/tree:
// for edge i, parents[i] -> children[i] with weight lambdas[i] and subtree
// size sizes[i]. Node ids >= n_row (here 150..154) denote internal cluster
// nodes; ids < n_row denote original data points.
// NOTE(review): presumably the ground-truth condensed tree for an HDBSCAN /
// single-linkage test on Iris — confirm against the test that consumes it.
// Do not edit values by hand; they are exact expected outputs.
namespace Iris {
// Number of input data points (the classic Iris dataset has 150 rows).
constexpr int n_row = 150;
// Parent node id of each tree edge. Values 150-154 are synthetic cluster
// nodes (150 is the root; 151/152 and 153/154 are its descendant splits).
const std::vector<int> parents = {
  150, 150, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152,
  152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151,
  152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151,
  152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151, 152, 151,
  151, 152, 151, 152, 151, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151,
  152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152,
  152, 153, 154, 153, 154, 153, 153, 153, 153, 153, 153, 154, 153, 154, 153, 154, 153, 154,
  154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154,
  154, 154, 154, 154, 154, 154, 154, 154, 154, 154};
// Child node id of each edge: either a data-point index (< 150) or a
// cluster node id (151-154) when a cluster splits further.
const std::vector<int> children = {
  151, 152, 41, 131, 15, 117, 14, 118, 22, 106, 18, 98, 13, 109, 33, 57, 44, 60,
  93, 32, 129, 24, 68, 43, 122, 16, 135, 5, 134, 23, 119, 20, 125, 8, 114, 36,
  108, 31, 148, 10, 87, 46, 100, 35, 105, 6, 62, 19, 107, 42, 113, 25, 130, 11,
  64, 38, 59, 37, 50, 48, 141, 21, 52, 3, 85, 26, 84, 2, 102, 45, 136, 47,
  29, 121, 12, 146, 40, 110, 30, 1, 34, 9, 28, 4, 49, 7, 27, 17, 39, 0,
  79, 132, 70, 56, 137, 133, 73, 144, 53, 63, 81, 138, 72, 90, 66, 103, 76, 153,
  154, 77, 80, 126, 71, 83, 123, 127, 142, 149, 101, 51, 139, 91, 143, 65, 104, 74,
  54, 86, 58, 75, 128, 116, 115, 124, 111, 147, 145, 112, 120, 140, 97, 61, 88, 67,
  78, 55, 95, 82, 89, 69, 92, 94, 96, 99};
// Lambda value (edge weight, typically 1/distance in HDBSCAN condensed
// trees — NOTE(review): confirm the convention against the consuming test)
// at which each child separates from its parent cluster.
const std::vector<float> lambdas = {
  0.60971076, 0.60971076, 1.25988158, 0.97590007, 1.56173762, 0.98058068, 1.71498585, 1.03695169,
  1.85695338, 1.13227703, 1.9245009, 1.22169444, 2., 1.24034735, 2.08514414, 1.27000127,
  2.08514414, 1.38675049, 1.38675049, 2.1821789, 1.41421356, 2.23606798, 1.41421356, 2.3570226,
  1.42857143, 2.5, 1.42857143, 2.5819889, 1.42857143, 2.5819889, 1.5249857, 2.77350098,
  1.5430335, 2.77350098, 1.56173762, 2.77350098, 1.60128154, 2.88675135, 1.60128154, 3.01511345,
  1.62221421, 3.01511345, 1.64398987, 3.01511345, 1.64398987, 3.16227766, 1.71498585, 3.16227766,
  1.79605302, 3.16227766, 1.82574186, 3.33333333, 1.85695338, 3.33333333, 1.85695338, 3.33333333,
  1.85695338, 3.33333333, 1.9245009, 3.53553391, 1.9245009, 3.53553391, 1.96116135, 3.77964473,
  1.96116135, 3.77964473, 1.96116135, 3.77964473, 2., 3.77964473, 2., 4.0824829,
  4.0824829, 2.04124145, 4.0824829, 2.08514414, 4.0824829, 2.13200716, 4.0824829, 4.0824829,
  4.0824829, 4.0824829, 4.0824829, 4.0824829, 4.0824829, 4.0824829, 4.0824829, 4.0824829,
  4.0824829, 4.0824829, 2.13200716, 2.13200716, 2.13200716, 2.1821789, 2.1821789, 2.1821789,
  2.1821789, 2.29415734, 2.29415734, 2.29415734, 2.29415734, 2.29415734, 2.29415734, 2.3570226,
  2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.3570226,
  2.42535625, 2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.3570226, 2.5819889,
  2.42535625, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889,
  2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889,
  2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.5819889, 2.67261242, 2.67261242, 2.67261242,
  2.67261242, 2.67261242, 2.67261242, 2.67261242, 2.67261242, 2.67261242, 2.67261242, 2.67261242,
  2.67261242, 2.67261242};
// Size of the subtree hanging off each child: 1 for individual points,
// larger values (50, 100, 21, 24) for the cluster-node children.
const std::vector<int> sizes = {
  50, 100, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 21, 24, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};
};  // namespace Iris
namespace Digits {
constexpr int n_row = 1797;
const std::vector<int> parents = {
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797, 1797,
1797, 1797, 1797, 1797, 1797, 1798, 1799, 1798, 1799, 1799, 1799, 1799, 1799, 1799, 1799, 1799,
1799, 1799, 1798, 1798, 1798, 1798, 1798, 1798, 1798, 1798, 1798, 1798, 1798, 1800, 1801, 1800,
1801, 1801, 1800, 1801, 1800, 1801, 1800, 1801, 1801, 1801, 1801, 1800, 1801, 1800, 1801, 1800,
1801, 1800, 1801, 1801, 1800, 1801, 1800, 1801, 1800, 1801, 1800, 1801, 1800, 1801, 1801, 1800,
1802, 1803, 1800, 1802, 1802, 1802, 1803, 1803, 1800, 1802, 1802, 1802, 1803, 1800, 1802, 1803,
1800, 1802, 1803, 1800, 1802, 1803, 1800, 1802, 1803, 1800, 1802, 1803, 1803, 1800, 1802, 1803,
1800, 1802, 1803, 1800, 1802, 1803, 1800, 1800, 1802, 1803, 1800, 1802, 1803, 1800, 1800, 1802,
1803, 1800, 1802, 1803, 1800, 1802, 1803, 1800, 1802, 1803, 1800, 1802, 1803, 1800, 1802, 1802,
1803, 1803, 1800, 1802, 1803, 1800, 1802, 1803, 1803, 1800, 1802, 1803, 1800, 1802, 1803, 1800,
1802, 1803, 1800, 1802, 1803, 1800, 1802, 1802, 1803, 1800, 1804, 1805, 1803, 1800, 1804, 1805,
1803, 1800, 1804, 1805, 1803, 1800, 1804, 1804, 1805, 1803, 1800, 1804, 1805, 1803, 1800, 1804,
1805, 1803, 1800, 1804, 1804, 1805, 1803, 1800, 1804, 1804, 1804, 1805, 1803, 1800, 1800, 1804,
1804, 1805, 1803, 1800, 1806, 1807, 1807, 1805, 1803, 1800, 1806, 1807, 1805, 1805, 1803, 1800,
1806, 1807, 1805, 1803, 1800, 1806, 1807, 1805, 1803, 1803, 1800, 1806, 1807, 1805, 1803, 1800,
1806, 1807, 1805, 1803, 1803, 1800, 1806, 1807, 1807, 1805, 1805, 1805, 1805, 1805, 1805, 1805,
1805, 1805, 1805, 1805, 1803, 1800, 1806, 1806, 1807, 1803, 1800, 1806, 1806, 1807, 1803, 1800,
1806, 1807, 1803, 1800, 1806, 1807, 1803, 1803, 1800, 1806, 1807, 1803, 1800, 1806, 1806, 1807,
1803, 1800, 1806, 1806, 1807, 1803, 1800, 1806, 1807, 1803, 1800, 1806, 1807, 1803, 1800, 1806,
1807, 1803, 1800, 1800, 1806, 1806, 1807, 1803, 1800, 1806, 1807, 1807, 1803, 1800, 1806, 1807,
1803, 1800, 1806, 1807, 1803, 1800, 1806, 1807, 1803, 1800, 1806, 1807, 1803, 1800, 1806, 1807,
1807, 1807, 1807, 1803, 1800, 1806, 1807, 1803, 1803, 1803, 1800, 1806, 1806, 1807, 1803, 1800,
1808, 1809, 1807, 1803, 1803, 1800, 1808, 1809, 1807, 1803, 1803, 1800, 1800, 1808, 1809, 1807,
1810, 1811, 1800, 1808, 1808, 1808, 1808, 1809, 1807, 1810, 1811, 1811, 1811, 1811, 1811, 1811,
1811, 1811, 1811, 1811, 1800, 1808, 1809, 1807, 1810, 1800, 1808, 1809, 1807, 1810, 1810, 1800,
1808, 1808, 1809, 1807, 1807, 1810, 1800, 1808, 1809, 1807, 1810, 1810, 1810, 1800, 1808, 1809,
1807, 1810, 1800, 1808, 1809, 1807, 1810, 1800, 1808, 1809, 1807, 1810, 1800, 1800, 1808, 1809,
1807, 1810, 1800, 1808, 1809, 1809, 1807, 1810, 1800, 1808, 1809, 1807, 1810, 1800, 1808, 1809,
1807, 1810, 1800, 1800, 1808, 1809, 1809, 1807, 1810, 1800, 1800, 1808, 1809, 1807, 1810, 1800,
1808, 1809, 1807, 1807, 1810, 1810, 1810, 1800, 1800, 1808, 1809, 1807, 1810, 1800, 1808, 1809,
1807, 1810, 1810, 1800, 1808, 1809, 1807, 1810, 1800, 1800, 1808, 1809, 1807, 1810, 1800, 1808,
1808, 1809, 1807, 1810, 1800, 1808, 1809, 1807, 1810, 1800, 1808, 1808, 1809, 1807, 1810, 1800,
1812, 1812, 1812, 1812, 1812, 1812, 1812, 1813, 1809, 1809, 1807, 1807, 1810, 1800, 1812, 1813,
1809, 1807, 1810, 1800, 1812, 1813, 1809, 1807, 1810, 1800, 1800, 1800, 1812, 1813, 1809, 1809,
1807, 1810, 1800, 1812, 1813, 1813, 1813, 1813, 1813, 1813, 1813, 1813, 1813, 1813, 1809, 1807,
1807, 1810, 1800, 1812, 1809, 1807, 1810, 1800, 1812, 1809, 1807, 1810, 1800, 1812, 1809, 1807,
1810, 1800, 1812, 1809, 1807, 1810, 1800, 1812, 1809, 1807, 1810, 1800, 1812, 1809, 1807, 1810,
1810, 1800, 1812, 1809, 1807, 1807, 1814, 1815, 1815, 1815, 1815, 1815, 1815, 1815, 1815, 1815,
1815, 1800, 1800, 1812, 1809, 1807, 1807, 1814, 1800, 1812, 1809, 1807, 1814, 1800, 1812, 1809,
1807, 1814, 1800, 1812, 1812, 1809, 1807, 1807, 1814, 1800, 1812, 1809, 1807, 1814, 1800, 1800,
1812, 1812, 1809, 1809, 1807, 1814, 1814, 1814, 1814, 1814, 1814, 1814, 1814, 1814, 1814, 1800,
1812, 1812, 1809, 1807, 1800, 1812, 1812, 1809, 1809, 1807, 1800, 1812, 1809, 1807, 1807, 1800,
1812, 1809, 1807, 1807, 1800, 1812, 1812, 1809, 1807, 1800, 1812, 1809, 1807, 1800, 1800, 1812,
1809, 1807, 1800, 1800, 1812, 1809, 1807, 1800, 1812, 1809, 1807, 1807, 1807, 1807, 1807, 1807,
1807, 1807, 1807, 1800, 1812, 1809, 1809, 1809, 1807, 1800, 1800, 1812, 1809, 1807, 1800, 1800,
1800, 1800, 1800, 1812, 1809, 1807, 1800, 1812, 1809, 1809, 1807, 1800, 1812, 1812, 1809, 1809,
1807, 1800, 1812, 1812, 1809, 1807, 1807, 1807, 1807, 1800, 1812, 1809, 1807, 1800, 1800, 1800,
1800, 1812, 1809, 1807, 1800, 1812, 1812, 1812, 1809, 1809, 1809, 1809, 1807, 1800, 1812, 1809,
1807, 1800, 1812, 1809, 1807, 1800, 1800, 1800, 1812, 1809, 1807, 1800, 1812, 1809, 1807, 1800,
1800, 1800, 1800, 1800, 1800, 1800, 1800, 1800, 1800, 1812, 1809, 1807, 1812, 1809, 1807, 1807,
1807, 1812, 1809, 1807, 1812, 1809, 1807, 1812, 1809, 1807, 1812, 1812, 1809, 1807, 1807, 1812,
1809, 1807, 1812, 1809, 1807, 1812, 1809, 1807, 1812, 1809, 1809, 1807, 1812, 1812, 1809, 1807,
1816, 1817, 1809, 1809, 1809, 1809, 1809, 1809, 1807, 1807, 1807, 1807, 1816, 1816, 1817, 1809,
1807, 1816, 1817, 1809, 1809, 1807, 1807, 1816, 1817, 1818, 1819, 1807, 1807, 1807, 1807, 1807,
1807, 1807, 1807, 1807, 1807, 1816, 1816, 1816, 1817, 1817, 1818, 1818, 1818, 1818, 1818, 1818,
1818, 1818, 1818, 1818, 1819, 1816, 1816, 1817, 1817, 1819, 1816, 1817, 1819, 1816, 1817, 1819,
1816, 1817, 1817, 1817, 1819, 1819, 1819, 1816, 1817, 1819, 1816, 1817, 1817, 1819, 1816, 1817,
1819, 1816, 1817, 1819, 1816, 1817, 1817, 1817, 1817, 1817, 1817, 1817, 1817, 1817, 1819, 1816,
1817, 1819, 1819, 1819, 1816, 1816, 1817, 1819, 1819, 1820, 1821, 1817, 1819, 1820, 1821, 1821,
1817, 1817, 1819, 1819, 1820, 1821, 1817, 1819, 1819, 1820, 1821, 1817, 1822, 1823, 1820, 1821,
1817, 1817, 1822, 1823, 1820, 1821, 1817, 1822, 1822, 1822, 1822, 1822, 1822, 1822, 1822, 1822,
1822, 1823, 1820, 1821, 1817, 1823, 1820, 1821, 1817, 1823, 1820, 1821, 1817, 1823, 1820, 1821,
1817, 1823, 1823, 1823, 1823, 1823, 1823, 1823, 1823, 1823, 1823, 1823, 1823, 1820, 1821, 1817,
1820, 1821, 1817, 1817, 1820, 1821, 1817, 1817, 1820, 1821, 1817, 1820, 1821, 1817, 1820, 1820,
1821, 1817, 1820, 1821, 1817, 1820, 1821, 1817, 1820, 1821, 1817, 1820, 1821, 1817, 1820, 1821,
1817, 1820, 1821, 1817, 1817, 1820, 1821, 1824, 1825, 1820, 1821, 1824, 1825, 1820, 1820, 1821,
1824, 1824, 1825, 1820, 1821, 1821, 1824, 1825, 1820, 1826, 1826, 1827, 1824, 1825, 1820, 1828,
1829, 1827, 1827, 1824, 1824, 1825, 1820, 1828, 1828, 1829, 1827, 1830, 1831, 1825, 1820, 1828,
1828, 1828, 1828, 1828, 1828, 1828, 1829, 1829, 1827, 1830, 1831, 1825, 1820, 1828, 1829, 1827,
1830, 1831, 1825, 1820, 1828, 1829, 1827, 1830, 1831, 1825, 1820, 1828, 1829, 1827, 1830, 1831,
1825, 1825, 1820, 1828, 1829, 1827, 1827, 1827, 1827, 1827, 1827, 1827, 1827, 1827, 1827, 1827,
1827, 1827, 1827, 1830, 1831, 1825, 1820, 1828, 1829, 1830, 1831, 1831, 1825, 1820, 1828, 1829,
1830, 1831, 1825, 1820, 1820, 1828, 1829, 1830, 1831, 1831, 1831, 1831, 1831, 1831, 1831, 1831,
1825, 1820, 1828, 1829, 1830, 1831, 1825, 1820, 1828, 1829, 1830, 1831, 1831, 1825, 1820, 1828,
1829, 1830, 1832, 1833, 1825, 1820, 1820, 1820, 1828, 1829, 1830, 1830, 1832, 1833, 1825, 1820,
1828, 1829, 1830, 1832, 1833, 1825, 1820, 1828, 1829, 1830, 1832, 1833, 1833, 1825, 1820, 1828,
1829, 1830, 1832, 1833, 1825, 1820, 1828, 1829, 1830, 1832, 1833, 1825, 1820, 1820, 1828, 1829,
1830, 1830, 1830, 1832, 1833, 1825, 1820, 1828, 1829, 1829, 1829, 1829, 1829, 1829, 1830, 1832,
1833, 1825, 1820, 1820, 1828, 1829, 1830, 1832, 1832, 1832, 1833, 1825, 1820, 1820, 1828, 1828,
1829, 1830, 1830, 1832, 1833, 1825, 1820, 1828, 1828, 1829, 1830, 1832, 1833, 1825, 1820, 1828,
1829, 1829, 1830, 1832, 1833, 1833, 1825, 1820, 1828, 1828, 1828, 1828, 1828, 1828, 1828, 1829,
1830, 1830, 1832, 1833, 1833, 1833, 1825, 1820, 1828, 1829, 1830, 1830, 1832, 1833, 1833, 1825,
1820, 1828, 1829, 1830, 1832, 1833, 1825, 1820, 1828, 1829, 1829, 1830, 1832, 1833, 1825, 1820,
1820, 1820, 1828, 1829, 1830, 1832, 1833, 1833, 1825, 1820, 1828, 1829, 1830, 1832, 1833, 1825,
1820, 1820, 1828, 1829, 1830, 1832, 1833, 1825, 1820, 1828, 1828, 1829, 1829, 1829, 1829, 1829,
1829, 1829, 1829, 1829, 1829, 1830, 1832, 1833, 1833, 1833, 1833, 1833, 1833, 1833, 1833, 1833,
1833, 1825, 1820, 1828, 1830, 1832, 1825, 1820, 1820, 1820, 1828, 1830, 1832, 1825, 1820, 1828,
1830, 1832, 1825, 1820, 1828, 1830, 1832, 1825, 1820, 1820, 1828, 1830, 1832, 1825, 1820, 1828,
1830, 1832, 1825, 1820, 1820, 1820, 1828, 1830, 1832, 1825, 1820, 1828, 1830, 1830, 1832, 1825,
1825, 1820, 1820, 1828, 1830, 1832, 1825, 1820, 1828, 1828, 1830, 1832, 1825, 1820, 1820, 1820,
1820, 1828, 1830, 1832, 1825, 1820, 1820, 1828, 1830, 1832, 1832, 1832, 1832, 1832, 1832, 1832,
1832, 1832, 1832, 1832, 1825, 1820, 1828, 1830, 1825, 1820, 1828, 1830, 1825, 1825, 1820, 1828,
1830, 1825, 1825, 1820, 1828, 1830, 1825, 1820, 1828, 1830, 1825, 1825, 1820, 1820, 1820, 1820,
1828, 1830, 1825, 1825, 1820, 1828, 1830, 1825, 1820, 1828, 1830, 1825, 1820, 1820, 1828, 1830,
1825, 1820, 1820, 1828, 1830, 1825, 1820, 1828, 1830, 1825, 1825, 1825, 1820, 1828, 1830, 1825,
1820, 1828, 1830, 1825, 1825, 1825, 1820, 1828, 1830, 1830, 1825, 1820, 1820, 1828, 1830, 1825,
1834, 1834, 1835, 1828, 1830, 1830, 1830, 1830, 1825, 1834, 1835, 1828, 1830, 1830, 1830, 1830,
1830, 1830, 1830, 1830, 1830, 1830, 1830, 1830, 1825, 1834, 1835, 1828, 1825, 1834, 1834, 1835,
1828, 1825, 1834, 1835, 1828, 1825, 1834, 1835, 1828, 1825, 1834, 1835, 1828, 1825, 1825, 1825,
1834, 1835, 1828, 1825, 1834, 1835, 1828, 1825, 1834, 1835, 1835, 1835, 1835, 1835, 1835, 1835,
1835, 1835, 1835, 1828, 1825, 1834, 1828, 1825, 1834, 1828, 1825, 1834, 1828, 1828, 1828, 1828,
1828, 1825, 1825, 1825, 1825, 1834, 1828, 1825, 1834, 1828, 1825, 1825, 1825, 1834, 1828, 1825,
1834, 1828, 1825, 1834, 1828, 1825, 1825, 1834, 1828, 1828, 1828, 1828, 1828, 1828, 1828, 1828,
1828, 1828, 1825, 1834, 1825, 1834, 1825, 1834, 1834, 1834, 1834, 1834, 1834, 1834, 1834, 1834,
1834, 1825, 1825, 1825, 1825, 1825, 1825, 1825, 1825, 1825, 1825};
const std::vector<int> children = {
1113, 1149, 1572, 1660, 1595, 985, 1024, 77, 1727, 1551, 757, 1562, 891, 1581, 673, 1552,
1729, 421, 1274, 1195, 1154, 1150, 1690, 1152, 1264, 792, 9, 1038, 1593, 1580, 1628, 1712,
1685, 905, 341, 1100, 769, 1574, 502, 87, 1210, 1165, 1657, 1571, 1057, 1118, 538, 678,
215, 664, 1271, 1675, 1707, 1662, 813, 756, 1611, 1671, 670, 1037, 37, 1597, 125, 1119,
703, 576, 770, 767, 409, 1796, 1728, 1468, 1197, 1067, 492, 1296, 1155, 1078, 414, 852,
633, 103, 1553, 1088, 482, 784, 430, 1618, 1275, 899, 480, 710, 794, 578, 69, 1095,
1389, 291, 1412, 1765, 795, 46, 689, 1666, 630, 120, 1627, 283, 1440, 113, 1617, 1612,
1391, 1141, 860, 1540, 547, 1710, 1404, 1048, 53, 1080, 903, 327, 1557, 1400, 127, 1637,
75, 54, 1573, 500, 690, 2, 1789, 284, 691, 1146, 418, 116, 50, 57, 51, 115,
33, 1575, 1605, 1299, 1202, 303, 951, 766, 528, 838, 457, 988, 394, 1289, 134, 638,
1079, 1708, 539, 1123, 1288, 1664, 992, 265, 444, 231, 1589, 1060, 1186, 1695, 38, 294,
170, 1077, 1233, 1798, 1799, 1145, 472, 158, 393, 447, 467, 429, 494, 498, 403, 413,
438, 504, 1576, 1632, 1646, 1699, 1742, 96, 378, 524, 74, 1800, 1801, 843, 1781, 1592,
161, 746, 824, 1022, 701, 1619, 617, 19, 121, 1062, 1583, 588, 191, 1258, 31, 1287,
27, 1205, 605, 700, 1722, 1565, 1025, 1072, 1283, 569, 1768, 1103, 1591, 1802, 1803, 902,
720, 86, 825, 8, 28, 651, 317, 1635, 78, 1242, 722, 1156, 7, 1615, 206, 1467,
1413, 876, 364, 1598, 1582, 211, 980, 1705, 1108, 595, 808, 523, 1139, 1049, 1763, 1604,
1366, 1558, 1135, 776, 965, 1587, 292, 796, 1665, 1073, 594, 1178, 732, 981, 1297, 1689,
43, 357, 1044, 1753, 101, 774, 922, 1745, 872, 1748, 1193, 1538, 628, 1716, 489, 1004,
374, 1684, 72, 600, 1586, 1229, 1058, 1056, 1570, 150, 1341, 1395, 179, 1066, 828, 564,
1096, 758, 695, 1132, 108, 465, 1804, 1805, 1649, 10, 1680, 623, 263, 1206, 379, 606,
52, 1793, 432, 586, 1348, 286, 1302, 1730, 563, 1779, 656, 506, 947, 894, 1715, 1743,
926, 216, 209, 545, 847, 609, 884, 1323, 760, 1602, 1603, 601, 857, 30, 416, 1806,
1807, 1000, 147, 20, 1129, 1234, 1654, 596, 1331, 36, 477, 1142, 572, 832, 118, 1106,
1726, 1364, 994, 1459, 536, 295, 1041, 558, 1218, 1265, 140, 966, 629, 537, 862, 831,
133, 1751, 442, 61, 912, 1105, 1750, 152, 518, 916, 527, 906, 987, 952, 1008, 991,
958, 982, 517, 972, 1314, 724, 198, 1098, 591, 1121, 1687, 1147, 1308, 721, 1243, 1317,
1064, 632, 819, 487, 599, 205, 222, 963, 422, 5, 12, 350, 571, 491, 779, 1309,
1373, 304, 555, 950, 1338, 949, 1435, 1544, 440, 1775, 747, 804, 1273, 17, 208, 521,
593, 1339, 256, 1177, 535, 1606, 1679, 1009, 1307, 1795, 1655, 1724, 240, 1187, 1248, 1337,
954, 1563, 737, 1547, 1184, 49, 1790, 461, 308, 915, 842, 436, 1036, 646, 1361, 570,
566, 567, 573, 995, 526, 1738, 244, 1294, 1304, 1174, 55, 1808, 1809, 1179, 1785, 854,
247, 1070, 1241, 1622, 1653, 1487, 18, 1001, 1014, 1810, 1811, 1359, 1493, 1407, 998, 243,
1399, 707, 848, 1172, 742, 352, 717, 4, 1783, 1200, 173, 1269, 663, 543, 740, 727,
754, 698, 1711, 577, 382, 1285, 780, 1530, 137, 48, 1658, 626, 696, 174, 559, 1642,
192, 1176, 1151, 1272, 1403, 837, 1212, 1280, 68, 761, 1251, 44, 1019, 334, 735, 1281,
723, 1496, 1235, 1787, 873, 1081, 1330, 386, 123, 1767, 917, 560, 1128, 1739, 839, 970,
142, 568, 1388, 445, 144, 171, 84, 1381, 855, 35, 110, 371, 342, 160, 267, 400,
207, 337, 126, 1555, 574, 496, 687, 369, 1405, 718, 772, 420, 473, 443, 1523, 552,
462, 1778, 697, 1232, 1761, 182, 1013, 1082, 1153, 277, 1735, 1169, 870, 1059, 203, 483,
851, 299, 803, 786, 402, 238, 730, 948, 1065, 1470, 964, 929, 1207, 1046, 565, 1489,
1643, 510, 1717, 932, 1425, 471, 427, 1625, 1533, 1342, 1812, 1813, 1221, 307, 1432, 1746,
275, 1633, 329, 348, 384, 325, 361, 719, 946, 1731, 668, 892, 1368, 1236, 1542, 1554,
1567, 437, 94, 1579, 913, 751, 627, 1499, 157, 458, 1167, 1445, 836, 677, 324, 497,
1104, 368, 1620, 681, 655, 375, 771, 738, 381, 765, 715, 665, 685, 675, 1397, 1102,
398, 783, 1157, 1166, 887, 1362, 236, 130, 254, 1311, 1600, 1719, 974, 1670, 1398, 1780,
1458, 666, 511, 24, 408, 653, 178, 548, 390, 184, 112, 1099, 253, 356, 187, 1814,
1815, 1667, 29, 1652, 1721, 1744, 634, 888, 983, 820, 300, 1509, 1527, 1422, 1476, 1442,
1501, 435, 1703, 1396, 64, 648, 853, 610, 1663, 1720, 897, 1344, 1238, 516, 106, 530,
278, 533, 166, 684, 688, 1023, 306, 1143, 1209, 266, 1659, 790, 1270, 764, 185, 935,
513, 525, 800, 1351, 340, 1694, 597, 793, 1201, 1164, 624, 273, 81, 602, 1674, 676,
590, 805, 239, 953, 202, 1313, 1794, 507, 637, 22, 1415, 1786, 1777, 827, 241, 1335,
683, 1754, 388, 889, 725, 296, 488, 743, 499, 806, 1216, 900, 658, 311, 1516, 1277,
194, 907, 512, 694, 1279, 100, 1427, 328, 661, 14, 1371, 956, 959, 979, 1016, 1051,
1402, 977, 1084, 276, 1068, 1419, 584, 1408, 1111, 877, 335, 755, 97, 214, 434, 406,
1545, 252, 1336, 1047, 366, 1465, 1697, 1015, 1526, 1607, 788, 1002, 1588, 1758, 557, 1181,
1214, 1677, 608, 1668, 1012, 1211, 333, 798, 830, 1039, 1700, 603, 778, 941, 812, 925,
305, 404, 1137, 132, 546, 1772, 92, 1021, 1328, 1291, 1301, 1384, 181, 642, 1702, 111,
1669, 441, 509, 1374, 1594, 1464, 396, 1451, 1260, 1267, 180, 1463, 354, 1278, 986, 1494,
1029, 79, 0, 464, 1365, 1541, 229, 682, 957, 508, 616, 631, 1130, 540, 1208, 759,
762, 1491, 909, 313, 1375, 250, 927, 373, 1651, 826, 554, 1393, 1138, 331, 1528, 1020,
863, 258, 898, 844, 372, 816, 1011, 463, 45, 1411, 1764, 1017, 1816, 1817, 280, 1782,
224, 942, 1124, 1043, 1053, 1091, 1033, 1114, 1159, 1031, 310, 268, 731, 1284, 1741, 1479,
470, 782, 391, 1818, 1819, 1140, 1718, 1256, 389, 225, 640, 1531, 833, 1417, 1466, 1472,
501, 1490, 1469, 1492, 1437, 829, 1529, 1537, 1180, 1230, 450, 454, 154, 660, 714, 410,
486, 733, 650, 228, 1691, 439, 1511, 155, 930, 1681, 1085, 519, 1638, 515, 1190, 1268,
1723, 1144, 901, 1061, 1641, 343, 1355, 312, 1532, 1456, 104, 553, 748, 353, 1253, 289,
1225, 235, 1194, 1257, 383, 1192, 641, 607, 529, 575, 561, 619, 614, 579, 1198, 736,
1219, 1788, 1171, 919, 1820, 1821, 1406, 1244, 1791, 1647, 1298, 1776, 817, 1561, 122, 168,
105, 302, 520, 1148, 362, 93, 1255, 1822, 1823, 314, 95, 220, 886, 580, 1732, 933,
1136, 1162, 1539, 380, 734, 997, 1656, 1512, 1502, 1515, 1439, 1483, 1429, 1536, 840, 1525,
613, 320, 481, 773, 1548, 297, 1734, 274, 1379, 270, 1569, 255, 1378, 1661, 1369, 1175,
1704, 377, 1584, 1161, 1254, 41, 124, 1387, 367, 1549, 1127, 260, 1559, 604, 148, 1650,
581, 129, 1189, 1310, 1645, 424, 1089, 1543, 996, 138, 1319, 16, 1204, 585, 752, 1109,
1752, 1231, 1382, 264, 401, 583, 1185, 879, 810, 172, 217, 1629, 1401, 293, 1303, 645,
419, 598, 1069, 1824, 1825, 1771, 978, 25, 990, 1636, 1747, 119, 1506, 452, 1601, 693,
271, 878, 226, 880, 1826, 1827, 423, 918, 1321, 1828, 1829, 775, 850, 744, 1293, 883,
85, 1455, 1453, 1830, 1831, 1349, 864, 1286, 1383, 1416, 945, 621, 1784, 446, 1131, 592,
534, 639, 544, 643, 556, 612, 131, 177, 426, 1326, 71, 1290, 1762, 183, 890, 768,
73, 1259, 1332, 1122, 114, 1505, 814, 251, 204, 1220, 1623, 1599, 1457, 370, 1392, 893,
835, 1170, 1473, 726, 80, 699, 686, 674, 332, 923, 1028, 654, 943, 955, 309, 1026,
1433, 1423, 1443, 417, 145, 448, 392, 1737, 1514, 459, 503, 1578, 190, 1608, 745, 1500,
39, 1054, 1770, 1092, 1481, 1363, 221, 807, 1333, 460, 1203, 411, 495, 1312, 1320, 1358,
999, 622, 856, 210, 589, 920, 659, 1609, 615, 433, 1324, 1832, 1833, 23, 1173, 336,
1495, 1006, 102, 109, 153, 713, 156, 712, 1414, 227, 895, 1488, 365, 671, 1052, 532,
1117, 1522, 845, 358, 809, 315, 750, 40, 218, 1306, 1322, 1101, 1228, 484, 1115, 1709,
141, 868, 1486, 679, 821, 453, 1409, 151, 128, 531, 811, 1513, 1577, 1673, 249, 42,
904, 914, 1434, 1769, 822, 874, 34, 1083, 407, 1426, 1471, 1462, 1485, 1508, 199, 1524,
237, 189, 412, 474, 363, 387, 1226, 1713, 1682, 1692, 1560, 1347, 858, 1755, 1410, 1564,
90, 405, 644, 1672, 163, 399, 1094, 76, 1340, 107, 1484, 551, 781, 928, 1749, 753,
428, 1343, 1534, 1292, 1266, 1550, 1418, 338, 248, 1305, 1325, 1327, 1295, 1315, 1596, 466,
478, 1424, 618, 15, 1568, 1034, 143, 223, 242, 1158, 993, 1318, 910, 246, 1182, 1086,
67, 1, 1367, 1316, 1010, 245, 1346, 834, 787, 397, 485, 169, 261, 176, 962, 188,
704, 1239, 1030, 479, 1262, 1018, 763, 692, 908, 802, 739, 1436, 1356, 288, 376, 1160,
1055, 1693, 1372, 70, 971, 1003, 230, 59, 680, 818, 1688, 21, 186, 200, 456, 493,
47, 1168, 476, 11, 56, 849, 32, 791, 135, 162, 1614, 201, 885, 896, 165, 801,
117, 1475, 1093, 647, 415, 541, 475, 968, 321, 542, 702, 1360, 1075, 3, 323, 846,
815, 1535, 1376, 1725, 1040, 1706, 549, 1217, 871, 960, 1556, 1792, 1430, 1042, 1191, 667,
1759, 281, 449, 1252, 322, 1224, 1774, 285, 1448, 318, 26, 1546, 233, 936, 940, 98,
1756, 282, 1701, 716, 1520, 1420, 1300, 1245, 1678, 1766, 1454, 1461, 1566, 66, 212, 1449,
1510, 1714, 167, 330, 749, 550, 1610, 1757, 287, 937, 976, 636, 938, 587, 973, 1517,
562, 625, 1447, 1450, 1074, 1215, 861, 505, 60, 290, 1071, 635, 1090, 1125, 1063, 1760,
975, 193, 269, 728, 657, 881, 385, 196, 823, 934, 1385, 1350, 58, 1163, 82, 232,
866, 1249, 1477, 1478, 6, 99, 1736, 13, 1133, 298, 395, 859, 1222, 1482, 346, 1686,
1630, 841, 1183, 257, 1276, 1474, 234, 1199, 1196, 1390, 83, 708, 1035, 1126, 159, 1087,
662, 797, 924, 669, 1116, 1644, 989, 1626, 1698, 149, 431, 1834, 1835, 869, 1740, 469,
164, 921, 1521, 1120, 1446, 1282, 1452, 1507, 1246, 1045, 65, 1613, 785, 1188, 1027, 514,
455, 425, 1676, 944, 1444, 1696, 139, 1616, 1110, 1733, 195, 1107, 867, 351, 967, 88,
1112, 213, 882, 1263, 355, 1428, 969, 146, 349, 1370, 136, 1353, 875, 1460, 91, 799,
344, 451, 1380, 1032, 1480, 490, 1357, 1624, 1683, 611, 1261, 468, 1354, 620, 1345, 1352,
522, 582, 1223, 1590, 1240, 1773, 1386, 89, 1007, 1076, 62, 1497, 326, 1247, 1227, 1134,
1250, 63, 359, 1438, 1639, 197, 1394, 649, 911, 1631, 316, 729, 705, 931, 1640, 301,
1503, 1377, 706, 939, 1213, 175, 219, 272, 1329, 1621, 1585, 1648, 1050, 777, 1334, 1237,
1634, 1097, 345, 652, 319, 672, 961, 1519, 1421, 1431, 1441, 711, 360, 262, 984, 1005,
741, 347, 709, 789, 1504, 339, 279, 1518, 259, 1498, 865};
const std::vector<float> lambdas = {
0.02839809, 0.02897638, 0.02939905, 0.03051391, 0.03062819, 0.03100868, 0.0310236, 0.03115885,
0.03152833, 0.03160698, 0.0316386, 0.03171807, 0.03175003, 0.03202563, 0.03212463, 0.03266858,
0.03266858, 0.03280894, 0.03289758, 0.0329154, 0.03320446, 0.0335578, 0.03357671, 0.03367175,
0.03372916, 0.03384487, 0.03402069, 0.0340404, 0.03407991, 0.03415935, 0.03423935, 0.03431991,
0.03436041, 0.03440105, 0.03450328, 0.03466876, 0.03466876, 0.03471051, 0.03481553, 0.03485781,
0.03498557, 0.03502847, 0.03507153, 0.03513642, 0.03517988, 0.03520167, 0.0352235, 0.03524537,
0.03524537, 0.03528923, 0.03528923, 0.03533326, 0.03535534, 0.03539962, 0.03546635, 0.03546635,
0.03548867, 0.03564615, 0.03564615, 0.03566882, 0.03573708, 0.03582872, 0.03582872, 0.0358748,
0.03594426, 0.03606092, 0.03608439, 0.03610791, 0.03617873, 0.03620243, 0.03622618, 0.03624997,
0.03627381, 0.03632164, 0.03634562, 0.03636965, 0.03636965, 0.03636965, 0.03644203, 0.03649052,
0.03651484, 0.0365392, 0.0365392, 0.03658809, 0.0366126, 0.03666178, 0.03668644, 0.03673592,
0.03673592, 0.03681051, 0.03681051, 0.03683547, 0.03686049, 0.03688556, 0.03691067, 0.03693584,
0.03696106, 0.03696106, 0.03708795, 0.03711348, 0.03711348, 0.03711348, 0.0371904, 0.03729371,
0.03729371, 0.03731967, 0.03734568, 0.03737175, 0.03752933, 0.03752933, 0.0375823, 0.03760887,
0.0376355, 0.0376355, 0.03766218, 0.03774257, 0.03779645, 0.0378777, 0.03793216, 0.03793216,
0.03798686, 0.03806935, 0.03809697, 0.03809697, 0.03818018, 0.03818018, 0.03823596, 0.03826394,
0.03826394, 0.03826394, 0.03834825, 0.03837648, 0.03846154, 0.03854717, 0.03857584, 0.03857584,
0.03866223, 0.03872015, 0.03872015, 0.03872015, 0.03872015, 0.03872015, 0.03872015, 0.03872015,
0.03872015, 0.03877834, 0.03877834, 0.03883678, 0.03883678, 0.03883678, 0.0388661, 0.0388661,
0.0388661, 0.03889549, 0.03889549, 0.03895447, 0.03895447, 0.03898406, 0.03904344, 0.03904344,
0.03907323, 0.03907323, 0.03907323, 0.03910309, 0.03913302, 0.03919309, 0.03922323, 0.03922323,
0.03922323, 0.03922323, 0.03931406, 0.03934447, 0.03943615, 0.03943615, 0.03946685, 0.03946685,
0.03949763, 0.03949763, 0.03955939, 0.03955939, 0.03955939, 0.03959038, 0.04055536, 0.03965258,
0.04833682, 0.04833682, 0.04833682, 0.04833682, 0.04833682, 0.04833682, 0.04833682, 0.04833682,
0.04833682, 0.04833682, 0.03968379, 0.03974643, 0.03974643, 0.03974643, 0.03974643, 0.03977786,
0.03977786, 0.03977786, 0.03977786, 0.03977786, 0.03977786, 0.03977786, 0.03984095, 0.04134491,
0.03984095, 0.03984095, 0.04283529, 0.03987261, 0.04402255, 0.03990434, 0.0451754, 0.03990434,
0.03990434, 0.03990434, 0.03990434, 0.04545455, 0.03993615, 0.04598005, 0.03993615, 0.04607757,
0.03996804, 0.04622502, 0.04, 0.04, 0.04627448, 0.04003204, 0.04637389, 0.04006415,
0.04642383, 0.04006415, 0.04647394, 0.04016097, 0.046676, 0.04019339, 0.04019339, 0.04703604,
0.04025848, 0.04032389, 0.04783649, 0.04025848, 0.04025848, 0.04025848, 0.04085889, 0.04085889,
0.04856429, 0.04032389, 0.04032389, 0.04032389, 0.04089304, 0.04873702, 0.04032389, 0.04106508,
0.04956816, 0.04035672, 0.04106508, 0.04962917, 0.0404226, 0.0411345, 0.04975186, 0.04045567,
0.04120428, 0.04987547, 0.04048882, 0.0418487, 0.0418487, 0.05, 0.04052204, 0.04214498,
0.05031546, 0.04052204, 0.04225771, 0.05037927, 0.04058875, 0.04271788, 0.05044333, 0.05044333,
0.04062222, 0.04271788, 0.05057217, 0.04062222, 0.04287465, 0.05063697, 0.05063697, 0.04065578,
0.04291411, 0.05070201, 0.04065578, 0.04311306, 0.05083286, 0.04065578, 0.0433963, 0.05150262,
0.04072315, 0.04347826, 0.05157106, 0.04079085, 0.04389513, 0.05198752, 0.04082483, 0.04082483,
0.04419417, 0.04419417, 0.0521286, 0.04085889, 0.04432422, 0.05227084, 0.04089304, 0.04445542,
0.04445542, 0.05234239, 0.04092728, 0.04454354, 0.05255883, 0.0409616, 0.04454354, 0.05263158,
0.0409616, 0.04476615, 0.05270463, 0.0409616, 0.04485613, 0.05270463, 0.040996, 0.040996,
0.0451754, 0.05285164, 0.0410305, 0.04222003, 0.04522156, 0.05292561, 0.0410305, 0.04229549,
0.04526787, 0.0531494, 0.04106508, 0.04360207, 0.04559608, 0.05345225, 0.04109975, 0.04109975,
0.04476615, 0.04564355, 0.05352877, 0.0411345, 0.04593152, 0.04578685, 0.05368281, 0.04116935,
0.0474579, 0.04607757, 0.05368281, 0.04116935, 0.04116935, 0.04767313, 0.04617571, 0.05383819,
0.04120428, 0.04120428, 0.04120428, 0.04828045, 0.04642383, 0.05407381, 0.05407381, 0.04120428,
0.04120428, 0.04981355, 0.04642383, 0.05415304, 0.04127442, 0.04130962, 0.04130962, 0.05012547,
0.04657464, 0.05455447, 0.04127442, 0.04134491, 0.05096472, 0.05096472, 0.046676, 0.05488213,
0.04130962, 0.04173919, 0.05191741, 0.04688072, 0.05504819, 0.04130962, 0.0417756, 0.05263158,
0.0469841, 0.0469841, 0.05513178, 0.04134491, 0.0421076, 0.05383819, 0.0469841, 0.05521576,
0.04138029, 0.04218245, 0.05439283, 0.04729838, 0.04729838, 0.05538488, 0.04141577, 0.04237136,
0.04237136, 0.05479966, 0.05479966, 0.05479966, 0.05479966, 0.05479966, 0.05479966, 0.05479966,
0.05479966, 0.05479966, 0.05479966, 0.05479966, 0.04740455, 0.05538488, 0.04141577, 0.04141577,
0.04295368, 0.0474579, 0.05564149, 0.04145133, 0.04145133, 0.04299336, 0.04761905, 0.05598925,
0.04145133, 0.04303315, 0.04789131, 0.05607722, 0.04148699, 0.04303315, 0.04850713, 0.04850713,
0.05607722, 0.04155858, 0.04319342, 0.0489116, 0.05634362, 0.04159452, 0.04159452, 0.04389513,
0.04897021, 0.05643326, 0.04163054, 0.04163054, 0.04393748, 0.04902903, 0.05652334, 0.04166667,
0.04419417, 0.04944682, 0.05688801, 0.04166667, 0.04428074, 0.04950738, 0.05698029, 0.04173919,
0.04454354, 0.05025189, 0.05707301, 0.05707301, 0.0417756, 0.0417756, 0.0445878, 0.05025189,
0.0571662, 0.04188539, 0.04508348, 0.04508348, 0.05025189, 0.05725983, 0.04188539, 0.04508348,
0.05050763, 0.05735393, 0.04188539, 0.04526787, 0.05057217, 0.05783149, 0.04192218, 0.04526787,
0.05063697, 0.05812382, 0.04192218, 0.04526787, 0.05070201, 0.05812382, 0.04195907, 0.04545455,
0.04545455, 0.04545455, 0.04545455, 0.05070201, 0.05822225, 0.04199605, 0.04559608, 0.05103104,
0.05103104, 0.05103104, 0.05832118, 0.04199605, 0.04199605, 0.04564355, 0.05157106, 0.05872202,
0.04199605, 0.0421076, 0.04593152, 0.05170877, 0.05170877, 0.05882353, 0.04203314, 0.04225771,
0.04593152, 0.05191741, 0.05191741, 0.05902813, 0.05902813, 0.04207032, 0.04244764, 0.04617571,
0.05191741, 0.05399492, 0.05913124, 0.0421076, 0.0421076, 0.0421076, 0.0421076, 0.04275691,
0.04637389, 0.0521286, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304,
0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05933908, 0.04222003, 0.04291411, 0.04729838,
0.05248639, 0.05944383, 0.04229549, 0.04295368, 0.04740455, 0.05255883, 0.05255883, 0.059655,
0.04229549, 0.04229549, 0.04351941, 0.04751143, 0.04751143, 0.05263158, 0.05976143, 0.04233338,
0.04415108, 0.04751143, 0.05270463, 0.05270463, 0.05270463, 0.05997601, 0.04233338, 0.04419417,
0.04756515, 0.05277798, 0.06008418, 0.04237136, 0.0442374, 0.04761905, 0.05277798, 0.06019293,
0.04237136, 0.04445542, 0.04778185, 0.05307449, 0.06030227, 0.06030227, 0.04244764, 0.04449942,
0.04783649, 0.0531494, 0.06052275, 0.04248593, 0.04485613, 0.04485613, 0.04783649, 0.05345225,
0.06063391, 0.04248593, 0.04494666, 0.04794633, 0.05391639, 0.06063391, 0.04252433, 0.04499213,
0.04800154, 0.05399492, 0.06074567, 0.06074567, 0.04256283, 0.04503773, 0.04503773, 0.04816831,
0.05399492, 0.06108472, 0.06108472, 0.04256283, 0.04536092, 0.04822428, 0.05407381, 0.06119901,
0.04260143, 0.04545455, 0.04828045, 0.04828045, 0.05415304, 0.05415304, 0.05415304, 0.06142951,
0.06142951, 0.04264014, 0.04588315, 0.04850713, 0.05423261, 0.06142951, 0.04264014, 0.04642383,
0.04856429, 0.05423261, 0.05423261, 0.06142951, 0.04271788, 0.04652421, 0.04856429, 0.05447347,
0.06154575, 0.06154575, 0.04275691, 0.04677803, 0.04856429, 0.05488213, 0.06154575, 0.04283529,
0.04283529, 0.04682929, 0.0488532, 0.05564149, 0.06178021, 0.04287465, 0.04688072, 0.04897021,
0.05572782, 0.06189845, 0.04291411, 0.04291411, 0.04708816, 0.04920678, 0.05572782, 0.06213698,
0.04303315, 0.04303315, 0.04303315, 0.04303315, 0.04303315, 0.04303315, 0.04303315, 0.04376881,
0.04735137, 0.04735137, 0.04932636, 0.04932636, 0.05581456, 0.06213698, 0.04303315, 0.0451754,
0.04751143, 0.04938648, 0.0559017, 0.06237829, 0.04303315, 0.04602873, 0.04751143, 0.04944682,
0.05634362, 0.0625, 0.0625, 0.0625, 0.04311306, 0.0469841, 0.04756515, 0.04756515,
0.04956816, 0.05652334, 0.06274558, 0.04311306, 0.0474579, 0.0474579, 0.0474579, 0.0474579,
0.0474579, 0.0474579, 0.0474579, 0.0474579, 0.0474579, 0.0474579, 0.04783649, 0.04962917,
0.04962917, 0.05652334, 0.06274558, 0.04315319, 0.04794633, 0.04962917, 0.05688801, 0.06274558,
0.04315319, 0.04811252, 0.0496904, 0.05688801, 0.06286946, 0.04319342, 0.04845016, 0.05006262,
0.05707301, 0.06324555, 0.04319342, 0.04850713, 0.05025189, 0.05725983, 0.06350006, 0.04323377,
0.04856429, 0.05025189, 0.05725983, 0.06362848, 0.04323377, 0.04862166, 0.05044333, 0.0574485,
0.0574485, 0.06401844, 0.04323377, 0.04897021, 0.05050763, 0.05050763, 0.05862104, 0.05783149,
0.05783149, 0.05783149, 0.05783149, 0.05783149, 0.05783149, 0.05783149, 0.05783149, 0.05783149,
0.05783149, 0.06454972, 0.06454972, 0.04331481, 0.04902903, 0.05050763, 0.05050763, 0.05933908,
0.06482037, 0.0433555, 0.04914732, 0.05057217, 0.05954913, 0.06482037, 0.0433555, 0.04944682,
0.05103104, 0.06008418, 0.06482037, 0.0433963, 0.0433963, 0.04950738, 0.05109761, 0.05109761,
0.06063391, 0.06509446, 0.04351941, 0.04950738, 0.05129892, 0.06074567, 0.06523281, 0.06523281,
0.04351941, 0.04351941, 0.0496904, 0.0496904, 0.05129892, 0.06108472, 0.06108472, 0.06108472,
0.06108472, 0.06108472, 0.06108472, 0.06108472, 0.06108472, 0.06108472, 0.06108472, 0.06565322,
0.04356068, 0.04356068, 0.04993762, 0.05157106, 0.06565322, 0.04360207, 0.04360207, 0.05,
0.05, 0.05157106, 0.06579517, 0.04364358, 0.05012547, 0.05163978, 0.05163978, 0.06608186,
0.04364358, 0.05012547, 0.05163978, 0.05163978, 0.06608186, 0.0436852, 0.0436852, 0.05018856,
0.05184758, 0.06681531, 0.04372695, 0.05037927, 0.0521286, 0.06726728, 0.06726728, 0.04376881,
0.05037927, 0.05241424, 0.06757374, 0.06757374, 0.0438108, 0.05037927, 0.05277798, 0.06772855,
0.0438108, 0.05044333, 0.05330018, 0.05330018, 0.05330018, 0.05330018, 0.05330018, 0.05330018,
0.05330018, 0.05330018, 0.05330018, 0.06772855, 0.0438529, 0.05063697, 0.05063697, 0.05063697,
0.05330018, 0.06804138, 0.06804138, 0.04389513, 0.05083286, 0.05337605, 0.06819943, 0.06819943,
0.06819943, 0.06819943, 0.06819943, 0.04397995, 0.05109761, 0.05352877, 0.06851887, 0.04397995,
0.05116445, 0.05116445, 0.05352877, 0.06900656, 0.04402255, 0.04402255, 0.05116445, 0.05116445,
0.05360563, 0.06917145, 0.04406526, 0.04406526, 0.05143445, 0.05376033, 0.05376033, 0.05376033,
0.05376033, 0.06917145, 0.04415108, 0.05150262, 0.05376033, 0.0695048, 0.0695048, 0.0695048,
0.0695048, 0.04415108, 0.05163978, 0.05376033, 0.070014, 0.04419417, 0.04419417, 0.04419417,
0.05177804, 0.05177804, 0.05177804, 0.05177804, 0.05391639, 0.070014, 0.0442374, 0.05198752,
0.05415304, 0.07106691, 0.0442374, 0.0521286, 0.05415304, 0.07124705, 0.07124705, 0.07124705,
0.04428074, 0.05227084, 0.05431254, 0.07161149, 0.04428074, 0.05255883, 0.05439283, 0.07179582,
0.07179582, 0.07179582, 0.07179582, 0.07179582, 0.07179582, 0.07179582, 0.07179582, 0.07179582,
0.07179582, 0.04432422, 0.05263158, 0.05447347, 0.04436783, 0.05263158, 0.05455447, 0.05455447,
0.05455447, 0.04441156, 0.05270463, 0.05471757, 0.04441156, 0.05285164, 0.05547002, 0.04441156,
0.05307449, 0.05547002, 0.04445542, 0.04445542, 0.05322463, 0.05555556, 0.05555556, 0.04449942,
0.05330018, 0.05598925, 0.04449942, 0.05330018, 0.05698029, 0.04449942, 0.05337605, 0.0571662,
0.04454354, 0.05360563, 0.05360563, 0.05763904, 0.04463218, 0.04463218, 0.05407381, 0.05773503,
0.04472136, 0.04463218, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304,
0.05832118, 0.05832118, 0.05832118, 0.05832118, 0.04476615, 0.04476615, 0.04467671, 0.05431254,
0.05892557, 0.04481107, 0.04476615, 0.05471757, 0.05471757, 0.06119901, 0.06119901, 0.04481107,
0.04481107, 0.05538488, 0.05479966, 0.06237829, 0.06237829, 0.06237829, 0.06237829, 0.06237829,
0.06237829, 0.06237829, 0.06237829, 0.06237829, 0.06237829, 0.04485613, 0.04485613, 0.04485613,
0.04485613, 0.04485613, 0.05538488, 0.05538488, 0.05538488, 0.05538488, 0.05538488, 0.05538488,
0.05538488, 0.05538488, 0.05538488, 0.05538488, 0.05496497, 0.04485613, 0.04485613, 0.04490133,
0.04490133, 0.05530013, 0.04499213, 0.04494666, 0.05538488, 0.04503773, 0.04499213, 0.05538488,
0.04512937, 0.04499213, 0.04499213, 0.04499213, 0.05555556, 0.05555556, 0.05555556, 0.04512937,
0.04508348, 0.05572782, 0.0451754, 0.04512937, 0.04512937, 0.05607722, 0.04522156, 0.04512937,
0.0561656, 0.04522156, 0.04526787, 0.0562544, 0.04531433, 0.04526787, 0.04526787, 0.04526787,
0.04526787, 0.04526787, 0.04526787, 0.04526787, 0.04526787, 0.04526787, 0.05661385, 0.04536092,
0.04531433, 0.05698029, 0.05698029, 0.05698029, 0.04540766, 0.04540766, 0.04536092, 0.05698029,
0.05698029, 0.04598005, 0.04540766, 0.04540766, 0.05698029, 0.04607757, 0.04540766, 0.04540766,
0.04540766, 0.04540766, 0.05754353, 0.05754353, 0.04632411, 0.04540766, 0.04550158, 0.05763904,
0.05763904, 0.04647394, 0.04540766, 0.04559608, 0.05862104, 0.05763904, 0.04693233, 0.04554875,
0.04569117, 0.04569117, 0.05933908, 0.05802589, 0.04708816, 0.04559608, 0.04583492, 0.059655,
0.059655, 0.059655, 0.059655, 0.059655, 0.059655, 0.059655, 0.059655, 0.059655,
0.059655, 0.05852057, 0.0477274, 0.04559608, 0.04593152, 0.05902813, 0.04783649, 0.04564355,
0.04593152, 0.05954913, 0.04873702, 0.04569117, 0.04593152, 0.05986843, 0.04950738, 0.04578685,
0.04598005, 0.06019293, 0.06019293, 0.06019293, 0.06019293, 0.06019293, 0.06019293, 0.06019293,
0.06019293, 0.06019293, 0.06019293, 0.06019293, 0.06019293, 0.04962917, 0.04578685, 0.04602873,
0.0496904, 0.04578685, 0.04602873, 0.04602873, 0.04987547, 0.04583492, 0.04607757, 0.04607757,
0.05031546, 0.04588315, 0.04617571, 0.05031546, 0.04593152, 0.04617571, 0.05037927, 0.05037927,
0.04607757, 0.04622502, 0.05057217, 0.04612656, 0.04627448, 0.05076731, 0.04622502, 0.04642383,
0.05123155, 0.04627448, 0.04652421, 0.05150262, 0.04642383, 0.04657464, 0.05170877, 0.04647394,
0.046676, 0.05177804, 0.04657464, 0.04672693, 0.04672693, 0.05191741, 0.04657464, 0.04677803,
0.04688072, 0.05205792, 0.04662524, 0.04688072, 0.04703604, 0.05234239, 0.05234239, 0.04672693,
0.0469841, 0.0469841, 0.04708816, 0.05255883, 0.04682929, 0.04682929, 0.04703604, 0.04714045,
0.05322463, 0.0469841, 0.0469841, 0.04682929, 0.04708816, 0.04719292, 0.05330018, 0.04714045,
0.0469841, 0.04688072, 0.04688072, 0.04708816, 0.04708816, 0.04729838, 0.05383819, 0.04724556,
0.04724556, 0.04719292, 0.04688072, 0.04714045, 0.04714045, 0.0474579, 0.05399492, 0.04735137,
0.04735137, 0.04735137, 0.04735137, 0.04735137, 0.04735137, 0.04735137, 0.04751143, 0.04751143,
0.04714045, 0.04714045, 0.04714045, 0.04751143, 0.05407381, 0.04735137, 0.04756515, 0.04756515,
0.04735137, 0.04724556, 0.04756515, 0.05415304, 0.0474579, 0.04778185, 0.04761905, 0.04740455,
0.04724556, 0.04789131, 0.05423261, 0.04756515, 0.04873702, 0.04811252, 0.0477274, 0.04740455,
0.04794633, 0.04794633, 0.05423261, 0.04761905, 0.04914732, 0.04987547, 0.04987547, 0.04987547,
0.04987547, 0.04987547, 0.04987547, 0.04987547, 0.04987547, 0.04987547, 0.04987547, 0.04987547,
0.04987547, 0.04987547, 0.04987547, 0.04778185, 0.04740455, 0.04800154, 0.05431254, 0.04767313,
0.04926646, 0.04778185, 0.04751143, 0.04751143, 0.04800154, 0.05439283, 0.0477274, 0.04950738,
0.04789131, 0.04767313, 0.04811252, 0.05439283, 0.05439283, 0.04794633, 0.04981355, 0.04800154,
0.04778185, 0.04778185, 0.04778185, 0.04778185, 0.04778185, 0.04778185, 0.04778185, 0.04778185,
0.04816831, 0.05439283, 0.04811252, 0.04981355, 0.04805693, 0.04778185, 0.04816831, 0.05447347,
0.04822428, 0.04987547, 0.04839339, 0.04778185, 0.04778185, 0.04850713, 0.05455447, 0.04822428,
0.04993762, 0.04845016, 0.04778185, 0.04794633, 0.04850713, 0.05479966, 0.05479966, 0.05479966,
0.04845016, 0.05050763, 0.04856429, 0.04856429, 0.04778185, 0.04794633, 0.04856429, 0.05496497,
0.04845016, 0.05057217, 0.04873702, 0.04778185, 0.04805693, 0.048795, 0.05521576, 0.04850713,
0.05076731, 0.048795, 0.04811252, 0.04811252, 0.04811252, 0.04897021, 0.05538488, 0.048795,
0.05184758, 0.0488532, 0.04833682, 0.04822428, 0.04932636, 0.05538488, 0.0488532, 0.05227084,
0.0488532, 0.04833682, 0.04828045, 0.04944682, 0.05547002, 0.05547002, 0.0489116, 0.05330018,
0.04920678, 0.04920678, 0.04920678, 0.04845016, 0.04833682, 0.04944682, 0.05547002, 0.04908807,
0.05337605, 0.05337605, 0.05337605, 0.05337605, 0.05337605, 0.05337605, 0.04938648, 0.04850713,
0.04833682, 0.04944682, 0.05555556, 0.05555556, 0.04914732, 0.05337605, 0.04950738, 0.04867924,
0.04867924, 0.04867924, 0.04862166, 0.04950738, 0.05572782, 0.05572782, 0.04926646, 0.04926646,
0.05360563, 0.04950738, 0.04950738, 0.04867924, 0.04873702, 0.04950738, 0.05661385, 0.0496904,
0.0496904, 0.05368281, 0.04962917, 0.04867924, 0.0489116, 0.04962917, 0.05679618, 0.04975186,
0.05383819, 0.05383819, 0.04975186, 0.04873702, 0.04908807, 0.04908807, 0.0496904, 0.05688801,
0.05076731, 0.05076731, 0.05076731, 0.05076731, 0.05076731, 0.05076731, 0.05076731, 0.05407381,
0.04981355, 0.04981355, 0.048795, 0.04987547, 0.04987547, 0.04987547, 0.04975186, 0.05688801,
0.05076731, 0.05439283, 0.04987547, 0.04987547, 0.04897021, 0.05006262, 0.05006262, 0.04981355,
0.05688801, 0.05083286, 0.05547002, 0.04993762, 0.04987547, 0.05018856, 0.04993762, 0.05698029,
0.05089866, 0.05598925, 0.05598925, 0.05, 0.05037927, 0.05116445, 0.04993762, 0.05754353,
0.05754353, 0.05754353, 0.05157106, 0.05598925, 0.05006262, 0.05096472, 0.05241424, 0.05241424,
0.04993762, 0.05783149, 0.05227084, 0.05643326, 0.05012547, 0.05109761, 0.05285164, 0.05031546,
0.05802589, 0.05802589, 0.05270463, 0.05661385, 0.05025189, 0.05157106, 0.05447347, 0.05031546,
0.05812382, 0.05285164, 0.05285164, 0.0567048, 0.0567048, 0.0567048, 0.0567048, 0.0567048,
0.0567048, 0.0567048, 0.0567048, 0.0567048, 0.0567048, 0.05044333, 0.0521286, 0.05463584,
0.05463584, 0.05463584, 0.05463584, 0.05463584, 0.05463584, 0.05463584, 0.05463584, 0.05463584,
0.05463584, 0.05037927, 0.05842062, 0.05330018, 0.05050763, 0.05277798, 0.05037927, 0.05852057,
0.05852057, 0.05852057, 0.05337605, 0.05057217, 0.05292561, 0.05037927, 0.05862104, 0.05345225,
0.05057217, 0.05292561, 0.05044333, 0.05872202, 0.05368281, 0.05063697, 0.05292561, 0.05044333,
0.05923489, 0.05923489, 0.05415304, 0.05063697, 0.05299989, 0.05057217, 0.05933908, 0.05415304,
0.05083286, 0.05345225, 0.05070201, 0.05944383, 0.05944383, 0.05944383, 0.0559017, 0.05089866,
0.05352877, 0.05076731, 0.05954913, 0.0559017, 0.05109761, 0.05109761, 0.05360563, 0.05096472,
0.05096472, 0.059655, 0.059655, 0.0562544, 0.05123155, 0.05368281, 0.05109761, 0.05976143,
0.05634362, 0.05634362, 0.05157106, 0.05399492, 0.05129892, 0.05997601, 0.05997601, 0.05997601,
0.05997601, 0.0567048, 0.05163978, 0.05407381, 0.05143445, 0.06008418, 0.06008418, 0.05679618,
0.05184758, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05415304,
0.05415304, 0.05415304, 0.05415304, 0.05415304, 0.05170877, 0.06041221, 0.05688801, 0.05241424,
0.05191741, 0.06052275, 0.05698029, 0.05248639, 0.05198752, 0.05198752, 0.06097108, 0.05754353,
0.05255883, 0.05205792, 0.05205792, 0.06108472, 0.05832118, 0.05263158, 0.05227084, 0.06108472,
0.05862104, 0.05270463, 0.05248639, 0.05248639, 0.06119901, 0.06119901, 0.06119901, 0.06119901,
0.06085806, 0.05307449, 0.05255883, 0.05255883, 0.06119901, 0.06108472, 0.05376033, 0.05263158,
0.06131393, 0.06213698, 0.05383819, 0.05270463, 0.06142951, 0.06142951, 0.06237829, 0.05423261,
0.05277798, 0.06142951, 0.06142951, 0.06262243, 0.05423261, 0.05285164, 0.06189845, 0.06286946,
0.05439283, 0.05307449, 0.05307449, 0.05307449, 0.06201737, 0.06482037, 0.05455447, 0.05330018,
0.06237829, 0.06917145, 0.05463584, 0.05415304, 0.05415304, 0.05415304, 0.0625, 0.06984303,
0.05496497, 0.05496497, 0.05423261, 0.0625, 0.0625, 0.07124705, 0.05496497, 0.05431254,
0.06262243, 0.06262243, 0.0625, 0.07161149, 0.05504819, 0.05504819, 0.05504819, 0.05504819,
0.05439283, 0.06286946, 0.0625, 0.07216878, 0.05513178, 0.05513178, 0.05513178, 0.05513178,
0.05513178, 0.05513178, 0.05513178, 0.05513178, 0.05513178, 0.05513178, 0.05513178, 0.05513178,
0.05447347, 0.06337243, 0.06324555, 0.07254763, 0.05447347, 0.06337243, 0.06337243, 0.06324555,
0.0729325, 0.05455447, 0.06375767, 0.06337243, 0.07392213, 0.05463584, 0.06495698, 0.06337243,
0.0745356, 0.05479966, 0.06495698, 0.06454972, 0.0751646, 0.05504819, 0.05504819, 0.05504819,
0.06509446, 0.06482037, 0.07624929, 0.05513178, 0.06523281, 0.06509446, 0.07624929, 0.05538488,
0.06537205, 0.06523281, 0.06523281, 0.06523281, 0.06523281, 0.06523281, 0.06523281, 0.06523281,
0.06523281, 0.06523281, 0.06523281, 0.07647191, 0.05538488, 0.06579517, 0.07738232, 0.05555556,
0.06637233, 0.07784989, 0.05555556, 0.06666667, 0.07784989, 0.07784989, 0.07784989, 0.07784989,
0.07784989, 0.05572782, 0.05572782, 0.05572782, 0.05572782, 0.06666667, 0.07808688, 0.0559017,
0.06681531, 0.08084521, 0.05607722, 0.05607722, 0.05607722, 0.06726728, 0.08333333, 0.05652334,
0.06819943, 0.08333333, 0.05679618, 0.06835859, 0.08333333, 0.05832118, 0.05832118, 0.06868028,
0.08703883, 0.08703883, 0.08703883, 0.08703883, 0.08703883, 0.08703883, 0.08703883, 0.08703883,
0.08703883, 0.08703883, 0.05832118, 0.06900656, 0.05852057, 0.06900656, 0.05892557, 0.06917145,
0.06917145, 0.06917145, 0.06917145, 0.06917145, 0.06917145, 0.06917145, 0.06917145, 0.06917145,
0.06917145, 0.05902813, 0.05902813, 0.05902813, 0.05902813, 0.05902813, 0.05902813, 0.05902813,
0.05902813, 0.05902813, 0.05902813};
// Per-node size column of the Digits condensed-tree fixture: indexed in
// parallel with Digits::parents, Digits::children and Digits::lambdas, and
// consumed as the `sizes` field of the ClusterSelectionInputs entries defined
// below (see Digits::sizes usage there). Entries of 1 appear to be individual
// points falling out of the tree, while the larger values (e.g. 1607, 1420,
// 1254) are presumably aggregate membership counts of internal cluster
// nodes — TODO(review): confirm against the condensed-tree construction that
// generated this data. Do not edit by hand; values are machine-generated.
const std::vector<int> sizes = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1607, 11, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 176, 1420, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1254, 148, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1197, 27, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1037, 148, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 848, 159, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 65, 11, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 805, 14, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 16, 10, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 387, 352, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 11, 51, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
163, 205, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 18,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 185, 113, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 157, 22, 1, 1, 1, 108, 49, 1, 1, 1, 1, 1, 1, 1, 1, 82, 97, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 44, 35, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 33, 19, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1};
}; // namespace Digits
const std::vector<ClusterSelectionInputs<float, int>> cluster_selection_inputs = {
{150,
5,
10,
Iris::parents,
Iris::children,
Iris::lambdas,
Iris::sizes,
Common::CLUSTER_SELECTION_METHOD::EOM,
false,
0.0,
{1., 1., 0.92582, 0.92582, 1., 0.63246, 0.7746, 1., 0.67937, 1.,
0.73855, 0.8165, 1., 0.4899, 0.42008, 0.38255, 0.61237, 1., 0.4714, 0.7746,
0.67937, 0.86603, 0.45486, 0.63246, 0.54772, 0.8165, 0.92582, 1., 1., 1.,
1., 0.70711, 0.53452, 0.51075, 1., 0.73855, 0.67937, 0.8165, 0.8165, 1.,
1., 0.30861, 0.7746, 0.57735, 0.51075, 0.92582, 0.73855, 1., 0.86603, 1.,
0.8165, 1., 0.83205, 0.97333, 1., 1., 0.92582, 0.53882, 1., 0.78784,
0.58835, 1., 0.72761, 0.97333, 0.78784, 1., 1., 1., 0.6, 1.,
0.90453, 1., 0.97333, 0.92582, 1., 1., 1., 1., 1., 0.90453,
1., 0.97333, 1., 1., 0.83205, 0.83205, 1., 0.68825, 1., 1.,
1., 1., 1., 0.58835, 1., 1., 1., 1., 0.51832, 1.,
0.69749, 1., 0.84853, 1., 1., 0.69749, 0.48038, 0.762, 0.67937, 0.52623,
0.90453, 1., 1., 0.7746, 0.66259, 1., 1., 0.41603, 0.43994, 0.647,
1., 0.86603, 0.60609, 1., 1., 0.65465, 1., 1., 1., 0.6,
0.78784, 0.41404, 0.90453, 0.92582, 0.60609, 0.60609, 0.84853, 0.92582, 0.97333, 1.,
1., 0.8165, 1., 1., 0.97333, 1., 0.88465, 1., 0.67937, 1.},
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}},
{150,
5,
10,
Iris::parents,
Iris::children,
Iris::lambdas,
Iris::sizes,
Common::CLUSTER_SELECTION_METHOD::EOM,
true,
50.0,
{1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.},
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}},
{1797,
5,
10,
Digits::parents,
Digits::children,
Digits::lambdas,
Digits::sizes,
Common::CLUSTER_SELECTION_METHOD::EOM,
false,
0.0,
{1., 0.58403, 0., 0.85348, 0.78141, 0., 0.97918, 0.78766, 0., 0.,
0.73614, 1., 0.69244, 0.89164, 0.92189, 0.91287, 0.80505, 0.95358, 0., 0.,
0.75426, 1., 0.82675, 0.82176, 0.8865, 0., 0.95279, 0., 0., 0.,
0.75316, 0., 0.96262, 0., 0.88752, 0., 0.75986, 0., 0., 0.86867,
0.5573, 1., 0.93996, 0.82658, 1., 0., 0., 1., 0.82796, 0.8055,
0., 0., 0.87192, 0., 0., 0.81232, 1., 0., 0.97918, 0.8524,
0.87954, 0.91103, 0.94117, 0.94409, 0.89604, 1., 0.95962, 0.91021, 0.80689, 0.,
0.9984, 0., 0.72607, 0.85888, 0., 0., 0.5709, 0., 0.67642, 1.,
0.86672, 1., 0.97918, 0.89914, 0.76688, 0.82857, 0.77669, 0., 1., 0.94117,
0.94535, 0.93258, 0., 0., 1., 0., 0., 0.929, 0.8634, 0.70181,
0.92071, 0.70802, 0.88235, 0., 0., 0., 0., 0.94671, 0.86399, 0.87756,
0.82143, 0.95011, 1., 0., 0.54525, 0., 0., 1., 0.89904, 0.,
0., 0., 0., 0., 1., 0., 0.84609, 0., 0.88612, 0.,
0.87394, 0.83787, 0.86184, 0., 0., 1., 1., 1., 0., 1.,
0.7679, 0.91434, 0.76688, 0.84285, 0.81978, 0., 1., 0.89419, 0., 0.99697,
0.72905, 0.92181, 0.67926, 0.82176, 1., 0., 0.87679, 1., 0., 0.98953,
0.84453, 0., 1., 0.89203, 1., 1., 0.90284, 0.93666, 0., 0.90692,
0., 0.81978, 0., 1., 1., 0.98802, 0.93646, 0.83787, 0.88445, 0.73206,
0.8707, 0.86435, 1., 0.54403, 0.8056, 0.90859, 1., 0.80867, 0.9207, 0.83768,
0.8132, 0., 0., 0.88192, 0.92071, 1., 0.97736, 1., 0., 0.89579,
1., 1., 0.91444, 0., 0., 0.68987, 0., 0.76952, 0.79364, 0.74772,
0.87847, 0.79231, 0.95962, 0.92421, 0.85568, 0., 0.88752, 0., 0.89529, 0.98802,
0., 0.87847, 0.93431, 0.91021, 0., 1., 0.79772, 0.89071, 1., 1.,
0.99703, 0., 0.97918, 0.92683, 0.99038, 0., 1., 0.88471, 0.85026, 0.91264,
0.96792, 0.82785, 0.58327, 0.74025, 0.73096, 0.9186, 0.9163, 0., 0.58327, 0.56195,
0.9659, 0.85984, 0.94991, 0., 0., 0., 0.79494, 0.71948, 0.89758, 1.,
1., 0.93031, 1., 0.87103, 0., 0., 0.90666, 0., 0.93496, 0.88192,
1., 0., 1., 1., 0., 0., 0.94335, 0., 0.81809, 1.,
0.98823, 0.98706, 0.95448, 0., 0., 0.92322, 0.7445, 0.94043, 0.94358, 0.,
0.96836, 0., 0.70259, 0., 0., 0., 0., 1., 0.7139, 1.,
1., 0.95757, 0., 0., 0.78602, 0.96809, 0.81916, 0.78884, 0.97534, 1.,
0.93496, 0.93692, 0., 0.87719, 0.74358, 0.82664, 0.95001, 0.787, 0.86005, 0.9914,
1., 0.93633, 0.9511, 0.93794, 0.86928, 0., 0.89443, 0., 0.94335, 0.,
0.99854, 0.89062, 1., 0.86184, 0.83537, 0.94771, 0.55405, 1., 0.91021, 1.,
0.82238, 0., 1., 1., 1., 0.98802, 0.71667, 1., 0., 0.85635,
0.9421, 1., 0., 1., 0., 0.8493, 0.88859, 0.7062, 0.88235, 0.94409,
1., 0., 0.74119, 0.56466, 0.79097, 0.88235, 0.93384, 1., 1., 0.7722,
0.96465, 0.76864, 0.91346, 0., 0.85124, 1., 0.96734, 1., 0., 0.,
1., 1., 0.8265, 0., 0., 0.88552, 0.83839, 0.9413, 0.82785, 0.,
0.88754, 0., 0.869, 1., 0., 0.97654, 0.99236, 0.98738, 0.79562, 0.83871,
0.82226, 0., 0., 1., 0., 0.89798, 0.94991, 0.9413, 0.8056, 0.,
1., 0., 0.88889, 1., 0., 0.91613, 0.75316, 0.86668, 0., 0.,
0., 0., 0.78107, 0., 0., 1., 0.94516, 0.85678, 0.94945, 1.,
0., 0.91876, 0., 0.87956, 0.94991, 0.89907, 0.7257, 0.79173, 1., 0.,
0.70849, 0.98985, 0.99258, 0.77309, 0., 0., 0.80399, 1., 0.8132, 0.85895,
1., 1., 0.83748, 0.88616, 1., 1., 1., 0., 0.87052, 0.86668,
0., 0.7257, 0., 0.91644, 1., 0.73409, 0.9536, 1., 1., 0.92011,
0.94465, 0., 0.83902, 0.829, 0.88889, 0.85348, 1., 0., 0.90354, 0.98738,
0., 0.76364, 0., 0.84843, 0.82961, 0.98738, 1., 0.78107, 0., 0.,
1., 0., 0., 1., 1., 0., 0.82309, 0.86928, 1., 0.83118,
0., 1., 0., 0., 1., 0.95071, 0., 0.91378, 0., 0.,
0.85584, 0., 0.94119, 0., 1., 0., 0.90284, 1., 0.67926, 0.,
1., 0., 1., 0.80606, 0., 0., 0.81094, 1., 0., 0.,
0.90367, 0.8926, 0.87944, 1., 0.54403, 0., 0.76673, 0.98245, 0., 0.,
0.96188, 0.97461, 0.93633, 1., 0.54403, 0., 0.97518, 0., 0., 0.97733,
0.96135, 0.89892, 0.8524, 0., 0., 0., 0.54403, 0.93506, 0.96044, 1.,
1., 0., 1., 0.81691, 0.73307, 0.85723, 0.72869, 0.72869, 1., 0.,
0.72869, 0.78478, 0.93002, 0.72869, 0., 0., 0., 1., 0., 0.,
1., 0.79505, 1., 0.81228, 0.92542, 0., 0.79566, 1., 0.63311, 0.87167,
0., 0.6886, 0.54403, 0.71409, 0.70439, 0.69468, 0.9147, 1., 0.82845, 0.,
0., 0.88104, 1., 0.94124, 0.79407, 0., 0.77182, 0., 0., 0.86995,
1., 1., 0.54403, 1., 0., 0.55405, 0.96188, 0.62922, 0.90106, 0.,
1., 0.85505, 0.87029, 0.77044, 1., 1., 0.78501, 0.8683, 0.84548, 0.67504,
0., 0.87328, 0.68987, 0., 1., 0.95202, 1., 0.91378, 0., 0.54403,
1., 0., 0.97518, 0.54403, 0.89798, 0., 0.80957, 0.61237, 0.8097, 0.94703,
1., 0., 1., 1., 1., 1., 0.74557, 0.67006, 0.83569, 0.81602,
1., 0., 0.99805, 1., 0., 1., 0.88091, 0.62217, 0.79076, 0.91741,
0., 0.87756, 1., 0., 1., 1., 0.91444, 0.99002, 0., 0.88265,
0.92998, 0., 1., 0., 0., 1., 1., 0.82309, 0., 0.,
0., 0., 0.95934, 0., 0.94119, 0.73409, 0.75995, 0.77399, 1., 1.,
0., 0.61316, 0.61324, 0., 0.9207, 0.95001, 0.96219, 1., 0.89914, 1.,
0., 1., 0.87679, 0.87679, 1., 1., 0.64631, 0., 0.85081, 0.92227,
0., 0.68924, 0., 0.76253, 0.77142, 0.92041, 0.5471, 1., 0.97736, 0.95001,
0.77854, 0., 0.82582, 1., 0.75341, 0., 0., 0., 1., 0.60055,
1., 1., 0., 0.91723, 0.7995, 0.54835, 0., 0.79236, 0., 0.87135,
0.88345, 0.96989, 0.80607, 0.57161, 1., 0., 0., 0., 0.86226, 0.87457,
0., 0.76167, 0.87457, 0.95934, 1., 1., 0., 0., 0.95368, 0.,
0., 1., 0.85081, 0., 0., 0.93892, 0.7017, 1., 0.86184, 0.,
0.78428, 0.89523, 0., 1., 0., 1., 0.85561, 0.58478, 0.85813, 1.,
0.90478, 1., 0., 1., 0., 0., 0.70259, 0.79472, 0.86184, 0.93258,
0.90813, 1., 0.9253, 1., 0., 0., 0.93063, 0.87067, 0., 0.87959,
0.8197, 0.88368, 0.96809, 0., 0.95476, 0.9173, 0., 1., 0.60722, 0.92245,
1., 0.83564, 0.88471, 0.6735, 0.59663, 0.66629, 0.88925, 0.82785, 0.85797, 0.,
0.86184, 0.76907, 0.93002, 1., 0.91168, 0.81226, 0., 1., 0., 0.,
1., 0.98287, 0., 0.55404, 0.9741, 0.88401, 0.61412, 0., 0.8236, 0.91496,
0., 0.77854, 0., 0.8097, 0.8179, 0.84453, 0.55277, 0.89419, 0.89165, 0.89287,
0., 0.65359, 0.90498, 0.9741, 0.86141, 1., 0.69921, 0.92284, 0.88612, 0.81857,
1., 0.94776, 0., 0.80847, 0.83768, 0.86358, 0., 0.94771, 0., 0.,
0.84094, 0.95465, 1., 0.5416, 0.88941, 1., 1., 0.87625, 1., 0.82785,
0.83882, 0., 0.79076, 0., 0.88192, 0.88088, 1., 0.8982, 0., 0.,
0.92071, 0., 0.65514, 0., 0.89253, 0., 1., 0.84026, 0.846, 0.96321,
0.90429, 1., 0.91103, 0., 0.89253, 0.80957, 1., 0.766, 0.79861, 1.,
0., 1., 0.83587, 1., 0.991, 0.96809, 0.86603, 0.88925, 0.84077, 0.8549,
0., 1., 1., 0., 0.95598, 0.90859, 0.92683, 1., 1., 1.,
0.98989, 0.96809, 0., 1., 1., 0.93996, 0.86538, 0.83817, 1., 0.94437,
0., 0., 1., 0.82675, 0.97285, 1., 0.85447, 1., 1., 0.85447,
0.94776, 0.99826, 0.846, 0.93431, 0., 0., 0., 1., 0.93633, 1.,
0.81326, 0.91149, 1., 1., 0.87567, 0.95333, 1., 0.85447, 0., 0.85447,
0.69296, 0.70529, 1., 1., 1., 0., 0.87198, 1., 0., 1.,
0.79421, 1., 0., 0.90466, 0.9474, 0.97659, 0.80505, 0., 0.77576, 0.81602,
0.90901, 0.77229, 0.96115, 0.95232, 0., 1., 0.87881, 1., 1., 0.96792,
0.92101, 0.97548, 0.94, 1., 0.73634, 0., 0.85447, 0.92402, 0.94112, 1.,
0., 0., 0., 0.90478, 0., 0.64591, 1., 1., 1., 1.,
0.59251, 0.93496, 0.93399, 0.98968, 0.91287, 0.99228, 0.97659, 0., 0., 0.96345,
0.61677, 0.66971, 0.85675, 0.98968, 0., 1., 1., 0., 0., 0.69642,
1., 0.85447, 0.82273, 0.98968, 0., 0.92841, 0.85627, 0., 0., 0.85561,
0., 0., 0., 0.97554, 0., 0.85723, 0., 0., 0., 0.,
0.76954, 0.65465, 0., 0.8228, 0.876, 0.97733, 0.89443, 0., 0., 0.,
0., 0.76339, 0.85561, 0.56398, 0.85447, 0., 0.8439, 0.90296, 0., 0.,
0.88072, 0.98968, 0.87029, 0.93473, 0.90582, 0., 0., 1., 0., 0.88624,
0., 0.8806, 0.79562, 0., 0.79464, 0.77142, 0.76442, 0.83351, 0.79365, 0.80607,
0.92284, 0.85447, 0.83793, 0., 0.98968, 0.88616, 0.91741, 0.55665, 0., 0.,
0.82275, 0.9141, 0.86645, 0., 0.98968, 0.88072, 0.74473, 1., 0.83991, 0.,
0., 0.86392, 0., 0.98102, 0.89443, 0.81394, 0., 0.94375, 0.97272, 0.80606,
0.98109, 0., 0.66281, 0.81916, 0., 0., 0., 0., 1., 0.,
0., 0.79535, 0., 0.85561, 0., 0., 0., 0.87394, 0.95923, 0.93496,
0.8524, 1., 0., 0.97918, 1., 0., 0., 0.87052, 1., 0.77763,
0.81226, 1., 0., 0.87287, 0.98293, 0., 0., 0.79494, 0., 0.73172,
0., 0.93506, 0.9163, 0.98287, 0.97409, 0., 0., 0.79754, 1., 0.,
0., 0.94943, 0., 0.7183, 0., 0., 0.9866, 0., 1., 0.72232,
1., 1., 0., 0., 0., 0.64384, 0.73717, 0.77854, 0.87457, 1.,
0., 0.86184, 0.83238, 0.95743, 0.85936, 0.9666, 0., 0.85456, 0.90498, 0.,
0.81133, 0.86057, 0.98287, 1., 0.9511, 1., 0.89798, 0.89443, 0.8806, 0.72805,
0., 0., 0.77399, 0., 0.66224, 0.83688, 0.86547, 1., 1., 0.9207,
0.93828, 0.73634, 0., 0.91721, 1., 0.95618, 0.92147, 0.89443, 0., 0.96268,
0.89443, 1., 0.9511, 0., 1., 0., 0., 1., 0.64043, 0.,
0., 1., 0.90805, 1., 0., 0.90498, 0.89846, 0.95528, 1., 1.,
0.82238, 0., 0.76167, 0.70987, 0., 0., 0.98369, 0., 0.96055, 0.,
0., 0.80768, 0.99848, 0.64661, 0., 0., 0.54281, 0.64179, 0., 0.,
0.80489, 0.94628, 0.89999, 0.8528, 0.98293, 0.58327, 0., 0.70529, 0., 0.,
0.86565, 0.94628, 0., 0.82734, 0.98293, 0.58327, 0.88506, 0.79623, 0., 0.70369,
0., 0.87929, 0., 0., 0.91308, 0.58327, 0.90579, 0.77984, 0.90466, 0.,
0., 0.85159, 0.88845, 0.74988, 0.87778, 0.58327, 0.85505, 0.58327, 0.94628, 1.,
1., 0.89709, 0.8058, 0., 1., 0.92041, 0.94991, 0.72274, 0.70437, 0.96792,
0.5709, 0., 0.86215, 0.94945, 0.81073, 1., 0.846, 0.83871, 0.87824, 0.80129,
0.88918, 0.90813, 1., 1., 1., 1., 0.90919, 0.87604, 0., 0.82217,
0.9173, 0., 0.79562, 0.55086, 0.66913, 1., 0.70081, 0.97822, 1., 0.79212,
0.92837, 0.85447, 0.60553, 0.94323, 0.95268, 0., 0.85456, 0.95743, 0., 0.,
0.87604, 1., 0.80915, 0.54281, 0.94628, 0.88918, 0.88906, 1., 0.84298, 0.,
0.89914, 0., 0.8657, 0., 0.89715, 0.85797, 0., 0.87424, 0.88546, 1.,
0., 0., 0.85447, 0.76167, 0., 1., 0., 0., 0.92542, 0.56128,
0.56603, 0.97968, 0., 0.6904, 0.55665, 0.91642, 0.83226, 1., 0.84181, 0.92542,
0.99132, 1., 1., 1., 0.90354, 0.8605, 0.9413, 0.8461, 0.92559, 1.,
0.97871, 1., 1., 1., 0.89253, 0.78728, 0.99521, 1., 0.94409, 1.,
0., 1., 1., 1., 1., 0.87052, 0.99848, 1., 0.98847, 0.95962,
1., 0.99236, 0.99848, 0.93996, 0.93541, 0.93996, 1., 0.85949, 1., 0.90299,
0.93258, 0.99708, 0.9413, 0.99743, 0.99236, 0.85813, 1., 0.79097, 0., 1.,
0.85723, 0.9413, 1., 0.86772, 0.89536, 0.85348, 1., 0.8904, 0.8904, 0.9926,
1., 0.87029, 0.98287, 1., 0.90019, 0.9413, 0.8926, 0.81932, 0.88088, 0.,
1., 0., 1., 0.82217, 1., 0.88066, 1., 1., 1., 0.79269,
0.87307, 1., 1., 1., 1., 0.84264, 0.79684, 0.99848, 0.9413, 1.,
0.95962, 0., 1., 0.83768, 0.86882, 1., 0.93692, 1., 1., 1.,
0.92926, 1., 0.89185, 1., 0.89574, 1., 0.93506, 1., 0.89062, 0.,
0.75825, 1., 0., 1., 0.90242, 0.97733, 1., 0., 0., 1.,
0., 1., 0., 0., 0., 0.94991, 0.64226, 0.7257, 0., 1.,
0.89846, 0., 0., 0., 0.9519, 0.84609, 0.62217, 0., 0., 1.,
0.88992, 0.73724, 0., 0.79885, 0.56603, 0., 0.86906, 0.8683, 0.91287, 0.77979,
0.85627, 0., 0., 0., 0., 0., 0., 0.88752, 0., 0.86883,
0., 0., 0., 0., 1., 1., 0.85374, 0.8228, 0., 0.,
0.8786, 0.65012, 0.57587, 0., 0.86814, 0., 0.58327, 0., 0.69125, 0.54648,
0.7966, 0.83748, 0., 0., 0.81177, 0., 0., 0.93506, 0.87029, 0.87158,
0.96135, 0., 0., 0.82916, 1., 0.67883, 1., 0., 0., 0.,
0.87394, 1., 0.99598, 0.86772, 0.93828, 0.78505, 0.80244, 0., 0., 0.82404,
0.89412, 0.92884, 0., 0., 1., 0.787, 0.83293, 0., 1., 0.94409,
0.95743, 1., 0.8309, 0., 0.91741, 0.79801, 0., 0.73568, 1., 0.87014,
0., 0.96997, 0.89496, 0.99598, 0.66224, 0.72274, 0., 0., 0., 0.,
0., 1., 0., 0.90284, 0., 0., 0., 0.89167, 0., 0.86814,
0., 0., 0.89892, 0.88752, 1., 0., 1., 0.96345, 0.64734, 0.7148,
0., 1., 0.89892, 1., 0.85124, 0., 0.98369, 0.775, 0.60722, 0.,
0., 1., 0.89892, 0.92841, 1., 0., 1., 0.95436, 0.99697, 0.,
0., 0.95448, 0., 0.89907, 0., 0., 0.91847, 0., 0., 0.56061,
0., 1., 0., 0.89892, 0.65149, 0.74772, 0.7241, 0.78318, 0.98109, 1.,
0., 0.8097, 0.64453, 0., 0.72274, 0.93955, 0., 0., 0., 0.,
0., 0.86538, 0.75092, 1., 0.76538, 0.83855, 0.97512, 0.54772, 0., 0.83991,
0.99697, 0., 0., 0., 0.8097, 0.71735, 0.86547, 0., 0.83745, 0.90874,
0., 0.67624, 0., 0.83042, 0.91608, 0.89165, 0.8634, 0.65254, 0., 0.92202,
0.66112, 1., 0.86518, 0., 0.97968, 0., 0.64734, 0.81245, 0.64731, 0.89469,
0.81508, 0.83068, 0., 1., 0.64226, 0.95241, 0., 0.91608, 0.83071, 0.87916,
0.80256, 0., 0.92556, 0.74343, 0., 0.99333, 0., 0., 1., 0.,
0., 1., 0.91847, 0.74029, 0., 0., 0.},
{1, 10, -1, 8, 5, -1, 7, 2, -1, -1, 1, 11, 4, 8, 5, 14, 7, 2, -1, -1, 1, 11, 4, 8,
5, -1, 7, -1, -1, -1, 1, -1, 13, -1, 7, -1, 1, -1, -1, 12, 10, 5, 11, 2, 2, -1, -1, 11,
1, 1, -1, -1, 2, -1, -1, 1, 11, -1, 7, 8, 8, 2, 8, 8, 5, 7, 7, 7, 5, -1, 11, -1,
1, 12, -1, -1, 10, -1, 1, 1, 11, 2, 7, 8, 4, 11, 2, -1, 7, 8, 11, 8, -1, -1, 2, -1,
-1, 5, 8, 10, 5, 1, 13, -1, -1, -1, -1, 11, 2, 14, 5, 5, 2, -1, 10, -1, -1, 14, 2, -1,
-1, -1, -1, -1, 5, -1, 1, -1, 12, -1, 1, 11, 4, -1, -1, 14, 7, 2, -1, 12, 1, 11, 4, 8,
5, -1, 7, 2, -1, 12, 1, 11, 4, 8, 5, -1, 7, 2, -1, 12, 1, -1, 14, 14, 7, 14, 1, 12,
-1, 12, -1, 5, -1, 2, 2, 8, 14, 11, 1, 1, 4, 4, 2, 10, 4, 1, 11, 4, 7, 8, 8, -1,
-1, 8, 5, 7, 7, 7, -1, 12, 11, 14, 1, -1, -1, 4, -1, 4, 1, 1, 11, 2, 7, 8, 4, -1,
2, -1, 11, 8, -1, 11, 2, 7, -1, 5, 8, 11, 5, 1, 14, -1, 7, 12, 7, -1, 2, 14, 5, 5,
2, 4, 10, 4, 4, 14, 14, -1, 10, 10, 5, 12, 1, -1, -1, -1, 1, 10, 4, 8, 5, 13, 7, 2,
-1, -1, 1, -1, 4, 8, 5, -1, 7, 2, -1, -1, 1, -1, 4, 8, 5, 13, 7, -1, -1, 12, 1, 12,
13, -1, 7, -1, 1, -1, -1, -1, -1, 5, 10, 2, 2, 8, -1, -1, 1, 1, 4, 4, 2, 9, 4, 1,
-1, 4, 7, 8, 8, 2, 8, 8, 5, 7, 7, 7, 5, -1, 10, -1, 1, -1, 13, 4, 9, 4, 1, 1,
10, 2, 7, 8, 4, -1, 2, 5, 7, 8, 10, 8, -1, 10, 2, 7, -1, 5, -1, 10, 5, 1, 13, 8,
7, -1, 7, 10, 2, 13, 5, 5, 2, 4, 9, 4, 4, -1, 2, 6, 14, 5, -1, -1, 5, 6, 1, -1,
-1, 8, 1, 11, 4, -1, 5, -1, 7, 0, -1, 12, 1, 11, 4, 8, 5, -1, -1, 0, -1, 12, 1, 11,
4, -1, 5, -1, 7, 0, -1, 12, 1, 12, -1, -1, -1, -1, 1, -1, -1, 12, 9, 5, 11, 0, -1, 8,
-1, 11, 1, 1, 4, 4, 0, -1, 4, 1, 3, 4, -1, -1, 8, 0, 8, 8, 5, 7, 7, 7, 5, 12,
11, -1, 1, 12, -1, 4, -1, 4, 1, 1, 11, 0, 7, 8, 4, -1, 0, 5, 7, 8, 11, -1, 12, 11,
-1, 7, -1, 5, 8, 11, 5, 1, -1, -1, 7, -1, -1, 11, 0, -1, 5, 5, 0, 4, -1, 4, -1, -1,
0, 12, -1, 5, -1, -1, 5, -1, 1, -1, 12, -1, 1, 3, 4, -1, 5, -1, 7, 2, -1, -1, 1, 3,
-1, -1, 5, 13, 7, 2, 10, -1, 1, 3, -1, -1, 5, 13, 7, 2, 10, -1, 1, -1, -1, 13, 7, 13,
1, -1, -1, -1, 10, 5, 3, 2, 2, -1, 13, 3, 1, 1, 4, 4, 2, -1, 4, 1, 3, 4, -1, -1,
-1, 2, -1, -1, 5, 7, 7, 7, 5, -1, 3, 13, 1, 12, -1, 4, 10, 4, 1, 1, 3, 2, 7, -1,
-1, 3, 2, 5, 7, -1, 3, -1, -1, 3, 2, 7, 10, 5, -1, 10, 5, 1, 13, -1, 7, 12, 7, 3,
2, 13, 5, 5, 2, 4, -1, 4, 4, -1, 2, 12, 13, 5, -1, 10, 5, -1, 1, 10, 12, -1, 1, 10,
4, 8, 5, -1, 7, 2, 9, 6, 1, 10, 4, 8, 5, -1, 7, 2, -1, 6, 1, 10, 4, 8, -1, 14,
7, -1, 9, 6, 1, 6, -1, 14, 7, -1, 1, -1, -1, 6, 9, 5, -1, -1, -1, -1, 14, -1, 1, 1,
4, 4, 2, 9, -1, 1, 10, -1, 7, 8, 8, 2, 8, 8, -1, 7, 7, 7, 5, 6, 10, -1, 1, 6,
-1, 4, -1, 4, 1, 1, 10, 2, 7, 8, 4, -1, 2, 5, 7, -1, -1, -1, 6, 10, 2, 7, -1, 5,
8, 10, -1, 1, -1, 8, 7, 6, 7, 10, 2, -1, -1, -1, 2, 4, -1, 4, 4, 14, 2, 6, -1, -1,
9, -1, -1, 6, 1, -1, -1, 9, 1, 10, 4, -1, 5, 14, -1, 2, -1, 12, 1, 10, 4, 8, 5, 14,
-1, 2, -1, -1, 1, 10, 4, 8, 5, 14, 7, 2, -1, -1, 1, 12, -1, 14, 7, 14, 1, -1, 9, 12,
-1, 5, 10, 2, 2, 8, 14, 10, 1, 1, 4, 4, 2, -1, 4, 1, 3, 4, 7, 8, -1, 2, -1, -1,
5, 7, -1, 1, 5, 12, 10, -1, 1, 12, -1, 4, -1, 4, 1, 1, 10, 2, 7, 8, -1, 10, 2, 5,
7, 8, 10, 8, 12, 10, 2, 7, -1, 5, 8, 10, -1, 1, -1, -1, 7, 12, 7, 10, 2, 14, 5, 5,
2, 4, 11, -1, 4, -1, 2, 12, 14, 5, -1, -1, 5, -1, 1, -1, 12, -1, 3, 4, 8, 5, 13, 7,
2, -1, 12, 1, 3, 4, 8, 5, -1, 7, 2, 9, 12, 1, 3, 4, 8, 5, -1, 7, 2, -1, 12, 1,
12, 13, 13, 7, 13, 1, -1, 9, 12, 9, 5, 3, 2, 2, -1, -1, 3, 4, 2, 9, 4, 1, 3, 4,
7, 8, 8, 2, -1, -1, -1, 7, 7, 7, 5, 12, 3, 13, 1, 12, 13, 4, -1, 4, 1, 1, 3, 2,
7, -1, 4, 3, -1, 7, 8, 3, -1, 12, 3, 2, 7, -1, 5, 8, 3, 5, 1, 13, -1, 7, 12, 7,
3, 2, 13, 5, 5, 2, 4, -1, 4, 4, 13, 2, -1, -1, -1, 5, -1, 1, 9, 12, 9, 1, 10, 4,
8, 5, 14, 7, 2, -1, -1, 1, 10, 4, 8, 5, -1, 7, 2, -1, -1, 1, 10, 4, 8, 5, -1, 7,
2, -1, -1, 1, -1, -1, -1, 7, -1, 1, -1, -1, -1, -1, 5, 10, -1, 2, 8, 13, 10, -1, -1, -1,
-1, 4, 1, 10, 4, -1, 8, 8, -1, -1, 8, 5, 7, 7, 7, -1, -1, 10, -1, 1, -1, 14, 4, -1,
4, 1, 1, 10, 2, 7, 8, 4, 10, -1, 5, 7, 8, 10, -1, -1, 10, 2, 7, -1, 5, 8, 10, 5,
1, -1, -1, 7, -1, 7, 10, 2, -1, 5, 5, 2, 4, -1, 4, 4, -1, -1, -1, -1, 5, -1, -1, 5,
-1, 1, -1, -1, -1, 1, 11, 4, 8, 5, -1, 7, 2, -1, -1, 1, 11, 4, 8, 5, -1, 7, 2, -1,
-1, 1, -1, 4, -1, 5, 14, 7, 2, -1, -1, 1, 12, -1, -1, 7, -1, 1, -1, -1, 12, -1, 5, 10,
2, 2, -1, -1, -1, 1, 1, 4, 4, 2, -1, 4, 1, 10, 4, 7, -1, 8, 2, -1, 8, 5, 7, 7,
7, 5, 12, 10, 14, 1, -1, -1, 4, -1, 4, 1, 1, 10, 2, 7, 8, 4, -1, 2, 5, 7, 8, 10,
-1, 12, 10, 2, 7, -1, 5, -1, -1, 5, 1, -1, -1, 7, 12, 7, -1, 2, 14, 5, 5, 2, 4, -1,
4, 4, -1, -1, 12, -1, 5, -1, -1, 5, 12, 1, -1, -1, 10, 1, -1, -1, 8, 5, 13, 7, 2, 10,
-1, 1, -1, -1, 8, 5, -1, 7, 2, 10, 12, 1, -1, 4, -1, 5, -1, -1, 2, 10, 12, 1, 12, -1,
-1, 7, 13, 1, 12, 10, 12, 10, 5, 10, 2, 2, 8, -1, 10, 1, 1, 4, 4, 2, 10, -1, 1, 11,
4, 7, 8, 8, 2, 8, 8, 5, 7, 7, 7, 5, 12, 10, -1, 1, 12, -1, 4, 10, 4, 1, 1, 11,
2, 7, 8, 4, 10, 2, 5, -1, 8, 10, -1, -1, 10, 2, 7, 10, 5, 8, 10, 5, 1, -1, 8, -1,
12, -1, 10, 2, -1, 5, 5, 2, -1, -1, 4, 4, -1, 2, -1, -1, 5, 10, 10, 5, -1, 1, 10, 1,
11, 4, 8, 5, 13, 7, 2, 9, 12, 1, 11, 4, 8, 5, 13, 7, 2, 9, 12, 1, 11, 4, 8, 5,
-1, 7, 2, 9, 12, 1, 12, 13, 13, 7, 13, 1, 12, 9, 12, 9, 5, 11, 2, 2, 8, 13, 11, 1,
1, 4, 4, 2, -1, 4, 1, 11, 4, 7, 8, 8, 2, 8, 8, 5, 7, 7, 7, 5, 12, 11, 13, 1,
12, -1, 4, -1, 4, 1, 1, 11, 2, 7, 8, 4, 11, 2, 5, 7, 8, 11, 8, 12, 11, 2, 7, -1,
5, 8, 11, 5, 1, 13, 8, 7, 12, 7, 11, 2, 13, 5, 5, 2, 4, -1, 4, 4, -1, 2, 12, 13,
5, -1, -1, 5, -1, 1, -1, -1, -1, 1, 10, 4, -1, 5, 14, -1, -1, -1, 6, 1, 10, -1, -1, 5,
14, 7, -1, 1, 10, -1, 8, 5, 14, 7, 2, -1, -1, -1, -1, -1, -1, 7, -1, 1, -1, -1, -1, -1,
5, 10, 2, 2, -1, -1, 10, 1, 1, -1, 4, -1, 10, -1, 1, 10, 4, 7, -1, -1, 2, -1, -1, 5,
7, 7, 7, -1, -1, 10, 14, 1, 12, -1, -1, -1, 1, 10, 2, 7, 8, 4, 10, -1, -1, 7, 8, 10,
-1, -1, 10, 2, 7, -1, 5, 8, 10, 5, 1, -1, 8, 7, -1, 7, 10, 2, -1, 5, 5, 2, 4, 4,
-1, -1, -1, -1, -1, 5, -1, 1, -1, -1, -1, 1, -1, 4, -1, -1, 13, 7, 2, -1, 12, 1, 10, 4,
-1, 5, 13, 7, 2, -1, 12, 1, 10, -1, -1, 5, 13, 7, 2, -1, 12, 1, 12, -1, -1, 7, -1, 1,
-1, -1, 12, -1, -1, 10, -1, 2, -1, 13, 10, 1, 1, 4, 4, 2, -1, 4, 1, -1, 4, 7, -1, -1,
-1, -1, -1, 5, 7, 7, 7, 5, 12, 10, -1, 1, 12, -1, -1, -1, 4, 1, 1, -1, 2, 7, -1, 4,
-1, 2, 5, 7, 8, 10, -1, 12, 10, 2, 7, -1, 5, -1, 10, 5, 1, 13, 8, 7, -1, 7, 10, 2,
-1, 5, 5, 2, 4, -1, 4, 4, -1, 2, -1, -1, 5, -1, -1, 5, 12, 1, -1, -1, -1}},
{1797,
5,
10,
Digits::parents,
Digits::children,
Digits::lambdas,
Digits::sizes,
Common::CLUSTER_SELECTION_METHOD::EOM,
false,
50.0,
{1., 1., 0., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 0., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 1., 0., 1.,
1., 1., 0., 0., 1., 0., 0., 1., 1., 0., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 0., 1., 0., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 0., 1., 0., 0., 1., 1., 1., 0., 1., 1., 1., 1., 0., 1., 0.,
1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0.99685, 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1.,
1., 1., 1., 0., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 0., 1.,
1., 1., 0., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 0.83902, 1., 1., 1., 1., 1., 1., 1.,
0., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1.,
1., 1., 1., 1., 0., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1.,
1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 0., 1., 1., 0., 1., 1., 1., 1., 0., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 0., 1.,
1., 0., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0.,
1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0.,
1., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 1., 0., 1., 0., 0., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1.,
1., 1., 1., 0., 1., 1., 1., 0., 1., 0., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 0., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1.,
1., 0., 1., 1., 0., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 0., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 0., 0.,
1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 0.99529, 0., 1., 1., 0., 0., 1.,
0., 1., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 0., 1., 1.,
1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 0., 1., 1., 0., 0., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 0.,
1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 0., 1., 1., 1.,
1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0.,
0., 0., 1., 1., 1., 0., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1.,
1., 1., 1., 0., 0., 0., 0., 0., 0.99763, 1., 1., 1., 0., 0., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 0., 1., 0., 1., 0., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1.,
1., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 1., 1., 1.,
0.99921, 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 0.99921, 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 0., 1., 0., 1.,
0., 1., 0., 1., 1., 1., 1., 0., 1., 1., 1., 0., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 0., 1., 1., 1., 1., 0.,
1., 1., 1., 0.99921, 1., 1., 1., 1., 1., 1., 1., 0., 0., 1., 0., 1.,
0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0.,
0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0.99921, 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1.,
1., 1., 1., 1., 0.},
{0, 0, -1, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0,
0, 0, -1, -1, 0, -1, -1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0,
0, 0, 0, -1, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, -1, 0, 0, 0,
-1, 0, 0, 0, 0, -1, 0, -1, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, 0, 0, 0,
0, 0, 0, -1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0,
0, -1, 0, 0, 0, 1, -1, 0, 0, 0, -1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,
0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
-1, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 1, 0, 0, 0, 1, 0, -1, 0, -1, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-1, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, -1, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, -1, 0,
0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, -1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1,
0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0,
-1, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, -1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, -1, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0,
0, -1, 0, 0, -1, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, -1,
-1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, -1, 0, 0, 0, -1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, -1, -1, 0,
-1, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 0, 0,
0, 0, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, -1,
0, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, 0, 0, 0, 0, 0, 0,
-1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1,
0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, -1, 0, 0, 0, -1, 0, 0,
0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, -1, -1, -1, 0, 0, 0, 0, -1, -1, 0, 0,
0, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, -1, 0, -1, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0,
0, 0, 0, -1, -1, 0, 0, 0, 0, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, -1, -1, 0, 0, 0,
0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, -1, 0, 0, -1, 0, -1, 0, -1, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, 0,
0, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, -1, -1, 0, -1, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1,
-1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, -1}},
{150,
5,
10,
Iris::parents,
Iris::children,
Iris::lambdas,
Iris::sizes,
Common::CLUSTER_SELECTION_METHOD::LEAF,
false,
0.0,
{1., 1., 0.92582, 0.92582, 1., 0.63246, 0.7746, 1., 0.67937, 1.,
0.73855, 0.8165, 1., 0.4899, 0.42008, 0.38255, 0.61237, 1., 0.4714, 0.7746,
0.67937, 0.86603, 0.45486, 0.63246, 0.54772, 0.8165, 0.92582, 1., 1., 1.,
1., 0.70711, 0.53452, 0.51075, 1., 0.73855, 0.67937, 0.8165, 0.8165, 1.,
1., 0.30861, 0.7746, 0.57735, 0.51075, 0.92582, 0.73855, 1., 0.86603, 1.,
0., 0.96609, 0., 0., 0.96609, 1., 0., 0., 0.96609, 0.,
0., 1., 0., 0., 0., 0.96609, 0., 1., 0., 1.,
0., 0.90749, 0., 0., 0.96609, 0.96609, 0., 0.91287, 1., 0.,
0.88192, 0., 1., 0.91287, 0., 0., 0.96609, 0., 1., 1.,
0., 0.96609, 1., 0., 1., 1., 1., 0.96609, 0., 1.,
0., 0.91287, 0., 0., 1., 0., 0., 0., 0., 0.,
0., 1., 1., 0., 0., 1., 1., 0., 0., 0.,
1., 0., 0., 0.91287, 1., 0., 0.91287, 0.91287, 1., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0.93934,
1., 0., 0.91287, 1., 0., 1., 0., 1., 0., 0.91287},
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, -1, 2, -1, -1, 2, 2, -1, -1, 2, -1, -1, 2, -1, -1, -1, 2,
-1, 2, -1, 2, -1, 2, -1, -1, 2, 2, -1, 1, 2, -1, 2, -1, 2, 1, -1, -1, 2, -1,
2, 2, -1, 2, 2, -1, 2, 2, 2, 2, -1, 2, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1,
-1, 1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, -1, 1}},
{150,
5,
10,
Iris::parents,
Iris::children,
Iris::lambdas,
Iris::sizes,
Common::CLUSTER_SELECTION_METHOD::LEAF,
false,
0.5,
{1., 1., 0.92582, 0.92582, 1., 0.63246, 0.7746, 1., 0.67937, 1.,
0.73855, 0.8165, 1., 0.4899, 0.42008, 0.38255, 0.61237, 1., 0.4714, 0.7746,
0.67937, 0.86603, 0.45486, 0.63246, 0.54772, 0.8165, 0.92582, 1., 1., 1.,
1., 0.70711, 0.53452, 0.51075, 1., 0.73855, 0.67937, 0.8165, 0.8165, 1.,
1., 0.30861, 0.7746, 0.57735, 0.51075, 0.92582, 0.73855, 1., 0.86603, 1.,
0.8165, 1., 0.83205, 0.97333, 1., 1., 0.92582, 0.53882, 1., 0.78784,
0.58835, 1., 0.72761, 0.97333, 0.78784, 1., 1., 1., 0.6, 1.,
0.90453, 1., 0.97333, 0.92582, 1., 1., 1., 1., 1., 0.90453,
1., 0.97333, 1., 1., 0.83205, 0.83205, 1., 0.68825, 1., 1.,
1., 1., 1., 0.58835, 1., 1., 1., 1., 0.51832, 1.,
0.69749, 1., 0.84853, 1., 1., 0.69749, 0.48038, 0.762, 0.67937, 0.52623,
0.90453, 1., 1., 0.7746, 0.66259, 1., 1., 0.41603, 0.43994, 0.647,
1., 0.86603, 0.60609, 1., 1., 0.65465, 1., 1., 1., 0.6,
0.78784, 0.41404, 0.90453, 0.92582, 0.60609, 0.60609, 0.84853, 0.92582, 0.97333, 1.,
1., 0.8165, 1., 1., 0.97333, 1., 0.88465, 1., 0.67937, 1.},
{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}},
{1797,
5,
10,
Digits::parents,
Digits::children,
Digits::lambdas,
Digits::sizes,
Common::CLUSTER_SELECTION_METHOD::LEAF,
false,
0.0,
{1., 0.58403, 0., 0.85348, 0., 0., 0., 0., 0., 0.,
0.73614, 1., 0.69244, 0.89164, 0., 0.91287, 0., 0., 0., 0.,
0.75426, 1., 0.82675, 0.82176, 0., 0., 0., 0., 0., 0.,
0.75316, 0., 0.96262, 0., 0., 0., 0.75986, 0., 0., 0.86867,
0.5573, 1., 0.93996, 0., 0., 0., 0., 1., 0.82796, 0.8055,
0., 0., 0., 0., 0., 0.81232, 1., 0., 0., 0.8524,
0.87954, 0., 0.94117, 0.94409, 0., 0.95811, 0., 0., 0., 0.,
0.9984, 0., 0.72607, 0.85888, 0., 0., 0.5709, 0., 0.67642, 1.,
0.86672, 1., 0., 0.89914, 0.76688, 0.82857, 0., 0., 0.96954, 0.94117,
0.94535, 0.93258, 0., 0., 0., 0., 0., 0., 0.8634, 0.70181,
0., 0.70802, 0.88235, 0., 0., 0., 0., 0.94671, 0., 0.87756,
0., 0., 0., 0., 0.54525, 0., 0., 1., 0., 0.,
0., 0., 0., 0., 1., 0., 0.84609, 0., 0.88612, 0.,
0.87394, 0.83787, 0.86184, 0., 0., 1., 0.93907, 0., 0., 1.,
0.7679, 0.91434, 0.76688, 0.84285, 0., 0., 0.97148, 0., 0., 0.99697,
0.72905, 0.92181, 0.67926, 0.82176, 1., 0., 0., 0., 0., 0.98953,
0.84453, 0., 1., 0.89203, 0.90532, 1., 0.90284, 0.93666, 0., 0.90692,
0., 0., 0., 1., 0., 0.98802, 0.93646, 0.83787, 0.88445, 0.73206,
0.8707, 0.86435, 0., 0.54403, 0.8056, 0.90859, 1., 0.80867, 0., 0.83768,
0.8132, 0., 0., 0.88192, 0., 0.96954, 0., 0.96379, 0., 0.89579,
1., 1., 0.91444, 0., 0., 0.68987, 0., 0.76952, 0.79364, 0.74772,
0.87847, 0., 0., 0.92421, 0.85568, 0., 0., 0., 0.89529, 0.98802,
0., 0.87847, 0., 0., 0., 1., 0.79772, 0.89071, 1., 1.,
0.99703, 0., 0., 0.92683, 0., 0., 0., 0.88471, 0., 0.,
0., 0.82785, 0.58327, 0.74025, 0.73096, 0.9186, 0.9163, 0., 0.58327, 0.56195,
0., 0.85984, 0.94991, 0., 0., 0., 0.79494, 0.71948, 0.89758, 1.,
1., 0.93031, 1., 0., 0., 0., 0.90666, 0., 0.93496, 0.88192,
0.9893, 0., 0.9929, 1., 0., 0., 0.94335, 0., 0.81809, 1.,
0., 0.98706, 0., 0., 0., 0.92322, 0.7445, 0.94043, 0.94358, 0.,
0., 0., 0.70259, 0., 0., 0., 0., 0.98065, 0.7139, 0.,
1., 0.95757, 0., 0., 0.78602, 0.96809, 0.81916, 0.78884, 0., 1.,
0.93496, 0.93692, 0., 0.87719, 0., 0.82664, 0.95001, 0., 0.86005, 0.9914,
0.97222, 0., 0., 0., 0., 0., 0.89443, 0., 0.94335, 0.,
0.99854, 0.89062, 1., 0.86184, 0.83537, 0.94771, 0.55405, 0., 0., 1.,
0.82238, 0., 0., 0., 0.94106, 0.98802, 0.71667, 1., 0., 0.85635,
0., 0.91616, 0., 0., 0., 0.8493, 0., 0.7062, 0.88235, 0.94409,
1., 0., 0., 0.56466, 0., 0.88235, 0., 1., 0., 0.7722,
0.96465, 0.76864, 0.91346, 0., 0., 1., 0.96734, 1., 0., 0.,
0.964, 1., 0.8265, 0., 0., 0.88552, 0.83839, 0.9413, 0.82785, 0.,
0., 0., 0., 1., 0., 0.97654, 0.99236, 0.98738, 0.79562, 0.83871,
0., 0., 0., 1., 0., 0.89798, 0.94991, 0.9413, 0.8056, 0.,
1., 0., 0., 1., 0., 0.91613, 0.75316, 0.86668, 0., 0.,
0., 0., 0.78107, 0., 0., 1., 0.94516, 0., 0.94945, 1.,
0., 0.91876, 0., 0.87956, 0.94991, 0.89907, 0.7257, 0.79173, 1., 0.,
0.70849, 0.98985, 0.99258, 0.77309, 0., 0., 0.80399, 1., 0.8132, 0.85895,
1., 0.99368, 0., 0., 1., 1., 1., 0., 0.87052, 0.86668,
0., 0.7257, 0., 0.91644, 1., 0.73409, 0.9536, 1., 1., 0.92011,
0.94465, 0., 0.83902, 0., 0., 0.85348, 1., 0., 0.90354, 0.98738,
0., 0., 0., 0., 0.82961, 0.98738, 1., 0.78107, 0., 0.,
0.99788, 0., 0., 1., 1., 0., 0., 0., 1., 0.83118,
0., 1., 0., 0., 1., 0.95071, 0., 0., 0., 0.,
0., 0., 0.94119, 0., 1., 0., 0.90284, 1., 0.67926, 0.,
0., 0., 1., 0., 0., 0., 0.81094, 1., 0., 0.,
0., 0.8926, 0., 0.98362, 0.54403, 0., 0.76673, 0.98245, 0., 0.,
0., 0.97461, 0., 1., 0.54403, 0., 0.97518, 0., 0., 0.97733,
0., 0.89892, 0.8524, 0., 0., 0., 0.54403, 0., 0.96044, 0.,
0., 0., 1., 0.81691, 0.73307, 0.85723, 0.72869, 0.72869, 0., 0.,
0.72869, 0.78478, 0.93002, 0.72869, 0., 0., 0., 1., 0., 0.,
0.95757, 0., 1., 0., 0., 0., 0.79566, 1., 0.63311, 0.87167,
0., 0.6886, 0.54403, 0.71409, 0.70439, 0.69468, 0.9147, 1., 0., 0.,
0., 0.88104, 1., 0., 0., 0., 0.77182, 0., 0., 0.86995,
0.97142, 1., 0.54403, 1., 0., 0.55405, 0., 0.62922, 0.90106, 0.,
1., 0.85505, 0., 0.77044, 1., 1., 0., 0., 0., 0.67504,
0., 0.87328, 0.68987, 0., 0.95967, 0.95202, 1., 0., 0., 0.54403,
0., 0., 0.97518, 0.54403, 0.89798, 0., 0.80957, 0.61237, 0.8097, 0.94703,
1., 0., 0.99762, 0., 1., 1., 0.74557, 0.67006, 0.83569, 0.81602,
1., 0., 0., 1., 0., 1., 0.88091, 0.62217, 0.79076, 0.91741,
0., 0.87756, 0.99762, 0., 1., 1., 0.91444, 0.99002, 0., 0.88265,
0., 0., 1., 0., 0., 1., 1., 0., 0., 0.,
0., 0., 0.95934, 0., 0.94119, 0.73409, 0.75995, 0.77399, 1., 1.,
0., 0.61316, 0.61324, 0., 0., 0.95001, 0.96219, 0.99708, 0.89914, 1.,
0., 1., 0., 0., 1., 1., 0.64631, 0., 0.85081, 0.92227,
0., 0.68924, 0., 0.76253, 0.77142, 0.92041, 0.5471, 1., 0., 0.95001,
0.77854, 0., 0., 1., 0., 0., 0., 0., 1., 0.60055,
1., 1., 0., 0., 0.7995, 0.54835, 0., 0.79236, 0., 0.87135,
0., 0.96989, 0., 0.57161, 1., 0., 0., 0., 0., 0.87457,
0., 0.76167, 0.87457, 0.95934, 0.99445, 1., 0., 0., 0.95368, 0.,
0., 1., 0.85081, 0., 0., 0.93892, 0.7017, 1., 0.86184, 0.,
0., 0.89523, 0., 0., 0., 1., 0.85561, 0.58478, 0.85813, 1.,
0., 1., 0., 1., 0., 0., 0.70259, 0.79472, 0.86184, 0.93258,
0., 1., 0., 0., 0., 0., 0.93063, 0.87067, 0., 0.87959,
0., 0.88368, 0.96809, 0., 0.95476, 0.9173, 0., 0., 0.60722, 0.,
1., 0.83564, 0.88471, 0.6735, 0.59663, 0.66629, 0.88925, 0.82785, 0., 0.,
0.86184, 0.76907, 0.93002, 1., 0., 0.81226, 0., 0., 0., 0.,
1., 0., 0., 0.55404, 0., 0.88401, 0.61412, 0., 0.8236, 0.91496,
0., 0.77854, 0., 0.8097, 0.8179, 0.84453, 0.55277, 0., 0., 0.89287,
0., 0.65359, 0., 0., 0., 1., 0.69921, 0.92284, 0.88612, 0.81857,
0., 0., 0., 0., 0.83768, 0.86358, 0., 0.94771, 0., 0.,
0., 0.95465, 0.92173, 0.5416, 0., 1., 0.98267, 0., 1., 0.82785,
0.83882, 0., 0.79076, 0., 0., 0.88088, 1., 0., 0., 0.,
0., 0., 0.65514, 0., 0.89253, 0., 1., 0.84026, 0.846, 0.,
0.90429, 0.96594, 0., 0., 0.89253, 0.80957, 1., 0.766, 0.79861, 0.,
0., 0.90532, 0., 1., 0.991, 0.96809, 0.86603, 0.88925, 0.84077, 0.,
0., 0.97247, 0., 0., 0.95598, 0.90859, 0.92683, 1., 1., 0.98825,
0.98989, 0.96809, 0., 1., 1., 0.93996, 0., 0.83817, 0., 0.,
0., 0., 1., 0.82675, 0., 1., 0.85447, 1., 1., 0.85447,
0., 0.99826, 0.846, 0., 0., 0., 0., 0.91616, 0., 0.93907,
0., 0.91149, 1., 1., 0.87567, 0.95333, 1., 0.85447, 0., 0.85447,
0.69296, 0.70529, 1., 1., 1., 0., 0.87198, 1., 0., 0.,
0.79421, 1., 0., 0.90466, 0.9474, 0., 0., 0., 0., 0.81602,
0.90901, 0., 0.96115, 0.95232, 0., 1., 0.87881, 0.95953, 1., 0.,
0.92101, 0., 0., 0., 0.73634, 0., 0.85447, 0.92402, 0.94112, 0.,
0., 0., 0., 0., 0., 0.64591, 1., 1., 1., 1.,
0.59251, 0.93496, 0.93399, 0., 0.91287, 0., 0., 0., 0., 0.96345,
0.61677, 0.66971, 0.85675, 0., 0., 0.90889, 0., 0., 0., 0.69642,
1., 0.85447, 0.82273, 0., 0., 0., 0., 0., 0., 0.85561,
0., 0., 0., 0., 0., 0.85723, 0., 0., 0., 0.,
0., 0.65465, 0., 0., 0.876, 0.97733, 0.89443, 0., 0., 0.,
0., 0.76339, 0.85561, 0.56398, 0.85447, 0., 0.8439, 0.90296, 0., 0.,
0.88072, 0., 0., 0., 0., 0., 0., 1., 0., 0.88624,
0., 0.8806, 0.79562, 0., 0.79464, 0.77142, 0.76442, 0.83351, 0., 0.,
0.92284, 0.85447, 0.83793, 0., 0., 0., 0.91741, 0.55665, 0., 0.,
0.82275, 0., 0., 0., 0., 0.88072, 0.74473, 1., 0.83991, 0.,
0., 0., 0., 0., 0.89443, 0., 0., 0., 0., 0.,
0.98109, 0., 0.66281, 0.81916, 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.85561, 0., 0., 0., 0.87394, 0.95923, 0.93496,
0.8524, 1., 0., 0., 1., 0., 0., 0.87052, 1., 0.77763,
0.81226, 0., 0., 0., 0., 0., 0., 0.79494, 0., 0.73172,
0., 0., 0.9163, 0., 0., 0., 0., 0.79754, 1., 0.,
0., 0., 0., 0.7183, 0., 0., 0.9866, 0., 0., 0.72232,
0., 1., 0., 0., 0., 0.64384, 0.73717, 0.77854, 0.87457, 0.99262,
0., 0.86184, 0.83238, 0.95743, 0.85936, 0., 0., 0.85456, 0., 0.,
0.81133, 0., 0., 1., 0., 0., 0.89798, 0.89443, 0.8806, 0.72805,
0., 0., 0.77399, 0., 0.66224, 0.83688, 0.86547, 1., 0.97486, 0.,
0.93828, 0.73634, 0., 0., 0., 0., 0.92147, 0.89443, 0., 0.96268,
0.89443, 0., 0., 0., 1., 0., 0., 0., 0.64043, 0.,
0., 1., 0.90805, 0.97148, 0., 0., 0.89846, 0., 0., 1.,
0.82238, 0., 0.76167, 0.70987, 0., 0., 0.98369, 0., 0., 0.,
0., 0., 0.99848, 0.64661, 0., 0., 0.54281, 0.64179, 0., 0.,
0.80489, 0., 0.89999, 0., 0., 0.58327, 0., 0.70529, 0., 0.,
0.86565, 0., 0., 0., 0., 0.58327, 0.88506, 0.79623, 0., 0.70369,
0., 0., 0., 0., 0., 0.58327, 0.90579, 0.77984, 0.90466, 0.,
0., 0., 0.88845, 0.74988, 0.87778, 0.58327, 0.85505, 0.58327, 0., 1.,
0., 0., 0.8058, 0., 1., 0.92041, 0.94991, 0.72274, 0.70437, 0.,
0.5709, 0., 0.86215, 0.94945, 0.81073, 1., 0.846, 0.83871, 0., 0.80129,
0.88918, 0., 1., 0.98953, 1., 0., 0.90919, 0.87604, 0., 0.82217,
0.9173, 0., 0.79562, 0.55086, 0.66913, 1., 0.70081, 0.97822, 0., 0.,
0.92837, 0.85447, 0.60553, 0., 0., 0., 0.85456, 0.95743, 0., 0.,
0.87604, 0., 0., 0.54281, 0., 0.88918, 0.88906, 1., 0.84298, 0.,
0.89914, 0., 0.8657, 0., 0.89715, 0., 0., 0., 0., 0.,
0., 0., 0.85447, 0.76167, 0., 0., 0., 0., 0., 0.56128,
0.56603, 0., 0., 0.6904, 0.55665, 0.91642, 0.83226, 1., 0.84181, 0.,
0.99132, 1., 1., 1., 0.90354, 0.8605, 0.9413, 0.8461, 0.92559, 1.,
0.97871, 1., 0., 1., 0.89253, 0.78728, 0.99521, 1., 0.94409, 1.,
0., 1., 1., 1., 1., 0.87052, 0.99848, 1., 0.98847, 0.,
1., 0.99236, 0.99848, 0.93996, 0.93541, 0.93996, 0., 0.85949, 0., 0.,
0.93258, 0.99708, 0.9413, 0.99743, 0.99236, 0.85813, 1., 0., 0., 1.,
0.85723, 0.9413, 1., 0., 0.89536, 0.85348, 1., 0.8904, 0.8904, 0.,
0.94306, 0., 0., 1., 0.90019, 0.9413, 0.8926, 0.81932, 0.88088, 0.,
1., 0., 1., 0.82217, 1., 0.88066, 0., 0.96379, 1., 0.79269,
0.87307, 1., 1., 0.98595, 1., 0.84264, 0.79684, 0.99848, 0.9413, 1.,
0., 0., 1., 0.83768, 0.86882, 1., 0.93692, 1., 1., 1.,
0.92926, 0.95811, 0.89185, 0., 0.89574, 1., 0., 1., 0.89062, 0.,
0.75825, 1., 0., 0., 0.90242, 0.97733, 1., 0., 0., 0.9947,
0., 1., 0., 0., 0., 0.94991, 0.64226, 0.7257, 0., 1.,
0.89846, 0., 0., 0., 0.9519, 0.84609, 0.62217, 0., 0., 1.,
0.88992, 0., 0., 0.79885, 0.56603, 0., 0.86906, 0., 0.91287, 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0.86883,
0., 0., 0., 0., 1., 1., 0., 0., 0., 0.,
0.8786, 0.65012, 0.57587, 0., 0.86814, 0., 0.58327, 0., 0.69125, 0.54648,
0.7966, 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.82916, 1., 0.67883, 1., 0., 0., 0.,
0.87394, 1., 0., 0., 0.93828, 0.78505, 0.80244, 0., 0., 0.,
0.89412, 0.92884, 0., 0., 1., 0., 0., 0., 0., 0.94409,
0.95743, 0., 0.8309, 0., 0.91741, 0., 0., 0., 1., 0.,
0., 0., 0., 0., 0.66224, 0.72274, 0., 0., 0., 0.,
0., 0.99461, 0., 0.90284, 0., 0., 0., 0.89167, 0., 0.86814,
0., 0., 0.89892, 0., 1., 0., 1., 0.96345, 0.64734, 0.7148,
0., 0., 0.89892, 0.94507, 0., 0., 0.98369, 0.775, 0.60722, 0.,
0., 0., 0.89892, 0., 1., 0., 1., 0.95436, 0.99697, 0.,
0., 0., 0., 0.89907, 0., 0., 0.91847, 0., 0., 0.56061,
0., 1., 0., 0.89892, 0.65149, 0.74772, 0.7241, 0.78318, 0.98109, 0.,
0., 0.8097, 0.64453, 0., 0.72274, 0., 0., 0., 0., 0.,
0., 0., 0., 0.91616, 0., 0., 0.97512, 0.54772, 0., 0.83991,
0.99697, 0., 0., 0., 0.8097, 0.71735, 0.86547, 0., 0., 0.,
0., 0.67624, 0., 0., 0., 0., 0.8634, 0.65254, 0., 0.92202,
0.66112, 0., 0., 0., 0., 0., 0.64734, 0., 0.64731, 0.89469,
0.81508, 0., 0., 0.95119, 0.64226, 0., 0., 0., 0., 0.,
0.80256, 0., 0.92556, 0.74343, 0., 0., 0., 0., 0., 0.,
0., 0., 0.91847, 0.74029, 0., 0., 0.},
{1, 13, -1, 11, -1, -1, -1, -1, -1, -1, 1, 14, 3, 11, -1, 17, -1, -1, -1, -1, 1, 14, 3, 11,
-1, -1, -1, -1, -1, -1, 1, -1, 16, -1, -1, -1, 1, -1, -1, 15, 13, 10, 14, -1, -1, -1, -1, 14,
1, 1, -1, -1, -1, -1, -1, 1, 14, -1, -1, 11, 11, -1, 11, 11, -1, 19, -1, -1, -1, -1, 14, -1,
1, 15, -1, -1, 13, -1, 1, 1, 14, 6, -1, 11, 3, 14, -1, -1, 19, 11, 14, 11, -1, -1, -1, -1,
-1, -1, 11, 13, -1, 1, 16, -1, -1, -1, -1, 14, -1, 17, -1, -1, -1, -1, 13, -1, -1, 17, -1, -1,
-1, -1, -1, -1, 10, -1, 1, -1, 15, -1, 1, 14, 3, -1, -1, 17, 18, -1, -1, 15, 1, 14, 3, 11,
-1, -1, 19, -1, -1, 15, 1, 14, 3, 11, 8, -1, -1, -1, -1, 15, 1, -1, 17, 17, 18, 17, 1, 15,
-1, 15, -1, -1, -1, 4, -1, 11, 17, 14, 1, 1, 3, 3, -1, 13, 3, 1, 14, 3, -1, 11, 11, -1,
-1, 11, -1, 19, -1, 18, -1, 15, 14, 17, 1, -1, -1, 3, -1, 3, 1, 1, 14, -1, -1, 11, 3, -1,
-1, -1, 14, 11, -1, 14, -1, -1, -1, 8, 11, 14, 8, 1, 17, -1, -1, 15, -1, -1, -1, 17, -1, -1,
-1, 3, 13, 3, 3, 17, 17, -1, 13, 13, -1, 15, 1, -1, -1, -1, 1, 13, 3, 11, 10, 16, 18, -1,
-1, -1, 1, -1, 3, 11, 10, -1, 18, 6, -1, -1, 1, -1, 3, 11, -1, 16, -1, -1, -1, 15, 1, 15,
16, -1, -1, -1, 1, -1, -1, -1, -1, 10, 13, -1, 7, 11, -1, -1, 1, 1, 3, 3, -1, 12, 3, 1,
-1, 3, -1, 11, 11, -1, 11, 11, 10, -1, -1, -1, -1, -1, 13, -1, 1, -1, 16, 3, 12, 3, 1, 1,
13, -1, -1, 11, 3, -1, -1, -1, 18, 11, 13, 11, -1, 13, -1, 18, -1, -1, -1, 13, -1, 1, 16, 11,
18, -1, -1, 13, -1, 16, -1, 10, -1, 3, 12, 3, 3, -1, -1, 5, 17, 10, -1, -1, 10, 5, 1, -1,
-1, 11, 1, 14, 3, -1, -1, -1, -1, 0, -1, 15, 1, 14, 3, 11, -1, -1, -1, 0, -1, 15, 1, 14,
3, -1, 8, -1, -1, 0, -1, 15, 1, 15, -1, -1, -1, -1, 1, -1, -1, 15, 12, -1, 14, 0, -1, 11,
-1, 14, 1, 1, 3, 3, 0, -1, 3, 1, 2, 3, -1, -1, 11, 0, 11, 11, 8, 19, -1, -1, 8, 15,
14, -1, 1, 15, -1, 3, -1, 3, 1, 1, 14, 0, 19, 11, 3, -1, 0, -1, -1, 11, 14, -1, 15, 14,
-1, -1, -1, -1, 11, 14, 8, 1, -1, -1, 19, -1, -1, 14, 0, -1, -1, -1, 0, 3, -1, 3, -1, -1,
0, 15, -1, -1, -1, -1, -1, -1, 1, -1, 15, -1, 1, 2, 3, -1, -1, -1, 19, -1, -1, -1, 1, 2,
-1, -1, -1, 16, -1, 6, 13, -1, 1, 2, -1, -1, -1, 16, -1, 4, 13, -1, 1, -1, -1, 16, -1, 16,
1, -1, -1, -1, 13, -1, 2, -1, -1, -1, 16, 2, 1, 1, 3, 3, -1, -1, 3, 1, 2, 3, -1, -1,
-1, 4, -1, -1, 10, -1, 19, -1, -1, -1, 2, 16, 1, 15, -1, 3, 13, 3, 1, 1, 2, 6, -1, -1,
-1, 2, 6, -1, -1, -1, 2, -1, -1, 2, 6, 19, 13, 9, -1, 13, -1, 1, 16, -1, 19, 15, -1, 2,
6, 16, -1, -1, -1, 3, -1, 3, 3, -1, 6, 15, 16, -1, -1, 13, -1, -1, 1, 13, 15, -1, 1, 13,
3, 11, 8, -1, 18, -1, 12, 5, 1, 13, 3, 11, 8, -1, -1, 4, -1, 5, 1, 13, 3, 11, -1, 17,
18, -1, 12, 5, 1, 5, -1, 17, -1, -1, 1, -1, -1, 5, 12, -1, -1, -1, -1, -1, 17, -1, 1, 1,
3, 3, 4, 12, -1, 1, 13, -1, -1, 11, 11, 4, 11, 11, -1, 18, -1, -1, 8, 5, 13, -1, 1, 5,
-1, 3, -1, 3, 1, 1, 13, 4, -1, 11, 3, -1, -1, 8, -1, -1, -1, -1, 5, 13, 4, 18, -1, -1,
11, 13, -1, 1, -1, 11, -1, 5, -1, 13, 4, -1, -1, -1, -1, 3, -1, 3, 3, 17, 6, 5, -1, -1,
12, -1, -1, 5, 1, -1, -1, 12, 1, 13, 3, -1, -1, 17, -1, -1, -1, 15, 1, 13, 3, 11, -1, 17,
-1, 6, -1, -1, 1, 13, 3, 11, -1, 17, -1, -1, -1, -1, 1, 15, -1, 17, -1, 17, 1, -1, 12, 15,
-1, -1, 13, -1, 7, 11, 17, 13, 1, 1, 3, 3, -1, -1, 3, 1, 2, 3, -1, 11, -1, -1, -1, -1,
9, -1, -1, 1, -1, 15, 13, -1, 1, 15, -1, 3, -1, 3, 1, 1, 13, -1, -1, 11, -1, 13, -1, -1,
-1, 11, 13, 11, 15, 13, -1, -1, -1, -1, 11, 13, -1, 1, -1, -1, -1, 15, 18, 13, -1, 17, 9, -1,
7, 3, 14, -1, 3, -1, -1, 15, 17, -1, -1, -1, -1, -1, 1, -1, 15, -1, 2, 3, 11, -1, 16, 18,
-1, -1, 15, 1, 2, 3, 11, -1, -1, 18, -1, 12, 15, 1, 2, 3, 11, -1, -1, 18, -1, -1, 15, 1,
15, 16, 16, 18, 16, 1, -1, 12, 15, 12, -1, 2, -1, -1, -1, -1, 2, 3, -1, 12, 3, 1, 2, 3,
-1, 11, 11, -1, -1, -1, -1, 18, -1, 18, -1, 15, 2, 16, 1, 15, 16, 3, -1, 3, 1, 1, 2, 7,
18, -1, 3, 2, -1, -1, 11, 2, -1, 15, 2, -1, -1, -1, -1, 11, 2, -1, 1, 16, -1, 18, 15, 18,
2, -1, 16, -1, -1, -1, 3, -1, 3, 3, 16, -1, -1, -1, -1, -1, -1, 1, 12, 15, 12, 1, 13, 3,
11, -1, 17, -1, -1, -1, -1, 1, 13, 3, 11, -1, -1, 18, -1, -1, -1, 1, 13, 3, 11, -1, -1, -1,
-1, -1, -1, 1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 13, -1, -1, 11, 16, 13, -1, -1, -1,
-1, 3, 1, 13, 3, -1, 11, 11, -1, -1, 11, -1, -1, -1, -1, -1, -1, 13, -1, 1, -1, 17, 3, -1,
3, 1, 1, 13, -1, -1, 11, 3, 13, -1, -1, -1, 11, 13, -1, -1, 13, -1, -1, -1, -1, 11, 13, 10,
1, -1, -1, -1, -1, -1, 13, -1, -1, -1, -1, -1, 3, -1, 3, 3, -1, -1, -1, -1, -1, -1, -1, -1,
-1, 1, -1, -1, -1, 1, 14, 3, 11, 10, -1, -1, 6, -1, -1, 1, 14, 3, 11, -1, -1, -1, -1, -1,
-1, 1, -1, 3, -1, -1, 17, -1, -1, -1, -1, 1, 15, -1, -1, -1, -1, 1, -1, -1, 15, -1, -1, 13,
-1, 6, -1, -1, -1, 1, 1, 3, 3, 6, -1, 3, 1, 13, 3, -1, -1, 11, -1, -1, 11, -1, -1, 19,
-1, -1, 15, 13, 17, 1, -1, -1, 3, -1, 3, 1, 1, 13, 6, -1, 11, 3, -1, -1, -1, -1, 11, 13,
-1, 15, 13, -1, -1, -1, 10, -1, -1, -1, 1, -1, -1, 19, 15, 19, -1, -1, 17, -1, -1, 4, 3, -1,
3, 3, -1, -1, 15, -1, -1, -1, -1, -1, 15, 1, -1, -1, 13, 1, -1, -1, 11, -1, 16, -1, -1, 13,
-1, 1, -1, -1, 11, -1, -1, -1, -1, 13, 15, 1, -1, 3, -1, -1, -1, -1, -1, 13, 15, 1, 15, -1,
-1, -1, 16, 1, 15, 13, 15, 13, -1, 13, -1, -1, 11, -1, 13, 1, 1, 3, 3, -1, 13, -1, 1, 14,
3, 19, 11, 11, -1, 11, 11, -1, 19, 19, 19, -1, 15, 13, -1, 1, 15, -1, 3, 13, 3, 1, 1, 14,
-1, -1, 11, 3, 13, -1, -1, -1, 11, 13, -1, -1, 13, -1, -1, 13, -1, 11, 13, 10, 1, -1, 11, -1,
15, -1, 13, -1, -1, -1, -1, -1, -1, -1, 3, 3, -1, -1, -1, -1, -1, 13, 13, -1, -1, 1, 13, 1,
14, 3, 11, -1, 16, 18, 7, 12, 15, 1, 14, 3, 11, 9, 16, 18, -1, 12, 15, 1, 14, 3, 11, 9,
-1, 18, 7, 12, 15, 1, 15, 16, 16, -1, 16, 1, 15, 12, 15, 12, -1, 14, -1, -1, 11, 16, 14, 1,
1, 3, 3, -1, -1, 3, 1, 14, 3, -1, 11, 11, 7, 11, 11, -1, 18, -1, -1, 9, 15, 14, 16, 1,
15, -1, 3, -1, 3, 1, 1, 14, -1, 18, 11, 3, 14, 7, 9, 18, 11, 14, 11, 15, 14, 7, -1, -1,
9, 11, 14, 9, 1, 16, 11, 18, 15, 19, 14, -1, 16, 9, -1, 7, 3, -1, 3, 3, -1, -1, 15, 16,
9, -1, -1, 9, -1, 1, -1, -1, -1, 1, 13, 3, -1, 10, 17, -1, -1, -1, 5, 1, 13, -1, -1, 10,
17, -1, -1, 1, 13, -1, 11, -1, 17, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1,
10, 13, -1, -1, -1, -1, 13, 1, 1, -1, 3, -1, 13, -1, 1, 13, 3, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, 13, 17, 1, 15, -1, -1, -1, 1, 13, -1, -1, 11, 3, 13, -1, -1, -1, 11, 13,
-1, -1, 13, -1, -1, -1, -1, 11, 13, -1, 1, -1, 11, -1, -1, -1, 13, -1, -1, -1, -1, -1, 3, 3,
-1, -1, -1, -1, -1, 10, -1, 1, -1, -1, -1, 1, -1, 3, -1, -1, 16, -1, 6, -1, 15, 1, 13, 3,
-1, -1, 16, 18, -1, -1, 15, 1, 13, -1, -1, -1, 16, -1, 6, -1, 15, 1, 15, -1, -1, -1, -1, 1,
-1, -1, 15, -1, -1, 13, -1, 4, -1, 16, 13, 1, 1, 3, 3, -1, -1, 3, 1, -1, 3, -1, -1, -1,
-1, -1, -1, -1, -1, 18, -1, -1, 15, 13, -1, 1, 15, -1, -1, -1, 3, 1, 1, -1, -1, -1, -1, 3,
-1, -1, -1, -1, 11, 13, -1, 15, 13, -1, -1, -1, -1, -1, 13, -1, 1, 16, 11, -1, -1, 18, 13, -1,
-1, -1, -1, -1, 3, -1, 3, 3, -1, -1, -1, -1, -1, -1, -1, -1, 15, 1, -1, -1, -1}}};
const std::vector<AllPointsMembershipVectorsInputs<float, int>>
all_points_membership_vectors_inputs = {
{MLCommon::Datasets::Digits::n_samples,
MLCommon::Datasets::Digits::n_features,
5,
10,
MLCommon::Datasets::Digits::digits,
Digits::parents,
Digits::children,
Digits::lambdas,
Digits::sizes,
Common::CLUSTER_SELECTION_METHOD::EOM,
false,
0.0,
{0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.030452669, 0.026425911, 0.03859358, 0.035335384, 0.03465681, 0.04330654,
0.030934544, 0.035263974, 0.034629185, 0.040295452, 0.08203372, 0.05415639, 0.03096713,
0.030772058, 0.036201734, 0.050000593, 0.052401632, 0.057919584, 0.04858703, 0.052152585,
0.05773571, 0.04828094, 0.059159156, 0.049302336, 0.06030733, 0.059307363, 0.06011706,
0.051180135, 0.045277823, 0.045740843, 0.037570864, 0.044504564, 0.044347588, 0.051730044,
0.053687304, 0.04275065, 0.03958131, 0.050015815, 0.15312548, 0.06570544, 0.04946378,
0.04671142, 0.0658831, 0.06546232, 0.042939343, 0.045101393, 0.050759714, 0.05063975,
0.045158684, 0.042974185, 0.08172499, 0.05630307, 0.059448175, 0.046519224, 0.04837805,
0.052892454, 0.061843764, 0.04697889, 0.049889456, 0.04279918, 0.039387953, 0.04705047,
0.040773552, 0.05111941, 0.046864744, 0.04475714, 0.045765456, 0.047181815, 0.0657336,
0.059956346, 0.042626675, 0.05017978, 0.07457219, 0.06261762, 0.04092395, 0.049042735,
0.058233604, 0.051248346, 0.06454816, 0.064581126, 0.074169226, 0.05246088, 0.14148338,
0.057690255, 0.067068115, 0.068735555, 0.05563464, 0.05498004, 0.056694236, 0.06261387,
0.060159322, 0.042371504, 0.12150341, 0.038532943, 0.044014372, 0.045548994, 0.0535712,
0.04089017, 0.04691799, 0.053351197, 0.04888466, 0.045080096, 0.04578036, 0.047408894,
0.053640515, 0.040363096, 0.048835263, 0.045065925, 0.045655083, 0.0495227, 0.043399226,
0.039309103, 0.05271795, 0.055561654, 0.08126343, 0.045145925, 0.04279688, 0.054019824,
0.047170583, 0.043821707, 0.04239352, 0.050695807, 0.044193458, 0.039533243, 0.042984303,
0.047810826, 0.04684485, 0.04702607, 0.051284414, 0.053076256, 0.038314212, 0.044519763,
0.05888729, 0.05538499, 0.040876564, 0.05038418, 0.09449788, 0.047396176, 0.035478715,
0.03652438, 0.053261686, 0.05119847, 0.053149752, 0.03886862, 0.051980693, 0.03668843,
0.039044365, 0.051483423, 0.052190717, 0.04399063, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.041562784, 0.039634902,
0.04179897, 0.060897756, 0.078786716, 0.04196951, 0.035760388, 0.044400863, 0.046321124,
0.044723783, 0.047532864, 0.041702308, 0.04182573, 0.042885028, 0.042640433, 0.04720063,
0.051346548, 0.055992816, 0.053089153, 0.055864193, 0.04574576, 0.050660297, 0.046932943,
0.12161529, 0.07214019, 0.050735574, 0.057125773, 0.06974797, 0.06478319, 0.048655234,
0.05617377, 0.05407645, 0.06171904, 0.044776857, 0.04300609, 0.17745644, 0.062050287,
0.060677867, 0.045636557, 0.053859673, 0.053011436, 0.06145838, 0.05031584, 0.049790557,
0.04787618, 0.05728466, 0.04715182, 0.058531225, 0.055062577, 0.059300788, 0.052739248,
0.052468024, 0.050221205, 0.054236338, 0.060164403, 0.05764644, 0.052034825, 0.051062603,
0.06942513, 0.13554165, 0.040260587, 0.041985344, 0.049498778, 0.056590803, 0.049939,
0.06390085, 0.043947242, 0.09396397, 0.053634662, 0.056525722, 0.06376069, 0.05051667,
0.04751665, 0.046850618, 0.04615577, 0.058091648, 0.054501202, 0.11906343, 0.045020733,
0.051079616, 0.07888997, 0.059520848, 0.054763462, 0.06275523, 0.07169533, 0.063544154,
0.06266325, 0.060570393, 0.05532324, 0.05609703, 0.053483877, 0.05580013, 0.06849672,
0.052121796, 0.05515773, 0.058190882, 0.053069327, 0.0493609, 0.060789246, 0.07332433,
0.06640139, 0.05938293, 0.05454718, 0.05934984, 0.066216946, 0.04064723, 0.046893645,
0.04193273, 0.05931262, 0.056170862, 0.042508774, 0.04296904, 0.047492977, 0.067656346,
0.05242127, 0.046588674, 0.04241375, 0.078570515, 0.06400287, 0.03903062, 0.047911074,
0.10038765, 0.04797706, 0.035964448, 0.038105797, 0.053334724, 0.047459286, 0.052497342,
0.042303395, 0.052369535, 0.03680777, 0.042086925, 0.061833046, 0.05338524, 0.04184123,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.047183935, 0.04817418, 0.053099137, 0.05795278, 0.10181844, 0.045310393,
0.038659718, 0.052666884, 0.06200373, 0.06429642, 0.061166264, 0.04442642, 0.05125348,
0.04919268, 0.04954248, 0.057079356, 0.05425396, 0.065672666, 0.04504395, 0.0499737,
0.04350092, 0.055368204, 0.041843943, 0.07400529, 0.061224632, 0.048269585, 0.05183331,
0.0598402, 0.063870564, 0.049982667, 0.05110122, 0.053153623, 0.058296565, 0.0503748,
0.044951793, 0.09287239, 0.0724015, 0.06284612, 0.05308316, 0.056617364, 0.054242577,
0.08109655, 0.054547343, 0.054845735, 0.046069343, 0.053355925, 0.053734075, 0.057057668,
0.054919943, 0.053925373, 0.06141847, 0.047952402, 0.05485994, 0.05210831, 0.06109143,
0.05122484, 0.047125142, 0.048246026, 0.05947591, 0.09198123, 0.04663392, 0.05999497,
0.04852788, 0.06266831, 0.06519054, 0.06677744, 0.048364975, 0.1448948, 0.055324733,
0.06615203, 0.06279805, 0.053804986, 0.0547622, 0.05512483, 0.06176642, 0.054835014,
0.045732364, 0.101151645, 0.03875127, 0.034668814, 0.053519886, 0.064001046, 0.041709427,
0.049250238, 0.050927706, 0.043073174, 0.057084717, 0.0475938, 0.047649704, 0.039890055,
0.045602642, 0.05069977, 0.05305988, 0.045269933, 0.049507357, 0.047961313, 0.04271647,
0.04628884, 0.050186347, 0.060473263, 0.05270264, 0.04670214, 0.047885884, 0.046948407,
0.048643477, 0.034600094, 0.0396889, 0.036285453, 0.04904482, 0.046245717, 0.037674636,
0.04065536, 0.044335585, 0.06178718, 0.048114564, 0.039619796, 0.041458614, 0.078107774,
0.059037093, 0.03508473, 0.04268389, 0.13556938, 0.040326923, 0.036222804, 0.037575126,
0.047849257, 0.044489156, 0.051717002, 0.041899316, 0.051836167, 0.032191727, 0.036116168,
0.05438976, 0.058660813, 0.041633487, 0.044118688, 0.047788978, 0.046749216, 0.055078544,
0.047274116, 0.044993136, 0.046974845, 0.04710383, 0.06054247, 0.052472588, 0.0476306,
0.04655564, 0.08782648, 0.05716661, 0.03694888, 0.05813502, 0.055185247, 0.057663713,
0.0545363, 0.05640165, 0.060820047, 0.06569328, 0.055259537, 0.06710302, 0.0656899,
0.052416258, 0.055157572, 0.06403188, 0.10995138, 0.08457152, 0.055802025, 0.047692463,
0.054601144, 0.047871422, 0.053167395, 0.04865234, 0.052276615, 0.043430746, 0.054601144,
0.054950036, 0.04936794, 0.05230421, 0.051337518, 0.06074955, 0.07424422, 0.04166548,
0.047681324, 0.04893293, 0.05644534, 0.05175425, 0.06483674, 0.050152082, 0.13976763,
0.05631141, 0.06339216, 0.062587656, 0.055430617, 0.050618082, 0.050252378, 0.04769223,
0.05925664, 0.052389458, 0.06158624, 0.05513168, 0.058215715, 0.055097234, 0.056186788,
0.05053668, 0.059528723, 0.06001158, 0.055846088, 0.054700684, 0.054568972, 0.07015081,
0.0920268, 0.050069105, 0.10666876, 0.046810996, 0.037653085, 0.040255636, 0.055734847,
0.045432083, 0.0502376, 0.041735422, 0.052008357, 0.0376287, 0.03888248, 0.057302825,
0.053766116, 0.045669783, 0.03671306, 0.04142621, 0.037598994, 0.063659035, 0.05724792,
0.038987648, 0.04073836, 0.050828263, 0.06321209, 0.059646405, 0.039239094, 0.039625555,
0.06303593, 0.06648683, 0.04088914, 0.05371708, 0.052493814, 0.06932945, 0.04595691,
0.05049751, 0.04748696, 0.050327115, 0.046664488, 0.050607968, 0.07141426, 0.053439472,
0.045183863, 0.05024256, 0.055625338, 0.07350982, 0.043943044, 0.059306398, 0.045846872,
0.04613408, 0.049627338, 0.041438676, 0.048927676, 0.04883055, 0.0780368, 0.07193223,
0.042178646, 0.047662683, 0.11167239, 0.08640992, 0.04672246, 0.030181086, 0.034118887,
0.037909184, 0.033105392, 0.039164603, 0.034772795, 0.030437326, 0.034800503, 0.04182804,
0.050728332, 0.04303435, 0.03804627, 0.036739927, 0.033911206, 0.038526576, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.046498314, 0.04515856, 0.057243597, 0.051296107, 0.044182234, 0.07732149, 0.06933504,
0.048764776, 0.056927286, 0.059258644, 0.06613193, 0.16408704, 0.05445953, 0.05386574,
0.045428578, 0.0575411, 0.04849107, 0.10751922, 0.040391456, 0.038662225, 0.06469614,
0.059123043, 0.045858525, 0.050783444, 0.05619927, 0.050407916, 0.058591012, 0.050113417,
0.050923258, 0.0472831, 0.063449636, 0.05222801, 0.21403942, 0.04402854, 0.050055742,
0.05783667, 0.056654975, 0.048679426, 0.06015844, 0.065677546, 0.05693766, 0.05148547,
0.05871448, 0.05681936, 0.06323463, 0.03611212, 0.043248013, 0.04604454, 0.043265924,
0.04519172, 0.03849693, 0.038920622, 0.040537614, 0.08773864, 0.05146208, 0.04368552,
0.045058407, 0.058492426, 0.05780828, 0.036633812, 0.05257787, 0.04835012, 0.05305236,
0.052992344, 0.056722242, 0.050139915, 0.042340774, 0.044937562, 0.04769257, 0.052002292,
0.05237425, 0.04750803, 0.045047566, 0.048788317, 0.07328349, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0467433,
0.12651552, 0.045357507, 0.042329077, 0.044724625, 0.05033856, 0.046085697, 0.06546459,
0.047600437, 0.058969922, 0.04142805, 0.04209832, 0.0622002, 0.059338618, 0.04876227,
0.0457568, 0.13240588, 0.04440539, 0.042700987, 0.046776585, 0.049868982, 0.04495059,
0.060059935, 0.048751697, 0.05130874, 0.042592153, 0.042818524, 0.057402793, 0.05384006,
0.04186029, 0.056476165, 0.060097925, 0.060407665, 0.047177456, 0.052187826, 0.05225901,
0.04814211, 0.05326196, 0.050095003, 0.06510935, 0.054716438, 0.050074074, 0.05118733,
0.049001005, 0.050855443, 0.057237733, 0.056649227, 0.06277263, 0.04967442, 0.0504455,
0.056871273, 0.05055301, 0.05459996, 0.047475994, 0.058762364, 0.055498734, 0.058320824,
0.050155748, 0.04636318, 0.045668174, 0.068805605, 0.049111687, 0.14680968, 0.0391081,
0.039403494, 0.062232073, 0.062047075, 0.044608217, 0.051459447, 0.056319915, 0.048767854,
0.0516827, 0.049509805, 0.050572343, 0.05148281, 0.05342027, 0.050235633, 0.06664942,
0.04709466, 0.04865688, 0.054882247, 0.047150694, 0.04622448, 0.050620165, 0.059721116,
0.060871284, 0.048266843, 0.04955719, 0.049107186, 0.05342027, 0.056428723, 0.052976694,
0.06929687, 0.045722242, 0.045685697, 0.05725829, 0.056250736, 0.053708725, 0.045980543,
0.05897056, 0.05271681, 0.059837114, 0.04757019, 0.0451267, 0.04408073, 0.04928239,
0.10955441, 0.051165745, 0.038654856, 0.043468434, 0.06382351, 0.0483664, 0.056431893,
0.04563333, 0.05484915, 0.043256465, 0.04826169, 0.06341707, 0.051228043, 0.0449266,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.05206009, 0.05337997, 0.058081873, 0.04831555, 0.0532873, 0.061362576,
0.05156098, 0.053743705, 0.04808468, 0.058520686, 0.060080662, 0.06399251, 0.048690196,
0.045221265, 0.044666722, 0.047029603, 0.05813152, 0.050485257, 0.060718276, 0.05977578,
0.06858506, 0.051082045, 0.17687716, 0.055331502, 0.07155931, 0.058175325, 0.05286574,
0.054899555, 0.05539718, 0.05827085, 0.043952655, 0.048826713, 0.056683384, 0.056401454,
0.061069816, 0.04653131, 0.044074275, 0.043703765, 0.09768525, 0.067217, 0.058953207,
0.058263212, 0.064217225, 0.055311847, 0.04950689, 0.044729333, 0.05615804, 0.056449298,
0.05460956, 0.055304546, 0.047753364, 0.049745217, 0.048058473, 0.10459265, 0.06721226,
0.051431857, 0.05611446, 0.07594772, 0.06699986, 0.04443013, 0.059286293, 0.057675585,
0.117658645, 0.0434316, 0.050589122, 0.076741464, 0.05185981, 0.052385513, 0.062148463,
0.062148463, 0.059355594, 0.05543152, 0.057312787, 0.050561935, 0.05444439, 0.04960342,
0.05852657, 0.06300591, 0.05706017, 0.05974112, 0.050707534, 0.0518091, 0.050191112,
0.11672136, 0.07040341, 0.056071166, 0.060616836, 0.07962835, 0.067285925, 0.049798954,
0.04962443, 0.05537322, 0.06396989, 0.057628326, 0.06691589, 0.049572367, 0.049011562,
0.04852947, 0.11607271, 0.07004115, 0.06218197, 0.062199824, 0.069781594, 0.065277554,
0.05790925, 0.05108779, 0.05666999, 0.05461511, 0.046066504, 0.045309845, 0.14774962,
0.056778762, 0.07015706, 0.047760755, 0.0523227, 0.05386382, 0.060069047, 0.049434774,
0.054270856, 0.04988152, 0.04804903, 0.05877299, 0.051585294, 0.060042404, 0.05849607,
0.06958116, 0.052299943, 0.20129204, 0.055554032, 0.06685959, 0.059786804, 0.05145039,
0.05491833, 0.056404762, 0.054907177, 0.04914753, 0.05763088, 0.05202649, 0.06536126,
0.06267669, 0.06955278, 0.051055335, 0.14100167, 0.056430694, 0.066453695, 0.06235691,
0.053320218, 0.05574208, 0.05584048, 0.061019436, 0.04777599, 0.0633993, 0.050119564,
0.05994491, 0.061221246, 0.06500443, 0.046193257, 0.12890095, 0.053726003, 0.06805992,
0.051184446, 0.046320118, 0.054373838, 0.058075592, 0.055908605, 0.052260436, 0.043571424,
0.053818304, 0.04820682, 0.042167965, 0.103979595, 0.05890823, 0.052370667, 0.04249923,
0.049046732, 0.0555962, 0.05913937, 0.045669004, 0.049474243, 0.050182138, 0.059866462,
0.042307932, 0.06250479, 0.04252587, 0.051767442, 0.044298355, 0.043756787, 0.04162783,
0.050091673, 0.0622226, 0.056747008, 0.046945322, 0.05054988, 0.047559015, 0.060842987,
0.051494945, 0.045261748, 0.05532315, 0.052822325, 0.048517365, 0.082378216, 0.06498641,
0.05447503, 0.05372653, 0.060786583, 0.07299523, 0.19665901, 0.053416178, 0.054116037,
0.05143732, 0.05581672, 0.05147969, 0.059070036, 0.046615478, 0.051079106, 0.05344379,
0.060495403, 0.048395224, 0.05793205, 0.056317613, 0.04739889, 0.049848985, 0.05792948,
0.09054751, 0.08413426, 0.041115373, 0.09550213, 0.04091351, 0.037939087, 0.041354325,
0.05328031, 0.04253471, 0.052031443, 0.04354442, 0.04867645, 0.03784839, 0.042758808,
0.061490484, 0.048574243, 0.03850366, 0.042862307, 0.051183682, 0.043070283, 0.054796558,
0.05422617, 0.044425562, 0.050758865, 0.051943123, 0.0753053, 0.06271597, 0.045849018,
0.050264366, 0.10696268, 0.0789571, 0.045555145, 0.030020164, 0.033061124, 0.032680243,
0.036737125, 0.03717246, 0.03573895, 0.029622147, 0.039753668, 0.04521467, 0.04549747,
0.03323093, 0.03167111, 0.035036497, 0.042886227, 0.045718655, 0.05289601, 0.054307964,
0.059672642, 0.047194343, 0.051607847, 0.05781445, 0.05140846, 0.05133311, 0.04883945,
0.056769367, 0.05489607, 0.06510129, 0.050311815, 0.04547459, 0.043983214, 0.03207063,
0.034461174, 0.038668983, 0.032307424, 0.039666887, 0.037997056, 0.030900879, 0.035785608,
0.034233738, 0.063188076, 0.04386021, 0.033863932, 0.037879214, 0.034648117, 0.041367292,
0.04350053, 0.039972886, 0.05546168, 0.037501242, 0.038143627, 0.04902307, 0.045874763,
0.041095547, 0.038966965, 0.045104653, 0.043215863, 0.056548234, 0.039577324, 0.036255766,
0.03437721, 0.037580233, 0.09596757, 0.037893716, 0.035578072, 0.040094316, 0.03982978,
0.037443478, 0.04958189, 0.042844214, 0.046571374, 0.032760356, 0.03375861, 0.05672632,
0.052376896, 0.03741543, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.050361335, 0.041590758, 0.056317747, 0.04553416,
0.043536857, 0.062256213, 0.063566394, 0.046048395, 0.050208416, 0.06174043, 0.05917955,
0.13696691, 0.053846266, 0.048586983, 0.046981987, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.048010208, 0.05678959,
0.050396074, 0.06603215, 0.06326775, 0.06777688, 0.050770227, 0.1643126, 0.05620471,
0.06775189, 0.06346454, 0.05530917, 0.054058004, 0.054762404, 0.06027797, 0.05395617,
0.059079066, 0.070416674, 0.05088712, 0.05682597, 0.05139553, 0.05242361, 0.045176566,
0.0896386, 0.06621408, 0.05736222, 0.061052475, 0.07078893, 0.06408696, 0.04983498,
0.03674551, 0.045078658, 0.04139643, 0.062418595, 0.11228463, 0.04046985, 0.035015367,
0.051362198, 0.05345019, 0.058844708, 0.051727388, 0.037657112, 0.046335004, 0.047308024,
0.046783563, 0.029224075, 0.025265539, 0.037317842, 0.028644327, 0.027173458, 0.050347995,
0.034881175, 0.028940674, 0.030919729, 0.03741212, 0.050003268, 0.0702847, 0.029442864,
0.029634053, 0.03031438, 0.05892882, 0.052634757, 0.09208298, 0.03768897, 0.037741035,
0.062309258, 0.054953903, 0.045717195, 0.05015602, 0.051666543, 0.04329918, 0.050971415,
0.04615937, 0.04732865, 0.04505492, 0.045528322, 0.039176513, 0.050908085, 0.041727014,
0.04289814, 0.06820603, 0.04502238, 0.047846608, 0.040249277, 0.0518744, 0.055980492,
0.05412509, 0.040564902, 0.045455, 0.051581796, 0.048658255, 0.053780902, 0.054245267,
0.06900041, 0.06272835, 0.071780816, 0.05254598, 0.16276284, 0.060581986, 0.06825667,
0.07043221, 0.056356505, 0.0533607, 0.055170402, 0.060338713, 0.053636182, 0.059380442,
0.06530199, 0.056938466, 0.061573204, 0.051172145, 0.052675143, 0.049280625, 0.10770487,
0.07122844, 0.05678373, 0.06199884, 0.07626379, 0.06520499, 0.052028064, 0.05040369,
0.044419277, 0.06849302, 0.048903476, 0.042576883, 0.08334442, 0.075423285, 0.048065446,
0.055211354, 0.05962833, 0.06825142, 0.1460576, 0.05337949, 0.05390222, 0.0472856,
0.05453236, 0.053839974, 0.0705362, 0.052911364, 0.06494876, 0.05163908, 0.050924677,
0.04626627, 0.101901785, 0.0640154, 0.06429106, 0.0635102, 0.06616969, 0.06373451,
0.06335421, 0.039445356, 0.054216113, 0.04322162, 0.040164027, 0.040708296, 0.038421243,
0.042952597, 0.040766217, 0.060018018, 0.05826101, 0.03775382, 0.04436742, 0.070471056,
0.059217744, 0.03712217, 0.038500186, 0.03316167, 0.04652862, 0.04141523, 0.040369473,
0.06373361, 0.042220727, 0.04419663, 0.041432958, 0.049785443, 0.08544052, 0.07474767,
0.03839882, 0.040679727, 0.045911286, 0.066700816, 0.054158483, 0.16880146, 0.04649321,
0.05746691, 0.069823764, 0.054986738, 0.053101417, 0.06529204, 0.06882838, 0.06540839,
0.05351188, 0.059348844, 0.054373357, 0.061704315, 0.038084254, 0.04182683, 0.04123231,
0.04827028, 0.049535967, 0.052998215, 0.039854255, 0.06988876, 0.04345622, 0.057482872,
0.0629296, 0.046765316, 0.041466046, 0.041752692, 0.05097894, 0.032441583, 0.037502307,
0.038595427, 0.033511907, 0.037502307, 0.035853237, 0.030100996, 0.03399249, 0.03428503,
0.050608557, 0.03840659, 0.03275002, 0.0343519, 0.037453923, 0.046685167, 0.052997675,
0.05151573, 0.061777942, 0.05055686, 0.04575724, 0.12879014, 0.07198142, 0.06659344,
0.050243784, 0.054409795, 0.05996626, 0.074224, 0.053882, 0.055958506, 0.050349552,
0.049696013, 0.05501041, 0.061559744, 0.048557896, 0.057908345, 0.046634596, 0.048539363,
0.0449796, 0.09771892, 0.06251486, 0.05174645, 0.055353623, 0.068603404, 0.06389366,
0.05068029, 0.03573212, 0.033868484, 0.0447911, 0.04047471, 0.04226645, 0.06997232,
0.0347587, 0.048404884, 0.03821939, 0.048198897, 0.102878414, 0.045182973, 0.03419261,
0.036558416, 0.046310507, 0.054874312, 0.05638623, 0.058988508, 0.050952848, 0.04656105,
0.114323676, 0.06896261, 0.06641231, 0.052185148, 0.054466054, 0.058592137, 0.07408465,
0.05619419, 0.05824871, 0.049482252, 0.03926195, 0.07833857, 0.03760193, 0.04096897,
0.040666375, 0.050073307, 0.043699622, 0.0622194, 0.044430923, 0.047823507, 0.038531788,
0.041671038, 0.056832645, 0.049523827, 0.03637593, 0.048469942, 0.05039724, 0.055370405,
0.05309008, 0.055028167, 0.05796238, 0.049736924, 0.05943935, 0.06349006, 0.06329941,
0.0515326, 0.046882167, 0.054591477, 0.07427159, 0.09878664, 0.05164411, 0.042750854,
0.06442935, 0.043854013, 0.052949507, 0.042272642, 0.04112952, 0.037375785, 0.058013406,
0.051445205, 0.060794048, 0.04890602, 0.04671973, 0.04865315, 0.06499157, 0.039762247,
0.046951633, 0.04142844, 0.045351002, 0.044388946, 0.0636229, 0.039907087, 0.081140265,
0.03927642, 0.056068927, 0.050283995, 0.041622326, 0.039515916, 0.04284754, 0.050638754,
0.04081039, 0.053115644, 0.04237988, 0.046282817, 0.048227824, 0.040108513, 0.04367453,
0.045431763, 0.064168856, 0.05343165, 0.04322025, 0.043132424, 0.102581, 0.06439783,
0.038291235, 0.036467865, 0.041654844, 0.039244305, 0.052630153, 0.048525695, 0.04977513,
0.040914986, 0.06577549, 0.042964928, 0.0477674, 0.058146693, 0.046749193, 0.04100717,
0.039424744, 0.042639397, 0.047583178, 0.045893226, 0.0649513, 0.051236004, 0.044445556,
0.06850181, 0.08204218, 0.048708376, 0.055784434, 0.059327886, 0.05793802, 0.16498306,
0.058327477, 0.052995637, 0.043988485, 0.057956353, 0.054881737, 0.108526565, 0.043076716,
0.043593735, 0.06676646, 0.053975053, 0.048767596, 0.056675453, 0.06168906, 0.056091458,
0.05895627, 0.053885616, 0.049266413, 0.049881645, 0.050641675, 0.048668765, 0.052436806,
0.054000974, 0.054691076, 0.05217249, 0.046600156, 0.056001637, 0.055852972, 0.063375674,
0.052382898, 0.048929278, 0.051069796, 0.06332303, 0.12741466, 0.053112455, 0.04989802,
0.06273312, 0.046154685, 0.04380824, 0.107692696, 0.051263202, 0.05826262, 0.04591793,
0.05085735, 0.050658006, 0.046829917, 0.046829917, 0.052806202, 0.054605756, 0.060501322,
0.055112183, 0.06025989, 0.049611505, 0.045089174, 0.18452471, 0.059945792, 0.062122177,
0.047209915, 0.055171587, 0.052536905, 0.05258571, 0.05142776, 0.059569277, 0.054438606,
0.06622758, 0.054372784, 0.17814486, 0.044773877, 0.051633008, 0.06953491, 0.056502257,
0.052847132, 0.05979157, 0.071594924, 0.06420673, 0.056017358, 0.06044389, 0.053232618,
0.060676515, 0.044795286, 0.049975216, 0.052862372, 0.052928507, 0.06547469, 0.049137797,
0.038921453, 0.053330604, 0.054063942, 0.060548227, 0.06746876, 0.04812051, 0.049964063,
0.044453785, 0.044367675, 0.034085974, 0.03327993, 0.038140845, 0.032381862, 0.035496134,
0.03547165, 0.030868871, 0.034746874, 0.033340324, 0.052562926, 0.04444253, 0.036565028,
0.03548034, 0.031568468, 0.03681799, 0.060111437, 0.05941608, 0.06493414, 0.047369167,
0.048545606, 0.054340865, 0.047334578, 0.053213786, 0.048639055, 0.06462419, 0.057967495,
0.05123169, 0.050767165, 0.04655116, 0.04600234, 0.055012546, 0.06109448, 0.061030943,
0.045423374, 0.052326668, 0.05068768, 0.043907315, 0.053271294, 0.051788624, 0.07541667,
0.052127883, 0.04726454, 0.05559471, 0.04785493, 0.048247106, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.065324984,
0.05425515, 0.1268825, 0.042810045, 0.04899588, 0.06084894, 0.05327768, 0.050781216,
0.057978425, 0.06459538, 0.0547678, 0.050095547, 0.055989277, 0.05240836, 0.06003214,
0.044103123, 0.051630862, 0.045401935, 0.0503708, 0.047211748, 0.043891557, 0.04964288,
0.053682648, 0.06344495, 0.06273326, 0.04687756, 0.04844025, 0.13564709, 0.06669852,
0.040562127, 0.05242766, 0.050525416, 0.050941005, 0.048944928, 0.0496039, 0.05082811,
0.047064066, 0.0449559, 0.055563547, 0.056621995, 0.048816256, 0.04771541, 0.05089253,
0.058390405, 0.058784273, 0.04900541, 0.041992806, 0.058400642, 0.043353394, 0.043064944,
0.0837367, 0.050488558, 0.048624866, 0.042742234, 0.050305486, 0.06128742, 0.05578009,
0.046109553, 0.045500886, 0.048218925, 0.041269664, 0.04001084, 0.050392427, 0.043889306,
0.046639215, 0.050258476, 0.039522193, 0.04105933, 0.048259497, 0.060028754, 0.080128595,
0.0515511, 0.04296192, 0.041507687, 0.049043562, 0.047027364, 0.047963496, 0.057530623,
0.06030521, 0.07037846, 0.054226287, 0.044202834, 0.05935967, 0.06643595, 0.07666881,
0.08801011, 0.059476405, 0.055055603, 0.050461728, 0.055717405, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.043527372,
0.048409496, 0.046282034, 0.04546087, 0.048832804, 0.042332135, 0.045737814, 0.040757827,
0.05936885, 0.06048937, 0.04435695, 0.04350644, 0.06625047, 0.06034932, 0.045568638,
0.045980323, 0.12400349, 0.046404477, 0.042406823, 0.046144154, 0.06061767, 0.048115063,
0.06417736, 0.04890084, 0.057453442, 0.04228986, 0.047136616, 0.07086558, 0.057180505,
0.044413023, 0.052575022, 0.05329337, 0.06326881, 0.044261772, 0.05003705, 0.049690608,
0.048532415, 0.047803815, 0.050283913, 0.06644117, 0.05932929, 0.047322758, 0.04972493,
0.0501424, 0.058324438, 0.04762697, 0.063134186, 0.048928, 0.048795443, 0.046290353,
0.04667808, 0.05810333, 0.055289984, 0.069220595, 0.0773068, 0.0445054, 0.060832992,
0.103075854, 0.07180109, 0.044527784, 0.052889403, 0.051997185, 0.06187481, 0.05582793,
0.066760086, 0.05913404, 0.04708615, 0.058364153, 0.05831522, 0.069189765, 0.10043717,
0.055377573, 0.052821405, 0.054975297, 0.072973244, 0.049015332, 0.14951813, 0.046385925,
0.044072673, 0.045501616, 0.053861074, 0.050958335, 0.06506758, 0.05064114, 0.056316905,
0.041026462, 0.04331964, 0.065771036, 0.066828646, 0.04566025, 0.05327237, 0.047044504,
0.05343119, 0.044157498, 0.039503098, 0.052981116, 0.10357965, 0.04402851, 0.049494628,
0.0542896, 0.046198152, 0.1013836, 0.056454804, 0.052977078, 0.03907736, 0.042831503,
0.047393437, 0.048929367, 0.06678854, 0.11688555, 0.04413596, 0.038402542, 0.052470926,
0.06495743, 0.07153207, 0.06152941, 0.04167457, 0.052507725, 0.05383337, 0.057971194,
0.04928314, 0.046220113, 0.06305647, 0.043777455, 0.049809843, 0.043821946, 0.048181724,
0.038266134, 0.068524264, 0.05087549, 0.048464958, 0.04900124, 0.05087549, 0.05700172,
0.04909236, 0.04701337, 0.041509856, 0.058551844, 0.044330426, 0.04189588, 0.08953933,
0.05976377, 0.04949179, 0.04471404, 0.050038435, 0.064733684, 0.075490676, 0.045107793,
0.048160207, 0.04739594, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.046171114, 0.062408548, 0.049167357, 0.06237704,
0.05892701, 0.0639369, 0.049209047, 0.20065331, 0.057865605, 0.07726801, 0.051958688,
0.04704224, 0.056456074, 0.06174389, 0.054815166, 0.06915499, 0.058968652, 0.1564264,
0.049024094, 0.056645364, 0.0728006, 0.054432426, 0.054250278, 0.06293159, 0.06525457,
0.07488176, 0.057693895, 0.058497556, 0.052968133, 0.0560697, 0.046506613, 0.057005372,
0.057376903, 0.05038347, 0.060752314, 0.061305232, 0.048038587, 0.067513056, 0.06801034,
0.09317619, 0.06497874, 0.06011224, 0.06631185, 0.057622746, 0.06086058, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.044910092, 0.12270973, 0.043311276, 0.038323164, 0.040445395, 0.053763792, 0.04622281,
0.049145956, 0.044095784, 0.048967384, 0.036074862, 0.041323863, 0.06141342, 0.05588065,
0.041308623, 0.051753573, 0.045411028, 0.06360735, 0.047562577, 0.04190403, 0.06991143,
0.08782297, 0.047628295, 0.052094057, 0.05566795, 0.05323784, 0.14945917, 0.053536177,
0.05178394, 0.042961612, 0.043144193, 0.047340017, 0.054109044, 0.04678308, 0.07123973,
0.04300013, 0.036878403, 0.04710599, 0.059769765, 0.06917784, 0.062096298, 0.040830523,
0.0499458, 0.046711452, 0.04874495, 0.04598615, 0.05235841, 0.057097785, 0.04910948,
0.05428896, 0.044595636, 0.045909718, 0.044704366, 0.10456096, 0.06418169, 0.048119802,
0.05165515, 0.07155346, 0.06214804, 0.046580393, 0.04234859, 0.040859047, 0.054031063,
0.04391903, 0.042084403, 0.13085032, 0.05141368, 0.056485094, 0.04579904, 0.048724245,
0.06673454, 0.059029847, 0.041799694, 0.04725269, 0.048444353, 0.04854086, 0.042598628,
0.052132763, 0.058135338, 0.06435938, 0.04638951, 0.044282444, 0.047281355, 0.05673039,
0.055923752, 0.059412573, 0.045439854, 0.047517378, 0.057370864, 0.14926605, 0.04467951,
0.05440951, 0.05092486, 0.055552863, 0.05370278, 0.066636644, 0.053366803, 0.222684,
0.055759117, 0.06525546, 0.058872648, 0.05285381, 0.057388723, 0.059034094, 0.048879173,
0.056053843, 0.048373345, 0.11996609, 0.04650138, 0.056754734, 0.07292588, 0.047795508,
0.050530452, 0.058359265, 0.05820352, 0.07512707, 0.052327268, 0.050879154, 0.04761613,
0.05277252, 0.046615977, 0.056109495, 0.053376507, 0.05103408, 0.059825562, 0.058337018,
0.047406588, 0.0804982, 0.059947357, 0.1080226, 0.059283, 0.055793595, 0.07061358,
0.05481439, 0.056345485, 0.050347857, 0.061442353, 0.052194826, 0.054536246, 0.05253502,
0.052484084, 0.06263403, 0.058240313, 0.08528451, 0.0836449, 0.049721643, 0.06762478,
0.13071004, 0.08552315, 0.0500506, 0.039251097, 0.10579731, 0.03863923, 0.038645882,
0.044312596, 0.048568424, 0.04109735, 0.06038594, 0.04258307, 0.047807176, 0.037852302,
0.037307635, 0.056755014, 0.050486423, 0.039555646, 0.05406378, 0.04848245, 0.06395516,
0.04936966, 0.043039896, 0.06130517, 0.11567231, 0.04818071, 0.056298267, 0.060986675,
0.05143345, 0.10962991, 0.06010347, 0.05652656, 0.04275881, 0.044413403, 0.0413372,
0.057297535, 0.038679216, 0.055214893, 0.04167627, 0.034668878, 0.03900259, 0.051715583,
0.053230327, 0.053251717, 0.041172553, 0.04731139, 0.03988972, 0.04040326, 0.052945837,
0.051147927, 0.06732791, 0.04594783, 0.04891266, 0.047468267, 0.051571686, 0.042219825,
0.07462423, 0.062128425, 0.051657025, 0.056857083, 0.06080899, 0.057919107, 0.05022614,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.04884615, 0.05464849, 0.0506626, 0.042267896, 0.048291314, 0.05008747,
0.041279495, 0.050680317, 0.041940056, 0.054016847, 0.042784818, 0.038491886, 0.043771308,
0.05285781, 0.10005036, 0.041739788, 0.047877934, 0.04587404, 0.062669955, 0.05697991,
0.059586987, 0.043860536, 0.15171176, 0.05320311, 0.062178113, 0.056791663, 0.045689214,
0.04770032, 0.0493126, 0.05161863, 0.05954611, 0.056762476, 0.14550273, 0.051337417,
0.064352095, 0.0708159, 0.05146904, 0.057903763, 0.06373813, 0.06498414, 0.07796371,
0.05708055, 0.059398446, 0.05543687, 0.06370863, 0.031316683, 0.0328854, 0.040861312,
0.034813903, 0.03509691, 0.04015112, 0.032186177, 0.035161775, 0.03557791, 0.041641407,
0.053772114, 0.036686517, 0.033486843, 0.03318594, 0.03547249, 0.04908435, 0.062105794,
0.05033021, 0.05470646, 0.052988406, 0.04900218, 0.0592792, 0.056028426, 0.07838228,
0.0794852, 0.048889842, 0.054447476, 0.17259742, 0.07673076, 0.04547052, 0.050689798,
0.14837505, 0.048778214, 0.03985432, 0.0429513, 0.057017744, 0.04989899, 0.05317447,
0.04625529, 0.05490329, 0.039805625, 0.044138726, 0.06453409, 0.057712812, 0.04644287,
0.03995272, 0.048205238, 0.04488845, 0.05234268, 0.04207803, 0.04452, 0.05030015,
0.046062414, 0.06118072, 0.054600082, 0.04618607, 0.04820766, 0.10230475, 0.052392036,
0.03416994, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.055301804, 0.050852023, 0.0578836, 0.04966384, 0.055636857,
0.052204836, 0.048374586, 0.051850382, 0.048034362, 0.059475917, 0.04944214, 0.04441294,
0.04904229, 0.059457235, 0.16040106, 0.0488635, 0.06527098, 0.052407242, 0.06094435,
0.061865922, 0.0729742, 0.048193738, 0.18573123, 0.056845814, 0.06970745, 0.054010812,
0.048219927, 0.05523344, 0.062174626, 0.057556767, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.05018925, 0.17466852,
0.051205933, 0.04038497, 0.04367034, 0.060185634, 0.050832536, 0.06046622, 0.04824629,
0.06162914, 0.039851356, 0.042683005, 0.06987119, 0.060769882, 0.048188977, 0.04847071,
0.06610646, 0.05197789, 0.050966706, 0.04660332, 0.046277646, 0.060695406, 0.05769634,
0.0699727, 0.083785966, 0.04737384, 0.055635918, 0.13753381, 0.07033902, 0.04322512,
0.038921855, 0.046155542, 0.04942928, 0.040753413, 0.04633316, 0.049939364, 0.039822653,
0.05142185, 0.049281906, 0.06455765, 0.053225532, 0.049498312, 0.052737184, 0.044969194,
0.04947566, 0.045610443, 0.0552263, 0.047881395, 0.05158348, 0.05007626, 0.044054855,
0.04918625, 0.054498978, 0.07393168, 0.07332068, 0.04634031, 0.04551959, 0.14913474,
0.0777834, 0.04276959, 0.04716449, 0.046752755, 0.05603056, 0.055575978, 0.06248008,
0.055865627, 0.044425763, 0.050752837, 0.054026637, 0.064458966, 0.08079356, 0.050821375,
0.051140267, 0.047241528, 0.049602967, 0.03930628, 0.040257182, 0.050195925, 0.044345595,
0.04375489, 0.14529167, 0.044252206, 0.059535254, 0.045868784, 0.049727958, 0.07269773,
0.053856563, 0.039963447, 0.043850273, 0.04687186, 0.052109584, 0.04668083, 0.06458263,
0.049468786, 0.0424459, 0.06571556, 0.08416689, 0.05061953, 0.05879472, 0.06282343,
0.053643323, 0.13684727, 0.060161766, 0.055688277, 0.044051882, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08217198,
0.051949568, 0.18817867, 0.045882825, 0.05013077, 0.06142537, 0.06252912, 0.04758249,
0.060156044, 0.06600503, 0.062038954, 0.05464553, 0.05442227, 0.05586763, 0.057013754,
0.05407174, 0.056267362, 0.07494126, 0.05540346, 0.06218722, 0.05204585, 0.053074818,
0.050854366, 0.12966841, 0.075884804, 0.059736498, 0.063829444, 0.07352189, 0.06682987,
0.05970652, 0.053313017, 0.051653836, 0.057168894, 0.0560285, 0.060193397, 0.05375744,
0.050171886, 0.053228598, 0.05895696, 0.062448315, 0.053101767, 0.046960227, 0.053050477,
0.06806161, 0.1583682, 0.045682646, 0.045926563, 0.055485003, 0.04383061, 0.037417695,
0.060075022, 0.10141273, 0.046610996, 0.051910233, 0.055361055, 0.04605394, 0.099481456,
0.058927406, 0.05156904, 0.03812877, 0.044451382, 0.19369243, 0.045083117, 0.04008059,
0.0465015, 0.0518239, 0.045307722, 0.061375335, 0.04829454, 0.058687754, 0.038782958,
0.040704057, 0.06811295, 0.055367634, 0.04618757, 0.042541515, 0.089932464, 0.043209754,
0.036106486, 0.03901068, 0.059575256, 0.045551628, 0.056782946, 0.041991808, 0.048580803,
0.039161537, 0.04477594, 0.058366608, 0.047800016, 0.03867231, 0.04508063, 0.047426518,
0.052057292, 0.05563748, 0.115978435, 0.044806078, 0.03879206, 0.053908758, 0.06542843,
0.0779657, 0.064926445, 0.04425009, 0.054578837, 0.052793235, 0.05706623, 0.046099808,
0.05049152, 0.06101223, 0.05451163, 0.09714546, 0.046446268, 0.038898267, 0.051093813,
0.06970259, 0.072932884, 0.07394662, 0.04474883, 0.052826297, 0.050686684, 0.05380254,
0.06268354, 0.052090637, 0.19297504, 0.049229994, 0.055149574, 0.06727119, 0.0515877,
0.04963145, 0.05944653, 0.062035173, 0.07828632, 0.058026575, 0.05522244, 0.05040911,
0.055954717, 0.026912972, 0.03333407, 0.03172337, 0.033026524, 0.035443082, 0.03133718,
0.028054407, 0.042420443, 0.042342138, 0.0643369, 0.036460545, 0.034002468, 0.040568292,
0.033098023, 0.030965397, 0.041545488, 0.04540916, 0.044669818, 0.07108528, 0.102542534,
0.043816954, 0.038525388, 0.054011937, 0.05373953, 0.059898753, 0.058397293, 0.038844734,
0.047200724, 0.049577277, 0.05633416, 0.05001304, 0.14770669, 0.051546454, 0.04364845,
0.048528295, 0.06952269, 0.0506451, 0.066974856, 0.051254004, 0.058655225, 0.045369275,
0.050301354, 0.07088424, 0.057239547, 0.04629863, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0396262, 0.04174441,
0.04336342, 0.05882854, 0.12460002, 0.04320778, 0.034674373, 0.05079347, 0.056670394,
0.06569856, 0.06281266, 0.04122294, 0.045986924, 0.047071863, 0.05236649, 0.04392101,
0.060235072, 0.04614499, 0.060710203, 0.061959006, 0.060771115, 0.044948936, 0.15307437,
0.055094533, 0.06903277, 0.053652722, 0.04390511, 0.053105246, 0.056972157, 0.057169244,
0.045546904, 0.0535263, 0.058020532, 0.045630228, 0.050761756, 0.044584986, 0.04750728,
0.043710653, 0.102203935, 0.06638903, 0.045852166, 0.05095447, 0.074833035, 0.06420726,
0.04395376, 0.0428187, 0.05003182, 0.05598178, 0.051308386, 0.05836581, 0.047566146,
0.0430313, 0.043013457, 0.09408289, 0.057205472, 0.052999847, 0.05068687, 0.062698446,
0.058697037, 0.04470974, 0.046596624, 0.042535067, 0.07380234, 0.047628682, 0.056144714,
0.056951117, 0.044764068, 0.04690096, 0.05005385, 0.0491919, 0.06443623, 0.046133783,
0.04428453, 0.046569854, 0.053230908, 0.059582923, 0.06314475, 0.072881766, 0.045674853,
0.049675878, 0.049070515, 0.060347814, 0.046263944, 0.08281878, 0.06683776, 0.0468261,
0.05160548, 0.070536196, 0.074634, 0.051320527, 0.045174763, 0.05229699, 0.06254918,
0.05128402, 0.057566125, 0.0476757, 0.046279825, 0.044852465, 0.11756564, 0.06476071,
0.0531167, 0.05378152, 0.069870465, 0.06665629, 0.048486758, 0.04859502, 0.048948925,
0.055176582, 0.048150513, 0.04740307, 0.15589903, 0.057524994, 0.056392908, 0.050805166,
0.056989137, 0.068846226, 0.07768067, 0.048051424, 0.0524525, 0.0477985, 0.045843124,
0.0537419, 0.05056493, 0.057757653, 0.05567088, 0.07057796, 0.05171803, 0.21133035,
0.0555843, 0.06726697, 0.06307487, 0.055144742, 0.052404106, 0.054602314, 0.054717872,
0.047232985, 0.06371556, 0.049076084, 0.06215441, 0.060349245, 0.06720797, 0.049359407,
0.17708467, 0.05569495, 0.07218827, 0.056366645, 0.04833369, 0.054698456, 0.057849724,
0.05604346, 0.045300495, 0.064572334, 0.04915067, 0.055269614, 0.056300037, 0.069575965,
0.04671484, 0.22290368, 0.05502385, 0.0679074, 0.051617563, 0.04618033, 0.054553553,
0.060686834, 0.054242834, 0.037536215, 0.041025132, 0.04926668, 0.044260193, 0.0446322,
0.08801994, 0.044350527, 0.05469766, 0.047999628, 0.050894354, 0.06771662, 0.05981151,
0.03951142, 0.042565346, 0.044613335, 0.04488523, 0.055328656, 0.048983052, 0.05246645,
0.050436847, 0.052747652, 0.05535992, 0.05945953, 0.0794748, 0.075258724, 0.046153773,
0.05974416, 0.097314075, 0.071228735, 0.04694809, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.046175748,
0.21817604, 0.045856044, 0.040467847, 0.04548745, 0.05677415, 0.046576455, 0.061919775,
0.048582688, 0.0534935, 0.03974743, 0.042041134, 0.066805966, 0.05741236, 0.044926908,
0.052466042, 0.060672145, 0.05260206, 0.051882826, 0.05556496, 0.054303914, 0.056570753,
0.04909226, 0.0727232, 0.071996555, 0.048746105, 0.061189532, 0.08584431, 0.07322004,
0.05160881, 0.05306589, 0.056679994, 0.05845627, 0.048108473, 0.055305593, 0.05212032,
0.04725906, 0.054681845, 0.049517173, 0.060659397, 0.0481885, 0.042425927, 0.05097443,
0.06301952, 0.13198282, 0.051870726, 0.04408217, 0.058825564, 0.03990893, 0.050933477,
0.04348954, 0.036867164, 0.03802801, 0.049953423, 0.052550375, 0.053204283, 0.043441113,
0.045389988, 0.03989177, 0.041437298, 0.04157114, 0.042486172, 0.05071886, 0.052176002,
0.066281006, 0.047275823, 0.039161917, 0.042861924, 0.04924002, 0.055569973, 0.064916916,
0.042681832, 0.04437971, 0.04468459, 0.05183609, 0.043687403, 0.042885464, 0.05077033,
0.050003055, 0.08653197, 0.042036094, 0.0355325, 0.045794085, 0.058712337, 0.057180863,
0.07163474, 0.041524697, 0.04661927, 0.046507485, 0.050102897, 0.047652707, 0.10150514,
0.047442827, 0.0405136, 0.043731067, 0.06932821, 0.046535615, 0.058280732, 0.0441687,
0.049959697, 0.04332115, 0.048510008, 0.05809934, 0.051001623, 0.04359314, 0.040438015,
0.091466, 0.04185743, 0.039162, 0.04375677, 0.05662409, 0.04348286, 0.06426035,
0.04573202, 0.048653282, 0.040517755, 0.042836178, 0.059464585, 0.050889034, 0.038574614,
0.04682413, 0.04004314, 0.055192776, 0.0431392, 0.037497737, 0.058355823, 0.08591274,
0.04413431, 0.047857817, 0.053987574, 0.04655272, 0.18151113, 0.050191794, 0.048207216,
0.03906337, 0.05413806, 0.043450966, 0.08013524, 0.044663735, 0.051249746, 0.051713765,
0.04870578, 0.04543312, 0.04963552, 0.052421793, 0.073284425, 0.049439143, 0.048598077,
0.046084754, 0.053352356, 0.049432248, 0.06570666, 0.051698025, 0.0650265, 0.06525292,
0.06951955, 0.048652023, 0.13706502, 0.055245668, 0.07206721, 0.054920457, 0.048322234,
0.0558717, 0.060109768, 0.060726162, 0.053411875, 0.057692327, 0.07323191, 0.05106275,
0.05980871, 0.050864726, 0.051053207, 0.045892343, 0.10919399, 0.06760448, 0.05619764,
0.060255382, 0.07111521, 0.0635797, 0.053247128, 0.04436881, 0.0476603, 0.05153996,
0.06165137, 0.108183995, 0.046000455, 0.038146917, 0.05483024, 0.06300051, 0.07050482,
0.068515025, 0.042338658, 0.0508946, 0.05107044, 0.056977015, 0.03893407, 0.040521964,
0.056704823, 0.041608054, 0.03836597, 0.054294787, 0.054594427, 0.04218142, 0.046639305,
0.050594643, 0.043640297, 0.100901954, 0.044230748, 0.04032573, 0.035623755, 0.053552646,
0.048246697, 0.0973888, 0.051646084, 0.06355654, 0.06783868, 0.047050744, 0.05473467,
0.058973245, 0.058841918, 0.079081655, 0.05091457, 0.050413303, 0.050413303, 0.054863885,
0.048022687, 0.047737435, 0.06546637, 0.045264006, 0.053768262, 0.045571193, 0.043451007,
0.04019573, 0.082647316, 0.055654082, 0.052410405, 0.0517232, 0.05906671, 0.051104713,
0.046087034, 0.049105633, 0.04762538, 0.06204504, 0.04833215, 0.041807856, 0.06049319,
0.108325936, 0.049692824, 0.05435019, 0.059535705, 0.048503496, 0.11018275, 0.05978131,
0.054570507, 0.040939264, 0.051912174, 0.05514186, 0.07407136, 0.055861138, 0.06241501,
0.053309437, 0.052313194, 0.04815615, 0.14440688, 0.069113694, 0.06304648, 0.065192334,
0.0712824, 0.06579972, 0.056001667, 0.04099568, 0.052823555, 0.04654207, 0.038980894,
0.045386866, 0.04660961, 0.048626214, 0.042769197, 0.063820556, 0.06412237, 0.039387513,
0.044594195, 0.0832715, 0.069182314, 0.04533409, 0.04560237, 0.041213877, 0.05882502,
0.047735665, 0.042239644, 0.06932796, 0.066878065, 0.053652555, 0.051334187, 0.06055033,
0.056152906, 0.13998513, 0.052677736, 0.04762855, 0.0446675, 0.07257404, 0.053329706,
0.14280745, 0.04472675, 0.04975825, 0.061101377, 0.05909374, 0.047897324, 0.058930725,
0.063328594, 0.06306167, 0.053304844, 0.05438828, 0.05203044, 0.057980165, 0.04403937,
0.048519175, 0.050091293, 0.06479956, 0.056370348, 0.06357165, 0.049594607, 0.13824728,
0.057358205, 0.06361312, 0.065777704, 0.054899435, 0.051516056, 0.05026566, 0.0515447,
0.036117565, 0.04486782, 0.04333763, 0.04422749, 0.040065594, 0.047716126, 0.04421442,
0.06509356, 0.052999817, 0.06312692, 0.04555793, 0.053614162, 0.055108637, 0.04290153,
0.036592554, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.057709593, 0.050230164, 0.0730198, 0.041500423, 0.046345856,
0.045959637, 0.052859146, 0.039444033, 0.075992025, 0.057352588, 0.047183264, 0.04920281,
0.055455964, 0.056103904, 0.049364854, 0.043119054, 0.040796775, 0.06379471, 0.043443564,
0.038012702, 0.072971016, 0.06667134, 0.046452034, 0.051905923, 0.05613087, 0.051998306,
0.17677672, 0.05179935, 0.04610019, 0.040739156, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.062182825,
0.05495644, 0.06371906, 0.058776025, 0.064153194, 0.054122906, 0.04862133, 0.05405478,
0.05505318, 0.061092723, 0.057845417, 0.04587447, 0.05295678, 0.061930336, 0.20168868,
0.07052042, 0.05189392, 0.083359204, 0.043654177, 0.04574039, 0.046379533, 0.061360266,
0.04061856, 0.058652416, 0.054450456, 0.048811022, 0.0509676, 0.0518814, 0.054624885,
0.048542317, 0.04553353, 0.06237256, 0.052038673, 0.055401076, 0.056667726, 0.069624744,
0.05257369, 0.17254363, 0.05760579, 0.069744796, 0.055217467, 0.050426602, 0.060544215,
0.06471712, 0.054172534, 0.04731093, 0.056943893, 0.049835827, 0.05557631, 0.0512116,
0.052165307, 0.055274256, 0.05471769, 0.078481756, 0.06963844, 0.051627796, 0.06094841,
0.12808178, 0.06957452, 0.045438252, 0.046581678, 0.06744227, 0.04807985, 0.058919877,
0.058929708, 0.062097244, 0.049109932, 0.19525157, 0.059491813, 0.06805807, 0.04917421,
0.045965176, 0.0609194, 0.06951925, 0.05083516, 0.040487494, 0.03627164, 0.051424764,
0.038385313, 0.034682564, 0.058575217, 0.06138892, 0.043494985, 0.042630725, 0.049133144,
0.04440707, 0.10081782, 0.04487004, 0.04070307, 0.03627219, 0.06076564, 0.055943694,
0.16831453, 0.04779292, 0.060519755, 0.065167546, 0.050591398, 0.05463384, 0.064613745,
0.06797928, 0.0733957, 0.055387996, 0.060153123, 0.052981544, 0.06175929, 0.052569825,
0.052511945, 0.055871405, 0.055019084, 0.0585791, 0.054556847, 0.044869628, 0.053505674,
0.048093427, 0.062846504, 0.054808613, 0.04370816, 0.048649263, 0.0567099, 0.14240964,
0.04135152, 0.04476945, 0.054043084, 0.049145773, 0.049718317, 0.1068008, 0.04890353,
0.06056939, 0.052345328, 0.056675248, 0.07635148, 0.07109825, 0.044835817, 0.046346832,
0.047306076, 0.042253442, 0.042463686, 0.05714927, 0.045877654, 0.044110123, 0.17384434,
0.054271404, 0.056419637, 0.047892153, 0.05573374, 0.06912534, 0.0857517, 0.044359878,
0.046596423, 0.04679445, 0.059805788, 0.053653162, 0.119365446, 0.055313144, 0.06376236,
0.072482556, 0.05255472, 0.0565172, 0.06165575, 0.06170065, 0.08907164, 0.057340935,
0.053881045, 0.053195883, 0.057619557, 0.04161937, 0.04954451, 0.043725554, 0.07579245,
0.104831435, 0.0467608, 0.03995084, 0.058053534, 0.054164402, 0.05886487, 0.05308916,
0.040004697, 0.04823834, 0.05230427, 0.060904365, 0.03193043, 0.032695882, 0.039618004,
0.03576197, 0.04029084, 0.04099951, 0.030729424, 0.03769292, 0.03757441, 0.04796456,
0.06586773, 0.04044634, 0.034533843, 0.031041773, 0.03612433, 0.036427364, 0.04546264,
0.04017887, 0.05532237, 0.1008896, 0.03941739, 0.034462802, 0.051119182, 0.051405888,
0.055397626, 0.049623746, 0.03574637, 0.043990042, 0.047909297, 0.052899722, 0.04118108,
0.040177908, 0.04508069, 0.049325645, 0.06948309, 0.04240398, 0.03550139, 0.047246452,
0.05177363, 0.062443294, 0.06263091, 0.043560944, 0.045878943, 0.04418713, 0.050085638,
0.060222983, 0.05127337, 0.063474186, 0.05203558, 0.05677645, 0.055348907, 0.048086673,
0.05520311, 0.051020153, 0.0628126, 0.054289762, 0.045244046, 0.050365333, 0.056879673,
0.1555686, 0.060201447, 0.051514253, 0.061503008, 0.05824048, 0.06013386, 0.058308184,
0.050471976, 0.052054606, 0.053210948, 0.063055985, 0.055617623, 0.048360012, 0.049661044,
0.059088327, 0.13487458, 0.03703606, 0.038138825, 0.05129407, 0.04203046, 0.03817468,
0.0984931, 0.051404532, 0.050182786, 0.04358356, 0.046785418, 0.050208148, 0.098781794,
0.041689057, 0.041631054, 0.038072214, 0.030981725, 0.034756657, 0.038104884, 0.03454555,
0.039324675, 0.039076563, 0.03136391, 0.04286142, 0.04161615, 0.051782835, 0.050392,
0.038856976, 0.04171525, 0.034145754, 0.03374763, 0.028170303, 0.031928867, 0.03674692,
0.037101, 0.036752816, 0.037331935, 0.030255688, 0.04091177, 0.04360855, 0.052174333,
0.048418675, 0.039267596, 0.036655262, 0.031432696, 0.031195067, 0.04818681, 0.04578815,
0.062116895, 0.04709276, 0.047136676, 0.19405562, 0.056965463, 0.063006826, 0.04999874,
0.05576378, 0.07386823, 0.06951344, 0.048564106, 0.050681777, 0.053159613, 0.0462399,
0.06183494, 0.055264104, 0.042803608, 0.04406097, 0.05355841, 0.051738936, 0.058435217,
0.07062872, 0.077125, 0.04178745, 0.049490258, 0.090172574, 0.06937667, 0.047323983,
0.050726242, 0.1824023, 0.049719017, 0.04475679, 0.04811724, 0.06348282, 0.05211586,
0.07019263, 0.051683355, 0.059047773, 0.044798456, 0.047877762, 0.07417191, 0.062629625,
0.04818632, 0.035738587, 0.041289948, 0.04615873, 0.043839324, 0.04048414, 0.046570405,
0.041938145, 0.05447672, 0.054856353, 0.059580453, 0.05000236, 0.050377768, 0.050948545,
0.040364686, 0.035114154, 0.037479438, 0.04882324, 0.04032138, 0.036702372, 0.037798885,
0.047133584, 0.046364583, 0.04442147, 0.051031232, 0.061348937, 0.03523656, 0.044733007,
0.071255304, 0.052309968, 0.03549108, 0.052232873, 0.057341207, 0.060237356, 0.055697992,
0.059155203, 0.06565075, 0.050506685, 0.06327952, 0.05491748, 0.0937634, 0.06443662,
0.054464214, 0.062921844, 0.05588702, 0.065612875, 0.05383082, 0.11494587, 0.04638552,
0.040733363, 0.041515265, 0.052192356, 0.051115196, 0.052843817, 0.04481267, 0.050359964,
0.03899172, 0.041825198, 0.054429483, 0.063968986, 0.04698477, 0.03769983, 0.03682566,
0.048254203, 0.04026457, 0.0449802, 0.055741683, 0.036303088, 0.047821894, 0.041678272,
0.056055047, 0.10689466, 0.04733503, 0.03850291, 0.036972895, 0.044146996, 0.048411813,
0.055108756, 0.05518338, 0.058248825, 0.10734896, 0.047352094, 0.04283086, 0.059032887,
0.06771126, 0.07536119, 0.05922103, 0.04741371, 0.060553603, 0.05859523, 0.055202305,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.051023707, 0.060463626, 0.048463386, 0.05058996, 0.052155007,
0.0488787, 0.057012137, 0.05508266, 0.06969417, 0.0668859, 0.045616653, 0.05129674,
0.080277056, 0.13794835, 0.054924812, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.06968913, 0.04738072, 0.12935852,
0.042240877, 0.045903955, 0.052022494, 0.059081297, 0.044526994, 0.056681715, 0.061479162,
0.055102132, 0.049432136, 0.05117198, 0.052449234, 0.054508474, 0.047629222, 0.052472807,
0.053171918, 0.049856037, 0.05184695, 0.046578232, 0.052690685, 0.07066903, 0.06875327,
0.137734, 0.05018269, 0.056401126, 0.07156067, 0.063610926, 0.051677015, 0.063870326,
0.04995376, 0.060342014, 0.044614803, 0.04468676, 0.05297895, 0.0655111, 0.041588247,
0.052373458, 0.06148366, 0.048928205, 0.05615883, 0.056777377, 0.057901036, 0.05428804,
0.050037295, 0.18465285, 0.046435915, 0.04465666, 0.04841673, 0.05541266, 0.049880173,
0.058962986, 0.051202774, 0.05643005, 0.043439873, 0.044866957, 0.063562684, 0.060867313,
0.047835942, 0.044976592, 0.04465002, 0.06172993, 0.050923437, 0.04878966, 0.085005365,
0.0562943, 0.052004118, 0.054244682, 0.063895464, 0.073499665, 0.11712958, 0.049728073,
0.046375073, 0.046797268, 0.047339637, 0.055188168, 0.055975985, 0.064434044, 0.124244705,
0.049077526, 0.04231236, 0.059038986, 0.06808793, 0.080088355, 0.062430818, 0.047916677,
0.062241137, 0.058305733, 0.058277532, 0.03878182, 0.04278922, 0.04956708, 0.049855407,
0.051372156, 0.043232128, 0.04259705, 0.051600166, 0.15613627, 0.07313153, 0.049216747,
0.05089936, 0.06508164, 0.068479456, 0.049177103, 0.05805655, 0.053118806, 0.06346092,
0.054040767, 0.04953316, 0.20175113, 0.059758946, 0.06664938, 0.04951841, 0.056280002,
0.06381354, 0.056914546, 0.05075494, 0.056280002, 0.060068924, 0.061184783, 0.04574569,
0.05675528, 0.053621143, 0.061453555, 0.044310156, 0.047028802, 0.047288436, 0.05372332,
0.056656085, 0.05764723, 0.04437422, 0.04877249, 0.058418993, 0.11523428, 0.04436648,
0.058737855, 0.04881388, 0.049994398, 0.051027995, 0.06733031, 0.045846894, 0.26697436,
0.05168494, 0.061542116, 0.04955099, 0.044509754, 0.052669954, 0.058143526, 0.04880656,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.052218698, 0.05389726, 0.059202254, 0.051050346, 0.051948432, 0.056187533,
0.057490807, 0.053736888, 0.053531975, 0.120721616, 0.060190484, 0.05859767, 0.057962377,
0.06139237, 0.06702156, 0.056371484, 0.03813074, 0.064333394, 0.03532657, 0.03673675,
0.044657394, 0.052636843, 0.036213167, 0.04155718, 0.055565704, 0.042073883, 0.041609753,
0.044717822, 0.04438382, 0.05421589, 0.055774603, 0.16604133, 0.051841535, 0.047166068,
0.048948716, 0.055760503, 0.055956285, 0.063879535, 0.05392291, 0.06304043, 0.043463666,
0.046207793, 0.06669008, 0.07246179, 0.052194245, 0.046859335, 0.04671808, 0.06280928,
0.049544025, 0.050536215, 0.08235504, 0.055626433, 0.06090007, 0.051631417, 0.07134912,
0.07784827, 0.093805864, 0.051264845, 0.04644432, 0.05079119, 0.04368274, 0.052581657,
0.054083124, 0.05278337, 0.07770434, 0.04506212, 0.039893493, 0.055019, 0.06064382,
0.07626332, 0.053027786, 0.044023782, 0.059526507, 0.05242605, 0.051368646, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.06540515, 0.058375992, 0.06500557, 0.05599684, 0.050793342, 0.18389171, 0.056757912,
0.07247125, 0.04697979, 0.057607267, 0.056844898, 0.05041682, 0.050123945, 0.056919776,
0.060644597, 0.054191444, 0.060898785, 0.054607008, 0.053671207, 0.056725908, 0.052203633,
0.06558755, 0.05731334, 0.07059156, 0.07054385, 0.050115913, 0.05468733, 0.06954623,
0.14153676, 0.074838564, 0.047656685, 0.06589428, 0.051180057, 0.058154948, 0.06334289,
0.06859567, 0.04645442, 0.16386335, 0.054466706, 0.06778475, 0.051550854, 0.046106864,
0.053424537, 0.060304597, 0.055699393, 0.06912415, 0.048379693, 0.095825806, 0.03791324,
0.041101016, 0.054592907, 0.051437017, 0.045327734, 0.046744216, 0.051375035, 0.047121573,
0.044274885, 0.04473065, 0.046300117, 0.048904814, 0.057757284, 0.05071041, 0.059682675,
0.048033513, 0.047294214, 0.057225533, 0.054515194, 0.04959514, 0.042989135, 0.07186029,
0.051671263, 0.047605127, 0.047787312, 0.051049516, 0.06028664, 0.04496707, 0.06146168,
0.05183533, 0.049245313, 0.050052196, 0.043835957, 0.05153901, 0.05087497, 0.086296655,
0.08402056, 0.0467599, 0.048663296, 0.12706769, 0.08104093, 0.0455576, 0.054168414,
0.10030424, 0.045910217, 0.035340723, 0.037229806, 0.05031469, 0.051580373, 0.051179588,
0.041518893, 0.050090373, 0.035819836, 0.03839212, 0.050649215, 0.05766861, 0.04433655,
0.049129825, 0.061981216, 0.051093858, 0.052629102, 0.05015598, 0.04418351, 0.055985805,
0.05302388, 0.07334942, 0.07348574, 0.04832003, 0.052685883, 0.15396744, 0.07563413,
0.044804186, 0.06030582, 0.059621938, 0.052115902, 0.050787825, 0.0495679, 0.051412743,
0.069780946, 0.053114247, 0.06404446, 0.06864551, 0.048064657, 0.05585245, 0.07092684,
0.12843052, 0.060906116, 0.040035542, 0.05058972, 0.040553007, 0.04375669, 0.04582311,
0.042175427, 0.04492085, 0.061206043, 0.05831029, 0.064770736, 0.037653647, 0.041585825,
0.0586083, 0.08551774, 0.049033128, 0.04789176, 0.057607956, 0.05290267, 0.0560685,
0.05731211, 0.06849211, 0.050016206, 0.17795675, 0.056237213, 0.069133446, 0.05745737,
0.050117556, 0.05496891, 0.05867086, 0.0535306, 0.039573185, 0.047557272, 0.04062571,
0.045414507, 0.046523422, 0.04009277, 0.04417313, 0.05678318, 0.06033048, 0.063791595,
0.038850423, 0.040694796, 0.06300599, 0.09167496, 0.045564994, 0.04660337, 0.09482183,
0.04115995, 0.035690885, 0.038635653, 0.042812724, 0.04549442, 0.045671783, 0.041819695,
0.047527418, 0.032464355, 0.034560643, 0.047770526, 0.06120822, 0.046352763, 0.044610254,
0.05407254, 0.048889905, 0.03814615, 0.038123015, 0.042616885, 0.056551836, 0.04177896,
0.059591763, 0.08189015, 0.037174534, 0.043855667, 0.0821678, 0.07630913, 0.043245897,
0.047118995, 0.052488867, 0.053277012, 0.054230966, 0.050446093, 0.048224606, 0.05076215,
0.059337724, 0.05741585, 0.07994627, 0.050562464, 0.052294005, 0.054638945, 0.054668438,
0.051084228, 0.042277463, 0.05072779, 0.04549125, 0.039159294, 0.041103344, 0.03848201,
0.049346462, 0.044877652, 0.05408626, 0.06589254, 0.035666, 0.041086715, 0.087655246,
0.07750027, 0.041608498, 0.04036919, 0.04277981, 0.046097662, 0.040670738, 0.043843098,
0.043868028, 0.043442246, 0.04913941, 0.04649694, 0.07743506, 0.04190623, 0.040525775,
0.049007136, 0.04767981, 0.045702063, 0.06365727, 0.06189258, 0.06169788, 0.05191021,
0.048038714, 0.19585231, 0.06242414, 0.0700354, 0.047740728, 0.055258818, 0.058772128,
0.058394343, 0.05255034, 0.058274448, 0.053500693, 0.036310844, 0.03430577, 0.04553723,
0.046665557, 0.05422473, 0.05514761, 0.034182083, 0.048374675, 0.04210797, 0.050596476,
0.09668431, 0.043353103, 0.036794763, 0.038130734, 0.05148368, 0.061474077, 0.05287041,
0.20507562, 0.045940287, 0.054845203, 0.055919338, 0.051425423, 0.052117117, 0.06303887,
0.066272676, 0.06610009, 0.051334593, 0.057620227, 0.055017866, 0.060948208, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.041312333, 0.04785891, 0.056421947, 0.05020225, 0.053234592, 0.0413084, 0.044210408,
0.052035294, 0.20365225, 0.080612734, 0.048782174, 0.051394124, 0.075941026, 0.06356483,
0.047034904, 0.047475845, 0.04505314, 0.04398592, 0.045817528, 0.04424673, 0.04249201,
0.052915007, 0.045776453, 0.055551227, 0.053838883, 0.044060852, 0.04600682, 0.05838774,
0.09122769, 0.052418746, 0.041522257, 0.040519692, 0.056000195, 0.059720505, 0.05538103,
0.06284296, 0.044231165, 0.057410307, 0.052635096, 0.053151615, 0.08264223, 0.06604573,
0.04472928, 0.04189184, 0.044737726, 0.045041244, 0.12148905, 0.041775204, 0.04127806,
0.04295369, 0.047133982, 0.04451087, 0.06405894, 0.044998363, 0.054721847, 0.03865403,
0.038754664, 0.055437196, 0.05863086, 0.04658631, 0.053630713, 0.17747098, 0.050863158,
0.047017574, 0.05101109, 0.05500046, 0.05392771, 0.07092529, 0.05502531, 0.06908275,
0.04291036, 0.045128953, 0.072210714, 0.07055546, 0.05332926, 0.040847152, 0.050450344,
0.04811514, 0.056867298, 0.08756563, 0.04360999, 0.03918384, 0.052587084, 0.06692518,
0.07078028, 0.052967113, 0.04131057, 0.05851592, 0.055703163, 0.05372824, 0.04177749,
0.048920803, 0.05135556, 0.049116574, 0.078178935, 0.041226856, 0.037388157, 0.050016154,
0.06466226, 0.06896084, 0.058557812, 0.04094695, 0.05600538, 0.049936615, 0.05179425,
0.07101815, 0.05007963, 0.18806168, 0.044849016, 0.045365006, 0.058203768, 0.069504626,
0.04441492, 0.062557064, 0.06338836, 0.059594084, 0.05696104, 0.05396162, 0.05329977,
0.054078244, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.050315306, 0.05520363, 0.060865037, 0.058239445, 0.112141214,
0.048338067, 0.043281313, 0.057902303, 0.07113476, 0.081844255, 0.065092385, 0.049116623,
0.06273766, 0.058126297, 0.060621288, 0.05611127, 0.17092475, 0.051017225, 0.04527077,
0.049414948, 0.054315835, 0.053409763, 0.06348288, 0.052114498, 0.06284484, 0.043221246,
0.04540015, 0.066196844, 0.07003711, 0.05316261, 0.034096725, 0.03471332, 0.050985817,
0.03750872, 0.033679537, 0.0581628, 0.053897366, 0.04076535, 0.042562477, 0.04675881,
0.042196143, 0.13498105, 0.040984187, 0.037118856, 0.033658758, 0.04291986, 0.053868376,
0.05124897, 0.059652783, 0.10968191, 0.045080457, 0.039988272, 0.056806415, 0.06676795,
0.07397247, 0.05616072, 0.044526197, 0.060453612, 0.058856644, 0.05720469, 0.036167264,
0.049027175, 0.042199895, 0.046384037, 0.049840022, 0.04745852, 0.037263718, 0.111662686,
0.05131221, 0.05892768, 0.04091132, 0.03685045, 0.046238124, 0.049888745, 0.039451204,
0.03456593, 0.039562356, 0.04698241, 0.05093821, 0.062908076, 0.040223338, 0.0348749,
0.04982669, 0.14581734, 0.07193889, 0.049645323, 0.041180998, 0.055971865, 0.054019496,
0.04818396, 0.048007496, 0.052992094, 0.064174704, 0.05142662, 0.05296111, 0.046337582,
0.050149314, 0.049616247, 0.15208234, 0.07876955, 0.053245362, 0.055961326, 0.07469587,
0.07040143, 0.049187366, 0.07758165, 0.04441968, 0.102074035, 0.040490143, 0.041052304,
0.0542495, 0.055264097, 0.04252941, 0.047691602, 0.04974955, 0.04799441, 0.04324791,
0.04481602, 0.047516137, 0.0483214, 0.03882926, 0.045235373, 0.052682776, 0.046034034,
0.051816512, 0.040737487, 0.043320626, 0.046196032, 0.13782966, 0.06577663, 0.046471365,
0.04759206, 0.08253718, 0.06904656, 0.04594727, 0.041658733, 0.04952906, 0.058954418,
0.05246162, 0.05989419, 0.04599574, 0.044678833, 0.052706927, 0.2048518, 0.07365221,
0.053947896, 0.050359424, 0.07801173, 0.07158452, 0.05311429, 0.05776682, 0.057250705,
0.058856837, 0.05072617, 0.04706454, 0.25255352, 0.054136075, 0.065679446, 0.04445994,
0.05322008, 0.053757213, 0.04919658, 0.04760543, 0.05300097, 0.05472568, 0.04679929,
0.06334654, 0.04886424, 0.054344814, 0.05681043, 0.062832125, 0.04627963, 0.16772519,
0.05373166, 0.06846987, 0.049550243, 0.045176495, 0.05693039, 0.06168191, 0.05378628,
0.04587981, 0.062043585, 0.048833303, 0.05277507, 0.05425543, 0.062456787, 0.046327967,
0.18877149, 0.05448942, 0.070680805, 0.0509517, 0.04463499, 0.05442755, 0.05994709,
0.05462628, 0.047827538, 0.05571348, 0.05179434, 0.06086675, 0.05779882, 0.06719159,
0.050056133, 0.15744424, 0.054357108, 0.06403369, 0.056179065, 0.048161082, 0.05441845,
0.058835875, 0.05325847, 0.053199295, 0.05663219, 0.05494027, 0.050307665, 0.04757773,
0.14916849, 0.046058692, 0.0695457, 0.043292884, 0.050439503, 0.050778612, 0.04348717,
0.044492435, 0.05218429, 0.057179842, 0.067956954, 0.037584446, 0.06097172, 0.033325743,
0.0332051, 0.0426083, 0.07072497, 0.033705473, 0.03891331, 0.05028111, 0.039094243,
0.045449134, 0.043835342, 0.045205727, 0.04566883, 0.044715095, 0.041256923, 0.05611146,
0.048467305, 0.054488212, 0.06634122, 0.041154046, 0.054877147, 0.048253294, 0.061749134,
0.18306607, 0.053427666, 0.04399568, 0.04320983, 0.053314038, 0.048420984, 0.05106509,
0.0461492, 0.049644515, 0.05012048, 0.05640874, 0.048084725, 0.05558761, 0.050477553,
0.05747934, 0.05050163, 0.05162209, 0.048667077, 0.06090726, 0.06302001, 0.05385342,
0.16828285, 0.04908009, 0.047266785, 0.048157696, 0.05589818, 0.0554717, 0.062913634,
0.052505493, 0.06074642, 0.043502867, 0.047332127, 0.06830117, 0.07773469, 0.052302375,
0.05789699, 0.03927085, 0.06236191, 0.034904525, 0.03692327, 0.044483807, 0.049391426,
0.036094997, 0.043684803, 0.055479314, 0.042193133, 0.04257658, 0.046931233, 0.045084152,
0.051253397, 0.060376074, 0.060142763, 0.054561097, 0.05315174, 0.054529123, 0.05529656,
0.07505149, 0.055793557, 0.066414535, 0.06627186, 0.05248887, 0.059364658, 0.072725385,
0.14409389, 0.06827532, 0.045648985, 0.05553593, 0.05559203, 0.055546284, 0.11034642,
0.045189172, 0.040942304, 0.054178864, 0.06867274, 0.079823464, 0.059112728, 0.045501415,
0.0610254, 0.055613812, 0.05789378, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.043360233, 0.05522185, 0.055484116,
0.05613555, 0.08505832, 0.045833003, 0.041784596, 0.05541512, 0.070025, 0.072598405,
0.056950875, 0.046081122, 0.06325196, 0.05819733, 0.05644613, 0.051197324, 0.1304914,
0.047788553, 0.040855184, 0.041107997, 0.052163523, 0.05393766, 0.061002914, 0.046609912,
0.056858838, 0.037712585, 0.04291997, 0.06461336, 0.062982395, 0.045127485, 0.055928133,
0.17599045, 0.050809097, 0.04642908, 0.04950369, 0.056157563, 0.0532699, 0.059756983,
0.054325923, 0.05942571, 0.04404154, 0.047278762, 0.06715007, 0.07336957, 0.054270234,
0.031053782, 0.028557554, 0.04015347, 0.031453118, 0.03375191, 0.04515556, 0.032045726,
0.033576958, 0.033058617, 0.04350164, 0.061970487, 0.047318038, 0.030169347, 0.029066127,
0.03322245, 0.06637206, 0.056047697, 0.15689762, 0.046245895, 0.054084808, 0.0707566,
0.058182187, 0.057267547, 0.06436623, 0.0758479, 0.062401455, 0.056136906, 0.059085153,
0.05526744, 0.06104051, 0.047107063, 0.054343235, 0.05324094, 0.05739918, 0.05437545,
0.06824304, 0.05215363, 0.12487143, 0.055561706, 0.06533319, 0.063768305, 0.054201756,
0.053372413, 0.05431297, 0.05192384, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.04325636, 0.05319801, 0.05492287,
0.050611135, 0.07841031, 0.04148344, 0.03981797, 0.0511493, 0.06855605, 0.07281897,
0.056238748, 0.044273086, 0.060259487, 0.053778037, 0.053610414, 0.0392019, 0.039002813,
0.050222885, 0.043055136, 0.044294853, 0.057992395, 0.044405922, 0.045252834, 0.04415086,
0.055456553, 0.05869253, 0.07357852, 0.04020056, 0.03838445, 0.039917257, 0.06788893,
0.060285, 0.15334289, 0.045888975, 0.055797357, 0.07362749, 0.054053098, 0.05709723,
0.06589005, 0.07208538, 0.06280517, 0.056302123, 0.060763247, 0.054511603, 0.059661463,
0.06139039, 0.055910803, 0.06670209, 0.057157718, 0.052336145, 0.17466871, 0.057805452,
0.06992394, 0.050998602, 0.058196694, 0.064635225, 0.054636955, 0.05157335, 0.05804905,
0.06601487, 0.04373858, 0.05969819, 0.047670502, 0.055190448, 0.05638841, 0.06436326,
0.04581293, 0.2550898, 0.053190403, 0.06474144, 0.04683441, 0.044607285, 0.052456558,
0.05945654, 0.050761245, 0.04654485, 0.051680736, 0.06409308, 0.051604632, 0.054619752,
0.045264937, 0.04891352, 0.049866572, 0.17866874, 0.08567344, 0.05200664, 0.05622853,
0.07925133, 0.071366586, 0.05224016, 0.03716822, 0.035372883, 0.05175989, 0.041130897,
0.043831993, 0.05612188, 0.035964888, 0.04454301, 0.041763783, 0.05142309, 0.11431334,
0.05038656, 0.037063416, 0.0349121, 0.040916014, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.054588646, 0.04215028,
0.058846332, 0.033574495, 0.03363172, 0.044896323, 0.06431516, 0.03505154, 0.042431097,
0.051538542, 0.03665032, 0.043514263, 0.050688945, 0.05067169, 0.04598105, 0.039601672,
0.03503091, 0.04802482, 0.053084537, 0.058230493, 0.055890217, 0.036862135, 0.048859317,
0.048251998, 0.050555494, 0.2016272, 0.049653534, 0.03961089, 0.039550666, 0.05151496,
0.061642893, 0.05560566, 0.11712245, 0.047632005, 0.06250155, 0.06387715, 0.049551725,
0.057368223, 0.06543747, 0.06750939, 0.07061269, 0.051433474, 0.058248095, 0.053825255,
0.05973597, 0.049787838, 0.061280407, 0.055595364, 0.056394048, 0.058459662, 0.06934031,
0.05106446, 0.18251693, 0.05852387, 0.07075933, 0.056877296, 0.050666854, 0.059732318,
0.062643476, 0.056357816, 0.049214564, 0.055545073, 0.056289114, 0.050564155, 0.053941756,
0.054293785, 0.053792343, 0.061712686, 0.05635663, 0.10221706, 0.054726075, 0.055225614,
0.061830796, 0.060401317, 0.06115121, 0.06257584, 0.066145085, 0.060601756, 0.052588496,
0.050540224, 0.18377136, 0.060287055, 0.076483436, 0.049388506, 0.05731015, 0.058621656,
0.058549743, 0.05254702, 0.0566486, 0.053941082, 0.029739978, 0.037283126, 0.039871726,
0.044760074, 0.04968416, 0.0375479, 0.03218595, 0.046235498, 0.11528681, 0.063727185,
0.0413464, 0.036594562, 0.04633872, 0.047682147, 0.040207606, 0.043750443, 0.03641967,
0.06328436, 0.045839615, 0.048857197, 0.055558886, 0.038311064, 0.04156805, 0.046839148,
0.048164614, 0.20021163, 0.05362314, 0.041327048, 0.039283045, 0.04626268, 0.06463967,
0.05353089, 0.0648538, 0.049354278, 0.04722777, 0.13012327, 0.057569843, 0.056393582,
0.04612027, 0.055435415, 0.05470308, 0.04942775, 0.04982075, 0.051542, 0.057850808,
0.04106169, 0.08927993, 0.03774478, 0.040554177, 0.04266502, 0.043059673, 0.04137761,
0.050576556, 0.045290895, 0.049401082, 0.038972232, 0.040003046, 0.055728916, 0.051908396,
0.03857324, 0.046059173, 0.053370096, 0.046481714, 0.04711354, 0.044505313, 0.047101032,
0.0608527, 0.053369522, 0.06886603, 0.069571964, 0.04274638, 0.048443012, 0.0832284,
0.120200366, 0.050439194, 0.04430444, 0.05368738, 0.056936476, 0.050487168, 0.050525934,
0.04410468, 0.049506657, 0.051178273, 0.15616104, 0.08254562, 0.049058698, 0.052187417,
0.0837476, 0.07365069, 0.046007115, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.06452577, 0.03840986, 0.059909552,
0.034670755, 0.03626543, 0.041948035, 0.054727238, 0.034358326, 0.04217386, 0.05338188,
0.04095955, 0.042510152, 0.04571701, 0.04579417, 0.05317883, 0.041550893, 0.056543197,
0.042961154, 0.043398988, 0.045637187, 0.054432914, 0.039633874, 0.09362173, 0.045291223,
0.053025912, 0.04342148, 0.0406764, 0.047185395, 0.049607884, 0.044197526, 0.030873947,
0.031410974, 0.037013374, 0.035028927, 0.040373415, 0.04282503, 0.028456792, 0.041709837,
0.032939624, 0.04518616, 0.04937407, 0.03261542, 0.0322568, 0.034909755, 0.04968559,
0.046500884, 0.04644055, 0.08158825, 0.04303947, 0.05411537, 0.061234046, 0.04106113,
0.04947832, 0.05626943, 0.05830551, 0.061177146, 0.048965372, 0.047114182, 0.04637085,
0.049308855, 0.04992016, 0.050982203, 0.04986, 0.052413683, 0.05434845, 0.05038172,
0.054423112, 0.052841004, 0.067337774, 0.062755965, 0.048192456, 0.052475054, 0.059219703,
0.0905131, 0.08668404, 0.05720817, 0.06851372, 0.055227965, 0.05227509, 0.051247254,
0.1297725, 0.055566017, 0.076264955, 0.05057226, 0.05578281, 0.060216833, 0.055392355,
0.0521013, 0.058491353, 0.055210244, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.066386744, 0.057170205, 0.17340179,
0.045449935, 0.05398872, 0.070821926, 0.054468267, 0.054049235, 0.06563459, 0.067561544,
0.06403096, 0.055853136, 0.06033029, 0.05383832, 0.057014354, 0.04263149, 0.049803592,
0.05094425, 0.048077963, 0.071857765, 0.041118797, 0.038102362, 0.052402858, 0.061221376,
0.06867732, 0.050347038, 0.04143389, 0.054850254, 0.05136479, 0.04936296, 0.046650942,
0.05785374, 0.05561851, 0.051641077, 0.055097412, 0.051253226, 0.051719822, 0.068886444,
0.06567749, 0.14875403, 0.052696478, 0.05881198, 0.07708109, 0.06578212, 0.05712861,
0.03981691, 0.049729127, 0.05334724, 0.051244434, 0.07152032, 0.043724608, 0.038590334,
0.050175812, 0.060470156, 0.06078012, 0.053902593, 0.043075807, 0.05343458, 0.048845496,
0.049980555, 0.04542793, 0.050779276, 0.05631834, 0.061093356, 0.13481379, 0.046568118,
0.0394607, 0.054841787, 0.07036223, 0.0748197, 0.0643077, 0.04488462, 0.05694127,
0.05491281, 0.057931863, 0.036084916, 0.040940184, 0.037547886, 0.043172207, 0.042372223,
0.037222758, 0.043244738, 0.047717616, 0.062177625, 0.06051255, 0.034684267, 0.038475346,
0.059030388, 0.08388043, 0.043521818, 0.06261514, 0.052488, 0.13051516, 0.040108122,
0.041950542, 0.0645727, 0.057696853, 0.046545077, 0.051286947, 0.057790898, 0.050119977,
0.050885372, 0.04827003, 0.048334252, 0.048060816, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.059899587, 0.050165657,
0.05962667, 0.05850135, 0.065328695, 0.050506607, 0.05135631, 0.05197042, 0.05570339,
0.0600501, 0.057081643, 0.04613304, 0.052173194, 0.06353107, 0.18531606, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.026064262, 0.033337753, 0.03033326, 0.034602236, 0.03314173, 0.031568166, 0.029354183,
0.043429095, 0.05260029, 0.059697125, 0.030577367, 0.031030498, 0.044264555, 0.045819797,
0.028221134, 0.037259273, 0.03853916, 0.04115583, 0.039565668, 0.039375957, 0.03767071,
0.04214285, 0.04783295, 0.047048613, 0.0755091, 0.03592051, 0.040243633, 0.04739426,
0.048657868, 0.039452527, 0.05702271, 0.051642563, 0.06080138, 0.047231533, 0.045033317,
0.2401076, 0.061096266, 0.063998125, 0.046194058, 0.05170041, 0.05792387, 0.061365217,
0.04873796, 0.052930214, 0.05421479, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.04670639, 0.14190753, 0.043009944,
0.043487802, 0.046892107, 0.0492796, 0.045990158, 0.06261176, 0.048257627, 0.05360123,
0.03982049, 0.04127203, 0.0617048, 0.057957795, 0.04399844, 0.04486775, 0.042226247,
0.05136961, 0.04050208, 0.04251274, 0.04543878, 0.047984395, 0.04383539, 0.04185308,
0.08648555, 0.049867567, 0.04424805, 0.044331703, 0.045281608, 0.054224726, 0.061125264,
0.03974188, 0.059847478, 0.033513825, 0.03220186, 0.048720084, 0.06973768, 0.035477545,
0.03882893, 0.051065885, 0.038453937, 0.046059217, 0.043815166, 0.044358816, 0.045582835,
0.045220237, 0.05341781, 0.05907584, 0.050872747, 0.05370627, 0.043662515, 0.04717385,
0.048340324, 0.109686494, 0.08070182, 0.05031085, 0.053422064, 0.07407159, 0.06603588,
0.049825944, 0.04232, 0.16257443, 0.04182479, 0.04149957, 0.04816895, 0.047570687,
0.04414718, 0.056775358, 0.05344961, 0.054165084, 0.0374632, 0.03963008, 0.0666573,
0.060596693, 0.041547526, 0.054329935, 0.05043255, 0.05953863, 0.04975266, 0.045419633,
0.060818136, 0.0823809, 0.051185105, 0.059699535, 0.06457486, 0.052190103, 0.13992459,
0.0662188, 0.06015053, 0.044680882, 0.044476412, 0.042779867, 0.049917612, 0.06945031,
0.12534587, 0.04363072, 0.038278926, 0.049062096, 0.05821835, 0.05885594, 0.058574427,
0.04355665, 0.048116423, 0.04668863, 0.050896358, 0.038005713, 0.047642328, 0.049311716,
0.04046424, 0.04155461, 0.03685738, 0.045107763, 0.04197328, 0.09565184, 0.06648563,
0.041414544, 0.044237465, 0.06905449, 0.062229283, 0.039157424, 0.045459066, 0.051042594,
0.05211077, 0.04915508, 0.04846219, 0.13339473, 0.054563154, 0.06001606, 0.050906636,
0.055724062, 0.064392366, 0.07712741, 0.048835184, 0.051054284, 0.045301132, 0.051349718,
0.045658156, 0.044221025, 0.04439078, 0.045845054, 0.04166426, 0.052827593, 0.042764917,
0.05390164, 0.054021593, 0.044785805, 0.04912124, 0.053583004, 0.07976952, 0.054482415,
0.04376103, 0.060506996, 0.049484495, 0.047043186, 0.05118631, 0.07174184, 0.04705344,
0.1206561, 0.0523742, 0.06105934, 0.0516274, 0.04792972, 0.055039745, 0.05879721,
0.050739616, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.052457735, 0.05025572, 0.05872719, 0.044455502, 0.045115188,
0.05116927, 0.055626858, 0.056728486, 0.05789827, 0.07354885, 0.04664199, 0.05183293,
0.056199633, 0.060673676, 0.044565108, 0.051391717, 0.06940836, 0.053827032, 0.053775433,
0.05519069, 0.050020237, 0.05967308, 0.056536507, 0.080655225, 0.08138042, 0.051987085,
0.06025206, 0.11475391, 0.08591123, 0.05177342, 0.049517635, 0.26622215, 0.048217304,
0.041613795, 0.04513884, 0.05458401, 0.050857887, 0.05684523, 0.051635433, 0.058036678,
0.039777357, 0.044929307, 0.07725214, 0.06273767, 0.044991087, 0.05505461, 0.04868238,
0.06035106, 0.0556956, 0.053973358, 0.071185656, 0.06325852, 0.05921621, 0.065293744,
0.07210585, 0.0740026, 0.13175574, 0.06257471, 0.058091834, 0.05613928, 0.0457444,
0.046199944, 0.047584087, 0.06492155, 0.09237982, 0.04538903, 0.04028427, 0.04802289,
0.059836105, 0.052981302, 0.052705117, 0.046218257, 0.05378805, 0.053546954, 0.046014294,
0.038773715, 0.050735936, 0.043613017, 0.051356103, 0.06166878, 0.042570867, 0.039746188,
0.0534568, 0.10631698, 0.07995454, 0.046308, 0.041976847, 0.06931623, 0.067868456,
0.04504579, 0.045448273, 0.04601344, 0.056234643, 0.045799855, 0.04603592, 0.13356374,
0.046383068, 0.05529113, 0.044578392, 0.05296588, 0.061743364, 0.052938472, 0.041735645,
0.047608927, 0.04592036, 0.053764556, 0.046950072, 0.049668778, 0.04479959, 0.04654324,
0.045634806, 0.05336268, 0.04710624, 0.050726768, 0.057229098, 0.04819765, 0.046905134,
0.04985591, 0.06966985, 0.07352509, 0.04680028, 0.0647051, 0.055498306, 0.04707267,
0.05130421, 0.069536425, 0.051224507, 0.09464935, 0.058608565, 0.06952775, 0.05288085,
0.055379365, 0.0666051, 0.064164996, 0.052164093, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.043116175, 0.046338387,
0.050983626, 0.03815392, 0.043054044, 0.042830803, 0.043469176, 0.04316539, 0.05034422,
0.07044069, 0.04513947, 0.04553662, 0.048440624, 0.04682226, 0.048581872, 0.045602564,
0.061153788, 0.049355842, 0.050957132, 0.05146794, 0.045881737, 0.048594818, 0.06109975,
0.076409936, 0.08072334, 0.047291897, 0.051319525, 0.113534346, 0.06977234, 0.04481766,
0.05176145, 0.19608968, 0.0499805, 0.043499578, 0.0467594, 0.060759302, 0.052508164,
0.0652323, 0.05144325, 0.058530837, 0.042543426, 0.046227347, 0.07612142, 0.061683934,
0.04676748, 0.049106833, 0.044727433, 0.06013386, 0.04752745, 0.042593, 0.06795293,
0.06669428, 0.052826647, 0.055752717, 0.06496404, 0.057078563, 0.17696962, 0.05803781,
0.05120165, 0.045730032, 0.041603968, 0.043539185, 0.044150688, 0.057073023, 0.13700983,
0.042880923, 0.036413874, 0.04719974, 0.057656787, 0.05694053, 0.052096136, 0.044997063,
0.04628493, 0.0508443, 0.046908055, 0.040273644, 0.065447316, 0.048458684, 0.043775886,
0.045862064, 0.047123477, 0.0430363, 0.051121116, 0.063838184, 0.05493522, 0.042901233,
0.04359844, 0.068343334, 0.054447826, 0.03530873, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.06225102, 0.05273317,
0.05615375, 0.050188698, 0.050387505, 0.050749924, 0.06440692, 0.053184852, 0.057024017,
0.062133048, 0.05270529, 0.054198503, 0.05509262, 0.085315235, 0.07582388, 0.041753866,
0.053174518, 0.04671133, 0.050150756, 0.048325464, 0.064159535, 0.04980342, 0.1473352,
0.056091897, 0.061530758, 0.05299143, 0.05271256, 0.056047946, 0.0596624, 0.048437875,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.046000544, 0.05478325, 0.056823123, 0.0378193, 0.041640736, 0.05665806,
0.04799181, 0.056372624, 0.04923721, 0.06929124, 0.040019788, 0.04518707, 0.05982173,
0.04875891, 0.043513253, 0.047477607, 0.064352915, 0.052832793, 0.051989924, 0.051543195,
0.04725089, 0.053836115, 0.059475113, 0.0777515, 0.08391307, 0.04762611, 0.05359535,
0.10813084, 0.07019711, 0.046152968, 0.048564192, 0.117444284, 0.04571177, 0.036049288,
0.03954313, 0.05849861, 0.045825236, 0.04760062, 0.04140289, 0.048143566, 0.037430245,
0.03981874, 0.05603567, 0.04970511, 0.04138763, 0.04506357, 0.060654115, 0.053322814,
0.044995014, 0.047007035, 0.049169447, 0.04881095, 0.059895597, 0.073301025, 0.07948586,
0.043803737, 0.04890455, 0.10142739, 0.06591118, 0.044932038, 0.048613854, 0.053741556,
0.045440387, 0.048377167, 0.05515141, 0.043015506, 0.04611082, 0.043986306, 0.064480916,
0.059501182, 0.044896893, 0.0488923, 0.06252395, 0.08284417, 0.05347234, 0.04652123,
0.051385004, 0.04506874, 0.044321746, 0.046598524, 0.042761248, 0.049393896, 0.05679495,
0.05610772, 0.066474125, 0.044711605, 0.046760183, 0.055744927, 0.07571479, 0.062382944,
0.0483012, 0.06233933, 0.053944368, 0.04734004, 0.04772856, 0.060092714, 0.058206797,
0.09434205, 0.058060937, 0.0698302, 0.04786124, 0.05555308, 0.06809905, 0.074984156,
0.050170753, 0.046630897, 0.053439982, 0.04591161, 0.037432145, 0.040925626, 0.040894058,
0.045428548, 0.039124183, 0.048861608, 0.04739735, 0.03778156, 0.0439188, 0.053212766,
0.058652654, 0.039144907, 0.043876577, 0.12953746, 0.04309747, 0.040109534, 0.04298829,
0.049468797, 0.04545244, 0.050548624, 0.048599765, 0.05068521, 0.036610693, 0.039401565,
0.061850943, 0.0574983, 0.041339584, 0.041106705, 0.05846792, 0.04892622, 0.044272885,
0.051527243, 0.046308476, 0.043897636, 0.05747169, 0.07668447, 0.07658301, 0.04248614,
0.04724144, 0.10157942, 0.070237026, 0.046366293, 0.048109572, 0.06019945, 0.057761133,
0.0454532, 0.0550901, 0.055227064, 0.05086244, 0.06380826, 0.07036352, 0.09674716,
0.052107397, 0.05652785, 0.08862285, 0.06432432, 0.053782914, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.05014208,
0.056756876, 0.06036043, 0.04860208, 0.061623078, 0.053248, 0.049876086, 0.06402491,
0.07650502, 0.116607204, 0.056577604, 0.055174075, 0.08044198, 0.06359959, 0.051624015,
0.052468404, 0.0489187, 0.06824725, 0.04150189, 0.03932926, 0.1309313, 0.058002993,
0.05320773, 0.04527075, 0.053944454, 0.05580622, 0.06756229, 0.04744931, 0.04763099,
0.046504807, 0.049290095, 0.04659591, 0.0565974, 0.052462578, 0.044009153, 0.06625294,
0.07303924, 0.051586572, 0.059495647, 0.0636406, 0.05873547, 0.16480528, 0.062035985,
0.05576574, 0.045134168, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.09737017, 0.043631576, 0.06993051, 0.03796158,
0.042050995, 0.045931004, 0.051089123, 0.03837888, 0.04210535, 0.054234322, 0.044690445,
0.043357164, 0.045385428, 0.047773253, 0.05508524, 0.040730342, 0.055096883, 0.049784176,
0.051124822, 0.056520395, 0.04367109, 0.04380618, 0.05622891, 0.1277974, 0.09212723,
0.048738435, 0.047651656, 0.08838389, 0.07141987, 0.04567746, 0.053913325, 0.040347885,
0.043545984, 0.03557335, 0.035217572, 0.04034027, 0.054659095, 0.03793744, 0.04167996,
0.04506353, 0.038912106, 0.044165164, 0.042982962, 0.06131703, 0.0426676, 0.046213374,
0.042987097, 0.05126668, 0.053853452, 0.048852257, 0.065284014, 0.056174286, 0.0528436,
0.060317952, 0.062227834, 0.06833796, 0.11069544, 0.05675852, 0.054763567, 0.048987437,
0.04567735, 0.25637144, 0.044417046, 0.040382054, 0.044324305, 0.051891293, 0.047398802,
0.06412269, 0.047647398, 0.05686771, 0.03808922, 0.040541884, 0.07014275, 0.058343124,
0.04369101, 0.04982584, 0.17472115, 0.048173286, 0.0426449, 0.045484703, 0.05765849,
0.052784625, 0.061730668, 0.049581036, 0.05569663, 0.042773027, 0.046415318, 0.0667271,
0.05980431, 0.04505243, 0.03911864, 0.039944496, 0.043153767, 0.06721464, 0.083073705,
0.04041909, 0.034588706, 0.04251704, 0.05754301, 0.052088413, 0.04904132, 0.04078788,
0.045713533, 0.045788907, 0.044705976, 0.043064594, 0.044257123, 0.050807822, 0.0551972,
0.1136483, 0.043165203, 0.03678541, 0.045911986, 0.063348226, 0.05317481, 0.05529034,
0.0444704, 0.04874378, 0.047549482, 0.04631077, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.041575465, 0.042020224,
0.053071715, 0.037549127, 0.03841891, 0.044268195, 0.044379093, 0.04607617, 0.056712907,
0.08764612, 0.038740657, 0.046591446, 0.056786004, 0.045926705, 0.03793534, 0.039184034,
0.042429224, 0.044315707, 0.05119664, 0.092310004, 0.03964605, 0.03559388, 0.040844038,
0.055188876, 0.0469187, 0.046191238, 0.04161888, 0.045759153, 0.04597951, 0.041310463,
0.04945866, 0.2258793, 0.05050796, 0.045172054, 0.04924445, 0.053696323, 0.05215179,
0.0640379, 0.056042235, 0.06534166, 0.040776715, 0.044371355, 0.078215085, 0.06701178,
0.04794021, 0.04769718, 0.04824349, 0.047452845, 0.21095593, 0.069443196, 0.053864095,
0.049130253, 0.05653105, 0.061509036, 0.057654433, 0.06187657, 0.056276917, 0.05602618,
0.05811706, 0.057797816, 0.046513237, 0.049288314, 0.05041905, 0.052837953, 0.082321055,
0.044646256, 0.040080644, 0.045038067, 0.061345648, 0.054615263, 0.05016377, 0.04495888,
0.053768195, 0.05153446, 0.04556319, 0.042359702, 0.056865565, 0.05429025, 0.051673956,
0.05220649, 0.05727914, 0.04409549, 0.08919337, 0.057019606, 0.062070236, 0.05018081,
0.047695965, 0.050619908, 0.052140664, 0.04376542, 0.045999814, 0.058342975, 0.05404616,
0.05152846, 0.053599503, 0.048694957, 0.048934616, 0.05654936, 0.09900219, 0.0830284,
0.04980842, 0.054192405, 0.07785867, 0.06769485, 0.045953304, 0.035869602, 0.047987968,
0.042902786, 0.046183236, 0.050091576, 0.04166582, 0.04088568, 0.060600437, 0.088704474,
0.07634677, 0.043899767, 0.04316709, 0.07929202, 0.06608713, 0.04030351, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.03937162, 0.0563831, 0.045411695, 0.04441575, 0.049000245, 0.042035732, 0.042503383,
0.053881723, 0.08369718, 0.08155558, 0.042056564, 0.04344388, 0.07678004, 0.07167301,
0.040988196, 0.03790822, 0.04865242, 0.042560212, 0.054101266, 0.05517956, 0.043660834,
0.04349889, 0.058775473, 0.11229901, 0.068326525, 0.046698634, 0.044736877, 0.081637055,
0.07745329, 0.043458294, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.044378832, 0.05971463, 0.0500534, 0.054589573,
0.05519463, 0.070703685, 0.05293613, 0.20244597, 0.057655312, 0.066542625, 0.05681742,
0.055391792, 0.060961302, 0.061251633, 0.051363073, 0.03980483, 0.05250418, 0.04646795,
0.04609498, 0.050590336, 0.06453863, 0.04417786, 0.11869433, 0.052489236, 0.0641743,
0.050195083, 0.046586934, 0.05377085, 0.056970745, 0.050417986, 0.04282952, 0.055054314,
0.04777157, 0.049503822, 0.05007209, 0.061241537, 0.049357977, 0.14822403, 0.05303497,
0.063118756, 0.05091466, 0.050705478, 0.056987602, 0.05792101, 0.04942075, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.05353528, 0.05475992, 0.050780732, 0.04161672, 0.041155044,
0.05066305, 0.063952796, 0.04741992, 0.053705208, 0.05553288, 0.042049512, 0.05740854,
0.065784685, 0.08029139, 0.046020553, 0.05286213, 0.15971282, 0.052501377, 0.03945841,
0.043136265, 0.05822975, 0.051891424, 0.05419377, 0.04887355, 0.057654947, 0.039808523,
0.042781103, 0.06647597, 0.057801653, 0.04514252, 0.04419834, 0.06207836, 0.05370022,
0.042147968, 0.049955044, 0.046592534, 0.047691632, 0.054904565, 0.07494688, 0.07095385,
0.043397576, 0.0492136, 0.10872828, 0.07059504, 0.04758044, 0.053593643, 0.05290562,
0.05255027, 0.048246276, 0.04875795, 0.049693882, 0.058621783, 0.056755595, 0.06466363,
0.066063955, 0.04686071, 0.050000984, 0.061375357, 0.10177018, 0.070488594, 0.038934417,
0.03858201, 0.041869447, 0.069048345, 0.10568933, 0.040083263, 0.032783758, 0.04183897,
0.05103719, 0.046141706, 0.052957542, 0.041058745, 0.041297343, 0.042161986, 0.042215075,
0.05600503, 0.05855095, 0.065140404, 0.047685873, 0.051044438, 0.053890914, 0.06352814,
0.0555218, 0.06695726, 0.08413593, 0.05392019, 0.06132736, 0.06896021, 0.062339842,
0.048659485, 0.046832062, 0.04877314, 0.05215093, 0.06938235, 0.14922439, 0.046540435,
0.040142614, 0.052800715, 0.067272834, 0.06787131, 0.06294996, 0.04667434, 0.052839696,
0.056081213, 0.056907855, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.038067266, 0.12685454, 0.037029613, 0.037160337,
0.04208325, 0.04294524, 0.0394677, 0.05535498, 0.04505148, 0.047965713, 0.033864662,
0.03706682, 0.062220972, 0.05245557, 0.036502365, 0.050322533, 0.04365795, 0.05453436,
0.056143574, 0.052807026, 0.07334434, 0.059676614, 0.05735748, 0.058100738, 0.06373709,
0.08103767, 0.13494341, 0.05564623, 0.055688117, 0.056604855, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.039641213, 0.053005315, 0.04761033, 0.054017846, 0.059492093, 0.047882766, 0.043143786,
0.06165407, 0.13630211, 0.08464064, 0.05126365, 0.046564482, 0.07665631, 0.07274906,
0.045489155, 0.04587956, 0.049540482, 0.047622878, 0.07344421, 0.18155998, 0.04509205,
0.03975626, 0.056997463, 0.06510274, 0.06657943, 0.057961896, 0.043934457, 0.05395843,
0.05891811, 0.058300745, 0.04607004, 0.043689724, 0.0551517, 0.054388136, 0.04919933,
0.08495244, 0.059452362, 0.060711972, 0.05353934, 0.06254982, 0.0722726, 0.10589327,
0.051920734, 0.050662532, 0.05297094, 0.15027475, 0.04745794, 0.07574478, 0.0418487,
0.044598002, 0.046518497, 0.051795393, 0.040608045, 0.045623165, 0.052248754, 0.04987956,
0.04334918, 0.046918686, 0.04865253, 0.053497903, 0.048798088, 0.049525015, 0.06089563,
0.04342374, 0.039481707, 0.090052724, 0.07215518, 0.0542544, 0.045888506, 0.052601326,
0.04971381, 0.08268515, 0.04987447, 0.04819788, 0.041453365, 0.040276077, 0.04936011,
0.046128806, 0.054149315, 0.04984087, 0.062097717, 0.05127572, 0.15691835, 0.053910375,
0.06079255, 0.053369813, 0.051693585, 0.05727031, 0.05757822, 0.044227146, 0.03835871,
0.047229223, 0.043458544, 0.057289258, 0.05691276, 0.042735893, 0.04039331, 0.05948358,
0.12004199, 0.07015848, 0.048712905, 0.044496275, 0.068569325, 0.07072816, 0.04491059,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.036000427, 0.041195717, 0.042411488, 0.05788536, 0.06245665, 0.040344022,
0.036698155, 0.05015239, 0.08576985, 0.06924921, 0.044013534, 0.039122116, 0.05643198,
0.054460756, 0.038125835, 0.04231413, 0.057316493, 0.048598543, 0.045247577, 0.04837868,
0.045591988, 0.0498628, 0.057301175, 0.08191813, 0.07800151, 0.04428035, 0.05372341,
0.13662161, 0.06984303, 0.04453661, 0.054870993, 0.04620693, 0.059872553, 0.053402603,
0.049537163, 0.06426287, 0.06761565, 0.05624231, 0.060412757, 0.07327012, 0.06613685,
0.16349745, 0.060065076, 0.057895634, 0.054092184, 0.0793279, 0.05196881, 0.064143404,
0.037348934, 0.038447935, 0.05096493, 0.06922455, 0.039266136, 0.042258654, 0.050101053,
0.040789444, 0.051566307, 0.047402736, 0.05117301, 0.047558017, 0.0379812, 0.048744496,
0.043333583, 0.043870676, 0.04290427, 0.05176054, 0.04817234, 0.097708605, 0.04674461,
0.057048593, 0.043879785, 0.04815396, 0.056634948, 0.055519767, 0.041181035, 0.043881547,
0.058246944, 0.050762396, 0.04000133, 0.03915428, 0.04757831, 0.047724847, 0.059606977,
0.05236519, 0.07727976, 0.036872502, 0.043996435, 0.05950295, 0.057885382, 0.04258858,
0.048415292, 0.04880776, 0.052537154, 0.042377073, 0.042885046, 0.1708667, 0.04896997,
0.053723417, 0.042611554, 0.04893606, 0.056720663, 0.058720566, 0.04363104, 0.04628832,
0.04293577, 0.03586278, 0.044281904, 0.04528616, 0.0541463, 0.06532407, 0.043661658,
0.040071927, 0.05266098, 0.11403451, 0.06970325, 0.048584603, 0.04289682, 0.066868655,
0.061522715, 0.04470169, 0.049856856, 0.047186363, 0.05559534, 0.056260776, 0.051938556,
0.064508475, 0.06373422, 0.061578546, 0.06605658, 0.07329732, 0.063985825, 0.1550377,
0.062027533, 0.061500497, 0.05481655, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.043366104, 0.12060632, 0.041922513,
0.040404905, 0.04397351, 0.052271746, 0.045368373, 0.05565383, 0.04948989, 0.048824906,
0.038628127, 0.041128296, 0.06398331, 0.056494623, 0.0389488, 0.04559727, 0.043759566,
0.0413935, 0.042308185, 0.043061767, 0.041220818, 0.047579188, 0.039860927, 0.047206197,
0.04736805, 0.04010552, 0.042688582, 0.04609287, 0.07088995, 0.05983081, 0.03926811,
0.051557716, 0.04434678, 0.045348942, 0.054787192, 0.044001933, 0.040797237, 0.045995776,
0.075685814, 0.053168867, 0.045878995, 0.04184785, 0.05930705, 0.06204422, 0.04094679,
0.04481945, 0.05679533, 0.0501939, 0.05534233, 0.053667087, 0.07196173, 0.05475263,
0.20408903, 0.056603607, 0.067821205, 0.05808479, 0.05535222, 0.057990346, 0.061701443,
0.0508249, 0.03964698, 0.06085965, 0.046870355, 0.04070033, 0.046782114, 0.044856135,
0.04163758, 0.056172613, 0.05778881, 0.0631881, 0.03884206, 0.04344182, 0.08287955,
0.056070983, 0.040430438, 0.04909837, 0.053373035, 0.051881656, 0.04221453, 0.042402204,
0.05954052, 0.043935463, 0.062185887, 0.038771637, 0.06676626, 0.04959027, 0.04889559,
0.04270944, 0.044733416, 0.05582582, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.054653052, 0.05279126,
0.05234526, 0.053470295, 0.054395862, 0.048413627, 0.05520853, 0.05693258, 0.06168709,
0.06578338, 0.050051663, 0.05053869, 0.057547484, 0.088354945, 0.080174714, 0.044778015,
0.044040162, 0.059714027, 0.044277404, 0.03887895, 0.09616802, 0.06398031, 0.05194026,
0.047001913, 0.050175734, 0.051170357, 0.09464154, 0.049051076, 0.045903433, 0.041373283,
0.044180535, 0.045357402, 0.056150958, 0.045426525, 0.040569153, 0.15599824, 0.059191097,
0.054614857, 0.04625631, 0.051457398, 0.05637958, 0.07860876, 0.045350946, 0.047180053,
0.04256295, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.042365145, 0.046685744, 0.04519326, 0.06816412, 0.1330329,
0.042619005, 0.03748141, 0.05078787, 0.05704988, 0.05753542, 0.053562563, 0.041819524,
0.0482026, 0.05266979, 0.054010663, 0.04345933, 0.05325209, 0.051270794, 0.041907128,
0.045712024, 0.04884456, 0.04577997, 0.06206996, 0.055814367, 0.089328945, 0.041004397,
0.045877557, 0.07258528, 0.05486931, 0.042163167, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.05257025, 0.049661465,
0.05372677, 0.040736515, 0.045954905, 0.049047966, 0.044059034, 0.046579227, 0.04345546,
0.051830187, 0.05133577, 0.054863717, 0.045733158, 0.045086686, 0.04562823, 0.058933772,
0.05179249, 0.057376824, 0.052174766, 0.051963165, 0.0524727, 0.053524606, 0.050518785,
0.051935013, 0.06600262, 0.052407254, 0.04757205, 0.05001686, 0.06689438, 0.113769524,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.044760663, 0.07788692, 0.04994777, 0.044426784, 0.049968436, 0.047563925,
0.053261857, 0.058216818, 0.06848247, 0.07668595, 0.045113534, 0.051958416, 0.159248,
0.07433115, 0.0488556, 0.046528377, 0.042629495, 0.045859277, 0.035599172, 0.03649608,
0.043134443, 0.04656667, 0.04911225, 0.041011576, 0.052169435, 0.0378046, 0.038402863,
0.041789573, 0.05549023, 0.046842124, 0.055969056, 0.053820506, 0.06676149, 0.047543373,
0.044618938, 0.11224788, 0.073612064, 0.06112736, 0.04895328, 0.05767477, 0.05760547,
0.080167785, 0.052164223, 0.052941427, 0.048575643, 0.039866287, 0.04793895, 0.049901184,
0.03686237, 0.042514335, 0.044367954, 0.042947613, 0.045330834, 0.04965202, 0.06654984,
0.0423901, 0.046540555, 0.05850057, 0.050080206, 0.04574471, 0.044131063, 0.043813236,
0.051754344, 0.036549654, 0.042564165, 0.045817055, 0.044808853, 0.045666266, 0.047211632,
0.066594236, 0.047514893, 0.042910334, 0.052504063, 0.045287997, 0.050670613, 0.042832825,
0.046401583, 0.053824566, 0.05035633, 0.049280025, 0.119071156, 0.04976615, 0.058267046,
0.050590184, 0.054390058, 0.07188462, 0.07029266, 0.045079734, 0.048573494, 0.045226008,
0.04588037, 0.053338196, 0.044924982, 0.036247663, 0.035720754, 0.03863618, 0.056420512,
0.040427033, 0.04864435, 0.060905933, 0.035175078, 0.041920412, 0.05971131, 0.056601126,
0.036540814, 0.054068685, 0.18480097, 0.05283539, 0.043483745, 0.046174694, 0.055968642,
0.056905996, 0.061189428, 0.052082587, 0.06260838, 0.042271137, 0.046592727, 0.07140608,
0.0629895, 0.047815274, 0.03632202, 0.046519022, 0.04311741, 0.035979852, 0.0374274,
0.039737973, 0.04246618, 0.053554054, 0.054816335, 0.07125893, 0.03586915, 0.041724958,
0.06824855, 0.05176743, 0.037501294, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.042940978, 0.04578306, 0.0507907,
0.036298443, 0.04290793, 0.03982654, 0.042920005, 0.0459302, 0.05429885, 0.07886874,
0.0394169, 0.04382028, 0.06254038, 0.051656835, 0.042603843, 0.04949411, 0.17296174,
0.05029394, 0.041597523, 0.04288163, 0.05298396, 0.05299782, 0.05617411, 0.05229115,
0.062799945, 0.038144685, 0.04438182, 0.07809928, 0.06384038, 0.043901134, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.04667757, 0.041811816, 0.04620949, 0.05409473, 0.046337295, 0.044546023, 0.04027325,
0.041999675, 0.04361961, 0.0497629, 0.046312958, 0.04512083, 0.049934678, 0.041564748,
0.04099895, 0.04341007, 0.052493792, 0.046675954, 0.044827398, 0.04473176, 0.0408374,
0.0449016, 0.055412844, 0.05791266, 0.07055144, 0.03880446, 0.042533536, 0.0774256,
0.06198279, 0.038943462, 0.058064952, 0.058603425, 0.070077375, 0.05238146, 0.053812124,
0.18435395, 0.054631103, 0.06971448, 0.052244138, 0.06378903, 0.061647546, 0.054912504,
0.05370764, 0.055857994, 0.05620227, 0.045569304, 0.057760853, 0.049619406, 0.03839853,
0.039522346, 0.050328266, 0.057597496, 0.046648163, 0.049309004, 0.05206844, 0.03834182,
0.04215947, 0.058866154, 0.087064214, 0.049558003, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.054550298, 0.047478117,
0.08116266, 0.04433129, 0.054624304, 0.046977844, 0.04568482, 0.04532117, 0.053756457,
0.059361268, 0.064755686, 0.0469084, 0.053913563, 0.049427547, 0.05780951, 0.032808635,
0.036299586, 0.04090055, 0.033155765, 0.031540394, 0.04040363, 0.03755739, 0.035784803,
0.035406116, 0.046352513, 0.038955715, 0.04841362, 0.034273773, 0.032334726, 0.029854234,
0.039858565, 0.05262852, 0.049106874, 0.03376611, 0.0368348, 0.04298082, 0.046989657,
0.046265345, 0.049130403, 0.06035501, 0.03445223, 0.038782395, 0.07365849, 0.05347576,
0.03802559, 0.048517793, 0.12849826, 0.04783676, 0.04083975, 0.041491244, 0.049556296,
0.053229768, 0.058971714, 0.04852062, 0.056917824, 0.03748591, 0.041092016, 0.059550226,
0.05677484, 0.041659046, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.04656429, 0.04973969, 0.051893685, 0.054117333,
0.07521513, 0.047061775, 0.040859822, 0.05171481, 0.056617778, 0.062435046, 0.057500187,
0.052032553, 0.053530928, 0.051866047, 0.052919127, 0.041115027, 0.0491441, 0.044714984,
0.04640203, 0.04789036, 0.037487812, 0.04305193, 0.04846083, 0.072575346, 0.0850005,
0.039448347, 0.044775937, 0.06571654, 0.05926964, 0.04183301, 0.059711535, 0.05604461,
0.08202167, 0.049001925, 0.053028356, 0.09630152, 0.0525732, 0.062189214, 0.050963815,
0.06461207, 0.057413585, 0.050645288, 0.05605843, 0.054291293, 0.05881703, 0.043308694,
0.064699285, 0.047752507, 0.042727202, 0.04464589, 0.044380825, 0.053349216, 0.057818346,
0.060166597, 0.0622739, 0.038960185, 0.042326644, 0.08031533, 0.1579883, 0.05188369,
0.045649555, 0.060663924, 0.05122754, 0.047769777, 0.04881192, 0.06077752, 0.049627814,
0.12950422, 0.053708326, 0.06843716, 0.048045736, 0.045992374, 0.05704425, 0.060516726,
0.051662683, 0.06941924, 0.050848987, 0.19059025, 0.045808166, 0.05234702, 0.054773055,
0.05896814, 0.04818636, 0.062064033, 0.06844658, 0.06355392, 0.05328453, 0.059653923,
0.056231376, 0.06582442, 0.028212812, 0.033659443, 0.03263489, 0.035683885, 0.03299727,
0.034291305, 0.031341985, 0.04820243, 0.03549323, 0.05226733, 0.03848938, 0.039685592,
0.03686482, 0.032380607, 0.03182083, 0.04310431, 0.060201995, 0.048308127, 0.03779556,
0.0386452, 0.042153895, 0.054970216, 0.04882918, 0.05076819, 0.068601675, 0.035208788,
0.04290695, 0.08885125, 0.06180377, 0.041327782, 0.0446325, 0.12783624, 0.04672151,
0.035575088, 0.03898756, 0.047184996, 0.046431493, 0.04584516, 0.04763771, 0.05314301,
0.035477277, 0.03931134, 0.06156421, 0.0575255, 0.03885892, 0.04564856, 0.04764922,
0.04946677, 0.16798155, 0.08978868, 0.053302433, 0.045655433, 0.06323396, 0.0650562,
0.06253621, 0.07166818, 0.052797593, 0.053324237, 0.053400766, 0.060945038, 0.044872694,
0.048981577, 0.04660029, 0.050041802, 0.058233883, 0.045016922, 0.040707424, 0.04532808,
0.051769692, 0.054411016, 0.04789848, 0.050317794, 0.051267393, 0.049479105, 0.043783344,
0.045589704, 0.054327372, 0.049426895, 0.052109644, 0.06016518, 0.044190194, 0.04443336,
0.054121714, 0.06393309, 0.07371569, 0.04899141, 0.05041633, 0.064822085, 0.05731459,
0.044796087, 0.05501909, 0.05933999, 0.062423103, 0.05055942, 0.05030746, 0.1756825,
0.052679528, 0.07008624, 0.0487252, 0.060496546, 0.06294638, 0.05566318, 0.05132343,
0.054059256, 0.052565828, 0.050920773, 0.06511027, 0.050173976, 0.0465473, 0.048311856,
0.04878806, 0.060810547, 0.051270954, 0.06771685, 0.06479981, 0.04305919, 0.049579073,
0.084081486, 0.18458298, 0.058854762, 0.043674365, 0.061714616, 0.04959407, 0.051804848,
0.05314332, 0.05940468, 0.04655885, 0.17419177, 0.056449547, 0.073880695, 0.04757379,
0.046399698, 0.059410784, 0.062507845, 0.05002024, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.028996734, 0.033146136,
0.033275865, 0.035311658, 0.033200685, 0.02961447, 0.029335853, 0.044881795, 0.037821554,
0.06718429, 0.03483408, 0.034386724, 0.03831531, 0.032679413, 0.031041238, 0.034693953,
0.043455824, 0.041674763, 0.03608558, 0.039280564, 0.036413673, 0.037667762, 0.04509376,
0.050995346, 0.059064154, 0.03374147, 0.036148537, 0.072505005, 0.058325637, 0.034848813,
0.056934014, 0.18258889, 0.05730611, 0.043980986, 0.04588363, 0.061596174, 0.05887242,
0.062288586, 0.054771323, 0.06518726, 0.042444713, 0.048031833, 0.07682393, 0.06916624,
0.049306016, 0.053057782, 0.0506063, 0.055882275, 0.040766425, 0.041465107, 0.045592356,
0.058854416, 0.043488972, 0.051195525, 0.07363334, 0.045533657, 0.05381525, 0.05918614,
0.05815848, 0.05070309, 0.042778816, 0.043420553, 0.040661294, 0.036623232, 0.039390072,
0.04009603, 0.051139303, 0.03752887, 0.046963383, 0.047383882, 0.035648923, 0.04010793,
0.054904845, 0.08640556, 0.04868763, 0.049940348, 0.0687025, 0.051907808, 0.050397173,
0.05256769, 0.049036786, 0.06018186, 0.05981916, 0.07427983, 0.06820842, 0.044920657,
0.051704507, 0.09294468, 0.14551656, 0.057206087, 0.04581829, 0.059896883, 0.048909158,
0.05539011, 0.05384489, 0.059477165, 0.048624344, 0.18082017, 0.055807255, 0.07704123,
0.05152707, 0.048886776, 0.057464436, 0.060186137, 0.057652965, 0.04868821, 0.06321655,
0.049738355, 0.043232147, 0.044137754, 0.047891412, 0.061604787, 0.049104244, 0.06574489,
0.06558487, 0.041849084, 0.04993933, 0.08484385, 0.13098054, 0.052363776, 0.047559354,
0.14218691, 0.048849024, 0.04126794, 0.045192435, 0.048210725, 0.049604006, 0.058510277,
0.052564856, 0.06109606, 0.038520273, 0.042562906, 0.07055583, 0.062141262, 0.043581676,
0.04169453, 0.05008569, 0.042407505, 0.03965341, 0.03868252, 0.03812686, 0.054205183,
0.04470765, 0.057404388, 0.06991502, 0.036785215, 0.051807687, 0.08684946, 0.07135002,
0.04086493, 0.04111889, 0.046090044, 0.050648093, 0.043471757, 0.03969189, 0.045097187,
0.0482553, 0.056678846, 0.046319257, 0.061123244, 0.04475321, 0.056592707, 0.046496976,
0.043906588, 0.041042726, 0.041332453, 0.049776006, 0.054385733, 0.039935023, 0.042676933,
0.043543205, 0.048623707, 0.05110697, 0.060596183, 0.07227617, 0.03940532, 0.047560625,
0.07638763, 0.054180324, 0.039039537, 0.029579984, 0.032159213, 0.034619167, 0.038404137,
0.03400762, 0.033076957, 0.031246586, 0.044682406, 0.03747398, 0.051847536, 0.03914241,
0.039324246, 0.036002334, 0.031576134, 0.030883104, 0.059664577, 0.058248486, 0.06738147,
0.046356786, 0.046731763, 0.166463, 0.05799766, 0.062750496, 0.046990708, 0.059303936,
0.054947358, 0.053976085, 0.051014066, 0.053043053, 0.05019491, 0.04723038, 0.04825051,
0.04934783, 0.16107456, 0.07451333, 0.058114037, 0.04902986, 0.06810877, 0.057735614,
0.059339106, 0.064844154, 0.052852385, 0.053986393, 0.05380134, 0.062207967, 0.06628103,
0.05642067, 0.16607045, 0.045800637, 0.050261084, 0.07460729, 0.058861747, 0.05312275,
0.0627019, 0.06915395, 0.06296759, 0.05852342, 0.0588481, 0.05645192, 0.059927456,
0.071867056, 0.05444388, 0.18231653, 0.048111282, 0.050245866, 0.059733886, 0.06250417,
0.04940001, 0.06094132, 0.06514459, 0.06528644, 0.05520936, 0.05798774, 0.055338908,
0.06146896, 0.04266383, 0.04752582, 0.046723615, 0.047416676, 0.049712352, 0.040938463,
0.04184539, 0.055293173, 0.07078511, 0.065760575, 0.04406872, 0.045071762, 0.06778504,
0.06048632, 0.04080955, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.039413534, 0.043309063, 0.04546036, 0.09182951,
0.06270267, 0.05304309, 0.042814348, 0.074703634, 0.05479789, 0.056009647, 0.06411826,
0.04871843, 0.045625824, 0.045338456, 0.049020898, 0.041020457, 0.109579645, 0.041130606,
0.037656624, 0.03770983, 0.043957267, 0.04500714, 0.053566962, 0.045572855, 0.0562156,
0.033820875, 0.038557347, 0.06295613, 0.05045945, 0.03586226, 0.04564082, 0.15453438,
0.04692138, 0.042005938, 0.046318408, 0.050183617, 0.04845229, 0.060039196, 0.05054859,
0.05812543, 0.038111918, 0.041709155, 0.06970046, 0.0615915, 0.04334997, 0.045553096,
0.043087777, 0.04888767, 0.05070033, 0.06532971, 0.04187815, 0.039228134, 0.045190863,
0.0530469, 0.055467438, 0.05217003, 0.04492557, 0.050154977, 0.04747342, 0.045597762,
0.042470858, 0.043690275, 0.04610681, 0.054499347, 0.06801283, 0.04372207, 0.03744715,
0.047808286, 0.053696167, 0.052600548, 0.053593725, 0.045332007, 0.048461955, 0.047379073,
0.043870725, 0.05863788, 0.054817118, 0.20511653, 0.045567527, 0.053245228, 0.052958827,
0.055648785, 0.050218865, 0.058007054, 0.06731524, 0.0619709, 0.052042715, 0.062447242,
0.05855541, 0.06345067, 0.047222443, 0.050448805, 0.057053365, 0.050174832, 0.04545759,
0.049676206, 0.047742255, 0.05979609, 0.05050248, 0.07509249, 0.04697833, 0.050135944,
0.05333065, 0.04623978, 0.041838825, 0.048870392, 0.045498535, 0.047736008, 0.0520713,
0.063186154, 0.04129888, 0.037766457, 0.04580656, 0.05257465, 0.056033358, 0.048588146,
0.04154564, 0.054698106, 0.04749758, 0.045520063, 0.053151254, 0.119546555, 0.047952138,
0.037467767, 0.0371951, 0.050821844, 0.057313766, 0.05198788, 0.0431875, 0.05216109,
0.035609767, 0.040941466, 0.057419028, 0.05916307, 0.040857546, 0.04506389, 0.046506606,
0.047559615, 0.17169099, 0.07255322, 0.055629674, 0.04626294, 0.066048644, 0.05332964,
0.054618143, 0.060688592, 0.048144583, 0.04959583, 0.051215712, 0.061110836, 0.04214259,
0.04177861, 0.045247495, 0.056212854, 0.0683849, 0.042441685, 0.03783765, 0.04580571,
0.05495571, 0.052485354, 0.053017493, 0.046325445, 0.048693992, 0.04870795, 0.04465438,
0.05115645, 0.06546056, 0.04958002, 0.056342542, 0.055619024, 0.06532324, 0.048394255,
0.09910009, 0.053687047, 0.06883112, 0.054296568, 0.054556735, 0.054596085, 0.060164023,
0.059746694, 0.041661717, 0.047926255, 0.044264454, 0.045779947, 0.04833834, 0.04100415,
0.04391793, 0.051762883, 0.063351795, 0.06675958, 0.043195024, 0.04631063, 0.074910924,
0.06633787, 0.041364897, 0.045813292, 0.052307095, 0.049251866, 0.04435732, 0.044622634,
0.03901946, 0.049289145, 0.048750717, 0.06348796, 0.06632616, 0.037762143, 0.040758602,
0.061222494, 0.06275639, 0.040308904, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.046354063, 0.051454507, 0.05131711,
0.04468387, 0.046364155, 0.041755743, 0.047922958, 0.051676653, 0.058761764, 0.07419363,
0.039692946, 0.051718626, 0.06549757, 0.05138567, 0.04031521, 0.044300154, 0.04741365,
0.04732639, 0.046566695, 0.04527208, 0.041098557, 0.046773646, 0.05452844, 0.061488863,
0.07683896, 0.0427724, 0.048297085, 0.062902585, 0.060142674, 0.041164216, 0.06163526,
0.059037864, 0.07016076, 0.050422758, 0.048352186, 0.19919114, 0.059650417, 0.06519782,
0.04766962, 0.060959358, 0.06053291, 0.05702226, 0.053169586, 0.053308178, 0.053689886,
0.041336462, 0.05834762, 0.04373864, 0.045004558, 0.04502521, 0.05546258, 0.045936298,
0.1117516, 0.04780895, 0.06455161, 0.043120988, 0.043589167, 0.050207026, 0.052296937,
0.046868753, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.043775637, 0.066589355, 0.046821564, 0.04274337, 0.043510847,
0.058852013, 0.04754205, 0.10607299, 0.04916364, 0.059087574, 0.04415533, 0.043238845,
0.051632684, 0.06077425, 0.048316803, 0.053377323, 0.053546865, 0.07596936, 0.05099571,
0.055891775, 0.11839628, 0.0521762, 0.06332159, 0.052954685, 0.062444758, 0.06196937,
0.05201664, 0.055313345, 0.057169337, 0.059881076, 0.04556402, 0.05939753, 0.046599522,
0.039774593, 0.039465014, 0.039883744, 0.06577982, 0.04551008, 0.05571574, 0.070688985,
0.03722537, 0.046331592, 0.07735336, 0.06849556, 0.04448127, 0.04016696, 0.04320349,
0.044203244, 0.09529426, 0.055641983, 0.052783728, 0.04404167, 0.06797055, 0.05225812,
0.05283586, 0.05765136, 0.04945703, 0.045703597, 0.04578831, 0.04866299, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.037765495, 0.07083392, 0.037130293, 0.034126915, 0.034365904, 0.044543926, 0.04149591,
0.04296257, 0.04125358, 0.044167474, 0.033812054, 0.039493132, 0.052924234, 0.04631383,
0.03191935, 0.042650737, 0.056311492, 0.046316486, 0.044637565, 0.04559127, 0.040429685,
0.051925182, 0.04876467, 0.08020239, 0.083393686, 0.04114808, 0.048548307, 0.10804852,
0.08706899, 0.046636768, 0.035661496, 0.04909794, 0.04216046, 0.033963546, 0.03592452,
0.038439225, 0.04319737, 0.044709012, 0.047130432, 0.049156576, 0.03195404, 0.034503385,
0.06975953, 0.100782834, 0.040530507, 0.040999684, 0.042254787, 0.043595128, 0.054673526,
0.06058249, 0.040802706, 0.03710613, 0.041562118, 0.05179568, 0.049939252, 0.04590484,
0.042640936, 0.048126634, 0.045463648, 0.04315228, 0.031582877, 0.033616707, 0.036595467,
0.03662018, 0.03212041, 0.034074377, 0.034658972, 0.0445764, 0.037513666, 0.04766949,
0.036212776, 0.038258318, 0.037781354, 0.03317235, 0.02957246, 0.038282268, 0.041693427,
0.042102456, 0.050768107, 0.075967185, 0.035926085, 0.033635966, 0.047523927, 0.05484172,
0.058353018, 0.045988735, 0.03672561, 0.052348837, 0.051585063, 0.048344824, 0.041088633,
0.09594215, 0.044653513, 0.03388722, 0.03613233, 0.041714642, 0.04320714, 0.043141723,
0.04718094, 0.050820943, 0.034009125, 0.037640464, 0.06450554, 0.054490812, 0.035973616,
0.04253137, 0.08708978, 0.045347735, 0.034649633, 0.035517275, 0.045469932, 0.04580487,
0.046134494, 0.04394315, 0.05197852, 0.03319878, 0.03797939, 0.058881775, 0.05084379,
0.035314366, 0.045401935, 0.047812477, 0.049873535, 0.11598594, 0.08019105, 0.054324754,
0.047272522, 0.069614574, 0.059892554, 0.05857932, 0.07206147, 0.0531355, 0.052354127,
0.051002193, 0.057202082, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.042795617, 0.060501903, 0.045251116, 0.04545002,
0.044089224, 0.05795238, 0.04838727, 0.12345944, 0.047961064, 0.06580812, 0.045526255,
0.04677846, 0.05093195, 0.055627055, 0.047928765, 0.04433468, 0.048127886, 0.04627508,
0.051342852, 0.05170988, 0.043236688, 0.04373744, 0.052641388, 0.06009222, 0.064954646,
0.044280387, 0.04705946, 0.06351489, 0.056080844, 0.040814005, 0.038682435, 0.04286192,
0.03949934, 0.073651366, 0.08308994, 0.039864462, 0.035541106, 0.052550696, 0.04827214,
0.056774274, 0.048854787, 0.037001293, 0.047062445, 0.050725028, 0.05117358, 0.042148553,
0.043577194, 0.048115894, 0.11654276, 0.06542522, 0.05594523, 0.046254583, 0.066930525,
0.061043464, 0.05966952, 0.066235684, 0.055165853, 0.050440364, 0.05033651, 0.053204227,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.055287696, 0.057353776, 0.06684689, 0.050788507, 0.0523157, 0.14594008,
0.051115714, 0.06583713, 0.050753254, 0.06766122, 0.06232806, 0.052169345, 0.053811945,
0.053856995, 0.055178333, 0.042129446, 0.054486606, 0.044001117, 0.046931483, 0.048390996,
0.057977885, 0.041348465, 0.09729028, 0.045411374, 0.06658187, 0.0482844, 0.04643601,
0.047328304, 0.050458904, 0.05700958, 0.048087124, 0.050974563, 0.054134253, 0.051496547,
0.049978126, 0.04270019, 0.045334514, 0.05301864, 0.058187317, 0.07326377, 0.045474343,
0.048525482, 0.056517288, 0.050111864, 0.042650435, 0.040255815, 0.042962797, 0.042073034,
0.079315804, 0.067389965, 0.047027342, 0.04087349, 0.06251048, 0.04881888, 0.051395554,
0.062582515, 0.043417882, 0.0459815, 0.047274206, 0.04994103, 0.043740917, 0.050258353,
0.04621124, 0.045381274, 0.048165023, 0.041930605, 0.0466371, 0.054179855, 0.059638973,
0.0777767, 0.043620087, 0.053287085, 0.059767924, 0.05371445, 0.0425768, 0.03696013,
0.052677028, 0.04406644, 0.03599347, 0.037248105, 0.0398307, 0.044351388, 0.049955852,
0.052467745, 0.064061776, 0.033737607, 0.04064134, 0.08103245, 0.05525314, 0.036766987,
0.03880626, 0.03926036, 0.045607112, 0.17816319, 0.062341396, 0.04846766, 0.040651474,
0.0542766, 0.05896691, 0.05325216, 0.06311094, 0.050426517, 0.04695872, 0.043877397,
0.04578634, 0.06036872, 0.052497722, 0.20062074, 0.046554487, 0.051748108, 0.06485543,
0.052615996, 0.052922245, 0.060687337, 0.06456333, 0.06858513, 0.057017367, 0.055507552,
0.053072013, 0.058383826, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.029153762, 0.0317174, 0.034192346, 0.038675107,
0.032911703, 0.03493229, 0.032486305, 0.04540446, 0.03754418, 0.0494563, 0.0400314,
0.04144539, 0.03483925, 0.03131209, 0.029923823, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.040016808, 0.04545501,
0.044661384, 0.04703334, 0.048840787, 0.039291907, 0.040233098, 0.047809176, 0.08017981,
0.07218798, 0.042985294, 0.04645261, 0.0703104, 0.060642146, 0.040786646, 0.028322382,
0.028759468, 0.033570863, 0.035756726, 0.03993113, 0.039131556, 0.028809356, 0.04395161,
0.035647284, 0.042428844, 0.052578084, 0.040714998, 0.035505846, 0.031720567, 0.03722607,
0.06206128, 0.05929575, 0.07413619, 0.046099976, 0.0448787, 0.19442903, 0.05410672,
0.060563527, 0.04673461, 0.060151204, 0.054948535, 0.051537316, 0.050983608, 0.051185522,
0.05076518, 0.039876617, 0.07309441, 0.042481497, 0.031014254, 0.031785533, 0.03924134,
0.041494302, 0.039857924, 0.04250399, 0.046389114, 0.029190257, 0.032735, 0.05188507,
0.05410878, 0.03356238, 0.045571376, 0.063353874, 0.04716617, 0.047534503, 0.04530422,
0.045696728, 0.057604145, 0.05620518, 0.064639315, 0.065604135, 0.04078316, 0.04646716,
0.082713835, 0.13959791, 0.052815735, 0.04023068, 0.04834437, 0.0446142, 0.045778457,
0.051645823, 0.03845276, 0.040697712, 0.052851435, 0.07745454, 0.07400258, 0.041838765,
0.043574493, 0.065884314, 0.060029157, 0.041487116, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.044063278, 0.059919916,
0.054147325, 0.04396781, 0.04574382, 0.04833417, 0.05347427, 0.061959177, 0.06760994,
0.07845725, 0.042026974, 0.050177496, 0.09418708, 0.06705572, 0.04392616, 0.04260172,
0.05756692, 0.043743778, 0.04854733, 0.046239175, 0.057273902, 0.04660067, 0.14768898,
0.051120427, 0.069116585, 0.046829626, 0.050321285, 0.05439188, 0.058601283, 0.049641706,
0.036082376, 0.037028108, 0.045228343, 0.11149664, 0.047367368, 0.049670536, 0.041312173,
0.055372145, 0.056670308, 0.050949167, 0.056368064, 0.0543322, 0.047409285, 0.041871857,
0.03928471, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.051453482, 0.05567783, 0.05768302, 0.041193817,
0.04218512, 0.10609129, 0.046934698, 0.054202765, 0.042497426, 0.052728906, 0.04843383,
0.046526078, 0.047558535, 0.04760756, 0.044232722, 0.06161809, 0.055662613, 0.064355955,
0.044416595, 0.042556014, 0.14755733, 0.056179266, 0.056308895, 0.041717518, 0.054206967,
0.051586244, 0.051212505, 0.04615804, 0.047331806, 0.047435153, 0.05102455, 0.047891404,
0.104758866, 0.045530997, 0.058214054, 0.05158637, 0.045965753, 0.050536107, 0.058258157,
0.057792783, 0.06582848, 0.04532166, 0.05340528, 0.052553043, 0.05681246, 0.0382408,
0.039889738, 0.04230568, 0.055038862, 0.0686684, 0.03916665, 0.034058798, 0.042867657,
0.047291372, 0.050374467, 0.047100678, 0.0403265, 0.04360989, 0.04288553, 0.04321111,
0.048590906, 0.052249346, 0.055728048, 0.046274897, 0.044747118, 0.049160108, 0.052631456,
0.057018925, 0.045328025, 0.07766929, 0.046690024, 0.050647948, 0.051009823, 0.048325576,
0.045466855, 0.04801197, 0.051941186, 0.057076033, 0.05823616, 0.10698959, 0.044602945,
0.041009575, 0.051810954, 0.06816666, 0.06951169, 0.058273885, 0.046770725, 0.059846587,
0.055376887, 0.05565126, 0.041145016, 0.040491436, 0.042779345, 0.056828234, 0.056955945,
0.041861646, 0.037704885, 0.04315109, 0.050929785, 0.048698187, 0.050328467, 0.0446189,
0.048081316, 0.04520657, 0.04109302, 0.054778054, 0.05429217, 0.05429217, 0.039673563,
0.040146355, 0.050672006, 0.06493111, 0.04334618, 0.045694828, 0.048471913, 0.03893358,
0.043054678, 0.054525115, 0.06953552, 0.053077713, 0.07360183, 0.051734664, 0.19378406,
0.046753656, 0.050388318, 0.057235684, 0.063026376, 0.04650452, 0.061638102, 0.064919904,
0.06277158, 0.05480206, 0.05633585, 0.05438007, 0.06212332, 0.04643114, 0.058021594,
0.051258396, 0.04818369, 0.0478393, 0.044210605, 0.05674548, 0.052184325, 0.075374186,
0.090406395, 0.043368597, 0.054683063, 0.1582338, 0.078864105, 0.046212293, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.056679383, 0.059046615, 0.068622865, 0.050543196, 0.055169992, 0.112678364, 0.04970417,
0.06770581, 0.052291945, 0.06784773, 0.057753816, 0.049656264, 0.054332934, 0.054083984,
0.057666205, 0.048659425, 0.053063992, 0.056901928, 0.052293297, 0.044886, 0.05442305,
0.060730387, 0.057985872, 0.054308172, 0.064424925, 0.051000994, 0.06632165, 0.05291772,
0.049140077, 0.040679563, 0.031680707, 0.030803686, 0.0366068, 0.03941185, 0.032435864,
0.03689485, 0.034964, 0.039728682, 0.03569784, 0.04414219, 0.04085329, 0.043427866,
0.03504647, 0.031391393, 0.030940328, 0.06677235, 0.06329424, 0.07534642, 0.049922716,
0.05272367, 0.16339193, 0.058992516, 0.06770423, 0.052110378, 0.06938822, 0.05733182,
0.052110378, 0.05678172, 0.056450095, 0.05767932, 0.042891204, 0.059277993, 0.044383764,
0.037939753, 0.037728857, 0.03843881, 0.053340536, 0.04488499, 0.055283908, 0.06972592,
0.03413429, 0.040234584, 0.09331276, 0.07306634, 0.042242687, 0.053261604, 0.20451498,
0.053897187, 0.04291357, 0.045280132, 0.056526486, 0.056422155, 0.060735248, 0.053897187,
0.06641061, 0.040076368, 0.044938333, 0.0830038, 0.06636219, 0.046942264, 0.030159006,
0.030712223, 0.035337873, 0.036371265, 0.031454537, 0.03631255, 0.03568992, 0.042766217,
0.036064364, 0.04876845, 0.037815012, 0.045549978, 0.03466335, 0.032271292, 0.030089775,
0.047086373, 0.06404116, 0.051392127, 0.047078963, 0.048807055, 0.046360016, 0.05311168,
0.056324303, 0.06930171, 0.077619135, 0.044828508, 0.05543311, 0.1252989, 0.06564895,
0.04565062, 0.054411966, 0.055579934, 0.06426223, 0.06056127, 0.053480223, 0.059025805,
0.066909775, 0.07072004, 0.057743102, 0.079643324, 0.06358882, 0.076726526, 0.05839071,
0.05669822, 0.054057583, 0.049572896, 0.13275433, 0.045807216, 0.041341, 0.042845562,
0.05252923, 0.047076352, 0.054929715, 0.046293125, 0.05192735, 0.038350005, 0.040485997,
0.05938698, 0.059884783, 0.04638655, 0.037443865, 0.030322932, 0.04544309, 0.03408492,
0.03706312, 0.047493607, 0.033715174, 0.035527464, 0.033713263, 0.046599645, 0.078412175,
0.046778906, 0.032522388, 0.031630732, 0.041621152, 0.039185904, 0.050521165, 0.05127265,
0.047353275, 0.09605852, 0.043495916, 0.036738493, 0.051275034, 0.07296341, 0.069364265,
0.055531375, 0.0408641, 0.05407078, 0.049359642, 0.051644336, 0.047292996, 0.05055282,
0.06323921, 0.04977476, 0.052218303, 0.048866622, 0.05083726, 0.05377053, 0.14425215,
0.08101886, 0.053376503, 0.058917724, 0.07626498, 0.066799395, 0.049852796, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.05065415, 0.05353627, 0.054497793, 0.036789916, 0.0392101, 0.051607504, 0.052312385,
0.0509986, 0.04763702, 0.050559964, 0.038419113, 0.04111983, 0.047183912, 0.06693969,
0.05318211, 0.050629262, 0.06151531, 0.05383487, 0.06185395, 0.06230144, 0.06991727,
0.04911835, 0.1881226, 0.056124426, 0.06952439, 0.056624733, 0.04905803, 0.056461107,
0.060254246, 0.05466001, 0.06661405, 0.052761827, 0.20373613, 0.043824762, 0.049200416,
0.059875492, 0.05925714, 0.050121725, 0.060268123, 0.06575474, 0.059875492, 0.053829223,
0.058841195, 0.05574105, 0.060298644, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.05121295, 0.1087883,
0.050484095, 0.03591844, 0.040065724, 0.04998393, 0.04590787, 0.04448552, 0.043078836,
0.048497688, 0.03587851, 0.038970325, 0.050836798, 0.054285076, 0.04717539, 0.034582667,
0.033484083, 0.043531112, 0.041312143, 0.048101068, 0.05389729, 0.03265253, 0.04811782,
0.039067864, 0.0530203, 0.072102144, 0.04249178, 0.036765296, 0.037614137, 0.05331909,
0.041027177, 0.0511083, 0.049088325, 0.047594193, 0.10036008, 0.043390412, 0.03808398,
0.05384299, 0.072611615, 0.078570135, 0.05645103, 0.042179577, 0.05652439, 0.05379541,
0.05105736, 0.04264922, 0.055254888, 0.050002273, 0.044253834, 0.050762393, 0.043183323,
0.044928942, 0.04781478, 0.092615746, 0.06572988, 0.04611763, 0.046507638, 0.07924396,
0.06501349, 0.041944977, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.041364565, 0.04675228, 0.043850273, 0.0419384,
0.045020454, 0.048489355, 0.039805915, 0.047953364, 0.045150317, 0.053448334, 0.04026315,
0.037886165, 0.04336474, 0.052191004, 0.07349447, 0.05108208, 0.062262755, 0.052427262,
0.058653835, 0.058276113, 0.073540665, 0.050198007, 0.19890766, 0.05557853, 0.06253776,
0.055547163, 0.049958233, 0.05564312, 0.060093675, 0.053345785, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.040176533,
0.054420967, 0.043349583, 0.040492345, 0.04355206, 0.041610353, 0.041027144, 0.05440109,
0.05186768, 0.080578096, 0.041539405, 0.044729784, 0.052987833, 0.051275276, 0.04806119,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.05289295, 0.1314001, 0.050730534, 0.044766996, 0.046297606, 0.060632784,
0.052752722, 0.060632784, 0.050079387, 0.058501657, 0.043053083, 0.046410155, 0.06625886,
0.06558827, 0.050910637, 0.032162122, 0.033345502, 0.037952468, 0.039537825, 0.042780455,
0.05155908, 0.03160704, 0.050869085, 0.03497295, 0.048449118, 0.064723164, 0.03920808,
0.03410315, 0.035001665, 0.04589935, 0.043925844, 0.051897068, 0.051844396, 0.046149805,
0.08557387, 0.039892334, 0.03669768, 0.049403846, 0.06986759, 0.06685558, 0.051706374,
0.04065913, 0.05509992, 0.05414354, 0.04704469, 0.044349156, 0.04926654, 0.060658775,
0.050058343, 0.061392833, 0.048172183, 0.045795124, 0.052302007, 0.13669541, 0.08060529,
0.05430936, 0.0521606, 0.06529328, 0.060974207, 0.05537763, 0.043171983, 0.04259689,
0.05625371, 0.037095077, 0.035137456, 0.07977598, 0.06340778, 0.046988662, 0.040728368,
0.04687809, 0.04231415, 0.08083333, 0.04337685, 0.041765153, 0.037129868, 0.05141583,
0.057129763, 0.054564875, 0.05148307, 0.05780586, 0.054217167, 0.04633646, 0.05613014,
0.054848053, 0.06796214, 0.051239576, 0.047109216, 0.05352757, 0.062933765, 0.11085838,
0.04617394, 0.060776055, 0.048320673, 0.056274783, 0.057164785, 0.066787675, 0.04802738,
0.23796347, 0.05324083, 0.0645738, 0.052665193, 0.04620853, 0.05255921, 0.057474412,
0.051789265, 0.061946493, 0.03822176, 0.06534701, 0.036319353, 0.03400893, 0.046052963,
0.062785536, 0.03542913, 0.039669737, 0.040996436, 0.04319037, 0.04257865, 0.03805314,
0.04275779, 0.037242275, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.056961674, 0.16275047, 0.050174326,
0.045422602, 0.044447627, 0.059219338, 0.05521922, 0.06228458, 0.05082497, 0.05720886,
0.043502573, 0.04687555, 0.06152193, 0.06930976, 0.04872001, 0.06640665, 0.052452207,
0.074999645, 0.045629025, 0.040740702, 0.0624281, 0.19262101, 0.05626752, 0.049230535,
0.061336417, 0.046476945, 0.07747916, 0.0567001, 0.061623786, 0.045624606, 0.048323568,
0.05058017, 0.059798297, 0.03744995, 0.038532287, 0.06949261, 0.048096333, 0.055296175,
0.042919166, 0.047524776, 0.04144593, 0.043708332, 0.044420566, 0.055314228, 0.046259567,
0.05683344, 0.047551364, 0.0636025, 0.05005012, 0.051100187, 0.057087865, 0.050567724,
0.053317707, 0.050077215, 0.06074375, 0.051016223, 0.047041226, 0.048367, 0.06267369,
0.13261919, 0.044510335, 0.060995128, 0.048253283, 0.056388587, 0.0563361, 0.070701875,
0.04528612, 0.1703304, 0.053885683, 0.06408444, 0.050910104, 0.046142023, 0.052443273,
0.058420047, 0.05129372, 0.04634511, 0.045267623, 0.04893408, 0.0406039, 0.04341707,
0.050750855, 0.03903751, 0.045771446, 0.040987376, 0.05106337, 0.04126704, 0.037619486,
0.038129088, 0.044798676, 0.075816326, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.045370918, 0.034702774, 0.050527968,
0.03410781, 0.030725723, 0.044125225, 0.08142644, 0.036479082, 0.03917611, 0.047549162,
0.036774106, 0.09641627, 0.042996287, 0.04348167, 0.034437723, 0.034243245, 0.041817773,
0.042430762, 0.035026886, 0.043554068, 0.038337413, 0.033711564, 0.04811888, 0.045657728,
0.11744799, 0.040887505, 0.0375991, 0.052502, 0.042389788, 0.040616103, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.049825363, 0.043544296, 0.06988233, 0.03787415, 0.037817046, 0.11857002,
0.057130396, 0.048561033, 0.04273985, 0.053312346, 0.052186206, 0.07556428, 0.04544182,
0.045432594, 0.045212757, 0.04562811, 0.036862396, 0.056575432, 0.0368383, 0.038702294,
0.05420018, 0.044626515, 0.04355032, 0.04163407, 0.055288244, 0.06343971, 0.050741237,
0.042260658, 0.039780155, 0.044213172, 0.06713508, 0.048474956, 0.082510546, 0.038883913,
0.03799457, 0.057866186, 0.069789246, 0.046454813, 0.043261725, 0.048046246, 0.04539397,
0.050933316, 0.041695282, 0.04775195, 0.043209255, 0.068347804, 0.048200097, 0.084538646,
0.04121384, 0.03931528, 0.055713326, 0.07488848, 0.045343753, 0.046136357, 0.051516853,
0.048590537, 0.054072544, 0.044627048, 0.050420277, 0.04277375, 0.05253643, 0.0563615,
0.061758354, 0.04264397, 0.04252397, 0.045310862, 0.057466332, 0.04560279, 0.064462334,
0.06967014, 0.042773683, 0.050821494, 0.061863206, 0.06410847, 0.041946977, 0.0630206,
0.05401852, 0.061544858, 0.05485915, 0.056139622, 0.059501212, 0.056297675, 0.05840663,
0.055766255, 0.07263042, 0.057648163, 0.053509705, 0.053696785, 0.067118876, 0.13517956,
0.059203997, 0.04999564, 0.07451952, 0.048295707, 0.05281575, 0.07463228, 0.06037934,
0.060176965, 0.05529948, 0.07647441, 0.084124, 0.07081262, 0.058055867, 0.053375967,
0.058710434, 0.052682515, 0.19238563, 0.051576506, 0.045030963, 0.04625831, 0.06267478,
0.05388539, 0.063982114, 0.050988995, 0.056721967, 0.044694826, 0.048086092, 0.0666037,
0.060231846, 0.045389604, 0.0459513, 0.121877596, 0.04712837, 0.03597609, 0.039499167,
0.046038847, 0.04234161, 0.045509577, 0.0435721, 0.04774481, 0.037254263, 0.037428007,
0.049940936, 0.05047699, 0.04335084, 0.0487091, 0.049610958, 0.059644714, 0.042273387,
0.0613525, 0.043859098, 0.039080095, 0.044492405, 0.061104096, 0.067631245, 0.04873684,
0.043108687, 0.05531464, 0.048191994, 0.046842948, 0.044530336, 0.048541408, 0.053398635,
0.042379472, 0.081374384, 0.042342644, 0.03803107, 0.044649933, 0.0643252, 0.06787869,
0.055262078, 0.042360645, 0.051947284, 0.04749461, 0.04947805, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.052728757, 0.047524374, 0.058533818, 0.041567747, 0.054091334, 0.044415195, 0.04443422,
0.045026507, 0.056491666, 0.07367398, 0.05073895, 0.048612803, 0.05454341, 0.04901666,
0.049055036, 0.042629316, 0.05529672, 0.04249102, 0.030882, 0.03172638, 0.061440315,
0.040253054, 0.04347084, 0.03379182, 0.040099125, 0.035221, 0.043431893, 0.040887002,
0.037365764, 0.03417689, 0.034495637, 0.028547825, 0.049308322, 0.036804687, 0.037122928,
0.04270755, 0.033443987, 0.034475073, 0.041561484, 0.042171862, 0.08033413, 0.04806698,
0.03417706, 0.032478124, 0.037548464, 0.054880705, 0.049397185, 0.058506947, 0.04172156,
0.050043713, 0.043529585, 0.04234844, 0.04391816, 0.05256849, 0.070088685, 0.045422114,
0.04517198, 0.054342743, 0.04753246, 0.044147935, 0.048282057, 0.064878024, 0.05002952,
0.05816159, 0.05912707, 0.06305706, 0.04707216, 0.14074838, 0.056553125, 0.06463712,
0.050762337, 0.047513533, 0.056553125, 0.06127492, 0.052046467, 0.049657658, 0.05970331,
0.06082956, 0.049900323, 0.05234475, 0.04696862, 0.05491708, 0.05210314, 0.1140617,
0.08116696, 0.04943723, 0.056476217, 0.096989885, 0.077245034, 0.048206948, 0.047481857,
0.053499628, 0.06168343, 0.050977454, 0.05825927, 0.04852283, 0.049375787, 0.051681686,
0.15635392, 0.077344224, 0.05285935, 0.056078117, 0.07810121, 0.06843583, 0.05153376,
0.07571491, 0.054131296, 0.1834835, 0.047260173, 0.047226034, 0.066768415, 0.074053176,
0.047873426, 0.05769006, 0.06193314, 0.058296278, 0.05757849, 0.055507857, 0.056886576,
0.055596672, 0.046820216, 0.057147074, 0.0572798, 0.047284987, 0.051560476, 0.044496432,
0.050168015, 0.048719723, 0.10623935, 0.07808982, 0.047658738, 0.05174599, 0.09207502,
0.07504125, 0.044812057, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.055097375, 0.048095047, 0.07094125, 0.038185287,
0.03637352, 0.0765862, 0.056307293, 0.045701053, 0.04179897, 0.050842714, 0.046263397,
0.058904164, 0.046847023, 0.0458275, 0.044287417, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.043173995, 0.04979874,
0.047091432, 0.058643736, 0.055790726, 0.06337608, 0.047363173, 0.1447337, 0.052048247,
0.061803818, 0.054148246, 0.045732956, 0.050686985, 0.054299816, 0.048102923, 0.042594686,
0.057421587, 0.04410874, 0.05214967, 0.051697075, 0.060487885, 0.04414989, 0.16436198,
0.050200917, 0.063849725, 0.045491263, 0.04258276, 0.05018129, 0.057138458, 0.05037862,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.036996085, 0.036288407, 0.044244073, 0.035234332, 0.038001064,
0.053779468, 0.037961703, 0.046182245, 0.035263527, 0.056371786, 0.06253956, 0.04878271,
0.038088962, 0.035361055, 0.041218854, 0.054230943, 0.058571294, 0.060756814, 0.04816339,
0.04964902, 0.068478175, 0.05601918, 0.0735657, 0.057492185, 0.06378064, 0.047958482,
0.05196636, 0.054886848, 0.07203981, 0.06970333, 0.05220348, 0.14186718, 0.051975608,
0.041518565, 0.041953675, 0.06223341, 0.052275557, 0.057841342, 0.046465285, 0.054847773,
0.041315764, 0.046820346, 0.060489595, 0.05656616, 0.042437907, 0.05492351, 0.048924815,
0.072734006, 0.04556217, 0.040898774, 0.06397361, 0.122206286, 0.051107235, 0.04997114,
0.05949298, 0.04987813, 0.099591784, 0.058975186, 0.05828892, 0.045737505, 0.050649364,
0.04788823, 0.058034994, 0.03788357, 0.037801314, 0.05818419, 0.049682166, 0.056701973,
0.045739494, 0.050776817, 0.04202371, 0.046731688, 0.044301957, 0.057864875, 0.05038401,
0.044105023, 0.04591977, 0.057777178, 0.036487345, 0.055737324, 0.040539883, 0.035587616,
0.038019054, 0.05635303, 0.05576753, 0.04710898, 0.039323673, 0.04854764, 0.042757664,
0.045204245, 0.0437026, 0.046340626, 0.05084182, 0.04195015, 0.04515696, 0.04409718,
0.04044972, 0.053935423, 0.050525837, 0.08742929, 0.041044228, 0.04218381, 0.057346083,
0.04715503, 0.043683216, 0.039943963, 0.04561979, 0.044822857, 0.044575635, 0.07689961,
0.04259918, 0.036631797, 0.05387959, 0.059415765, 0.06860741, 0.05956387, 0.04172075,
0.05100887, 0.047986012, 0.04925221, 0.048531346, 0.12800603, 0.046924688, 0.036625765,
0.04012836, 0.052123826, 0.045552418, 0.050154567, 0.04316831, 0.05259554, 0.03768571,
0.039024785, 0.055925574, 0.051840905, 0.04313424, 0.05256013, 0.16380326, 0.048084475,
0.047284048, 0.048075356, 0.05577767, 0.052838527, 0.0652616, 0.052526653, 0.05893035,
0.04419776, 0.04755522, 0.06694345, 0.06813107, 0.048444178, 0.033238005, 0.026312364,
0.04733069, 0.030618938, 0.02969952, 0.041306347, 0.03313404, 0.02903209, 0.035274684,
0.037304442, 0.062468335, 0.04864685, 0.031409528, 0.028781574, 0.03254381, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.04876657, 0.060918875, 0.050753307, 0.05429276, 0.054499727, 0.070578516, 0.049948297,
0.20228946, 0.05408862, 0.0643356, 0.053031925, 0.046725783, 0.055205334, 0.06034742,
0.051573332, 0.053619295, 0.059319954, 0.064625084, 0.04925569, 0.05470546, 0.049949337,
0.055594563, 0.051376782, 0.118011996, 0.078739986, 0.052373882, 0.05758952, 0.08103734,
0.07237366, 0.051435854, 0.03866972, 0.05283246, 0.048754975, 0.052126773, 0.071846835,
0.041981496, 0.03801598, 0.053574648, 0.06768668, 0.06207175, 0.05143705, 0.04073869,
0.054281283, 0.05327235, 0.051254064, 0.0458279, 0.04100102, 0.055135056, 0.03929621,
0.045226105, 0.049998853, 0.04035948, 0.049313597, 0.04203278, 0.059820347, 0.056207545,
0.04443229, 0.045857877, 0.044670604, 0.057078738, 0.071143046, 0.04815931, 0.106960766,
0.04122324, 0.03898065, 0.05668997, 0.07872573, 0.044960488, 0.04720862, 0.04979509,
0.047380716, 0.054208923, 0.045088727, 0.05089895, 0.044399902, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.042989697,
0.04724544, 0.042946164, 0.05439754, 0.049713302, 0.053884316, 0.04570982, 0.075821854,
0.046297327, 0.051915813, 0.054191574, 0.045470763, 0.04806478, 0.048379503, 0.046382666,
0.049849723, 0.058563247, 0.05607109, 0.050723232, 0.058819834, 0.049808174, 0.05161126,
0.04979125, 0.089671895, 0.06846668, 0.0524644, 0.055434097, 0.07919311, 0.075231805,
0.046319865, 0.04177053, 0.040364366, 0.050709397, 0.041755445, 0.048993446, 0.055295814,
0.038248047, 0.04864655, 0.043094836, 0.061125096, 0.060671303, 0.045824476, 0.044272486,
0.04414948, 0.060853448, 0.038584605, 0.044429287, 0.05984247, 0.046115316, 0.05105396,
0.04909184, 0.039842058, 0.04877652, 0.08273614, 0.06064221, 0.051293477, 0.043436762,
0.0494598, 0.053425483, 0.046753444, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.03379195, 0.03211181, 0.04312827,
0.033469494, 0.037019905, 0.047762536, 0.03220712, 0.03893951, 0.032678302, 0.052272547,
0.06116539, 0.043184254, 0.03473477, 0.033955052, 0.044125307, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.04973887, 0.054558944, 0.060510796, 0.04850197, 0.06609839, 0.055505704, 0.048073716,
0.057689875, 0.058932163, 0.09415187, 0.06243765, 0.053152315, 0.06550771, 0.053178843,
0.05922337, 0.059929803, 0.05677488, 0.07441214, 0.044958163, 0.046834372, 0.144321,
0.05389223, 0.060440775, 0.048543263, 0.06356835, 0.054505404, 0.05329929, 0.052635003,
0.051331118, 0.051783547, 0.038458947, 0.043898974, 0.051882803, 0.050018378, 0.05261917,
0.0453932, 0.040160153, 0.051484358, 0.102136426, 0.066661224, 0.05116041, 0.043875854,
0.056963027, 0.057819035, 0.04696686, 0.036468405, 0.03069096, 0.040240735, 0.0290234,
0.03147537, 0.044346657, 0.035306897, 0.03614468, 0.02979527, 0.045979552, 0.048684184,
0.04126366, 0.032310225, 0.03140639, 0.03520968, 0.057117403, 0.046094917, 0.08657897,
0.038651865, 0.039962903, 0.064646274, 0.051192377, 0.044507127, 0.043135703, 0.055739224,
0.04825571, 0.048327256, 0.04860448, 0.045590937, 0.048985794, 0.04516857, 0.11302961,
0.044018697, 0.041093156, 0.042748988, 0.052820284, 0.048431616, 0.057919502, 0.046798617,
0.05445788, 0.039570272, 0.043190956, 0.06341187, 0.05692459, 0.042773645, 0.04050729,
0.045016363, 0.043745097, 0.04513126, 0.047453355, 0.046448305, 0.04305681, 0.058020543,
0.06416583, 0.05961044, 0.040651765, 0.042450957, 0.05293345, 0.07300618, 0.06234241,
0.040487666, 0.044216353, 0.062471543, 0.048732087, 0.061575588, 0.0464643, 0.0415021,
0.047846556, 0.12746318, 0.073027834, 0.055034604, 0.050899174, 0.06204474, 0.05561432,
0.053974863, 0.048314277, 0.057594255, 0.048492532, 0.058044475, 0.053422615, 0.06789469,
0.052677218, 0.11566201, 0.051801093, 0.058049623, 0.059813797, 0.052631125, 0.05279186,
0.05456306, 0.05169953, 0.06512491, 0.05172145, 0.07523948, 0.046480052, 0.042319786,
0.062344164, 0.16555098, 0.054650072, 0.05118146, 0.06175017, 0.050515298, 0.07649066,
0.055929165, 0.06332572, 0.047262166, 0.04309173, 0.050734106, 0.046062227, 0.055339795,
0.052239966, 0.058506873, 0.04627825, 0.09367766, 0.0510103, 0.055423252, 0.057177268,
0.047362488, 0.049908705, 0.052005176, 0.047250524, 0.037657835, 0.031490017, 0.049693283,
0.029390836, 0.03305359, 0.044471987, 0.034838088, 0.035750136, 0.03197184, 0.045696236,
0.058210075, 0.039912272, 0.0333603, 0.030546334, 0.035562515, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0493046,
0.03905875, 0.04780977, 0.04367297, 0.04293472, 0.04238799, 0.0429791, 0.040609602,
0.040881075, 0.050114747, 0.04323916, 0.038913917, 0.03928464, 0.04867883, 0.09245222,
0.048163347, 0.054264802, 0.055312, 0.04044897, 0.042363595, 0.079363704, 0.04299816,
0.051132843, 0.041940387, 0.05041668, 0.046690464, 0.04681881, 0.04468233, 0.045211714,
0.04392581, 0.057572674, 0.040229153, 0.058417194, 0.03667197, 0.034383398, 0.05920263,
0.052485213, 0.040290248, 0.03458495, 0.044038814, 0.04082102, 0.037941188, 0.038503114,
0.03918901, 0.040213924, 0.066998936, 0.052615326, 0.15205799, 0.039804023, 0.038713824,
0.06370093, 0.061753146, 0.04681134, 0.047886238, 0.051827416, 0.04688514, 0.055180013,
0.04536752, 0.047886238, 0.04476892, 0.04666767, 0.05594336, 0.06039989, 0.04761395,
0.08627465, 0.045713034, 0.041874927, 0.051338863, 0.073896766, 0.08338172, 0.06366058,
0.047222298, 0.06196385, 0.05325837, 0.05536471, 0.04084024, 0.04247441, 0.047466706,
0.03608952, 0.038803745, 0.04444114, 0.04171031, 0.043276325, 0.03671835, 0.072691716,
0.046855655, 0.04288861, 0.042110495, 0.039473128, 0.04471447, 0.04155216, 0.05135218,
0.050337993, 0.042532325, 0.071587965, 0.040309127, 0.03716648, 0.04691883, 0.061778452,
0.06966907, 0.053760365, 0.04087967, 0.05613474, 0.047647014, 0.050039742, 0.045440976,
0.05366967, 0.05865537, 0.04841087, 0.097172976, 0.04584419, 0.04008958, 0.052231647,
0.0754692, 0.08067403, 0.06429705, 0.044880364, 0.057764884, 0.05469322, 0.055280607,
0.059581485, 0.056451242, 0.062273975, 0.054444544, 0.056841597, 0.063017264, 0.053302947,
0.05744351, 0.05174326, 0.06801247, 0.055019915, 0.049709715, 0.05262097, 0.06579821,
0.15307693, 0.062239204, 0.051451627, 0.2247476, 0.044835556, 0.04745707, 0.05951544,
0.055568386, 0.046990767, 0.059259597, 0.063563325, 0.06264263, 0.056140378, 0.0550517,
0.052657247, 0.057879493, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.051608633, 0.058657765, 0.05285612, 0.045930557,
0.053487066, 0.052350514, 0.045078337, 0.05239209, 0.04619223, 0.055862267, 0.046014935,
0.041405194, 0.048118044, 0.058643162, 0.095471285, 0.059775397, 0.054382913, 0.055704627,
0.038725823, 0.03899238, 0.09032674, 0.051239718, 0.05203684, 0.038627908, 0.047451444,
0.04378497, 0.046590388, 0.04261486, 0.045596205, 0.04115611, 0.048389032, 0.05821194,
0.058112517, 0.04487235, 0.049478628, 0.053162176, 0.05289028, 0.07270477, 0.067151256,
0.14541246, 0.04820346, 0.05441473, 0.0838073, 0.06453833, 0.052328985, 0.04150258,
0.05345111, 0.05028835, 0.04082849, 0.052115906, 0.049310666, 0.039501354, 0.049677715,
0.043464683, 0.06466228, 0.0528051, 0.040171105, 0.047615267, 0.043725155, 0.0481132,
0.06373985, 0.045895185, 0.06889337, 0.04013764, 0.038251337, 0.06889337, 0.056827042,
0.044621866, 0.03912761, 0.052240983, 0.04576102, 0.046245687, 0.044533662, 0.044533662,
0.046817444, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.050850827, 0.14011618, 0.05363648, 0.041036285, 0.043958027,
0.056909952, 0.05150792, 0.056138493, 0.0498892, 0.057326086, 0.041930217, 0.04451267,
0.062131077, 0.055814926, 0.045053296, 0.046383552, 0.05950191, 0.051598217, 0.04916344,
0.050711393, 0.04948594, 0.04871743, 0.07997035, 0.058165167, 0.13705692, 0.050182514,
0.04995399, 0.07091073, 0.059252124, 0.053144816, 0.06426882, 0.038475383, 0.06470717,
0.036855854, 0.033274934, 0.048819263, 0.09941637, 0.037528124, 0.037762545, 0.046068497,
0.042047888, 0.06485669, 0.04198938, 0.045180526, 0.04064696, 0.050431404, 0.059722867,
0.061119027, 0.04839717, 0.05276998, 0.05980971, 0.054345213, 0.061169446, 0.053673342,
0.13759586, 0.05721099, 0.05721099, 0.067644574, 0.057362936, 0.060460765, 0.044070642,
0.09666724, 0.04030867, 0.036852323, 0.03792073, 0.042031, 0.04436127, 0.047564704,
0.044916473, 0.049235206, 0.033592973, 0.034675278, 0.04984885, 0.057899073, 0.041757565,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.043783408, 0.048840433, 0.053315222, 0.060911153, 0.09868733, 0.045060582,
0.039722178, 0.052625503, 0.072711155, 0.06800802, 0.06569566, 0.04401059, 0.056321815,
0.05500022, 0.057150334, 0.039301842, 0.04591065, 0.050060008, 0.048078578, 0.050714742,
0.044553615, 0.03987914, 0.05181045, 0.08660986, 0.057404894, 0.0465657, 0.042121217,
0.04923088, 0.057557944, 0.050368004, 0.045825455, 0.056168422, 0.04707535, 0.04840328,
0.0479323, 0.088793136, 0.043247763, 0.07066504, 0.04239611, 0.053070128, 0.05314751,
0.046258286, 0.04131079, 0.04752733, 0.052463014, 0.0603683, 0.04449618, 0.059229784,
0.05974036, 0.062364038, 0.049823012, 0.050679132, 0.048229713, 0.055603284, 0.05650524,
0.06022459, 0.048560396, 0.050683886, 0.061829366, 0.12689194, 0.036059532, 0.052856155,
0.039001368, 0.044169333, 0.04148159, 0.05263978, 0.042605918, 0.08157596, 0.046723075,
0.0523828, 0.03878584, 0.03769455, 0.05617841, 0.056326274, 0.038496535, 0.06924279,
0.054185793, 0.2091401, 0.044365052, 0.05031547, 0.060477816, 0.05484946, 0.050196182,
0.06391656, 0.0671474, 0.05855836, 0.053284787, 0.05464779, 0.051826663, 0.05784577,
0.039559674, 0.044592988, 0.043472126, 0.04789006, 0.061786335, 0.0468374, 0.03763311,
0.06547093, 0.057356358, 0.07336788, 0.05353905, 0.041126374, 0.048008516, 0.049868315,
0.04795576, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.048454676, 0.15636212, 0.04735066, 0.040093616, 0.043970596,
0.04952021, 0.049336385, 0.056578007, 0.04965426, 0.06013429, 0.036922097, 0.040001277,
0.066031516, 0.064032204, 0.04717211, 0.0336122, 0.028108263, 0.047034487, 0.03635175,
0.03776835, 0.03963857, 0.029199926, 0.03088417, 0.03961167, 0.03835826, 0.080070265,
0.04528828, 0.033785153, 0.030934103, 0.03413562, 0.041545134, 0.046662226, 0.05015441,
0.058122516, 0.13231795, 0.04327649, 0.037359513, 0.05040713, 0.06828103, 0.06855702,
0.06054836, 0.04187464, 0.05518311, 0.05228274, 0.051559146, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.051396277,
0.051613573, 0.05861518, 0.048165664, 0.047270592, 0.13480195, 0.054929312, 0.07606072,
0.04725718, 0.055739887, 0.06330132, 0.059832986, 0.046069197, 0.053042457, 0.05668401,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.037665218, 0.05621976, 0.03970065, 0.04426219, 0.041795876, 0.053211145,
0.04426219, 0.065610915, 0.044659417, 0.047741495, 0.037738446, 0.037789393, 0.05178219,
0.058520243, 0.039229047, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.043698434, 0.043842416, 0.047136962, 0.045678984,
0.043050498, 0.06000586, 0.05109068, 0.054738842, 0.04511676, 0.056184642, 0.050103933,
0.085591815, 0.045864213, 0.045638125, 0.04483365, 0.045263194, 0.059943985, 0.04627753,
0.039406296, 0.037785444, 0.042343542, 0.06396711, 0.04774252, 0.053751953, 0.06487966,
0.03705816, 0.053402, 0.076547004, 0.05989503, 0.039546285, 0.047062013, 0.105368026,
0.04002394, 0.035195928, 0.036888413, 0.043462373, 0.0438277, 0.049545053, 0.040513393,
0.048217174, 0.035083767, 0.034967147, 0.0453507, 0.05321733, 0.043871287, 0.040655643,
0.0345875, 0.063166454, 0.043717287, 0.044957668, 0.053338718, 0.03812861, 0.042075,
0.047003534, 0.049934268, 0.15945363, 0.05430336, 0.03941094, 0.038157627, 0.045829196,
0.042706538, 0.05201731, 0.052865528, 0.059363104, 0.09429997, 0.046983935, 0.04116916,
0.05544854, 0.0757127, 0.06947259, 0.062063437, 0.043786526, 0.056137048, 0.055224605,
0.05459261, 0.04375994, 0.053008758, 0.05919288, 0.05279835, 0.05595973, 0.046511415,
0.04507005, 0.05119218, 0.15144305, 0.083336785, 0.051641405, 0.049834177, 0.073966525,
0.0683735, 0.0464868, 0.057479203, 0.060635604, 0.06049844, 0.04920067, 0.047797207,
0.1515198, 0.053009085, 0.06570472, 0.04585732, 0.055537548, 0.053757243, 0.05026695,
0.04895917, 0.055171866, 0.052730296, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.041896347, 0.052739944, 0.047034655,
0.05040453, 0.04657303, 0.06227748, 0.05434427, 0.18121296, 0.053771302, 0.06250529,
0.05289317, 0.05280006, 0.059413344, 0.06157233, 0.04586512, 0.060696136, 0.05040062,
0.16517386, 0.04972882, 0.055848144, 0.06148678, 0.053088486, 0.05178863, 0.07209964,
0.06739625, 0.07533416, 0.056950357, 0.05956253, 0.056523394, 0.06392219, 0.04686364,
0.050282303, 0.05339465, 0.039895352, 0.04479365, 0.057624582, 0.043793026, 0.052712664,
0.04212837, 0.07765596, 0.052929077, 0.056390148, 0.049993504, 0.0450397, 0.04798949,
0.036249977, 0.049998358, 0.038202696, 0.03986849, 0.03941128, 0.037835866, 0.043570686,
0.04702047, 0.051299, 0.055185866, 0.037604794, 0.04521924, 0.085612655, 0.05405112,
0.03584038, 0.05216167, 0.17531063, 0.049375046, 0.04514191, 0.04657228, 0.05685951,
0.05500767, 0.07117037, 0.05050667, 0.063570864, 0.04162692, 0.044040814, 0.0647374,
0.066259734, 0.04828804, 0.04771814, 0.06541387, 0.050170276, 0.045765504, 0.042708978,
0.044046428, 0.059619144, 0.050650746, 0.06522051, 0.085069716, 0.041381024, 0.053323895,
0.097463354, 0.07663647, 0.045481097, 0.06374829, 0.04099582, 0.052531954, 0.048316505,
0.053143825, 0.041786585, 0.04302049, 0.038129322, 0.04843631, 0.04828074, 0.057163212,
0.04456599, 0.043012068, 0.047558874, 0.06816168, 0.064973116, 0.048156753, 0.062321525,
0.051772006, 0.050917286, 0.05589528, 0.051491268, 0.053122748, 0.046722803, 0.061442245,
0.05394204, 0.046265163, 0.04676317, 0.0573326, 0.12846819, 0.037495807, 0.053499173,
0.04185173, 0.05229037, 0.050451104, 0.056815438, 0.043107115, 0.1255158, 0.051369376,
0.0630159, 0.043354034, 0.041448716, 0.05446065, 0.060493685, 0.0445359, 0.06308031,
0.043711424, 0.06102014, 0.056893863, 0.06381555, 0.048090022, 0.05043666, 0.046975065,
0.054196205, 0.057896696, 0.06112999, 0.04811392, 0.05035267, 0.060092315, 0.11787244,
0.054992698, 0.19144085, 0.05286512, 0.04510575, 0.046885334, 0.05876529, 0.056749362,
0.06640784, 0.05129913, 0.067053035, 0.04234904, 0.046128754, 0.0698621, 0.067053035,
0.051132437, 0.040683024, 0.045573164, 0.043450564, 0.046011426, 0.0470484, 0.041614648,
0.04516674, 0.04953134, 0.056019645, 0.058402825, 0.04049141, 0.044118732, 0.07375165,
0.06273121, 0.039138857, 0.047733318, 0.050592866, 0.054716118, 0.048928656, 0.054160267,
0.04933322, 0.04844397, 0.06930202, 0.0824569, 0.14479506, 0.054508142, 0.057808835,
0.07392949, 0.06647678, 0.051573273, 0.050063517, 0.065896906, 0.051576402, 0.045777727,
0.04384667, 0.045475475, 0.06661802, 0.052157305, 0.067265816, 0.09270742, 0.041667655,
0.05329143, 0.108882844, 0.08544432, 0.046624642, 0.04381629, 0.043367106, 0.05267098,
0.03666732, 0.035391156, 0.043813556, 0.04517153, 0.04498491, 0.05460355, 0.07443098,
0.037369978, 0.050317414, 0.058028653, 0.049757693, 0.041599605, 0.060187966, 0.060481228,
0.061921958, 0.0494191, 0.048485182, 0.18253002, 0.06687046, 0.07225125, 0.049033664,
0.05717858, 0.05951729, 0.06606788, 0.053455155, 0.059024096, 0.053576168, 0.032041855,
0.035097137, 0.039608616, 0.035215523, 0.040308375, 0.046836898, 0.03252794, 0.048155893,
0.036967736, 0.050450254, 0.05045739, 0.044629898, 0.04049082, 0.03499051, 0.039440256,
0.056348726, 0.051206846, 0.1118781, 0.047259916, 0.0622522, 0.056764502, 0.048484985,
0.052242603, 0.068268225, 0.06788837, 0.0664496, 0.050152943, 0.061476294, 0.058653098,
0.06312544, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04081892, 0.05140269, 0.05536045, 0.04695972, 0.04853078,
0.04141791, 0.04393475, 0.04873208, 0.10514324, 0.08120511, 0.046811704, 0.048456237,
0.07038407, 0.060910523, 0.045573395, 0.061937, 0.045091856, 0.05764376, 0.05369121,
0.055066667, 0.04856315, 0.052649032, 0.04809323, 0.05043007, 0.06334425, 0.054347914,
0.047314957, 0.04768132, 0.059707634, 0.13914697, 0.036779385, 0.033790763, 0.047064643,
0.040353574, 0.04689836, 0.04803678, 0.03314504, 0.043368008, 0.03972253, 0.05400413,
0.08214211, 0.04383077, 0.039358567, 0.03635403, 0.04865576, 0.03546127, 0.06723806,
0.034740463, 0.033074595, 0.033691913, 0.040082928, 0.03971545, 0.0407153, 0.038504634,
0.040470883, 0.03298134, 0.03567023, 0.05006846, 0.041563183, 0.032647807, 0.04724317,
0.08510535, 0.04107634, 0.03391767, 0.03415857, 0.044123065, 0.044761904, 0.044461872,
0.03916475, 0.043868497, 0.035215855, 0.038406555, 0.049095616, 0.048506536, 0.037179414,
0.047302876, 0.048893385, 0.058576334, 0.05585271, 0.109244905, 0.045182373, 0.039108977,
0.05031823, 0.07444433, 0.07536818, 0.07322778, 0.04551551, 0.056237932, 0.053756844,
0.05622165, 0.041938845, 0.054985303, 0.0450243, 0.068563275, 0.09023295, 0.047128,
0.041316114, 0.06046303, 0.058608953, 0.058444675, 0.05277372, 0.041019864, 0.05214992,
0.05633406, 0.05886558, 0.05152682, 0.04674287, 0.08286004, 0.051130272, 0.06226278,
0.059096098, 0.047345664, 0.05493815, 0.062967196, 0.06026643, 0.072162956, 0.04721451,
0.049529273, 0.053221673, 0.05670449, 0.049183276, 0.044185188, 0.052306373, 0.03936122,
0.034902986, 0.048538703, 0.05333159, 0.048620995, 0.04691621, 0.06577831, 0.037754968,
0.054027554, 0.05074653, 0.05054417, 0.041500006, 0.042661805, 0.052781492, 0.05576707,
0.055626318, 0.08855332, 0.04738053, 0.040506545, 0.0567345, 0.07451092, 0.07133103,
0.06525461, 0.044974383, 0.055720184, 0.0539795, 0.0560614, 0.0471835, 0.11056777,
0.04521383, 0.037790228, 0.039245505, 0.05217461, 0.047587607, 0.052615676, 0.044093776,
0.052677397, 0.035661895, 0.03865813, 0.05877023, 0.061602745, 0.045223605, 0.043717094,
0.045013968, 0.043444674, 0.1971421, 0.08473022, 0.048773587, 0.041541737, 0.055069555,
0.051846553, 0.054429635, 0.05400702, 0.044510227, 0.04879169, 0.053261064, 0.063739784,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.04164632, 0.050549638, 0.047296185, 0.05722256, 0.05547295, 0.06347659,
0.046463117, 0.16397864, 0.055124808, 0.064294435, 0.060536154, 0.050294135, 0.04993511,
0.0526619, 0.0527321, 0.03809001, 0.04494575, 0.05558658, 0.048889045, 0.053825438,
0.043869518, 0.03957899, 0.04785854, 0.10205495, 0.07683184, 0.052763555, 0.049529847,
0.057916973, 0.05281776, 0.047703598, 0.032571565, 0.040922772, 0.05132245, 0.039732516,
0.047141425, 0.038486525, 0.033947106, 0.041936584, 0.07858106, 0.06318986, 0.0435578,
0.040889338, 0.049225517, 0.046858627, 0.041445825, 0.061446834, 0.049624965, 0.22012514,
0.04605332, 0.052902974, 0.057176545, 0.050419353, 0.04651748, 0.06804943, 0.0620359,
0.067499965, 0.05380019, 0.0553202, 0.05147452, 0.057553187, 0.05115722, 0.04800132,
0.06982853, 0.045125257, 0.042584334, 0.04239268, 0.05835169, 0.043711055, 0.07917281,
0.06806522, 0.04388871, 0.04944579, 0.05965426, 0.06140102, 0.041896336, 0.05111821,
0.049465343, 0.073568664, 0.05273007, 0.049997956, 0.04815128, 0.055515885, 0.049985014,
0.09389458, 0.07671812, 0.053433213, 0.0640886, 0.064873174, 0.060693398, 0.050193764,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.045207255, 0.05972346, 0.050462577, 0.054010555, 0.05488968, 0.069607295,
0.05244035, 0.18362041, 0.057743214, 0.0705373, 0.05631755, 0.05412297, 0.059381746,
0.061561756, 0.053246062, 0.037825607, 0.04939925, 0.04268173, 0.05085597, 0.050292443,
0.054798745, 0.04325746, 0.09382231, 0.04767686, 0.061207183, 0.0434085, 0.03995259,
0.050374046, 0.056326527, 0.04427652, 0.029727662, 0.04120376, 0.03201152, 0.035318222,
0.032971278, 0.041631896, 0.03566585, 0.056560747, 0.034815326, 0.03832658, 0.03298522,
0.031543717, 0.04310971, 0.039590307, 0.028579645, 0.05586298, 0.05808757, 0.059666112,
0.051700264, 0.050344393, 0.17782694, 0.055801015, 0.07575699, 0.04849145, 0.058604933,
0.061088923, 0.057083137, 0.04902455, 0.05712037, 0.057636622, 0.047344137, 0.063315816,
0.050365396, 0.047815114, 0.046097916, 0.04510894, 0.059577838, 0.052577827, 0.06761052,
0.07406321, 0.0426549, 0.058930006, 0.11600658, 0.069408014, 0.043133333, 0.0356726,
0.031046195, 0.04960486, 0.035486907, 0.038699236, 0.04468487, 0.034353096, 0.036254343,
0.04320472, 0.04267373, 0.06887482, 0.04601004, 0.03714888, 0.033160064, 0.037245225,
0.055955764, 0.03824417, 0.043536603, 0.04365968, 0.043896362, 0.037099198, 0.038857136,
0.03606374, 0.039708976, 0.0442668, 0.045480583, 0.03862365, 0.039178237, 0.044080414,
0.07134353, 0.04936303, 0.1251443, 0.048810877, 0.040308204, 0.041132588, 0.057088375,
0.05022725, 0.057948966, 0.045957748, 0.057206552, 0.039688885, 0.044938978, 0.066076,
0.055772085, 0.043938976, 0.04528172, 0.064644866, 0.049322758, 0.04592897, 0.049627114,
0.05122228, 0.05768101, 0.053426072, 0.07195536, 0.07962066, 0.0446043, 0.05490847,
0.12536956, 0.075347096, 0.04601896, 0.060382962, 0.0453132, 0.05554754, 0.053159524,
0.05677747, 0.047206193, 0.04906268, 0.045959536, 0.051793303, 0.05900047, 0.05435342,
0.048289925, 0.0486157, 0.056553476, 0.12208653, 0.039861638, 0.048023604, 0.04801219,
0.05729997, 0.07356077, 0.045583025, 0.03972787, 0.055200227, 0.061998118, 0.05840409,
0.05747005, 0.042310797, 0.05030762, 0.04966895, 0.051115826, 0.045941424, 0.044561688,
0.052479304, 0.046699643, 0.053358655, 0.048737586, 0.041388564, 0.045953173, 0.05648352,
0.06634183, 0.049839463, 0.0488784, 0.055945672, 0.04754868, 0.05076421, 0.040862918,
0.04821904, 0.046317417, 0.07096228, 0.09209626, 0.043969385, 0.039916184, 0.05605434,
0.059638023, 0.05720965, 0.05720965, 0.04082893, 0.049482975, 0.05168328, 0.05524852,
0.055184577, 0.09896831, 0.05302522, 0.03992574, 0.041922778, 0.068036154, 0.051320277,
0.05522898, 0.04524827, 0.05409346, 0.04311645, 0.050301585, 0.06101686, 0.053809803,
0.046704587, 0.04560539, 0.19028267, 0.043645926, 0.040198423, 0.04243756, 0.045077316,
0.046318457, 0.052295826, 0.048982717, 0.05292175, 0.036370516, 0.038601317, 0.059130855,
0.06135552, 0.041308347, 0.027828427, 0.02962356, 0.034318484, 0.04003335, 0.048050765,
0.039106153, 0.026525637, 0.039613254, 0.03525258, 0.040058464, 0.052147187, 0.031403076,
0.031181462, 0.03287421, 0.044754148, 0.054753598, 0.044848394, 0.172531, 0.044354204,
0.0482181, 0.05497839, 0.046426654, 0.04391017, 0.06101173, 0.058957756, 0.06741712,
0.05078251, 0.048878733, 0.047463953, 0.049653865, 0.042803466, 0.06292755, 0.046806168,
0.051216934, 0.05082424, 0.066023685, 0.046300825, 0.14846155, 0.054435704, 0.064240545,
0.04866661, 0.04620197, 0.053811364, 0.05959389, 0.049330615, 0.04591599, 0.052417506,
0.060202472, 0.052265797, 0.05352918, 0.046076585, 0.046889275, 0.049384262, 0.118105166,
0.07753348, 0.051929966, 0.054749466, 0.07162599, 0.06518386, 0.047064107, 0.053311642,
0.051374264, 0.06713252, 0.044330895, 0.05269437, 0.04718326, 0.043783724, 0.04551522,
0.057295248, 0.062264733, 0.054526314, 0.047354594, 0.055165634, 0.04876459, 0.048464306,
0.03610305, 0.035415396, 0.04943923, 0.035368416, 0.040630363, 0.0500871, 0.033625375,
0.04409076, 0.038719993, 0.050540026, 0.08556274, 0.041106418, 0.037655048, 0.03482355,
0.040425982, 0.06929667, 0.049744237, 0.15082194, 0.042843528, 0.044872105, 0.058327783,
0.06178891, 0.047459632, 0.054766033, 0.059235085, 0.05294432, 0.05128409, 0.05311336,
0.052816138, 0.055663895, 0.059461057, 0.06991814, 0.059140712, 0.05182147, 0.04971589,
0.13569196, 0.06904527, 0.07599694, 0.05247147, 0.058096044, 0.055029295, 0.071185,
0.058499128, 0.058106545, 0.04991733, 0.038979743, 0.052520923, 0.04396624, 0.050806668,
0.047897935, 0.054154675, 0.04839364, 0.15072398, 0.052529667, 0.061399944, 0.047687568,
0.046524394, 0.061618768, 0.061375055, 0.042831846, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.037970137, 0.03254297,
0.048775874, 0.0387545, 0.04445877, 0.052072696, 0.033396546, 0.041575838, 0.041160498,
0.049057946, 0.1146965, 0.04423047, 0.034016628, 0.03639431, 0.050102174, 0.04116243,
0.049825154, 0.05706844, 0.056670308, 0.053087443, 0.04665027, 0.04699094, 0.05137399,
0.14630763, 0.07462648, 0.05267414, 0.052823264, 0.07771217, 0.07047342, 0.045393076,
0.050289307, 0.06129635, 0.052199733, 0.044939056, 0.043289874, 0.048433475, 0.06619436,
0.0498633, 0.06476372, 0.07853971, 0.045549642, 0.06323572, 0.09441734, 0.0772667,
0.045838572, 0.03917631, 0.035259277, 0.052899648, 0.044370294, 0.05179958, 0.057765506,
0.036985755, 0.04764609, 0.045528647, 0.05624314, 0.1669392, 0.050242763, 0.038768046,
0.040020466, 0.054921545, 0.059035562, 0.05180967, 0.13559026, 0.05436286, 0.064576045,
0.06724757, 0.05282155, 0.057401475, 0.06972913, 0.06749338, 0.08589246, 0.056532744,
0.058397543, 0.05730814, 0.061801627, 0.04413287, 0.05948159, 0.048192505, 0.059137486,
0.052612577, 0.059287153, 0.052803766, 0.17024131, 0.05829792, 0.070244454, 0.052278988,
0.04895077, 0.060062945, 0.063519105, 0.048514806, 0.039556358, 0.042111807, 0.046440694,
0.045088354, 0.05592632, 0.04648715, 0.038754206, 0.06047075, 0.054126497, 0.06995432,
0.054146435, 0.046436727, 0.050860617, 0.046051458, 0.046716087, 0.047837928, 0.052420042,
0.047847897, 0.046390124, 0.045640305, 0.114404485, 0.04785521, 0.06417155, 0.043098077,
0.052303933, 0.05275669, 0.05259281, 0.044948265, 0.0490577, 0.0471429, 0.037717115,
0.049766734, 0.05123355, 0.051199034, 0.0515455, 0.044895798, 0.039705317, 0.05074535,
0.123363554, 0.07671975, 0.050717793, 0.046418652, 0.060372002, 0.059665356, 0.04361679,
0.0409956, 0.035404455, 0.05816519, 0.04492284, 0.05017565, 0.053856794, 0.036382206,
0.04376884, 0.04697879, 0.05075358, 0.2287136, 0.04895531, 0.03861177, 0.037865177,
0.04802569, 0.045447465, 0.05398764, 0.044027243, 0.04453359, 0.04318389, 0.07569557,
0.04576486, 0.06183722, 0.04259885, 0.046538103, 0.050653618, 0.050771553, 0.040398236,
0.04597431, 0.04502892, 0.04893797, 0.24480392, 0.04692538, 0.042305566, 0.044869814,
0.049817327, 0.05030852, 0.057586297, 0.05035973, 0.05743355, 0.03761154, 0.040625107,
0.062404923, 0.06668761, 0.047029454, 0.057820525, 0.046503562, 0.052943073, 0.054991446,
0.059208624, 0.046179507, 0.049815014, 0.047834348, 0.054320093, 0.05902464, 0.057584796,
0.047655456, 0.048862364, 0.06315225, 0.10631875, 0.04168886, 0.047008906, 0.048456248,
0.050884757, 0.048795477, 0.040072035, 0.04301199, 0.047899272, 0.084060766, 0.07182483,
0.048002392, 0.050772514, 0.059776712, 0.058806207, 0.045408636, 0.04025407, 0.045239482,
0.047373787, 0.051588222, 0.048431613, 0.06477266, 0.048453894, 0.1247196, 0.053219035,
0.05988785, 0.06062051, 0.05401617, 0.047150046, 0.048969056, 0.04624529, 0.0486143,
0.06511642, 0.052413266, 0.050900694, 0.048738807, 0.04673772, 0.058524277, 0.05754077,
0.0750148, 0.08672711, 0.0471703, 0.055636022, 0.13947476, 0.07612843, 0.04591278,
0.048213024, 0.06595733, 0.052896596, 0.052976437, 0.055404045, 0.07485142, 0.04834089,
0.20443322, 0.054913547, 0.07168019, 0.051609468, 0.04746092, 0.055640437, 0.062296413,
0.05332605, 0.035514154, 0.027806165, 0.046616852, 0.029417716, 0.030551916, 0.04319117,
0.034493376, 0.030998344, 0.033563364, 0.03774937, 0.058109652, 0.04219431, 0.031010581,
0.02886964, 0.031515915, 0.05364418, 0.050477337, 0.13583885, 0.045226023, 0.055875044,
0.062363714, 0.04554032, 0.048448388, 0.063116856, 0.05766247, 0.06381651, 0.052362286,
0.052362286, 0.048627228, 0.054045554, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.057172816, 0.059358127, 0.060180906,
0.0500883, 0.048530504, 0.21368921, 0.056297652, 0.071174785, 0.047431733, 0.058422834,
0.059069626, 0.05573687, 0.049867295, 0.058047466, 0.05493188, 0.049585775, 0.04972951,
0.05505364, 0.048170686, 0.045606736, 0.12185599, 0.060616642, 0.07252654, 0.047381766,
0.054655436, 0.056356058, 0.06517478, 0.046541683, 0.051680837, 0.051315006, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.04243256, 0.05010313, 0.048715934, 0.06508523, 0.08430422, 0.04664443, 0.04214657,
0.056664307, 0.06588283, 0.061966922, 0.058689896, 0.043057594, 0.052462682, 0.054206103,
0.05548618, 0.049023315, 0.0450326, 0.054237504, 0.044344023, 0.044358138, 0.05356667,
0.056708634, 0.051438864, 0.058885157, 0.07634662, 0.05155769, 0.091139376, 0.06089981,
0.051947642, 0.049334485, 0.059735447, 0.045011677, 0.061383054, 0.036467984, 0.03854177,
0.040562645, 0.04508066, 0.037746146, 0.04181913, 0.04792103, 0.0400197, 0.039221883,
0.042192023, 0.041366626, 0.03978001, 0.043244366, 0.05327519, 0.05200452, 0.044919733,
0.076776795, 0.04081488, 0.037440438, 0.049316097, 0.0657125, 0.07399852, 0.052828595,
0.040069863, 0.05430998, 0.054154485, 0.0518957, 0.0599507, 0.0462912, 0.056455474,
0.053322256, 0.058627766, 0.04760646, 0.05412661, 0.047387835, 0.061536048, 0.05815886,
0.058222212, 0.05387604, 0.054781266, 0.068866424, 0.096172, 0.061694503, 0.046500105,
0.11122155, 0.044891212, 0.052149218, 0.05121043, 0.050928816, 0.045304343, 0.059487518,
0.060890246, 0.07129694, 0.053631138, 0.056967318, 0.05303158, 0.06271215, 0.050699793,
0.06965507, 0.052056126, 0.048378095, 0.045110393, 0.04597191, 0.0645273, 0.05258426,
0.06496884, 0.07747154, 0.04400503, 0.057019826, 0.08962305, 0.07393342, 0.044871856,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.052710116, 0.06017174, 0.0538462, 0.051280756, 0.049416196, 0.12551968,
0.050956335, 0.07951661, 0.04765906, 0.055296537, 0.059924044, 0.05531923, 0.047179922,
0.054421835, 0.054981712, 0.044784743, 0.040465415, 0.05249574, 0.036398448, 0.037897818,
0.045776, 0.04865077, 0.044135034, 0.049711533, 0.071008615, 0.040227246, 0.061345678,
0.05153626, 0.045763917, 0.041793507, 0.036786158, 0.050010428, 0.043397572, 0.043781932,
0.047670178, 0.048986018, 0.041244566, 0.07545483, 0.06160861, 0.062345106, 0.045709785,
0.044677533, 0.059875812, 0.057316538, 0.042676758, 0.05543139, 0.06542006, 0.055220928,
0.05150024, 0.05003627, 0.114467286, 0.059084807, 0.07638279, 0.05229797, 0.056048166,
0.057964895, 0.06150689, 0.05245856, 0.059663422, 0.05323101, 0.043332573, 0.062481772,
0.04631439, 0.041180607, 0.038714733, 0.042132027, 0.055649865, 0.047115665, 0.054845795,
0.068198025, 0.03733237, 0.04438458, 0.07586753, 0.06633927, 0.038325857, 0.039632514,
0.08711267, 0.037212096, 0.035041705, 0.032632127, 0.043754924, 0.044617973, 0.048680194,
0.040009297, 0.04534373, 0.0339926, 0.038022406, 0.04886526, 0.04567172, 0.034546975,
0.046232477, 0.047823902, 0.05496271, 0.044770807, 0.048396092, 0.054980494, 0.04679263,
0.052199636, 0.055051804, 0.067435354, 0.051556922, 0.0660356, 0.056989312, 0.046880625,
0.048047956, 0.048861455, 0.06715793, 0.050055243, 0.04844643, 0.04898355, 0.04358637,
0.057194386, 0.05089256, 0.07119792, 0.0745145, 0.045369707, 0.05431658, 0.1042498,
0.08040947, 0.04729432, 0.04105199, 0.039601043, 0.045747716, 0.044854492, 0.051396713,
0.052364785, 0.040180095, 0.056100734, 0.04607735, 0.058115885, 0.05762087, 0.047406,
0.044220515, 0.04305834, 0.043897998, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.045167208, 0.046683248, 0.054264195,
0.062878735, 0.095006354, 0.044374887, 0.0382939, 0.052912895, 0.062904805, 0.07255181,
0.06162683, 0.04330057, 0.05535395, 0.051204663, 0.053740088, 0.04117088, 0.054860827,
0.049582615, 0.04780384, 0.049401652, 0.042959355, 0.04462148, 0.053235672, 0.10193719,
0.07121298, 0.04576651, 0.046829984, 0.08849732, 0.067965135, 0.040151544, 0.05630862,
0.061293174, 0.06386828, 0.05089516, 0.04749921, 0.153055, 0.06702474, 0.07239499,
0.050573207, 0.058323015, 0.054446395, 0.06793859, 0.05421707, 0.05497083, 0.050403904,
0.0473096, 0.054443076, 0.049616106, 0.05181626, 0.05253237, 0.046745326, 0.053487904,
0.054352313, 0.06964398, 0.06417172, 0.04580367, 0.049154833, 0.06612444, 0.13315846,
0.065932855, 0.046463497, 0.066542424, 0.049996085, 0.05095922, 0.0553627, 0.06659459,
0.047399558, 0.22747938, 0.055867404, 0.070093125, 0.04859371, 0.045305274, 0.05603763,
0.061559804, 0.051745594, 0.08809701, 0.060028315, 0.108022176, 0.04357136, 0.048794698,
0.070295826, 0.059717212, 0.051920507, 0.05217057, 0.06279796, 0.05221443, 0.051119085,
0.054298002, 0.054040577, 0.053943466, 0.03398755, 0.047532827, 0.039562155, 0.035029374,
0.03999321, 0.036770616, 0.036760118, 0.048696008, 0.055142928, 0.09095572, 0.035720337,
0.039363515, 0.058754414, 0.050496764, 0.03976486, 0.047562405, 0.054706465, 0.04879137,
0.052061144, 0.050891526, 0.044118326, 0.050185613, 0.053857993, 0.0706336, 0.069966495,
0.046063337, 0.04646485, 0.13032341, 0.08160825, 0.045295447, 0.0461524, 0.12547867,
0.044967707, 0.040733557, 0.04349999, 0.04557528, 0.046345837, 0.062259134, 0.049566917,
0.061186023, 0.03703943, 0.039339837, 0.062670246, 0.060144953, 0.044611096, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.037231416, 0.042424057, 0.039223652, 0.06839337, 0.1074303, 0.03888534, 0.034572855,
0.051784337, 0.05799287, 0.057820015, 0.047955703, 0.03559325, 0.046829406, 0.050110903,
0.049753796, 0.035937108, 0.049507145, 0.044471458, 0.04825035, 0.04737797, 0.042877,
0.042212665, 0.047323886, 0.112235785, 0.060702726, 0.04554391, 0.04461144, 0.07045046,
0.06778065, 0.039327357, 0.062023096, 0.064683475, 0.069440015, 0.05336575, 0.05111498,
0.13997613, 0.071176216, 0.07137338, 0.054198965, 0.058966238, 0.059987664, 0.07507428,
0.057223182, 0.057801515, 0.05359512, 0.043304563, 0.056363467, 0.043244448, 0.051668208,
0.054588705, 0.04463923, 0.04750101, 0.054919202, 0.07527146, 0.062947944, 0.04352157,
0.04567281, 0.08247843, 0.12629217, 0.049935218, 0.049407504, 0.07348157, 0.052213594,
0.056585822, 0.059618212, 0.07412363, 0.049955744, 0.18199264, 0.05683437, 0.07044078,
0.05312549, 0.04864922, 0.05520808, 0.0638707, 0.054492656, 0.061507314, 0.049047984,
0.10077215, 0.04225896, 0.052145615, 0.056444116, 0.046822913, 0.047701858, 0.054760847,
0.06364505, 0.053820405, 0.048859168, 0.04959534, 0.04940523, 0.059084862, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.04492919, 0.057358846, 0.048266407, 0.047589593, 0.050478015, 0.044098154, 0.05062744,
0.055352494, 0.07150034, 0.08580693, 0.043582443, 0.047845017, 0.20251116, 0.09402819,
0.047030248, 0.051832702, 0.19580775, 0.051358063, 0.04565161, 0.049077664, 0.051893663,
0.051988535, 0.065960325, 0.05649753, 0.069533974, 0.041655667, 0.043998756, 0.07420494,
0.067966886, 0.050661728, 0.043747783, 0.043061584, 0.043633994, 0.15268418, 0.06652005,
0.054583207, 0.043469578, 0.05586372, 0.047968287, 0.050704055, 0.056455825, 0.04835103,
0.04648982, 0.050466057, 0.062026273, 0.04724515, 0.050578933, 0.05731845, 0.06373736,
0.1151303, 0.04661386, 0.04073749, 0.053198017, 0.06839973, 0.06972985, 0.06163779,
0.04684195, 0.059446868, 0.0543528, 0.05428346, 0.037236206, 0.046180535, 0.04661836,
0.050935026, 0.0536866, 0.043000393, 0.0415579, 0.051149283, 0.11315273, 0.06739197,
0.04727528, 0.04559493, 0.08227519, 0.070341684, 0.04437542, 0.054995336, 0.052702088,
0.06212493, 0.04786371, 0.042373005, 0.08665003, 0.0737411, 0.059783425, 0.050237462,
0.052249216, 0.05089695, 0.0700321, 0.051620487, 0.053547468, 0.04608233, 0.03552618,
0.04553831, 0.03836333, 0.04613445, 0.04640928, 0.037132807, 0.041166883, 0.052590605,
0.06695093, 0.059236504, 0.03769975, 0.037088454, 0.0884158, 0.0887716, 0.03965193,
0.04565643, 0.06589347, 0.049175613, 0.052198336, 0.057338826, 0.0661839, 0.04619978,
0.2239404, 0.058228154, 0.07095925, 0.048924915, 0.04444308, 0.0550884, 0.06427615,
0.051493287, 0.09888187, 0.053697854, 0.18963023, 0.044066034, 0.047566995, 0.06408531,
0.061846837, 0.048527695, 0.052945763, 0.06519229, 0.05264978, 0.052980233, 0.053281855,
0.05569446, 0.058952793, 0.04213398, 0.056213625, 0.049183745, 0.04950357, 0.059392717,
0.04867189, 0.043526664, 0.06561279, 0.07709028, 0.13142636, 0.05030331, 0.047529843,
0.06938953, 0.06893668, 0.05433455, 0.047716126, 0.055654854, 0.052773945, 0.049804445,
0.051996227, 0.044861995, 0.052562248, 0.052781824, 0.07821653, 0.07911643, 0.047037583,
0.05043567, 0.14652935, 0.09646017, 0.050028093, 0.04931157, 0.16991244, 0.048551664,
0.04391579, 0.04563296, 0.05502424, 0.052355375, 0.06436709, 0.05189922, 0.06269315,
0.04107946, 0.04346077, 0.07207153, 0.06267112, 0.045641463, 0.047469128, 0.063181564,
0.051803485, 0.047221754, 0.048629258, 0.043470427, 0.055066466, 0.05013869, 0.07761618,
0.08491563, 0.042349797, 0.048045352, 0.12708475, 0.09017808, 0.049656227, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.04423494, 0.06337962, 0.050322708, 0.050553422, 0.054992136, 0.06298867,
0.04548772, 0.24711758, 0.053699665, 0.06802586, 0.048690088, 0.04486584, 0.0548141,
0.059713725, 0.05111393, 0.047107387, 0.05954115, 0.047012042, 0.05648659, 0.05768657,
0.046496376, 0.052489147, 0.06191973, 0.08071283, 0.07281231, 0.046183325, 0.048502877,
0.104991026, 0.1538755, 0.05407456, 0.04992434, 0.2075294, 0.051001623, 0.045104828,
0.04798565, 0.052082572, 0.052482218, 0.061851665, 0.05788882, 0.066298276, 0.04081003,
0.044433292, 0.072484754, 0.06832944, 0.04988289, 0.03774831, 0.049245004, 0.043143462,
0.037501518, 0.0377694, 0.036441606, 0.042818684, 0.04654942, 0.055363163, 0.07106394,
0.032953907, 0.037762076, 0.08514477, 0.06351832, 0.037091292, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.049361356, 0.056101147, 0.059005097, 0.04890757, 0.054585643, 0.049443223, 0.047634963,
0.067361124, 0.08189457, 0.12561694, 0.057892658, 0.05548358, 0.07268466, 0.06083378,
0.053149145, 0.06135345, 0.055191223, 0.06121595, 0.048119783, 0.041990973, 0.11963522,
0.06798414, 0.059459094, 0.046490427, 0.050367475, 0.050936166, 0.052253924, 0.04902058,
0.053338427, 0.04802095, 0.037766013, 0.040549815, 0.044751503, 0.15791789, 0.0522895,
0.0480211, 0.042638823, 0.052819848, 0.062225584, 0.051725414, 0.053680953, 0.050080676,
0.05741474, 0.047727752, 0.03856202, 0.074715175, 0.05799982, 0.13040707, 0.048342276,
0.0563515, 0.07666464, 0.057030533, 0.059035797, 0.061449908, 0.07540022, 0.062034715,
0.054943476, 0.059116688, 0.05886167, 0.067646526, 0.07997638, 0.05405633, 0.15115376,
0.046668176, 0.046393093, 0.063862875, 0.06606933, 0.049369384, 0.05450349, 0.06238404,
0.0561386, 0.052662674, 0.051961143, 0.05448083, 0.054685775, 0.036610316, 0.04590614,
0.04231149, 0.04920062, 0.05090987, 0.043186434, 0.03869845, 0.04917028, 0.08558842,
0.061898023, 0.044890005, 0.044544544, 0.060663395, 0.064786136, 0.042461693, 0.040832583,
0.05193809, 0.040868342, 0.053648178, 0.055494245, 0.041999817, 0.043237858, 0.053460173,
0.06288829, 0.057966106, 0.040892236, 0.041909426, 0.07884243, 0.09475108, 0.045339357,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.043138836, 0.044449262, 0.052382324, 0.06456838, 0.112772554, 0.044458948,
0.03814963, 0.04683327, 0.06500603, 0.05749072, 0.059704885, 0.04558077, 0.050061826,
0.04966324, 0.052486256, 0.07291743, 0.057307802, 0.12501568, 0.047166098, 0.054459978,
0.067639485, 0.055579714, 0.059319504, 0.06092801, 0.07487658, 0.058774184, 0.055073597,
0.0583783, 0.057937402, 0.06747196, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.04916069, 0.05007198, 0.0554994,
0.07279857, 0.08975804, 0.049906526, 0.04378683, 0.050752696, 0.06478709, 0.057979297,
0.056713942, 0.050431535, 0.059181523, 0.053903576, 0.049735133, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.047720168, 0.046196986, 0.053248893, 0.07476578, 0.10878198, 0.046276808, 0.040501457,
0.04915631, 0.061737884, 0.059867736, 0.0595664, 0.047682986, 0.053306155, 0.05101665,
0.054640647, 0.04481676, 0.064891696, 0.04861373, 0.052616905, 0.057441086, 0.06134948,
0.045530573, 0.18639784, 0.055737257, 0.073089905, 0.047485955, 0.044149987, 0.054738045,
0.060498126, 0.0504009, 0.040823963, 0.05195761, 0.05307348, 0.057239007, 0.05645843,
0.045808993, 0.045154646, 0.054642994, 0.20567526, 0.08438431, 0.052702054, 0.050436225,
0.084579915, 0.071374685, 0.04395094, 0.03951234, 0.055908293, 0.048019722, 0.047802117,
0.048939537, 0.044649176, 0.04523358, 0.048603844, 0.11970213, 0.06711362, 0.045542333,
0.047207814, 0.07223764, 0.0736319, 0.041892935, 0.081810266, 0.055369228, 0.13068819,
0.045485698, 0.050640605, 0.06611264, 0.05981389, 0.051756885, 0.05601636, 0.06323008,
0.055922255, 0.05191575, 0.052228354, 0.054434933, 0.058888227, 0.048556816, 0.06599301,
0.056975916, 0.053156696, 0.05137817, 0.050551552, 0.054352224, 0.060594633, 0.077936344,
0.07362774, 0.051093634, 0.05427605, 0.08882813, 0.0691852, 0.044437885, 0.0366356,
0.05158097, 0.0426196, 0.04607422, 0.0487848, 0.04052011, 0.03863083, 0.050612006,
0.07268235, 0.059312485, 0.042334948, 0.0405891, 0.074538365, 0.0595077, 0.036252137,
0.04701009, 0.046148196, 0.05152148, 0.046106048, 0.038316265, 0.06280005, 0.074062206,
0.05605367, 0.046485543, 0.048405875, 0.045452893, 0.05705632, 0.046476416, 0.0497482,
0.039962485, 0.047420796, 0.06523609, 0.05332625, 0.053800315, 0.05777691, 0.06986235,
0.051885772, 0.19003484, 0.057710998, 0.07138099, 0.053134147, 0.05077654, 0.059982672,
0.06368334, 0.053987995, 0.044024084, 0.059783258, 0.05188602, 0.051060505, 0.05575217,
0.06590847, 0.046249103, 0.18060106, 0.05296054, 0.068448484, 0.048320662, 0.044321354,
0.055789355, 0.06109248, 0.050131567, 0.045732114, 0.06542391, 0.05063472, 0.053159248,
0.05870972, 0.066699244, 0.046912804, 0.21943219, 0.0553456, 0.07402651, 0.050515488,
0.045252096, 0.05431605, 0.0602369, 0.05360341, 0.05168506, 0.047966484, 0.06269268,
0.04584227, 0.04026256, 0.08409647, 0.068591565, 0.054223906, 0.047805164, 0.05010813,
0.050922133, 0.06938515, 0.04776263, 0.048140883, 0.043771435, 0.04977682, 0.05789915,
0.052437402, 0.051621716, 0.05270159, 0.04661705, 0.05327046, 0.05379465, 0.06886246,
0.07392501, 0.04695299, 0.049741242, 0.12427866, 0.08076191, 0.0488457, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.045558468, 0.15393119, 0.046555944, 0.04278021, 0.04752729, 0.046161473,
0.046902232, 0.060005456, 0.058011487, 0.059985097, 0.037678074, 0.04040334, 0.07460117,
0.069980636, 0.04558813, 0.047211792, 0.0562689, 0.050771307, 0.054618023, 0.053872608,
0.04538026, 0.05101076, 0.057131656, 0.07988794, 0.079774305, 0.04736188, 0.048364315,
0.14583753, 0.08754446, 0.048295163, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.048645258, 0.0542507, 0.055840027,
0.061767656, 0.08953098, 0.048225503, 0.042761713, 0.0527384, 0.06739554, 0.062704846,
0.053978935, 0.048158407, 0.061978653, 0.055084504, 0.05140571, 0.047671717, 0.05780174,
0.056099642, 0.046840712, 0.047937047, 0.048426256, 0.054354623, 0.06873136, 0.07086112,
0.1253447, 0.047066655, 0.05451427, 0.08797035, 0.06903992, 0.05115846, 0.049612515,
0.04774689, 0.055946615, 0.07500884, 0.09196459, 0.048383836, 0.042499304, 0.049923707,
0.06148196, 0.05972939, 0.059451617, 0.05092337, 0.056899887, 0.0515676, 0.053326722,
0.04120902, 0.09587203, 0.0393877, 0.03803954, 0.039294455, 0.040248834, 0.043283768,
0.046640433, 0.046485834, 0.047070045, 0.03243429, 0.034231104, 0.049145147, 0.06007926,
0.039541725, 0.038093705, 0.11160825, 0.036162816, 0.03801607, 0.04194766, 0.037690923,
0.03870015, 0.051231273, 0.046022616, 0.048014894, 0.032921296, 0.034672625, 0.056136753,
0.056980237, 0.037092064, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.038963117, 0.05093302,
0.044083256, 0.038956385, 0.03789738, 0.040733423, 0.03991255, 0.047815006, 0.048243847,
0.051190678, 0.035581622, 0.038506616, 0.050414566, 0.045753587, 0.032527562, 0.042605113,
0.043254115, 0.048498, 0.080162175, 0.15368657, 0.042360604, 0.03697147, 0.048113994,
0.061319478, 0.059643447, 0.05892737, 0.04203615, 0.049273536, 0.049765747, 0.05536558,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.05193639, 0.06286127, 0.04998908, 0.04881099, 0.047837462, 0.06987816,
0.053216368, 0.0651042, 0.05046306, 0.049697682, 0.05220044, 0.052981496, 0.051452942,
0.053389993, 0.046076886, 0.0511041, 0.05849232, 0.056555904, 0.06127127, 0.060085088,
0.071709014, 0.055152323, 0.16453475, 0.058344502, 0.07128623, 0.062424272, 0.055224255,
0.057374362, 0.059980538, 0.056461073, 0.03878926, 0.049824554, 0.046513304, 0.04593819,
0.045097835, 0.039326895, 0.041456282, 0.05123383, 0.091301136, 0.07390226, 0.041969925,
0.043948784, 0.081486806, 0.06443145, 0.0389893, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.043389827, 0.06511581,
0.053839535, 0.04820751, 0.046538174, 0.04806618, 0.047722813, 0.054252923, 0.07215209,
0.06362905, 0.04729707, 0.04988063, 0.07379287, 0.057730816, 0.039841253, 0.04460889,
0.060149584, 0.04948527, 0.048258983, 0.048279345, 0.044949356, 0.05161953, 0.06207078,
0.068453334, 0.08431701, 0.04277481, 0.047837704, 0.12604755, 0.0799973, 0.0458097,
0.044970077, 0.045433912, 0.051395707, 0.16520521, 0.06968428, 0.055706818, 0.047743145,
0.05744175, 0.06558882, 0.05646784, 0.073743485, 0.058692668, 0.055817623, 0.05215585,
0.047356695, 0.08371407, 0.057879478, 0.13085395, 0.04667274, 0.051826082, 0.076005,
0.061292667, 0.05619895, 0.05767567, 0.06613129, 0.061730925, 0.057637002, 0.055473167,
0.055484653, 0.058014102, 0.042549048, 0.051286165, 0.04882987, 0.044681363, 0.046377752,
0.057721313, 0.049020134, 0.11382285, 0.047380555, 0.05880437, 0.046450842, 0.045948934,
0.05013933, 0.05554774, 0.0464871, 0.050762795, 0.052002426, 0.06146155, 0.048779845,
0.047587454, 0.05058748, 0.051963836, 0.062207796, 0.06898791, 0.12809621, 0.047283124,
0.05448548, 0.069266774, 0.06868198, 0.052043855, 0.049481332, 0.04785314, 0.054084476,
0.047276285, 0.039465256, 0.06173454, 0.07116674, 0.05555924, 0.049641963, 0.04947378,
0.04770487, 0.05903273, 0.049688987, 0.052378997, 0.041216534, 0.03837952, 0.05057397,
0.053962145, 0.0476257, 0.04721737, 0.04166092, 0.04189988, 0.049638227, 0.10572438,
0.077688836, 0.04654138, 0.04442869, 0.066340156, 0.06275609, 0.041585702, 0.04299771,
0.04544753, 0.047641836, 0.17371023, 0.06602411, 0.050942, 0.0452723, 0.05593081,
0.06232805, 0.05416631, 0.061880715, 0.05327373, 0.05470604, 0.050737496, 0.04395317,
0.046513986, 0.046930388, 0.05577586, 0.04600903, 0.04249913, 0.074056804, 0.05569077,
0.054796953, 0.05045683, 0.048529815, 0.05247726, 0.06501311, 0.044361386, 0.045853324,
0.043323126, 0.04865239, 0.22242841, 0.047820147, 0.04219652, 0.046405274, 0.051121887,
0.04932106, 0.061127927, 0.053594146, 0.06588435, 0.039488662, 0.04221597, 0.073008105,
0.0676279, 0.05025735, 0.053225648, 0.05832412, 0.04868722, 0.052898187, 0.056220107,
0.04831361, 0.060336035, 0.05231812, 0.07407563, 0.06825992, 0.04792894, 0.05499732,
0.07364153, 0.13619307, 0.06690124, 0.03599488, 0.049968876, 0.043995082, 0.046202898,
0.04757645, 0.0425295, 0.039757535, 0.04929868, 0.08049181, 0.05779087, 0.04267138,
0.042507228, 0.06798547, 0.06104836, 0.037164245, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.039926972, 0.04998325,
0.04250076, 0.046679907, 0.04955106, 0.03959935, 0.0447335, 0.04881165, 0.06891064,
0.069785185, 0.041007992, 0.04364402, 0.14317717, 0.10615198, 0.04434293, 0.04618029,
0.066677056, 0.051378753, 0.051651254, 0.053134676, 0.07144511, 0.051041417, 0.21858022,
0.055093236, 0.06479037, 0.051432442, 0.047237385, 0.056530524, 0.064303316, 0.050523955,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.08583041, 0.05608398, 0.1586725, 0.04523363, 0.048522696, 0.06586233,
0.06118199, 0.052236434, 0.054596286, 0.065596215, 0.054178808, 0.050647493, 0.053939655,
0.055833682, 0.059503715, 0.047312688, 0.056079116, 0.04546817, 0.052683413, 0.050577648,
0.04468484, 0.055196144, 0.05703862, 0.07254312, 0.06550252, 0.04196085, 0.04771921,
0.0971933, 0.13671938, 0.050330605, 0.06230643, 0.061263867, 0.06815462, 0.052350506,
0.049390122, 0.13734333, 0.06834516, 0.069417804, 0.053912632, 0.057375826, 0.061003074,
0.06925091, 0.05447041, 0.056553192, 0.054344963, 0.06494654, 0.059733905, 0.07119821,
0.05211413, 0.04637464, 0.10773045, 0.07917121, 0.06189746, 0.053045742, 0.056923267,
0.055668753, 0.061321136, 0.057067953, 0.05990265, 0.05290276, 0.07321182, 0.056116425,
0.15883388, 0.046631526, 0.053722654, 0.06765198, 0.05839966, 0.054429833, 0.06454074,
0.07025949, 0.05819118, 0.05239669, 0.059995458, 0.059952892, 0.06566579, 0.04452753,
0.04348164, 0.04716535, 0.05763119, 0.077529185, 0.040674943, 0.036373977, 0.04672636,
0.05503814, 0.05304256, 0.048778944, 0.04109182, 0.050814006, 0.047123246, 0.04633934,
0.032081127, 0.043040503, 0.03820839, 0.040564902, 0.045094732, 0.037550762, 0.03460261,
0.054874554, 0.05949936, 0.09245772, 0.038270768, 0.038309675, 0.060579076, 0.050581284,
0.03796374, 0.049811188, 0.048949823, 0.0557882, 0.07808169, 0.08778084, 0.049716502,
0.042706158, 0.048261452, 0.064944714, 0.055807326, 0.061601095, 0.05232411, 0.056992773,
0.05201527, 0.04968569, 0.04477396, 0.048698485, 0.05339714, 0.06849576, 0.17197491,
0.04465354, 0.039476912, 0.04971807, 0.066046596, 0.064651534, 0.05819209, 0.046479803,
0.053814303, 0.054635573, 0.05901539, 0.04487301, 0.062291887, 0.04376586, 0.051148366,
0.0512658, 0.04580837, 0.051215168, 0.0600757, 0.07305844, 0.06849908, 0.042217754,
0.047406662, 0.112636164, 0.13807754, 0.04878408, 0.08549362, 0.056154255, 0.14844015,
0.04743172, 0.05370444, 0.06932711, 0.059296567, 0.052949455, 0.059549835, 0.071846016,
0.06103757, 0.054975085, 0.058753617, 0.05843492, 0.062605634, 0.036356486, 0.05081328,
0.038192555, 0.039410193, 0.038782112, 0.035297368, 0.04355423, 0.042083237, 0.053713083,
0.06007969, 0.034795254, 0.04098289, 0.096704125, 0.06416692, 0.037059292, 0.034334112,
0.04026688, 0.035765644, 0.04181825, 0.040751975, 0.03427853, 0.039043203, 0.046570785,
0.058204133, 0.060603313, 0.033381578, 0.038247608, 0.060082607, 0.10063314, 0.043124966,
0.06337284, 0.04491088, 0.057834234, 0.043000713, 0.038008664, 0.06960175, 0.079066984,
0.045740187, 0.04214142, 0.04786604, 0.04696292, 0.05012584, 0.045559037, 0.049958795,
0.04385044, 0.069342285, 0.053154785, 0.06314596, 0.05012553, 0.045021083, 0.13513464,
0.06935573, 0.057550896, 0.045243368, 0.05461076, 0.05376421, 0.053162728, 0.048760623,
0.052891016, 0.053516705, 0.038551275, 0.042031415, 0.043838605, 0.037199322, 0.038858127,
0.041436933, 0.040502485, 0.04326241, 0.045681495, 0.05026921, 0.034423515, 0.03628672,
0.06094947, 0.051131252, 0.03739905, 0.03599334, 0.07704615, 0.037565645, 0.034788337,
0.038215242, 0.035007663, 0.036010724, 0.048045952, 0.046065874, 0.046534155, 0.030314788,
0.031548712, 0.056775354, 0.054349653, 0.03765192, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.034254767, 0.030692836, 0.04518432, 0.032235295, 0.035213556, 0.04656089,
0.035372276, 0.039841637, 0.03912651, 0.044264495, 0.05037064, 0.04986819, 0.038106933,
0.033691116, 0.037722893, 0.043174952, 0.051150315, 0.049182504, 0.069736585, 0.1689203,
0.04402277, 0.039227065, 0.054610975, 0.06727572, 0.07110867, 0.057595585, 0.04354289,
0.05914144, 0.0582173, 0.058052525, 0.045788553, 0.053509608, 0.051925972, 0.049520876,
0.05048172, 0.043502633, 0.051398143, 0.048655987, 0.13891114, 0.07590028, 0.046596367,
0.05070152, 0.1010838, 0.080817185, 0.04519785, 0.07511395, 0.058792803, 0.077809565,
0.052488573, 0.04741375, 0.11871236, 0.089151666, 0.05853487, 0.05278272, 0.060772013,
0.05815734, 0.0641919, 0.058645863, 0.059621986, 0.057493478, 0.06266069, 0.050326392,
0.060305502, 0.052038424, 0.05246468, 0.05739526, 0.06050991, 0.052752975, 0.05528591,
0.0654773, 0.054459315, 0.05523423, 0.052483167, 0.07640784, 0.10506934, 0.04364419,
0.057327442, 0.046401158, 0.054260105, 0.054546557, 0.062089704, 0.045224577, 0.2525023,
0.05291683, 0.06995361, 0.046761673, 0.04234633, 0.053216036, 0.05903881, 0.0520486,
0.07478729, 0.05322626, 0.15252186, 0.046288285, 0.055421602, 0.060095105, 0.056303766,
0.051473543, 0.06363502, 0.06971259, 0.060783852, 0.05118292, 0.059371132, 0.05878505,
0.06300147, 0.049324248, 0.051888235, 0.05785233, 0.039777894, 0.042362083, 0.055035785,
0.0468617, 0.047696467, 0.043616835, 0.06752548, 0.042344168, 0.04355825, 0.052305017,
0.04709396, 0.05067989, 0.03907981, 0.04460778, 0.048188414, 0.041247904, 0.048760176,
0.042275306, 0.04254438, 0.04142037, 0.061042592, 0.04874445, 0.04329516, 0.044452038,
0.06264672, 0.054803565, 0.041124675, 0.050567534, 0.20679693, 0.04873543, 0.044446025,
0.047383796, 0.05835529, 0.052833844, 0.07130182, 0.05205562, 0.061235946, 0.041790284,
0.0440006, 0.07187164, 0.065057814, 0.047014177, 0.032089356, 0.029530235, 0.041861698,
0.03797088, 0.042247932, 0.047783885, 0.032742288, 0.041814886, 0.038318474, 0.043526925,
0.07569247, 0.045655392, 0.035328906, 0.03301573, 0.039189458, 0.040707152, 0.042568035,
0.04497366, 0.045564134, 0.061199665, 0.039880574, 0.03480498, 0.04395693, 0.046164177,
0.05788982, 0.04495131, 0.038618546, 0.045450848, 0.040097777, 0.04288616, 0.0428205,
0.04971634, 0.049531803, 0.045749933, 0.051296692, 0.040356103, 0.044390887, 0.050586592,
0.106658205, 0.07338722, 0.044646475, 0.04520111, 0.09116494, 0.07770158, 0.043538556,
0.07286154, 0.05979417, 0.07453732, 0.05243333, 0.04782915, 0.105216295, 0.09609917,
0.060628094, 0.054836813, 0.06180545, 0.057684246, 0.06533192, 0.0606963, 0.06375505,
0.056173984, 0.051984522, 0.04201737, 0.047497153, 0.04589059, 0.046778224, 0.04601854,
0.051960576, 0.0403636, 0.048486978, 0.047952577, 0.046398785, 0.049949467, 0.046469864,
0.06816582, 0.06196434, 0.041106265, 0.058552194, 0.04580441, 0.052971985, 0.05255309,
0.059109773, 0.04785092, 0.26115832, 0.05378078, 0.068081714, 0.04569269, 0.043905772,
0.05772088, 0.06510834, 0.046602868, 0.07527906, 0.054893587, 0.16828725, 0.045805965,
0.052143738, 0.070261985, 0.05937663, 0.053922947, 0.0612788, 0.0716676, 0.060174048,
0.052593797, 0.05692691, 0.056743294, 0.060644396, 0.042410832, 0.04057579, 0.048755832,
0.04379257, 0.053267367, 0.04022586, 0.03926535, 0.03825377, 0.0539223, 0.061409723,
0.053554885, 0.04167753, 0.048104245, 0.046359185, 0.052103963, 0.044092935, 0.048329927,
0.048758302, 0.048664715, 0.044959426, 0.047151912, 0.04885243, 0.052461307, 0.06394961,
0.06716829, 0.04106176, 0.044913504, 0.07092766, 0.0680758, 0.045379136, 0.039517142,
0.09287817, 0.038489085, 0.03799965, 0.042484898, 0.043112017, 0.039351135, 0.060941286,
0.042379845, 0.048028044, 0.0357705, 0.034990076, 0.05348917, 0.04857744, 0.03841092,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.049415924, 0.05331102, 0.054612566, 0.066842705, 0.08710561, 0.048695866,
0.044242747, 0.051328924, 0.06563039, 0.06129909, 0.05262752, 0.049632758, 0.059447933,
0.056171943, 0.054101836, 0.042920526, 0.04770577, 0.05062401, 0.044516914, 0.045786653,
0.03839884, 0.04703103, 0.044985425, 0.10072997, 0.08466764, 0.042138606, 0.045406397,
0.07927875, 0.06490842, 0.043632347, 0.06664573, 0.058398932, 0.070282035, 0.052257717,
0.04713378, 0.14864267, 0.076561265, 0.06377905, 0.052159753, 0.05842174, 0.05854095,
0.06265812, 0.05593561, 0.06083478, 0.057430703, 0.055176627, 0.054017495, 0.051410053,
0.045810323, 0.04378843, 0.052301504, 0.07559602, 0.053254582, 0.05848474, 0.059368186,
0.04453726, 0.051406458, 0.07062064, 0.108852364, 0.055716112, 0.04716602, 0.05742509,
0.051861797, 0.051565357, 0.05266117, 0.06770807, 0.047699805, 0.17280379, 0.051951177,
0.06616064, 0.05088319, 0.046913702, 0.052837536, 0.058097232, 0.05267967, 0.06448466,
0.050912473, 0.10518764, 0.04236143, 0.04819906, 0.057855174, 0.049844824, 0.05183497,
0.057041056, 0.064871736, 0.05404435, 0.047132395, 0.050799645, 0.051893707, 0.059808783,
0.03869091, 0.047679838, 0.04580668, 0.04672626, 0.05125457, 0.043245375, 0.04123821,
0.050352152, 0.059514858, 0.06900729, 0.044336885, 0.042362988, 0.055179425, 0.049324453,
0.043087192, 0.03734566, 0.045425713, 0.04116443, 0.04406568, 0.04425033, 0.038793106,
0.042199094, 0.04849418, 0.0709189, 0.063092455, 0.039322417, 0.041103687, 0.07261712,
0.078775555, 0.03865966, 0.045911945, 0.14166507, 0.046718888, 0.043207873, 0.047833357,
0.04976111, 0.04615604, 0.065106794, 0.053660117, 0.059412323, 0.040708303, 0.041249793,
0.067633204, 0.061115026, 0.045474183, 0.04935387, 0.054209426, 0.05651577, 0.045085315,
0.04917182, 0.056570824, 0.051737197, 0.049015664, 0.065104164, 0.068229, 0.044394452,
0.048052546, 0.06264551, 0.059958607, 0.05392065, 0.047540512, 0.044844683, 0.04738686,
0.04591985, 0.04382224, 0.051367916, 0.05197858, 0.047922324, 0.047709297, 0.052449327,
0.044490945, 0.045015186, 0.046099093, 0.07493166, 0.07073659, 0.051035754, 0.043788522,
0.046929985, 0.05145158, 0.04789383, 0.050224677, 0.0519268, 0.04455258, 0.046648003,
0.053420573, 0.049351137, 0.052204337, 0.046176203, 0.06380889, 0.06919905, 0.047669675,
0.053268805, 0.05446897, 0.062771484, 0.057616733, 0.06827574, 0.05472602, 0.15696631,
0.060631834, 0.06944083, 0.0646763, 0.057410985, 0.05648273, 0.05762761, 0.05350325,
0.05404261, 0.04442336, 0.054475725, 0.046380356, 0.0436177, 0.054014515, 0.051074404,
0.047257263, 0.044422306, 0.051816843, 0.04792113, 0.046037845, 0.04346282, 0.059069082,
0.069534674, 0.04726235, 0.13119163, 0.045398314, 0.0449699, 0.046686657, 0.056747228,
0.052006643, 0.07238127, 0.049686622, 0.055461004, 0.04270232, 0.04487908, 0.0657773,
0.059287935, 0.0427948, 0.039615225, 0.043109946, 0.041541837, 0.04988753, 0.045415197,
0.04027326, 0.04274313, 0.04886799, 0.06100899, 0.067531325, 0.040149152, 0.04294013,
0.074309126, 0.067864455, 0.0422218, 0.039891407, 0.045857392, 0.04560531, 0.04724145,
0.058617912, 0.04841876, 0.040607393, 0.05222881, 0.055842116, 0.075962596, 0.044972863,
0.04051943, 0.05692462, 0.052752938, 0.04598501, 0.042491652, 0.04678943, 0.049651522,
0.03719032, 0.03685716, 0.045527566, 0.047153126, 0.043209422, 0.055492975, 0.06487573,
0.035855323, 0.04075297, 0.05764731, 0.056304615, 0.041847274, 0.050181884, 0.05582516,
0.058637016, 0.05135268, 0.060218796, 0.05681381, 0.05172465, 0.06393894, 0.067160726,
0.10112857, 0.06576568, 0.06309253, 0.07240747, 0.057855465, 0.057715207, 0.05725421,
0.04882708, 0.056994535, 0.04094322, 0.037205297, 0.07155633, 0.0783475, 0.049037263,
0.04336978, 0.04722224, 0.04380494, 0.05136153, 0.04863937, 0.052578304, 0.04240282,
0.03621192, 0.035077132, 0.049636677, 0.036996156, 0.043547615, 0.049257986, 0.03341641,
0.042516675, 0.039405413, 0.05143239, 0.07343011, 0.042398676, 0.03936372, 0.036040876,
0.04592191, 0.06599707, 0.047165785, 0.081172824, 0.038809452, 0.04742436, 0.049891766,
0.046412338, 0.046334922, 0.049652476, 0.060972277, 0.048784863, 0.042579643, 0.04544423,
0.0478545, 0.05319356, 0.059895273, 0.04655517, 0.09785606, 0.040475056, 0.047893688,
0.057978757, 0.04884217, 0.046631142, 0.054807715, 0.062381063, 0.05366576, 0.04780082,
0.05095884, 0.050431136, 0.056631878, 0.04764829, 0.05586591, 0.056602497, 0.04438934,
0.052158475, 0.042798843, 0.048969224, 0.04598911, 0.10893276, 0.079395376, 0.045524657,
0.049434554, 0.08050158, 0.0712951, 0.046496466, 0.056665517, 0.05627583, 0.054354023,
0.05505432, 0.05627333, 0.056973696, 0.06410477, 0.057702675, 0.071893044, 0.07087772,
0.052545443, 0.057779193, 0.06781012, 0.116482645, 0.08254173, 0.042081285, 0.03650655,
0.054014735, 0.047936577, 0.052059986, 0.06836714, 0.03894186, 0.05012844, 0.046025906,
0.054595273, 0.21660344, 0.05256764, 0.04034301, 0.04093426, 0.05332102, 0.047949374,
0.07862657, 0.04644735, 0.051625587, 0.04851593, 0.052139524, 0.050202873, 0.06593039,
0.05335764, 0.056501467, 0.04793994, 0.048082013, 0.06620186, 0.05990431, 0.043708563,
0.0435505, 0.07217608, 0.043191705, 0.046935115, 0.05001314, 0.047141545, 0.044176318,
0.061782, 0.050099667, 0.05234594, 0.042539716, 0.042024776, 0.056535624, 0.056535624,
0.043373507, 0.059330292, 0.049067345, 0.08072112, 0.041791044, 0.04931023, 0.051139638,
0.04846155, 0.05096584, 0.05777408, 0.061949052, 0.05104166, 0.046963792, 0.049300443,
0.052349474, 0.058187783, 0.05057783, 0.046503738, 0.058354452, 0.04888847, 0.061094806,
0.046629883, 0.04310788, 0.042446032, 0.058876954, 0.0635591, 0.059153467, 0.046796404,
0.050269842, 0.051668964, 0.059657082, 0.050314132, 0.04954865, 0.05692305, 0.053141538,
0.06863088, 0.04639415, 0.041536145, 0.04402394, 0.054421045, 0.054647453, 0.047481395,
0.04437755, 0.051609844, 0.048278086, 0.05206354, 0.045808423, 0.15515535, 0.047013413,
0.0424967, 0.04785157, 0.049661577, 0.047004733, 0.059186406, 0.051961422, 0.057622027,
0.03813588, 0.04042097, 0.067097045, 0.061208744, 0.04498976, 0.031554252, 0.033409715,
0.037935518, 0.031757373, 0.035634276, 0.04700252, 0.03058246, 0.04288186, 0.031244203,
0.046639014, 0.053106874, 0.035911422, 0.032883223, 0.033298496, 0.040137764, 0.051421743,
0.05237152, 0.057262857, 0.06393997, 0.09101578, 0.04832195, 0.043303855, 0.052960023,
0.063134044, 0.060882304, 0.054266643, 0.048709344, 0.05947443, 0.054370206, 0.05303216,
0.036246344, 0.044863462, 0.04167455, 0.045382403, 0.044518325, 0.048213553, 0.03627238,
0.1092339, 0.045865174, 0.05337392, 0.04605438, 0.040697236, 0.04221999, 0.04276173,
0.042496737, 0.04216775, 0.051514406, 0.050201092, 0.046262316, 0.052816905, 0.04073938,
0.044648737, 0.04697411, 0.09431317, 0.07168798, 0.043210473, 0.047402807, 0.09542375,
0.071744785, 0.044787448, 0.048193242, 0.053268615, 0.05649674, 0.047528453, 0.05214719,
0.043228045, 0.05294302, 0.047329094, 0.115458354, 0.0804816, 0.04634882, 0.05130284,
0.08368888, 0.07584949, 0.048697986, 0.053404327, 0.045109216, 0.06666413, 0.040697083,
0.052944675, 0.046983883, 0.04296346, 0.044109162, 0.04915499, 0.06489936, 0.050607093,
0.043420944, 0.04640584, 0.04652854, 0.06304765, 0.03936737, 0.048929304, 0.04553306,
0.043280594, 0.051273547, 0.03817977, 0.039914794, 0.04406915, 0.086592786, 0.06038935,
0.041949883, 0.042928416, 0.08081512, 0.07528412, 0.042096324, 0.04274038, 0.05115076,
0.052867405, 0.04719013, 0.05440695, 0.041447327, 0.04558476, 0.048131958, 0.11480384,
0.08015936, 0.045545943, 0.04829651, 0.08804118, 0.07310342, 0.047254566, 0.07149331,
0.059109487, 0.06859828, 0.05306559, 0.04792189, 0.160863, 0.070309915, 0.06300854,
0.051305406, 0.059425443, 0.055549223, 0.05430173, 0.05672026, 0.059425443, 0.058585327,
0.046528675, 0.055318862, 0.049078874, 0.060190342, 0.057474446, 0.059348766, 0.045936346,
0.11988576, 0.05197641, 0.06309001, 0.05294987, 0.044617686, 0.05356045, 0.056713007,
0.053615775, 0.048649203, 0.05505122, 0.053802423, 0.05684707, 0.055568293, 0.06640161,
0.053130735, 0.13910294, 0.057739902, 0.06652338, 0.0623474, 0.0516973, 0.05679251,
0.060167216, 0.05090873, 0.04624441, 0.053910535, 0.050286397, 0.059534203, 0.055441197,
0.06378308, 0.046690945, 0.14888903, 0.053051602, 0.06521251, 0.054518625, 0.04671943,
0.052383326, 0.05703509, 0.052121207, 0.060286652, 0.04584055, 0.058351815, 0.043949302,
0.04256538, 0.07192177, 0.061305393, 0.045161106, 0.04294389, 0.051875696, 0.044897664,
0.042895008, 0.04941546, 0.05074828, 0.051976692, 0.03993678, 0.049096372, 0.043048397,
0.045560848, 0.04264851, 0.042822942, 0.047943044, 0.05090579, 0.05980307, 0.067417994,
0.03836495, 0.046393353, 0.07068466, 0.062741496, 0.040110886, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.047766034,
0.04490119, 0.048124295, 0.044024576, 0.039069474, 0.053076476, 0.059183296, 0.050140373,
0.04835544, 0.05311091, 0.04421965, 0.058619186, 0.050359294, 0.066464156, 0.049486402,
0.0473727, 0.16530105, 0.04807524, 0.042747315, 0.046614382, 0.051140077, 0.049197257,
0.060805526, 0.054782733, 0.057595648, 0.0402253, 0.042739846, 0.07096907, 0.06366092,
0.045015067, 0.044442084, 0.0459529, 0.046819314, 0.04332103, 0.04419377, 0.04307111,
0.047844965, 0.039368216, 0.048868578, 0.058227077, 0.040268492, 0.042738054, 0.062469173,
0.055814724, 0.053833466, 0.05695582, 0.048552785, 0.061494004, 0.05170929, 0.05340171,
0.055134084, 0.05122762, 0.05083747, 0.051227756, 0.05980277, 0.050266523, 0.046952233,
0.04909498, 0.06840146, 0.12554513, 0.046869792, 0.048908517, 0.052068416, 0.056593318,
0.08860988, 0.04372422, 0.039512645, 0.04599876, 0.06399512, 0.056038447, 0.052179977,
0.04504709, 0.05529019, 0.052295037, 0.04848465, 0.041473597, 0.048271492, 0.050685864,
0.047437273, 0.055427346, 0.045573022, 0.042250276, 0.04841872, 0.06829415, 0.07466986,
0.04806206, 0.04566058, 0.057011046, 0.054843344, 0.04547633, 0.052487798, 0.05303006,
0.061735164, 0.05110825, 0.06086386, 0.04667687, 0.046815637, 0.04524336, 0.06068768,
0.05771433, 0.044129994, 0.046973236, 0.060367625, 0.055053934, 0.051750213, 0.04364326,
0.12965922, 0.0432673, 0.039764564, 0.042162936, 0.04729905, 0.044730198, 0.058169827,
0.045478426, 0.051220633, 0.035917807, 0.03902279, 0.059009373, 0.052366953, 0.039709736,
0.041611794, 0.1219143, 0.04134635, 0.03888956, 0.04286932, 0.046527848, 0.042272534,
0.060368408, 0.044899467, 0.053531658, 0.034968726, 0.037192874, 0.060063154, 0.0565247,
0.04143889, 0.04369542, 0.039265968, 0.053817622, 0.051197782, 0.05910972, 0.06365485,
0.038984522, 0.052411567, 0.046867996, 0.057354584, 0.12887563, 0.04836146, 0.041067068,
0.044736996, 0.064107604, 0.056773845, 0.046842743, 0.09670136, 0.040623456, 0.050017197,
0.04829516, 0.048631717, 0.043673493, 0.050051916, 0.05570246, 0.050617494, 0.04466864,
0.052211642, 0.05026178, 0.058577623, 0.043138914, 0.048310135, 0.04453668, 0.05690197,
0.04977558, 0.05835951, 0.045449555, 0.10964309, 0.04624118, 0.05584402, 0.053694192,
0.047589015, 0.04742527, 0.049055014, 0.050104193, 0.04011073, 0.048900373, 0.049721085,
0.04612587, 0.050971728, 0.0415265, 0.04473059, 0.047068562, 0.16505855, 0.07893954,
0.04524207, 0.046170823, 0.096321136, 0.07762302, 0.044328574, 0.044179413, 0.047970533,
0.05053542, 0.07005947, 0.12019984, 0.04331616, 0.038979463, 0.04982017, 0.06659393,
0.06227447, 0.05527293, 0.0455788, 0.054318577, 0.05287358, 0.052494075, 0.038438197,
0.03320777, 0.051422976, 0.048486475, 0.051100608, 0.050548196, 0.03556651, 0.0433253,
0.047760177, 0.048430365, 0.21271276, 0.050725315, 0.038647816, 0.038222603, 0.04933553,
0.04011658, 0.03621151, 0.046706628, 0.032892272, 0.040793013, 0.040200345, 0.03539833,
0.03730976, 0.03953651, 0.044730816, 0.038618274, 0.035830677, 0.036034703, 0.03925967,
0.04386523, 0.071284845, 0.05730147, 0.06973924, 0.052908424, 0.047086023, 0.14329167,
0.081445605, 0.059350576, 0.050932705, 0.058012653, 0.05901094, 0.061455857, 0.057267446,
0.06322509, 0.057370305, 0.0481375, 0.054592844, 0.049605265, 0.0631268, 0.05732574,
0.06402556, 0.04743615, 0.12204644, 0.052277744, 0.061700903, 0.054074507, 0.0462226,
0.05419331, 0.057990596, 0.053402122, 0.042256225, 0.052353777, 0.05229582, 0.04586164,
0.051657017, 0.04222414, 0.04753781, 0.04525414, 0.15301014, 0.066942185, 0.04612282,
0.04780219, 0.096828215, 0.08133314, 0.045931466, 0.032744624, 0.030959347, 0.04436427,
0.032331347, 0.037990257, 0.03905903, 0.02975176, 0.03343139, 0.037942845, 0.04043015,
0.058247264, 0.0362195, 0.034610663, 0.030876765, 0.037690733, 0.060149547, 0.04466082,
0.0709554, 0.039979234, 0.040722948, 0.050855055, 0.054677986, 0.039470118, 0.045822814,
0.052879754, 0.044186153, 0.042039037, 0.044292703, 0.04621862, 0.05134769, 0.04099486,
0.04346363, 0.045566197, 0.0503334, 0.047354214, 0.044917535, 0.046060868, 0.052268013,
0.05763719, 0.05964467, 0.04325838, 0.04283063, 0.06509457, 0.0619296, 0.04082995,
0.042004224, 0.036771603, 0.054925863, 0.045216765, 0.0481637, 0.058895685, 0.039171558,
0.047237687, 0.045770556, 0.055597626, 0.16247527, 0.058077075, 0.04092235, 0.038925648,
0.048597753, 0.061573535, 0.052763376, 0.10803524, 0.044559255, 0.053822257, 0.06647458,
0.052473653, 0.05418455, 0.059418935, 0.074474774, 0.057523012, 0.051781304, 0.05586026,
0.05557427, 0.065584764, 0.04511986, 0.053972773, 0.04868563, 0.051893357, 0.05202798,
0.05901431, 0.04659579, 0.13883099, 0.053240716, 0.062134616, 0.05175099, 0.04385619,
0.052467205, 0.058004677, 0.04885356, 0.047606252, 0.052313082, 0.05182133, 0.050438445,
0.058271687, 0.048203252, 0.044829268, 0.05063684, 0.06003115, 0.08433812, 0.050596975,
0.045723088, 0.057692453, 0.05553704, 0.050932102, 0.07391986, 0.056924682, 0.070689306,
0.053525057, 0.04906232, 0.15118426, 0.07093399, 0.06040258, 0.049081344, 0.06250772,
0.060259208, 0.060373824, 0.05494162, 0.056347292, 0.059529785, 0.044534247, 0.05281023,
0.052366313, 0.04640401, 0.050952192, 0.04176681, 0.049107756, 0.04807782, 0.10813335,
0.07544524, 0.044170815, 0.048022293, 0.09836135, 0.07509539, 0.04547666, 0.038359724,
0.03809353, 0.04679766, 0.041202907, 0.046505924, 0.05904782, 0.037655678, 0.053218596,
0.04174982, 0.052147985, 0.12418155, 0.043528445, 0.03793442, 0.039405856, 0.04489942,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.044081178, 0.18732902, 0.043732088, 0.038739294, 0.03991171, 0.048227582,
0.04477778, 0.055427305, 0.048689593, 0.05349722, 0.035836168, 0.039336402, 0.059961904,
0.058873747, 0.041492376, 0.041150138, 0.045005187, 0.045817, 0.04791446, 0.04734153,
0.050744943, 0.04275307, 0.04942051, 0.050258785, 0.05285732, 0.041619897, 0.039435595,
0.04624433, 0.06768788, 0.08606684, 0.03455709, 0.040098716, 0.039086737, 0.040232424,
0.044165324, 0.03491964, 0.039319687, 0.03978919, 0.08179371, 0.062466778, 0.037146788,
0.042587552, 0.06981536, 0.06361416, 0.040292136, 0.04577505, 0.05712362, 0.045349147,
0.06032835, 0.059926312, 0.06049337, 0.045838397, 0.10756698, 0.05024176, 0.061635308,
0.05889177, 0.048635468, 0.0514304, 0.05263295, 0.058049846, 0.039521303, 0.045884714,
0.04137629, 0.045346946, 0.040852394, 0.038932707, 0.044936456, 0.044925515, 0.06350336,
0.07158451, 0.037025623, 0.048602067, 0.07728826, 0.06701815, 0.040680796, 0.047119267,
0.06194346, 0.05176347, 0.05532856, 0.05619855, 0.06474842, 0.050884392, 0.19539326,
0.054979566, 0.068910435, 0.05293244, 0.04693394, 0.05737135, 0.063770495, 0.05274526,
0.042953636, 0.037898827, 0.05476799, 0.05062875, 0.05730042, 0.06670395, 0.0390614,
0.052943807, 0.04871034, 0.057543416, 0.19127181, 0.050832357, 0.041489378, 0.04357007,
0.058750965, 0.06715435, 0.046684537, 0.10799482, 0.041197967, 0.04767222, 0.049429476,
0.05118366, 0.04270548, 0.052671596, 0.057608653, 0.050251987, 0.042461775, 0.050220396,
0.04919049, 0.057513613, 0.04212652, 0.049292475, 0.043850362, 0.04491154, 0.040469985,
0.052817546, 0.054088157, 0.06442231, 0.054614197, 0.0581082, 0.04083865, 0.048050754,
0.05561767, 0.07694705, 0.047902133, 0.05740499, 0.06443362, 0.059769087, 0.049700025,
0.046053253, 0.15362346, 0.06385186, 0.07122325, 0.04978993, 0.056718368, 0.05342084,
0.057671685, 0.05236919, 0.057126462, 0.050595302, 0.06872878, 0.060093384, 0.06696902,
0.051647816, 0.046610583, 0.13624687, 0.08180862, 0.06320864, 0.051980022, 0.058496412,
0.053438168, 0.055261597, 0.05937222, 0.064694375, 0.05415901, 0.051987585, 0.045827143,
0.095003754, 0.04022397, 0.049332418, 0.051214717, 0.04687135, 0.04776729, 0.05478596,
0.059726108, 0.052226882, 0.046219796, 0.053044386, 0.05210547, 0.059726108, 0.046809826,
0.051502064, 0.05273386, 0.06824895, 0.20591614, 0.045878053, 0.04087531, 0.05381747,
0.073367454, 0.06633492, 0.057554167, 0.046785757, 0.056353725, 0.059104286, 0.055812716,
0.047160562, 0.047878783, 0.05765959, 0.04727802, 0.05210063, 0.04945996, 0.04551853,
0.04474932, 0.053410716, 0.06979472, 0.049994975, 0.04697443, 0.054281507, 0.054623753,
0.057723857, 0.039955363, 0.039979592, 0.043899663, 0.04745458, 0.0657696, 0.039391413,
0.03435382, 0.044548545, 0.045149766, 0.05029604, 0.047482677, 0.04092045, 0.043574348,
0.038861576, 0.041171856, 0.053317722, 0.05039147, 0.05580087, 0.06883715, 0.066369854,
0.049613442, 0.045311626, 0.0492677, 0.057654843, 0.05538684, 0.05465053, 0.050261166,
0.058978435, 0.052219216, 0.05109609, 0.048746463, 0.043805897, 0.050260633, 0.045997966,
0.047130805, 0.049861513, 0.045220602, 0.04547375, 0.04654694, 0.05414889, 0.046518534,
0.0438787, 0.043971717, 0.060303558, 0.0903491, 0.046143614, 0.031825576, 0.053352322,
0.028929973, 0.036217056, 0.033763863, 0.033246275, 0.032055706, 0.03784301, 0.041322272,
0.035894517, 0.030718643, 0.034226257, 0.035719927, 0.040171143, 0.045168523, 0.05143701,
0.053906336, 0.047621418, 0.049452696, 0.045292106, 0.048384592, 0.043769658, 0.076433085,
0.06798474, 0.045573913, 0.052338663, 0.07475092, 0.05722259, 0.041712523, 0.04560014,
0.049676217, 0.047772843, 0.043871228, 0.04104629, 0.056017295, 0.04932169, 0.060406487,
0.046994932, 0.06113627, 0.043736905, 0.048153687, 0.045970365, 0.059347488, 0.058498804,
0.065157734, 0.060456272, 0.06664803, 0.048703246, 0.046618648, 0.20264469, 0.06268265,
0.06585392, 0.04922077, 0.056757446, 0.054186907, 0.05530141, 0.052307513, 0.057176135,
0.05628462, 0.03376406, 0.033904243, 0.039178893, 0.04306211, 0.04899682, 0.035438642,
0.033058587, 0.03709257, 0.05051599, 0.04899682, 0.03918879, 0.035336576, 0.041697886,
0.040037375, 0.039198697, 0.041942444, 0.044434313, 0.05196039, 0.04049931, 0.04206609,
0.042380054, 0.04484776, 0.042235658, 0.056018595, 0.055830996, 0.043288335, 0.04374996,
0.0498111, 0.0505181, 0.044666126, 0.062350646, 0.048245307, 0.063416585, 0.041701224,
0.040083572, 0.092470035, 0.057078134, 0.04964067, 0.042049836, 0.054685082, 0.05007207,
0.05239726, 0.046084363, 0.046016373, 0.049055025, 0.042151812, 0.042743705, 0.048344884,
0.04993697, 0.03992358, 0.045119002, 0.047402967, 0.05069165, 0.049653277, 0.055797584,
0.043253206, 0.044724368, 0.052783415, 0.046623964, 0.037456255, 0.045298107, 0.1566102,
0.04482409, 0.042882446, 0.047496766, 0.04867751, 0.04750693, 0.065737545, 0.050153095,
0.057911806, 0.038961977, 0.041508082, 0.062585086, 0.0614101, 0.04405027, 0.04214212,
0.041411366, 0.045121722, 0.05134087, 0.051414754, 0.045891687, 0.041311145, 0.04169276,
0.04644927, 0.05163833, 0.051311407, 0.041105293, 0.044143543, 0.04384665, 0.048118357,
0.04648828, 0.047664143, 0.04693109, 0.047727857, 0.046762057, 0.040693548, 0.044972863,
0.040661927, 0.060452413, 0.06331613, 0.04358687, 0.046594836, 0.070982, 0.05912252,
0.0464647, 0.040071692, 0.044917304, 0.04709821, 0.049255695, 0.058451436, 0.04717828,
0.040896453, 0.045762837, 0.057891436, 0.06481116, 0.051524844, 0.043640718, 0.04865491,
0.047774956, 0.047912043, 0.048769455, 0.15554419, 0.047979977, 0.041750938, 0.04316976,
0.053061645, 0.053046595, 0.05976302, 0.049979746, 0.061751146, 0.039190646, 0.043096967,
0.06628827, 0.06368988, 0.04686251, 0.052278295, 0.042954784, 0.055310998, 0.04858504,
0.045126386, 0.06682165, 0.057869397, 0.050728485, 0.050559767, 0.0619421, 0.06894479,
0.20318681, 0.05214068, 0.05013539, 0.0526434, 0.04488933, 0.051827256, 0.053632982,
0.06555724, 0.14029604, 0.045989227, 0.04134864, 0.054838527, 0.07511905, 0.072611004,
0.06495016, 0.04447967, 0.059807997, 0.05907155, 0.06054091, 0.040048834, 0.049047653,
0.04924312, 0.048876118, 0.05383933, 0.041723263, 0.041394945, 0.048806444, 0.11115124,
0.07401225, 0.0458987, 0.046279904, 0.087269284, 0.07197947, 0.042827442, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.040918183, 0.05155265, 0.043799736, 0.04331332, 0.043937024, 0.050650153, 0.049229577,
0.06880611, 0.056712348, 0.060865585, 0.038680535, 0.041974653, 0.06012571, 0.07207833,
0.051413633, 0.044141166, 0.0634212, 0.048222277, 0.05266633, 0.05530133, 0.06071299,
0.04628352, 0.2091038, 0.0595901, 0.072358645, 0.04513883, 0.044130154, 0.05931171,
0.06854053, 0.050261587, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.035579972, 0.04549742, 0.04241713, 0.0421151,
0.047022376, 0.04032845, 0.039028257, 0.047331207, 0.06727418, 0.081171095, 0.039978094,
0.04177647, 0.05710959, 0.05440064, 0.043644696, 0.037002694, 0.04827846, 0.0422506,
0.03608726, 0.037067767, 0.03887187, 0.042380452, 0.045009606, 0.05298124, 0.059843697,
0.03190367, 0.038230885, 0.08387982, 0.05959006, 0.03707296, 0.05072535, 0.19774595,
0.04890849, 0.039450757, 0.041609876, 0.053026613, 0.049258735, 0.052636646, 0.045481484,
0.052513663, 0.038060036, 0.040724456, 0.053587414, 0.05988527, 0.046909478, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.044723693, 0.044056788, 0.05510376, 0.055255655, 0.07911365, 0.041408278, 0.036791593,
0.04704206, 0.059315547, 0.0641536, 0.05886655, 0.0429104, 0.05441071, 0.047532253,
0.046943873, 0.043997075, 0.048297074, 0.054254796, 0.045575954, 0.047804423, 0.041423652,
0.045912705, 0.04463275, 0.09268504, 0.073103525, 0.044303354, 0.052005284, 0.075289264,
0.061066832, 0.041910674, 0.060303356, 0.0648293, 0.063299075, 0.052046355, 0.049874693,
0.15361415, 0.06996692, 0.07435715, 0.052365236, 0.058755565, 0.057419356, 0.07477916,
0.056378946, 0.05919397, 0.052816764, 0.046140395, 0.05678481, 0.05130669, 0.050190527,
0.054710783, 0.05431248, 0.050790545, 0.069666475, 0.071207225, 0.06531904, 0.043776177,
0.046109375, 0.06580802, 0.0922897, 0.06884995, 0.043140996, 0.057127196, 0.045823686,
0.05019235, 0.050948486, 0.06076778, 0.043472562, 0.15711717, 0.04917815, 0.06583099,
0.0475684, 0.045224156, 0.05045251, 0.05373811, 0.052288976, 0.076078355, 0.05630266,
0.15515211, 0.047133345, 0.050179575, 0.06338404, 0.061177146, 0.053493496, 0.061490655,
0.06768605, 0.058655195, 0.055459317, 0.057541084, 0.05652156, 0.062672704, 0.050976973,
0.059557565, 0.062467538, 0.05150703, 0.06169781, 0.05439025, 0.049028255, 0.056816418,
0.056560636, 0.09631268, 0.06270386, 0.049703434, 0.06049344, 0.062237695, 0.08356984,
0.048536234, 0.056542505, 0.051131677, 0.0555128, 0.051771283, 0.04759551, 0.052179728,
0.054497056, 0.06786269, 0.067662664, 0.049369715, 0.052445587, 0.12063694, 0.07081253,
0.044664364, 0.05027978, 0.12686673, 0.044207495, 0.042858165, 0.04358107, 0.051395573,
0.04807089, 0.051744327, 0.047972936, 0.049998652, 0.039233014, 0.041193526, 0.052100282,
0.06080851, 0.044624053, 0.04522645, 0.039253812, 0.04600952, 0.04610922, 0.052199777,
0.053472713, 0.040508524, 0.045416143, 0.04310317, 0.054538228, 0.083686866, 0.053796683,
0.042102206, 0.04303793, 0.052824747, 0.042216998, 0.045036864, 0.057425685, 0.041358612,
0.07235395, 0.03848214, 0.036580734, 0.041727636, 0.06009991, 0.05662963, 0.051334143,
0.041644704, 0.050523084, 0.047346573, 0.04896106, 0.041697495, 0.053281255, 0.048205074,
0.040969502, 0.04358429, 0.03821647, 0.04542576, 0.0452256, 0.078901604, 0.059606247,
0.038883686, 0.042856276, 0.077513196, 0.065756835, 0.039787788, 0.060337458, 0.059523482,
0.06745743, 0.04750692, 0.04699907, 0.15456593, 0.057855807, 0.06221938, 0.04847413,
0.057562295, 0.055156022, 0.05689445, 0.052780632, 0.056068994, 0.05166236, 0.057769384,
0.04998034, 0.0593066, 0.049588, 0.058377333, 0.04715337, 0.04725628, 0.05005108,
0.05162539, 0.05893386, 0.051863108, 0.043214686, 0.05154294, 0.05992949, 0.17970449,
0.053398084, 0.06185309, 0.055438083, 0.060831804, 0.05735442, 0.07191439, 0.05644342,
0.1447272, 0.057749398, 0.06715341, 0.060032018, 0.05337855, 0.06008364, 0.065117225,
0.057397436, 0.08124953, 0.050281636, 0.18913494, 0.044534113, 0.044158395, 0.06112027,
0.07870912, 0.045659903, 0.053954493, 0.058352783, 0.052256238, 0.052103218, 0.05240028,
0.05585488, 0.054319058, 0.055718377, 0.05449818, 0.07118955, 0.05283385, 0.063287504,
0.056163542, 0.052793678, 0.05399596, 0.059204727, 0.088545255, 0.067529984, 0.05273599,
0.059799034, 0.058711167, 0.079801895, 0.04630253, 0.058675945, 0.05442301, 0.043946955,
0.045166153, 0.055429958, 0.051171687, 0.05445228, 0.061401937, 0.0675775, 0.039571676,
0.04871146, 0.077110924, 0.06329167, 0.0486278, 0.05212427, 0.108876936, 0.050921235,
0.0398043, 0.041150067, 0.052458826, 0.049301494, 0.04897116, 0.04844825, 0.052677933,
0.037744332, 0.040033083, 0.052951407, 0.06942976, 0.052644044, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.042312145,
0.04768088, 0.04419812, 0.04248159, 0.041683298, 0.046872452, 0.0529868, 0.047012575,
0.057388283, 0.059321783, 0.04023975, 0.048760112, 0.058527347, 0.09488347, 0.05542757,
0.05037035, 0.04197016, 0.049080756, 0.046674397, 0.04989442, 0.04193537, 0.049664807,
0.04321573, 0.055043187, 0.052159954, 0.045963004, 0.044549014, 0.047184724, 0.06321433,
0.081294864, 0.045941338, 0.05571698, 0.050924517, 0.057667077, 0.05568913, 0.063308,
0.049028553, 0.17481203, 0.055941798, 0.067922935, 0.054337077, 0.048233703, 0.055630017,
0.061953854, 0.05231826, 0.050946377, 0.039427176, 0.049524125, 0.04997347, 0.053779427,
0.042858463, 0.044340868, 0.04095187, 0.048951287, 0.049171895, 0.050251395, 0.04258411,
0.04513773, 0.05659924, 0.10238896, 0.048402667, 0.10491783, 0.04374946, 0.035108592,
0.0365234, 0.047811236, 0.046426795, 0.0479701, 0.040927142, 0.048724394, 0.032676257,
0.035807807, 0.04999504, 0.056158345, 0.043102633, 0.040223006, 0.058933545, 0.04675313,
0.036046118, 0.037334125, 0.039917555, 0.047971744, 0.04381481, 0.057430927, 0.0683524,
0.03444474, 0.040424544, 0.108605504, 0.06720015, 0.039434098, 0.03888381, 0.044770446,
0.047604475, 0.039541997, 0.043483857, 0.040468317, 0.040177025, 0.040267896, 0.04664804,
0.06532386, 0.039731722, 0.04114981, 0.046029154, 0.052046373, 0.054832373, 0.045769647,
0.061934598, 0.05155787, 0.04874066, 0.05145723, 0.04575344, 0.053861987, 0.054608736,
0.079816885, 0.08784374, 0.045654483, 0.050222673, 0.17832677, 0.083432086, 0.047615845,
0.049527302, 0.04926109, 0.05250901, 0.046428718, 0.0458566, 0.044881612, 0.045992758,
0.041740876, 0.049333267, 0.06114213, 0.0470228, 0.04582758, 0.047012363, 0.057984933,
0.06591745, 0.062053416, 0.06539465, 0.06324749, 0.053827263, 0.050174322, 0.14904441,
0.07139995, 0.07346125, 0.053749733, 0.058633033, 0.05798751, 0.07220346, 0.057456203,
0.058853045, 0.05251428, 0.037012342, 0.033393316, 0.05570081, 0.040948194, 0.045998823,
0.05138636, 0.034816645, 0.04356259, 0.044444434, 0.049102105, 0.11907786, 0.046292793,
0.038799573, 0.03647021, 0.045309037, 0.056972064, 0.056180995, 0.17819148, 0.047350593,
0.05619745, 0.057089694, 0.05795902, 0.051750872, 0.063164674, 0.06710143, 0.061413016,
0.05770503, 0.061989676, 0.062737405, 0.06419661, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.05396825, 0.044319373,
0.06746031, 0.046964962, 0.05271785, 0.047833722, 0.0488747, 0.040560976, 0.059116475,
0.06122718, 0.059709433, 0.050312787, 0.04963728, 0.052872397, 0.067885935, 0.051304694,
0.05637602, 0.05962627, 0.04614539, 0.044835594, 0.0615855, 0.05906177, 0.077895164,
0.060535472, 0.06753669, 0.04678004, 0.053038005, 0.05889462, 0.07966961, 0.0590636,
0.054416295, 0.045157205, 0.053788118, 0.051880103, 0.0527178, 0.065919615, 0.05637993,
0.052894656, 0.053955812, 0.0661317, 0.07868758, 0.119733244, 0.052733332, 0.055658303,
0.060870364, 0.039672468, 0.08858035, 0.039073497, 0.03068999, 0.033020407, 0.04318721,
0.040261984, 0.041502696, 0.037805907, 0.046198584, 0.029478446, 0.033408675, 0.04914511,
0.054083522, 0.037731145, 0.048213605, 0.10991371, 0.04564283, 0.035003018, 0.037152342,
0.049111124, 0.04652679, 0.045961436, 0.042525616, 0.04945327, 0.033063497, 0.036434848,
0.051842388, 0.062317435, 0.044006497, 0.04626702, 0.046470433, 0.055167414, 0.048480276,
0.081238635, 0.042919196, 0.037884586, 0.045540135, 0.06130795, 0.06314689, 0.05182285,
0.042872842, 0.05452742, 0.04941351, 0.051485587, 0.045234833, 0.05002506, 0.060443006,
0.049426746, 0.10267559, 0.04490047, 0.03891866, 0.049378995, 0.080235615, 0.07425288,
0.06593564, 0.04643229, 0.058121853, 0.054629356, 0.053963646, 0.062002424, 0.049820244,
0.24525006, 0.04293859, 0.04903718, 0.055708893, 0.051161062, 0.047280625, 0.05512554,
0.06476704, 0.062141865, 0.05108819, 0.053731635, 0.05148757, 0.05845908, 0.051776305,
0.044569507, 0.060654353, 0.040216435, 0.04621578, 0.043842908, 0.040712792, 0.041357554,
0.043203622, 0.0676584, 0.047485903, 0.03952508, 0.044088654, 0.04620281, 0.06627707,
0.043196537, 0.050873123, 0.054990172, 0.05674398, 0.09524261, 0.046281002, 0.03995923,
0.0546449, 0.07258869, 0.0724481, 0.06115885, 0.045072854, 0.05909267, 0.05578656,
0.05376433, 0.050682828, 0.15801065, 0.050096415, 0.037778575, 0.040385243, 0.056441013,
0.048393425, 0.050947662, 0.04489165, 0.05269528, 0.036580686, 0.03915345, 0.05676933,
0.062166356, 0.047387797, 0.043153375, 0.035098467, 0.056736678, 0.049018536, 0.04990425,
0.054470703, 0.03900811, 0.04440032, 0.0495118, 0.048382305, 0.3053761, 0.051639263,
0.040205985, 0.04045172, 0.05006943, 0.044466008, 0.05024864, 0.055432625, 0.053009387,
0.10300333, 0.042623926, 0.03984463, 0.05126788, 0.067633905, 0.077298224, 0.056276415,
0.04417391, 0.06247134, 0.055595282, 0.056018066, 0.04208397, 0.056070082, 0.046322353,
0.054785468, 0.054465782, 0.061703134, 0.049154576, 0.20509295, 0.057615813, 0.064491086,
0.055323116, 0.048763502, 0.0598684, 0.06199456, 0.04886056, 0.039969925, 0.05224326,
0.046722613, 0.037235796, 0.040474053, 0.035692893, 0.043006618, 0.039715454, 0.066683285,
0.05365645, 0.03632327, 0.04028791, 0.07037927, 0.059351794, 0.03788861, 0.047040373,
0.05105783, 0.0585961, 0.04981084, 0.053650275, 0.044406608, 0.04882633, 0.04593375,
0.0960529, 0.06854849, 0.050282802, 0.05462682, 0.07066947, 0.06299738, 0.05206428,
0.06203773, 0.04826728, 0.15596314, 0.042498544, 0.04653482, 0.05011575, 0.054714,
0.045573957, 0.053715326, 0.063455224, 0.05887455, 0.054714, 0.05525476, 0.054188818,
0.059069823, 0.048139222, 0.052276604, 0.056306206, 0.0432926, 0.04865493, 0.04090226,
0.047215335, 0.042570192, 0.06955559, 0.061796, 0.043966793, 0.047600858, 0.058851674,
0.05918744, 0.04735778, 0.047451265, 0.05249483, 0.055218577, 0.04484419, 0.048065387,
0.041276734, 0.05001479, 0.04577358, 0.087261684, 0.06561003, 0.042543642, 0.048415262,
0.06982091, 0.067614585, 0.044924818, 0.05266533, 0.06661585, 0.053418305, 0.04783626,
0.04764111, 0.10375677, 0.055180863, 0.067379676, 0.048767135, 0.05276129, 0.052879825,
0.05682095, 0.050300404, 0.05569742, 0.048846263, 0.048361614, 0.056979515, 0.053668577,
0.059527233, 0.058085106, 0.065359585, 0.0551006, 0.16877536, 0.05884044, 0.07016753,
0.059761602, 0.050941437, 0.06047252, 0.064643435, 0.052187603, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.042376105,
0.05028403, 0.0457298, 0.05463703, 0.051978372, 0.0574584, 0.044481773, 0.23803413,
0.052302167, 0.06421922, 0.052429337, 0.045949135, 0.04926753, 0.052677575, 0.049276665,
0.06872671, 0.06104455, 0.0686374, 0.048418738, 0.046089955, 0.18235299, 0.07002449,
0.06762386, 0.048458043, 0.055894457, 0.055623833, 0.06368974, 0.051611748, 0.057122312,
0.054681167, 0.04296616, 0.055251002, 0.048918467, 0.050422423, 0.04894513, 0.047835015,
0.052489594, 0.055807568, 0.084117234, 0.077927895, 0.04522066, 0.055972792, 0.11620465,
0.07039641, 0.04550762, 0.045865186, 0.043315955, 0.05919755, 0.05305672, 0.061576933,
0.06620684, 0.041158296, 0.05884216, 0.052680485, 0.064557604, 0.1411991, 0.050409433,
0.046410263, 0.047568202, 0.062382396, 0.05697677, 0.055347715, 0.05730823, 0.050508782,
0.054070484, 0.06079322, 0.05009308, 0.056384664, 0.050593168, 0.06523225, 0.052155778,
0.048418596, 0.04903897, 0.061794072, 0.11188787, 0.052065402, 0.090666674, 0.04697665,
0.03709846, 0.03873407, 0.050150663, 0.04983663, 0.045416515, 0.041619744, 0.045194484,
0.03511401, 0.038792625, 0.046565242, 0.061942756, 0.04787459, 0.042086024, 0.051316474,
0.049641646, 0.037907053, 0.038116805, 0.04548886, 0.052293792, 0.04250514, 0.065650254,
0.07212552, 0.03594559, 0.045707233, 0.07604095, 0.061775964, 0.043309767, 0.041566297,
0.051025216, 0.043932803, 0.04473718, 0.046152465, 0.046903234, 0.046174273, 0.05562669,
0.062293448, 0.06281911, 0.040627547, 0.044729695, 0.059194144, 0.07527429, 0.062045164,
0.04492081, 0.046127245, 0.05637283, 0.04828111, 0.070110865, 0.041710485, 0.038172,
0.04780181, 0.061400738, 0.06455809, 0.05340754, 0.043508142, 0.056948412, 0.05125272,
0.049421668, 0.052680515, 0.049049865, 0.061986454, 0.048746705, 0.05570196, 0.046692733,
0.049209304, 0.044332832, 0.055001687, 0.06917545, 0.052680515, 0.045507587, 0.052127715,
0.056883775, 0.07863401, 0.041678857, 0.041865226, 0.052518144, 0.04029618, 0.05062179,
0.038263433, 0.03426372, 0.040939346, 0.050244868, 0.058212627, 0.043360777, 0.037573457,
0.047537077, 0.043688186, 0.041179866, 0.05488072, 0.13077262, 0.054737754, 0.040424295,
0.044783313, 0.05287319, 0.051638402, 0.048498943, 0.04976383, 0.05308271, 0.03913864,
0.042143732, 0.055808313, 0.06598366, 0.05234562, 0.054364942, 0.15985028, 0.05355358,
0.03937213, 0.0411489, 0.05328904, 0.053600676, 0.050977293, 0.04958928, 0.05569823,
0.03741627, 0.041729923, 0.060259633, 0.06700698, 0.047610756, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.055267077,
0.05185524, 0.21910194, 0.044313848, 0.05300234, 0.05348127, 0.053042598, 0.04860917,
0.061482355, 0.06499452, 0.05752249, 0.052565444, 0.06176678, 0.060116924, 0.06287802,
0.04586725, 0.05636941, 0.050540175, 0.051416013, 0.051204916, 0.06408708, 0.05300208,
0.15643, 0.055396535, 0.06467863, 0.052146442, 0.04931889, 0.059434664, 0.061636686,
0.049167708, 0.054879643, 0.055303916, 0.06922005, 0.051809672, 0.061593123, 0.048333652,
0.054785784, 0.04773632, 0.1101243, 0.06921209, 0.056419138, 0.06004273, 0.06922358,
0.06681, 0.062785424, 0.04042912, 0.04625319, 0.05168823, 0.043758355, 0.059483957,
0.038427934, 0.036328223, 0.045697823, 0.06952168, 0.062723495, 0.04946023, 0.040179007,
0.056263912, 0.050504908, 0.045618173, 0.05088226, 0.039857775, 0.052897718, 0.042940486,
0.04912316, 0.049610883, 0.042666726, 0.040358245, 0.04694158, 0.055702593, 0.06883755,
0.056215625, 0.04283389, 0.043606553, 0.05336692, 0.055268336, 0.051067445, 0.12350433,
0.04708153, 0.057255346, 0.0557541, 0.05226099, 0.052482784, 0.06159707, 0.062137548,
0.06580456, 0.052357078, 0.060557127, 0.058451004, 0.061628476, 0.0576863, 0.05686285,
0.06258533, 0.049480453, 0.046690132, 0.20378728, 0.06531898, 0.06810634, 0.04852704,
0.05467871, 0.05949269, 0.06694136, 0.05113977, 0.055925455, 0.052777313, 0.049263947,
0.059348736, 0.05522897, 0.056227442, 0.05624751, 0.06558146, 0.057072084, 0.13573614,
0.0568222, 0.067943625, 0.060657248, 0.051698167, 0.06373024, 0.06701572, 0.05360939,
0.05172606, 0.059077233, 0.065945946, 0.053421564, 0.05688956, 0.048818108, 0.05192001,
0.049242444, 0.10192077, 0.074760795, 0.05378427, 0.05990177, 0.072784774, 0.07005275,
0.051226977, 0.04637738, 0.04062691, 0.06104614, 0.048931707, 0.056265544, 0.06984067,
0.041172974, 0.053629957, 0.050327763, 0.062350575, 0.16286197, 0.052706514, 0.0431988,
0.04510592, 0.059984293, 0.03687812, 0.041746937, 0.045634333, 0.04799167, 0.048839763,
0.039599076, 0.039412152, 0.04627971, 0.09608821, 0.0710282, 0.044065915, 0.046207342,
0.062381964, 0.05675077, 0.042579208, 0.046046108, 0.063806616, 0.053088576, 0.045447953,
0.048110258, 0.046703305, 0.057091124, 0.050546482, 0.081871234, 0.07940184, 0.044305947,
0.052203458, 0.14911856, 0.09705012, 0.04789256, 0.045397878, 0.04182459, 0.05787975,
0.04948689, 0.056648165, 0.07336397, 0.0428736, 0.059660047, 0.050452765, 0.064185016,
0.15112837, 0.05413379, 0.044674482, 0.045634165, 0.057083644, 0.0650443, 0.05422686,
0.17853512, 0.04788458, 0.057886656, 0.062430725, 0.05271652, 0.051953454, 0.062218856,
0.06794665, 0.072095625, 0.05257817, 0.058446378, 0.054811724, 0.061224386, 0.04941946,
0.056532104, 0.055827405, 0.05892256, 0.05742263, 0.06558456, 0.056198496, 0.1291871,
0.056801897, 0.06935964, 0.061561055, 0.051358208, 0.061419517, 0.06590525, 0.055601377,
0.04503235, 0.043442678, 0.05214223, 0.045762647, 0.050370075, 0.039191894, 0.04115249,
0.039623547, 0.05329024, 0.05942085, 0.0535671, 0.04162653, 0.046719454, 0.04962818,
0.0625747, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.039553236, 0.044346295, 0.048775855, 0.04662863, 0.04615461,
0.04157988, 0.041648928, 0.04404099, 0.09602651, 0.06412149, 0.045399897, 0.048351523,
0.0666033, 0.058043055, 0.039571483, 0.04056304, 0.042017702, 0.04854028, 0.042457137,
0.048117653, 0.05280111, 0.038654782, 0.059093222, 0.04244574, 0.05265589, 0.070566416,
0.03882564, 0.042243674, 0.044592794, 0.053402036, 0.064323954, 0.06793794, 0.062181357,
0.051053885, 0.050109614, 0.17696409, 0.062246487, 0.071535, 0.04975797, 0.05667929,
0.05873335, 0.059279718, 0.05468432, 0.05943691, 0.05507612, 0.03903494, 0.069684364,
0.03487199, 0.036731414, 0.03590805, 0.041640807, 0.042340126, 0.047154777, 0.041309357,
0.04537702, 0.03395099, 0.03646571, 0.051047366, 0.049873706, 0.03503734, 0.053261925,
0.048441652, 0.05734856, 0.051524118, 0.057627413, 0.04687199, 0.046523657, 0.050385345,
0.051624477, 0.053773757, 0.05096223, 0.041026577, 0.051082537, 0.061204806, 0.15078616,
0.04249758, 0.041491527, 0.052827187, 0.04062445, 0.04825, 0.03847125, 0.04219765,
0.036653843, 0.07097733, 0.053013746, 0.04494291, 0.04719221, 0.05256096, 0.049621284,
0.047169913, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04514716, 0.058693264, 0.0499326, 0.044676185, 0.047114965,
0.046654906, 0.05773125, 0.04795586, 0.08075753, 0.07807433, 0.042139467, 0.051468775,
0.114616044, 0.092497736, 0.0505937, 0.048480127, 0.058073226, 0.054335482, 0.059641827,
0.058040656, 0.06918597, 0.054987334, 0.17022783, 0.059188623, 0.06931769, 0.061018568,
0.053197026, 0.06316713, 0.06728188, 0.05385664, 0.043457545, 0.04303917, 0.04313398,
0.04720928, 0.049160596, 0.04738027, 0.0391721, 0.04919905, 0.040701576, 0.049773578,
0.059155617, 0.039466854, 0.041137133, 0.045739032, 0.060068566, 0.05499986, 0.048445035,
0.15854995, 0.043053992, 0.049442165, 0.048845485, 0.054121073, 0.04424141, 0.055042293,
0.06378395, 0.053944822, 0.051029813, 0.057821184, 0.055900358, 0.065756336, 0.05584714,
0.051585633, 0.05614079, 0.053904474, 0.058878884, 0.049325377, 0.04531274, 0.05077499,
0.050458964, 0.05457533, 0.053198192, 0.041576885, 0.050096665, 0.059500795, 0.16728228,
0.06590833, 0.06627532, 0.06366299, 0.049102586, 0.04739827, 0.13175528, 0.0686584,
0.06606231, 0.050334293, 0.056632295, 0.05804861, 0.06558798, 0.05530441, 0.05864814,
0.051905174, 0.0676786, 0.06978869, 0.06531494, 0.05181594, 0.04919565, 0.1701775,
0.06442231, 0.072173, 0.049292065, 0.058050968, 0.05644672, 0.057247713, 0.053652417,
0.058832005, 0.05591148, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.039812233, 0.05087131, 0.049586173, 0.054095544,
0.08205679, 0.045333743, 0.039801013, 0.056472763, 0.07491959, 0.06710382, 0.05598662,
0.042612247, 0.05665271, 0.05610332, 0.050976325, 0.037784584, 0.04402585, 0.045067552,
0.045171656, 0.061615616, 0.043035116, 0.036175497, 0.051691644, 0.05536859, 0.06695705,
0.05334921, 0.04296303, 0.049561072, 0.0459982, 0.051304664, 0.04029358, 0.047660105,
0.05063821, 0.048469678, 0.06549756, 0.042847686, 0.037655693, 0.050012823, 0.06682943,
0.06373898, 0.05306338, 0.041776005, 0.055356093, 0.050844535, 0.04698236, 0.04002676,
0.043280013, 0.052217532, 0.04344872, 0.064343624, 0.037516892, 0.03531925, 0.042069685,
0.057821583, 0.060695775, 0.047461007, 0.039809898, 0.05275354, 0.04707162, 0.04603832,
0.04437776, 0.050061323, 0.045149144, 0.039973855, 0.04727381, 0.046499446, 0.038889267,
0.044421025, 0.040901933, 0.049027532, 0.041574374, 0.03697107, 0.040018134, 0.048670292,
0.06678153, 0.062360585, 0.041958652, 0.09565493, 0.04011046, 0.042758424, 0.045057464,
0.055000868, 0.038481988, 0.047938388, 0.05001374, 0.0487371, 0.043845065, 0.046511304,
0.048756607, 0.052813124, 0.049821947, 0.06483559, 0.0578639, 0.04647093, 0.051196966,
0.051947474, 0.0589156, 0.052580655, 0.08957282, 0.089746974, 0.045706496, 0.055788253,
0.12647934, 0.08742216, 0.055341423, 0.041086625, 0.03923766, 0.041473128, 0.041867867,
0.045035228, 0.039929368, 0.039151333, 0.038479622, 0.042535715, 0.047937848, 0.038445003,
0.035878215, 0.039953794, 0.05147026, 0.117819294, 0.055445664, 0.0628873, 0.06042564,
0.04774004, 0.04662928, 0.17342766, 0.06198788, 0.07463029, 0.04838062, 0.056537706,
0.052488323, 0.060074665, 0.050794415, 0.058072947, 0.051025163, 0.040438294, 0.040087935,
0.048755564, 0.042996034, 0.05177485, 0.04236952, 0.036719576, 0.04224583, 0.04365894,
0.058821544, 0.05760956, 0.041199174, 0.041457742, 0.04407788, 0.06876036, 0.04926432,
0.052227035, 0.059851345, 0.053025417, 0.059475254, 0.05588277, 0.04890079, 0.055282403,
0.06714603, 0.078872845, 0.064130016, 0.05692823, 0.05400686, 0.060481064, 0.07654528,
0.054469325, 0.054970376, 0.06355008, 0.042933505, 0.042480163, 0.10436236, 0.050808124,
0.055751268, 0.044275645, 0.051265474, 0.04576831, 0.043413714, 0.049182214, 0.0526474,
0.051799905, 0.04821044, 0.06916358, 0.051818855, 0.051451333, 0.052629896, 0.04719534,
0.05718551, 0.05541061, 0.082363926, 0.08575547, 0.04754941, 0.055617888, 0.15091683,
0.09278926, 0.05042546, 0.04857849, 0.068693094, 0.04599289, 0.033921365, 0.037720855,
0.043532662, 0.04279633, 0.038928356, 0.03997342, 0.040600162, 0.03329681, 0.034039255,
0.041247297, 0.04983663, 0.04745149, 0.042281915, 0.040804926, 0.057100464, 0.040333085,
0.04922031, 0.044304375, 0.03947195, 0.041224636, 0.0551045, 0.056085467, 0.058800463,
0.04289615, 0.04563858, 0.04618065, 0.056810927, 0.05231639, 0.06924053, 0.058242757,
0.043871794, 0.0463483, 0.05598369, 0.05846027, 0.049938362, 0.06345108, 0.07044193,
0.042393606, 0.047007643, 0.09630497, 0.08526432, 0.050365597, 0.028735844, 0.03205684,
0.036549993, 0.0347365, 0.041852225, 0.03203611, 0.027615482, 0.032101963, 0.03851234,
0.05233118, 0.043210123, 0.03223733, 0.034071937, 0.03424667, 0.042515613, 0.03829953,
0.07439894, 0.03717679, 0.034463603, 0.03628649, 0.0402007, 0.037777994, 0.057358198,
0.038916495, 0.046444174, 0.03266479, 0.033156317, 0.050743043, 0.04763264, 0.03626654,
0.056734975, 0.052101687, 0.05925775, 0.042290475, 0.041855343, 0.057301506, 0.06749396,
0.045831468, 0.04948963, 0.0543931, 0.048414323, 0.07963848, 0.056184925, 0.055450246,
0.043152418, 0.05559015, 0.05556093, 0.06321647, 0.05514236, 0.062106773, 0.04802578,
0.047127683, 0.048349574, 0.052603412, 0.060144495, 0.050309654, 0.050950285, 0.05554634,
0.05045114, 0.051383525, 0.040034994, 0.045409426, 0.049009815, 0.045500956, 0.04866239,
0.0395379, 0.044164293, 0.043751404, 0.105095394, 0.07071056, 0.04362918, 0.047086287,
0.06909711, 0.0669948, 0.046210207, 0.057441063, 0.07089536, 0.055324625, 0.056006093,
0.053664804, 0.097750045, 0.062913425, 0.07875179, 0.057181586, 0.05813459, 0.06124327,
0.06444214, 0.05664601, 0.06169746, 0.05418588, 0.044343982, 0.055050567, 0.046930697,
0.046303477, 0.046900596, 0.047295973, 0.05514935, 0.0603438, 0.06577012, 0.06661708,
0.039956417, 0.0462491, 0.07646105, 0.14706993, 0.055544637, 0.041268706, 0.044458438,
0.050446533, 0.05794229, 0.05495169, 0.060676057, 0.046027362, 0.10778716, 0.057457503,
0.06459852, 0.061628908, 0.050096776, 0.056447472, 0.0539321, 0.04508336, 0.057700895,
0.055513993, 0.13795151, 0.049457453, 0.06230635, 0.05980622, 0.05403209, 0.05448671,
0.065397054, 0.068816036, 0.067032844, 0.056188058, 0.065397054, 0.060510848, 0.068330206,
0.03330577, 0.033470657, 0.03800494, 0.03443132, 0.03982705, 0.03813088, 0.032152113,
0.036628198, 0.035265516, 0.061986648, 0.045197222, 0.03736573, 0.03960698, 0.034800373,
0.043098576, 0.03915879, 0.051731933, 0.042835053, 0.042930726, 0.047496207, 0.03932232,
0.043184005, 0.047091473, 0.056155726, 0.059685223, 0.03806852, 0.04093092, 0.0999789,
0.0649259, 0.038925536, 0.042832248, 0.08409425, 0.04089154, 0.038909122, 0.036605652,
0.05374993, 0.046491902, 0.054616764, 0.041277874, 0.047265366, 0.036231283, 0.042039506,
0.053304173, 0.05073315, 0.036248576, 0.042931907, 0.036022596, 0.044250526, 0.043373596,
0.045994483, 0.05446074, 0.041929662, 0.041854706, 0.04312082, 0.051190227, 0.07824444,
0.06757256, 0.04193269, 0.043130737, 0.050512865, 0.04930367, 0.04975695, 0.051680308,
0.07950305, 0.058863334, 0.04929351, 0.043385483, 0.04701449, 0.06018627, 0.050482746,
0.057526875, 0.052203067, 0.057901755, 0.051844932, 0.044515185, 0.046688188, 0.05607657,
0.058162205, 0.045242667, 0.04746308, 0.043249726, 0.05079592, 0.046312086, 0.10201422,
0.077142514, 0.044454824, 0.04795148, 0.08266846, 0.07299917, 0.044427343, 0.057328757,
0.080862105, 0.05312587, 0.05769028, 0.055124417, 0.09036038, 0.062423926, 0.07838046,
0.057231005, 0.057757374, 0.05784996, 0.059540227, 0.059777733, 0.065114945, 0.053710688,
0.03659559, 0.041499265, 0.038575713, 0.037444357, 0.039978433, 0.036431927, 0.04015819,
0.04056057, 0.054415334, 0.048310235, 0.033112735, 0.035995904, 0.04889065, 0.07395605,
0.05295411, 0.03958106, 0.04295988, 0.046657346, 0.058699988, 0.053935394, 0.056160618,
0.04274226, 0.116453074, 0.055185392, 0.062647946, 0.058871694, 0.04654387, 0.05054024,
0.05194793, 0.044413637, 0.06940529, 0.052422795, 0.19693881, 0.046133988, 0.04914668,
0.058953773, 0.056849103, 0.0469132, 0.064590365, 0.061596327, 0.056650814, 0.05201559,
0.056637667, 0.053194564, 0.061478343, 0.031080922, 0.03321227, 0.038547613, 0.03372357,
0.043596428, 0.036329698, 0.03054379, 0.03574549, 0.035498, 0.06363401, 0.048582632,
0.035667542, 0.037440784, 0.036126334, 0.04354289, 0.045278635, 0.060822107, 0.048888963,
0.04425368, 0.048567154, 0.045291547, 0.055385746, 0.04841984, 0.079450764, 0.06816453,
0.044042036, 0.05123091, 0.12228011, 0.07897972, 0.04400546, 0.0447851, 0.11622034,
0.043295324, 0.04233481, 0.042379823, 0.052154746, 0.047582876, 0.067458406, 0.045948207,
0.05537053, 0.038494408, 0.04147067, 0.062806, 0.054890834, 0.04104091, 0.04382309,
0.046105713, 0.04958753, 0.042071704, 0.036975607, 0.049533326, 0.09379651, 0.042658586,
0.048321564, 0.050482243, 0.039997723, 0.073782705, 0.053908583, 0.050515562, 0.035990205,
0.05063164, 0.04627135, 0.057741757, 0.04768878, 0.050802853, 0.043086022, 0.04061091,
0.039926022, 0.048817623, 0.048321847, 0.046974782, 0.043928176, 0.049938202, 0.044845354,
0.04410711, 0.03442774, 0.04336879, 0.042902265, 0.04270354, 0.048336323, 0.03818668,
0.03804136, 0.047469083, 0.098177835, 0.068889715, 0.042457636, 0.04054008, 0.076992564,
0.07607967, 0.0412029, 0.053297922, 0.06690778, 0.053288292, 0.053022355, 0.052386604,
0.07716527, 0.05852015, 0.06692355, 0.056383904, 0.056071464, 0.060512237, 0.059465688,
0.054309722, 0.05888797, 0.052145403, 0.052375205, 0.056164555, 0.054747608, 0.051239774,
0.04885527, 0.05939108, 0.057005998, 0.06656324, 0.0624137, 0.06335883, 0.048717987,
0.052617885, 0.056920957, 0.08048412, 0.07149222, 0.036587268, 0.0424367, 0.041887302,
0.048693456, 0.0475426, 0.047946718, 0.039937012, 0.07568522, 0.04466113, 0.051110502,
0.044924177, 0.037573583, 0.048516836, 0.049911853, 0.04021876, 0.066791624, 0.051561695,
0.12209247, 0.045245428, 0.053545717, 0.053209476, 0.054934457, 0.04782458, 0.06614571,
0.06495871, 0.057523463, 0.050929252, 0.058397833, 0.056927886, 0.06298788, 0.036743384,
0.0349979, 0.045462705, 0.031765092, 0.03472389, 0.037576843, 0.036961183, 0.034520417,
0.03519989, 0.05620566, 0.043057095, 0.036877457, 0.038067244, 0.036580473, 0.04453273,
0.045925274, 0.06504685, 0.04759648, 0.044783577, 0.0477755, 0.04358379, 0.054801255,
0.04866672, 0.07443146, 0.073984794, 0.042267445, 0.04898172, 0.14069876, 0.08093873,
0.046304125, 0.041090224, 0.14319648, 0.040633164, 0.038637336, 0.043050338, 0.045901816,
0.042197328, 0.054470323, 0.046877135, 0.04967498, 0.03543259, 0.037271105, 0.06292673,
0.0576008, 0.040879622, 0.047604352, 0.07259127, 0.050587542, 0.043806784, 0.04514308,
0.0431616, 0.05572214, 0.049960993, 0.06988157, 0.08310898, 0.042966466, 0.050049733,
0.123279385, 0.079114944, 0.047680326, 0.040875923, 0.049332026, 0.042833865, 0.04706223,
0.048977505, 0.046595167, 0.04457547, 0.055926237, 0.057637904, 0.058264013, 0.04135201,
0.042158954, 0.053492382, 0.08041091, 0.07277158, 0.04639642, 0.055078547, 0.04941898,
0.05300393, 0.053116426, 0.05671656, 0.048191756, 0.07300704, 0.06528669, 0.06808969,
0.050437998, 0.049174838, 0.06056547, 0.078058906, 0.07580518, 0.042396124, 0.050150327,
0.048823066, 0.0527014, 0.054639507, 0.056088414, 0.04583954, 0.11994065, 0.053969122,
0.064717375, 0.05485576, 0.04433938, 0.05733741, 0.05880314, 0.04699286, 0.055130143,
0.04974826, 0.052880082, 0.05327589, 0.053168375, 0.053406537, 0.059534714, 0.050588954,
0.061240382, 0.058875263, 0.051361177, 0.053922176, 0.059976887, 0.09516252, 0.08018329,
0.04545396, 0.08140285, 0.047680635, 0.03869249, 0.040086955, 0.061798688, 0.045229286,
0.05603005, 0.04417535, 0.0521479, 0.041321144, 0.044541158, 0.058558356, 0.050338402,
0.04242194, 0.045588236, 0.069060944, 0.048469387, 0.046309724, 0.048318267, 0.045807976,
0.0535736, 0.052664295, 0.068266146, 0.07799713, 0.045780215, 0.04909431, 0.112339936,
0.070314385, 0.04419211, 0.033724595, 0.03410791, 0.043794032, 0.03187069, 0.036984585,
0.03823124, 0.03341557, 0.03515259, 0.0425961, 0.049496643, 0.050007172, 0.038145266,
0.038679775, 0.035329197, 0.041736607, 0.044795394, 0.06630925, 0.051845748, 0.0416572,
0.043461327, 0.047074873, 0.05462432, 0.052428182, 0.064276665, 0.074679404, 0.042314846,
0.0474966, 0.10759973, 0.07222327, 0.04426359, 0.034283884, 0.03233796, 0.040027454,
0.034447286, 0.039293353, 0.03877686, 0.031135479, 0.037088048, 0.037167564, 0.049808662,
0.05974364, 0.038158104, 0.036755506, 0.033132818, 0.04111535, 0.06063253, 0.073437706,
0.05716138, 0.05189265, 0.051030822, 0.10910791, 0.06270103, 0.07898103, 0.05286666,
0.058061205, 0.05691703, 0.061467808, 0.056010135, 0.06201299, 0.05399725, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.07402268, 0.056541767, 0.16844375, 0.04660381, 0.051025067, 0.06341632, 0.06355411,
0.04975096, 0.0672791, 0.06830665, 0.056620207, 0.055783823, 0.060685024, 0.057520986,
0.060445752, 0.06719341, 0.051797275, 0.1190513, 0.04496534, 0.052497003, 0.05307036,
0.051965103, 0.045557573, 0.06494456, 0.06334669, 0.05963127, 0.051213432, 0.058973867,
0.054529563, 0.05835422, 0.039116982, 0.046037167, 0.04897066, 0.044858374, 0.047009263,
0.037373167, 0.040699355, 0.047409218, 0.09799839, 0.077615, 0.04221282, 0.04456356,
0.08434981, 0.06589684, 0.041694183, 0.049363878, 0.058241136, 0.050503608, 0.05111098,
0.05212768, 0.05161102, 0.05136896, 0.06416508, 0.062738575, 0.06863375, 0.04542008,
0.04722282, 0.060368873, 0.09314595, 0.07632604, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.051810868, 0.15060389,
0.050372574, 0.047594097, 0.047746398, 0.06317587, 0.05470383, 0.06636927, 0.052198004,
0.060208358, 0.04444948, 0.04867489, 0.06862048, 0.06581387, 0.048071872, 0.05212301,
0.19261391, 0.049889367, 0.045492154, 0.046320803, 0.063494325, 0.054510493, 0.06347422,
0.051092915, 0.05834233, 0.043416172, 0.047880307, 0.0702798, 0.06400335, 0.046974927,
0.05021099, 0.048354056, 0.059667684, 0.046677727, 0.05017591, 0.043358855, 0.04442835,
0.041634273, 0.054045133, 0.05184192, 0.04004866, 0.041927166, 0.053622227, 0.049962867,
0.04678731, 0.04960266, 0.05250815, 0.05240086, 0.04739295, 0.04544658, 0.04322888,
0.043939818, 0.041548233, 0.050774094, 0.04943702, 0.03821363, 0.0407921, 0.05539004,
0.051207837, 0.042488493, 0.06328004, 0.053216264, 0.15388635, 0.04591706, 0.05419384,
0.05973049, 0.053412627, 0.051632915, 0.07128914, 0.06796956, 0.061156306, 0.053828295,
0.059972476, 0.05688719, 0.06154727, 0.033002693, 0.033424146, 0.037261054, 0.033668816,
0.03961171, 0.035538744, 0.03135171, 0.033294108, 0.034831535, 0.05911304, 0.045328487,
0.036599185, 0.036679827, 0.03556029, 0.04563387, 0.051895823, 0.05263295, 0.056247562,
0.05598046, 0.0499578, 0.045820042, 0.04367644, 0.04248887, 0.05465484, 0.050115716,
0.048152138, 0.047774624, 0.055237323, 0.04895593, 0.043262295, 0.04682383, 0.17115448,
0.046683654, 0.03951781, 0.042132188, 0.054523326, 0.047923546, 0.054155782, 0.04783651,
0.05693014, 0.037199784, 0.040470045, 0.06653364, 0.063782014, 0.04647884, 0.05149154,
0.049366362, 0.056791425, 0.049614605, 0.043045238, 0.06931398, 0.08110497, 0.053108905,
0.055509944, 0.059460156, 0.055142593, 0.15891056, 0.06150278, 0.059633452, 0.045450278,
0.055871118, 0.05431901, 0.06074547, 0.054377165, 0.05525177, 0.048592832, 0.049849074,
0.046415195, 0.059283774, 0.058775764, 0.045543056, 0.05055737, 0.063907556, 0.056212258,
0.0510321, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04374131, 0.053615585, 0.055853583, 0.049511068, 0.0524204,
0.043637548, 0.04457041, 0.049235854, 0.0946614, 0.07503376, 0.051667754, 0.05198725,
0.06557026, 0.06450746, 0.04998334, 0.041751746, 0.053153053, 0.050679654, 0.043849614,
0.04617272, 0.041241005, 0.047239546, 0.04512551, 0.10352506, 0.07099817, 0.043111324,
0.04860836, 0.08158604, 0.07801689, 0.04364956, 0.056069564, 0.049644783, 0.12504463,
0.043591842, 0.050630584, 0.05454832, 0.050715845, 0.045108918, 0.05804933, 0.059994318,
0.058554303, 0.0524328, 0.060279325, 0.053425856, 0.060152154, 0.04267213, 0.051120076,
0.04991285, 0.042135235, 0.047584277, 0.039719492, 0.044530753, 0.045859, 0.07902834,
0.072425246, 0.040540893, 0.044927932, 0.0920004, 0.066898525, 0.041930273, 0.046197712,
0.06048142, 0.052243836, 0.048365563, 0.05314717, 0.042979, 0.047336042, 0.051692903,
0.09175199, 0.07512821, 0.044747107, 0.047580034, 0.102162726, 0.07949875, 0.045863412,
0.053620968, 0.06573287, 0.05327219, 0.052513614, 0.05128944, 0.090251245, 0.06207211,
0.07705115, 0.054874796, 0.057200618, 0.058751434, 0.06763566, 0.054835275, 0.05827182,
0.050751943, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04225009, 0.052566707, 0.04858897, 0.051850367, 0.051809344,
0.063589804, 0.051178254, 0.23904969, 0.05666413, 0.06601907, 0.055422187, 0.05094665,
0.061565347, 0.060174953, 0.048324432, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.06643752, 0.071883485, 0.06359828,
0.052847784, 0.05253145, 0.1505782, 0.0638951, 0.076169744, 0.052076485, 0.057974283,
0.060939535, 0.059495952, 0.05450812, 0.060442835, 0.05662123, 0.048203602, 0.06319916,
0.051084843, 0.050342344, 0.047240384, 0.044791676, 0.05828439, 0.0537186, 0.07339557,
0.07413614, 0.048031036, 0.0531026, 0.1273813, 0.07167599, 0.044606112, 0.041222908,
0.036614295, 0.057363898, 0.04627073, 0.050066657, 0.052540325, 0.03691263, 0.04515518,
0.048323013, 0.052655976, 0.2314965, 0.051274348, 0.040717028, 0.03878537, 0.04663876,
0.045166403, 0.056576654, 0.04800029, 0.051678974, 0.050191484, 0.055390522, 0.05134136,
0.07951702, 0.067708515, 0.068137325, 0.044502504, 0.048936456, 0.06495072, 0.09182645,
0.058423758, 0.048409693, 0.14959691, 0.04588403, 0.039190438, 0.037857104, 0.054066125,
0.050331697, 0.056994956, 0.04479165, 0.05579927, 0.03717144, 0.040909443, 0.057666402,
0.059539396, 0.043958105, 0.047129232, 0.0671868, 0.049284074, 0.04750264, 0.047648568,
0.044127997, 0.056159507, 0.05173083, 0.07630617, 0.0890472, 0.043230373, 0.049820937,
0.12377616, 0.078318514, 0.046027143, 0.04240991, 0.052751236, 0.048827544, 0.04498895,
0.04399203, 0.055217486, 0.047629803, 0.07844446, 0.051149394, 0.05850096, 0.04379998,
0.044647485, 0.04791557, 0.057960942, 0.04859417, 0.04549056, 0.048274383, 0.0535352,
0.05493563, 0.10056133, 0.04214963, 0.038463037, 0.044257242, 0.06314682, 0.05512406,
0.050499767, 0.04474536, 0.052178815, 0.051305797, 0.05094844, 0.030484121, 0.031859342,
0.03322838, 0.032108467, 0.038326837, 0.035074577, 0.028123701, 0.04226515, 0.036093835,
0.053217854, 0.044160537, 0.03754378, 0.040332247, 0.032323435, 0.03571911, 0.045636576,
0.05275358, 0.048821, 0.04383941, 0.042891823, 0.04133277, 0.040791642, 0.039954122,
0.04980689, 0.04709338, 0.038563162, 0.040581062, 0.053575438, 0.045947056, 0.03754217,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.038531628, 0.08362011, 0.038234144, 0.039594345, 0.0401201, 0.043229453,
0.041690897, 0.052240428, 0.045467507, 0.053565614, 0.035222758, 0.037104163, 0.059677478,
0.054381587, 0.038133014, 0.053085383, 0.045924123, 0.053968564, 0.0511445, 0.046828095,
0.07317063, 0.06379674, 0.05111114, 0.052686438, 0.05858576, 0.06802094, 0.19703466,
0.056555875, 0.056638338, 0.049673248, 0.05988164, 0.05073578, 0.213882, 0.04533081,
0.052178964, 0.053840593, 0.054209404, 0.047279835, 0.062294587, 0.0642222, 0.059004564,
0.052965406, 0.058745444, 0.059589747, 0.06583903, 0.040086113, 0.047161218, 0.048199076,
0.04746106, 0.046134796, 0.056158483, 0.04683746, 0.10277753, 0.051926896, 0.05723671,
0.05624996, 0.04546305, 0.049575526, 0.05230621, 0.04454399, 0.043805268, 0.05146829,
0.05634049, 0.052477513, 0.055734523, 0.041784313, 0.04388541, 0.052232195, 0.13752593,
0.09055372, 0.049221925, 0.048988044, 0.07902978, 0.075505346, 0.049812417, 0.05305722,
0.05151833, 0.05551861, 0.07030812, 0.08613701, 0.04849668, 0.044489656, 0.051978,
0.06133911, 0.0582729, 0.05739116, 0.05080929, 0.058123115, 0.053591453, 0.05343619,
0.030257456, 0.027576206, 0.04362692, 0.042184714, 0.04000697, 0.04164361, 0.029912844,
0.03519451, 0.042558268, 0.039699342, 0.08950146, 0.044357266, 0.033383615, 0.031285226,
0.034341678, 0.05541072, 0.052412692, 0.11567141, 0.050520778, 0.065940484, 0.061622795,
0.048890524, 0.056703817, 0.06780803, 0.064739466, 0.07369456, 0.052024312, 0.056937464,
0.057425037, 0.063430816, 0.06431355, 0.072426245, 0.06106972, 0.05177213, 0.050018877,
0.12311235, 0.06347173, 0.07226823, 0.051601138, 0.058724433, 0.05740385, 0.058400113,
0.056250475, 0.059197262, 0.052654747, 0.037097085, 0.040713944, 0.042262774, 0.050132036,
0.04715215, 0.050539486, 0.040787008, 0.08180431, 0.043730155, 0.051033475, 0.049131807,
0.04008279, 0.046545427, 0.048048556, 0.041523956, 0.04328241, 0.04841979, 0.05803983,
0.048052818, 0.056555264, 0.044536248, 0.045393392, 0.043534264, 0.10533096, 0.06974714,
0.05334247, 0.052289136, 0.063632086, 0.066448, 0.055960447, 0.04822188, 0.0386975,
0.065400355, 0.050618347, 0.05308657, 0.054266255, 0.040514812, 0.04464849, 0.05188858,
0.050963566, 0.26412663, 0.056025706, 0.044460747, 0.043357246, 0.051150363, 0.036383875,
0.044653513, 0.05029086, 0.04314432, 0.049343668, 0.038554057, 0.03738804, 0.047302168,
0.10045205, 0.073157966, 0.04372025, 0.040275622, 0.07040189, 0.062664896, 0.040396158,
0.038875133, 0.055835217, 0.043141674, 0.04042228, 0.04499552, 0.038957946, 0.04207178,
0.047861937, 0.060644902, 0.07373149, 0.039085355, 0.040797245, 0.106288895, 0.06431152,
0.041108456, 0.042120546, 0.03598138, 0.051353946, 0.05671337, 0.057466988, 0.05425608,
0.038168304, 0.04826341, 0.05058153, 0.051040925, 0.20308842, 0.051506665, 0.041755006,
0.040445928, 0.05329514, 0.068536885, 0.057044685, 0.15468086, 0.047757503, 0.051724277,
0.0742179, 0.06088268, 0.052982192, 0.063273706, 0.07228004, 0.06383646, 0.055053934,
0.060469493, 0.05581237, 0.061447, 0.04011045, 0.04402654, 0.04713008, 0.052416418,
0.0486254, 0.05749541, 0.045883507, 0.106183566, 0.053843103, 0.05766657, 0.062128264,
0.046629064, 0.051609125, 0.05192656, 0.04348066, 0.03165994, 0.0314015, 0.04034334,
0.030814437, 0.039151866, 0.032190137, 0.028732304, 0.030525465, 0.040071107, 0.048702873,
0.047152024, 0.033881832, 0.033632528, 0.03374149, 0.0408093, 0.057579566, 0.072210945,
0.057194866, 0.054295238, 0.05310721, 0.08975495, 0.065397605, 0.07886117, 0.057394315,
0.058073558, 0.05830191, 0.068764895, 0.059430126, 0.06362996, 0.05228181, 0.043818865,
0.05584147, 0.05036498, 0.0469965, 0.05148294, 0.041897755, 0.045539953, 0.0516171,
0.1171022, 0.07705994, 0.045021296, 0.048868056, 0.09531767, 0.073844954, 0.044402223,
0.042257756, 0.038109522, 0.05560105, 0.04908857, 0.055792045, 0.060594782, 0.038568232,
0.050914757, 0.04906877, 0.054288667, 0.20823847, 0.047887545, 0.04091828, 0.042966243,
0.054760456, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04794493, 0.12734064, 0.047182247, 0.043195006, 0.043180566,
0.059572093, 0.050879814, 0.06328395, 0.048425253, 0.056477338, 0.039541826, 0.04445429,
0.068070024, 0.05962902, 0.043807313, 0.04710035, 0.048648417, 0.0503599, 0.051175565,
0.044808608, 0.06356441, 0.04495751, 0.05233305, 0.048798084, 0.051966026, 0.05341076,
0.0444634, 0.04338703, 0.055318546, 0.06436477, 0.042692646, 0.050082907, 0.05534677,
0.049887933, 0.054013204, 0.04326493, 0.044203743, 0.050871532, 0.115863636, 0.091561064,
0.048775826, 0.04971568, 0.088000886, 0.0677706, 0.04708758, 0.041868847, 0.05148983,
0.047271054, 0.049771573, 0.050763957, 0.05355962, 0.048592843, 0.079506986, 0.04974809,
0.054739557, 0.054630466, 0.042741735, 0.05510265, 0.055150542, 0.043671597, 0.046907037,
0.062306773, 0.049014598, 0.04186057, 0.042975616, 0.04274686, 0.05776889, 0.044900537,
0.07315448, 0.07636547, 0.04120272, 0.050528675, 0.10546668, 0.083892636, 0.04660515,
0.035082895, 0.047379803, 0.040407684, 0.041653894, 0.042179603, 0.049450107, 0.039754797,
0.089556634, 0.04652471, 0.053178653, 0.040911373, 0.03859304, 0.053023938, 0.05324785,
0.04034174, 0.042018976, 0.03787478, 0.050061394, 0.047923777, 0.052194618, 0.057721872,
0.0384574, 0.052174155, 0.04690734, 0.05092244, 0.23901133, 0.045831613, 0.040986266,
0.042281706, 0.05278227, 0.052875493, 0.04946888, 0.11500199, 0.04232495, 0.05687801,
0.05890864, 0.04524636, 0.04863315, 0.05787425, 0.06222754, 0.059135277, 0.046389528,
0.055057, 0.04964178, 0.058306385, 0.044178784, 0.038876012, 0.041334826, 0.04674283,
0.043997575, 0.042027324, 0.042647686, 0.03985524, 0.044303343, 0.044905424, 0.04377425,
0.04063387, 0.042193852, 0.05781808, 0.07974786, 0.049542576, 0.057847846, 0.05406843,
0.050050516, 0.046189878, 0.09625096, 0.061713457, 0.07558324, 0.05083848, 0.055309884,
0.05155771, 0.069323145, 0.051055275, 0.057088573, 0.047823716, 0.050770264, 0.05942468,
0.05610241, 0.049469274, 0.045540206, 0.11236959, 0.059794277, 0.07813436, 0.049368132,
0.054671723, 0.0513129, 0.06264686, 0.05004185, 0.05716098, 0.048651416, 0.07037545,
0.05591442, 0.17839935, 0.045331083, 0.04796091, 0.066418044, 0.06616741, 0.049819443,
0.06175117, 0.069666445, 0.059394836, 0.056587838, 0.057877753, 0.05469919, 0.05963665,
0.052185524, 0.056785166, 0.05313248, 0.05614685, 0.054465204, 0.048254374, 0.047110878,
0.049867038, 0.055783045, 0.05530879, 0.045497715, 0.046438336, 0.06380884, 0.056564763,
0.04852877, 0.052069157, 0.05642469, 0.06728154, 0.052538745, 0.06109408, 0.058345865,
0.05009697, 0.055596475, 0.07491742, 0.08413421, 0.07863945, 0.059217222, 0.060761336,
0.058456216, 0.06122146, 0.051195167, 0.055262487, 0.056746274, 0.065966375, 0.0920286,
0.04751335, 0.04382219, 0.050001234, 0.065015204, 0.057998728, 0.052974597, 0.049516164,
0.059477802, 0.054877244, 0.05207142, 0.046946578, 0.048795033, 0.05102423, 0.05700407,
0.06526842, 0.045375712, 0.04245136, 0.0460515, 0.055960562, 0.054402895, 0.04751941,
0.045995574, 0.05628164, 0.051212706, 0.04737643, 0.04406176, 0.049175717, 0.046507414,
0.04879401, 0.04495532, 0.048122194, 0.04902428, 0.06690611, 0.05747587, 0.05973223,
0.039840143, 0.045529414, 0.056351274, 0.079692736, 0.048578233, 0.07174579, 0.055391923,
0.16300432, 0.049100626, 0.05268444, 0.062353835, 0.061856717, 0.048545163, 0.061873097,
0.06902558, 0.065926, 0.05720118, 0.06169364, 0.056905698, 0.06269198, 0.041376825,
0.05692004, 0.050139517, 0.037439466, 0.039543025, 0.036864135, 0.05087217, 0.041158114,
0.06543298, 0.06444302, 0.037333023, 0.04425803, 0.08928982, 0.06972989, 0.043662712,
0.053608995, 0.053393774, 0.05527103, 0.05078832, 0.056716923, 0.052511718, 0.060450185,
0.048540298, 0.062232677, 0.05960687, 0.048840344, 0.050484426, 0.061700102, 0.10047087,
0.071860105, 0.05250073, 0.06042569, 0.057916675, 0.05145042, 0.046467066, 0.114561126,
0.06658179, 0.07982877, 0.053219177, 0.05588726, 0.054415718, 0.069723964, 0.05233711,
0.059827004, 0.05028182, 0.035096873, 0.030738555, 0.040248618, 0.0328837, 0.038270604,
0.037570056, 0.030483907, 0.034993168, 0.03328765, 0.047667462, 0.058102693, 0.03497999,
0.03400191, 0.03146466, 0.041490678, 0.031113254, 0.034983672, 0.03839729, 0.03071117,
0.037211407, 0.036157254, 0.030411357, 0.034940347, 0.03345352, 0.06875055, 0.044090934,
0.03335572, 0.0365976, 0.034772426, 0.041082017, 0.06809848, 0.07289091, 0.062095225,
0.05299071, 0.05204909, 0.13672641, 0.06325316, 0.074460186, 0.05163194, 0.058118787,
0.058401212, 0.05813882, 0.054826412, 0.060354322, 0.055642903, 0.0510962, 0.06970275,
0.04601435, 0.04105276, 0.042654853, 0.040751945, 0.051959686, 0.045496106, 0.056450225,
0.060422286, 0.040191576, 0.043216888, 0.06438629, 0.067209326, 0.0466763, 0.039640047,
0.07676861, 0.042595435, 0.03629555, 0.03704869, 0.052824616, 0.042437598, 0.056739613,
0.041316282, 0.046966016, 0.03781599, 0.03950543, 0.05446866, 0.047452163, 0.038529836,
0.033751313, 0.035417676, 0.040866088, 0.032030605, 0.03431625, 0.033265773, 0.034176048,
0.032589503, 0.03689125, 0.054156836, 0.03873557, 0.033448223, 0.034821954, 0.03858778,
0.04359508, 0.04931974, 0.19500634, 0.050177902, 0.043240193, 0.0469583, 0.047821857,
0.04801092, 0.05728326, 0.056839816, 0.061031144, 0.040575355, 0.042569175, 0.0676008,
0.06298791, 0.046997894, 0.049136665, 0.045468576, 0.05402325, 0.046276662, 0.043702368,
0.057455655, 0.05956622, 0.047666457, 0.056321476, 0.058014244, 0.052196782, 0.10365021,
0.058507267, 0.056616664, 0.04365368, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.03769961, 0.048587386, 0.046592355,
0.048369296, 0.058340933, 0.04026463, 0.038411066, 0.050804265, 0.14543298, 0.0677947,
0.045895133, 0.04149855, 0.06490304, 0.06547883, 0.04173602, 0.051848106, 0.05724612,
0.06399605, 0.04958023, 0.053706814, 0.15392973, 0.05052974, 0.06990692, 0.05091236,
0.057748277, 0.056569982, 0.0499637, 0.051792756, 0.055406693, 0.05228684, 0.051478174,
0.06099976, 0.053720713, 0.05041945, 0.052023, 0.050107382, 0.063369915, 0.05715826,
0.07271902, 0.069294095, 0.045510318, 0.05124249, 0.0772025, 0.17149186, 0.06457971,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.04518287, 0.062048584, 0.049145173, 0.04248713,
0.045052566, 0.03982358, 0.051746663, 0.044676013, 0.081197046, 0.07442316, 0.041420758,
0.047668286, 0.16089347, 0.0736727, 0.044098057, 0.047476172, 0.16787659, 0.048477784,
0.040879637, 0.045901146, 0.047863003, 0.04709563, 0.05376409, 0.051610924, 0.0562463,
0.040495906, 0.04263001, 0.06808803, 0.057964977, 0.044128504, 0.056316104, 0.055271212,
0.059901543, 0.051863242, 0.04897049, 0.06293718, 0.081646994, 0.054484453, 0.060223993,
0.06623221, 0.054496083, 0.11114487, 0.06796417, 0.06343521, 0.046409097, 0.039509647,
0.04398062, 0.043148484, 0.07033881, 0.15706897, 0.04196154, 0.035654563, 0.049890704,
0.058916066, 0.05863048, 0.052142344, 0.038869016, 0.04665895, 0.052009903, 0.057315238,
0.040811416, 0.05505625, 0.051306136, 0.052893136, 0.061117645, 0.044381276, 0.04252268,
0.055173293, 0.14000249, 0.08099231, 0.04960463, 0.04694218, 0.08533405, 0.07260523,
0.04684717, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04661323, 0.058451235, 0.047056314, 0.050811324, 0.05131547,
0.045898438, 0.05421781, 0.060402613, 0.07375644, 0.0768769, 0.042436153, 0.048114683,
0.08200655, 0.1851463, 0.055602252, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.09327226, 0.052964166, 0.2063042,
0.04292001, 0.041629102, 0.062326796, 0.07707917, 0.045163788, 0.05155487, 0.06024479,
0.050151613, 0.055840824, 0.05108442, 0.057194993, 0.05226899, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.049687978,
0.065221526, 0.052793097, 0.04840453, 0.0511417, 0.04574766, 0.05388156, 0.050652985,
0.075727664, 0.072389, 0.04713877, 0.05359436, 0.10133088, 0.07523711, 0.049581412,
0.04822151, 0.11807779, 0.04587858, 0.04091746, 0.04424148, 0.044924155, 0.04864631,
0.054398704, 0.04759457, 0.055111226, 0.039493684, 0.04153325, 0.055758204, 0.05742945,
0.04505261, 0.053047206, 0.048830178, 0.0581411, 0.05292685, 0.046986427, 0.06518757,
0.083125226, 0.054194666, 0.05747472, 0.06513332, 0.054578274, 0.18610239, 0.06315191,
0.059730433, 0.046601057, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.040214095, 0.049310263, 0.0530292, 0.05562385,
0.06473902, 0.046550933, 0.042509016, 0.053702082, 0.16861495, 0.07843978, 0.05249162,
0.047672883, 0.07245585, 0.070280515, 0.048455138, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.039017845, 0.049467724,
0.04193475, 0.045319736, 0.046745773, 0.04224875, 0.043652743, 0.05873521, 0.06288681,
0.06440031, 0.038276043, 0.039973304, 0.069581605, 0.09115961, 0.04301268, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.04892648, 0.1580787, 0.04687213, 0.044695888,
0.048950795, 0.05053326, 0.04820275, 0.060692713, 0.05275942, 0.056345, 0.043172717,
0.043472666, 0.06461683, 0.059185848, 0.044019014, 0.046369914, 0.06709995, 0.05091679,
0.050795577, 0.055722255, 0.04588183, 0.052812915, 0.053593494, 0.09118782, 0.08479329,
0.04683428, 0.050419293, 0.1567129, 0.09523405, 0.05010945, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.04803235,
0.058163553, 0.04816825, 0.0489386, 0.05164239, 0.045026097, 0.05472727, 0.059995823,
0.07554103, 0.07470883, 0.041846972, 0.04681608, 0.07743794, 0.19656026, 0.06086666,
0.045334447, 0.063459076, 0.052495066, 0.05127261, 0.051574655, 0.06640976, 0.05105356,
0.17485027, 0.059681438, 0.06966195, 0.050046626, 0.049267292, 0.059857372, 0.06436893,
0.050283108, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.052580874, 0.20204175, 0.051268708, 0.049419634, 0.054364067,
0.056236852, 0.054399945, 0.06552496, 0.059644684, 0.06259563, 0.047749106, 0.048725467,
0.074267834, 0.06560881, 0.047928218, 0.051968258, 0.07938712, 0.054040555, 0.051911935,
0.053169794, 0.046898942, 0.057729628, 0.052424036, 0.082952894, 0.087329224, 0.047728915,
0.05280322, 0.13421994, 0.09409728, 0.05182207, 0.05443959, 0.060364302, 0.05707845,
0.046808247, 0.04965922, 0.05458866, 0.06063587, 0.06461446, 0.06672073, 0.113682926,
0.04956715, 0.05651292, 0.08261671, 0.07147393, 0.051192295, 0.04698021, 0.07001985,
0.050000895, 0.050148726, 0.055927772, 0.04500015, 0.05010032, 0.052096777, 0.083353385,
0.07113084, 0.047121502, 0.049226828, 0.12213901, 0.09203602, 0.05013211, 0.055319946,
0.058431122, 0.060689155, 0.048861474, 0.050333552, 0.05174554, 0.05611533, 0.06490904,
0.072127804, 0.122148916, 0.05306468, 0.054234255, 0.07572126, 0.066404186, 0.049849194,
0.0556079, 0.056273963, 0.06549472, 0.046359953, 0.044969674, 0.22235572, 0.06354806,
0.06341088, 0.047498982, 0.05656719, 0.05687299, 0.06748057, 0.05210824, 0.052079808,
0.04937135, 0.050431114, 0.04911063, 0.051418792, 0.046011563, 0.04106019, 0.05432615,
0.09899995, 0.048892587, 0.05207308, 0.058746215, 0.04531632, 0.099496625, 0.06517643,
0.058302503, 0.040124524, 0.06897097, 0.04790338, 0.2707806, 0.04041974, 0.043317277,
0.05600416, 0.054983497, 0.043241616, 0.054706346, 0.06407063, 0.050652277, 0.04702053,
0.05213398, 0.05043535, 0.055359673, 0.065629475, 0.052723207, 0.12830009, 0.0426945,
0.044266034, 0.07346093, 0.057122715, 0.049879692, 0.051944718, 0.063339174, 0.054884315,
0.057404455, 0.054110296, 0.052862253, 0.05436465, 0.044996355, 0.05703039, 0.05818368,
0.053452726, 0.056038514, 0.04581762, 0.045585476, 0.055617187, 0.13282512, 0.09117657,
0.050301597, 0.05055357, 0.07468328, 0.069079585, 0.047233853, 0.046755653, 0.06171499,
0.047188386, 0.051858313, 0.052128814, 0.045433525, 0.054022975, 0.05493011, 0.07462903,
0.07218557, 0.04367763, 0.04837212, 0.092574745, 0.19760881, 0.05399946, 0.057976965,
0.057502557, 0.056598373, 0.051607523, 0.047038376, 0.057756554, 0.10563174, 0.052533884,
0.059243403, 0.06491466, 0.051045526, 0.09843648, 0.07314052, 0.06350288, 0.044367433,
0.050529316, 0.26296976, 0.04876226, 0.04457496, 0.048365004, 0.051195092, 0.052189033,
0.059752133, 0.05273999, 0.059567995, 0.04188957, 0.044877537, 0.07020156, 0.06383538,
0.04598298, 0.051835433, 0.24136057, 0.05029134, 0.0462888, 0.0518227, 0.050993484,
0.050947294, 0.062137783, 0.055837777, 0.059881788, 0.043500844, 0.0453089, 0.07061063,
0.06338799, 0.04815118, 0.043518446, 0.045410052, 0.054409966, 0.05748714, 0.14940946,
0.044366293, 0.037438817, 0.04465416, 0.06823838, 0.057719402, 0.05825266, 0.04582184,
0.04993146, 0.049953442, 0.051519904, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0789242, 0.045664266, 0.109193794,
0.039903842, 0.038807403, 0.056154463, 0.060332607, 0.041570596, 0.04370875, 0.048799403,
0.04725107, 0.04464553, 0.04213879, 0.04774746, 0.046127178, 0.053078473, 0.04470706,
0.05646723, 0.040893953, 0.035604272, 0.052913718, 0.0678918, 0.049682654, 0.046256714,
0.05651157, 0.04052064, 0.06282507, 0.04962243, 0.05223462, 0.040735077, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.044005845, 0.17083567, 0.044034813, 0.041859303, 0.04677615, 0.048123978, 0.044826448,
0.05895672, 0.050877266, 0.05500731, 0.037717313, 0.039892163, 0.07031226, 0.06045043,
0.04355738, 0.056588374, 0.057754304, 0.0588328, 0.052593086, 0.04966049, 0.059394263,
0.07990304, 0.0571006, 0.06194069, 0.07022868, 0.05172027, 0.098020256, 0.07391266,
0.067165, 0.046482354, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.043803237, 0.06568376, 0.044308554, 0.049342316,
0.049110346, 0.057134435, 0.047396332, 0.15143877, 0.05265901, 0.056693435, 0.04347107,
0.042410243, 0.05758681, 0.062506326, 0.044177108, 0.037917834, 0.048809275, 0.049186904,
0.05375544, 0.065137126, 0.043727126, 0.03938903, 0.05861166, 0.13476804, 0.08093355,
0.051671077, 0.04573005, 0.073291995, 0.06525536, 0.04717915, 0.038902625, 0.049495228,
0.04878248, 0.05166036, 0.058398526, 0.041841723, 0.039488483, 0.05509452, 0.12326032,
0.07441482, 0.04645086, 0.044763975, 0.07297266, 0.06701979, 0.04093264, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.040350646, 0.052665282, 0.04962943, 0.05253575, 0.05837092, 0.044270486, 0.043647792,
0.05300047, 0.12819372, 0.070274666, 0.05081998, 0.04889802, 0.07898821, 0.071785524,
0.046972204, 0.040950183, 0.05428325, 0.052778445, 0.048309322, 0.05183465, 0.04129024,
0.044865675, 0.04789839, 0.13022903, 0.07480878, 0.04642794, 0.047271244, 0.08761197,
0.07637634, 0.04546764, 0.050763503, 0.055917665, 0.05728586, 0.048059277, 0.04883512,
0.24955748, 0.050925843, 0.06662162, 0.04699833, 0.05644099, 0.05767666, 0.051330023,
0.047387704, 0.053344574, 0.051453166, 0.043677654, 0.058500804, 0.04715373, 0.0528085,
0.051953126, 0.06406163, 0.047451384, 0.26750657, 0.05227959, 0.065065965, 0.048658755,
0.044674926, 0.050918687, 0.056307796, 0.048980873, 0.04256046, 0.07014383, 0.044054937,
0.050712675, 0.05311053, 0.059145026, 0.044885125, 0.13497922, 0.053729694, 0.06619897,
0.045316346, 0.04208812, 0.053404283, 0.0599239, 0.05003217, 0.045178153, 0.061324306,
0.05031753, 0.0575365, 0.05948909, 0.068226136, 0.04815604, 0.19063412, 0.05682001,
0.07168199, 0.053483836, 0.047583196, 0.057653118, 0.062258963, 0.052529167, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.043584075, 0.059611302, 0.0457421, 0.04572957, 0.048837006, 0.040120445, 0.04559595,
0.048324578, 0.077411376, 0.06353192, 0.042174764, 0.043885503, 0.17373672, 0.07996077,
0.041945584, 0.05844564, 0.057912424, 0.060478896, 0.05166467, 0.048813473, 0.059603076,
0.08323814, 0.056202356, 0.061388336, 0.06878663, 0.051655725, 0.09899175, 0.072931804,
0.06544991, 0.04573403, 0.04994681, 0.06062978, 0.04717342, 0.04718799, 0.045220353,
0.04687627, 0.056252044, 0.066233136, 0.06792978, 0.07224135, 0.04228198, 0.04702597,
0.06744013, 0.12585373, 0.050303858, 0.043029577, 0.1695206, 0.04408497, 0.03830764,
0.043919988, 0.048303053, 0.044211924, 0.050022244, 0.049736172, 0.05130492, 0.037116334,
0.038777497, 0.06358845, 0.0555371, 0.04185644, 0.043806657, 0.06947093, 0.046509095,
0.046499982, 0.053232487, 0.040738974, 0.047256764, 0.047307678, 0.08121092, 0.066298604,
0.044896286, 0.046344794, 0.12594144, 0.07443708, 0.046924826, 0.04860515, 0.065736,
0.05150733, 0.05226094, 0.051403563, 0.05465975, 0.05551097, 0.071940385, 0.07146237,
0.06815722, 0.044725344, 0.047090393, 0.06308775, 0.09574219, 0.060706213, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.04834986, 0.040113047, 0.050685182, 0.039092176, 0.037185453, 0.040195785, 0.04953694,
0.043154906, 0.053932756, 0.07377865, 0.040372163, 0.042354647, 0.05971315, 0.053638574,
0.038481668, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.04263261, 0.16137113, 0.042351447, 0.041137647, 0.047155946,
0.046768606, 0.04394796, 0.05466952, 0.049937993, 0.052351598, 0.036657944, 0.03836543,
0.06456275, 0.058751598, 0.04150447, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.059442993, 0.05840754, 0.057248816,
0.048756637, 0.04686952, 0.054126035, 0.08155461, 0.052629832, 0.056453522, 0.06354739,
0.047665387, 0.07838742, 0.06975576, 0.062032476, 0.04378155, 0.074334, 0.056081086,
0.19275962, 0.046158116, 0.046577107, 0.066513285, 0.06445951, 0.047309764, 0.05950769,
0.0674011, 0.055706587, 0.051886756, 0.05900564, 0.055254653, 0.057045076, 0.04571612,
0.072226025, 0.0478058, 0.05350824, 0.050798263, 0.06644456, 0.051348776, 0.22044188,
0.05607669, 0.063670054, 0.048965286, 0.046361987, 0.05873398, 0.06876463, 0.04913771,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.039962243, 0.047219314, 0.043065272, 0.06567568, 0.115658894, 0.04115503,
0.037324358, 0.050857402, 0.055769905, 0.0542052, 0.049934078, 0.037597258, 0.047123123,
0.051602647, 0.055542365, 0.05401129, 0.051304758, 0.061398495, 0.048580028, 0.046700884,
0.058726035, 0.06490888, 0.048451483, 0.061971113, 0.06265306, 0.054882795, 0.09384252,
0.06422207, 0.057299737, 0.044118978, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.044265773, 0.061197508,
0.0485425, 0.057674535, 0.05333596, 0.062425345, 0.050926525, 0.22817838, 0.05715897,
0.06825664, 0.050950088, 0.04757896, 0.05732393, 0.064047694, 0.048137188, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.044590943, 0.041058738, 0.049227573, 0.046581533, 0.040594265, 0.054588757, 0.070801444,
0.047232427, 0.0495628, 0.05619313, 0.04663852, 0.14893135, 0.054940473, 0.051794235,
0.039905883, 0.04079922, 0.053982835, 0.051465265, 0.045876157, 0.049675424, 0.039950814,
0.040652897, 0.050565884, 0.084479466, 0.07858229, 0.0431309, 0.042992648, 0.06996757,
0.061821736, 0.042898, 0.05234148, 0.084712766, 0.056169357, 0.049131393, 0.05054049,
0.048177887, 0.059071902, 0.052966014, 0.08063534, 0.093757726, 0.047203176, 0.052741908,
0.13658644, 0.08497439, 0.049473543, 0.056476224, 0.055040356, 0.057746265, 0.05175844,
0.048029248, 0.059250183, 0.09441789, 0.052624375, 0.058885876, 0.066565685, 0.05300437,
0.10872358, 0.07124793, 0.06272198, 0.04480445, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.043058828, 0.05876899,
0.045823857, 0.05213041, 0.049050536, 0.05932133, 0.04811871, 0.2287899, 0.05387058,
0.06570903, 0.0478561, 0.045985498, 0.054589383, 0.058988325, 0.047554694, 0.050950117,
0.041929606, 0.052986152, 0.03723098, 0.036631413, 0.043203127, 0.048374314, 0.043533348,
0.052682795, 0.08089, 0.040238578, 0.044380765, 0.05172644, 0.050114967, 0.04282548,
0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.044173524, 0.055794634, 0.053223863, 0.04697322, 0.050184835, 0.04156044,
0.043345287, 0.056528583, 0.08951006, 0.07782354, 0.04445744, 0.044158608, 0.08001386,
0.067906275, 0.04202814, 0.057284378, 0.057184894, 0.054369528, 0.046573866, 0.04338892,
0.052662965, 0.09693569, 0.050526515, 0.05343541, 0.060073268, 0.04636016, 0.08064536,
0.066901505, 0.060137384, 0.042343605, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.050051697, 0.19356178, 0.048674528,
0.046263974, 0.05003566, 0.051636387, 0.05041908, 0.06542714, 0.054669503, 0.06069261,
0.043025915, 0.044164497, 0.07166654, 0.061541222, 0.045094214, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.04326386, 0.058982667, 0.04563891, 0.04646809, 0.053541347, 0.04081497,
0.045841876, 0.046933908, 0.083857484, 0.06582214, 0.044481844, 0.045477558, 0.1827533,
0.08038244, 0.044995848, 0.048349522, 0.0594328, 0.053192735, 0.05800789, 0.055308525,
0.06990919, 0.05448771, 0.19523081, 0.056716196, 0.06833714, 0.05644908, 0.05319619,
0.05769059, 0.06124608, 0.05244554, 0.061812278, 0.05416755, 0.062230393, 0.048957385,
0.0474494, 0.055119567, 0.07826921, 0.05187844, 0.058259945, 0.06483517, 0.050248142,
0.090627626, 0.06309599, 0.059786, 0.045112804, 0.101002894, 0.057356264, 0.15809913,
0.046497766, 0.045716982, 0.0662376, 0.06994485, 0.05005664, 0.05346925, 0.06870635,
0.052215714, 0.054668263, 0.055363093, 0.059699558, 0.060965646, 0.045702744, 0.05591998,
0.045205675, 0.050799843, 0.05058888, 0.042382795, 0.052008633, 0.053399287, 0.07051493,
0.06390965, 0.04385124, 0.046950556, 0.10702151, 0.11722807, 0.05025779, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.055452973, 0.053213593, 0.07303099, 0.051470544, 0.05044048, 0.14911619, 0.055100143,
0.062137492, 0.0498591, 0.057880025, 0.060410224, 0.053433757, 0.053306885, 0.05513508,
0.05507689, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.043014273, 0.04647005, 0.052522216, 0.060058925, 0.16675313,
0.04355585, 0.037390817, 0.047572944, 0.07012092, 0.06211885, 0.059273005, 0.044627056,
0.05281356, 0.05237062, 0.051961113, 0.050507076, 0.04098301, 0.052536294, 0.03555743,
0.034735944, 0.044523995, 0.058392104, 0.0449429, 0.04890114, 0.071741834, 0.037468594,
0.047372375, 0.048918854, 0.058079716, 0.04303682, 0.045168415, 0.04665025, 0.05291995,
0.048936825, 0.07741657, 0.041802894, 0.03739532, 0.042535335, 0.061956722, 0.056875348,
0.049096294, 0.044030726, 0.052917108, 0.0501848, 0.050364107, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03899423,
0.05461014, 0.040756494, 0.04251058, 0.04283244, 0.04312055, 0.042797457, 0.05691105,
0.0573842, 0.058330987, 0.03774301, 0.038450666, 0.059729453, 0.09955912, 0.05003225,
0.08894271, 0.054750007, 0.2114993, 0.044098046, 0.044489063, 0.06453029, 0.06417266,
0.047723, 0.05240812, 0.061455633, 0.05207051, 0.052556474, 0.05161948, 0.053827368,
0.055857334, 0.04610431, 0.0735704, 0.046806306, 0.04615894, 0.052055214, 0.0418664,
0.05156911, 0.047971934, 0.07408174, 0.077030286, 0.04326076, 0.047296647, 0.12434279,
0.08237725, 0.04792498, 0.04962593, 0.05852395, 0.05312658, 0.049840763, 0.05255402,
0.05303847, 0.05889032, 0.056652736, 0.07242599, 0.06795937, 0.045919225, 0.049530506,
0.069993615, 0.16644967, 0.072802916, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.047438204, 0.041224066, 0.05418828,
0.035257753, 0.03610795, 0.045344654, 0.051228832, 0.048040267, 0.04755697, 0.07981901,
0.039727017, 0.04821751, 0.05127496, 0.049477227, 0.04279538, 0.0513093, 0.041843828,
0.05834419, 0.03792401, 0.04200359, 0.044176508, 0.047955792, 0.044548918, 0.051668983,
0.08476817, 0.04607335, 0.045789182, 0.050721176, 0.049368367, 0.047867827, 0.05217581,
0.050761703, 0.06126314, 0.046713483, 0.045827404, 0.23490052, 0.058975484, 0.06543003,
0.04774276, 0.055941813, 0.06364861, 0.063155055, 0.049460456, 0.05223753, 0.0517662,
0.056301773, 0.0646674, 0.056999087, 0.042585403, 0.044832367, 0.043861624, 0.051967036,
0.041079655, 0.060173832, 0.056706823, 0.043932848, 0.04699408, 0.06370272, 0.061435476,
0.045584306, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.057496294, 0.03863553, 0.058652632, 0.034174398, 0.03423139,
0.047816474, 0.06347418, 0.038273174, 0.041169994, 0.056000248, 0.039239705, 0.045469113,
0.04264767, 0.046675235, 0.04457436, 0.046449568, 0.06950392, 0.05022841, 0.040027946,
0.043075252, 0.040141318, 0.048842635, 0.04140674, 0.06299236, 0.06411653, 0.039867178,
0.04314822, 0.07322385, 0.07289841, 0.044681244, 0.050990537, 0.047625124, 0.057298496,
0.039982352, 0.04284934, 0.05280199, 0.048444405, 0.04782962, 0.05035945, 0.075404465,
0.046837453, 0.04668019, 0.056846734, 0.051267505, 0.046268452, 0.050228935, 0.19140974,
0.049649853, 0.044060964, 0.045224138, 0.06302241, 0.05357678, 0.06390418, 0.05148494,
0.059828363, 0.04300867, 0.048816953, 0.074860744, 0.06330548, 0.047525935, 0.03604599,
0.027784793, 0.045225836, 0.035800382, 0.036433857, 0.049937394, 0.03351349, 0.0354497,
0.035153348, 0.039130114, 0.11615778, 0.046370313, 0.0327509, 0.032629974, 0.039877754,
0.045619644, 0.045277882, 0.047026988, 0.05021876, 0.071894124, 0.042578217, 0.037079748,
0.049498837, 0.05035023, 0.055760324, 0.052141678, 0.041744974, 0.048802454, 0.044141144,
0.04356411, 0.042060994, 0.04608493, 0.05487099, 0.051815495, 0.047991823, 0.04473535,
0.045677282, 0.04361904, 0.06927848, 0.061397705, 0.05491578, 0.06337838, 0.059211683,
0.050623346, 0.042468064, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.05369869, 0.05097674, 0.058547873, 0.05296557,
0.05543319, 0.05314203, 0.04743051, 0.052048188, 0.0513679, 0.06065383, 0.053262163,
0.045049682, 0.05030859, 0.06728646, 0.14628772, 0.03515658, 0.036351487, 0.041128743,
0.04395036, 0.042086974, 0.048552226, 0.042002443, 0.05147844, 0.042943303, 0.046309665,
0.048722334, 0.05057206, 0.049739897, 0.04062389, 0.034271937, 0.042881124, 0.039269194,
0.05366845, 0.039492603, 0.052205462, 0.040640034, 0.03820636, 0.0388011, 0.0468956,
0.05304545, 0.053095162, 0.039472137, 0.045963805, 0.041877374, 0.05033904, 0.047381993,
0.04824048, 0.045499768, 0.04502517, 0.044734646, 0.059866853, 0.045966562, 0.04812066,
0.043140426, 0.057624206, 0.06343077, 0.06517065, 0.04674913, 0.046082612, 0.048894987,
0.060588427, 0.054818183, 0.063905284, 0.046746198, 0.04278115, 0.05941113, 0.13929419,
0.052778028, 0.054079205, 0.06263991, 0.05052772, 0.08083719, 0.069146454, 0.067687646,
0.046664022, 0.045467976, 0.11906257, 0.046188235, 0.042568248, 0.042215664, 0.058991875,
0.05083216, 0.06831332, 0.04932404, 0.05956622, 0.040360026, 0.046139795, 0.072126515,
0.061168794, 0.043763794, 0.0365088, 0.03025003, 0.041762494, 0.03792013, 0.042895883,
0.04660836, 0.031874403, 0.03982646, 0.03573829, 0.04692954, 0.07288331, 0.03949276,
0.03433208, 0.03628043, 0.04886808, 0.056997363, 0.058154397, 0.05898681, 0.050856587,
0.055350102, 0.05156221, 0.045282047, 0.049466837, 0.053066302, 0.056066357, 0.04913984,
0.049875505, 0.05949098, 0.050281625, 0.045300823, 0.051092397, 0.04966004, 0.0612988,
0.039706133, 0.044355623, 0.042708818, 0.05125721, 0.03702286, 0.062011406, 0.05576972,
0.04592006, 0.050448984, 0.05583133, 0.051741544, 0.040632952, 0.0, 0.0,
0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05510441,
0.051527627, 0.059831835, 0.050905447, 0.053133972, 0.05879073, 0.049166135, 0.05886526,
0.047206677, 0.06588486, 0.05746442, 0.04801124, 0.049961504, 0.05933278, 0.12473555,
0.036209352, 0.043896124, 0.042546052, 0.048067357, 0.047164302, 0.05593604, 0.043013256,
0.08477747, 0.048236698, 0.053145356, 0.053778064, 0.04620572, 0.045796096, 0.048171505,
0.04029773, 0.056205027, 0.044063486, 0.05583727, 0.034347557, 0.033236094, 0.040624984,
0.060928714, 0.033635706, 0.04112493, 0.04226154, 0.039860193, 0.047750484, 0.04686477,
0.043895435, 0.0355524, 0.046078656, 0.12505867, 0.04568704, 0.03926198, 0.04161288,
0.057922676, 0.04557252, 0.057837684, 0.045284864, 0.054169256, 0.039005857, 0.04056422,
0.061509814, 0.05423906, 0.04504259, 0.032265272, 0.03018541, 0.03951661, 0.032777566,
0.036504775, 0.050596446, 0.02948107, 0.040543396, 0.031811476, 0.044303015, 0.05633952,
0.03555094, 0.031644613, 0.033442553, 0.041065857, 0.056915954, 0.05012495, 0.056954894,
0.05143188, 0.062424306, 0.0484626, 0.042573422, 0.046267286, 0.05120935, 0.054516513,
0.05289164, 0.04893797, 0.04976654, 0.048299465, 0.050294824, 0.04446125, 0.048855837,
0.05539142, 0.05216309, 0.058142282, 0.045949467, 0.045854066, 0.04599445, 0.10206781,
0.07452125, 0.052439794, 0.05687733, 0.069071725, 0.063592225, 0.053676866, 0.05662363,
0.056255154, 0.06160268, 0.04965829, 0.048776228, 0.12407054, 0.05098068, 0.06412017,
0.04485223, 0.056616504, 0.05082266, 0.046003196, 0.048823748, 0.051433135, 0.05766414,
0.056124214, 0.04964073, 0.059231393, 0.052427754, 0.055735253, 0.057076167, 0.05482056,
0.05268587, 0.056955665, 0.06463573, 0.055569082, 0.05402519, 0.053947832, 0.07343008,
0.11656541, 0.04104562, 0.04380417, 0.046233848, 0.053970147, 0.052967675, 0.062646694,
0.045846816, 0.07332, 0.051286023, 0.051457115, 0.06487813, 0.056667194, 0.043288175,
0.044119176, 0.048261534, 0.05886027, 0.04740521, 0.09339937, 0.0462775, 0.057709012,
0.049681317, 0.046962757, 0.045129947, 0.056182988, 0.064041585, 0.070405245, 0.051462185,
0.055830922, 0.05013807, 0.062785536, 0.048206344, 0.049503192, 0.05548487, 0.040438883,
0.044446107, 0.043965593, 0.042195927, 0.04566304, 0.044683482, 0.057774045, 0.04764865,
0.04301788, 0.047037337, 0.05234544, 0.06449719, 0.053208128, 0.03821596, 0.045365017,
0.03333683, 0.03131861, 0.03777564, 0.06336035, 0.03313744, 0.037058394, 0.03905313,
0.03523216, 0.04237208, 0.043242387, 0.04286554, 0.03267066, 0.053909335, 0.058580056,
0.055417024, 0.043463707, 0.04266429, 0.08697534, 0.052945927, 0.05893497, 0.042335983,
0.051774167, 0.04944561, 0.048879705, 0.05080389, 0.049516264, 0.047708567, 0.0638218,
0.046536997, 0.052385654, 0.03597937, 0.03523187, 0.045459308, 0.08550569, 0.036141377,
0.041609306, 0.045787398, 0.040008344, 0.055929143, 0.049027935, 0.04742189, 0.03725059,
0.054981958, 0.04246737, 0.055757403, 0.048738077, 0.05833023, 0.047359083, 0.046400473,
0.044406876, 0.04965461, 0.05626281, 0.057849318, 0.05144572, 0.047305256, 0.055875096,
0.08541833, 0.036734767, 0.033947114, 0.039288484, 0.032265995, 0.03695415, 0.030821599,
0.030630693, 0.03409247, 0.031968214, 0.0374163, 0.03323543, 0.02822884, 0.032020785,
0.036665943, 0.07846042, 0.039586235, 0.04732279, 0.045893442, 0.059743106, 0.05354309,
0.061872967, 0.04592545, 0.1588924, 0.054762255, 0.062746905, 0.058337327, 0.050456963,
0.050502423, 0.052356653, 0.045578316, 0.0579011, 0.048054483, 0.06320284, 0.05074293,
0.050581675, 0.054192767, 0.04986325, 0.05414434, 0.048593007, 0.0609364, 0.058107466,
0.047599986, 0.049086258, 0.060844462, 0.123503834, 0.047525626, 0.13958158, 0.04543894,
0.04380367, 0.04405862, 0.054444395, 0.048846446, 0.069931194, 0.049301773, 0.06308407,
0.039799757, 0.042354528, 0.070748754, 0.063154966, 0.046754684, 0.051620327, 0.039153703,
0.05380627, 0.04118576, 0.037316423, 0.045073934, 0.068133846, 0.038890228, 0.04185386,
0.05518445, 0.042600606, 0.052425273, 0.04360003, 0.048547495, 0.047301985, 0.04509567,
0.04532753, 0.055118322, 0.034664918, 0.03506359, 0.053938966, 0.04530418, 0.045598634,
0.036832992, 0.053704277, 0.044333182, 0.04216432, 0.04339128, 0.040617615, 0.04139597,
0.05198274, 0.040591612, 0.054611057, 0.041272134, 0.0457499, 0.043429546, 0.04664025,
0.03887893, 0.048461508, 0.058613088, 0.049003568, 0.05008845, 0.04610644, 0.05087752,
0.07133652, 0.04124892, 0.050933376, 0.050527196, 0.045945123, 0.0403747, 0.054419454,
0.054538578, 0.060760003, 0.050798636, 0.06400477, 0.049572896, 0.060270213, 0.05714422,
0.048237693, 0.039836135, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.054124366, 0.050565653, 0.11784578,
0.041439027, 0.048657395, 0.057055853, 0.049769655, 0.049281117, 0.05593405, 0.062476866,
0.056555107, 0.053534772, 0.052039653, 0.049463775, 0.05500175, 0.048144285, 0.049974505,
0.08222589, 0.044350058, 0.05652883, 0.052652203, 0.04815561, 0.049676392, 0.058618624,
0.06120694, 0.058104537, 0.04976004, 0.05605537, 0.052435, 0.054916244, 0.04014887,
0.036245275, 0.058439545, 0.041435663, 0.047979295, 0.04065343, 0.03806706, 0.034901287,
0.063278295, 0.050768174, 0.05978225, 0.048243932, 0.044377964, 0.04440903, 0.05563072,
0.05101781, 0.05831206, 0.058490995, 0.04539717, 0.052592684, 0.04904619, 0.045403637,
0.054311953, 0.04701158, 0.05920922, 0.04546196, 0.041566253, 0.04961015, 0.057811633,
0.09809238, 0.042309925, 0.036588896, 0.054181695, 0.044063814, 0.04916468, 0.057356358,
0.038098708, 0.04741956, 0.04601879, 0.05069907, 0.23743455, 0.048856825, 0.03952969,
0.03952969, 0.047343083, 0.036718763, 0.055756595, 0.037872996, 0.035641912, 0.03692403,
0.058106627, 0.038704038, 0.066109315, 0.037056293, 0.04411021, 0.038265474, 0.03922796,
0.046408925, 0.042176746, 0.037041526, 0.034003977, 0.0697392, 0.0348467, 0.031150535,
0.030639052, 0.036350325, 0.03655345, 0.037858464, 0.039453775, 0.039749756, 0.030042822,
0.033573017, 0.041608516, 0.046558164, 0.033740174, 0.04657849, 0.06299859, 0.05323763,
0.043715265, 0.046149902, 0.044994008, 0.040888067, 0.04691797, 0.049403947, 0.050474897,
0.040689096, 0.040545017, 0.05254357, 0.04642126, 0.03949302, 0.048113853, 0.04937325,
0.056364927, 0.06292424, 0.110709965, 0.045912795, 0.04075752, 0.052336633, 0.06747693,
0.063231304, 0.05937621, 0.04663916, 0.055730633, 0.054212067, 0.05497973, 0.037431292,
0.04081864, 0.053810332, 0.037664652, 0.042089503, 0.043328676, 0.04116673, 0.041416526,
0.04196181, 0.043337427, 0.04486756, 0.038422283, 0.041084465, 0.043206748, 0.043034334,
0.03251917, 0.03633581, 0.040374875, 0.032536753, 0.035346303, 0.038421217, 0.033760868,
0.036204495, 0.03510491, 0.064039886, 0.04278138, 0.036937352, 0.038958855, 0.036731888,
0.04321821, 0.049247406, 0.054725558, 0.05618886, 0.05204835, 0.04752894, 0.049355667,
0.04365589, 0.047558066, 0.051034246, 0.05320688, 0.04911843, 0.049795955, 0.053343482,
0.045191478, 0.039231177, 0.038153972, 0.07807616, 0.03909292, 0.03783278, 0.03726797,
0.048548166, 0.042881556, 0.06738772, 0.04002404, 0.048131038, 0.037729513, 0.039867114,
0.05356659, 0.046412803, 0.036281962, 0.032596186, 0.026864609, 0.040836416, 0.031546984,
0.033212457, 0.048121344, 0.03003626, 0.03633963, 0.03088524, 0.037233822, 0.06528209,
0.035717137, 0.028837536, 0.031209437, 0.037762806, 0.038216297, 0.048007336, 0.048171155,
0.048066787, 0.10134685, 0.04169487, 0.034665655, 0.05144485, 0.06841124, 0.07306592,
0.054327365, 0.038674485, 0.05159618, 0.050013904, 0.048894774, 0.04196843, 0.045473106,
0.048039913, 0.054947436, 0.052409433, 0.060537737, 0.05029432, 0.107796505, 0.051023066,
0.06294617, 0.06272335, 0.049952555, 0.053153675, 0.050771393, 0.045441136, 0.037187368,
0.034588102, 0.050978173, 0.039527062, 0.048025105, 0.038258646, 0.034480017, 0.03278009,
0.050069634, 0.05222236, 0.05593614, 0.04230718, 0.04134437, 0.04241311, 0.060437463,
0.043765865, 0.034784425, 0.0669812, 0.037130043, 0.044717863, 0.037340023, 0.035894413,
0.031566177, 0.048713617, 0.04787589, 0.056890517, 0.04086647, 0.04039634, 0.03950279,
0.05412919, 0.047567636, 0.047522757, 0.08499895, 0.04296913, 0.059371788, 0.049696203,
0.04555936, 0.047553632, 0.05829677, 0.05968361, 0.056885343, 0.0471796, 0.056210764,
0.05198822, 0.05628593, 0.05096316, 0.045710117, 0.08748594, 0.04370982, 0.05078045,
0.050989423, 0.04488113, 0.04131447, 0.062193092, 0.06041252, 0.061791614, 0.050728604,
0.047881708, 0.047611948, 0.05579862, 0.042126026, 0.04194299, 0.058520842, 0.04637228,
0.049098596, 0.045010623, 0.043152172, 0.039807685, 0.06270993, 0.06769112, 0.0623121,
0.0516978, 0.049797643, 0.0489229, 0.054314185, 0.05962655, 0.052855834, 0.06749051,
0.053810813, 0.05046263, 0.14983971, 0.057139914, 0.06195639, 0.0486792, 0.057505894,
0.054771688, 0.04932264, 0.053166058, 0.058202624, 0.060233902, 0.044791523, 0.050702255,
0.050765272, 0.058290586, 0.054319475, 0.06832668, 0.04804369, 0.117480196, 0.053497806,
0.059523076, 0.058325563, 0.050681036, 0.050906036, 0.05310821, 0.051523864, 0.044117942,
0.04794155, 0.05107264, 0.056381296, 0.05197506, 0.07285308, 0.05084965, 0.110342555,
0.05540323, 0.058900274, 0.06326355, 0.05858702, 0.048289653, 0.050842166, 0.050755844,
0.0468385, 0.053858444, 0.053699214, 0.058812696, 0.05584539, 0.068326056, 0.05342325,
0.1598598, 0.058471963, 0.06542297, 0.06358515, 0.055231653, 0.05828323, 0.058974385,
0.050714184, 0.054429166, 0.041223742, 0.05597117, 0.049268015, 0.05387674, 0.045902845,
0.0414471, 0.041275535, 0.04800701, 0.053469256, 0.05475504, 0.042735755, 0.047078267,
0.0473445, 0.057411242, 0.067305416, 0.045786954, 0.05755728, 0.037283964, 0.035365473,
0.046861693, 0.11809537, 0.037138607, 0.04159986, 0.047719333, 0.040254842, 0.05523971,
0.05368335, 0.052693058, 0.041473504, 0.039753634, 0.035803962, 0.050462198, 0.04833966,
0.05220127, 0.061825115, 0.03844658, 0.04947286, 0.045549877, 0.05025144, 0.18046589,
0.050618395, 0.038002774, 0.03945924, 0.04850324, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.03850245, 0.065595,
0.039691143, 0.03640801, 0.037262008, 0.06061667, 0.044321, 0.06061667, 0.037889708,
0.044727147, 0.038785886, 0.044378344, 0.048875444, 0.043814715, 0.0373439, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
0.04453348, 0.04247061, 0.04853829, 0.053961366, 0.047217272, 0.062362388, 0.046269502,
0.05653943, 0.050070982, 0.051828388, 0.058671933, 0.048567005, 0.046035226, 0.05780487,
0.06263799, 0.05154217, 0.050181326, 0.058040712, 0.05341704, 0.052964106, 0.04747033,
0.04241383, 0.04522547, 0.057195198, 0.05343138, 0.0523058, 0.04927917, 0.05552871,
0.048445355, 0.042558096, 0.043051753, 0.04732864, 0.06346512, 0.04629096, 0.042813454,
0.055858243, 0.048931632, 0.050852288, 0.05382119, 0.061375156, 0.057521928, 0.060452167,
0.04986424, 0.045290235, 0.04169491, 0.047442205, 0.15715753, 0.047095645, 0.041587263,
0.04479187, 0.054746505, 0.050297886, 0.06386049, 0.048310816, 0.0623809, 0.038430218,
0.042592596, 0.06743988, 0.06165948, 0.046151485, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.0, 0.0, 0.059556555, 0.056891043,
0.16273811, 0.047314055, 0.05938175, 0.06193029, 0.05370324, 0.054595824, 0.061237663,
0.07059132, 0.067924984, 0.056906413, 0.062033273, 0.0565567, 0.06462008, 0.04374497,
0.048882816, 0.04995964, 0.054283656, 0.053733353, 0.060268395, 0.052405383, 0.119150795,
0.05312624, 0.06350692, 0.057979677, 0.051526204, 0.0578465, 0.054882336, 0.046424862,
0.050688207, 0.053284258, 0.062485844, 0.053596377, 0.06146114, 0.049442504, 0.05099373,
0.047560114, 0.116759725, 0.07120379, 0.059446346, 0.06249946, 0.071397044, 0.06740572,
0.06005516, 0.03968097, 0.045051638, 0.047405366, 0.060047936, 0.105484076, 0.04115926,
0.03597034, 0.04723648, 0.061233122, 0.056815863, 0.059198227, 0.04012057, 0.0463667,
0.04717762, 0.052103978, 0.042366356, 0.0354141, 0.053938087, 0.047645453, 0.047901448,
0.051577266, 0.03995867, 0.041947212, 0.04884747, 0.04740165, 0.16565756, 0.052263238,
0.04092826, 0.04116416, 0.045424372, 0.044117972, 0.045145255, 0.065795355, 0.042504724,
0.060704026, 0.054986198, 0.04138352, 0.050526947, 0.05655088, 0.058700483, 0.05586215,
0.043841876, 0.04912709, 0.048912622, 0.054454416, 0.056605924, 0.040221933, 0.057005346,
0.04533048, 0.04684665, 0.04770348, 0.041016877, 0.040983506, 0.04344749, 0.052526377,
0.053151395, 0.042702593, 0.043819573, 0.044035055, 0.05295256, 0.04148893, 0.044063892,
0.049815148, 0.056701142, 0.051513307, 0.05860274, 0.0476593, 0.09576575, 0.05569328,
0.065177046, 0.05957472, 0.049127236, 0.049951367, 0.051316716, 0.047591344, 0.04889629,
0.056121502, 0.06474773, 0.059166167, 0.05491664, 0.052125745, 0.050690453, 0.048844863,
0.08686657, 0.06747829, 0.061285716, 0.06540444, 0.07049505, 0.060150106, 0.04692618,
0.04066238, 0.034224186, 0.05156763, 0.050065055, 0.049515672, 0.056290183, 0.03959577,
0.042001836, 0.047377095, 0.045712773, 0.28983447, 0.057277936, 0.039936226, 0.039169468,
0.045610033, 0.033053152, 0.034578346, 0.036823608, 0.033595365, 0.03763252, 0.033439543,
0.030876394, 0.030604351, 0.050171535, 0.043775026, 0.036576, 0.038735494, 0.040095046,
0.03853453, 0.03511276, 0.05818675, 0.036897942, 0.057623066, 0.035210162, 0.03466906,
0.045871314, 0.061455384, 0.036365386, 0.04148377, 0.053372175, 0.042015497, 0.04920569,
0.043368068, 0.04495292, 0.047853213, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.047468305, 0.048304196, 0.08754521,
0.040410966, 0.05173984, 0.047441218, 0.046334762, 0.044210806, 0.05279954, 0.061078187,
0.051122293, 0.046580415, 0.056020662, 0.050222382, 0.055719066, 0.044855785, 0.050175343,
0.048401758, 0.056832585, 0.053293217, 0.06247266, 0.0472714, 0.103851184, 0.05118311,
0.056928936, 0.058245387, 0.047006436, 0.051877443, 0.05297405, 0.047557425, 0.04874761,
0.05290966, 0.056444924, 0.045322947, 0.042905074, 0.054922406, 0.05579058, 0.057825796,
0.04689123, 0.06695868, 0.04981625, 0.05825162, 0.05447682, 0.05199817, 0.04834885,
0.060164437, 0.064091876, 0.060053736, 0.052399974, 0.050248634, 0.21210726, 0.05454841,
0.0730329, 0.047647353, 0.05681038, 0.057181183, 0.050681878, 0.049741503, 0.056296248,
0.05499423, 0.048782814, 0.055369515, 0.062456083, 0.057541862, 0.060323972, 0.049313758,
0.049959667, 0.050924573, 0.11447819, 0.076063246, 0.059135795, 0.065055184, 0.07305353,
0.067553475, 0.054077536, 0.04409429, 0.03617119, 0.06312272, 0.05115691, 0.05168201,
0.05605998, 0.040250316, 0.04374041, 0.04983016, 0.048861, 0.28479078, 0.055163316,
0.041785043, 0.04204587, 0.048673045, 0.057740804, 0.053415347, 0.06322782, 0.05325097,
0.049109336, 0.22269997, 0.0571053, 0.06434417, 0.047523886, 0.05493818, 0.062253628,
0.05388702, 0.05006125, 0.053323667, 0.057118658, 0.050402097, 0.12008955, 0.047961935,
0.041853774, 0.0410656, 0.06616567, 0.052619837, 0.059024226, 0.045828782, 0.05286955,
0.04080618, 0.050472233, 0.062062737, 0.05661405, 0.043061726, 0.05683437, 0.05007234,
0.05458419, 0.06279147, 0.06468256, 0.0588642, 0.052579448, 0.053170722, 0.052475926,
0.061847527, 0.06131377, 0.052434687, 0.050767176, 0.06468445, 0.10549273, 0.047075294,
0.058032993, 0.058792308, 0.05642582, 0.05816824, 0.050164595, 0.04835346, 0.049287975,
0.11025918, 0.07053356, 0.05759862, 0.05960445, 0.07111534, 0.07262892, 0.049369987,
0.040656324, 0.04502064, 0.04596852, 0.051815715, 0.049081773, 0.06257887, 0.047807157,
0.084536895, 0.05291275, 0.057593994, 0.06694189, 0.05369565, 0.044194903, 0.047334787,
0.047867652, 0.030641194, 0.04151821, 0.03300919, 0.034004524, 0.03292298, 0.03066768,
0.033874266, 0.03241952, 0.044964515, 0.038748737, 0.0309108, 0.03396819, 0.06584596,
0.042620413, 0.027487492, 0.036857095, 0.04037381, 0.04188761, 0.044132356, 0.046407178,
0.06329038, 0.03994316, 0.08941651, 0.043856695, 0.052623242, 0.056558907, 0.043972608,
0.041885685, 0.044077955, 0.0503976, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,
0.0, 0.0, 0.0, 0.0, 0.05522494, 0.049525425, 0.13366047,
0.04164421, 0.045884755, 0.067612596, 0.049122117, 0.048506744, 0.0515057, 0.060231432,
0.066822395, 0.052976705, 0.051281992, 0.045512274, 0.050627958, 0.050400198, 0.040922794,
0.05173076, 0.054019265, 0.056265853, 0.042901278, 0.040411487, 0.042852264, 0.047717463,
0.04793897, 0.056068473, 0.040426586, 0.04364611, 0.050710145, 0.11376454, 0.056928962,
0.05598953, 0.06526745, 0.05171205, 0.0472844, 0.17480668, 0.062631264, 0.066303104,
0.049408898, 0.060854126, 0.058177393, 0.057823125, 0.053446736, 0.056285933, 0.053051967,
0.059297677, 0.05347846, 0.06865162, 0.05312041, 0.052643366, 0.10554699, 0.054528642,
0.060209397, 0.047992885, 0.06305348, 0.058931407, 0.05145979, 0.052647777, 0.053805232,
0.059596043, 0.062102716, 0.057577495, 0.13663425, 0.049710628, 0.06406329, 0.07263335,
0.052439854, 0.058172405, 0.061797593, 0.06957934, 0.07880811, 0.05638549, 0.061039988,
0.053545646, 0.06149118, 0.032870837, 0.03625831, 0.0373283, 0.049453393, 0.0827558,
0.037006564, 0.029472463, 0.045446545, 0.05038008, 0.053908173, 0.052671682, 0.03445478,
0.040363006, 0.0392013, 0.040672306, 0.05060342, 0.048240658, 0.059988987, 0.045100223,
0.0514039, 0.04320059, 0.041840762, 0.043548565, 0.052172698, 0.055805042, 0.042140696,
0.042188648, 0.05209716, 0.04820869, 0.046203077, 0.048854243, 0.045424085, 0.04950522,
0.047440372, 0.04595596, 0.05573886, 0.04648038, 0.049339995, 0.043538984, 0.057915673,
0.050546147, 0.043815512, 0.04382291, 0.05665381, 0.09146069, 0.04422158, 0.04614958,
0.06869766, 0.037852738, 0.049232915, 0.049509194, 0.041581262, 0.045859825, 0.050033335,
0.055126507, 0.048239004, 0.044526175, 0.048020963, 0.045818873, 0.050695915, 0.04558313,
0.054998733, 0.053082954, 0.055410065, 0.054832194, 0.051487345, 0.0493064, 0.056586493,
0.08299206, 0.0781031, 0.055421878, 0.060033914, 0.06964002, 0.07205999, 0.051682997,
0.04409585, 0.037202783, 0.042326085, 0.04573463, 0.045149073, 0.047080632, 0.041842856,
0.04370809, 0.039005242, 0.050830647, 0.049431518, 0.04248828, 0.037618622, 0.048595052,
0.0812012, 0.043140136, 0.03773463, 0.05300276, 0.03809628, 0.044786155, 0.038089085,
0.036359593, 0.03684441, 0.04061286, 0.05185915, 0.039638143, 0.036259945, 0.044634987,
0.042044748, 0.048173852, 0.06312764, 0.05926942, 0.06459747, 0.049885202, 0.045593925,
0.23128845, 0.057163194, 0.0674124, 0.04472059, 0.056557402, 0.053469636, 0.05098626,
0.04858598, 0.053723324, 0.053619105, 0.051072076, 0.042479124, 0.051260483, 0.04439581,
0.050596975, 0.04287741, 0.040702652, 0.042371716, 0.044360686, 0.05407455, 0.050583966,
0.046557188, 0.04426453, 0.04837143, 0.078384496, 0.055073522, 0.15765832, 0.054467253,
0.04162654, 0.041941617, 0.057044923, 0.057441372, 0.059038278, 0.049229674, 0.060941946,
0.039991528, 0.046002783, 0.06902431, 0.06537639, 0.047984757, 0.052911825, 0.054072898,
0.062473554, 0.04852463, 0.045109957, 0.059870724, 0.05275403, 0.050887235, 0.048502415,
0.06622551, 0.06170526, 0.053692475, 0.050357275, 0.051567156, 0.05217808, 0.07056297,
0.04005523, 0.05715737, 0.043114357, 0.040388893, 0.04743124, 0.060999345, 0.039613258,
0.0424572, 0.05400494, 0.04761889, 0.046773624, 0.044546336, 0.048000965, 0.058561362,
0.051658522, 0.04979105, 0.07999412, 0.042639226, 0.041129723, 0.058307614, 0.049955983,
0.047807865, 0.04559173, 0.057358176, 0.057474323, 0.053360462, 0.044248186, 0.045399114,
0.04682225, 0.048447836, 0.1793767, 0.04676402, 0.043609686, 0.049510155, 0.050000425,
0.04694377, 0.05790442, 0.05353444, 0.05470372, 0.040693693, 0.04209057, 0.066176474,
0.06521316, 0.046704564, 0.037338056, 0.038657855, 0.040797926, 0.04736458, 0.050606772,
0.048139706, 0.037111107, 0.05957702, 0.041482516, 0.06006212, 0.056697637, 0.049951565,
0.042737823, 0.04158507, 0.052934393, 0.046714146, 0.048419107, 0.052175414, 0.059024576,
0.13866603, 0.04417393, 0.037826005, 0.049383476, 0.06899938, 0.06168218, 0.060890663,
0.0433409, 0.052007623, 0.052337583, 0.05249821, 0.03531151, 0.03845206, 0.04078954,
0.043757442, 0.041285243, 0.03957044, 0.036721185, 0.04969441, 0.080319546, 0.059069566,
0.042400077, 0.04222499, 0.047454048, 0.050508175, 0.043536484, 0.041301046, 0.044755567,
0.053141054, 0.043375693, 0.040644556, 0.06523223, 0.05405129, 0.048580505, 0.04812219,
0.047990937, 0.047305644, 0.07522461, 0.043973815, 0.044477973, 0.03927624, 0.04874506,
0.05732133, 0.05512878, 0.044421718, 0.046883702, 0.055927273, 0.059815396, 0.057427403,
0.06295488, 0.06229889, 0.042504847, 0.046657853, 0.07608402, 0.122744426, 0.06000423,
0.045017205, 0.05725874, 0.046186447, 0.063769475, 0.06163041, 0.06539765, 0.04713143,
0.12414979, 0.05152268, 0.06560379, 0.053484347, 0.047285233, 0.050835423, 0.053446632,
0.054801065, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.040299613, 0.046685427, 0.048105653, 0.045875825, 0.051715083,
0.044793468, 0.042160377, 0.046650346, 0.06566193, 0.06458179, 0.0466037, 0.043592405,
0.051136196, 0.050740846, 0.042377587, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.049385235, 0.21641149, 0.04819,
0.046081293, 0.05112824, 0.05491495, 0.049921215, 0.06396585, 0.05558337, 0.058089346,
0.043110892, 0.044432905, 0.07211572, 0.06365906, 0.0464572, 0.036187433, 0.0333532,
0.046307236, 0.03446042, 0.0368405, 0.05722335, 0.037403874, 0.045177277, 0.03566942,
0.055420347, 0.06278032, 0.05429823, 0.037210234, 0.034828138, 0.040178925, 0.03750979,
0.045151398, 0.04953686, 0.044569872, 0.07522215, 0.043491926, 0.033551168, 0.04611383,
0.054822214, 0.05598945, 0.060185805, 0.038867343, 0.042452365, 0.0420291, 0.045303497,
0.035070315, 0.03653532, 0.0477549, 0.03977971, 0.045204803, 0.039923318, 0.035564385,
0.03836963, 0.059863783, 0.053441774, 0.0493168, 0.046179492, 0.04087017, 0.04078952,
0.04910499, 0.06763086, 0.06762365, 0.06415363, 0.051705755, 0.050314773, 0.16931322,
0.06451689, 0.07310106, 0.048439346, 0.058220454, 0.058803566, 0.05843264, 0.05310856,
0.058731224, 0.055904374, 0.05006223, 0.054536797, 0.058543112, 0.046172492, 0.047017112,
0.0636161, 0.060933907, 0.06362879, 0.06567816, 0.0633989, 0.04684693, 0.053431895,
0.067178704, 0.09292985, 0.064944826, 0.04627961, 0.06539214, 0.051393185, 0.05473482,
0.057784073, 0.072958685, 0.04992807, 0.19110088, 0.05816662, 0.06979753, 0.05313281,
0.04709859, 0.059691027, 0.0681066, 0.054435376, 0.06131784, 0.052019347, 0.11683866,
0.04023492, 0.04255047, 0.067687705, 0.05456296, 0.04833971, 0.053999554, 0.05647527,
0.05232064, 0.054826096, 0.050285075, 0.048912004, 0.050869633, 0.039198723, 0.04532482,
0.047598664, 0.049369216, 0.053759817, 0.0464678, 0.039158575, 0.046127092, 0.0505831,
0.05408881, 0.04883853, 0.044436943, 0.044818383, 0.049600348, 0.051482935, 0.047162995,
0.0619561, 0.05229394, 0.046846107, 0.047603153, 0.048602276, 0.057968043, 0.05321984,
0.085395165, 0.100473434, 0.044022936, 0.056687374, 0.14425397, 0.088969044, 0.048236158,
0.043495093, 0.12534356, 0.04123449, 0.041961994, 0.04737316, 0.04949215, 0.042215776,
0.05538991, 0.04804515, 0.047750466, 0.039347548, 0.039775427, 0.056416906, 0.055215456,
0.04193915, 0.030682992, 0.031298213, 0.03548456, 0.042239256, 0.052048508, 0.042627737,
0.030264527, 0.04564975, 0.03783941, 0.043991935, 0.06315404, 0.038064104, 0.0343222,
0.034450326, 0.045101546, 0.04750398, 0.050233535, 0.055751435, 0.044763703, 0.07310423,
0.044823483, 0.03725966, 0.044517178, 0.052782547, 0.053892463, 0.054225188, 0.043021545,
0.0465227, 0.045707498, 0.04778925, 0.037720967, 0.03920692, 0.042795494, 0.044965915,
0.049713228, 0.040940065, 0.03742191, 0.047313523, 0.058729727, 0.056347616, 0.04669931,
0.043776862, 0.045435846, 0.048043873, 0.055529196, 0.06332868, 0.06365568, 0.06614652,
0.049671385, 0.047733694, 0.17459257, 0.06981993, 0.07097405, 0.04901921, 0.060630523,
0.05680266, 0.064578496, 0.05337936, 0.057786867, 0.051880356, 0.05356504, 0.06134128,
0.060891673, 0.0454404, 0.044372693, 0.07547931, 0.069316745, 0.0663611, 0.0585129,
0.0612008, 0.047621146, 0.052146852, 0.06628068, 0.08089497, 0.055494223, 0.04519335,
0.058580004, 0.047985263, 0.063167036, 0.059099127, 0.07041634, 0.04931529, 0.14070141,
0.054112785, 0.06956312, 0.05433838, 0.0501528, 0.05285271, 0.05674028, 0.05619635,
0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.04306189, 0.05068715, 0.049026944, 0.05750874, 0.057196774, 0.05103064,
0.045965202, 0.0561051, 0.06617087, 0.072730884, 0.052680343, 0.051298764, 0.05505911,
0.05680987, 0.05052921, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 0.0, 0.0, 0.048524663, 0.22723997, 0.0484146, 0.043903843,
0.04687663, 0.053333145, 0.04968089, 0.06227829, 0.050514814, 0.060618315, 0.04010892,
0.04316228, 0.065482266, 0.06531139, 0.048907354, 0.04879925, 0.06405983, 0.050963912,
0.051661, 0.049518064, 0.050787028, 0.061721444, 0.057292983, 0.0786626, 0.093375355,
0.047075413, 0.060191303, 0.14299104, 0.09185381, 0.048021298, 0.03232265, 0.036202587,
0.037074562, 0.03250131, 0.033399783, 0.043458015, 0.033702772, 0.03721522, 0.038152665,
0.036363933, 0.030529264, 0.031021504, 0.036787447, 0.048280507, 0.046591457, 0.04408028,
0.03994169, 0.046008755, 0.03797988, 0.03601198, 0.049537733, 0.059990484, 0.045851897,
0.046985596, 0.049428828, 0.038308375, 0.05106152, 0.048847586, 0.06657121, 0.04581147,
0.044668294, 0.061695773, 0.04749653, 0.063915625, 0.06257012, 0.06399361, 0.047463957,
0.161189, 0.05741123, 0.073026, 0.052429803, 0.047098383, 0.05584856, 0.059950992,
0.055722117, 0.04393184, 0.043340784, 0.045709305, 0.03907595, 0.04089343, 0.047690395,
0.050597776, 0.0416693, 0.0477815, 0.0501847, 0.039231185, 0.04220571, 0.04766439,
0.069570065, 0.058252074, 0.053210817, 0.14404382, 0.05110858, 0.04588653, 0.047226045,
0.06500099, 0.05198009, 0.06400984, 0.048845094, 0.05822377, 0.045037597, 0.048340224,
0.0635814, 0.06253724, 0.050041478, 0.042358037, 0.054968767, 0.045012273, 0.040923484,
0.044953864, 0.041499335, 0.047399532, 0.043194044, 0.06779315, 0.06377169, 0.041367315,
0.048434302, 0.08606881, 0.067431904, 0.04377499, 0.036742933, 0.044662647, 0.04215841,
0.047951527, 0.04872005, 0.04846854, 0.04120337, 0.06047821, 0.050792955, 0.08758584,
0.04209494, 0.04267462, 0.05476766, 0.047850095, 0.04209494, 0.04857791, 0.06810085,
0.055516746, 0.045888383, 0.04786998, 0.04691075, 0.056121156, 0.04895464, 0.08138316,
0.08181636, 0.043948583, 0.052526355, 0.108642, 0.08438537, 0.04782929, 0.04242858,
0.047276802, 0.047895383, 0.05135916, 0.05539715, 0.050624296, 0.04177852, 0.047466207,
0.045973055, 0.060391724, 0.046945613, 0.039570373, 0.04822502, 0.05047089, 0.055634253,
0.06823623, 0.055057876, 0.061972443, 0.04146308, 0.03961287, 0.10137302, 0.06570461,
0.05416753, 0.04020351, 0.049517985, 0.046273336, 0.049882304, 0.044521023, 0.046832718,
0.043534804, 0.028643277, 0.02899814, 0.034741335, 0.034615077, 0.039625816, 0.040880926,
0.0290475, 0.044965763, 0.036045104, 0.046359643, 0.05102056, 0.04261147, 0.034442678,
0.031159285, 0.037455272, 0.0624773, 0.048594866, 0.09173098, 0.03952572, 0.043175653,
0.060122732, 0.05167936, 0.046412144, 0.049887605, 0.05451256, 0.04875098, 0.047104098,
0.045492712, 0.04672356, 0.04742975, 0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.04919652, 0.04048562, 0.066363454,
0.039939653, 0.042935412, 0.043285612, 0.04645771, 0.0376225, 0.04863118, 0.05628731,
0.050655995, 0.04653866, 0.040693298, 0.045399312, 0.05552365, 0.052729405, 0.05757466,
0.057731293, 0.046799768, 0.04597447, 0.06255193, 0.06757591, 0.060687482, 0.0598949,
0.059996877, 0.045970045, 0.05048653, 0.06469134, 0.10369379, 0.0625614, 0.03683474,
0.033074383, 0.04333244, 0.03732419, 0.041750263, 0.049462777, 0.034092, 0.04173977,
0.036924817, 0.051670942, 0.08493508, 0.04951184, 0.036377065, 0.033306718, 0.041151516,
0.042396866, 0.1474121, 0.04162887, 0.03598045, 0.041295007, 0.046448328, 0.03950942,
0.045338277, 0.04166916, 0.046470698, 0.035597045, 0.037095167, 0.053556643, 0.049713485,
0.04360346, 0.042668965, 0.116257176, 0.04063076, 0.03808555, 0.0389919, 0.04691324,
0.0437757, 0.05490119, 0.039585773, 0.049555033, 0.033463333, 0.03652397, 0.04971768,
0.05282442, 0.040207636, 0.040867265, 0.04770063, 0.04084986, 0.06658633, 0.104144506,
0.041003846, 0.03765474, 0.053110026, 0.053815983, 0.055221178, 0.049073473, 0.03692636,
0.049411215, 0.053757295, 0.053053617, 0.045005668, 0.048102006, 0.05122077, 0.072986834,
0.2119825, 0.045505, 0.0391617, 0.051845767, 0.0691626, 0.06721534, 0.06451817,
0.044626508, 0.052768912, 0.05527244, 0.061720487, 0.061846588, 0.056101084, 0.18063127,
0.0454851, 0.054066144, 0.073085815, 0.051046208, 0.05456542, 0.06126706, 0.067792036,
0.0668029, 0.056545403, 0.05790755, 0.052524023, 0.06033341, 0.042775907, 0.04009802,
0.04554337, 0.042285964, 0.04616497, 0.041535445, 0.044323016, 0.038799774, 0.045449797,
0.072548255, 0.044903234, 0.042785212, 0.04799758, 0.046723146, 0.051754314, 0.04161607,
0.047322348, 0.043601573, 0.06785359, 0.11983736, 0.041450612, 0.036894474, 0.05090908,
0.05750005, 0.055471867, 0.053501107, 0.038957696, 0.04836313, 0.052117284, 0.054302614,
0.042595614, 0.07588768, 0.04250559, 0.033721317, 0.03902779, 0.05137888, 0.036107827,
0.044104222, 0.0358639, 0.044009984, 0.03605606, 0.03522077, 0.044572473, 0.042098925,
0.04137786, 0.036512863, 0.03653187, 0.04833834, 0.048012644, 0.05363251, 0.052709214,
0.037694175, 0.050649572, 0.044775892, 0.05829588, 0.07044828, 0.046637505, 0.04063591,
0.04475297, 0.052442297, 0.035860755, 0.040018376, 0.038415555, 0.069993585, 0.09837011,
0.03876515, 0.032487262, 0.046806827, 0.052920677, 0.052739583, 0.046579495, 0.036334712,
0.04138222, 0.04431821, 0.047750607, 0.045837156, 0.06404795, 0.048725102, 0.056839433,
0.056351863, 0.069946006, 0.049265992, 0.14697672, 0.05633914, 0.06782786, 0.05499322,
0.0486386, 0.054983836, 0.06248004, 0.056299396, 0.04393453, 0.043923773, 0.05722518,
0.042860992, 0.047989227, 0.045930974, 0.044659574, 0.04485386, 0.062285822, 0.06715762,
0.05331952, 0.05481517, 0.052165173, 0.045930974, 0.047908407, 0.044150025, 0.040744875,
0.053991992, 0.036302082, 0.041453533, 0.03989192, 0.040918626, 0.037561335, 0.04441599,
0.059843957, 0.046397362, 0.04246187, 0.041682687, 0.039698895, 0.04274808, 0.04582585,
0.044870853, 0.08042404, 0.04047213, 0.045072995, 0.065394044, 0.04166993, 0.04453422,
0.0506122, 0.05254678, 0.05977872, 0.049840946, 0.04433079, 0.04166294, 0.04241666,
0.035827942, 0.03966986, 0.039220624, 0.04609017, 0.0394002, 0.04521958, 0.03982011,
0.054747876, 0.054381974, 0.0491383, 0.042364646, 0.047219194, 0.05018997, 0.053890627,
0.038671825, 0.039412964, 0.038348388, 0.054827236, 0.03676372, 0.04052191, 0.039585073,
0.03923693, 0.036348496, 0.05069641, 0.058553804, 0.046760816, 0.04599002, 0.042648077,
0.04280734, 0.046377886, 0.057237145, 0.05307558, 0.061132412, 0.046117995, 0.043555703,
0.12527432, 0.063806966, 0.059881363, 0.046160344, 0.05078155, 0.054201145, 0.057841737,
0.04605216, 0.050532855, 0.049726505, 0.039747622, 0.053900454, 0.040470943, 0.05268792,
0.055098042, 0.059530288, 0.03950247, 0.07808754, 0.044528462, 0.053129744, 0.05121671,
0.040495407, 0.043199435, 0.047283635, 0.052038603, 0.048176955, 0.062076636, 0.05218695,
0.06498079, 0.06254284, 0.072991796, 0.049587183, 0.16526589, 0.060732022, 0.075320706,
0.057383858, 0.04888923, 0.05669488, 0.06239541, 0.060774855, 0.041286375, 0.06041895,
0.039586812, 0.052081432, 0.04945147, 0.05330185, 0.04194445, 0.09588705, 0.046317484,
0.05725341, 0.04183781, 0.040513594, 0.047545202, 0.051799417, 0.04615853, 0.04821912,
0.053213026, 0.058055505, 0.0480802, 0.04712677, 0.088034585, 0.056669496, 0.062063403,
0.05234009, 0.053098615, 0.056966975, 0.06915676, 0.04866945, 0.05003351, 0.0468176,
0.044543896, 0.06956638, 0.047462698, 0.047413502, 0.052066784, 0.045687616, 0.050531637,
0.056683607, 0.07906881, 0.08842918, 0.042973854, 0.048751865, 0.16530591, 0.088925205,
0.04771321, 0.03176185, 0.030113865, 0.04129132, 0.031912014, 0.036625117, 0.04167185,
0.029367797, 0.03596208, 0.032255206, 0.041299406, 0.055863768, 0.03517346, 0.03411414,
0.031062655, 0.03924802, 0.046088375, 0.052504953, 0.048891302, 0.03925618, 0.040872894,
0.052366294, 0.058873393, 0.043812785, 0.05040588, 0.051216595, 0.03970901, 0.045264997,
0.05891849, 0.090838656, 0.04848596, 0.04829922, 0.15850137, 0.04664515, 0.041299865,
0.048783936, 0.053309195, 0.046101358, 0.051769704, 0.04790838, 0.05279495, 0.040476006,
0.04257657, 0.05788029, 0.055659007, 0.04790838, 0.04668969, 0.060508326, 0.052680794,
0.05237407, 0.05394768, 0.047936134, 0.05376527, 0.05700968, 0.09608622, 0.09382816,
0.04928706, 0.055599302, 0.14690709, 0.083039, 0.047315862, 0.041970447, 0.053081673,
0.048054174, 0.0395092, 0.038327895, 0.0562607, 0.056492705, 0.0616469, 0.04922199,
0.054293003, 0.03907933, 0.045156267, 0.0565598, 0.072303094, 0.044914328, 0.033140313,
0.03511807, 0.04093583, 0.034376856, 0.063230544, 0.03272333, 0.02715685, 0.031137697,
0.0422944, 0.03876676, 0.038976338, 0.03148719, 0.03379815, 0.033222765, 0.037238583,
0.034177803, 0.040652398, 0.04092638, 0.04051554, 0.041964896, 0.044520233, 0.038221043,
0.049166735, 0.052600145, 0.06248818, 0.043191686, 0.044420358, 0.04624389, 0.04189165,
0.03901391, 0.044420052, 0.050364632, 0.055030044, 0.044387743, 0.111749396, 0.04245216,
0.037710685, 0.045850895, 0.061899055, 0.0634312, 0.054847203, 0.0422599, 0.054200526,
0.050051324, 0.05104403, 0.04329485, 0.115535095, 0.040694147, 0.035386983, 0.040235102,
0.046359714, 0.04002262, 0.048920963, 0.041218527, 0.048234463, 0.035043385, 0.036373906,
0.049971316, 0.050705235, 0.045352142, 0.047003113, 0.16433695, 0.046303816, 0.043232527,
0.051454764, 0.052338395, 0.044462726, 0.055180445, 0.05297644, 0.053199034, 0.041933566,
0.042436115, 0.06566702, 0.058227606, 0.046715412, 0.05085668, 0.053467374, 0.06760055,
0.052222688, 0.05957484, 0.07622616, 0.05301203, 0.0687749, 0.058103275, 0.07161097,
0.0733543, 0.06704336, 0.06377067, 0.05576583, 0.06344949, 0.049075127, 0.047612667,
0.093352236, 0.044795077, 0.052551616, 0.07445819, 0.045784604, 0.05514975, 0.05295921,
0.056171726, 0.06441573, 0.049615867, 0.049634207, 0.04900419, 0.052870277, 0.045294076,
0.05907382, 0.046431225, 0.062077727, 0.061173476, 0.06633748, 0.04767581, 0.1353117,
0.050889283, 0.06458902, 0.057487927, 0.05036696, 0.05153061, 0.053406168, 0.057093594,
0.047018655, 0.042033114, 0.061673008, 0.044764, 0.046321645, 0.04337664, 0.04609145,
0.040995028, 0.076148994, 0.05710953, 0.051429633, 0.052634943, 0.04898208, 0.048906818,
0.04941522, 0.041207466, 0.046900675, 0.049350437, 0.042099137, 0.068176866, 0.03979853,
0.032701384, 0.040888164, 0.050718937, 0.05075308, 0.04939154, 0.03849742, 0.043689467,
0.0403366, 0.04172637, 0.054181114, 0.04540931, 0.07072993, 0.052528497, 0.056283087,
0.068133496, 0.05500904, 0.05581097, 0.06545368, 0.06845342, 0.085898414, 0.07727346,
0.056440342, 0.05282951, 0.05941807, 0.047825728, 0.04510993, 0.100220114, 0.045204986,
0.04710723, 0.07892156, 0.04425449, 0.05171757, 0.05140053, 0.05497615, 0.07126978,
0.05401949, 0.045736544, 0.045173366, 0.047478765, 0.05528926, 0.06134115, 0.060651813,
0.051459305, 0.049407773, 0.09794672, 0.06582761, 0.06611658, 0.054897256, 0.05747397,
0.05777244, 0.07767179, 0.054731503, 0.055871688, 0.04961746, 0.045930967, 0.05359124,
0.047706928, 0.057091128, 0.054335445, 0.06358236, 0.04963513, 0.13449125, 0.050886653,
0.062458962, 0.061992597, 0.05172811, 0.050306726, 0.052030314, 0.055877313, 0.0487591,
0.04878527, 0.060721118, 0.051957566, 0.051946662, 0.048121464, 0.051857907, 0.04637442,
0.09257245, 0.07261861, 0.05416175, 0.061291676, 0.06512039, 0.05918928, 0.04991949,
0.037260786, 0.033958197, 0.05237661, 0.035324775, 0.041137867, 0.046732143, 0.034316305,
0.039639886, 0.041466497, 0.048881438, 0.080781944, 0.043716658, 0.03924202, 0.035033382,
0.0426699, 0.03215254, 0.03692371, 0.04680749, 0.0428011, 0.043554667, 0.04260238,
0.03825101, 0.04866196, 0.08357251, 0.061773457, 0.04313633, 0.04652791, 0.05363074,
0.046851743, 0.037113264, 0.04804136, 0.06661992, 0.04944126, 0.044279154, 0.047956236,
0.04462337, 0.056641765, 0.050989244, 0.073313415, 0.08618189, 0.04208011, 0.051916946,
0.13082488, 0.081477955, 0.047637146, 0.03364898, 0.03285985, 0.041299906, 0.03898129,
0.04406022, 0.059817642, 0.034582186, 0.050795678, 0.037262626, 0.05086501, 0.076104045,
0.047139585, 0.036338612, 0.035079718, 0.04228946, 0.07195435, 0.05282539, 0.20243572,
0.046034824, 0.048636816, 0.06025133, 0.05751051, 0.048620198, 0.059234485, 0.06547512,
0.066092245, 0.056004807, 0.05348231, 0.05279918, 0.058642715, 0.04338283, 0.057259403,
0.04405829, 0.05391426, 0.053819425, 0.056785833, 0.04405751, 0.1424965, 0.049226724,
0.06645014, 0.050309602, 0.04426741, 0.050874766, 0.05447632, 0.053801943, 0.043951277,
0.041394092, 0.050830107, 0.048501693, 0.057047054, 0.04790829, 0.0414598, 0.04760603,
0.05024664, 0.06399104, 0.05577918, 0.04342507, 0.045495924, 0.046939805, 0.05488187,
0.060786594, 0.07031998, 0.060308114, 0.051195607, 0.05109614, 0.1433463, 0.06539232,
0.07732857, 0.0513278, 0.058359127, 0.055872962, 0.06305367, 0.056678664, 0.06161901,
0.05299371, 0.045519087, 0.04537863, 0.047866058, 0.048869547, 0.049235333, 0.045948427,
0.043216705, 0.05426172, 0.06272846, 0.0567639, 0.050099406, 0.051210392, 0.047830936,
0.05318483, 0.0656963, 0.035259925, 0.035922162, 0.045457486, 0.03554603, 0.040867943,
0.05431526, 0.035681605, 0.0473447, 0.03926759, 0.0519073, 0.058506943, 0.04716669,
0.041052714, 0.03612868, 0.042913858, 0.046074085, 0.04920493, 0.053157214, 0.047583897,
0.042955097, 0.08866352, 0.05788297, 0.0622972, 0.051024415, 0.04976411, 0.052746095,
0.06785617, 0.047074042, 0.05030821, 0.04586043, 0.036024038, 0.07038437, 0.035341103,
0.03807121, 0.040973485, 0.04444325, 0.035101756, 0.061941803, 0.03869891, 0.044019137,
0.038183067, 0.034036286, 0.047137506, 0.044117834, 0.038833287, 0.045233544, 0.058531098,
0.04774427, 0.047853474, 0.048599802, 0.051468633, 0.057075083, 0.06039363, 0.06787427,
0.06443264, 0.04229869, 0.046274196, 0.08156935, 0.11986787, 0.055473004, 0.039047074,
0.043041524, 0.050878145, 0.048737206, 0.047599282, 0.04382534, 0.04477985, 0.043348253,
0.121756904, 0.062866904, 0.047242537, 0.056744475, 0.059558667, 0.05863726, 0.047014404,
0.041339032, 0.04652391, 0.0454924, 0.055259988, 0.0537666, 0.06481142, 0.04678997,
0.10908822, 0.050653256, 0.05543982, 0.061416853, 0.056138065, 0.045639545, 0.046978325,
0.051341154, 0.03612467, 0.045517825, 0.040603675, 0.034676, 0.038153682, 0.035972755,
0.042760435, 0.03766094, 0.05978729, 0.067356445, 0.034944374, 0.04573264, 0.09071774,
0.060276356, 0.0368219, 0.044218168, 0.06220116, 0.047356542, 0.058252536, 0.054399364,
0.065424, 0.04851667, 0.22743374, 0.05591817, 0.07029683, 0.04980701, 0.04759262,
0.0554812, 0.061318062, 0.05178393, 0.03521548, 0.035006378, 0.046770632, 0.03562886,
0.04187754, 0.050832734, 0.03457382, 0.046849214, 0.04088583, 0.04911485, 0.060032874,
0.045471486, 0.040999472, 0.037024997, 0.041977454, 0.060190823, 0.049904987, 0.15892024,
0.04848184, 0.05522418, 0.05673414, 0.051060274, 0.050589994, 0.060107376, 0.06077661,
0.07842901, 0.05387173, 0.05700017, 0.051281795, 0.0598399, 0.039298486, 0.04804791,
0.045131486, 0.04500066, 0.045715537, 0.053502608, 0.04568503, 0.06150188, 0.060643077,
0.05533972, 0.03958501, 0.041728616, 0.059828103, 0.07369698, 0.054549493, 0.052754812,
0.05709793, 0.06080274, 0.050334815, 0.046294313, 0.10814256, 0.06776517, 0.063577674,
0.055106737, 0.05535745, 0.058738735, 0.08165953, 0.054405794, 0.05673519, 0.047302864,
0.04889804, 0.053112395, 0.053583164, 0.046062052, 0.046202976, 0.090597786, 0.05704108,
0.065003626, 0.047235653, 0.05224387, 0.055989813, 0.0683353, 0.049357, 0.050362736,
0.046686657, 0.06530424, 0.04969652, 0.12651578, 0.043593407, 0.044964053, 0.060424294,
0.059494406, 0.04707796, 0.052309044, 0.060089365, 0.055627946, 0.056003038, 0.05234893,
0.051014185, 0.054693744, 0.041031208, 0.044801284, 0.049622703, 0.05296922, 0.13309626,
0.042281874, 0.035102744, 0.04533503, 0.061260767, 0.057731956, 0.054940578, 0.041844677,
0.04618944, 0.04648165, 0.049875416, 0.036947016, 0.04580388, 0.047101807, 0.05061309,
0.05611812, 0.049944136, 0.040349044, 0.0613913, 0.06215081, 0.0637306, 0.051691156,
0.045655802, 0.051676326, 0.05485234, 0.04936551, 0.044400785, 0.046102095, 0.051316656,
0.07098517, 0.18375641, 0.046998303, 0.03866986, 0.051343355, 0.06358755, 0.064456865,
0.06414857, 0.046177674, 0.048828136, 0.050565466, 0.054225992, 0.037894666, 0.041424207,
0.044475425, 0.056326248, 0.106871225, 0.040035956, 0.033621192, 0.046341196, 0.05731322,
0.05213203, 0.05247908, 0.03839375, 0.043690898, 0.046283837, 0.046147037, 0.04756834,
0.05524949, 0.052891936, 0.046590313, 0.041441225, 0.0571132, 0.07048081, 0.06527725,
0.059940718, 0.0670146, 0.04432362, 0.054346222, 0.06858222, 0.09095904, 0.048725236,
0.05725725, 0.05390028, 0.15367028, 0.04982815, 0.06012087, 0.07318703, 0.051836427,
0.0578285, 0.06346544, 0.06420065, 0.0750896, 0.055865344, 0.059908334, 0.055994365,
0.061176304, 0.039717004, 0.04652716, 0.042830627, 0.03822205, 0.038551196, 0.040093493,
0.04389413, 0.040518697, 0.06316094, 0.056314457, 0.03845967, 0.05189931, 0.070062496,
0.05189931, 0.03614675, 0.046118725, 0.059215248, 0.04804475, 0.05306721, 0.051817343,
0.058302183, 0.056587458, 0.06749535, 0.06518431, 0.064426325, 0.045214556, 0.048843093,
0.08020578, 0.09556659, 0.05273103, 0.057218574, 0.06320765, 0.061283827, 0.0531938,
0.051488254, 0.14825775, 0.06904853, 0.07871961, 0.053210422, 0.058741633, 0.059331477,
0.077770844, 0.056434013, 0.059462547, 0.052631065, 0.048779108, 0.046643633, 0.058351003,
0.05330801, 0.063732825, 0.0519007, 0.0415677, 0.049679786, 0.05255326, 0.063732825,
0.07034625, 0.046617825, 0.045830335, 0.04629462, 0.05872538, 0.045327112, 0.041432027,
0.05874816, 0.049628586, 0.05788667, 0.052826375, 0.040213197, 0.04545789, 0.05452397,
0.05641641, 0.077935465, 0.050718922, 0.04406558, 0.042374074, 0.0486013, 0.056068685,
0.056867525, 0.063367374, 0.048943266, 0.048084304, 0.20182109, 0.06400749, 0.07538487,
0.04811826, 0.05718647, 0.057082903, 0.06560226, 0.051145006, 0.05384595, 0.05247456,
0.0480858, 0.060760375, 0.05170082, 0.0453865, 0.04288774, 0.047407746, 0.062212795,
0.054296173, 0.07232052, 0.10238504, 0.04170171, 0.057064716, 0.111134924, 0.07652764,
0.04459903, 0.04556133, 0.11585831, 0.04540903, 0.03573837, 0.038583018, 0.05232396,
0.043163452, 0.05016518, 0.041274793, 0.049595874, 0.035055455, 0.03943879, 0.054626837,
0.05101358, 0.042477455, 0.037376873, 0.040195987, 0.043589447, 0.042650722, 0.04928771,
0.053227246, 0.039513815, 0.055403225, 0.044619713, 0.060384028, 0.05431035, 0.045729276,
0.045902953, 0.040628295, 0.04481348, 0.04304266, 0.0542182, 0.046066903, 0.0411468,
0.03894705, 0.04885406, 0.052845594, 0.049762912, 0.05728228, 0.07035408, 0.03738025,
0.052559167, 0.07626086, 0.05751354, 0.039249025, 0.037378795, 0.047275025, 0.042750373,
0.0501381, 0.053290438, 0.047078118, 0.040903572, 0.0667583, 0.056616243, 0.07065173,
0.0443147, 0.04315245, 0.056318313, 0.049550712, 0.042784892}}};
const std::vector<ApproximatePredictInputs<float, int>> approximate_predict_inputs = {
{1000,
15,
200,
5,
10,
{-6.6041913, 3.047298, 4.655545, 0.83865887, 5.1522017, 0.3939999,
0.3331755, -1.1581178, -9.929659, -7.6812243, -8.826958, 2.2155986,
-3.1659298, 0.20807476, 8.883152, -3.3655725, -4.4421263, 6.543759,
-3.375205, -4.51362, 0.98012805, -7.751923, 5.0105557, -7.2450852,
9.294448, 5.3771634, -5.3180985, -7.419219, 5.438088, 4.452026,
1.8479553, -6.0338445, -8.907131, 8.805243, 10.003104, 5.6695657,
-4.9876866, -8.158999, 3.3457386, -1.8733226, -7.682, 0.24759215,
-9.318731, 8.401295, -3.555015, -4.4170446, -1.9870255, 3.7783911,
-5.762051, -8.429805, -3.3490055, -7.17304, 8.952734, 6.365469,
2.6988056, 8.0805855, 5.6108527, -6.6712294, 7.8428493, 0.3387423,
2.1070492, -6.419331, -8.904938, 9.92732, 9.172394, 6.3302364,
-3.1055036, -8.590717, 4.775813, -2.268273, -6.4184856, -0.3573347,
-9.482119, 6.863401, -5.1427565, 2.5543456, -3.5910947, -0.4267789,
0.7572713, -5.5823216, 9.949384, 5.5132847, 8.1076145, 8.348458,
1.9715664, 6.813264, -7.682309, -6.097306, -8.253738, -4.2600827,
-2.3817012, -3.894461, 6.1327686, -2.828165, -4.9104753, 1.1062309,
-7.3330817, 5.3864703, -7.576201, 10.387991, 5.0214605, -5.6772346,
-9.51337, 6.7588396, 2.9511678, -7.7766223, 3.0740974, 5.4775443,
1.4106811, 5.4639354, -0.27586702, 0.24972604, -1.0978495, -9.511789,
-7.621691, -9.220928, 3.5146074, -3.2389846, 0.3064171, 9.214225,
-3.414265, 1.9911942, 3.5397892, -2.9501078, 9.399453, 8.767797,
-4.195589, -1.7146497, -4.354175, -3.7303705, -9.468127, 1.6166995,
0.3621102, -8.335985, -4.279626, -5.5144205, -2.0442793, 6.532834,
-6.062912, -8.655461, -4.476213, -6.9485946, 8.362085, 6.357876,
1.4709525, 8.155231, 6.5162497, -7.138609, 6.262826, 0.102894336,
-2.7064266, -3.9988105, 5.909083, -2.377781, -4.74294, 0.13764441,
-6.9854836, 4.1717424, -9.73467, 9.613495, 5.5026045, -6.2842097,
-10.995163, 5.7367043, 4.4109936, -7.640729, -4.1871195, 0.62842536,
-1.0466465, -4.532282, 2.2611666, -8.951952, -4.617881, -2.355307,
-1.5662061, 5.744808, -5.6937137, 0.04490433, 1.967616, -9.7449045,
-2.5019507, 8.096001, 3.8942993, 1.7595124, -7.306286, -7.0109897,
-8.798673, 7.694308, 1.972951, 4.5020027, -9.543178, 8.01537,
5.991318, -5.8540792, -7.210287, 2.2041163, -3.6701603, 0.41913012,
0.43260413, -5.7361946, 9.763319, 6.103354, 8.703783, 8.318354,
1.6460444, 9.875157, -8.798932, -4.6066346, -9.071033, -4.456251,
-8.153979, 4.9592443, 3.8976126, 0.15902549, 5.4066496, 0.08121103,
0.7939708, -1.6604385, -7.720359, -8.001697, -9.532967, 2.1703176,
-3.6261456, 0.6018867, 8.9938345, 5.479017, 4.370349, -8.90881,
-2.5305526, -7.1432943, 8.797483, 3.1349554, -3.4217916, -9.352138,
-4.1753564, -3.9112256, 5.050787, 2.9310477, 6.8177004, -0.8608325,
-3.4482756, -2.722893, 4.9118657, -7.190457, -8.671057, -3.755069,
-6.2711267, 8.648108, 6.784892, 4.450313, 8.627825, 5.707677,
-7.2843714, 7.566874, 0.123956904, -8.208866, 3.882511, 5.6097383,
1.1503326, 5.3655376, 0.79534316, -0.032866176, -0.14423996, -9.961533,
-7.7797327, -9.568259, 2.8146017, -3.401666, 0.4750985, 7.6241474,
-3.0593903, 8.082423, 3.354535, 2.3287637, -7.6519837, -8.387133,
-8.566322, 9.068623, 2.0180507, 4.748395, -9.53103, 9.328975,
7.2922063, -5.95641, -6.176326, -4.9280424, -2.1777153, 4.6793118,
-5.1208076, -9.583013, -5.215721, -6.7620707, 8.578337, 6.402896,
3.980733, 7.502372, 6.492162, -6.951472, 8.388737, 0.41837123,
-3.5695906, 8.701726, 5.239358, 2.1230352, -7.751644, -6.758883,
-8.568605, 6.7048225, 2.1299078, 4.2021976, -10.38839, 9.648648,
7.041402, -4.995082, -5.625839, 4.5795603, -3.704867, -0.32394007,
1.5339218, -6.024277, 9.667898, 5.2046275, 9.105326, 7.7133164,
2.3471587, 7.1040235, -8.533861, -4.874834, -9.632401, -3.9160802,
-7.908441, 3.0715456, 5.042133, 0.88956624, 5.058157, 0.16772307,
0.91569895, -1.2866435, -8.7948265, -7.439278, -9.577955, 2.404209,
-3.4939048, 0.5730508, 9.129821, -3.319531, -0.10301979, 4.063939,
-1.6584153, 8.753799, 7.882726, -4.645801, -0.1323107, -4.479451,
-3.846734, -8.674804, 0.6454921, -0.40323806, -7.3481636, -4.0186872,
2.994777, -4.9355974, 1.8264719, 1.2624903, -6.6472807, 10.321332,
4.821721, 9.074615, 7.775707, 2.4050736, 8.392983, -9.245799,
-6.027439, -8.361249, -3.513845, -2.6801074, 9.689147, 5.472508,
1.1138442, -6.4614472, -6.389289, -9.046622, 8.286518, 1.9172614,
4.249355, -9.70946, 9.409102, 5.88146, -6.7612534, -5.247347,
5.094545, 7.6293974, -4.87967, -8.291521, -4.991865, -1.1040689,
6.395935, 7.3338923, -11.181063, -0.045847546, -2.128726, -6.0191855,
-7.1829877, -3.8500516, 8.434917, -1.4297206, 10.418551, 6.0829315,
2.819026, -6.1627836, -6.4653406, -8.293475, 6.937693, 1.449561,
4.1590896, -9.707439, 9.080937, 7.136324, -5.084504, -6.3016157,
-2.7217407, -4.9501724, 6.070602, -3.6528919, -4.410214, -0.17823021,
-6.51875, 6.082442, -8.213283, 9.4114065, 5.187853, -5.824392,
-10.704393, 6.058635, 3.5354173, -6.192646, -3.6094382, 0.77813494,
-0.4960521, -4.9251547, 2.7132092, -6.3753676, -5.402118, -2.4490068,
-1.2315263, 5.6464534, -5.7631507, -0.056371227, 1.3752958, -9.047085,
-2.6350596, -5.3330426, 6.5956073, -3.3130417, -3.342689, 0.33955404,
-8.160975, 4.5369, -8.530538, 9.344388, 4.444662, -5.193228,
-9.422486, 6.289476, 5.0921755, 3.3196993, -3.5289283, -0.2934442,
0.9484976, -7.3288827, 8.1533575, 5.759379, 8.521267, 8.038143,
1.9454731, 7.7540045, -8.261828, -6.943514, -8.121774, -4.4425726,
1.9670354, -6.8249884, -8.026754, 9.301449, 8.4870825, 6.288525,
-3.4376528, -6.976936, 4.3059134, -0.6643575, -7.517175, 0.17535661,
-8.437576, 7.2534637, -5.069008, -3.9012227, -0.2620066, 4.9176264,
-2.9958425, 10.027427, 9.624045, -5.567969, -0.46590063, -3.0767615,
-4.846028, -9.174853, 1.6435078, -0.16618606, -9.002738, -4.975077,
-2.8667848, 1.7363213, 4.0991964, -2.5258641, 9.248608, 9.047988,
-5.693675, 0.0037772516, -4.5349064, -3.9938538, -9.143254, 2.3926642,
1.2261714, -8.996827, -3.8973618, -5.5837207, -2.331108, 5.3350635,
-6.3448434, -7.7639937, -4.5786624, -6.8803196, 7.6580687, 6.1537514,
2.4382505, 7.3941555, 7.1036377, -6.1442103, 7.2949185, 0.2889209,
-1.7334696, 8.286352, 5.0688205, 1.2357788, -7.3162656, -5.5402875,
-8.971806, 7.475726, 2.6313477, 4.508429, -9.483017, 9.653669,
8.3312435, -5.793551, -6.222731, 4.6132526, 5.9116755, -8.824667,
-2.0805092, -7.4759617, 6.786457, 2.4575458, -3.5497286, -8.386408,
-4.6061974, -3.5168798, 4.868839, 2.7742658, 8.687114, -0.013885555,
-3.2930691, 0.26906267, 3.6637099, -2.1926463, 9.404198, 8.325509,
-6.3163705, -0.8453042, -2.3073251, -3.7972465, -9.130233, 2.064124,
-0.1195946, -8.153388, -4.770488, 5.3647437, 8.754217, -2.4106593,
-8.346487, -5.763693, -2.276131, 5.0921474, 7.7174997, -9.7048435,
0.22228764, -2.6318195, -4.409874, -7.9550424, -2.8741581, 7.840001,
5.0947356, 5.429874, -9.812888, -4.2619257, -8.087908, 8.001339,
3.0177667, -2.7421856, -9.419765, -2.6492429, -4.1717587, 4.741101,
2.743903, 7.6225433, 0.381313, 4.0469112, 5.2156196, -7.4159937,
-2.8641448, -8.19136, 6.6477365, 2.6988661, -4.3152432, -8.901968,
-3.5470922, -2.5869286, 5.059407, 2.251304, 8.569148, -0.765362,
-2.553056, -5.0130887, 4.742057, -3.90204, -5.3189387, 0.9485549,
-7.409879, 5.102205, -8.705802, 10.091475, 5.814383, -7.3554554,
-9.979342, 5.34089, 4.768357, -0.7485468, -5.175415, 6.3486204,
-3.1219993, -4.6762595, 1.2888644, -6.5178156, 5.8297715, -9.374522,
10.629847, 5.3781567, -4.814461, -11.461468, 6.4460106, 5.480171,
6.831538, 8.236086, -2.4162357, -6.621108, -5.680868, 0.29442224,
6.377431, 6.540744, -10.304505, -1.3647615, -0.5268666, -4.44151,
-8.313548, -3.4072156, 8.925119, 4.687855, 7.71594, -4.401417,
-7.7435193, -6.1070724, -1.3846647, 6.259015, 8.385041, -9.512905,
-1.0728005, -2.5372021, -5.5191984, -8.253586, -5.3255544, 9.802876,
3.9939768, 6.0524645, -7.8714094, -2.7126296, -7.209169, 7.2356877,
2.629668, -2.566546, -8.29111, -3.8988457, -2.5496798, 2.936725,
4.1876006, 6.5500836, -0.99466646, 4.7416415, 5.496659, -8.304664,
-2.765846, -6.812615, 7.348469, 1.4452133, -2.2409484, -8.469595,
-4.192831, -4.373715, 4.8692636, 2.6691291, 8.846158, -0.7978366,
-4.605248, 1.2223709, 3.5939949, -3.0447555, 11.054072, 8.7564,
-4.444807, 0.3384347, -3.7093132, -4.1057897, -8.998789, 2.3816452,
0.017237613, -7.9125266, -5.345933, -6.772394, -2.8415458, 0.12006761,
-2.55625, -4.9569845, 3.1021261, -7.3192515, -4.5409083, -2.5607505,
-0.5273524, 6.485698, -4.9925747, 0.032850716, 0.920058, -9.360417,
1.3626592, -6.768956, -9.075602, 9.5362015, 8.011105, 6.199791,
-3.516889, -6.7469044, 3.3474243, -0.9698291, -8.821939, -1.7708311,
-8.720887, 8.182516, -3.9994414, -5.5655613, -1.633937, 6.48551,
-5.1894245, -8.7144, -5.274194, -6.929163, 8.758403, 5.891101,
1.5023059, 6.833816, 7.0916185, -5.264798, 7.2305326, 1.2577746,
-4.106659, 0.094991215, 4.661225, -2.1144485, 8.829437, 9.947514,
-5.2466745, 0.7931179, -4.7052836, -4.3895082, -9.605378, 2.930508,
-0.3123664, -9.000139, -4.072303, -4.003389, -1.1616999, 4.3899875,
-2.035924, 9.748104, 8.800784, -5.7588625, 0.29111838, -3.8833985,
-4.391245, -7.9865365, 1.5285267, 1.2961063, -8.948176, -5.71197,
4.4741416, -3.0377352, -0.11647939, -2.1837972e-05, -6.5728817, 9.695845,
5.6708007, 9.6671715, 8.449491, 2.636698, 8.666284, -9.202659,
-6.217332, -8.353392, -3.5294595, -2.3272796, 9.561563, 3.7740133,
1.04885, -6.514268, -6.67222, -8.662983, 7.5660367, 1.5462829,
4.324029, -9.38316, 8.898151, 7.954895, -5.4215345, -7.197413,
2.0961356, -7.878846, -8.074521, 8.564786, 9.801723, 6.1855946,
-4.9879074, -7.9315515, 5.222017, -0.8518596, -7.599498, 0.6562786,
-9.164762, 8.333082, -5.588538, -3.5440683, -0.5231263, 3.665892,
-1.6147137, 10.405448, 8.815226, -6.937167, -0.20391269, -4.183826,
-4.478031, -8.892283, 2.8046355, 0.7467902, -8.658428, -5.68761,
-2.7461247, -4.6542172, 7.903271, -2.9446023, -3.6216543, 0.8809103,
-7.5200768, 5.8712325, -8.626881, 10.160291, 6.407123, -5.7739096,
-9.058603, 5.633876, 3.8469055, 7.028618, 8.562295, -3.613276,
-7.707871, -6.4942007, -1.8473703, 6.7223425, 7.5253367, -9.006142,
0.13059531, -2.6228805, -4.8181324, -6.59743, -2.6189253, 8.74365,
6.2895265, 9.399066, -4.077298, -6.7908845, -5.1946006, -1.9205595,
6.919768, 7.6106286, -9.835867, 0.4013528, -2.069671, -6.135895,
-7.071083, -3.4044542, 8.802136, -4.602445, -2.2868743, 5.1336083,
-6.04972, -8.588211, -4.5931864, -5.953241, 10.075162, 5.160576,
1.3218237, 7.2575912, 5.0105505, -6.236251, 8.375435, 1.8327441,
-1.5218296, -4.9718084, 6.3242707, -1.0184163, -4.1699986, 0.5834959,
-8.725222, 7.0167747, -9.450634, 9.035332, 5.212949, -5.0341983,
-9.3482895, 6.1987424, 4.3529515, -7.3496304, 3.210813, 5.1807637,
0.8126032, 5.6510706, -0.69063073, -0.2633498, -2.8945854, -8.636354,
-8.122011, -9.261415, 2.7266417, -4.460084, -0.7099316, 7.447609,
-6.223276, -3.3375864, -0.11186252, -1.8217812, -4.3880253, 1.2949482,
-7.783603, -4.490462, -2.0606906, -0.6948074, 5.839032, -5.410896,
0.18852852, 2.1216168, -9.143247, 2.9236112, -6.2569003, -8.700651,
8.565221, 8.548126, 6.752273, -3.2680845, -9.128569, 4.797548,
-0.7951003, -6.5795193, -1.0359513, -10.268307, 8.0821295, -4.472451,
4.2302337, 5.1866417, -7.803733, -2.3174598, -6.717919, 6.568632,
2.2257285, -2.8507524, -8.415178, -2.7076874, -3.201958, 5.8858,
3.1291618, 8.487772, -0.78216195, 2.5387375, -7.1927094, -8.677946,
7.471042, 9.92616, 5.0749016, -2.8553307, -8.277642, 3.5364935,
-1.6363639, -7.7608624, 0.8965101, -11.053696, 9.080284, -4.587784,
-7.3328085, 5.039416, 5.043607, 0.28917202, 5.8478637, 0.27441445,
0.5048246, -1.6188425, -8.81807, -8.532185, -9.913063, 2.0225337,
-3.6533992, -0.59011126, 7.67236, 6.8135934, 8.113954, -4.477491,
-9.500047, -5.6973433, -1.5079542, 5.7415495, 7.314771, -9.506487,
0.6844271, -3.300923, -6.4778433, -7.8159285, -2.8471754, 9.200658,
-7.8914022, 4.9856186, 6.6034303, 1.8931761, 6.939605, -0.769734,
-0.118914194, -2.2814384, -9.182385, -7.719249, -9.477782, 3.2946892,
-2.8502502, -0.8965499, 7.733449, 2.019114, -5.7543383, -7.983907,
9.620951, 8.990881, 5.4120574, -4.337236, -7.909189, 4.0667114,
-2.71705, -7.5649147, 0.01420066, -9.506511, 9.0903, -4.5291543,
-2.6467242, -4.4752617, 6.6260858, -2.1867945, -3.7042851, 1.1997058,
-7.4351587, 6.5632153, -8.573365, 8.980399, 5.162594, -5.9665174,
-9.252407, 6.180759, 3.2907078, -6.1920424, -3.7303412, 1.385776,
-0.8483552, -3.9733107, 1.0786366, -7.8821554, -4.2430034, -2.6074026,
-1.669743, 7.391693, -4.9450483, 0.7061716, 1.8987172, -9.219538,
-5.8968096, -4.1738825, 0.7590989, -1.3815836, -3.387182, 2.2010822,
-8.451245, -3.273762, -3.3067758, -1.3362367, 5.2865562, -5.044418,
-1.2102832, 4.0447154, -8.331752, -5.007557, -1.5802612, 4.6986103,
-5.289203, -8.561202, -4.2709618, -6.6485515, 7.674484, 6.1901064,
2.7824886, 7.5194063, 5.92236, -5.4720984, 7.6036234, 0.945973,
-6.832204, 4.4715323, 5.492118, 0.6609872, 5.0978894, -0.24738201,
-0.6192307, -2.6308036, -9.912141, -7.476407, -9.828612, 2.0150447,
-4.28316, -0.14732587, 7.9036283, -7.020154, 5.444887, 4.7633314,
0.890556, 5.780306, 1.0112236, 0.09077269, -2.3818946, -9.049624,
-7.282824, -9.367125, 2.3375149, -3.0168152, 0.54103905, 7.613425,
-5.4322505, -1.360736, 4.196054, -5.293789, -8.067301, -4.3440394,
-6.515967, 7.900019, 5.486329, 2.8337772, 7.9041967, 4.7610354,
-5.9031844, 7.4709387, 0.65188056, -6.998253, -3.8609188, 0.6755554,
-2.2303321, -3.9414937, 2.1283765, -8.5455885, -4.7593765, -2.962287,
0.44278103, 6.0931063, -6.9413614, 0.62491417, 0.76517856, -8.313108,
2.801484, -6.682013, -9.06558, 8.026709, 8.556826, 4.752688,
-3.978621, -8.465464, 3.008049, -1.2446365, -7.3044558, 1.3687768,
-10.370548, 8.997974, -4.1432295, -8.261298, 4.7363267, 5.042858,
-0.12264052, 6.6704116, -0.4278814, 1.5015568, -0.55427516, -10.192324,
-7.1189384, -9.827426, 1.4215178, -4.010099, 0.44976008, 7.827414,
-2.03553, 0.19619605, 4.163274, -3.68158, 9.636696, 7.863422,
-4.4787655, 0.45206985, -3.0309298, -5.017168, -9.149868, 2.5859582,
0.3631776, -9.576135, -6.101458, 1.8643811, -5.8853703, -9.488215,
9.767247, 9.420786, 5.93236, -3.921636, -8.121643, 4.5388584,
-1.9506255, -8.0929985, 0.62895024, -9.729104, 8.816781, -3.6347952,
2.7928703, -4.330242, 0.4543469, 1.4141184, -6.402065, 10.208152,
5.8743396, 7.265231, 7.52592, 1.5647101, 8.773037, -10.067522,
-4.296036, -8.723736, -3.3891797, -4.8869085, -1.5862483, 5.37258,
-5.6078396, -7.5634336, -5.5277634, -7.0703654, 8.907505, 6.482465,
3.6150064, 8.267969, 5.8735623, -5.358689, 7.351351, 1.438466,
-5.5339947, -2.9931746, 0.41248065, -2.846398, -4.6008925, 3.144954,
-7.2261305, -4.8566184, -3.0261056, -0.2901663, 6.0862327, -6.1737766,
0.02791168, 1.5740607, -9.716679, 3.8797028, 4.82684, -9.363322,
-3.450865, -9.67764, 7.643276, 3.9038982, -3.278565, -9.7202215,
-4.230297, -3.8175473, 3.8798141, 3.3532772, 8.311437, -1.3645178,
-1.8129332, -4.727099, 6.6193047, -3.1251788, -5.4156384, 1.0342423,
-5.8569813, 5.4253364, -9.050108, 9.128229, 5.95211, -5.7117467,
-10.000534, 5.666615, 4.201811, -2.965483, 9.047465, 4.0375896,
1.7039808, -6.1752224, -7.283934, -8.253344, 6.532728, 2.3931632,
5.1705494, -11.318461, 8.84037, 7.0528035, -5.8953495, -6.1036987,
-1.6432287, -3.675063, 6.6293554, -3.7803771, -4.622883, 0.88476545,
-8.466554, 7.0160046, -8.409323, 10.004685, 6.400764, -6.4667864,
-9.487728, 6.2323236, 4.008286, -6.3161836, -3.5321717, -0.33144033,
-0.5810744, -3.6746502, 2.7397878, -6.9031897, -4.1433754, -2.2017603,
-0.46433043, 5.455691, -6.408046, 0.35598817, 2.9326055, -9.938367,
-7.2791057, 4.337263, 4.5251927, 1.5873216, 6.928788, -0.3905743,
0.19146807, -1.8419421, -9.834208, -9.763881, -9.087892, 2.1363218,
-4.8400645, 0.6986468, 7.393917, -8.467025, 3.7889616, 5.345175,
2.9066525, 4.8151107, 1.3040056, 1.8577529, -1.5172974, -9.700324,
-8.326617, -9.419158, 1.7435046, -3.7662477, 1.21923, 7.72357,
-1.9473196, -4.02339, 5.3303733, -2.3646116, -4.5449166, 1.3491421,
-6.6376634, 7.1694803, -7.5615697, 10.680339, 6.455465, -5.821721,
-9.867028, 6.9202003, 4.6496034, -7.749109, 4.531767, 5.187277,
1.1581954, 5.600595, 0.25167716, -0.42003885, -0.4456361, -10.793382,
-8.32154, -9.437405, 3.0168574, -2.6095595, -1.1293902, 7.7959437,
-7.5359325, 3.948884, 4.5347214, 1.1992495, 5.818175, -0.35101187,
-1.0835941, -0.5610799, -9.358461, -8.107908, -8.771405, 3.7954552,
-5.6742187, 0.4753984, 7.152675, 2.4811144, -2.88918, 0.5142701,
1.3850923, -6.2455187, 8.165029, 4.900146, 9.260652, 8.0132065,
2.7846925, 8.719777, -8.906841, -6.6375875, -9.023412, -5.113224,
-8.399863, 3.5913968, 5.2158575, 1.1589565, 4.4308906, -0.008243978,
1.1309544, -2.2390187, -8.600879, -8.934291, -7.8892894, 2.440326,
-3.6109486, -1.1530097, 8.107834, -4.9431596, -2.319937, 5.7009645,
-5.801145, -8.528718, -5.3833385, -7.904932, 8.599324, 5.2347975,
4.141636, 5.9861455, 5.513319, -6.2569084, 7.595587, 0.013943018,
-4.142541, 0.620121, 6.360386, -2.2407918, 11.350814, 9.647084,
-5.4052744, 0.18851137, -2.7359867, -3.0224962, -9.664997, 2.3685153,
-0.9469063, -8.393157, -3.3397224, -3.9067862, -0.10742425, 2.8030407,
-2.3275099, 8.300625, 8.723764, -4.5525885, 0.450315, -4.438627,
-4.3970976, -9.873077, 1.8074089, 0.23993115, -9.321578, -4.598021,
-5.0831814, -0.39099583, 5.141868, -4.625487, -8.255296, -3.795106,
-7.945507, 8.853691, 5.647925, 2.0755963, 8.6500845, 5.9238024,
-6.97091, 7.5997877, -0.12347032, -6.314487, 4.9583936, 5.130986,
1.7345638, 5.136751, 0.45631626, -1.2190984, -2.1630406, -8.806135,
-8.016931, -9.226136, 1.7854582, -2.4193735, 1.0047846, 8.01085,
6.3658514, 8.074824, -2.4717648, -8.868973, -4.351266, -1.4050578,
6.702588, 8.387754, -9.468425, 0.49520385, -1.4821761, -5.8962784,
-7.622635, -3.7485554, 8.826997, -0.86598396, -5.1047907, 7.039303,
-2.1232376, -4.770006, 1.5805304, -6.6465664, 7.2535143, -8.837529,
8.837634, 5.0910525, -5.879489, -9.848808, 6.033822, 2.8007088,
4.937564, 4.8684483, -7.5395145, -2.9962616, -8.600703, 7.3937197,
1.8701361, -2.9163053, -7.182109, -4.330954, -3.2636728, 3.3632696,
3.0688832, 7.124255, -0.99308574, -2.638899, 8.923411, 4.670547,
1.8702682, -6.2049117, -5.3327436, -9.228572, 6.3646607, 1.9605027,
5.967248, -10.150883, 10.545579, 7.8232436, -6.1407294, -5.9652123,
5.3021097, 5.697137, -9.198297, -3.7776916, -7.578472, 7.142189,
2.5271366, -3.3932774, -6.9203315, -4.711512, -4.889296, 4.824787,
2.4876537, 7.9655023, -0.14698339, -4.1244345, -2.021293, 4.902458,
-5.1067815, -7.4282646, -3.9189434, -7.2638535, 7.4607773, 5.3818192,
2.7193286, 6.9704823, 6.209919, -6.655646, 7.1104774, -0.037077304,
-3.5080817, -0.091356404, 4.312471, -2.4351866, 9.492442, 9.374046,
-5.0860243, 0.64213854, -3.4670746, -4.991362, -9.969674, 2.0132246,
0.56936646, -8.866785, -4.877276, 3.7930262, -4.497649, 0.38297787,
0.49704826, -5.7800245, 9.413736, 5.985708, 9.551376, 8.38068,
2.1491826, 7.738736, -8.2321005, -6.453506, -9.336299, -3.7555673,
-5.2762284, -2.4409008, 4.737542, -6.4841266, -8.076311, -4.2627306,
-7.9692197, 8.009572, 6.3980284, 2.2879598, 6.941353, 6.021227,
-6.0280805, 6.845733, 1.1719347, 3.9631398, 5.4886236, -7.7040186,
-2.1827116, -7.6585855, 7.0083194, 3.129221, -2.909753, -9.161689,
-4.336779, -4.9469824, 4.699946, 2.693261, 8.539589, -0.30436292,
-2.8498523, -6.0499063, 7.512279, -3.3455105, -3.5669465, 1.6020763,
-8.081716, 5.2820516, -6.716635, 10.963387, 5.4228325, -5.8455253,
-10.33115, 6.6118755, 3.6351194, 2.3213682, -6.6318, -7.909522,
9.037989, 8.789794, 6.8610635, -4.059716, -7.677982, 3.350473,
-1.651829, -7.5003176, -0.087481745, -8.976351, 7.998356, -6.1098523,
-2.733466, -4.352908, 6.323479, -2.3540924, -5.033011, 0.88404334,
-6.700715, 4.5982275, -7.023845, 9.539533, 4.0273676, -5.9150205,
-9.866147, 6.2519026, 4.830981, 2.6559327, -7.324616, -8.168721,
9.937086, 9.799283, 6.366573, -4.599236, -8.639354, 4.559394,
-0.6514058, -7.586549, -0.38899848, -10.730295, 7.4368114, -3.486407,
-8.117989, 3.626084, 5.706674, 1.8172352, 5.8419905, 0.9366346,
0.07541532, -0.2771901, -10.122262, -8.551051, -10.603388, 2.4161515,
-4.065486, 0.5395699, 8.322053, -3.0090036, -3.4955814, 5.9249964,
-1.9945337, -4.132573, -0.41256574, -6.736785, 5.9615726, -8.827814,
9.909208, 5.7771387, -5.530372, -9.06478, 6.5949235, 3.174203,
-3.0626688, 1.1968384, 4.072327, -2.169729, 9.938285, 9.626275,
-5.1897626, 0.06541137, -4.394287, -4.29422, -10.080205, 1.6378499,
-0.14403796, -9.631019, -4.780583, 2.4608018, -3.5488522, -0.49014947,
0.64396673, -6.9995327, 9.776471, 6.7636576, 7.878867, 6.599434,
1.8705788, 8.448473, -8.077088, -6.392767, -9.235902, -3.2361362,
-4.928245, -0.8914784, 5.023839, -6.237934, -8.573552, -4.530052,
-5.778845, 9.413736, 6.237675, 1.7729601, 6.5710874, 6.084028,
-6.0331726, 7.588944, 0.45072612, -2.315514, 9.593314, 4.64898,
2.9906435, -7.064887, -4.9759912, -8.400361, 6.7235126, 1.2726755,
4.499182, -9.744734, 9.897998, 6.980119, -5.804198, -6.9562564,
-4.3378525, -1.4484497, 4.321709, -3.7100258, -9.442933, -3.5404868,
-6.4736757, 7.7720532, 6.075397, 2.6129482, 8.34312, 5.574124,
-6.4519606, 6.2159314, 2.0011787, -1.7032428, -4.3645062, 6.889859,
-2.9379845, -3.1384661, -0.18151554, -7.0837593, 6.089457, -7.939691,
9.147041, 4.2243247, -5.9159102, -9.692697, 7.3519483, 3.2478933,
-7.473241, -3.070959, 1.0393138, -1.0344299, -3.1107128, 1.5728164,
-5.9869685, -3.5045729, -2.8383517, -0.08366621, 4.929114, -6.5836844,
-0.14132404, 1.478007, -9.810582, -6.901405, 3.2046943, 5.407528,
1.039397, 5.34749, 0.830437, 0.14775743, -1.0210586, -9.465323,
-7.9485846, -7.761438, 1.719269, -2.810488, 0.6308984, 7.815662,
-2.667733, 9.271442, 5.279388, 1.4109445, -5.8347454, -7.069896,
-8.853285, 6.8004746, 0.32533205, 4.7802835, -9.072519, 9.201268,
6.6957464, -5.3920603, -7.4572825, 0.59805363, -5.630727, -9.459207,
9.856493, 9.97475, 5.5375338, -4.700674, -7.8012986, 3.0936337,
-1.1252109, -7.8972144, -1.3432727, -10.873557, 8.345087, -5.2174907,
-6.2138515, -3.6057966, -0.26815072, -0.37344697, -4.244429, 2.2502527,
-6.7143726, -3.9938557, -2.0055676, -0.6776134, 5.274813, -5.753472,
-0.51591957, 1.9242831, -9.094253, -7.456514, -2.6904464, 1.4621465,
-2.2656834, -3.6926217, 1.8849756, -7.025682, -3.9511502, -2.8368485,
-1.4272363, 5.2201405, -6.6478586, -0.29758677, 1.8012666, -9.572023,
6.0411367, 8.145005, -4.399552, -6.910796, -7.389369, -1.5188481,
7.1589227, 6.259904, -9.581112, -0.26159045, -1.9099423, -4.895685,
-7.4640527, -4.671746, 8.174696, 6.051852, 8.740961, -3.643459,
-8.109215, -6.5092273, -1.071357, 5.814012, 7.296874, -10.108911,
0.80167013, -1.7618455, -5.599092, -7.2626696, -1.7728385, 9.480107,
-1.8119643, 8.4845295, 3.6446114, 3.0241032, -7.105503, -7.0556927,
-7.9085917, 7.712884, 2.3414216, 5.6769533, -10.038773, 10.047685,
6.688762, -5.565203, -5.293573, 4.4238534, 5.210335, -7.560741,
-2.89979, -7.104123, 7.1043224, 3.6326268, -3.8568738, -9.158733,
-2.7131915, -3.807796, 5.7128754, 3.3754377, 7.636, -1.825007,
-2.2417538, -4.0779576, 7.607329, -3.0003328, -4.2284374, 1.756126,
-7.2746177, 5.119315, -7.008882, 9.408904, 5.5560246, -5.2164817,
-10.315503, 5.603721, 4.314763, -7.313927, 5.1783695, 5.6405606,
-0.0637994, 6.332728, -0.029464703, 0.41828826, -1.2522358, -9.733106,
-7.6832514, -9.785729, 3.6201987, -3.792392, -0.09639487, 7.5038815,
3.234203, 5.8293576, -8.6437235, -2.7220767, -7.9614167, 7.3443804,
2.5719478, -3.7873058, -9.35629, -4.0944405, -3.3323522, 5.011421,
3.3484938, 7.3983216, -0.3570103, -2.568238, -0.5652298, 4.6586185,
-3.1916215, 9.028054, 8.925563, -4.470107, 0.6164475, -4.0510554,
-4.3599396, -8.937277, 2.588883, 0.87693846, -9.869485, -4.4130516,
5.3250628, 7.179221, -8.835256, -1.5040814, -7.866832, 6.3013816,
2.25202, -2.6482968, -9.315557, -4.2005906, -3.0808558, 4.5942655,
3.1603856, 7.9107356, -0.21996377, 2.5606067, -6.229099, -8.12502,
9.204749, 8.939733, 6.7557116, -4.857987, -8.677028, 3.0097206,
-0.44091088, -6.8992834, 0.24465187, -8.561878, 7.57091, -4.580384,
-5.691582, -1.3971928, 4.2977967, -3.7909253, -7.553252, -4.196168,
-6.168398, 7.4481916, 7.928769, 2.254925, 7.3448634, 4.774288,
-5.928198, 7.076739, 0.85800385, -7.157043, 3.7899494, 5.0018516,
0.8073772, 5.4966364, 0.7139369, -0.08507292, -0.7486053, -10.038788,
-8.435511, -8.7984, 3.3735638, -3.1168935, -0.74964434, 7.825164,
-2.8241596, 0.8149421, 4.4502425, -3.296524, 8.8940935, 9.405465,
-5.450503, 0.24939519, -5.495073, -4.644471, -8.004534, 2.4989269,
0.17033602, -7.580766, -6.062238, -4.2435584, 0.27865496, 2.501646,
-2.7539287, 10.010319, 9.223915, -5.163252, 0.31426778, -3.5596163,
-5.465124, -9.200085, 3.8964763, -0.8809309, -9.030218, -3.0678484,
-2.2983913, 1.0483304, 2.75732, -3.2627459, 9.605366, 9.4755745,
-6.007699, 0.75596595, -3.4047768, -3.965859, -8.3125515, 2.5078897,
0.3675871, -7.8800497, -3.656217, 2.2554514, -6.3346195, -8.693943,
7.517176, 9.424983, 7.7167187, -4.159752, -8.556211, 3.9460046,
-0.9782854, -6.724321, -0.9769695, -8.800263, 8.599704, -4.923903,
-3.686106, 0.9819232, 3.8127258, -2.66767, 10.422246, 9.262399,
-6.1508665, 0.20209369, -4.469839, -3.6975002, -8.774928, 3.221267,
0.6888531, -8.207254, -4.8954387, 5.383807, 5.6646347, -9.427935,
-4.5952783, -7.5784526, 7.11232, 1.7821006, -4.572479, -9.243154,
-3.9164977, -3.030219, 4.7241654, 3.8767965, 7.019332, 0.061039727,
5.2285237, 5.1552052, -8.391638, -3.5558589, -7.377455, 6.7287097,
2.3667665, -2.7675612, -8.175302, -3.7957747, -4.7340255, 6.687091,
3.9732506, 8.250127, -1.0829442, 0.48004612, -6.936632, -7.930858,
7.8817573, 7.207298, 6.5677724, -3.7371645, -8.809704, 2.994783,
-1.8757707, -7.4818606, -0.5348825, -8.966385, 6.1881695, -5.4241967,
-5.4247994, -4.418338, 1.0166323, -1.4973878, -4.220349, 2.3012846,
-7.0335546, -4.238467, -2.5224638, 0.22608232, 6.393334, -5.398116,
-0.0341889, 1.2258228, -8.402282, 6.2603784, 7.9857163, -3.7194378,
-8.812128, -4.0846305, -2.116918, 5.408279, 7.06561, -10.533988,
0.1787471, -1.1703452, -4.8611317, -7.032381, -3.461639, 8.892908,
-7.419972, 4.3958707, 5.4875984, 0.50646764, 4.2985168, 0.31382585,
1.4939059, -2.167627, -8.379008, -7.6434336, -9.37636, 2.4499242,
-4.682043, 0.8628238, 7.608405, 1.3059819, -7.342236, -7.525493,
8.315856, 8.505296, 6.964067, -3.6707375, -8.703106, 3.836298,
-1.914977, -6.7886605, 0.6464979, -8.939825, 8.43957, -3.5915687,
2.3601024, -3.617801, 1.1452205, 0.671262, -5.357131, 10.296615,
6.667269, 9.003649, 8.307721, 2.9479306, 8.750661, -8.581014,
-6.394323, -8.928466, -4.396786, -1.762679, -5.1490183, 5.847296,
-2.2923465, -4.1038527, 0.9727451, -7.774064, 6.469503, -9.037836,
9.666348, 6.4577565, -6.4882703, -10.99065, 5.5724454, 4.833156,
-7.0755854, -3.6153402, 0.36412764, -0.67108864, -3.3442469, 4.049752,
-6.8043795, -3.9290495, -2.536694, -1.1258167, 5.940458, -6.2133055,
0.40261146, 2.7706096, -9.775572, 6.95987, 8.277488, -4.7535224,
-7.432352, -5.017416, -1.7621588, 5.313657, 7.220258, -9.866353,
-0.5702063, -2.6676333, -5.619131, -7.7658467, -3.5397823, 8.799807,
-6.0638175, -3.8162751, -0.9962679, -0.8233546, -4.0246396, 2.592846,
-4.461756, -5.615986, -1.4654754, -1.0798136, 5.9046493, -6.038383,
-0.012276665, 1.4494083, -8.840336, -1.7345922, -4.4044056, 6.1087646,
-2.606622, -4.2943096, 1.509161, -6.63766, 6.0652733, -9.137064,
9.280674, 5.060043, -6.2463727, -9.701922, 6.8098507, 4.1013637,
3.1197705, -3.255434, 0.64717627, 1.1733677, -6.492831, 8.540086,
4.7175856, 9.286526, 7.9101677, 1.861041, 8.140761, -8.443782,
-5.3475046, -9.330125, -4.432139, -8.028736, 4.8407736, 5.4504232,
1.3517575, 4.7500486, -0.5376609, 1.3261853, -1.485236, -9.228866,
-7.9228034, -10.199576, 3.3818755, -3.3606176, 1.7853596, 7.4404225,
-3.9205072, -1.1700501, 6.3223853, -5.046071, -8.761395, -3.2786324,
-6.798722, 8.748542, 6.1324306, 0.6052422, 8.552526, 6.4171443,
-6.0377226, 7.8079367, 0.90124434, 5.828331, 8.476322, -3.4633813,
-8.433314, -4.526709, -2.6467595, 6.9092946, 6.3289633, -8.540905,
-0.86627376, -0.8452045, -5.614988, -7.8593144, -3.117919, 9.300525,
1.4230325, -7.01487, -8.795086, 9.648165, 9.457963, 6.383321,
-4.674588, -8.663086, 4.943768, -1.7115909, -6.3998895, -0.26660463,
-9.310628, 8.935663, -3.9340756, 6.8041105, 8.412243, -3.5535102,
-8.561621, -6.445503, -1.3832904, 5.9804077, 7.858094, -9.338858,
0.085718356, -2.462179, -5.200329, -8.315257, -3.7786214, 8.172603,
-2.2765777, 8.490346, 5.725685, 2.054142, -6.054119, -6.8328466,
-7.3958044, 8.552261, 1.8480253, 4.8415513, -9.136547, 10.356239,
5.9734063, -5.2729816, -5.6226034, -4.519781, 8.995026, 5.880455,
3.1360512, -7.1995945, -7.301658, -8.510186, 6.6246533, 2.2335546,
4.6977077, -8.729457, 9.328089, 6.5062814, -6.3678055, -6.942317,
-5.705383, -4.253298, 0.59788376, -0.23254518, -3.5476296, 2.0490863,
-7.833957, -5.663378, -3.1761703, -1.0263915, 5.0124936, -6.0984044,
0.3384851, 1.6908922, -9.525993, 2.1022243, -7.420413, -7.958498,
8.565332, 8.657723, 6.0434046, -4.5466924, -8.338759, 3.9254982,
-0.95311093, -8.1877165, -0.43785557, -8.560197, 8.533888, -3.3718548,
6.363491, 7.911365, -2.2591245, -8.356364, -5.3390894, -1.757546,
5.3571672, 8.748758, -10.688054, 0.3690365, -2.8702703, -5.837193,
-7.3073144, -3.8250103, 9.268955, -5.718677, -2.8842857, 0.072342195,
-1.5217837, -5.323056, 1.9623281, -6.5125237, -4.483654, -2.2244272,
-1.0119525, 5.9303594, -6.8099084, 0.34433538, -0.2455038, -9.201819,
-2.2776294, -5.126559, 7.2859063, -3.1472993, -5.42094, -0.5807105,
-6.2989206, 6.4756045, -8.447406, 9.429546, 5.0745964, -6.7787023,
-10.191577, 4.8251677, 3.6219022, -7.819583, 5.376076, 5.4956985,
1.3416452, 6.0052376, 0.13386025, 0.1342827, -0.6991465, -10.268442,
-7.6158986, -10.454715, 2.8709688, -3.5777876, 1.3449323, 7.0558476,
5.6882324, 7.0041876, -4.8593683, -7.046389, -5.68021, -1.6354411,
6.248854, 7.9236717, -9.986629, 1.8557837, -1.5581475, -4.8145943,
-7.039025, -3.2732098, 8.447314, -7.6663084, 4.841292, 5.8776445,
1.5785995, 5.0404344, -1.5078632, 0.108006015, -1.6622211, -9.710668,
-7.409182, -7.981443, 2.8146787, -3.8645926, -0.0100414455, 8.238399,
3.4793339, -7.253042, -9.282994, 9.512531, 8.232679, 5.7250185,
-3.4902158, -8.490558, 3.5194814, -0.67602015, -7.264153, 0.09031108,
-9.549615, 8.445451, -4.911013, -3.0187998, 8.886259, 5.6022925,
1.5209948, -7.4390616, -7.21803, -9.505658, 7.4093924, 3.1595752,
4.387607, -9.764957, 9.193929, 5.5546193, -5.1350408, -6.417987,
5.349413, 8.165493, -3.9138405, -7.866619, -4.3123507, -1.9473151,
6.4022694, 8.208503, -9.6715975, -0.90967584, -1.2980076, -6.371946,
-6.6179276, -3.823704, 10.936851, 6.3446784, 8.107528, -4.640459,
-6.898585, -5.356469, -1.5997015, 6.6618876, 7.3801913, -9.324014,
0.5915346, -1.1492876, -4.6121483, -6.9841557, -3.3600368, 8.072164,
-4.1198525, 0.7555654, 4.941512, -2.389357, 9.8780575, 9.580077,
-4.893425, 0.43282157, -5.6755767, -4.225428, -9.993709, 3.0053184,
-0.04942825, -8.948761, -5.480621, 5.013179, 7.842478, -3.5478444,
-8.016402, -5.5619717, -2.5675163, 6.6098294, 7.667936, -9.515481,
0.942055, -1.5107092, -5.222767, -7.493429, -2.766411, 7.754133,
-4.2082543, 0.57620376, 3.469063, -3.2948682, 8.778304, 9.19441,
-4.303763, 0.99954015, -4.4905763, -2.7979724, -9.965554, 2.9497707,
-0.45160705, -9.580071, -4.9484, -5.6266503, -4.394396, 1.0485666,
-2.7401724, -3.550599, 1.3892375, -6.698588, -4.1471157, -3.3405206,
-1.1635257, 6.1839423, -5.9323983, 0.69377786, 3.2317998, -10.165961,
-5.684705, -2.4857993, 4.9103174, -4.0576863, -8.405846, -3.3769405,
-6.420691, 9.373656, 4.2024326, 2.4549546, 8.344207, 5.6732264,
-5.034868, 8.889903, 0.45845333, -7.336739, 3.37069, 6.025308,
2.1142733, 4.547738, 0.82624567, 0.35414946, -1.1562195, -9.839273,
-7.19604, -9.087151, 2.1583009, -4.528522, -0.8507979, 8.282888,
-3.4223263, 9.483057, 4.8964977, 1.3152539, -7.239334, -7.621559,
-8.882203, 7.9921227, 1.332292, 4.514284, -9.959491, 8.843186,
6.5739317, -6.4778876, -6.7510552, 5.551059, 9.295274, -4.194333,
-8.634327, -5.217347, -1.5350995, 6.22338, 8.200944, -9.476569,
-0.156049, -1.9489131, -5.909654, -6.990375, -3.4634, 7.602987,
2.6166887, -5.230665, -9.313794, 8.713105, 9.474653, 6.6204896,
-4.058692, -8.657631, 4.3016458, -0.69215536, -8.200627, 0.85234815,
-8.493091, 8.280416, -4.768682, -6.353651, -4.551934, -0.16042583,
-1.8285453, -3.9708822, 2.1059275, -8.776885, -5.6415973, -3.0975688,
-0.55821973, 3.7802658, -6.3563356, -0.083684735, 2.8201277, -9.340507,
-6.834617, 4.0006237, 6.09603, 0.85212725, 4.7007055, -0.44478396,
0.3880529, -1.7211156, -9.642818, -7.608396, -9.491241, 2.6819296,
-4.909339, 0.44560245, 8.088887, 5.087524, 6.0714912, -8.883549,
-3.9490192, -6.6395226, 8.700229, 3.5459273, -3.6170897, -9.537077,
-4.5263314, -4.6749463, 4.5169587, 3.875124, 7.645091, -0.34407547,
3.1453638, -4.650583, 0.7416898, -0.43035465, -7.738161, 9.909119,
5.8575706, 8.749802, 6.919733, 1.462462, 8.253562, -9.347276,
-6.597663, -8.638823, -2.4942796, -2.8154185, 9.519953, 4.3788958,
3.1820443, -7.1593723, -6.7228303, -8.185514, 6.330667, 0.78973407,
3.093492, -8.704501, 9.011896, 8.439592, -6.148191, -6.234315,
3.673099, -4.040092, -0.31107458, 0.2150231, -6.5641985, 9.842855,
6.1524286, 8.442407, 7.0891023, 2.1454902, 7.86101, -8.592927,
-5.1526494, -8.516474, -3.3828113, -3.03659, 9.3362665, 4.165478,
3.3825407, -6.7840524, -7.1358347, -8.709051, 6.3805346, 1.3421704,
5.0017414, -10.048136, 8.6653595, 7.02451, -4.923225, -5.8602333,
-4.9744606, -1.4571482, 5.091973, -6.424844, -9.455028, -4.347189,
-6.8880315, 9.189641, 6.6925206, 3.0916681, 7.233014, 5.106985,
-5.980149, 8.218799, -0.8851272, -2.5423663, -5.2855573, 5.999234,
-3.766257, -5.011624, 1.8475406, -7.8223495, 6.132461, -8.860995,
10.562944, 6.362709, -5.4208374, -9.675263, 6.1474867, 3.3028345,
-2.6507325, 8.861909, 5.409023, 2.550961, -6.3101707, -5.9662743,
-8.823625, 7.80089, 1.8051136, 4.388368, -9.67941, 9.466094,
7.0654626, -6.325972, -4.8988295, 3.9787474, -5.1343126, 0.14310281,
1.0838717, -6.945494, 10.603511, 5.0578694, 8.761074, 8.285763,
1.5441002, 7.1448836, -8.530209, -5.3597345, -9.331281, -4.0858974,
0.850366, -6.1672444, -8.490257, 9.424773, 10.053324, 6.425281,
-3.8040621, -8.663305, 3.186106, -0.36623588, -6.5538006, -0.2631391,
-9.280027, 7.553178, -4.0034704, 5.633687, 4.669405, -8.405149,
-1.7841642, -8.601697, 7.5378513, 1.9043722, -3.7352831, -9.291011,
-4.2926536, -1.5909951, 5.183243, 3.522366, 8.088651, -1.6800812,
-5.001394, -1.8060129, 4.6114826, -5.8048253, -8.161239, -5.174723,
-6.3604364, 8.321998, 6.7402973, 3.3408406, 6.258053, 5.666335,
-6.8674684, 6.5822706, 0.06407154, -6.098208, -2.9131985, 1.2523654,
-2.279653, -3.7399683, 3.1673112, -6.939357, -3.3934207, -1.2641739,
-0.16200364, 5.878035, -5.2749624, 0.38611025, 1.8650235, -9.316981,
-4.837732, -2.7312233, 5.487975, -4.336039, -7.501555, -3.690873,
-5.779878, 8.631322, 6.60208, 2.1915321, 7.6337976, 7.0041156,
-6.6811953, 7.2481694, 0.8550871, 4.183987, 5.372437, -7.442821,
-3.2915533, -8.098771, 7.596767, 2.0619025, -3.2752714, -9.814909,
-2.2303226, -4.43973, 4.3668966, 2.7284126, 9.083137, -1.4830935,
-7.7036104, 3.7200797, 4.9053006, 1.0363547, 6.5483513, 0.5385447,
1.1089149, -1.1902317, -10.2737465, -7.8712153, -8.692075, 1.847423,
-3.2672932, 1.7972403, 8.894601, 6.261283, 9.052175, -2.6986508,
-7.34303, -5.110045, -1.0263683, 6.080255, 7.1760406, -10.492579,
-0.018181792, -1.9084237, -6.0025578, -7.50697, -3.4744833, 9.209759,
-1.3516569, -5.312567, 7.3274055, -2.13184, -3.7769036, 0.8430275,
-9.24949, 5.5947304, -8.947128, 9.05296, 5.5304656, -6.3189607,
-10.466786, 5.5680685, 3.6362877, -2.7697423, 9.77641, 5.9590893,
3.05344, -7.221822, -7.6638417, -8.739707, 6.085615, 2.2485178,
4.05813, -9.9145355, 8.281905, 7.008373, -6.126108, -7.1824427,
4.1208754, 6.3138857, -8.8672285, -2.624828, -7.5682845, 8.285271,
3.1459513, -4.2887206, -9.560121, -3.3751166, -3.54137, 4.2735033,
3.462089, 7.8754764, -0.51898134, 3.9308856, -3.7750916, -0.018514993,
-0.1569453, -5.7293496, 9.688416, 4.8266087, 8.412977, 7.5698156,
0.9199105, 7.865107, -9.023831, -6.369078, -9.612542, -4.632297,
2.859743, -6.7002254, -6.428803, 10.593252, 9.185626, 6.093511,
-3.2108176, -6.8543367, 2.5380402, -2.447413, -7.99466, 0.3114545,
-8.964418, 8.935248, -5.664189, -2.0496101, 8.332009, 5.190838,
2.7841866, -7.454105, -6.205746, -8.549381, 7.898965, 3.3500552,
3.9896798, -10.115926, 8.775537, 6.0777855, -5.807189, -6.1246943,
-1.7453873, -4.8780723, 5.9857354, -3.1994214, -4.412534, 1.7934477,
-8.535377, 6.0600963, -8.071554, 9.873153, 6.361801, -6.0777283,
-10.0187435, 6.7238593, 4.2066665, 4.268113, 4.8080344, -9.529007,
-2.4946408, -6.7143826, 8.1484995, 2.9629154, -2.935059, -7.5404963,
-3.5478966, -3.0440428, 4.3675747, 1.7977191, 7.2655606, -0.5621128,
-4.4248877, 1.213106, 3.6311016, -2.964644, 10.45088, 8.463441,
-5.4732504, -0.59316665, -2.8497858, -2.9078217, -8.736167, 2.739656,
-0.35266805, -7.607295, -4.239683, 0.911149, -6.259438, -9.980769,
8.1178, 7.8597293, 6.107698, -4.012949, -8.275245, 2.9548562,
-2.0175138, -7.2341766, -0.48184815, -9.090778, 7.566734, -4.6976566,
4.0334306, -4.134351, 0.034857232, 0.39927483, -5.689589, 8.895091,
5.953873, 8.522263, 6.513373, 1.5587797, 7.612109, -8.206418,
-6.913628, -8.447271, -3.5571465, -2.209026, 9.49946, 4.763388,
1.7162502, -7.4589405, -6.819809, -9.588825, 5.278578, 2.327892,
4.794206, -11.242363, 8.691385, 7.0822606, -4.312971, -6.348945,
-8.090201, 4.047216, 5.4974623, 1.6941508, 5.340855, -1.4634264,
-0.22087532, -0.8216299, -11.409916, -7.0109725, -9.006556, 3.6495314,
-3.851879, -1.27157, 8.452583, -6.6063185, -3.1731997, 0.92703277,
-0.8826011, -5.131586, 3.0854187, -7.0272474, -4.4156013, -2.5723913,
-2.121965, 5.9895763, -6.7270846, -0.6621804, 0.7826399, -8.2921095,
2.0135007, -6.3783274, -8.576093, 7.687055, 8.689418, 6.1230927,
-3.532321, -9.583847, 3.5711737, -1.096816, -6.964507, 0.3885385,
-8.904429, 7.6264496, -6.1600018, 3.0631917, -6.1792016, -8.987658,
9.15775, 9.144175, 6.173463, -4.1356525, -8.1615925, 3.8331258,
-1.8923012, -8.27035, -1.0684416, -10.43242, 7.33431, -4.7127767,
-7.5271974, 4.244059, 5.44081, 1.3070996, 5.6851335, -0.3421151,
0.49568284, -2.3086596, -9.715552, -6.868588, -9.148251, 2.7135634,
-2.3855941, -0.78117067, 7.3747106, 1.8602474, -6.565698, -8.621007,
8.852345, 9.589714, 6.652946, -3.3934784, -8.73662, 3.4857519,
-0.36061612, -6.9329824, -1.0574634, -9.066194, 6.8016505, -4.791546,
-5.7584367, -2.1796103, 4.7094307, -5.457627, -9.41753, -3.9835794,
-5.819224, 8.790603, 6.7286997, 3.434269, 8.294754, 7.382539,
-6.61804, 7.3482413, 1.3352708, -4.323992, -0.7485251, 4.9006476,
-5.2371535, -7.2854, -5.69179, -7.258583, 9.620038, 6.927184,
3.0337868, 7.115194, 5.4561687, -6.68245, 8.934786, 0.34693494,
-6.7689514, -4.044323, 0.07180737, -2.904996, -3.5474708, 3.1294703,
-6.7540574, -4.953487, -2.8138072, -0.88316524, 6.1226754, -5.538686,
-0.22923289, 1.9056883, -8.7508955, -2.6438503, 9.225369, 4.6155806,
1.1550951, -6.0796514, -6.3537564, -8.284605, 6.6869516, 3.0042562,
3.1801558, -9.17751, 10.931516, 5.955477, -6.1496263, -6.2937446,
1.6489857, -7.4887266, -8.014497, 7.503758, 10.120153, 7.1383915,
-4.1192513, -9.868753, 3.4317431, -1.241879, -8.266654, -0.45711467,
-8.241093, 8.651857, -5.4715953, -6.553812, 3.9577007, 4.418358,
-0.49517465, 4.732808, -0.20364787, 1.5740428, -0.97798604, -9.278058,
-8.122255, -9.779633, 3.7121615, -3.5723634, -1.3004067, 7.6798515,
5.0094295, 6.221851, -9.514052, -1.604081, -9.438889, 6.7239323,
2.1446404, -2.9748333, -8.57573, -3.8971238, -3.5047317, 3.9075074,
1.7030867, 8.252813, -0.07010722, 5.047089, 4.3438673, -8.773425,
-2.6230845, -7.309032, 8.307136, 2.1922452, -2.3464022, -9.190149,
-2.850301, -2.9241066, 3.9527025, 2.9660769, 7.7083898, -0.11779228,
-5.8215933, -2.0513673, 2.7862425, -5.837192, -8.517714, -3.8103993,
-6.963649, 8.837324, 4.6365557, 2.6282892, 6.25356, 5.227629,
-6.399615, 8.744565, 1.0466896, -1.6773224, 10.783139, 4.2682705,
1.6305621, -6.1487145, -6.402786, -7.545633, 7.7322726, 1.7707958,
4.57491, -8.812218, 9.972534, 7.003945, -5.0065455, -5.544994,
6.49627, 7.1661944, -4.3503723, -8.218473, -5.1650457, -1.2070343,
5.7701235, 6.397565, -9.663849, 0.087644, -0.14172882, -5.5322213,
-6.3327155, -4.066965, 7.756945, -2.6466424, 8.964166, 4.6138234,
2.4825103, -6.843265, -6.3672614, -8.89483, 7.3785677, 0.6235598,
4.802881, -9.345768, 10.096804, 4.6214743, -4.2913556, -6.4612136,
3.9682038, 5.347114, -8.83599, -3.6481912, -7.9124813, 8.037474,
2.862584, -2.9310222, -8.6263, -3.414027, -4.056747, 4.375467,
2.8855524, 7.938516, -0.33157, -5.299468, -2.2204027, 5.662303,
-5.8766065, -7.867662, -4.1916714, -7.1404524, 9.026838, 5.6531897,
3.114804, 7.57812, 5.6560144, -5.8985543, 8.233792, 1.5971086,
5.1944265, 5.671571, -8.121056, -2.5626402, -6.5441175, 7.4272985,
2.1957467, -4.023068, -8.554107, -5.4888463, -3.625336, 4.4480357,
1.7960198, 7.0944686, 0.011058775, 2.1608014, -7.0070057, -8.908396,
9.543954, 8.903522, 5.6433964, -4.0628004, -8.870431, 4.959041,
-1.5934263, -6.9771037, 0.050167263, -10.001404, 8.18224, -4.2755985,
5.1621923, 5.984071, -9.516025, -1.4925612, -8.131434, 7.076523,
1.8341045, -1.4932094, -8.559383, -4.376036, -2.891587, 5.1305184,
2.6197405, 8.33268, -1.1334059, -5.094096, -0.17547357, 3.7314086,
-2.892594, 9.092681, 7.4639273, -4.6941366, 0.325528, -2.8611047,
-4.8550067, -9.871375, 0.31529754, -0.15191633, -8.339646, -4.855308,
-1.6479719, -5.319242, 6.5386705, -1.5265297, -3.5664678, 1.798347,
-7.164413, 6.268638, -7.8074217, 10.829918, 4.0787735, -6.946495,
-8.972947, 6.115406, 3.6669424, 3.2742665, -3.336139, 0.6641795,
1.0515046, -5.4893293, 9.013979, 6.1290936, 7.647568, 7.2630835,
0.6930706, 8.245093, -8.262533, -6.0004716, -9.052135, -4.1426806,
-7.254432, 4.00798, 4.9424148, 2.6036742, 6.0771585, -0.65650856,
0.8543153, -1.3663346, -9.217531, -7.7460327, -8.538667, 2.9701424,
-3.7563424, 0.9009472, 8.314629, -6.6194797, -4.6891484, 0.34468392,
-1.5767236, -3.629655, 3.154274, -6.2331443, -4.550625, -2.8192184,
-2.057007, 5.13941, -5.331128, 1.4155967, 0.98424715, -9.485717,
6.4867973, 8.142159, -4.375856, -8.444515, -5.4585853, -1.2130208,
4.6308312, 7.9517965, -10.207106, -0.71727717, -0.99303335, -5.5743766,
-6.7513766, -2.94113, 8.857134, -5.317979, -1.4048023, 5.4052396,
-5.0097756, -7.578903, -2.9009626, -6.6883097, 9.363578, 5.9162455,
3.49758, 8.172268, 6.7793326, -6.2027497, 7.3833294, -0.18932506,
-4.106047, -1.8479048, 5.261147, -5.491506, -7.594243, -4.3853784,
-5.877226, 8.521441, 5.560786, 3.1436365, 7.2159443, 6.507792,
-8.179182, 7.8548236, 0.46512112, 6.381178, 8.278895, -4.040741,
-7.682524, -4.5759535, -0.00888081, 6.958354, 8.518543, -10.301519,
0.7062636, -0.7148163, -6.185277, -7.9426055, -2.0311632, 8.175625,
1.8696405, -6.112564, -9.506553, 9.506024, 9.452237, 6.499074,
-4.359625, -9.209316, 4.0369368, -2.6413324, -7.7813807, 0.56236553,
-9.177901, 9.550248, -4.106824, -3.160993, -0.46009862, 3.8370736,
-3.7296188, 11.412559, 8.091174, -4.9574842, 0.7452831, -3.2569637,
-3.7072506, -9.484321, 3.137049, 0.69295424, -8.8218155, -4.258664,
-3.3286479, -0.03589475, 5.0759387, -3.5094073, 9.618779, 9.421228,
-5.002332, -0.9628687, -3.3728616, -3.7827327, -9.735159, 2.6865277,
0.31601584, -9.866854, -5.024821, 7.185669, 8.453019, -3.0568688,
-7.387575, -4.5324626, -2.219079, 5.3901467, 7.210178, -11.373771,
1.0763981, -0.7972793, -5.8611894, -7.862232, -3.2983398, 8.8946085,
-2.4997404, -4.2117186, 6.1179805, -3.2927575, -4.7509055, 1.8010701,
-9.436227, 6.320859, -8.484997, 8.56113, 5.661704, -5.69266,
-9.47681, 4.452949, 5.0936193, 1.4718101, -6.867808, -7.8326106,
9.243548, 8.635009, 5.9507318, -4.337751, -8.54341, 4.369883,
-2.1571703, -6.433089, 0.483299, -8.548626, 7.8623505, -4.929359,
-5.2266955, 0.38607156, 2.4996762, -2.2148552, 9.047054, 7.7390203,
-5.5342975, -0.4540826, -3.1489441, -4.1655436, -9.02115, 3.0181417,
-0.013720537, -8.442493, -3.9483678, 5.901988, 8.412507, -2.8885925,
-7.2286825, -5.6591425, -2.221577, 4.815012, 7.0306234, -9.036296,
-0.57197016, -1.161633, -6.2229505, -7.528442, -2.7522423, 8.396547,
6.8798733, 10.322063, -3.5593238, -8.717177, -5.8516965, -1.0200111,
5.2369323, 7.333194, -9.594722, 1.2895312, -0.7346435, -4.9526343,
-9.116568, -2.6029594, 9.352054, 5.210012, 8.991413, -2.8021963,
-7.496477, -5.4732943, -1.7648114, 6.5576596, 8.320477, -9.693961,
1.7318175, -2.2803392, -5.5716205, -7.863834, -4.0072956, 8.751036,
3.004901, -3.925182, 0.9022056, -0.04514931, -7.3671117, 9.707366,
6.2073674, 7.690172, 8.653606, 1.9842789, 9.201749, -8.2793045,
-7.8459373, -9.201832, -2.7512927, -1.7633613, 8.6521845, 5.626422,
3.5823984, -7.133614, -7.1919613, -7.8209586, 8.429223, 1.6562982,
3.8673208, -9.78556, 8.457082, 6.0057964, -6.456116, -6.900959,
4.3286724, -4.1530027, 0.22359423, 0.4065021, -6.0350533, 10.179146,
4.2827163, 9.076586, 7.7028766, 2.0373757, 8.7587805, -8.361892,
-6.1528535, -9.245167, -2.814172, 1.4542931, -6.71657, -10.222914,
9.102644, 10.312841, 5.1360545, -4.317874, -7.816183, 4.5616503,
-1.5107074, -8.043811, -1.474708, -9.239823, 9.615861, -4.6302495,
-7.4324126, 4.390486, 4.154037, 0.9810606, 6.5509086, -0.585615,
0.23184587, -1.8283142, -9.552528, -8.250358, -9.030697, 2.8663275,
-3.171397, -0.564245, 7.913091, -6.1497135, -4.5897865, -0.174877,
-1.1204481, -4.2094736, 2.2600157, -7.7410693, -4.3183875, -3.3197262,
-0.25546142, 6.4281936, -7.298856, -0.36596882, 0.8007209, -9.526009,
-1.9902167, -4.7573347, 6.464833, -3.7120461, -3.2673295, 1.4676216,
-6.5066066, 5.830128, -9.719415, 11.104781, 5.4021273, -6.549846,
-9.597796, 7.126213, 4.1210113, 4.635898, 2.739748, -7.653324,
-3.2089489, -8.243657, 6.9831567, 2.1533124, -2.9534507, -9.820438,
-4.534758, -3.2561226, 5.0281844, 3.2002287, 7.1933293, -1.2404875,
-7.52308, 4.4847116, 5.1501746, 1.2320029, 5.0086575, -0.64990515,
-0.24434268, -2.423568, -10.10742, -7.9641585, -9.299234, 2.5383005,
-5.3618727, 1.2736243, 8.911865, -5.4337044, -0.9275393, 5.177542,
-4.214206, -8.1707735, -4.035256, -6.7278295, 9.166548, 4.9460273,
2.7278159, 7.3801794, 6.581235, -6.39999, 7.124066, 0.79199034,
-5.951818, -2.3884735, 5.7873106, -5.4010324, -8.728347, -4.286513,
-6.433126, 8.902299, 6.1098886, 3.4331465, 8.108536, 6.362802,
-6.2303705, 8.311305, 1.006832, 1.0325776, -6.8184295, -8.848388,
9.32505, 8.937919, 6.5259967, -2.5531168, -7.475707, 3.6509986,
-1.3251555, -7.8089066, 0.72998685, -9.751349, 8.218064, -4.788562,
-4.43647, 1.4069009, 3.0661678, -2.0852513, 8.373691, 9.669811,
-3.6610136, -0.20411494, -4.161883, -3.8758821, -9.492839, 3.4640818,
0.18811505, -9.194544, -4.8052707, -1.9562751, -5.396671, 7.2244134,
-4.1042705, -4.7859893, 0.6733157, -7.626872, 5.6483974, -7.199235,
9.142119, 7.1537504, -5.7196975, -9.825119, 6.6425967, 5.9080963,
-2.9319873, 9.074899, 4.530905, 2.7906172, -6.7015324, -6.643788,
-9.126641, 6.9821987, 1.7195095, 4.437568, -9.882999, 9.60104,
8.101633, -5.1434307, -6.5917172, 5.5131564, 8.238954, -2.888748,
-7.6163554, -4.881376, -0.96246946, 6.7148695, 6.75368, -9.739211,
-0.3789184, -1.6395797, -5.326239, -6.427544, -4.164509, 9.19525,
-3.4204237, -0.55911225, 3.598112, -1.8717264, 9.479167, 7.933319,
-6.222248, 0.0584301, -3.3787603, -5.435525, -8.947462, 3.1225271,
1.3750818, -8.738569, -4.072309, 6.3996367, 6.4317718, -4.9360414,
-7.2259774, -5.824084, -2.149345, 7.2498875, 7.44395, -9.250494,
0.0985766, 0.22396015, -5.9810033, -6.3815455, -3.2906528, 7.700037,
-5.3679686, -2.1498203, 5.72123, -4.676052, -8.522402, -3.7256706,
-7.5294423, 9.558535, 7.666502, 3.032926, 5.9701796, 5.68268,
-5.839621, 7.556142, 1.6773838, -9.121028, 4.6026692, 5.6779413,
1.7488263, 5.142811, 0.092244714, 1.4514242, -0.45954087, -9.512038,
-7.7446475, -8.593779, 1.6776468, -3.9390335, 1.1063131, 8.83397,
-4.378659, -2.0124521, 6.0388727, -5.5405765, -9.140235, -2.6808467,
-8.053059, 8.076836, 5.7234883, 4.4238896, 8.694013, 7.119191,
-6.50006, 9.549504, 1.0446912, -2.3713434, -5.294737, 7.1503954,
-3.4508197, -3.9949348, 0.613053, -7.2616463, 6.147369, -9.042264,
9.884995, 6.5161295, -6.5703526, -9.06929, 7.357897, 3.5658383,
-2.7428699, -4.092422, 7.093502, -3.4786215, -3.9189878, 0.5840764,
-8.631479, 6.861359, -9.051518, 9.139593, 5.1627493, -5.8351207,
-9.9296465, 6.6928754, 3.1995044, -1.7829196, 8.657274, 4.4509664,
1.288035, -7.1906323, -6.615999, -8.308435, 6.678007, 2.6310244,
5.1103983, -9.298905, 10.711954, 6.1072, -6.624476, -7.608605,
-2.9679024, -4.923202, 6.2134686, -1.3777441, -6.3229804, 0.72818786,
-5.97353, 6.129231, -7.981595, 9.807617, 4.977762, -7.5950274,
-9.745739, 5.766304, 3.5769122, 2.6376789, -7.3882236, -8.777024,
8.414234, 9.041251, 5.878416, -4.2420764, -8.925153, 3.5953023,
-1.5893755, -9.609849, 1.3406186, -8.550091, 7.9229975, -4.811227,
4.5515013, 5.3228703, -7.814619, -3.571171, -7.500082, 6.85732,
2.6725612, -3.42738, -8.663778, -4.165301, -3.8533783, 4.7932143,
3.4117503, 7.3254595, -1.1638387, -4.023022, 1.2129718, 4.677602,
-2.3377054, 9.86466, 8.818226, -3.663859, -0.02484208, -3.7710817,
-4.7575436, -9.914077, 2.525394, 0.20583992, -9.522972, -5.4339695,
5.9962373, 7.7580905, -2.3413475, -7.534203, -5.1056395, -1.0800859,
6.7891335, 7.156591, -10.08548, 0.33171728, -0.8420172, -4.769477,
-7.770972, -2.710206, 8.721985, -2.4802833, 0.7338031, 4.4850903,
-3.48118, 9.951911, 9.44162, -6.0733833, 0.9419685, -3.8713555,
-4.580449, -9.526854, 0.78063136, 0.4689161, -8.787125, -4.562927,
-7.160183, 4.7655616, 4.9403553, 0.1145123, 5.2742224, 0.31238,
1.7184484, -2.2877502, -9.790217, -8.114754, -8.422353, 2.7037492,
-4.348009, 0.56250054, 7.8680825, 6.3795466, 7.2835784, -3.4583366,
-8.23565, -5.635168, -0.7405026, 6.9965515, 6.5166693, -10.601033,
-0.07287533, -1.6705023, -6.0183725, -6.874243, -3.0045238, 9.142453,
5.2538953, 6.2413235, -8.56029, -2.9853377, -7.548832, 6.4572735,
2.330436, -2.860829, -8.27947, -3.9695408, -3.3439307, 4.0148683,
3.243594, 7.6897798, -0.7488675, -2.0841026, -4.095799, 7.2485023,
-3.723259, -2.8436575, 1.3911572, -7.1827154, 6.8066154, -6.853202,
8.730635, 5.2872114, -7.6905885, -9.754702, 6.5650043, 4.1097336,
2.1258192, -6.961536, -9.79647, 9.2204075, 11.07073, 4.8798923,
-3.930321, -7.5981774, 3.7708154, -1.2761397, -8.468687, 0.06648892,
-9.841965, 6.653463, -3.988186, 5.394244, 5.1445603, -8.0223875,
-2.1810744, -7.5366745, 5.6139307, 2.4787393, -2.5207174, -8.359596,
-3.4682662, -4.7019134, 4.087805, 3.0390997, 8.031523, 0.29456526,
2.963585, -3.2174275, -0.62798226, 0.050266743, -6.846023, 9.446967,
5.0539, 9.251317, 8.615374, 2.377704, 9.570636, -7.939071,
-6.9321704, -8.14109, -3.9170027, -8.3941555, 6.017116, 4.876912,
1.7149926, 4.293995, -0.31981698, 0.365617, -1.1182091, -11.397335,
-7.725873, -10.176235, 2.9552503, -4.4383106, -0.42742407, 7.95979,
-2.4943075, 9.23047, 5.8314295, 2.141697, -5.058449, -6.484253,
-10.070862, 7.850862, 2.2891111, 5.0642786, -9.117084, 9.301278,
5.791844, -5.899534, -6.958865, -4.2715955, -1.1626486, 4.398245,
-6.184211, -8.951277, -3.521674, -7.8789907, 8.870283, 6.416188,
3.4819288, 7.555449, 5.133195, -6.9401474, 7.023783, 0.41028407,
-8.615841, 4.3627706, 5.3826504, 1.8454479, 6.666523, -1.0793616,
0.41792488, -1.6426144, -9.187993, -7.691203, -9.618198, 2.4195766,
-4.494396, 0.86291695, 8.472773, -1.9832267, 7.8457026, 5.020231,
1.509333, -6.480208, -7.414391, -10.101746, 6.184243, 2.0559597,
4.3432574, -10.221332, 9.845212, 5.485789, -5.799474, -7.211212,
-8.344746, 3.9320216, 4.3803377, -0.057675365, 6.1319137, -1.0313221,
0.32957998, -0.92215586, -9.922078, -8.419787, -9.21305, 3.510762,
-4.531401, 0.06631034, 6.9385834, -7.5932593, 3.61068, 5.270011,
2.2032762, 5.7308216, -0.10234367, 0.3414195, -1.2175727, -8.966781,
-7.4157043, -9.603722, 2.9555554, -3.3769226, -0.5336227, 8.269091,
-3.1020706, 8.320312, 3.1325057, 1.5258964, -7.80579, -5.730699,
-8.131455, 6.841818, 3.5990052, 4.848687, -9.815692, 7.648613,
8.252513, -6.7259183, -7.51528, -4.658547, -2.2915761, 5.269436,
-5.494158, -8.309954, -4.52679, -7.012936, 8.6611595, 6.3038597,
2.6651337, 8.622966, 5.3799257, -6.3135414, 6.7496395, 1.0473019,
3.1172771, -4.813991, 1.3556207, 0.72166735, -6.645131, 10.503951,
4.633084, 8.002068, 7.129193, 1.9104896, 8.739627, -7.712377,
-4.279946, -10.670057, -3.0672784, -4.8731794, -1.9275502, 3.7261534,
-5.405707, -8.410501, -3.7577012, -7.271516, 8.289416, 5.127806,
1.0641701, 6.653264, 4.9742136, -6.0329003, 7.4884863, 0.41007885,
-2.9251204, -5.8010187, 6.6409197, -1.600671, -2.514476, 0.92193073,
-6.3998384, 6.2064056, -8.159769, 9.498235, 5.581449, -5.6494513,
-10.294479, 6.2549825, 4.096246, 2.9022977, -3.3622625, -0.65462875,
1.0636849, -5.9859815, 10.586823, 4.9630833, 7.895132, 7.2697473,
1.0224016, 9.373145, -7.1461024, -5.856191, -8.240548, -3.3996224,
-5.574361, -1.8048911, 5.086229, -5.792983, -9.176964, -4.4049516,
-5.637843, 7.6407804, 7.347027, 2.4862704, 7.453972, 5.312744,
-6.8084874, 7.499143, 0.05485836, -7.507731, 4.6676455, 5.88813,
1.7795922, 5.299076, -0.22673032, 2.0990326, -0.7842573, -10.136894,
-7.7721796, -9.606016, 2.6334743, -4.573527, -0.116932444, 9.419418,
2.0955498, -6.9949045, -9.035552, 9.465029, 8.950255, 6.106283,
-2.388769, -9.043136, 2.3883705, -0.6484374, -8.034286, -0.31922805,
-9.859039, 8.004548, -3.7054236, -5.679229, -2.6450648, 5.3884463,
-5.865199, -9.559734, -3.52503, -5.799003, 9.683966, 6.293313,
3.1062276, 8.38812, 5.6244507, -5.4911246, 8.043958, 1.426443,
6.2410283, 8.175542, -3.6503782, -8.205972, -6.8156476, -1.8605603,
5.6795797, 7.5408583, -9.224456, 0.8719458, -1.1219131, -5.651312,
-7.9680614, -3.7706685, 7.8029447, -4.255021, -1.9406642, 4.7843895,
-5.9398675, -8.499529, -4.2599845, -6.6309, 8.503445, 6.068868,
2.1243894, 8.088346, 6.127004, -6.3907094, 7.3279796, -0.14549434,
3.6546001, -3.8558402, 1.3862147, 0.59532267, -5.6342225, 9.364573,
5.076811, 9.20475, 8.183292, 2.2078805, 8.49438, -8.256675,
-4.4084826, -8.266475, -2.9874146, -6.1960344, -4.287005, 0.8338423,
-2.132704, -3.6984656, 2.523083, -7.4255886, -4.7441173, -3.2934399,
-0.5042282, 6.5638056, -6.4526396, 0.60555947, 0.38737246, -9.480225,
-5.3951473, -2.5467622, 4.54797, -5.265985, -9.357446, -4.7291393,
-6.311283, 10.4156885, 6.49672, 2.025992, 7.1722317, 6.095554,
-6.5131965, 8.48332, 0.9390763, 7.0746355, 7.987247, -3.3654993,
-8.404388, -4.548624, -2.2879407, 6.9427595, 7.7890368, -10.542874,
1.6033076, -1.48971, -5.100119, -8.671678, -1.7652856, 8.001004,
-8.065735, 4.905678, 6.071599, 0.05744425, 6.212392, 1.3182079,
0.6310592, -1.0298207, -8.765714, -8.035009, -9.24382, 1.8082685,
-3.615836, 0.52009225, 8.512644, -4.518369, -1.2493688, 6.4335203,
-5.4160776, -8.162092, -4.8912425, -5.2808495, 8.358372, 5.946656,
2.4253502, 6.9777846, 5.5628357, -6.635192, 6.895123, 0.2371223,
1.7444432, -7.017487, -8.7640295, 8.81006, 10.094424, 6.298438,
-4.419867, -6.7232494, 3.7191763, -0.65876234, -7.984994, 0.15771914,
-9.094453, 6.8636537, -3.4137077, -5.430304, -2.688095, 4.324083,
-6.004561, -8.046302, -4.471837, -6.9205875, 9.483015, 7.0689125,
2.4705877, 6.9876075, 5.4683404, -5.7836556, 7.707885, 0.5162984,
4.631848, 6.2609177, -9.457449, -3.0545638, -7.5816016, 5.9784746,
1.8824438, -3.8579614, -9.286013, -3.646031, -3.6087253, 4.2034235,
3.0507233, 8.950407, 0.59177804, 2.8084397, -2.8953943, 1.2609816,
0.69984347, -7.5586557, 8.994999, 5.0827813, 8.329985, 7.408144,
1.5005755, 8.166905, -8.675029, -5.934517, -8.843196, -4.130845,
2.7958903, -3.5591156, 0.23459685, 0.64684266, -5.0296817, 10.215722,
5.598379, 7.1770306, 7.643841, 3.8501189, 8.276022, -7.4578977,
-5.070983, -8.94218, -4.0826683, 3.2895505, -3.9825332, 1.2035798,
-1.0520489, -6.314538, 9.317619, 5.185111, 9.998338, 7.2754164,
2.498257, 8.686862, -7.3470864, -6.547375, -9.608533, -2.7261446,
-2.9564986, 0.9374454, 4.130894, -1.20312, 9.129167, 8.619045,
-4.67169, -0.5274864, -3.6355722, -4.886296, -9.542431, 3.514875,
-0.23033069, -9.529621, -3.6075957, -7.8802133, -2.8417516, -0.5016414,
-1.5477563, -4.4758883, 2.6490452, -8.32881, -3.8335862, -1.2557464,
-1.8328221, 5.8363137, -6.469912, 0.5828098, 1.8616948, -9.520033,
-3.4735663, 8.357809, 5.0004034, 2.33282, -6.519094, -4.1831975,
-8.4387045, 8.118419, 2.6901014, 4.6174254, -9.808999, 9.929476,
6.1078753, -5.9189906, -6.703255, -8.465186, 3.6399543, 6.42953,
1.4944904, 5.494776, -0.29901367, 0.6591608, -1.2530144, -9.287099,
-7.76178, -8.763257, 2.7443812, -4.3974376, -0.2572822, 6.7660165,
-4.4973545, -4.693532, 1.5200527, -0.6632985, -3.9789717, 3.4680922,
-6.949649, -3.8846955, -2.7099476, -0.022102097, 6.471048, -6.4444985,
-0.6387843, 1.7062049, -9.631019, -5.6311774, -5.807418, 0.9696413,
-1.8189526, -5.45686, 2.5948997, -6.248562, -4.253321, -2.005751,
0.24999447, 6.6239595, -4.858549, 0.804178, 1.9010949, -10.192368,
3.0323603, -5.787009, -9.380333, 9.682704, 9.547942, 6.0087137,
-4.4844794, -8.549934, 5.119529, -1.1964722, -7.0117445, -0.65009135,
-9.858127, 8.943797, -5.7691035, -2.9273744, -5.2185516, 5.9065175,
-2.50218, -4.735642, 0.7282615, -6.5555525, 5.507514, -8.694169,
9.993771, 4.085798, -7.6951203, -9.647276, 6.6536384, 5.582101,
-2.2151668, -4.472705, 6.2136164, -3.512708, -5.6101165, 0.3673171,
-6.8503137, 5.551902, -8.675452, 10.06424, 5.809378, -6.781359,
-9.191616, 7.0983734, 3.346252, -3.9725425, 0.15292063, 4.0716047,
-2.097689, 10.076262, 9.087341, -6.0203805, 0.11729888, -4.625255,
-5.3461456, -10.146971, 2.4197533, 1.0063066, -8.943199, -4.279104,
-2.938764, 0.30194497, 3.8727565, -1.5632783, 9.813448, 10.264909,
-4.7856245, -0.64888644, -4.390774, -4.2311144, -9.700811, 1.4160637,
0.8945724, -9.351834, -4.3303356, 1.189515, -6.225586, -7.3907957,
9.271504, 9.5062475, 6.1739364, -5.007342, -7.0976567, 3.2570603,
-0.84381187, -8.653359, -0.92208356, -8.307669, 7.9432964, -3.8071074,
3.5291488, -3.806314, -0.11322134, 0.80405843, -7.4224873, 10.985653,
4.6384983, 8.067602, 7.5883656, 2.8596525, 8.447641, -9.095445,
-5.202306, -8.583754, -3.116489, -1.36932, 8.748897, 4.4973726,
1.5659931, -7.5899568, -7.334604, -9.695086, 7.347381, 1.4833189,
4.325102, -10.677437, 9.629813, 7.232323, -7.148833, -6.101661,
6.3127575, 8.09352, -4.425308, -7.999072, -5.3282375, -1.760329,
8.068942, 6.627575, -9.03536, 0.8144679, -1.6614834, -6.1634903,
-8.140471, -3.6102736, 7.6081753, 4.856614, 6.176458, -9.116487,
-2.1549568, -7.1895833, 8.558496, 2.1735144, -4.1503673, -8.981973,
-2.7754538, -4.7762175, 3.1442606, 2.8137262, 7.4344425, -0.55029196,
5.8722563, 7.8684945, -3.8700378, -8.783729, -5.8238807, -1.5261787,
7.220296, 8.756182, -8.926126, -0.32781926, -2.1388466, -5.669374,
-6.3535013, -3.3634934, 7.928939, -3.1542604, 0.99667925, 4.2081666,
-3.0201821, 9.190852, 9.266916, -4.7896395, -0.15805003, -3.9689915,
-5.3652678, -8.795113, 2.3180652, 0.7548897, -8.970313, -5.643949,
-7.102018, -3.6075044, 1.0403842, -1.0406586, -3.0032725, 2.2331407,
-6.7420034, -4.9212976, -2.943733, -0.39172387, 6.2978907, -6.2123013,
0.2345693, 0.7857798, -9.320912, 1.9108819, -6.3946733, -9.055417,
8.385259, 8.903205, 5.832816, -3.6560915, -6.4022045, 3.1834302,
-1.4885879, -7.603421, 0.1805974, -9.127744, 9.085275, -6.522875,
-6.0481706, -2.1495454, 5.8829074, -5.299556, -8.831239, -3.6415718,
-7.358308, 8.660698, 6.0366664, 3.2134666, 6.325152, 5.478303,
-7.415371, 7.669462, 1.6348441, 5.476556, 6.6932545, -3.787181,
-8.023717, -5.713185, -1.301943, 7.360959, 7.0024467, -9.741648,
0.7237554, -1.2638799, -5.9879403, -7.5352287, -2.8644187, 8.068348,
-7.289822, 4.21858, 5.396922, 0.47948205, 5.8357415, -0.5533364,
0.22742282, -1.2827674, -9.419463, -7.8253856, -9.896855, 2.5185237,
-3.7113519, -0.79114586, 7.8275566, -1.672665, 7.9480553, 4.2486334,
2.237218, -5.7837605, -6.9261346, -9.226968, 8.640333, 1.0086905,
2.6222873, -9.2803, 9.046759, 5.93399, -5.257368, -6.19284,
4.500629, 5.0450125, -7.634408, -2.6469333, -8.231533, 6.93254,
3.2507942, -2.261971, -7.659001, -3.250431, -3.673171, 2.3230639,
2.9591615, 7.7429748, -0.45771402, -7.1281557, 3.8174603, 5.1063075,
1.7062858, 4.9700685, -0.5226341, 0.86882424, -2.049593, -9.760972,
-7.4531517, -7.933552, 3.1519914, -3.3820326, 0.54297495, 7.4000964,
-3.347712, 10.389594, 4.664563, 1.4833618, -6.729841, -6.9587393,
-8.993007, 7.7534394, 2.5525556, 3.7901008, -9.991383, 9.205661,
5.037508, -6.8138514, -5.4066887, -4.788273, -0.1477942, 2.8109221,
-1.9131105, 9.418327, 7.689106, -4.701331, -0.43104488, -5.021098,
-3.128614, -9.1871605, 1.9858847, -0.22601688, -10.02972, -5.340255,
1.8628519, -6.6519814, -8.725267, 7.7658696, 10.3590975, 6.196875,
-3.597623, -7.3804727, 2.9698467, -0.8655198, -7.746584, 0.49627775,
-10.801289, 8.117042, -5.2463746, -2.4441104, -4.1461034, 6.5468016,
-2.3084872, -3.4369152, 1.5946407, -6.8521585, 6.0522437, -9.1955385,
10.255699, 6.6207185, -5.786212, -8.547476, 5.2413282, 3.3292325,
6.6265454, 8.124675, -2.0068057, -7.5162034, -5.540166, -2.0573957,
5.839325, 6.6365733, -8.901188, 0.227525, -1.256107, -4.4978704,
-9.253617, -2.352485, 8.896972, -2.7079148, -0.705306, 4.424503,
-2.7355208, 9.8318815, 8.910507, -5.53691, 0.04332635, -2.8279998,
-3.700553, -9.617646, 1.2526063, -1.1238667, -8.049155, -5.0956283,
-7.897529, 4.4653664, 5.0836844, 1.8828139, 4.3485017, -1.1700726,
0.58126634, -1.7932607, -9.930552, -9.079811, -9.357734, 2.6912796,
-3.255445, 0.21964812, 8.058507, -6.7842145, 3.3461685, 5.5911736,
0.055740938, 4.8324904, -0.9729481, 1.0020163, -1.4949667, -9.50107,
-8.616884, -9.581202, 2.1937826, -3.4444618, -0.52234995, 9.082186,
-6.093874, -3.515726, 1.1728579, -1.5930156, -4.603983, 1.4774525,
-7.78791, -2.092741, -1.8014874, -1.8243524, 4.7777987, -5.669218,
0.66777414, 2.23251, -9.249461, -4.408183, -1.0354197, 4.5245466,
-6.015329, -7.5393267, -3.7522502, -6.717569, 8.925289, 6.141545,
3.0352845, 7.193965, 6.261605, -7.325981, 7.369731, 0.31293702,
-1.7452555, -4.635, 6.257026, -2.241893, -4.8063927, 0.71378076,
-6.202056, 7.1213393, -7.4083085, 9.540251, 6.3063874, -6.6899533,
-10.524389, 4.6639094, 5.3081546, -3.1185315, 0.33983597, 4.4997706,
-2.931123, 8.970066, 8.840453, -3.532897, 0.10258977, -5.3710675,
-4.6673365, -9.194455, 3.4830666, 0.77366954, -9.615627, -2.7342217,
5.962454, 8.570003, -4.5479455, -6.141127, -4.842063, -2.636647,
5.863975, 7.785868, -9.037517, -0.67562026, -1.1526618, -6.920982,
-7.665228, -2.4692638, 8.472133, 4.9249225, -5.2353277, 0.87961656,
0.853844, -5.906171, 8.931532, 5.468381, 9.287966, 10.075584,
2.523625, 7.8438387, -8.526782, -6.3977327, -10.352405, -3.7244565,
3.8984163, -3.997321, 0.044247855, 1.0388248, -5.763386, 11.058892,
6.081722, 9.601867, 7.542371, 1.2398919, 8.481116, -8.155131,
-6.835302, -9.218161, -3.213871, -2.8808954, 8.578088, 4.2510448,
1.5269986, -6.0473156, -5.8857565, -9.23785, 6.740874, 2.352291,
3.7748954, -9.145258, 9.540243, 5.587832, -4.6699643, -5.106386,
-5.5897627, -1.9833931, 0.7698198, -1.7173742, -4.1933193, 0.99870473,
-7.696422, -4.443605, -3.039625, -0.7719519, 5.1278253, -5.2218027,
0.2848337, 1.8417811, -9.300518, -7.8451834, 5.291405, 4.22925,
0.15272519, 5.6078577, 0.6805531, -0.6473826, -1.1637968, -9.084918,
-9.283584, -8.812466, 3.3418324, -3.9311302, 1.0480562, 8.946848,
3.7086074, 5.1574345, -8.943366, -3.1713588, -7.0221004, 7.7566223,
2.452577, -3.7642128, -9.673588, -2.7862241, -3.0818539, 4.097455,
3.254869, 7.6950984, -0.75667816, 2.2200198, -4.727463, 1.3662891,
1.0152828, -6.0301876, 7.837398, 4.66408, 9.410934, 8.097289,
1.8549559, 8.832874, -7.0850916, -6.2350726, -9.046895, -3.3585756,
1.7658163, -6.6924353, -8.110454, 9.123173, 8.407803, 5.7954288,
-4.332873, -8.100613, 3.9827511, -0.9042061, -8.8024435, 0.6482969,
-9.135431, 9.1555805, -4.5134616, -2.9040527, 8.118073, 5.250599,
2.4283104, -6.9490504, -5.587464, -9.587387, 6.2556553, 1.5379646,
4.1295414, -9.417973, 9.229332, 6.895292, -6.6292953, -5.352865,
-5.700316, -4.5505843, 1.5586274, -1.0035452, -3.4540107, 1.7765737,
-6.611955, -4.9214606, -2.0495899, -0.75831276, 6.090795, -6.8294406,
-0.34186012, 2.2663512, -9.734836, -4.9197946, -1.8051782, 5.028836,
-5.500068, -8.541652, -3.5938246, -6.349285, 8.887191, 4.038615,
2.8779316, 6.270264, 6.5258656, -5.2364507, 7.2028127, 0.60359836,
3.9221323, 5.155128, -7.808452, -2.7550967, -5.857817, 7.397453,
2.507747, -3.3077114, -9.175299, -3.8208392, -2.706882, 4.671903,
2.1957276, 7.0966954, -0.8708428, 3.7151513, 5.304978, -7.5123887,
-2.7150173, -7.6738534, 8.408116, 1.4640142, -4.281959, -9.859925,
-3.5194998, -3.3797884, 5.672039, 2.921582, 7.4613447, -0.5544451,
-1.3017571, -4.4974093, 5.9400964, -2.1943376, -3.916828, 0.13566507,
-6.9145174, 6.7420254, -7.681664, 9.691224, 5.1942396, -5.1665945,
-10.73667, 6.6162343, 3.1781282, -6.8166046, -4.899577, 0.43695474,
-2.414404, -3.643378, 2.2947657, -8.230409, -4.3735533, -3.1992726,
-0.6551781, 6.6418347, -7.319145, 0.36520702, 1.7361984, -8.601053,
-1.8920498, 9.008705, 5.67584, 2.0273275, -7.4825263, -5.813923,
-8.461091, 6.5974507, 1.8890631, 3.548519, -10.55627, 10.046521,
7.9854445, -6.732215, -5.9694223, -3.3646872, 9.513385, 4.471651,
1.7107952, -6.381955, -6.569125, -9.091004, 8.135054, 1.2655559,
4.5926065, -9.17314, 9.181515, 6.877146, -6.6289973, -5.716682,
-6.912025, 5.1192813, 6.625189, 1.3964128, 5.611081, -1.1903088,
0.08266861, -1.5843371, -8.756393, -8.269873, -8.813978, 2.727073,
-3.836898, 0.34563962, 7.87752, -6.383673, -4.2313166, 0.6316461,
-1.8918971, -4.966848, 2.6729558, -6.769452, -4.720128, -2.0460143,
-1.3207047, 5.880735, -5.4322834, 0.26116, 2.16661, -9.432492,
-3.21341, 8.164354, 5.4505563, 2.5273335, -6.4427433, -6.4402676,
-8.846901, 6.695445, 2.0753634, 3.6874382, -8.905726, 9.295257,
6.071005, -5.978188, -6.0744486, 5.5430613, 5.14254, -7.094235,
-1.4362193, -7.774397, 6.857795, 2.954029, -3.6253068, -9.538422,
-2.8834865, -2.6448205, 4.7009816, 1.8671441, 6.8835864, -0.480641,
-4.0967956, 0.6951548, 3.7789447, -2.2057772, 10.473923, 7.425042,
-4.8866067, -1.2700436, -2.2731817, -4.6308775, -8.7397995, 2.166565,
-0.5085787, -9.615862, -3.3170953, 6.237825, 8.312589, -3.760769,
-7.138726, -5.129133, -1.8827187, 7.294874, 6.2638316, -9.925473,
0.89178824, -2.157931, -5.341155, -7.222355, -3.8500633, 8.80746,
6.1725125, 7.5903597, -3.7670822, -6.8600307, -5.376602, -1.3631464,
5.347243, 6.998141, -10.029878, -0.36545438, -1.6366142, -5.8582716,
-7.5466957, -3.1626444, 8.189166, 4.2351007, 4.018255, -9.445872,
-1.5653439, -6.533619, 7.3081207, 3.6300147, -3.4940917, -8.89922,
-3.740796, -3.5837786, 4.4414864, 3.5515282, 7.84935, -0.056964263,
6.65296, 8.2865095, -4.065057, -7.4995875, -5.934349, -1.3144441,
5.715887, 8.276013, -10.960123, 1.0775447, -1.6245389, -5.9004707,
-7.4857907, -2.734353, 9.814917, 4.9720235, 5.591757, -9.366627,
-3.4876125, -8.0651045, 7.0645003, 3.2015045, -4.196097, -8.979024,
-3.113247, -3.7062478, 4.9144144, 2.65347, 7.924003, -0.71563226,
2.0733283, -6.8612876, -8.738112, 9.214281, 9.936553, 6.170908,
-3.4745042, -6.899706, 3.9515772, -0.97879314, -8.622473, -1.9379729,
-9.040366, 8.120031, -5.184748, 6.394567, 8.885188, -3.794483,
-8.082814, -5.06751, -2.7025862, 6.194499, 7.120673, -10.505208,
0.95488733, -2.395348, -5.652955, -7.3974824, -2.3002806, 8.850804,
-7.0340147, 3.9374788, 4.6569567, 1.9385202, 4.5166936, -0.33519393,
0.6847074, -1.8508536, -8.943409, -8.240385, -8.385383, 2.6401858,
-3.2429912, 0.6936306, 7.7120514, -3.3216896, -4.472621, 7.1848173,
-3.740773, -5.4540725, 1.9595282, -7.6097474, 6.839103, -8.616785,
9.624371, 5.7517843, -6.6444855, -9.796809, 6.042186, 3.2128015,
-6.9656773, 4.0476184, 4.755916, 0.4707234, 5.670586, -0.8519433,
1.7487208, -2.3645294, -9.3000555, -6.7453732, -8.952053, 2.8511798,
-4.390819, -0.7958246, 8.690365, -1.2940229, 9.152823, 4.183886,
1.6344496, -7.1038704, -6.5831933, -8.4723425, 6.9219327, 2.0052521,
5.661041, -8.37903, 9.703624, 6.675455, -5.6691957, -5.934038,
-1.7933983, 10.722113, 5.60887, 2.367906, -6.4632993, -6.2827187,
-8.307077, 7.520357, 2.095241, 4.117636, -10.116085, 9.201724,
5.4637833, -5.8220553, -7.0555143, -3.2251525, 8.834123, 3.47187,
2.252626, -6.42659, -7.21834, -7.736537, 6.465487, 0.9972378,
4.3185678, -8.855341, 10.576946, 6.327634, -4.9981413, -6.3904567,
6.3680034, 7.298339, -3.6009893, -6.5272064, -4.497922, -0.8217098,
6.9172764, 7.7042265, -9.049829, -0.1046941, -2.6003401, -5.5095415,
-8.175, -3.986211, 8.353999, -6.005459, -3.224021, 0.44196293,
-1.5849929, -4.0691867, 1.6524578, -5.7474456, -5.2824697, -2.5434446,
0.53792405, 5.708279, -6.1395516, 0.034476973, 1.7220172, -8.109998,
-2.0481424, -4.6632338, 6.2183847, -2.5251815, -4.975683, 1.0678875,
-7.6933937, 5.415578, -8.955229, 9.474954, 5.4107146, -5.8608675,
-12.471414, 5.743873, 3.8630478, -7.135694, -3.3420107, 1.571681,
-2.146881, -4.8176694, 2.949421, -7.0200763, -3.7710736, -2.4343507,
-0.60511315, 4.7749887, -5.2734017, 1.1034018, 1.6877179, -9.101425,
4.285096, -4.4612913, 0.4965076, 0.9933323, -7.051389, 10.061099,
7.1299276, 8.778074, 8.250062, 2.0171626, 7.9379754, -9.504683,
-6.574384, -10.069825, -2.3005219, -3.6007566, -4.250442, 7.30863,
-2.9216805, -3.1601012, 1.4558052, -6.7837243, 6.804406, -8.275628,
7.992771, 5.095498, -5.187669, -10.454871, 6.1527977, 3.4057615,
-3.1708248, 1.182362, 3.0291345, -2.4810352, 8.611953, 9.689305,
-5.4002657, -0.7909602, -3.4226687, -4.714036, -9.483436, 0.6372943,
0.71616644, -8.395789, -4.42325, -6.350394, -3.7190423, -0.07159383,
-1.0642569, -4.507105, 2.2269413, -6.827724, -4.1526113, -2.9782333,
-0.95532715, 5.641665, -6.265532, 0.103531525, 2.9673443, -8.678349,
-9.341869, 4.6156588, 6.2792363, 1.3072072, 4.6975694, -0.48327714,
1.6935958, -0.81220585, -9.043599, -7.592683, -9.834212, 2.5878084,
-3.7674263, 0.55611587, 9.207573, -5.7032447, -4.055137, 6.16684,
-6.124226, -8.749096, -5.2233953, -6.095223, 8.694353, 5.583952,
2.3935144, 8.603938, 6.0223627, -6.3958125, 7.192213, -0.53936195,
4.6792603, 5.758088, -7.6996436, -3.9893427, -7.273452, 7.0743713,
2.5826092, -2.6400745, -9.564764, -4.489205, -4.8511343, 4.207712,
3.682452, 7.659844, -1.493789, 4.6625385, 4.255174, -6.621875,
-2.4667757, -7.535613, 7.692683, 2.9625502, -3.480427, -9.170702,
-3.360738, -3.226336, 4.0252004, 3.7444744, 7.949601, -0.04482794,
-7.3591776, -3.7998226, 0.53075004, -1.2397605, -4.00465, 2.0808163,
-6.167218, -5.27774, -3.252711, -0.9508786, 4.553287, -6.1296225,
1.4477179, 1.8629119, -8.909501, -5.184022, 0.01386279, 5.4913025,
-6.6893497, -8.943161, -4.2422194, -6.5979958, 7.673629, 6.0679145,
2.1657276, 6.656581, 6.2551584, -5.726885, 8.439909, 1.1889153,
-8.781021, 3.700332, 6.3897414, 0.92094684, 4.4989285, 0.029262021,
0.12188202, -2.034289, -10.258727, -5.9043097, -9.5475025, 1.3803111,
-2.6672835, 0.5991635, 7.614803, 7.508607, 7.6058636, -3.5324607,
-7.4830356, -6.236982, -0.5930056, 6.8062787, 8.30548, -10.336412,
0.2843341, -1.6242309, -5.3731217, -8.620923, -3.1701112, 7.859669,
2.6248236, -4.037354, 0.27806696, -1.0954541, -5.1329675, 8.76073,
5.8566713, 8.729288, 8.77688, 2.2422388, 8.273038, -8.855611,
-5.5218472, -10.406863, -3.04127, -3.5146406, -3.6433032, 6.2217417,
-3.2904103, -5.617081, 2.0725808, -8.084152, 6.084854, -7.870153,
10.032941, 5.403137, -7.2168527, -10.456365, 6.4406886, 3.5861251,
4.4335804, 6.443785, -9.664589, -1.4755827, -7.5875196, 7.194868,
2.3273208, -3.6874237, -7.995151, -4.528318, -3.2545435, 4.3231964,
2.217656, 9.554438, -0.262366, 3.82256, -3.097919, 1.2542965,
2.4756153, -6.5521755, 9.468392, 5.2111497, 8.791363, 8.969559,
1.0164894, 7.8347497, -7.2661233, -5.3663173, -9.349647, -2.895312,
4.6178713, 4.4970236, -8.128921, -2.388134, -7.204982, 7.9246726,
2.8887987, -3.66906, -8.778419, -3.716566, -3.420944, 5.290876,
3.2017176, 9.198396, 0.9888268, 2.6644826, -3.8068829, 0.713936,
1.0739329, -5.5369854, 9.7269945, 4.8997397, 10.314972, 8.03258,
1.8548614, 9.112232, -8.055534, -5.800883, -7.3509254, -3.6859784,
2.9091022, -4.7711563, 0.88710666, 1.4358181, -7.514095, 10.325413,
7.250847, 8.822184, 7.4477673, 3.935031, 7.514207, -8.216076,
-5.954607, -8.153851, -3.2415762, 4.6094704, 7.2710752, -7.711655,
-2.0168839, -7.3777366, 7.378608, 0.78870493, -3.8573258, -9.065235,
-5.092778, -4.3065248, 3.205538, 2.7243228, 8.317391, 0.21904066,
-4.0985804, -1.4040945, 3.9742804, -4.8898373, -7.814665, -4.1179075,
-5.6568966, 8.745657, 5.170631, 2.9236934, 7.60064, 5.6511335,
-5.867108, 6.683814, 1.4802417, 2.9769866, -5.788512, -9.670476,
9.512413, 9.275466, 5.7822595, -3.790114, -7.3849096, 4.0352426,
-1.8118818, -7.039514, -0.54277307, -9.160551, 7.974063, -4.977607,
-3.3953643, -0.29484314, 4.5300975, -2.496303, 10.334259, 9.281761,
-6.2357407, -1.2451144, -3.4674573, -4.702326, -8.956524, 2.2712495,
-0.063207306, -9.037878, -5.0328436, -7.214025, -4.346608, 0.23249117,
-1.5835001, -3.2782664, 2.6274416, -7.9881434, -3.9845536, -2.324008,
-0.08049599, 6.809898, -6.71709, -0.28291148, 0.9679869, -9.234806,
1.8905965, -7.0994406, -8.475301, 10.135242, 9.410562, 6.8652296,
-4.88088, -7.936125, 2.9165027, -2.2307975, -8.076796, 0.7378677,
-8.647219, 8.356511, -5.8972907, -4.6332083, -3.6120832, -0.46920362,
-1.7564415, -3.460449, 3.698968, -8.320312, -5.473054, -1.3630875,
-0.60587037, 5.095714, -5.632284, -1.5603347, 1.8506542, -8.8415165,
-2.9038048, 8.438732, 4.81046, 2.1446462, -7.2344875, -7.2098365,
-8.675893, 6.309864, 1.0370756, 3.6585405, -9.737723, 9.615832,
7.681602, -5.152856, -6.4754577, -7.199391, -3.8732023, 0.6892925,
-0.4086386, -5.0915914, 0.123399384, -7.081428, -2.8967493, -1.8055007,
-0.7318387, 5.359374, -5.4415383, -0.39679357, 2.1817417, -8.717163,
-2.6300368, 9.632848, 5.0965047, 0.8696952, -5.846049, -5.914046,
-9.276221, 7.6005855, 2.3681214, 4.343923, -9.973524, 8.928061,
6.6309648, -4.9323072, -5.9829807, 6.131422, 8.210675, -3.9408135,
-8.139571, -4.4752107, -1.0662954, 6.9391575, 8.072885, -9.492797,
0.86481816, -2.7573216, -5.414936, -7.7305956, -2.0733178, 9.507546,
5.0972176, 8.0317955, -3.591854, -8.179699, -4.8631997, -2.7618487,
5.9859548, 6.8751426, -11.006369, 1.2612578, -2.603221, -5.9696817,
-6.967107, -2.7954648, 8.082472, -6.4784293, -3.1640706, 2.1498394,
-1.9112898, -5.1421485, 2.4497309, -6.705029, -4.3187137, -1.6554806,
-1.8156244, 6.1885624, -6.430627, 1.4882534, 3.243052, -9.607652,
-1.7085698, 9.251234, 4.3491774, 2.416117, -5.290142, -6.7528033,
-8.664574, 7.0019703, 1.4274092, 4.7426867, -10.187569, 9.448294,
6.3144927, -5.417932, -6.129937, -2.7391443, 0.59240377, 3.6653671,
-3.328341, 9.074866, 8.93781, -5.556528, -1.2742648, -3.1844244,
-3.6237466, -10.15354, 2.2867045, -0.5779863, -9.1535225, -4.6684074,
7.1759663, 7.3641973, -3.6259208, -7.2714105, -4.8927345, -1.1482089,
7.236807, 7.1285334, -10.11766, -0.02785874, -1.0509187, -5.302217,
-7.398794, -2.6910326, 7.422457, -7.2027617, 2.5210826, 6.0674076,
1.9882846, 5.167844, 0.3779964, -0.14692286, -1.7783586, -8.442635,
-8.032382, -10.680228, 2.683896, -3.7678924, 1.0102718, 8.161365,
-7.766121, 4.5838485, 4.660446, 1.8665016, 6.2498345, -0.8162987,
0.11104556, -1.8215317, -7.6489472, -8.143102, -8.197079, 1.8415658,
-4.405636, -0.30920935, 7.559347, -2.6080153, -3.3012905, 5.6862464,
-1.7896237, -4.459233, 1.0632817, -6.598305, 5.602765, -7.904013,
7.8424582, 4.9409027, -6.67253, -10.767012, 6.15876, 4.4071717,
5.656594, 6.8484516, -3.1476777, -7.571905, -6.2254357, -1.9712182,
5.758922, 6.707439, -10.087535, 0.02836294, -2.1073356, -6.0885587,
-7.7628417, -3.1303124, 8.636786, 3.7710018, -3.909192, 0.42333752,
2.0595105, -6.317303, 9.594121, 4.67741, 8.63621, 8.3232155,
0.86923796, 8.578076, -8.159596, -6.0514417, -8.369443, -3.2592864,
-6.0579114, -1.7926767, 5.1349235, -5.7831187, -8.770867, -4.1543965,
-7.169088, 7.598512, 6.31293, 3.4194317, 8.037912, 5.5066266,
-7.1444583, 7.6522484, 1.8842989, -1.6498294, 8.167538, 5.8106794,
2.266483, -7.3731356, -6.919148, -8.447499, 7.3767266, 2.3994293,
3.51698, -9.469757, 8.408597, 6.5709944, -6.385953, -6.8783717,
2.3145907, -5.864707, -9.284212, 9.974858, 9.248446, 5.5465083,
-3.209382, -8.2473135, 4.216321, -1.4011812, -7.601114, 0.24903035,
-9.108602, 6.789536, -5.609271, 5.2646365, 3.7558274, -8.914788,
-4.099078, -8.348103, 5.0651083, 2.3700697, -4.181399, -8.686899,
-4.917938, -3.4382954, 4.301682, 3.8025165, 8.732204, -0.0059595746,
1.4045326, -5.1021776, -7.7670307, 10.321968, 8.4566145, 6.2178245,
-4.81318, -8.533545, 3.0419717, -0.32924134, -7.836596, 0.6514037,
-8.889345, 9.798955, -5.860011, -5.6919904, -1.6185277, 5.637761,
-5.835713, -7.6979113, -3.9375474, -8.415309, 8.803723, 5.878187,
3.044101, 8.090162, 6.210055, -5.1029224, 7.726085, 1.2242173,
4.0775433, -2.1252205, 1.0108831, 1.5983747, -6.2114277, 9.292624,
5.1695995, 8.2117, 7.415637, 1.0126104, 8.384375, -9.409417,
-5.946701, -9.120886, -3.3905387, -5.199861, -3.8421059, -0.1340202,
-2.292319, -4.3078012, 2.8822129, -7.299407, -3.0992985, -3.6885862,
-0.88706255, 4.8268437, -5.751982, 0.90550995, 1.5537581, -10.893942,
-6.5163236, -3.4397552, 0.28369677, -1.5917131, -3.6631076, 2.4715986,
-6.9888134, -3.8286197, -3.7476637, -0.34884825, 6.1322474, -6.7182026,
0.11383244, 1.8207762, -9.165139, -1.9188373, -3.685447, 6.557671,
-3.8604941, -3.7915502, 1.3823928, -8.540816, 6.041277, -9.622087,
9.272483, 4.812929, -5.459201, -9.807776, 5.4093146, 4.753193,
-6.1211348, -3.786471, 2.3802521, -1.1162397, -4.8782554, 2.170233,
-8.453495, -4.2135267, -3.2559023, -0.23782712, 5.318841, -6.0882607,
-0.16020171, 3.0654874, -9.295861, -3.9246643, -1.1393148, 2.998033,
-4.318775, -7.904071, -2.800485, -5.909349, 9.698792, 6.777466,
3.6121604, 8.825098, 6.22503, -7.36916, 7.6359572, 1.0598067,
2.1506774, -7.4168296, -8.384336, 10.437943, 8.602309, 5.914923,
-3.6162124, -8.084291, 2.9970481, -0.4116496, -5.935207, 0.040830225,
-9.945077, 7.1021357, -4.643817, -3.132097, 0.13191287, 5.418489,
-2.207806, 10.307531, 8.839859, -4.223659, -0.27383408, -3.9054093,
-3.1586568, -9.992204, 2.33356, 0.36018413, -8.606421, -4.74262,
-5.839156, -0.80932015, 5.2826877, -4.759288, -9.660967, -3.730625,
-6.7809463, 9.190304, 6.802815, 2.7141244, 7.008631, 6.619229,
-6.310707, 7.2402306, 0.14273828, -2.9942667, -3.9975731, 7.1852574,
-2.638108, -3.8044379, 0.73901707, -6.340163, 6.0500917, -8.468482,
9.551824, 5.9240108, -6.3205256, -9.206056, 5.516724, 3.3679056,
2.4033506, -7.193963, -8.720811, 8.990322, 9.643482, 5.211146,
-3.4929276, -9.939631, 3.2437413, -1.5387418, -7.1159067, -0.0806202,
-10.2961235, 7.496913, -4.685324, -2.3999703, -4.8728423, 6.567425,
-3.6588533, -3.6608834, -0.4019788, -6.8036804, 5.8153644, -9.328634,
9.879407, 4.3550296, -5.9978867, -10.8855715, 6.632119, 3.4900968,
-2.5225089, 8.312515, 4.6269197, 1.7711086, -6.653724, -7.4591713,
-8.474785, 8.3964405, 1.9461682, 4.44265, -9.105209, 9.117343,
6.8057175, -5.744403, -6.2951274, 6.4343185, 6.691375, -3.2405212,
-8.819071, -4.013054, -1.7410048, 6.2308826, 6.6758165, -10.301936,
1.8068116, -0.99352115, -6.6422853, -6.5497437, -2.412427, 8.6613455,
-5.04912, -3.345249, 0.8325753, -1.1029865, -3.899759, 0.8876871,
-7.405341, -3.5654964, -2.7069912, -1.8622332, 5.6668363, -4.8136506,
1.157513, 1.8048886, -8.404977, -3.5688822, 0.5040818, 2.8069234,
-3.25464, 9.228806, 9.394433, -5.2433686, -0.7811582, -4.5316305,
-3.4573638, -9.090912, 2.6197965, -0.5756312, -11.038101, -4.793244,
2.7478004, -4.177278, 0.562788, 0.14081088, -6.0579643, 8.764944,
6.246654, 10.426458, 7.2940173, 2.3319452, 7.9287653, -7.501482,
-6.9660234, -8.759044, -4.1044855, 1.735229, -6.7054596, -8.643964,
7.487597, 9.633358, 5.482142, -4.7150574, -9.227698, 2.4289358,
-2.1457634, -8.05555, 1.2708195, -9.680346, 8.310833, -4.5440636,
5.9576106, 8.866353, -2.5633192, -8.925977, -5.211982, -2.0016477,
6.1865196, 7.4311066, -10.009861, 0.37626797, -1.1807008, -5.35164,
-8.055478, -3.6881363, 7.8504004, -4.330071, -1.6665548, 5.076396,
-5.6002817, -8.60715, -3.7790267, -6.518534, 8.571923, 7.2190742,
0.591348, 7.786133, 6.5758586, -5.465725, 7.80577, 0.9323889,
3.5424657, 4.899465, -7.455043, -2.7389019, -7.063229, 8.697942,
1.9802995, -3.4515595, -8.919532, -5.1485686, -3.7136114, 5.208867,
3.1295621, 8.177613, -0.8185578, 3.328705, -3.0607684, 0.7639568,
0.9525489, -6.515406, 9.335051, 3.2833583, 8.9374, 7.5546136,
3.0728686, 8.451026, -9.062425, -6.1152587, -9.827739, -3.1620083,
-5.362314, -1.2921567, 5.327361, -5.3302774, -8.888587, -4.5527844,
-6.7238536, 10.301866, 6.373277, 2.2402432, 6.846539, 5.729205,
-4.7444935, 7.5717, 1.4216542, -6.7166, -3.3134437, 0.22413717,
-1.3928875, -4.157645, 0.8932867, -7.219809, -4.6399164, -3.017949,
0.1319421, 4.8239646, -5.4372807, 0.089414716, 1.6524593, -8.517732,
-6.5884647, -5.1078653, -0.15199757, -0.26707748, -4.3672123, 2.066689,
-7.4200096, -2.8242114, -1.5345919, 0.6745458, 5.5921874, -5.7995872,
-0.776961, 2.8821454, -9.888437, -3.646765, 1.6834354, 4.0576406,
-2.23706, 8.907418, 9.601109, -4.7717886, -0.09458417, -4.3336644,
-4.749811, -9.064614, 2.631544, -0.035433494, -8.769338, -4.453611,
-2.9219694, -5.3573117, 6.046344, -2.8424332, -5.5893497, 0.8695591,
-5.9960327, 6.1499248, -8.713867, 9.305026, 6.0738535, -6.6037693,
-8.59708, 7.6625805, 4.3168726, 4.0873175, -4.829057, 0.7545677,
1.3727825, -7.1706047, 8.339684, 4.980497, 8.3269, 8.423941,
1.5278224, 7.838041, -8.658294, -5.2973003, -9.086324, -3.618869,
-5.160398, -2.0772526, 6.4065695, -6.2402186, -8.388518, -4.0314784,
-7.524799, 9.074732, 5.883579, 0.92340237, 7.8911066, 5.291583,
-7.0020413, 8.332711, 0.5438318, -5.475752, -4.096353, 0.7418821,
-1.7603456, -5.4498396, 2.426098, -8.548701, -4.205151, -3.6305838,
0.5124929, 6.341473, -5.9324965, 1.1692834, 1.2558702, -8.690557,
-7.601538, 5.0042977, 5.4939704, 1.2349383, 5.4100456, -0.7921951,
0.8094976, -1.4773229, -9.843012, -7.0676484, -10.246568, 2.432461,
-2.9934773, 0.3922799, 8.117828, 6.4602036, 7.895043, -2.6972594,
-7.537776, -6.4763684, -3.0186179, 5.8566647, 6.7770367, -9.200746,
0.2087364, -0.858198, -6.485227, -6.9270077, -2.743004, 7.4454303,
-6.774294, 3.8858647, 5.1442966, 1.6952565, 6.166942, 0.53319055,
0.4099421, -2.3269954, -10.190873, -7.651328, -9.131859, 1.9591327,
-3.6816816, 0.6132709, 7.345471, -5.7091637, -2.1768086, 4.938067,
-6.2148037, -8.8039665, -4.996662, -6.863648, 7.842789, 5.5681744,
1.7519963, 7.321761, 5.466928, -5.6338496, 8.135275, 0.73794854,
-1.8118109, 8.668656, 3.5502715, 1.6734891, -5.8290954, -6.2849545,
-9.082384, 7.0790424, 1.7971557, 5.6151752, -9.320955, 9.699226,
7.3700514, -5.5860653, -6.5448303, -5.1421866, -2.6081507, 6.1404862,
-6.3517337, -7.5181293, -3.375357, -6.1467624, 8.108316, 6.402123,
1.7739153, 8.066847, 6.021652, -6.555123, 6.994559, -0.94740945,
5.0095654, 5.402654, -7.9377584, -3.4647434, -8.928226, 6.8889656,
1.9295157, -3.4371915, -9.174624, -3.2616417, -3.6980388, 4.697456,
2.8614936, 7.8807235, -0.5725919, -1.7451428, 9.886546, 3.1885056,
1.7332884, -7.1396356, -7.865368, -9.382799, 6.5461197, 3.2488894,
4.8164263, -8.698221, 9.903367, 5.8585167, -6.120382, -6.0209384,
3.7328608, -4.2443223, 0.20974995, 0.92421633, -6.198192, 9.460172,
5.8078604, 9.380667, 8.171391, 2.2755094, 7.3299284, -8.19499,
-5.7446876, -8.585196, -3.978039, -5.9903994, -2.6521115, 0.36149597,
-0.85726905, -5.0807085, 1.5675524, -6.8794384, -3.1182258, -2.4238338,
-1.0977409, 5.7030234, -6.8818097, 0.7078495, 2.4659245, -9.387455,
3.2457778, 5.3635454, -6.9716244, -2.267232, -6.149327, 7.8797464,
3.185198, -2.0001845, -8.464748, -3.9114127, -2.7734733, 5.609812,
2.4151742, 7.2391567, -0.45850226, -3.524892, 0.092688836, 3.14134,
-3.9132354, 8.842938, 9.384612, -4.9688344, -0.40589646, -3.8411129,
-3.5751715, -8.986373, 2.1063707, 0.70323086, -9.642321, -4.5845146,
7.316262, 7.7554197, -3.9711056, -9.01467, -5.2043657, -1.3536236,
5.299715, 7.5319033, -10.209692, 2.0981665, -2.0503328, -5.862479,
-8.215036, -2.353183, 8.597251, -2.6859953, 8.140838, 5.7825665,
0.9720707, -7.1876583, -6.7885914, -7.8294363, 6.3184195, 2.836515,
4.168615, -10.275366, 9.721669, 6.7881947, -6.1733694, -6.314639,
-3.6673272, -0.6037277, 4.1874137, -3.6572542, 9.551572, 10.210841,
-5.1610703, -0.87506807, -4.0713253, -4.88557, -8.846105, 2.0876946,
1.2504147, -8.156332, -4.534227, -4.900599, -2.9370446, 4.4679914,
-5.528951, -7.681797, -3.880365, -7.592825, 8.495731, 7.574872,
2.7459314, 7.2000837, 6.4176593, -5.734224, 7.5318623, 1.1386472,
2.3209145, -8.046987, -8.311764, 8.59397, 10.659053, 5.6257167,
-5.0901527, -8.376442, 2.301819, -0.6711805, -9.004793, -0.07654149,
-10.766698, 7.9621997, -3.6740355, -3.5080066, -0.42414105, 3.5235705,
-3.9768686, 8.789557, 9.31468, -4.7542315, -1.4405607, -4.078482,
-3.9088032, -9.3763, 1.7476346, -0.23493233, -9.348959, -4.872782,
-3.0600958, -4.09791, 5.9243546, -3.6587477, -4.62405, 0.44386676,
-7.336635, 5.0352526, -7.8378158, 10.527119, 4.0817556, -5.340766,
-10.456927, 6.9091115, 4.0892344, 4.4428782, 5.712187, -8.937002,
-1.7855833, -7.7947097, 7.694921, 3.4848828, -2.6598818, -9.715076,
-3.4689264, -2.90083, 5.216428, 1.971843, 7.2843304, 0.14048927,
-3.9430256, -2.2418306, 3.6924183, -5.142498, -8.28642, -4.1808953,
-7.2251368, 8.569753, 5.804526, 2.4564304, 8.018354, 6.2023935,
-5.841633, 7.688872, 0.5474327, 3.5435777, -3.61137, -0.48830578,
0.66919976, -5.6738424, 9.41808, 5.9643283, 10.026537, 8.623571,
2.0232105, 8.079792, -10.017634, -6.198212, -9.323663, -3.2201371,
-4.562237, -1.2995663, 5.7339225, -5.1295, -8.375976, -3.7413561,
-7.1578903, 7.603829, 6.6685057, 3.5344918, 6.6464496, 6.6487865,
-5.092063, 7.8496075, 0.21424331, -6.4582458, -1.6799798, 0.70425564,
-1.8873534, -4.4738674, 3.0409698, -7.1308336, -5.163902, -2.0293028,
-1.3463012, 7.0148277, -5.2504907, -0.028436558, 2.7450027, -9.023493,
-9.096898, 4.287031, 5.4058795, 2.339101, 5.503788, 0.6067295,
0.8633425, -1.7538533, -10.47789, -7.6496696, -10.415888, 3.6348531,
-3.8263237, 1.008038, 7.8151793, -2.8795345, 9.5702715, 3.7618763,
2.1786602, -7.8292346, -6.5536084, -8.8632765, 6.1929307, 2.8376179,
3.6472375, -10.155487, 9.538595, 7.452899, -6.464293, -6.320325,
-3.1437447, -1.014591, 4.254574, -3.2684894, 10.203594, 8.237166,
-5.213888, 0.19838731, -3.9443955, -3.7691193, -10.325156, 3.3476267,
0.42118174, -8.018622, -3.8151026, -6.09225, 5.00313, 5.933043,
2.0435503, 4.720169, -0.9797433, -0.3201526, -0.5150253, -9.874652,
-8.718121, -9.673114, 2.5611844, -3.524991, 0.34707955, 7.5716033,
-3.4816077, -4.774554, 6.7462773, -3.0538998, -5.461725, 1.9698462,
-7.109039, 7.0457463, -8.691012, 9.121731, 5.6333222, -6.1767373,
-10.71399, 7.3380775, 3.7015576, -2.0892475, 10.085812, 5.493012,
1.8237597, -5.8361187, -6.7760425, -9.074288, 6.894141, 1.8105714,
3.8897278, -9.469019, 9.510599, 6.6509852, -5.447361, -5.5300484,
4.822562, 4.984373, -9.03302, -3.843234, -8.008172, 8.021219,
2.4483304, -2.9426632, -7.563584, -3.5957246, -3.8874934, 4.9192,
2.3701422, 7.391159, -1.6196264, 3.8092427, 5.937913, -7.813994,
-2.8143158, -8.287718, 8.374735, 1.9406755, -4.895462, -8.407961,
-4.5536957, -2.8043075, 4.3253274, 3.1737487, 8.725306, -1.4019027,
-3.5726547, 0.78179294, 4.529287, -3.3881974, 8.74288, 8.607299,
-5.042302, -0.20213881, -4.647557, -4.1144676, -7.678463, 1.8379859,
-0.24987781, -8.578662, -4.818242, -5.117454, -4.5148954, 0.10624589,
-1.2180274, -5.016922, 1.9600813, -6.9879055, -4.3901095, -2.738134,
-1.2491328, 4.64339, -5.7814093, 1.5131419, 1.8611726, -8.913307,
-1.8971161, -3.1923783, 6.899726, -2.4385386, -3.980598, 1.1318543,
-6.5293493, 6.320701, -8.727861, 9.180547, 5.2706137, -5.8057957,
-10.110816, 6.529266, 4.444552, 4.638854, 4.6691217, -9.343299,
-3.127269, -8.025028, 6.8854837, 2.7924738, -3.326706, -8.930542,
-3.6522446, -4.1904025, 4.6658187, 2.8565712, 6.692122, 0.14128414,
-6.2409735, 3.5684962, 4.6838393, 0.72853315, 5.510342, -0.8682135,
0.7495298, -0.85639167, -9.117575, -9.080062, -8.674799, 3.3843653,
-3.1317675, 0.4324034, 8.305462, 6.3279643, 6.7106705, -4.1836724,
-8.615738, -6.851803, -1.4598849, 5.80182, 9.052325, -9.979097,
1.1205733, -2.7501493, -5.9289064, -7.534864, -3.8834043, 8.562654,
3.9074936, -3.8890572, 0.45945162, 0.84255755, -5.513143, 8.778603,
4.9385967, 9.497171, 7.865494, 2.9229267, 7.9674263, -8.268495,
-6.726032, -9.090301, -4.528376, -6.1461563, -4.3230863, -1.2120434,
-1.455095, -3.1794934, 2.8854086, -6.5343447, -3.2918155, -2.6107028,
-0.74047935, 5.271163, -6.2277756, 0.7157286, 2.6910105, -9.168604,
-1.3577203, 8.839961, 5.0434685, 2.1910448, -4.7244105, -6.096407,
-8.92787, 6.6546445, 0.8977878, 4.303876, -10.117756, 8.402619,
6.196252, -6.5103016, -5.1825013, -3.1043668, -4.961531, 5.5930915,
-1.4011675, -4.397015, 0.8133563, -6.7698617, 6.7175813, -7.8468657,
10.489274, 5.0033035, -6.2341933, -9.351235, 5.9400563, 4.133572,
6.718978, 6.5899878, -4.2197423, -8.820812, -5.765137, -1.1392213,
6.190694, 6.80178, -8.950834, -1.398454, -0.7859696, -5.9388456,
-7.657369, -3.6168509, 9.938216, 2.8604543, -4.176269, -0.032480065,
0.29806682, -5.1237435, 9.9813, 5.5188937, 9.73569, 6.8377767,
1.026506, 6.912907, -8.71974, -5.9671535, -9.023453, -3.388854,
-3.7868538, 0.2358615, 4.5804114, -4.1765995, 9.464467, 9.608519,
-5.427396, -0.0027902753, -4.4213595, -4.813891, -9.548155, 1.6938022,
0.53211325, -9.484891, -5.059108, -7.4199195, 4.41144, 5.1841702,
0.9226823, 5.6940627, -0.77240235, -0.39194813, -0.5792008, -8.9826,
-7.796047, -8.205816, 2.5407724, -2.786988, 0.2943157, 7.434004,
3.6179874, -5.4734077, -8.223705, 8.409029, 9.27832, 6.055695,
-3.6761286, -7.0308566, 4.3002524, -1.9511133, -6.595688, 0.1227307,
-8.831284, 9.212987, -5.6025066, 3.2084193, -4.1871395, -0.041901555,
0.33877632, -6.681616, 9.920659, 5.347335, 7.8451376, 9.134266,
2.1819751, 7.4526243, -8.539444, -6.0027223, -7.90224, -4.269072,
4.483309, 5.1612105, -6.308667, -2.1959343, -7.3934155, 7.0988226,
2.1501093, -3.4736, -8.523852, -5.096658, -4.5949063, 4.146306,
2.896147, 7.5767636, 0.47898337, 3.6931636, -4.097703, -0.13887994,
2.0608702, -6.143664, 9.83687, 4.927203, 7.9364796, 7.497932,
1.8711846, 7.6925306, -8.237041, -7.262296, -9.879487, -3.0237556,
-0.70784575, 9.055739, 4.649629, 1.9562821, -6.7409678, -6.981162,
-9.239891, 6.9407215, 1.999373, 3.7810543, -10.087302, 9.472698,
6.470369, -4.700423, -8.219179, 4.5559483, 6.059409, -6.87717,
-3.8107352, -7.7552876, 6.435746, 3.58038, -3.793456, -9.739074,
-3.3336227, -2.2753158, 5.056642, 2.894508, 8.030654, 0.43468696,
-3.8354254, 1.7778754, 3.7521513, -3.2931926, 8.9018345, 9.979589,
-6.120826, -0.58886594, -2.969658, -4.1323996, -9.127915, 3.6837893,
0.22602403, -9.466155, -4.6245246, 5.287118, 7.9300413, -3.2707233,
-8.926663, -6.0852256, -1.5615692, 5.7396297, 6.6436973, -10.105862,
0.669653, -1.8916824, -4.8029456, -8.301123, -4.124042, 10.206635,
3.113396, -4.4953685, 0.80065525, 0.70220363, -6.6659775, 9.988934,
5.5232577, 9.15163, 7.209619, 0.97462773, 8.51253, -8.453342,
-5.6418386, -9.15139, -3.3498406, -5.684921, -4.624882, 0.5551096,
-2.0088968, -3.9966486, 1.6103154, -7.4957933, -4.854178, -2.2169678,
-0.27768454, 5.5392847, -5.979758, -0.72932523, 1.6385177, -9.106135,
-6.1801667, -4.683267, 0.082921855, -1.9471834, -4.6009297, 1.859672,
-7.593911, -3.5737734, -3.446167, -0.7242213, 6.556091, -6.3657618,
-0.21785446, 1.6869241, -8.245184, -6.6729517, -4.1615, -0.043868612,
-2.2683814, -4.494702, 2.3640566, -6.793602, -4.547303, -2.9618616,
-1.5272892, 5.681512, -6.6004243, 0.6858782, 0.5981848, -9.322733,
2.2426426, -6.643728, -9.535674, 9.99336, 10.577679, 4.990086,
-4.6245446, -8.242425, 3.009484, -0.8427742, -8.069026, 1.4190774,
-8.478846, 8.33521, -4.1055098, -5.6288404, -2.481804, 5.0313454,
-5.1209555, -8.3456545, -4.7248325, -7.1298366, 7.800823, 6.0051317,
3.42059, 7.4262276, 6.6750317, -5.249818, 7.439567, 0.65624356,
-2.6386292, 8.64838, 5.374185, 1.4801291, -7.86555, -7.96975,
-8.414121, 6.4272223, 3.2506561, 2.704101, -8.40079, 9.545909,
6.581154, -6.134661, -6.0841055, -2.9381497, 8.742895, 4.8399844,
2.2072895, -6.4186463, -5.472966, -8.962191, 6.7647147, 1.0567768,
3.6498005, -9.611499, 10.654387, 6.286525, -5.596566, -6.3749967,
3.6286473, -3.6094818, -0.43751407, 1.6931443, -6.155699, 9.530568,
5.4962287, 8.427548, 8.049008, 2.2909114, 7.214658, -8.607283,
-6.2588058, -8.916135, -2.6580756, 5.869712, 7.975988, -2.810921,
-7.619258, -5.904651, -1.178541, 7.5555797, 7.856306, -9.477689,
0.7228592, -0.6438121, -4.804905, -7.2059603, -2.499647, 8.101038,
-5.0421815, -0.71923274, 4.894131, -4.8744483, -7.869636, -4.403551,
-5.8541327, 6.5578904, 5.3064823, 2.0271325, 6.855411, 6.3088255,
-6.0655723, 8.126973, 1.2041019, 1.7637342, -6.1024127, -7.8775587,
8.786929, 9.074222, 6.518047, -4.8575797, -7.890351, 4.0709805,
-0.72078896, -8.914962, -0.6294105, -9.927013, 8.256329, -4.4878793,
-3.6292624, 0.1524684, 3.4201217, -3.348982, 10.15516, 7.9692307,
-3.832203, -0.29512584, -4.52319, -3.7853587, -8.2226095, 2.9736295,
0.4048769, -8.20962, -5.628619, -3.6613832, 1.0474597, 3.8048406,
-3.759753, 9.320481, 8.648925, -4.620084, 0.48635, -4.721823,
-4.797253, -8.732612, 1.5900956, -0.70393026, -9.795508, -4.1806436,
-4.9610667, -4.77584, 0.8428181, -1.6868169, -4.249581, 4.088098,
-8.262202, -3.9794893, -2.345378, -0.11854989, 5.7721076, -5.790889,
0.010534771, 2.0366802, -9.311226, 5.4463773, 6.8840923, -2.697818,
-7.366038, -4.8911147, -1.0456618, 6.477173, 8.237088, -10.288491,
0.2896148, -1.1288388, -6.4826903, -5.4350805, -3.2277005, 8.957586,
1.8678188, -6.245517, -8.300136, 9.114548, 9.243449, 6.4731297,
-5.680297, -7.569109, 3.772985, -1.3526826, -6.126432, -0.5689156,
-9.594383, 9.580495, -5.406843, 3.2065918, -3.5969102, 0.46051404,
1.4928018, -5.656455, 8.656257, 6.1107974, 7.959543, 8.308046,
2.3918166, 7.0995626, -8.165065, -6.40829, -9.62659, -2.5477967,
-6.4775915, -4.0051312, -0.8221657, -1.7452071, -4.110426, 2.3489573,
-7.9294953, -3.2711115, -3.2790856, -0.19998027, 6.0025554, -6.4588833,
1.5274063, 1.0127456, -8.427584, -4.330744, 1.2549684, 4.4625726,
-1.940318, 10.28473, 8.958999, -5.625681, -0.09122906, -4.5933666,
-4.1877203, -8.469793, 2.0735888, 0.15857702, -9.036117, -3.8651614,
6.7392883, 8.136579, -5.133114, -8.517659, -5.3220057, -1.3629817,
6.2043715, 7.4728537, -9.852801, 0.2994854, -2.0991256, -4.870436,
-7.904233, -4.179151, 8.318495, -6.6182947, 4.4177327, 5.8932533,
0.7230467, 5.976448, 0.33398363, 0.6575944, -0.5279744, -9.868767,
-7.9731116, -9.304593, 2.0074806, -3.554028, -1.0318375, 8.884197,
5.9185805, 8.425355, -3.5211453, -7.461584, -5.9727254, -2.0327446,
7.0221434, 7.1590343, -9.661433, 0.00046925258, -1.0226712, -5.9833107,
-7.6976137, -2.6975017, 9.679272, 2.6270514, -6.374335, -9.2761965,
9.171331, 10.149477, 5.7503066, -2.439167, -8.414499, 3.7073455,
-1.66769, -7.4029455, -1.316017, -9.694459, 7.421923, -4.7296634,
-5.3128343, -3.4220006, 1.0474683, -1.302421, -3.1880958, 2.523892,
-7.8128843, -3.174748, -2.1842308, -1.1887311, 5.367874, -5.7989216,
0.60891813, 1.986396, -8.853076, -6.4675922, 2.9712186, 4.8504643,
0.3796064, 5.707802, -0.90323293, 0.75780135, -1.5796851, -9.827166,
-7.5701184, -9.559451, 1.9661639, -1.7519193, -0.1696032, 6.5990033,
-6.1756425, -3.9535809, 0.6950165, -0.9963139, -3.7237663, 2.625981,
-7.147416, -4.2952437, -2.7786543, -1.0150361, 6.4971585, -5.591035,
-1.7735833, 2.307422, -8.934676, 6.3175564, 7.790874, -3.05725,
-7.958814, -5.279218, -0.6553139, 6.9137754, 7.764824, -10.665383,
0.58230084, -2.266729, -6.4548345, -7.652887, -3.8807032, 9.411627,
-1.4181685, -2.9158294, 6.3103147, -3.768593, -4.666192, 0.8637251,
-6.786864, 5.6299634, -7.420348, 8.957566, 4.227452, -5.250989,
-11.481792, 6.166338, 6.319524, 4.574907, 5.5770893, -8.312141,
-2.5483756, -8.96058, 7.051, 1.4268552, -2.487243, -8.771428,
-3.9156923, -2.8685753, 4.887751, 1.9611052, 7.694621, -0.79031837,
4.513652, 8.222154, -3.7251465, -8.034304, -5.287466, -2.461339,
7.7107844, 7.1426744, -9.963049, -0.46083128, -1.6350175, -4.5869718,
-6.877551, -2.6495903, 9.240673, -6.462002, -3.6981559, 0.4555167,
-0.50782007, -5.5411887, 2.3375695, -8.482708, -3.6253085, -2.7388842,
-0.5848733, 5.098801, -5.110899, 1.013896, 2.2567723, -9.1616535,
-5.043788, -1.1575576, 4.0947385, -5.8893275, -8.545155, -3.2490788,
-6.8081217, 8.908991, 5.7497025, 3.0088646, 8.603279, 6.7703104,
-6.325618, 8.324023, 1.1911067, 5.9048767, 8.617628, -3.669518,
-7.9292655, -6.315361, -0.9080064, 5.9039288, 7.1445923, -8.824829,
0.5710804, -2.7915578, -6.796501, -7.64771, -4.096615, 9.312177,
-2.186735, -4.4936004, 6.7048364, -2.2210233, -3.6680636, 0.43797407,
-7.612816, 5.3158007, -8.019816, 10.751913, 5.224338, -6.194688,
-8.86281, 6.663119, 4.442512, -5.277305, -1.6095123, 5.6652446,
-5.8243685, -8.90918, -3.0437622, -6.086359, 9.147424, 6.713501,
2.989778, 8.841655, 6.1041775, -5.8645153, 6.835541, 0.20937236,
5.1260233, 5.162831, -7.4211435, -3.3823187, -7.2227683, 8.265752,
1.5018557, -3.622374, -7.929288, -2.9702823, -3.6163805, 4.0778537,
3.4203045, 8.57274, -1.3709658, -7.7956443, 3.768751, 4.642162,
2.0364745, 6.275999, 0.54041946, 0.82145584, -1.3842595, -9.839539,
-8.509381, -9.117319, 1.906667, -4.408625, 0.9784531, 9.481784,
-6.419728, -3.6856394, 0.55525374, -2.91461, -4.336277, 1.6410733,
-7.087475, -2.0674257, -2.415526, -1.0980711, 6.34878, -5.668643,
0.5787549, 2.2730868, -7.631411, 5.782113, 7.793034, -3.2860532,
-8.504571, -5.9797106, -1.1003709, 7.0175314, 6.7794957, -10.7942095,
2.5138288, -1.103467, -5.744746, -6.994371, -3.3348162, 9.465204,
5.9604745, 8.733884, -2.686698, -8.209682, -5.3875837, -1.3294263,
6.79384, 7.224679, -9.72781, 0.04456152, -1.571112, -4.649286,
-7.1504345, -2.7568176, 9.364344, -2.4045875, -4.893637, 6.9074845,
-3.6092815, -3.6661725, 0.11472879, -7.916554, 5.407157, -7.977337,
9.305996, 4.5012984, -6.449662, -9.911987, 5.4825478, 3.33097,
5.411543, 4.8542705, -9.326701, -2.4830413, -7.238049, 7.542187,
1.0561182, -3.9888587, -8.749615, -3.4393008, -2.9592185, 4.693101,
2.9826982, 7.803661, -0.075767934, -0.9949193, 8.798841, 4.793284,
2.1477382, -5.7754097, -6.9468164, -8.643013, 7.7490506, 2.1529267,
3.848948, -9.452447, 10.149739, 5.930292, -5.660139, -6.8535852,
3.431631, -6.876298, -8.200342, 10.574867, 8.880292, 5.0934186,
-5.2237315, -7.5476594, 2.3664553, -1.4575734, -7.253116, 0.033161078,
-9.014419, 8.3420105, -3.9291942, -2.2554843, -5.3423467, 7.832416,
-3.4623804, -4.8499155, 0.1691061, -5.9225793, 6.430973, -9.164601,
10.512101, 5.5853844, -5.845609, -10.130692, 5.7846375, 4.3735833,
-1.5416863, 9.468383, 4.5228963, 2.0758693, -6.035071, -7.4519644,
-8.580256, 7.048186, 2.0424216, 5.056368, -9.454541, 9.430702,
5.6969533, -5.23084, -5.9116616, -8.945927, 3.8676827, 6.476771,
2.6410658, 6.7011676, -0.25917503, -0.792824, -3.089307, -9.921444,
-7.980823, -8.4609785, 3.3356721, -3.5536969, -0.45147753, 7.4787836,
-2.9243064, -4.0912757, 6.2642255, -3.4626095, -4.831538, 1.3431888,
-7.621842, 5.961581, -8.498467, 8.395595, 4.415107, -5.790292,
-10.4054785, 7.048936, 4.813912, -2.778917, 9.093748, 5.10337,
3.0833814, -7.746098, -5.3869863, -10.204789, 7.217273, 2.4341223,
4.3581457, -10.0241995, 9.252511, 6.303752, -6.165773, -5.7687793,
4.351514, 6.815173, -9.606232, -3.8131764, -7.741267, 8.839895,
2.3243253, -3.5103111, -9.738171, -3.118203, -2.8291101, 5.1444764,
2.310695, 7.24956, -1.4608871, 3.7551436, -4.147409, 2.4504495,
0.23815703, -6.9479265, 8.995917, 5.6728506, 8.460836, 8.325179,
1.2729877, 7.4703703, -9.4889, -7.6368446, -9.458102, -3.7040796,
-5.7086415, -3.5125117, 0.84559464, -1.3262581, -4.170473, 1.7748331,
-6.720943, -3.862426, -2.3283498, -1.2468247, 4.1961794, -5.2385173,
-0.05049745, 1.2443483, -8.585557, 5.514104, 4.937252, -7.7224016,
-3.2009268, -7.1383014, 6.2409377, 2.6305573, -4.7713213, -8.91583,
-3.4721057, -3.8610115, 3.8568172, 3.3434324, 7.671831, -0.89840096,
-1.0084368, 9.45803, 3.222279, 2.1036878, -7.342878, -6.2834063,
-9.393092, 7.2432075, 2.3757913, 4.7674804, -10.428517, 9.164046,
6.316391, -6.2105484, -5.1276827, 4.421855, -3.4565558, 1.2602847,
-0.17221718, -5.4317155, 10.953742, 4.5780416, 9.32996, 7.897324,
0.8768959, 8.782503, -7.646628, -6.5011315, -8.279313, -4.6601276,
-2.6410017, -0.5928889, 3.6426384, -3.3224137, 9.240485, 9.296175,
-4.6633444, -0.2979113, -4.596583, -3.1229854, -7.574113, 2.2915874,
-0.4761658, -7.9376073, -5.4398007, -5.598192, -2.2760797, 5.1938677,
-5.986389, -8.487096, -4.3523455, -6.886979, 8.092477, 5.715118,
1.9118882, 7.695259, 6.473213, -6.8080316, 8.915628, 0.5666238,
3.1501644, -2.9981954, -1.1325172, 2.008343, -5.517249, 9.069858,
5.88303, 8.792182, 8.757541, 2.6458778, 7.457435, -7.819066,
-6.2242823, -9.037472, -4.0031137, 5.579616, 8.497304, -3.5416036,
-8.586126, -5.3845096, -2.0524364, 6.4661465, 6.6132574, -9.841716,
0.74027497, -1.6889163, -5.8314657, -7.460399, -2.7596688, 8.083166,
4.3527427, 6.0271482, -9.206521, -3.311398, -7.413142, 7.8210654,
3.250618, -2.2832994, -8.776046, -2.9448254, -3.735947, 5.2235875,
2.8963916, 8.05623, -0.259246, 5.018957, 5.391271, -9.211078,
-1.2620368, -7.3491178, 7.154786, 3.4566057, -2.0927687, -8.830578,
-3.5371754, -4.1879354, 4.4890885, 2.6118057, 7.873634, -0.32276917,
-4.1249967, 0.81904906, 3.5953918, -2.3744998, 9.769271, 8.280519,
-5.049646, 0.13065003, -4.287837, -4.0828934, -8.6981325, 2.1698716,
0.2108001, -8.002028, -4.0958333, -2.0755565, -0.5306399, 5.0815988,
-1.876862, 10.766264, 8.137187, -5.572536, 0.98933023, -5.2963614,
-5.4277115, -9.604076, 2.3256824, 1.168138, -9.100111, -4.852838,
4.225499, 5.593318, -7.479273, -4.2638917, -7.837129, 6.3291936,
1.3093927, -2.70517, -8.347596, -3.4657502, -2.8516154, 4.2563205,
2.4483032, 9.274266, -0.7283012, 1.545727, -3.2868643, -0.16896878,
1.0575109, -7.177785, 9.206026, 5.52626, 8.977994, 7.1161714,
2.7958372, 8.019658, -9.402279, -6.156659, -9.071391, -3.3809807,
1.330739, -5.4025116, -7.370866, 9.087397, 10.029112, 6.2906475,
-4.8434324, -8.334834, 3.7770102, -1.5151335, -7.7122927, -0.16044149,
-9.368845, 8.222989, -5.566824, 4.3269367, 5.381381, -8.832622,
-2.3699853, -7.959317, 8.377144, 3.4916098, -4.0061197, -8.68004,
-4.235813, -3.7249868, 5.9938226, 2.5358493, 7.996413, -0.17677659,
-2.2257104, 8.131667, 5.282382, 3.458679, -6.1569014, -7.9436684,
-9.177292, 8.210361, 1.5269316, 4.472125, -9.046066, 8.749346,
6.607185, -8.022105, -7.080572, 3.153788, -3.3519833, 0.3264804,
2.2937775, -7.525966, 9.457928, 4.3647757, 7.035379, 8.104771,
2.055648, 8.238099, -8.304486, -3.911533, -9.276176, -3.1363063,
-4.920706, -3.8736558, 4.225077, -5.9477005, -8.256241, -4.002434,
-6.894731, 9.038625, 5.444038, 3.0952396, 7.7615314, 6.33773,
-6.396027, 9.132701, 1.1320819, 3.3097246, 5.6325603, -8.424307,
-3.128758, -8.842619, 7.5920424, 3.3562922, -3.1874518, -9.322064,
-3.749167, -4.4526134, 5.332696, 2.8084404, 6.506545, -0.76617354,
-8.392743, 3.0715528, 5.952484, -0.06451188, 5.5924335, 0.49651757,
-0.5612018, -2.0961676, -8.642392, -7.469147, -9.803516, 3.5697825,
-2.8683054, 0.35975072, 7.5212173, -1.4749266, 8.215104, 4.5043173,
1.4713941, -8.186203, -6.937986, -8.923551, 8.382937, 2.4638684,
3.4445207, -8.290445, 10.252921, 7.056321, -5.9117565, -7.035108,
-2.533477, 9.178236, 5.7252293, 1.2743219, -6.1906013, -7.029902,
-8.872952, 7.7958965, 1.2363948, 4.429138, -9.471793, 9.742913,
6.8512707, -4.0345078, -6.809919, -7.823049, 5.065392, 5.290247,
0.63523513, 5.9107647, 0.014284824, -1.3104668, -1.1493326, -9.217338,
-7.828087, -8.747226, 2.2575328, -3.4773304, -0.9254577, 8.436293,
-2.2592869, 8.529249, 5.269599, 2.1882794, -6.3106236, -6.439369,
-9.418624, 6.931396, 2.5454059, 4.588711, -9.6029415, 9.480327,
7.543218, -6.167318, -5.9805326, 6.1149793, 6.8459077, -2.110969,
-8.474929, -5.554602, -1.2771024, 6.1582346, 7.0115905, -9.565679,
1.2832972, -1.1145235, -4.522884, -8.335326, -3.0255427, 8.9668665,
-3.6718702, 9.315019, 4.78526, 2.1632748, -7.773351, -7.6368494,
-8.101121, 7.2958345, 2.4993508, 4.1812744, -9.567481, 10.054996,
6.2876215, -5.685933, -6.6870933, -5.7954, -1.6725105, 4.310963,
-6.253503, -7.3369474, -3.4563577, -6.778474, 8.723291, 5.9888663,
3.0669887, 6.9559736, 6.7690597, -6.44576, 7.557287, 0.802044,
-6.661697, -3.9807005, 1.4231863, -2.262114, -5.1534, 1.8285384,
-6.483258, -5.2206492, -4.655272, -1.1944115, 6.089738, -5.1663413,
-0.039524186, 1.5602927, -8.263261, 4.313824, 4.984086, -7.3731494,
-3.3767862, -7.16793, 6.8706536, 2.1437478, -2.6608648, -8.994859,
-4.06371, -2.7033703, 5.710302, 3.2024324, 7.285916, -0.9131274,
4.086566, 4.4806833, -8.50511, -3.1105642, -5.318451, 7.735624,
2.179424, -4.1213174, -10.045034, -4.498007, -3.4158742, 5.5912538,
3.536332, 7.904621, -0.7819189, -6.189343, 4.868824, 4.6346517,
1.0282526, 4.9735303, 0.955695, -0.5392574, -1.3690464, -9.567587,
-7.2633843, -9.444222, 1.497462, -3.8886628, -0.5090997, 8.207035,
3.4083495, -6.095262, -8.868845, 8.259253, 10.173641, 5.4926534,
-2.7677965, -8.245627, 4.798068, -0.7216982, -7.6551323, -0.46003926,
-9.550865, 8.48623, -4.770391, 1.9849887, -6.0645876, -8.503481,
8.312074, 9.535605, 5.473759, -4.350741, -7.9140277, 3.8927314,
-2.0294127, -6.8214455, 0.3039426, -9.805539, 7.110099, -4.9258337,
3.173442, -2.8613362, -0.12305979, 1.8550614, -6.1555634, 9.279399,
7.253287, 8.051031, 7.648967, 1.0952977, 9.104208, -7.0477176,
-4.911397, -8.23869, -3.6317277, 6.833405, 8.811196, -3.8811748,
-8.244341, -5.9863076, -1.0069988, 6.5192394, 7.1605563, -8.954353,
0.498405, -2.2388403, -6.03448, -7.1252975, -2.5184836, 9.24437,
3.5908628, -4.464687, 1.5376401, 2.3281648, -6.0278716, 10.504975,
5.104253, 8.231999, 7.9251943, 2.2624583, 8.161694, -7.853712,
-5.86587, -9.794269, -3.1302383, 5.3768597, 7.6174517, -3.266603,
-8.043941, -6.6116776, -1.7887288, 5.6111846, 7.401607, -9.637772,
-0.9461962, -1.1204709, -6.3363743, -6.5112653, -3.2818398, 7.9857187,
2.6888497, -2.728406, 0.20310779, 0.5383021, -6.632832, 9.894585,
5.3922334, 8.340812, 9.062293, 1.3586807, 8.462381, -7.7735357,
-7.222277, -8.759272, -4.231969, -1.7831964, -4.8443604, 5.2202754,
-2.9694865, -6.038544, 1.1015611, -7.867026, 6.158292, -7.5891347,
9.073615, 6.1877747, -6.329059, -8.662321, 6.053622, 3.548802,
1.2545781, -6.4568815, -7.9937572, 8.099265, 9.441938, 6.8244877,
-3.8989751, -6.038676, 2.5166411, -0.45584014, -7.6801963, 0.44388232,
-9.003315, 7.6731987, -4.664103, 2.9124088, -3.823679, 0.5715246,
1.60012, -5.9340825, 9.258082, 5.3639193, 9.147089, 8.787346,
2.0464196, 8.109093, -8.925157, -7.0919867, -8.6290655, -3.0888085,
-3.1115754, 0.23376566, 4.0220566, -3.022098, 9.498671, 9.223041,
-5.553088, 1.122992, -5.023442, -3.800358, -9.333099, 3.521869,
0.2921857, -9.222359, -4.600466, -3.0540109, 9.151378, 3.9550178,
2.2589467, -8.071436, -6.159701, -8.507509, 7.5027437, 2.7101839,
5.327284, -8.878251, 8.109585, 5.753149, -6.190591, -6.345237,
5.7074914, 8.182782, -3.5225537, -7.4809294, -6.1544867, -1.2058327,
6.200589, 7.4123263, -10.368027, -0.19459817, -0.8300905, -6.293927,
-8.287121, -3.2838035, 8.317871, -7.1759152, 3.7935057, 5.837164,
2.2044828, 5.9661665, -0.27485988, 0.55343676, -2.516669, -9.585263,
-8.444478, -8.885387, 4.5291348, -3.3630052, -1.2223843, 7.8984685,
-3.2449925, -4.3616467, 6.191565, -1.5569112, -4.3569446, 0.2923821,
-7.984527, 6.8068733, -8.908816, 9.768419, 6.1139574, -6.3646674,
-9.921708, 6.297631, 4.615474, -2.8209207, 0.951452, 4.5492587,
-1.9455223, 9.370822, 7.940664, -5.014959, -0.74886584, -4.4947352,
-5.172884, -9.596402, 1.77365, -1.6107184, -8.480269, -2.8728454,
-3.8775504, 0.5036239, 3.911658, -2.5521069, 9.911842, 8.438393,
-5.0094323, -2.3856826, -3.8093004, -4.51376, -9.950676, 2.4122903,
0.87446874, -9.3036375, -5.088441, -6.300146, 3.2892234, 5.3445845,
1.2739036, 3.4519038, -0.31120998, 0.76854587, -1.201163, -8.859153,
-8.113416, -8.8726635, 2.245678, -4.7002077, -0.73699605, 7.127852,
-6.376377, -3.0489485, 0.17501748, -1.391076, -4.1348014, 2.8309596,
-8.782946, -4.581797, -2.5248644, -0.038545046, 5.3591876, -7.3201113,
0.7184868, 1.4035378, -9.903758, -6.182672, -4.4415336, 0.2717909,
-1.9183177, -3.4222121, 2.251976, -5.879289, -4.1995697, -3.1686478,
-1.9382004, 4.4413214, -7.1154194, 0.47167742, 2.2043989, -10.177825,
3.933674, -2.786138, -0.03869759, 0.73994607, -7.0283275, 9.2492695,
5.4770436, 9.251734, 8.659676, 1.4343324, 7.060357, -8.245021,
-5.8748665, -8.920862, -3.7220647, -2.535542, 9.786597, 4.719838,
2.0783808, -7.1341558, -6.9199715, -8.622867, 6.1264052, 1.0785705,
4.6817365, -9.468704, 9.269409, 6.6617565, -5.5099106, -6.7413325,
0.5295963, -5.988908, -9.278706, 7.5647693, 9.414186, 6.72317,
-3.9931645, -8.367051, 3.573107, -1.4563581, -8.502869, -0.5967383,
-8.947333, 8.411583, -4.7067585, -6.4694366, -4.0592566, -0.02747621,
-1.4848416, -5.25945, 1.8932502, -7.4095354, -4.336433, -2.8418114,
-0.92183524, 6.0391283, -5.3943624, -0.1701469, 1.0060508, -9.800423,
-2.4518857, 10.634547, 3.3327932, 2.4535518, -8.008529, -7.210462,
-8.076062, 7.368519, 1.2678789, 3.660739, -9.112592, 8.88694,
6.800374, -5.7213173, -6.819621, 5.7821717, 9.293605, -3.4206612,
-8.529012, -5.1829605, -1.7259437, 6.664143, 7.1846123, -9.808966,
-0.4231693, -0.9587371, -6.367133, -8.378828, -2.9905443, 9.9801855,
-2.6846278, -6.0076795, 6.8362236, -1.8461293, -4.7950225, 1.1619636,
-8.279217, 6.363989, -8.784355, 9.204046, 4.912641, -6.3012147,
-9.621855, 5.390714, 3.8591661, 1.610099, -6.3241105, -7.8378124,
8.559829, 7.6394277, 5.8793926, -3.268293, -7.6702166, 3.984391,
-1.3929454, -8.454556, 0.26424423, -9.896227, 9.715276, -4.4250455,
-4.9943314, -1.4973893, 4.400318, -4.9877715, -7.3144164, -4.32594,
-7.5659194, 8.547333, 4.6053386, 2.9039073, 7.745804, 7.246387,
-6.404395, 8.137119, 0.18394317, -3.4157526, 8.779237, 5.8081937,
1.7914557, -7.931827, -7.0521297, -9.029234, 5.4357023, 1.9842938,
3.9997973, -9.1009655, 10.692467, 7.4374485, -5.94144, -7.1380687,
-7.3939314, -3.3897498, 0.20379254, -2.1521482, -4.490426, 3.1170623,
-7.5848575, -3.9061894, -3.1844325, -0.34805503, 6.183375, -4.712498,
0.161794, 2.3163497, -9.002344, 3.7374039, 5.5830865, -8.078554,
-3.0400338, -7.2071133, 6.7649727, 0.985677, -3.6787703, -8.68457,
-3.9734416, -4.307356, 3.3211694, 2.3912873, 8.372614, 0.3236367,
-6.8252783, 3.2713268, 5.3724976, 1.7412783, 5.8525457, -0.5645517,
2.494533, -2.343139, -10.249849, -7.6786547, -7.889654, 2.1428719,
-3.8989604, 0.9255194, 8.138826, 2.387978, -5.8688974, -8.088347,
8.177597, 9.886751, 6.122665, -3.8567128, -7.99532, 3.5794904,
-0.36275488, -7.617838, -0.88014406, -9.587701, 8.311494, -3.96395,
-5.7719502, -5.0465727, -0.24244906, -2.1085618, -3.510202, 3.4344873,
-7.2832375, -4.2752824, -2.6237266, -0.065285504, 5.054372, -5.839666,
0.9673271, 2.1990573, -8.938285, 3.0680313, -6.7406898, -8.042734,
10.122257, 10.230826, 5.4034476, -4.085298, -8.288173, 4.0271387,
-0.71650267, -8.146461, -0.048974838, -10.726179, 9.195769, -5.1830626,
3.993193, 4.9056664, -8.669283, -5.351445, -8.173521, 6.6489983,
2.8389108, -3.945908, -8.879413, -2.70076, -2.8002875, 5.2237134,
2.4012387, 8.124377, -0.71942997, 5.797979, 8.451765, -3.1569724,
-7.939113, -4.9629064, -3.0490649, 6.432976, 7.8170915, -10.443962,
-0.37049094, -2.9582937, -4.4449167, -8.1068, -3.0789664, 9.280936,
-3.3179684, 0.70453, 4.369247, -2.8519983, 8.067081, 8.606615,
-3.9701643, 0.07591838, -4.2081995, -3.520472, -10.053343, 2.0335429,
0.15001135, -9.279389, -4.356537, -6.538746, -3.427287, 0.261455,
-0.5399124, -3.9166677, 2.1619463, -6.896721, -5.2567205, -3.4615004,
-1.7143478, 5.8035793, -4.7937636, 1.8465989, 2.2949271, -8.720401,
6.1536927, 7.476487, -3.3591337, -6.50047, -4.727347, -1.0835514,
7.080992, 6.954867, -10.2167425, -0.3085035, -1.7394806, -5.853248,
-6.7569003, -3.5804229, 8.2962885, 1.8387364, -6.510319, -8.95315,
7.572491, 9.040798, 5.9540133, -3.4641619, -8.535297, 4.8597326,
-2.5350986, -6.641418, -0.25348508, -8.71052, 8.2422695, -4.0267115,
-3.6491017, -5.4981084, 6.589515, -3.6566806, -4.428404, 0.10165893,
-7.729163, 6.6411552, -8.335751, 9.730973, 6.06017, -5.6892605,
-8.876964, 5.987327, 3.8364024, -2.6069791, -3.1785936, 7.8584776,
-3.472479, -3.9913135, 1.3784194, -7.5997505, 4.997061, -8.517596,
9.616747, 5.13986, -5.7160645, -10.415069, 5.5028305, 4.213505,
5.241862, 5.0386086, -9.830211, -1.3995483, -8.380678, 6.5207424,
3.2385666, -3.651182, -8.773206, -3.4424005, -2.7378254, 4.358701,
3.815116, 6.249195, -0.023850432, -7.123532, -3.5836127, -0.15333816,
-0.97224075, -4.8184977, 1.9449011, -7.4166856, -3.4740353, -2.029845,
-1.7506025, 5.741973, -6.50323, -0.59626466, 1.6963794, -9.28693,
-3.0399003, -3.4655375, 6.2951245, -2.7745023, -4.4256015, 0.39209762,
-6.492819, 6.13206, -8.842573, 8.824526, 4.819379, -7.1746526,
-9.881001, 5.471563, 3.9512546, -3.4712744, 0.09602489, 2.0221164,
-3.7386568, 9.978097, 10.662385, -4.3324795, 0.17151284, -3.9973664,
-3.1253262, -7.7584934, 1.7804968, -1.3464102, -10.439157, -3.501754,
-3.3052764, -1.3656846, 6.4531245, -4.843668, -8.979692, -4.472211,
-6.363669, 8.9002075, 5.7582684, 1.6110799, 9.730644, 5.9746366,
-6.496616, 7.930514, 0.61693484, -1.6678715, -4.961596, 6.583269,
-2.9299457, -5.271394, 0.04676263, -6.5040593, 6.323234, -7.773953,
9.964999, 5.5449886, -7.233894, -10.0788145, 6.2900596, 5.504322,
2.2627058, -3.2035701, 0.56909966, 1.4473436, -7.6278443, 9.609736,
4.671221, 8.511852, 8.270602, 2.021765, 7.841407, -8.225759,
-7.1524177, -8.290267, -3.637092, 1.6055322, -6.4472523, -9.019311,
9.107693, 10.369032, 6.5918713, -3.5516987, -8.3477745, 2.6458209,
-1.3428779, -6.7146096, -1.1654793, -10.050792, 8.274796, -5.5136585,
3.502721, -3.412777, 0.6050357, 0.5792995, -6.6626415, 8.552426,
6.751999, 8.799394, 7.73915, 1.3078451, 10.492445, -8.98148,
-6.380536, -8.804368, -3.0732443, -3.7702827, -0.1114695, 3.2487042,
-2.923425, 8.830721, 9.257804, -4.94533, -0.4899621, -4.241793,
-4.4647846, -8.695369, 2.0873861, 0.63087165, -8.560356, -4.026999,
-4.88389, -0.88496184, 5.2674894, -6.462822, -7.444053, -4.8682423,
-5.8984337, 8.530783, 6.9856153, 3.462413, 7.730312, 6.429416,
-5.972759, 6.4415026, 0.869676, 2.5730634, -7.184892, -6.9593353,
9.1630535, 10.205414, 6.061747, -3.534373, -7.4589295, 4.537397,
-0.45679232, -6.9486976, -0.3170225, -9.712452, 8.419233, -4.171211,
3.602609, -3.9889023, -0.31950787, 0.84786475, -6.086578, 8.932712,
5.3376584, 8.132355, 8.680018, 1.7518805, 9.454413, -7.1600266,
-6.137075, -9.801369, -4.9535217, -3.5577536, 1.2482433, 3.2518733,
-4.406122, 8.449739, 6.9737124, -3.4858916, 0.09514316, -4.6296844,
-3.9863534, -9.129727, 2.62594, 0.90025246, -9.173369, -5.0429583,
-2.7686276, 9.554475, 2.645899, 2.7773056, -8.097426, -7.133818,
-9.622097, 6.417246, 2.834879, 3.8340607, -9.3457575, 9.365353,
6.9827814, -5.6994424, -7.2615952, -1.2844939, 8.143581, 4.154845,
1.4702846, -6.25318, -7.0865746, -7.964908, 6.8520794, 2.217596,
3.5767086, -8.086706, 8.566878, 6.8657274, -5.309574, -6.073841,
-2.8616307, 7.9288216, 4.687873, 1.229557, -6.5481124, -7.5237064,
-7.7533736, 6.7752457, 1.7968571, 4.7309136, -10.449915, 9.557419,
7.563853, -6.878456, -6.2342567, -5.263654, -2.8599713, 4.97467,
-5.637753, -8.250962, -4.6414886, -6.7172327, 8.360587, 5.930358,
2.7243288, 7.4575777, 6.606263, -6.353049, 7.6062603, 0.66027963,
4.7961426, 7.690967, -5.031048, -8.409075, -6.5058017, -1.1889321,
6.360673, 7.4266853, -9.934143, -0.7610624, -1.292315, -4.2122865,
-7.3162436, -3.5458438, 7.858547, 1.0267531, -3.5704198, 0.605455,
-0.24926455, -6.9878607, 11.320345, 5.288729, 9.30165, 7.4410853,
0.49478754, 8.318304, -8.416073, -6.3602576, -9.175703, -3.3084056,
-2.558006, -2.5128856, 7.081894, -1.2064323, -3.404506, 1.0231831,
-6.9705305, 6.0183334, -9.7254505, 9.750372, 6.2923675, -6.4454346,
-9.601191, 6.905342, 3.5003705, 1.3220756, -6.8694468, -8.510462,
8.946285, 9.440392, 5.6988664, -3.087522, -8.869879, 4.025953,
-0.36962637, -7.2225995, -0.23822875, -9.682037, 8.91167, -5.8204827,
-3.1857746, -0.49799952, 4.1263027, -2.2091746, 9.28549, 9.634734,
-5.6361666, -1.2196128, -3.592513, -3.082383, -9.959493, 3.0468225,
0.08397685, -9.905409, -5.2191243, 3.3589704, 4.9422135, -8.647585,
-2.9111512, -8.7201805, 8.068234, 2.3944905, -3.3557525, -8.448384,
-4.332007, -3.1768844, 4.335171, 2.1235394, 8.145668, 0.4884702,
-2.766989, -3.407097, 6.3119, -2.3753085, -5.6938148, 1.6799136,
-6.4449677, 6.1121435, -8.667847, 10.017738, 7.047049, -7.259952,
-9.342138, 5.6710963, 4.123204, -7.65744, -3.7658496, 0.49597245,
-1.9330617, -3.7139452, 2.893357, -8.335415, -4.6910143, -3.2111628,
-1.5365325, 6.2841516, -6.142203, 0.09952838, 0.44558772, -8.626199,
-2.2063112, -4.7025876, 7.4232407, -3.732645, -3.9558215, 1.6248528,
-6.6800776, 7.671229, -8.87925, 10.407852, 5.370632, -5.162302,
-9.89494, 6.0377154, 5.0513973, -3.8755324, 0.14477552, 5.1488533,
-2.0800295, 10.238906, 9.822927, -4.3019834, -0.5366874, -4.121455,
-4.996131, -9.409603, 2.67016, 0.14778876, -8.052388, -5.226062,
-5.1261826, -1.564502, 5.880732, -6.9979844, -9.24797, -4.61149,
-6.778011, 8.49243, 6.0772257, 2.9537423, 6.7969065, 6.7273407,
-5.9651823, 7.6272974, 1.9074805, -3.2817101, 9.140212, 5.61428,
2.6159914, -7.9789777, -7.5728493, -8.179788, 6.635782, 1.865057,
4.546488, -10.266151, 9.47196, 5.715035, -6.1741753, -6.139653,
2.5644977, -5.1359153, -0.06633971, 1.0695713, -7.246635, 9.40267,
6.8328443, 9.506119, 9.047026, 2.231789, 9.254742, -8.736925,
-6.531381, -11.012853, -2.6630917, -6.715858, 4.912201, 4.7518167,
1.2862767, 5.046358, -0.6975746, 1.1234286, -2.2874522, -10.117289,
-8.614989, -10.41459, 2.8306983, -2.8998282, 0.042000093, 7.0973063,
6.795853, 8.318878, -3.9872746, -7.766106, -5.830612, -1.3537117,
7.4383707, 7.892561, -9.496402, -0.13007338, -1.1268231, -6.7386575,
-7.7793403, -2.8390987, 9.376489, -5.794828, -1.9871588, 4.850578,
-5.2274127, -8.847356, -4.621685, -5.770475, 8.79315, 5.769672,
2.4924915, 8.58096, 5.9088264, -6.7510924, 7.3716826, 0.4024563,
4.1401916, -4.4518223, -0.7311, 1.2007502, -6.875097, 9.616596,
3.8200884, 9.764698, 8.367152, 0.90766597, 7.402341, -7.0595636,
-6.5965085, -9.679383, -5.07963, 2.1140778, -2.4438727, 0.28639102,
1.4225165, -6.888175, 9.6073, 4.5906653, 9.106426, 7.647365,
1.9784329, 8.926066, -9.092928, -6.559487, -8.775328, -2.0246384,
4.370075, -3.2735786, 0.7746056, 1.0499882, -7.2703233, 9.053161,
6.091903, 8.054125, 6.1243086, 1.7990254, 8.338254, -8.135256,
-5.5703797, -8.64913, -3.3122544, 1.5814309, -3.2441728, 0.49527693,
1.234045, -6.8009343, 9.122573, 5.214939, 9.505115, 7.631415,
3.4158618, 8.739088, -9.123128, -6.1703253, -9.551627, -5.029461,
-3.6056268, -0.06385605, 4.541122, -2.083822, 9.380771, 9.410325,
-6.477093, -0.3498018, -3.2790587, -4.5995955, -9.778344, 3.7856688,
1.3269345, -8.643123, -3.6298332, 5.0939574, 8.024177, -4.063234,
-7.3582706, -6.061025, -1.99869, 4.945594, 7.333573, -9.973591,
-0.6169232, -1.5161208, -4.8388944, -7.75359, -3.0599797, 9.049512,
-3.7467897, -0.94188064, 5.185062, -5.656282, -9.530416, -3.9875932,
-7.5562263, 8.038759, 5.3288856, 2.3458004, 7.409633, 6.2649717,
-6.6889534, 8.599679, 0.6411012, -5.8547287, -0.900497, 3.3242447,
-5.7056537, -10.555997, -4.7868724, -7.1192384, 9.563363, 6.4400473,
2.084876, 8.175386, 6.085924, -6.242302, 7.00931, 0.09383524,
-7.296083, 5.600139, 4.531546, 2.5468814, 4.743694, 0.043028474,
-0.43702954, -1.4388938, -9.221301, -7.9504547, -8.871702, 2.7975183,
-4.078571, -0.2253555, 8.718969, -4.5975637, -0.88325626, 5.3083653,
-4.941938, -8.033586, -3.2116258, -8.140287, 9.061007, 6.8510227,
1.7309686, 6.809244, 5.477894, -6.778752, 7.513978, 0.9566668,
2.4231822, -6.413486, -8.834956, 8.448657, 10.046736, 7.326732,
-4.2462177, -8.475075, 4.028506, -1.4475707, -7.656813, 0.4212954,
-10.648779, 9.10922, -4.773524, -5.9260097, -4.40998, 1.0940176,
-0.206463, -3.4259899, 1.7263361, -6.9571304, -5.062306, -2.27209,
-0.56317794, 4.3946476, -6.819773, 0.08649154, 1.6376896, -9.917625,
-1.7141255, -3.6948562, 7.1352415, -1.4230419, -3.9902031, 0.7443875,
-6.941467, 6.914368, -9.05455, 9.281417, 5.2286234, -7.498894,
-9.792903, 4.9889383, 4.1742883, 3.169869, -4.007922, 1.0237681,
0.14897981, -6.8397155, 9.439343, 6.2227945, 9.141158, 8.0703745,
2.012682, 8.717649, -8.088993, -6.8013234, -9.063199, -4.2449756,
-2.0739307, 7.9151287, 4.1308827, 1.7999066, -6.9317307, -6.445639,
-8.713937, 6.388782, 2.2884388, 4.5888615, -9.196457, 10.154743,
7.2325983, -5.431792, -6.4126167, 6.479112, 7.6534333, -3.869274,
-8.308792, -4.9035606, -2.1021864, 6.955321, 7.360109, -9.8924885,
-0.24475123, -2.6006591, -6.011201, -7.8336973, -4.9060135, 8.720045,
-2.5705476, -5.5619273, 6.2129574, -2.4316103, -3.4068828, 0.9317122,
-6.2587776, 5.0932174, -8.819271, 9.535736, 5.3647037, -5.9668202,
-9.40592, 5.997864, 4.234467, -1.4793783, 8.86776, 4.171528,
2.701103, -7.303558, -5.601903, -8.36378, 6.981985, 3.5324159,
3.737451, -9.068843, 9.607702, 7.560072, -4.66016, -6.341098,
2.9568415, -3.2362366, -0.9384816, 1.3958215, -6.865675, 9.606834,
5.32923, 9.714269, 7.284442, 2.397324, 8.24183, -8.340963,
-6.2090406, -9.687426, -3.5032628, 2.8036015, -6.880088, -8.208478,
10.9479685, 9.239601, 6.443262, -4.4243646, -7.919682, 4.0068507,
-1.0605605, -8.273022, 0.047991443, -8.288692, 8.500453, -4.1510096,
-3.4480505, -0.11311919, 4.0034833, -2.4063876, 10.071398, 9.482707,
-5.2302895, 0.33373588, -3.3803742, -4.037463, -9.847592, 2.0643024,
0.717055, -9.938538, -5.8922954, -2.2345147, -3.8515575, 7.3411093,
-3.4291277, -4.7498984, -0.07172154, -6.89517, 6.510579, -9.453935,
9.4191, 4.5018177, -6.654846, -9.503846, 5.795791, 3.9265687,
-5.809618, -1.6301879, 4.6517982, -4.6458945, -7.3376956, -3.729404,
-7.8612924, 9.647591, 7.265412, 2.6812284, 8.35526, 5.6872687,
-7.574971, 7.3189807, 1.9973555, -2.5757453, -3.3516164, 8.258137,
-2.712888, -4.706647, 0.029863171, -6.8535376, 7.2919736, -8.682125,
8.072441, 5.91393, -5.3359065, -10.99965, 6.4327674, 4.4530053,
-4.2835584, 0.34015283, 3.9020565, -2.7601326, 8.875822, 8.207843,
-6.4304605, -0.146856, -4.2841344, -4.7763467, -8.336477, 1.58968,
1.0453644, -7.8337345, -5.7927012, 4.768312, 5.0623603, -7.316252,
-2.9631186, -7.7430553, 5.710993, 2.329158, -2.7965007, -9.476239,
-4.3058577, -2.9414358, 3.5584636, 3.49161, 7.371754, -0.82769924,
1.1476908, -6.9567814, -8.35341, 9.884954, 8.537631, 5.9120927,
-4.013634, -9.02361, 4.098605, -1.7037433, -7.6960087, -0.41282636,
-9.864766, 8.181477, -4.975327, -8.650614, 4.092699, 5.5283933,
1.6364843, 5.423928, 0.2556087, -0.11161814, -0.35864583, -8.796778,
-8.783621, -10.123988, 1.995363, -3.2383015, -0.7278956, 7.9196124,
-3.6498811, -1.277383, 5.7591558, -4.5999904, -7.1765075, -5.444967,
-7.6364923, 8.031346, 5.8335967, 2.4576766, 7.1163273, 6.226958,
-6.243892, 7.345196, -0.7360393, -6.513143, -3.5044694, 0.16269608,
-0.7511915, -5.117603, 2.3255236, -5.8528724, -4.857339, -3.1471846,
-0.5188648, 5.8292265, -5.761083, 0.6271198, 2.2925963, -8.294202,
-6.8854427, 5.6113553, 4.5485725, 1.9802318, 5.3589916, -0.2148592,
-0.6209149, -1.6485323, -10.189703, -8.166869, -10.140147, 2.4139724,
-3.9971576, 0.23914227, 7.0210047, -6.8623176, 3.7225342, 4.8428764,
2.5523992, 5.3141136, 0.01593558, 0.40774956, -2.0243561, -8.355147,
-8.211562, -10.160516, 2.41269, -5.1140747, -0.2844874, 7.4736395,
-5.604532, -3.9681902, 0.9644651, -2.1113997, -5.263048, 2.8095806,
-6.946636, -4.788543, -3.2815273, -0.090795964, 4.8709307, -4.856653,
-0.34574574, 2.2951658, -9.301024, -1.2673829, -3.7335098, 6.5521564,
-2.9258928, -3.0653977, 1.2184509, -7.41098, 5.045727, -8.233275,
10.318265, 5.0027394, -5.8218446, -10.144059, 6.180406, 2.3956714,
-5.3405147, -2.5213585, 5.813379, -5.0401125, -7.8826118, -5.4133205,
-6.8878894, 8.397876, 5.966007, 2.2368557, 7.186939, 6.668193,
-7.607595, 9.095286, 1.1255336, 3.5481706, -4.8472347, 0.023846913,
1.2046734, -5.716476, 8.916231, 3.4626772, 9.156235, 7.0423226,
0.54586244, 6.4548564, -7.660363, -5.9903107, -9.253414, -2.5210764,
3.0703356, -3.0082998, 0.31103656, 0.33804205, -6.8341904, 8.706574,
5.3119693, 9.653585, 7.33298, 2.0899327, 7.9778023, -8.412366,
-7.1373715, -9.249831, -3.628987, 3.8015141, 4.607977, -7.626828,
-3.6256306, -7.3261123, 6.8308187, 3.597894, -3.921337, -7.92848,
-3.6509526, -3.3197973, 3.2129076, 1.20911, 7.5898585, -0.38286725,
-7.032023, 4.293975, 4.464116, 1.5463665, 4.9189787, 1.1320794,
1.5360457, -1.0263114, -8.538913, -7.7968173, -8.83561, 3.7626123,
-3.5410314, -0.0072171967, 6.958094, -1.7994857, 0.027938241, 4.0009027,
-3.659896, 8.868945, 10.580285, -5.329778, 1.288467, -4.8527875,
-5.0406184, -9.501122, 2.1466618, 0.69974154, -8.654885, -4.7184916,
3.454109, -3.0952907, -0.15225683, 0.95801127, -7.258426, 9.882739,
5.865866, 9.137042, 7.8976803, 2.2483406, 9.560522, -8.429716,
-5.983291, -9.855742, -4.1011763, -2.1395311, -4.839554, 5.6861243,
-2.9827437, -3.9619374, 0.008490088, -6.6151366, 5.449135, -8.847075,
10.109217, 4.98049, -5.999384, -10.109841, 6.399786, 2.4706295,
5.5232964, 9.032656, -5.3622355, -7.897086, -4.620704, -1.5231267,
6.0767875, 7.305207, -10.108142, 0.23654069, -2.1301467, -4.7817717,
-6.606407, -3.3410697, 7.8876514, -3.0643032, -0.27358758, 4.1953382,
-1.7059952, 9.290492, 9.339559, -5.1341157, -1.6747315, -3.9680207,
-4.0919356, -9.621006, 1.3830408, -0.56284404, -8.435493, -3.0463033,
-5.336251, -2.6979253, 1.1803293, -1.3013769, -4.7412033, 1.6557468,
-6.8443627, -3.8642282, -1.6916441, -0.42328525, 4.651363, -5.270161,
-0.41395366, 1.5795115, -8.895851, -3.054036, 0.6136606, 5.124844,
-2.9114904, 9.776761, 9.8038645, -3.3243542, 0.046305962, -4.7133646,
-3.70964, -7.956252, 1.9655026, 0.64233756, -8.110315, -3.7151248,
-9.045556, 4.5174246, 5.2663026, 1.6146221, 5.260774, -0.42538747,
1.8605877, -2.4798274, -8.977852, -8.244392, -9.606462, 2.5062363,
-4.2752843, 0.57127076, 9.12478, 2.0953252, -6.799844, -9.402889,
9.229826, 8.936289, 6.6267295, -4.656859, -6.8432293, 3.0572276,
-1.0434384, -7.004934, -0.4959101, -8.715502, 7.3259797, -3.807918,
4.2081013, -3.6776476, 0.9971473, 1.7909048, -6.066342, 8.977124,
6.1856556, 8.790507, 8.376648, 0.45038816, 8.750372, -8.67716,
-4.923026, -8.659293, -3.3254929, 3.763426, -4.657741, 1.1352983,
1.275648, -6.816874, 9.292622, 6.621482, 9.303462, 7.9026957,
1.7916212, 8.491111, -8.588385, -7.5536675, -7.66593, -3.384666,
-2.525217, -4.7542768, 6.4432487, -2.0817072, -3.48921, 0.7337501,
-6.412576, 6.2943025, -8.463833, 10.372319, 5.495472, -4.5852184,
-10.896445, 5.948609, 4.0485835, -7.8423376, 4.6608386, 5.4064636,
1.9468511, 6.323372, 0.09081407, 0.6588695, -1.299302, -9.149226,
-7.803552, -9.371842, 3.5658472, -2.6515992, -0.8838732, 8.158344,
4.2039385, -3.9312928, 0.31762668, 0.16197652, -5.7285233, 9.520709,
5.208637, 7.4567065, 8.907636, 1.0986236, 8.02359, -7.9793086,
-6.9179335, -10.140911, -4.980157, -3.106116, 1.6560948, 5.6230216,
-2.8500783, 8.869486, 7.943957, -3.3930764, -0.36280727, -3.187908,
-4.8220687, -8.185475, 1.7012044, -0.7806229, -9.11638, -4.7993135,
-2.000701, -5.5278935, 6.202436, -2.5142913, -5.068937, 1.9776708,
-6.32476, 4.9439454, -9.161647, 8.137694, 4.8970814, -5.4892144,
-10.179284, 6.631787, 3.071513, -7.5926504, -4.295027, -0.056319993,
-1.7956929, -4.293118, 1.9065945, -8.595633, -3.63357, -3.4236832,
-0.7111275, 7.155377, -6.6500936, -1.4865125, 1.6480885, -8.30032,
3.54092, 5.851194, -7.5769525, -2.952939, -6.730501, 7.129283,
3.0606718, -4.9350386, -7.0356884, -2.733533, -3.6955714, 3.2116613,
2.9527693, 6.733507, -0.24430402, -8.197584, 3.3778021, 5.794003,
1.4507008, 5.6984425, -0.3831038, 0.2591114, -1.7088314, -9.548452,
-7.257438, -9.136164, 3.0782871, -3.3631892, 0.7905235, 8.353035,
4.2834597, 4.889479, -8.005233, -3.907081, -7.2872753, 7.741209,
1.850212, -3.9562695, -8.884951, -3.5829282, -3.603332, 4.8956175,
3.9852352, 7.388853, -1.7563275, -7.092982, -4.675233, 0.97068995,
-2.1652555, -3.7089984, 2.5608718, -8.418187, -4.6314025, -1.8368865,
-1.5654162, 5.378436, -5.6830816, 0.8330621, 1.6722142, -9.489249,
3.4892967, -2.7155862, 0.45351714, 1.652148, -4.4535027, 9.810362,
5.537186, 9.2956915, 7.6408505, 3.629577, 8.563535, -8.421204,
-6.665325, -8.3274, -2.8863623, -5.427958, -1.5622638, 4.734869,
-5.0960703, -7.609197, -4.708496, -6.866977, 8.916884, 5.8573074,
2.9930995, 7.817991, 6.8317246, -6.692792, 7.1912374, 0.57154065,
5.262226, 8.73893, -5.6140485, -7.2151227, -4.7872787, -1.7605251,
7.2513323, 7.1704435, -9.817255, 0.4526162, 0.5788225, -4.61674,
-7.111901, -3.8866596, 8.958707, -2.813345, 8.797866, 4.7953725,
1.6380457, -6.000598, -7.506335, -8.969138, 7.0157113, 3.0351849,
4.29904, -8.866019, 8.358305, 6.835788, -5.130476, -6.305902,
5.891887, 5.7640324, -3.3589296, -8.249672, -4.663495, -1.1847094,
7.1471624, 6.4560943, -9.134088, 1.6890455, -1.1168691, -5.0045996,
-7.4839745, -3.6149647, 9.391028, 6.866224, 7.225486, -4.0732226,
-7.246834, -4.7220097, -2.3477433, 7.1236258, 7.946098, -10.517525,
1.3339994, -2.6715803, -5.9951844, -7.2673907, -3.845193, 9.042868,
-7.8833556, 4.288839, 4.6174636, 0.90564966, 4.4335847, -0.070133045,
1.1936426, -2.2171803, -9.927487, -7.490389, -8.522953, 1.6829754,
-3.8310297, 0.3696054, 8.07989, -2.54215, -4.3672547, 6.033971,
-2.9055083, -5.440991, 1.2955346, -5.946076, 7.998486, -8.445169,
10.289481, 4.1984572, -4.9737725, -9.346198, 5.970475, 5.0031734,
2.710227, -4.1143517, 0.042586546, 0.37313628, -6.5459414, 9.931254,
4.7649717, 9.089449, 6.7052155, 1.565063, 8.206842, -7.0577483,
-6.2727046, -10.636325, -4.892616, 5.968736, 5.9554305, -8.22714,
-3.519929, -7.2828197, 6.755628, 3.3526254, -4.061783, -8.8080435,
-4.1690445, -4.1875997, 5.1110473, 3.1259706, 8.590562, -1.801614,
-3.7403152, 8.186006, 3.2124164, 1.7845849, -6.3773475, -5.8284597,
-8.786462, 8.463553, 1.0562292, 2.9690838, -9.627193, 9.667043,
6.6259665, -7.200427, -6.4258847, -3.7747812, 1.9886752, 4.322331,
-2.4699342, 9.687282, 9.539712, -6.6325326, -0.2226969, -4.008959,
-3.4061434, -9.22889, 1.0387287, 0.35464287, -9.533102, -5.2788205,
1.6931682, -6.5703983, -8.732127, 7.870187, 8.654362, 5.7754145,
-5.333079, -8.013312, 3.5103102, -2.367976, -7.276286, 0.4046491,
-9.153305, 7.9595604, -4.410391, 2.9368727, -4.6161427, -1.1444361,
0.7727951, -6.2078094, 9.354939, 5.4012413, 8.820107, 8.612536,
1.8246872, 8.035627, -8.426938, -5.6748, -8.107036, -3.933094,
1.4842756, -7.063414, -9.23983, 9.558845, 9.94292, 6.4838047,
-3.079279, -8.871186, 4.8517175, -0.1306831, -7.0443, 1.1507975,
-10.471866, 7.8192945, -5.3391128, -7.5223713, 4.326046, 4.7041316,
1.3972481, 4.549593, -0.7855242, 0.9067149, -2.1601439, -9.114869,
-8.8007965, -8.136038, 2.1862192, -3.869447, 0.40284312, 7.7555847,
2.1353018, -6.36406, -8.706733, 8.408601, 9.888937, 6.32598,
-3.8953953, -8.729314, 4.597116, -0.7316582, -7.94628, 0.6048156,
-9.660529, 7.6390057, -5.3561926, 3.696573, -2.6343684, -0.5735544,
0.9100202, -7.4346104, 9.231125, 5.328625, 9.228608, 7.9284415,
2.3943846, 8.929893, -6.7216997, -6.30105, -9.75417, -3.3211348,
3.7934284, 5.690039, -6.7971964, -2.2836373, -8.287619, 8.057003,
2.2442868, -2.4199, -8.16416, -5.840553, -4.199189, 5.2531934,
2.8573096, 7.0276256, -0.9992142, -2.1083896, 9.9938345, 5.2871222,
2.014911, -7.332483, -6.391353, -8.562888, 7.950158, 2.4669206,
4.8961387, -9.962975, 10.320373, 6.7871723, -4.300535, -6.845932,
-3.035015, 0.10054695, 4.084149, -2.980412, 9.972161, 10.252038,
-4.4053, 0.5903257, -3.3363104, -3.917311, -10.059761, 3.3819668,
1.316668, -9.134141, -4.686559, 4.2775326, 5.587619, -9.168268,
-1.9287819, -8.126611, 8.052261, 2.3024378, -3.2406402, -9.075308,
-4.4915504, -4.7176657, 3.8533967, 2.5244133, 7.8925705, 0.15156402,
-4.010436, -2.279318, 5.4815326, -4.474762, -8.810509, -3.8325553,
-7.4966345, 10.305992, 6.8178735, 2.5332506, 6.8882284, 5.9936438,
-6.5065875, 9.218241, -0.07102947, 6.3312626, 8.844105, -3.2383707,
-7.7585335, -4.9934807, -1.1410967, 6.887816, 5.7978597, -10.948377,
0.91832644, -2.7054753, -5.276767, -7.8491273, -3.945029, 9.102949,
3.028013, -3.6841736, 1.1514215, -0.120368235, -7.6788, 8.980712,
4.3589106, 8.492597, 8.248297, 1.3344754, 7.438706, -8.181614,
-5.4765625, -10.281936, -2.9192307, -7.356921, 5.09706, 4.750169,
0.62652415, 4.5891986, 0.003952122, -0.842956, -1.1256717, -9.568052,
-7.0479236, -9.841759, 2.5858855, -2.9273381, 1.1396087, 7.675223,
-4.698212, -2.619177, 5.060047, -4.8374267, -8.530889, -4.8673906,
-5.9532046, 7.2786665, 5.932951, 2.6389132, 7.4403477, 6.1997886,
-5.841291, 7.8263946, 0.5292041, -3.676703, -0.49005485, 3.7664673,
-4.1798797, 10.774166, 8.592756, -3.8540742, -0.028370729, -3.587029,
-4.64535, -9.116711, 2.6466293, 0.5187672, -8.428083, -4.6849036,
2.7026267, -3.270856, 1.8606595, 0.2286674, -6.5407424, 9.553818,
6.0531406, 8.398296, 5.913426, 2.9094322, 8.969839, -9.450213,
-6.505641, -7.9977264, -2.479936, 4.4787364, 5.7094836, -8.507645,
-3.5215454, -9.261673, 7.5637755, 2.5877256, -3.814762, -9.862254,
-3.339536, -3.6824777, 4.981936, 4.0559835, 6.245253, -1.0454221,
-3.3528783, 0.14424507, 4.6767197, -2.494267, 9.214947, 9.411009,
-4.0438204, -0.45377094, -3.700574, -4.3525124, -8.963372, 1.7330912,
0.13081527, -8.194949, -5.0829754, -5.847004, -4.8036866, 1.6868889,
-1.2194694, -3.0331173, 1.7239348, -5.9374795, -3.6144981, -2.285635,
-0.7147832, 5.529549, -5.166152, 0.38294068, 0.4713426, -9.853023,
-2.80314, -3.739256, 6.471953, -4.418778, -5.361087, 0.67936295,
-7.9956455, 5.754543, -8.48631, 10.761317, 5.2415776, -5.274907,
-8.889425, 7.3132453, 4.336475, -1.819207, -4.5843754, 7.384534,
-2.5867555, -4.9045286, 0.65757024, -8.163522, 5.968242, -7.730517,
10.217442, 6.426302, -6.237706, -8.947288, 6.1739154, 5.1672564,
3.992474, 6.385997, -8.419676, -3.3017192, -7.21863, 7.8873005,
2.9152884, -4.102827, -9.067645, -3.7832046, -3.5252154, 5.1169453,
1.3407185, 7.978284, -0.75868845, -8.102144, 4.08247, 4.174539,
1.2865875, 6.292456, -0.029244112, 1.5335814, -0.5153809, -8.947203,
-7.7898912, -9.355666, 3.2628682, -3.9870498, -1.1463507, 8.495732,
-4.9349427, -1.7382121, 0.9198348, -1.4893372, -3.8012633, 2.8584168,
-7.4345903, -2.900459, -2.5117135, -0.53018016, 6.1696663, -5.711414,
0.8719471, 1.4162141, -9.461803, -5.172819, -1.3379382, 4.793741,
-4.221129, -9.363175, -3.3273566, -7.310969, 7.3367653, 6.3289204,
2.4898496, 6.6524854, 4.8964777, -6.2744513, 8.5342655, 1.1963123,
-4.561236, 1.1688818, 4.4174256, -2.1723742, 10.33894, 8.671297,
-5.21523, -1.3814139, -1.7841377, -4.4079, -8.5351515, 1.3245739,
0.7510524, -7.670111, -4.4692674, 6.453337, 7.3419437, -4.1685176,
-7.4567695, -3.7720783, -0.75552493, 7.2461615, 7.0802107, -10.2811,
-0.33161741, -1.9935715, -6.9947915, -8.110136, -3.6399438, 8.735443,
6.8431005, 7.799528, -4.5123687, -8.377505, -6.048178, -1.4277271,
7.107523, 6.971019, -11.437731, 0.082519814, -1.7611157, -5.715288,
-8.041916, -3.0954893, 8.904493, -7.5737486, 3.3187006, 5.695824,
1.0419805, 5.114346, -0.14710021, 0.48287198, -0.8227286, -9.077854,
-7.951658, -8.836848, 3.0087242, -4.0115004, 0.08411111, 8.047573,
-2.1157877, -3.9992015, 6.437919, -2.6568744, -3.21423, -0.6328733,
-6.891984, 7.5388503, -8.3115635, 10.89314, 5.004447, -6.0481195,
-9.866091, 4.7569146, 3.3194225, -3.0405905, 9.119562, 4.879108,
3.2864892, -6.2143307, -7.283942, -9.467218, 7.667866, 1.098137,
5.443473, -8.762702, 9.069774, 5.449659, -4.805507, -6.4436784,
3.8877387, -2.6641645, 0.72104204, -0.11310558, -6.9456596, 10.317614,
5.3842707, 8.84568, 8.427724, 2.4454255, 8.632645, -8.376218,
-6.2992105, -8.4949255, -3.1058948, 0.59912384, -6.237596, -10.444258,
8.301745, 10.453057, 5.7606997, -4.7294407, -8.185114, 5.132195,
-2.4791276, -8.295153, 0.9517404, -8.011838, 8.114006, -5.691737,
4.5666203, 5.496548, -7.7020545, -1.7187724, -8.162109, 7.8228955,
3.001744, -2.6306505, -8.37975, -5.140102, -3.6051292, 3.7831283,
2.6093254, 8.430258, -0.3420959, -1.6427262, 7.8981867, 4.2203164,
1.9768403, -6.8467407, -7.1951556, -8.402333, 6.5761886, 1.9226346,
4.2456584, -9.228203, 9.896327, 5.8616033, -6.827098, -5.4691267,
4.8847914, 4.9063582, -8.759736, -2.7456627, -8.032234, 7.730428,
3.7743092, -3.9100482, -9.894421, -5.1815715, -3.6016612, 4.127908,
2.8966517, 7.8370733, -1.6779532, -1.4414047, -5.035737, 6.214293,
-3.1846218, -4.1440144, 1.1732963, -9.312775, 6.759084, -7.5035295,
9.002608, 4.7642865, -6.3408637, -10.41876, 6.465977, 2.7745793,
-6.3903165, -4.929907, -0.15017323, -2.0638697, -4.03033, 2.285599,
-7.715339, -4.0333323, -3.0554392, -1.0687588, 6.8749356, -5.0682015,
-0.6250181, 2.429104, -8.503015, 5.607639, 5.404223, -6.699931,
-4.0375285, -7.1969423, 8.078638, 3.0899034, -3.5250885, -8.932142,
-3.469452, -2.9253054, 5.013855, 3.4791899, 7.031046, -0.7151412,
2.901402, -4.4316077, 0.16208515, 0.22935644, -6.269433, 8.996756,
4.45683, 9.668581, 7.5481763, 1.0703038, 8.319703, -8.18229,
-7.3028693, -8.863301, -4.2264037, 3.7514596, -2.0897253, 1.9309268,
0.45350486, -5.1661916, 9.65991, 5.8520236, 7.605389, 7.777277,
1.3890231, 9.017126, -7.4803934, -7.532483, -9.110879, -2.7080903,
2.921823, -6.2629356, -7.75661, 9.3432865, 9.2441, 7.7586513,
-3.2853987, -7.6449366, 2.7912474, -2.0987654, -7.3543344, 0.0054030404,
-9.742115, 8.285096, -3.7123616, -7.207385, 4.7205715, 5.1331143,
0.41564643, 4.941891, 0.682556, 0.115416594, -1.3487873, -10.244573,
-7.606563, -9.691795, 3.4609072, -3.17139, -0.17558034, 8.062665,
3.8549154, -3.64575, 1.208214, 0.08202293, -5.9753456, 8.572496,
4.7227993, 8.348328, 7.237105, 1.5744025, 8.28758, -7.6441417,
-6.305154, -10.205392, -2.6953459, -3.680438, -4.737711, 8.076828,
-3.9463887, -5.768685, -0.19550517, -6.552664, 6.831283, -8.8678,
10.215987, 6.418074, -4.774544, -10.061993, 4.6981983, 3.5625317,
4.636084, 5.650058, -9.210613, -2.324559, -7.6227593, 6.061073,
1.815714, -3.7349296, -8.779834, -2.4578583, -3.3296137, 4.0035834,
2.905591, 7.497793, -0.32837072, 4.343629, 5.426969, -8.711357,
-3.1507385, -8.164997, 7.589452, 3.3833866, -2.562268, -9.608429,
-2.8779113, -3.7190444, 5.0126367, 2.1167986, 7.843641, -0.33609527,
5.8978233, 6.9067116, -3.7806575, -7.9640718, -4.7071404, -1.2412709,
7.4889135, 8.052854, -10.180504, -1.0416443, -0.68187726, -4.606274,
-8.559041, -2.1912286, 9.125676, -2.564449, -0.4077612, 3.729151,
-1.4930987, 9.25552, 7.2783594, -5.106432, 0.03942779, -5.814413,
-3.369435, -9.76186, 2.4212809, -0.10460661, -9.872271, -5.1764264,
3.3377721, -2.704967, 0.66857433, 0.6728477, -6.6530495, 9.272796,
6.6483746, 8.661669, 7.882691, 1.62038, 9.206773, -6.858264,
-5.7649136, -9.037513, -3.4676454, -3.6506896, 0.6573961, 4.724859,
-3.052057, 10.370214, 9.905973, -4.6114883, 0.052524507, -3.7590492,
-3.4722543, -8.405755, 3.1257493, 0.08250925, -8.581948, -3.4865882,
-5.772144, -3.1400251, 5.6200085, -5.0186934, -8.133438, -3.292921,
-7.2410965, 7.592173, 6.1133866, 2.424825, 6.839605, 6.017547,
-6.6967225, 6.565976, 0.47802114, -6.48091, 3.6072001, 3.9498942,
1.6581919, 5.6619124, -0.82999116, -0.61616766, -0.37300193, -8.50174,
-9.583391, -9.692172, 3.8931427, -4.046124, -0.17201474, 7.9533978,
-8.619978, 4.5182357, 4.657494, 1.9777148, 5.5770326, 0.30064544,
1.4421412, -1.6659772, -9.893758, -7.23198, -9.014757, 2.597077,
-2.73886, 0.022842806, 8.174271, -2.1537125, 9.391093, 5.390634,
1.7177027, -7.467074, -7.611476, -10.214777, 8.762868, 1.2500544,
4.006574, -9.78208, 9.613381, 7.2198687, -5.1508865, -6.771655,
4.904454, 5.1922736, -8.712234, -3.687767, -7.2860403, 6.8081403,
0.95645076, -3.6419196, -8.042028, -4.051204, -2.2797441, 4.892401,
2.98828, 7.654376, -0.60350186, 4.346703, 6.0450864, -7.903797,
-3.493229, -7.810131, 7.4589167, 2.2748578, -2.9725246, -8.531999,
-2.4975607, -4.5675287, 4.506418, 2.1151977, 8.16713, -1.6877218,
-1.7230362, -3.299179, 6.5165877, -2.410716, -4.7903905, 2.5705807,
-5.792972, 6.052256, -8.238377, 10.689247, 5.341898, -6.4853234,
-9.277765, 6.364136, 4.265243, 7.2072506, 7.8140974, -4.0175314,
-7.677814, -4.5424404, -1.576965, 5.691887, 6.136081, -10.715865,
0.8200201, -1.8762171, -5.9831085, -8.103869, -3.1881645, 8.690576,
5.15829, 6.6037273, -8.244169, -3.5921094, -7.7693434, 6.479102,
2.9159613, -4.327943, -8.938923, -4.48339, -3.4075563, 4.3356843,
2.874883, 7.7599, -1.2463628, 3.1838675, -3.9989047, 0.18408673,
-0.1492555, -6.1179323, 9.363459, 6.024409, 8.855445, 7.7826543,
1.3481644, 8.487992, -7.7133837, -6.88068, -9.034223, -3.3302634,
2.471361, -6.93737, -8.826766, 8.28474, 9.288136, 5.6002464,
-4.687557, -7.8654947, 3.8335783, -0.64893854, -7.8610234, 0.18917519,
-8.926671, 8.497749, -5.402358, -7.2587376, 4.354445, 5.6607094,
0.8760706, 4.1080756, -0.4393458, 0.7394499, -0.738405, -10.311899,
-7.123229, -9.824927, 2.981923, -4.069806, 0.29893327, 8.775643,
3.3072035, -4.102232, 2.1789277, 0.9062795, -7.2858787, 9.297767,
5.8662076, 9.961333, 7.48393, 0.8503, 9.009271, -9.196569,
-6.3856516, -9.275788, -3.542137, -5.1419396, -3.1650748, 0.62857354,
-0.70218223, -4.898184, 2.5156069, -5.9436083, -4.309739, -3.2661068,
-1.3350625, 4.940776, -6.5558577, 0.025333099, 0.8643945, -9.060059,
-3.7638009, 1.3006603, 2.930648, -1.6828228, 9.258093, 8.444938,
-5.1375, -0.56084764, -3.6738598, -4.7568827, -10.60041, 2.036893,
-0.8960011, -8.827401, -5.258062, -2.915564, 9.426291, 5.808812,
2.2494397, -7.7167454, -6.5688877, -8.000686, 6.896675, 1.9284883,
4.1717334, -10.137739, 9.851994, 6.564189, -5.459545, -6.984745,
3.7925048, 4.6633215, -8.126076, -2.4366872, -7.7790375, 7.80821,
1.6440209, -3.6311939, -7.575319, -3.732528, -3.4079149, 6.206286,
1.7716099, 8.301756, 0.6090889, 4.666259, -2.889, -0.14395317,
1.1249006, -6.0106854, 10.225452, 4.929633, 9.554454, 7.5130134,
1.0228959, 8.944381, -7.326619, -4.2688065, -9.935262, -3.0041497,
5.554488, 5.440729, -8.29724, -2.2636664, -8.1900215, 7.299905,
2.5538466, -4.0512805, -7.1064873, -4.765459, -4.4050064, 5.512082,
3.2504358, 7.0260797, -0.13059919, -4.271671, -0.109247826, 4.463972,
-2.07052, 10.289975, 8.72589, -4.547148, -0.38130563, -4.4063926,
-3.460941, -9.646949, 2.38603, 0.16248609, -9.719953, -3.7508056,
5.3057723, 9.361436, -3.9695253, -7.33968, -4.902846, -1.5555608,
6.785052, 7.386685, -10.853305, -0.01764484, -2.0805655, -5.8810654,
-8.515495, -2.459193, 9.9795885, -8.114333, 4.52152, 4.6586986,
2.4480875, 6.1383495, 0.4417636, 0.37089536, -0.3891831, -9.644221,
-7.8640547, -9.252434, 3.2194843, -3.2670448, -0.16117387, 7.778234,
-2.2840092, 8.546623, 5.3343086, 1.8506979, -7.408649, -6.504553,
-9.467256, 7.34325, 2.0159168, 4.9215784, -9.2560215, 9.380678,
7.221289, -4.7800727, -5.9730334, 7.4766254, 9.066331, -3.3312054,
-8.385968, -6.358288, -1.7290114, 6.8875446, 8.493264, -9.568745,
1.033052, -1.8234951, -5.1219053, -7.026343, -4.6334124, 8.568787,
-2.4737391, -4.7308645, 6.7816086, -3.2560906, -4.59976, 0.6174511,
-7.62071, 6.904139, -8.186233, 9.913178, 6.0108953, -6.2107353,
-10.4315, 5.8873916, 4.138395, -4.958099, -3.912568, 1.2031528,
-0.42206648, -4.695156, 1.8973438, -6.3485107, -2.977597, -2.2787466,
-1.4944704, 7.094462, -6.378204, 0.05009232, 2.0879278, -7.9830794,
-5.429115, -1.8087174, 3.443089, -6.1949654, -8.899338, -3.3597188,
-7.345244, 8.720281, 6.3209877, 3.5766168, 8.313016, 6.0331893,
-6.601438, 7.386497, 0.6181006, -8.187921, 4.349035, 5.5312696,
1.3723242, 5.739623, 0.17953525, -0.7856711, -1.0033705, -9.961328,
-8.609259, -10.144532, 3.0316873, -3.8686829, -1.0059643, 8.485956,
-6.345006, -4.187118, 1.2820076, -0.6978311, -4.7164316, 2.5219946,
-7.8902826, -5.12284, -3.1114652, -0.27492487, 6.3707066, -5.6473646,
0.7922558, 2.2096162, -9.520029, 2.888621, -7.4256835, -8.498906,
8.386103, 9.700013, 6.1441994, -5.0240536, -7.752068, 3.280116,
-0.58014745, -7.558307, 0.1803329, -9.982528, 7.7738757, -5.1454473,
3.0543482, -6.867554, -9.470557, 9.221004, 9.373329, 6.4169154,
-3.7736204, -7.692189, 4.6977744, -0.7992779, -8.293994, 0.85733217,
-8.163799, 10.392848, -5.610846, -3.872193, -0.740111, 4.4266324,
-1.9697429, 10.566829, 9.462566, -5.50089, -1.2429972, -3.253267,
-5.5605836, -9.500684, 2.034272, 0.0049618683, -8.945363, -4.3303676,
-0.9777084, -3.3959746, 5.956042, -2.7618248, -4.8576555, 1.2843829,
-6.8247375, 6.6121335, -8.276644, 11.792523, 4.725349, -7.223109,
-10.164721, 5.211637, 3.0924232, -1.6278131, -4.388016, 7.4943976,
-2.8757637, -4.9888926, 0.7044064, -8.254395, 5.2539263, -7.828276,
10.34869, 5.6989985, -7.2304406, -10.14044, 5.451729, 4.96072,
2.7891183, -4.167649, 0.77685016, 1.6579223, -6.588133, 10.0272665,
5.5890923, 8.702979, 7.748578, 2.4806275, 8.665389, -6.637072,
-6.6769, -8.732783, -3.8638341, -5.5774817, -0.70153505, 5.6642966,
-5.059678, -7.5952415, -5.6190104, -6.363878, 7.676501, 6.1573286,
2.44988, 7.8924274, 6.2331724, -5.327017, 7.429852, 1.2832249,
2.3701262, -7.01407, -8.379835, 8.656347, 8.826421, 5.3598933,
-5.1340046, -8.319505, 3.7952979, -1.2645867, -7.8504124, -0.75848407,
-8.886457, 7.2644157, -4.281181, -1.9889052, -4.8167295, 6.5223017,
-3.122967, -3.8163774, 1.6377031, -6.9833784, 5.8064218, -8.537923,
9.751458, 4.8831944, -5.620436, -9.567528, 7.3875375, 5.0158916,
6.041892, 8.259725, -3.9294024, -8.395762, -5.0143604, -1.5589857,
6.383793, 7.9771776, -9.940671, 0.2254698, -2.0715346, -5.2563257,
-7.357813, -2.617154, 8.452804, -1.8445097, 7.9748573, 2.852134,
2.6271937, -7.8364425, -7.0374455, -9.657407, 6.0621367, 2.401324,
4.69286, -9.991867, 7.5844674, 6.266482, -5.478955, -7.398739,
-1.4619666, 9.4723425, 4.60097, 2.1691477, -7.6674695, -5.168083,
-8.747873, 7.4000993, 2.5303369, 4.498158, -9.431591, 8.844865,
6.978881, -4.4358006, -5.4217067, -4.659544, -1.0139742, 5.884157,
-5.5130477, -8.011086, -4.4731374, -7.421725, 8.405308, 6.109013,
1.5049684, 8.031445, 4.136314, -4.368227, 8.902495, 0.60413873,
-6.6470423, -3.9147856, 0.91597337, -2.371798, -5.782744, 1.8516821,
-8.064622, -4.512805, -2.7762094, -1.1958742, 6.7202463, -5.777803,
0.4950208, 2.2838364, -9.868175, -5.5723953, -1.5259774, 5.5945134,
-5.324155, -7.3378105, -3.8832247, -6.1360493, 8.6874485, 6.6353726,
2.1103282, 7.108333, 6.317996, -6.029129, 8.206509, 1.5571198,
-7.598114, 4.902557, 6.3447857, 1.7360098, 5.3611236, 0.84004647,
-1.0797285, -2.4447439, -9.014474, -7.8322954, -8.9475565, 2.7732935,
-3.3132184, -0.39866814, 8.55067, -1.7455846, -3.4977765, 6.9930267,
-3.3628457, -4.299323, 1.0192782, -7.8389244, 5.7748685, -8.653024,
10.861672, 3.988377, -5.1816134, -10.545315, 5.348773, 4.963106,
-5.9096785, -4.296035, 0.3811736, -1.3324562, -4.8769484, 2.755635,
-7.569372, -4.3171268, -3.3688078, -2.6722338, 5.5698, -4.3176937,
0.8339118, 1.8348091, -9.255015, -4.466525, -0.52820647, 3.2409153,
-2.3106816, 8.45154, 9.04042, -4.5315514, 0.23663145, -4.252325,
-4.1695433, -9.18726, 2.034318, -0.09400384, -7.405626, -3.860553,
-5.8544736, -2.8740828, 0.08909136, -1.1935165, -3.8256376, 2.5674596,
-7.1569543, -3.6372294, -2.3351889, -1.744964, 6.3119283, -5.385421,
-0.24961151, 1.8748479, -8.592662, 4.3957467, 5.2138186, -8.1475,
-3.4857674, -8.435744, 6.475832, 3.8979206, -3.0107994, -8.515689,
-4.5206137, -3.059994, 4.510524, 2.9010677, 7.060621, -0.4435133,
-2.6261828, 9.212092, 4.465795, 3.0983117, -6.535945, -6.365695,
-8.3743105, 8.144955, 2.1490154, 3.2536693, -9.308529, 8.942247,
6.278821, -5.342763, -5.4967027, -2.1468363, 8.506266, 4.7706156,
1.4444016, -7.3076897, -7.8647723, -9.4845915, 6.3773437, 1.3391889,
4.8990006, -10.25289, 11.240865, 6.9941754, -5.6238327, -6.964351,
-3.1995134, 1.4404083, 4.1315045, -2.6265864, 9.448324, 10.496363,
-5.2225723, -0.0756852, -2.9580288, -4.2002826, -8.46632, 1.5533906,
-0.0901243, -7.931321, -4.9005084, -3.0503044, 9.031444, 4.9884777,
2.9889703, -6.2081375, -5.372882, -9.375471, 7.9341474, 2.1506395,
5.6943135, -10.154119, 8.810391, 6.229278, -7.239945, -6.731529,
-5.2011466, -4.1407795, 1.3471396, -0.2961784, -3.4765995, 1.9349236,
-6.9275117, -4.174044, -3.3053544, -0.6515488, 4.8781915, -5.175149,
-0.04054334, 1.9891031, -8.87269, 7.0332804, 8.923938, -4.145877,
-7.6633925, -4.991702, -1.8397741, 6.697425, 7.328891, -10.8885,
0.53635895, -2.4218795, -4.9004016, -8.07254, -3.3605964, 9.8187895,
3.318915, -2.993025, 0.22196385, 0.769064, -6.5877304, 9.017363,
5.7091365, 7.7418985, 8.793204, 1.492204, 8.942817, -9.119149,
-5.6121545, -8.625093, -3.224452, 3.142712, -3.990048, -0.84421885,
0.2762281, -5.969255, 10.592828, 5.4343886, 9.019446, 8.563282,
1.3903738, 8.694255, -8.748493, -5.4749656, -8.989472, -3.2844772,
5.6892896, 4.872395, -8.034137, -3.0292802, -6.8492866, 7.9774423,
2.613809, -3.6197107, -8.05618, -3.3275204, -3.975442, 3.6626644,
2.5491765, 8.805044, 0.24683951, -3.5072315, -4.1418424, 7.300777,
-3.5022743, -4.5521297, 1.0602251, -6.6962633, 4.6245656, -9.129215,
9.504933, 5.9370937, -4.276785, -10.049162, 5.324344, 4.4187346,
-5.763483, -1.563504, 5.040037, -5.492637, -7.8934984, -3.035087,
-6.599529, 9.052507, 6.3926315, 3.380347, 7.1051702, 6.826215,
-5.491249, 8.608956, 0.32474133, 1.799648, -7.5502434, -8.472886,
8.935008, 9.662809, 5.794427, -3.0531497, -8.660299, 4.8830886,
-2.4202957, -7.254896, 0.23633967, -9.86912, 8.483484, -3.9287374,
-2.9646475, 8.673299, 4.225203, 1.3683761, -6.845662, -7.461775,
-8.649008, 7.2883563, 1.8550366, 3.526157, -9.99205, 9.9269705,
6.999495, -6.4375067, -6.293968, -3.4943204, -0.78130114, 3.9403396,
-5.153033, -8.46001, -3.6264539, -7.2970595, 8.0062275, 5.689158,
2.2677252, 7.9999385, 5.2466006, -6.7044287, 6.598871, 1.4918274,
5.0877, 6.494176, -7.1873164, -0.825037, -8.3140135, 9.239023,
2.218018, -3.7967653, -8.499883, -4.1877966, -3.2170026, 3.6680696,
2.5840225, 8.644162, -0.5956444, -2.8312402, -4.661592, 5.9375234,
-2.3783178, -4.6442533, -0.05945016, -6.8723516, 5.0403285, -9.095171,
8.262917, 4.448125, -5.900716, -11.264733, 5.2724266, 4.1354885,
2.8470101, -6.1373134, -10.35913, 10.40716, 9.519337, 5.6243978,
-3.797735, -7.923225, 3.2178664, -1.0584085, -7.984466, -0.3795152,
-9.792625, 7.183538, -4.722315, -2.2580402, -5.2211423, 6.4951835,
-2.965495, -4.496866, 0.8761533, -7.128544, 5.741333, -7.618087,
8.919103, 4.9402165, -4.079712, -10.616152, 5.3616004, 4.6512804,
-4.371715, -0.9452253, 4.659573, -4.868753, -8.989224, -2.64179,
-7.260644, 9.312214, 6.753061, 2.8893366, 6.9598317, 5.9674635,
-7.7761106, 8.008274, 1.0315756, -7.575469, 3.9002707, 4.6316395,
1.0613387, 5.9011254, 0.2593292, -0.039219975, -2.0414073, -9.031789,
-8.347815, -9.329479, 2.8148365, -3.494437, 0.77220315, 7.2672553,
-2.0189807, 8.61134, 4.7252855, 3.7652287, -6.946869, -6.0756183,
-9.330551, 7.299031, 3.2618606, 3.7225747, -8.319596, 9.893623,
6.255126, -5.3105326, -5.6827126, 5.5157228, 7.350775, -3.1852071,
-8.994102, -5.850257, 0.67063665, 5.6826863, 8.018074, -10.271123,
0.5712777, -1.193731, -4.7675743, -7.7428703, -1.8554773, 8.445659,
-1.393967, 8.656435, 3.9471555, 1.8851188, -6.8406196, -6.1141753,
-10.023053, 8.394208, 1.9116948, 3.862635, -10.296783, 8.239798,
7.2250724, -5.701895, -7.2664733, -1.6775517, -5.201638, 6.0385184,
-3.1878204, -5.9582653, 1.0890994, -7.3725066, 6.723062, -9.277874,
9.304353, 4.390328, -6.7286897, -10.716945, 7.00641, 3.6483636,
-6.63675, -3.5559812, 0.31344596, -0.8439323, -3.74466, 1.5822508,
-6.4499354, -4.5322814, -2.1071227, -0.6214992, 6.9902477, -6.1629505,
0.04016701, 1.8346976, -9.283217, 4.7991495, 5.471423, -8.706965,
-2.7737815, -7.343405, 7.4493012, 2.732541, -2.4421341, -8.543219,
-4.2346525, -2.8778088, 3.6357846, 1.99416, 7.9793477, -0.19422384,
-7.403685, -3.5551767, 0.89556885, -1.4046333, -3.3883698, 2.0032039,
-6.8148985, -4.870872, -2.6892498, -1.0006483, 5.8612347, -6.265193,
0.19265795, 2.4265244, -9.376727, -3.9761724, -1.2989889, 5.677702,
-5.218225, -9.51261, -4.9362373, -6.4571614, 8.586296, 6.470298,
3.8415716, 7.937437, 6.954662, -6.202322, 7.286129, 0.74633855,
5.529283, 7.995379, -4.6934767, -8.504535, -4.00343, -1.1362116,
5.837701, 7.5073624, -9.463826, 0.91103613, -1.7407892, -4.874295,
-9.029605, -3.9871123, 8.972904, 2.7123868, -5.985961, -9.690327,
8.171903, 7.3252854, 5.6013803, -3.2380323, -7.6846952, 3.5296013,
-1.2125603, -8.041455, 0.04686942, -8.776114, 7.0552874, -4.614647,
-7.9032226, 4.996435, 5.3002543, 0.7005281, 6.227842, 0.011475163,
0.45670477, -1.3246437, -10.241085, -8.211529, -9.368909, 2.5919437,
-2.8221805, 0.3235979, 8.749763, -6.029564, -3.3548684, 1.0231326,
-0.5288603, -3.6796045, 2.4830716, -6.4610176, -4.175672, -3.2900753,
-0.9927472, 5.1820874, -6.4791493, 0.1835257, 1.2935975, -9.286565,
-6.4765377, -3.4880037, 0.60500973, -1.7716715, -4.0184655, 2.7372847,
-8.6450815, -3.345755, -2.9081414, -0.5807574, 6.541592, -6.966696,
-0.21191993, 1.6463073, -9.619619, 4.448166, -3.3990061, 0.12143043,
0.41258815, -6.376388, 9.820646, 5.4848104, 8.637968, 7.937406,
1.5352983, 8.197241, -9.690205, -5.3987346, -8.886295, -4.0348125,
-7.4481773, 4.204582, 4.821544, 0.56294876, 6.2107124, -0.10252308,
1.5331832, -1.8177173, -9.795974, -7.2712555, -10.507499, 1.8787838,
-2.9446409, -0.6489399, 6.8779936, -1.7737396, 9.788155, 5.4708,
2.420281, -7.6797304, -5.7367077, -9.64077, 7.535368, 1.4943072,
4.1165547, -9.358176, 9.623147, 6.944197, -4.62362, -6.0460267,
-6.5041537, -4.505428, 2.0147886, -1.4842197, -4.089174, 2.6230977,
-7.179601, -2.9705713, -3.1086178, -0.7423751, 5.1837897, -6.93054,
-0.14354959, 1.8223656, -9.371503, 1.7448856, -7.3990088, -8.103912,
9.270087, 9.387852, 6.439107, -3.6814947, -7.147886, 4.5911503,
-0.8159959, -6.967173, 0.263382, -9.635049, 8.716003, -5.800547,
-2.810444, -5.7753897, 6.74005, -2.23146, -4.5393744, 1.8594223,
-7.674257, 5.3056464, -8.556048, 9.679924, 5.9838734, -6.5681605,
-8.931891, 7.6860704, 4.109746, 1.4493451, -7.1162224, -9.725718,
8.626729, 9.995359, 6.528887, -3.222829, -7.681058, 2.9120917,
-1.4285026, -8.105374, 0.8349406, -10.1499815, 8.811996, -4.1993327,
3.1261284, -3.306654, 1.0760952, 0.92013645, -6.842751, 8.149914,
5.022422, 8.35793, 7.971487, 1.7163968, 8.919038, -8.659238,
-6.1202717, -8.266663, -3.2541158, -4.1904993, 1.1357827, 3.9571025,
-3.0927138, 8.781415, 10.086251, -3.9394133, 0.77429086, -3.6377277,
-4.100309, -10.113418, 2.0192132, -0.42512167, -10.248283, -3.6871402,
-1.6683478, 8.728633, 3.2131917, 1.2675092, -8.189181, -7.126169,
-8.825435, 8.497029, 2.2511494, 4.008081, -9.007727, 7.8504024,
6.813783, -5.213612, -7.398511, -2.9731507, 1.7537284, 3.5783243,
-1.4675912, 9.051097, 8.652566, -4.757014, 0.44610742, -3.3442087,
-4.9764237, -8.958083, 1.7113985, -0.9886651, -9.620541, -4.952701,
2.7505887, -5.415354, -9.2625885, 9.731504, 7.7938666, 5.3320146,
-4.5788693, -8.993314, 2.5761487, -0.90785086, -7.7090826, 0.16912198,
-8.828898, 8.37234, -4.363966, -1.3314515, -3.8149195, 5.8212833,
-3.3365865, -3.7110338, -1.0449071, -7.2861347, 5.042358, -8.842575,
9.528564, 5.194124, -6.412269, -10.657558, 7.9135666, 5.4443703,
5.641363, 8.416695, -4.6061773, -7.361384, -4.194317, -1.0612187,
6.806824, 7.8236537, -8.664162, 1.0618718, -1.3440112, -4.92512,
-8.222847, -2.704291, 8.1543045, -6.660939, -3.7290404, 0.18942481,
-1.4073925, -2.705612, 2.0641398, -7.460961, -4.6103864, -2.1518288,
-1.0054573, 5.248958, -5.0816116, 1.2784109, 1.4279948, -10.37757,
-2.7630188, 0.016434759, 4.204674, -2.7288861, 8.911991, 9.196969,
-5.1603303, 0.023894375, -4.506302, -5.1968226, -9.291158, 3.3918695,
0.11112251, -9.20254, -3.8992107, -5.0291705, 1.8456392, 3.657536,
-3.3808377, 9.256997, 10.307117, -3.418722, 0.032756194, -3.2837749,
-4.2729473, -9.988718, 3.2250073, -0.38190877, -7.8955536, -3.5180917,
4.986541, 6.271868, -8.536107, -1.683413, -7.047003, 6.766861,
4.0128617, -3.1531434, -9.31342, -2.6233144, -2.0418892, 4.569006,
2.3985941, 7.6235924, -0.05538929, 6.0154357, 8.125788, -3.4109778,
-7.6907077, -5.095131, -2.2492085, 6.498456, 7.294242, -9.698787,
0.26558772, -2.5430288, -6.480304, -9.15268, -2.6754224, 9.437554,
-5.5917234, -1.7598187, 5.1655426, -4.3431716, -8.190864, -6.100267,
-6.591725, 7.578942, 5.486228, 2.0995564, 6.9648747, 5.110837,
-6.1261616, 6.8636184, 1.0365121, -5.3663697, -2.5018656, 4.6703067,
-6.286849, -8.160234, -5.2242036, -6.352961, 9.033608, 4.9995646,
3.752948, 7.7887383, 6.5982304, -6.7996645, 8.286845, 1.5753202,
-8.443196, 3.9899874, 4.9556775, -0.017643621, 4.732681, -0.6636576,
1.4581941, -1.3151641, -9.028099, -8.891216, -8.535466, 3.6389797,
-5.111722, -0.32230788, 8.498365, -3.1372306, -4.873306, 7.250089,
-1.9549423, -3.7090962, -0.40201265, -6.4233136, 4.7569766, -7.880305,
9.480289, 5.936669, -6.9584002, -9.06974, 5.2837462, 3.6692286,
-7.430226, 4.7528315, 5.490018, 0.50132537, 5.874811, 0.1740496,
-0.2948099, -1.9982044, -9.00967, -8.006327, -8.259013, 3.0789988,
-4.053522, 0.16426991, 8.195698, -5.734968, -3.5758421, -0.097321026,
-1.8115845, -3.4544446, 2.002715, -7.4926767, -4.8256927, -2.3762438,
0.5651671, 4.9562464, -5.9895706, 1.2732432, 1.7925425, -8.754332,
-1.7962216, 0.47109967, 4.3137145, -3.583062, 8.145015, 8.675609,
-3.9576173, 0.17896295, -3.5634172, -3.7861938, -9.676206, 3.1628625,
-0.38938487, -7.9860625, -4.1174593, -4.964161, -1.8685355, 5.8341045,
-6.34102, -10.058669, -4.354374, -6.4476023, 8.7005415, 7.0249505,
2.974236, 7.034518, 4.4456472, -6.434717, 8.570751, 0.5124114,
-3.6418507, 0.5554921, 3.367971, -3.017075, 9.514487, 9.281623,
-4.0681076, 1.0239902, -4.581693, -4.225302, -8.790287, 3.25273,
0.5565237, -8.766786, -3.960491, -5.093242, -1.1662203, 5.345085,
-6.254107, -8.687721, -3.742164, -6.418398, 8.777365, 6.8158035,
2.4148731, 7.5542808, 3.7105362, -6.583354, 8.385912, 0.5135687,
2.9250274, -6.9842806, -9.27032, 9.032411, 9.915786, 6.2652893,
-5.011622, -8.608535, 3.6328795, -1.2499161, -6.178456, -1.0666534,
-8.958317, 9.228787, -3.2343855, 4.8704925, 4.7510295, -9.871552,
-4.0604935, -8.570907, 6.639867, 2.3541386, -4.091954, -8.4915495,
-3.942945, -3.4889514, 5.390299, 1.9249872, 8.037348, -0.56548274,
-4.9617343, -2.6649349, 3.9692755, -5.9428506, -7.8949842, -3.0987332,
-6.133977, 9.334152, 6.105837, 1.4486175, 7.1105576, 5.8923063,
-6.6165357, 7.8894663, 1.0958387, -5.631502, -4.384589, 1.4698544,
-1.5361322, -3.9733317, 2.4192832, -7.304139, -3.5895412, -2.117321,
-2.1025727, 6.6165576, -7.1702685, 1.0074711, 2.636985, -9.834668,
-2.3808458, 9.00357, 5.045383, 2.056876, -7.5607753, -6.0425096,
-8.949299, 7.3044095, 1.3690127, 3.851154, -10.207672, 9.277135,
7.8470488, -6.7135487, -7.492994, 3.1985943, -6.935993, -9.047269,
9.782481, 9.520245, 7.055303, -3.705615, -8.85654, 2.7514706,
-2.2830088, -7.095799, 0.735556, -8.017676, 8.776582, -4.010413,
-3.1072533, 0.76138145, 3.9190168, -3.6024258, 9.649632, 9.634952,
-4.4562793, 0.18947506, -4.1722145, -3.6073887, -9.216167, 1.8658406,
-0.0728769, -10.192603, -4.55464, 3.920998, -2.5004525, 0.7556708,
0.19498403, -6.4827724, 9.607175, 6.9504147, 7.811515, 8.738351,
2.0839396, 9.023966, -8.301546, -6.6700454, -8.569843, -3.0352917,
-3.1060672, 0.123924114, 4.6874747, -2.663372, 9.361349, 8.9156065,
-4.8481407, 0.64249396, -3.3601758, -4.0558143, -8.84251, 3.0814745,
-0.051395938, -9.751219, -3.51628, -3.5107403, 9.103959, 4.1631427,
2.5616202, -7.336464, -7.192438, -10.161006, 7.0069084, 0.3255847,
3.0527196, -9.05602, 9.948257, 6.946673, -6.430101, -6.3968987,
-6.0076656, -4.8721514, 1.0888506, -2.223866, -3.7888072, 2.6749995,
-7.697863, -3.7496884, -2.4904535, -1.2557896, 4.997531, -7.3803625,
0.5294438, 0.75292945, -9.004251, -6.045037, -4.0840354, 0.9659302,
-0.031182272, -4.268261, 1.5548874, -6.4351664, -4.241374, -4.1936316,
-0.28540513, 5.3287897, -6.0698977, 0.517075, 1.9816412, -8.574375,
-2.6364996, 0.044094425, 4.0758266, -4.158439, 8.512978, 9.675815,
-4.699956, 1.1723328, -3.0423644, -4.464688, -8.931193, 1.9696642,
1.4919473, -8.166115, -4.308291, 4.240168, 4.3839035, -9.306737,
-2.5585122, -8.50433, 8.040913, 2.4161782, -3.3221233, -8.923694,
-3.23936, -2.9481215, 4.8266616, 3.1465018, 7.5957923, -0.1761594,
1.635158, -7.5802045, -8.545995, 9.684341, 8.721182, 5.1186357,
-3.9720159, -8.107762, 4.0684843, -0.53223395, -7.600826, 1.2023906,
-9.155265, 7.9439096, -5.9524975, 6.4409337, 8.288787, -2.120614,
-7.4738345, -4.636201, -1.4115783, 7.534934, 8.893526, -9.489198,
-0.47991526, -2.0359435, -5.36049, -7.283144, -2.758799, 9.180332,
-2.4428005, -4.289643, 6.550183, -3.4626865, -3.4779873, 0.89256114,
-5.74742, 4.906095, -8.898732, 9.760586, 6.241062, -5.746058,
-9.672227, 5.520144, 3.327288, 3.4568498, 4.6385217, -8.447328,
-4.0117807, -7.100771, 7.8721576, 2.6949818, -4.1668334, -7.762736,
-2.3543062, -3.3933797, 4.949498, 2.8923664, 8.115513, -0.6393926,
-3.0676663, -0.12716746, 5.342863, -2.4860654, 9.001626, 8.836862,
-5.3240137, 0.9058949, -4.8705597, -4.770749, -9.655423, 2.180305,
-0.13619632, -10.014284, -4.7069464, -1.9712611, -5.049141, 5.675679,
-1.8318181, -4.0372977, 0.69356495, -7.9774384, 5.4686694, -7.5971737,
9.834102, 5.0594845, -5.668904, -10.459637, 7.3598185, 5.0841074,
-5.7341266, -3.6247492, 0.8194306, -1.3306394, -3.7847195, 0.46636584,
-7.417748, -3.9881816, -3.478473, -0.60817397, 5.5603895, -4.7776914,
1.3858026, 1.5222275, -9.1765785, -2.2348847, -5.357557, 6.534572,
-2.900494, -4.2688546, 1.3175057, -7.908893, 7.225223, -8.337407,
10.246494, 5.005191, -5.125219, -10.069494, 6.5035152, 3.4839725,
1.8757601, -5.8883753, -7.72355, 9.475095, 9.613179, 6.433695,
-4.2970076, -8.1376, 4.853009, -1.8567407, -6.42889, -0.3218815,
-8.384427, 7.2016377, -4.4142675, 6.178387, 8.037596, -4.1346507,
-6.579993, -3.451481, -1.6528535, 6.607423, 7.282804, -10.042738,
-0.3675033, -2.4112575, -6.3527617, -7.3190246, -3.6114688, 7.053389,
-6.409217, 4.2970686, 5.0458927, 0.38610092, 5.732731, 0.25703382,
-0.31148946, -1.5473726, -9.726479, -7.662987, -10.154891, 3.6045713,
-3.0718038, -0.08280907, 7.9998364, -7.032314, 3.3468149, 5.3258433,
1.9090985, 3.968849, -0.2675408, 0.060409773, -1.9405497, -9.221969,
-7.0576916, -9.464711, 2.8720486, -3.6328642, 0.17267628, 7.3887897,
-1.6439323, 9.778203, 5.0662756, 1.2085507, -7.1011133, -6.030941,
-8.739126, 8.947054, 2.2976227, 4.295886, -9.804691, 9.491675,
6.542124, -5.2575417, -5.693809, 1.5259458, -6.2299876, -8.674668,
7.5997577, 9.771124, 6.735506, -4.6034284, -7.752424, 3.0851119,
-1.1285808, -8.444385, -1.3516363, -9.580201, 8.099798, -5.102776,
1.8797437, -7.358552, -7.704615, 8.898274, 9.467731, 7.0322733,
-4.630093, -7.629714, 3.2717342, -1.2894686, -7.50864, -1.146617,
-9.857415, 9.132159, -4.2040668, 6.268142, 7.631262, -3.1463265,
-7.6342397, -5.4017415, -0.36699927, 6.8239098, 6.731365, -9.921062,
0.1618169, -1.5955563, -3.7906873, -7.3430963, -4.672828, 8.478235,
-5.23752, -4.488055, 1.5677861, -1.0111947, -5.1549315, 2.4944124,
-8.680141, -3.7192514, -2.053101, -1.2932495, 5.790003, -4.638614,
-0.069333926, 1.107056, -8.033828, -3.5469856, -0.033332966, 3.7997997,
-2.9539623, 9.276957, 9.894467, -5.0552077, 0.6615617, -2.9059353,
-3.7047288, -8.888133, 2.4200692, 0.65933156, -8.907872, -3.737872,
-3.466973, 0.23550095, 4.07296, -0.8387796, 9.653786, 9.700654,
-4.6577377, -0.23900166, -3.0766413, -4.5070047, -8.523723, 1.9573418,
-0.6184003, -8.714647, -5.2003384, 3.8263936, -4.4396343, 0.8862987,
1.7082888, -5.6999054, 9.567615, 5.340746, 8.386485, 8.926271,
1.4683529, 7.7115383, -7.764392, -6.6376386, -9.760499, -4.5039163,
6.0392504, 7.5356097, -2.028758, -8.333136, -4.7378454, -1.7328049,
5.59965, 7.2865815, -9.406592, 0.85930276, -0.55819434, -3.4961715,
-7.427333, -3.7118611, 9.175206, 1.5961306, -6.8860364, -9.654901,
8.295825, 9.80504, 5.252788, -5.026235, -8.487576, 4.1970296,
-1.3347954, -7.884442, -0.06541064, -10.5342245, 8.532702, -4.555996,
-2.0998847, 1.2904444, 4.867598, -2.9420898, 9.596392, 8.326153,
-4.615114, 0.13709225, -3.9660642, -3.5841537, -10.107703, 3.3356907,
0.68481576, -9.019869, -4.5311813, -3.2301183, -4.34194, 6.004943,
-3.1209073, -4.0283203, 1.2189362, -6.245637, 5.986362, -8.294472,
9.431323, 5.5722885, -5.718046, -9.958375, 5.046603, 4.3691764,
-7.9938965, 3.2712924, 4.6154914, 1.3642961, 5.647025, -0.02010454,
0.24344344, -0.796033, -9.447138, -7.3498936, -10.613059, -0.01747195,
-3.5142288, -0.82592213, 8.002613, -2.5667036, 9.796393, 4.8797865,
2.292897, -6.48079, -6.566714, -8.388422, 8.25393, 2.1598651,
4.6577544, -9.651125, 10.40628, 6.1753783, -4.4925594, -6.391611,
4.622693, 6.773807, -8.462213, -2.005495, -7.987173, 7.0438952,
1.5356572, -3.901669, -8.359946, -3.7750614, -2.8362765, 4.7190013,
2.5044317, 9.248063, -0.621434, -3.395665, 1.3872076, 4.4660835,
-1.6363684, 7.9000883, 9.571806, -5.3251586, 1.1744347, -2.8999548,
-4.2802052, -8.18732, 3.439467, 0.10566513, -7.2400713, -4.4490094,
2.821767, -6.5533547, -8.538616, 9.729733, 9.469679, 6.8007593,
-4.115962, -7.128169, 3.9092104, -1.0611137, -8.448278, 0.1044073,
-9.894598, 7.73961, -5.3951635, -3.7486873, -0.26094398, 4.926385,
-1.8088623, 8.816765, 9.141296, -5.06475, -0.07788902, -3.9374273,
-3.640387, -9.785314, 1.5988438, 0.9192143, -9.294276, -5.3111954,
2.8849535, -1.6119763, 1.3534416, 0.06302584, -6.139903, 8.966088,
4.7925205, 7.851928, 8.199099, 1.4810334, 9.114963, -7.5345025,
-5.1186028, -9.692125, -3.1606374, -5.741134, -1.0133828, 4.8290396,
-5.542567, -8.0015135, -2.9410028, -5.848071, 7.9133186, 4.802608,
4.319805, 9.017506, 6.205359, -7.1579404, 8.525438, 0.71125627,
2.641945, -3.7733855, 0.49750715, 2.976927, -6.431813, 8.600568,
5.6937637, 7.337731, 7.832177, 1.9692173, 8.993705, -7.5486145,
-6.4328413, -9.200884, -4.029678, 4.8682394, 4.7664247, -7.73443,
-2.2667832, -6.999542, 7.268269, 1.4750957, -2.6975288, -8.918093,
-4.7531996, -2.549906, 6.061709, 2.7222757, 9.25564, -0.49178118,
-4.4506955, -1.8604838, 5.6561646, -5.8403077, -8.7562, -5.123713,
-6.606588, 8.82163, 6.9878917, 3.221873, 7.229855, 5.248716,
-5.9656034, 8.628707, 1.3154743, -3.6242933, 9.3226185, 4.6261325,
2.3599126, -6.7228875, -5.9252114, -8.75067, 7.022939, 2.1079085,
4.54176, -9.554108, 9.426611, 6.157459, -6.2172484, -7.3453245,
2.0632749, -5.9967003, -8.779625, 8.708251, 9.691277, 6.7812653,
-4.3755665, -8.089311, 2.943752, -2.9455929, -7.4189134, 0.32977197,
-9.128783, 6.821715, -5.069995, 2.687267, -7.317653, -7.337332,
7.7029505, 8.894274, 6.4270954, -3.4987047, -8.116891, 3.6444871,
-0.9095651, -7.720984, 0.8787651, -8.724686, 8.797385, -5.50071,
-1.6207514, -4.9068327, 7.748731, -3.887284, -3.5474274, 1.3265176,
-8.382739, 7.9414954, -8.553609, 9.1235, 4.459934, -6.179699,
-9.978743, 6.3810806, 3.9286895, -6.0116353, -2.4744582, 5.2717237,
-5.9310913, -6.095964, -3.8439672, -6.8304057, 8.233125, 5.826294,
2.6891737, 7.5075994, 5.753752, -6.4598055, 8.792179, 1.7559983,
2.4534812, -6.2604866, -9.194306, 8.479189, 9.235175, 5.5402975,
-3.318336, -8.305003, 1.6497688, -1.4593256, -8.286216, -1.2383549,
-10.178429, 8.262994, -3.8943517, -5.131992, -1.6247146, 4.1514335,
-4.3075924, -8.468995, -4.0062213, -7.179477, 7.6651387, 5.739587,
2.821518, 7.900612, 6.1866956, -6.025892, 7.6152616, 1.1342188,
5.474643, 5.824748, -8.447901, -1.7820772, -7.901604, 7.983712,
2.2643185, -3.0803235, -8.808327, -3.8194501, -3.760504, 5.533521,
2.2381012, 7.2260923, 0.928994, -2.5117195, 8.203431, 5.692258,
2.5873232, -7.034302, -6.8612895, -8.69246, 5.8943086, 1.8492761,
3.6840625, -10.289444, 9.2014265, 7.907233, -5.304628, -6.763326,
1.9949925, -5.8438973, -7.6821365, 8.336287, 8.726796, 6.0696845,
-3.6923957, -8.413754, 4.631202, -0.90010333, -7.5316224, -1.1015985,
-10.2333, 8.383115, -6.3172174, 6.0867786, 7.9240646, -4.0073323,
-8.463959, -5.8282223, -1.9983485, 6.817653, 6.464775, -8.92771,
0.74137855, -1.6374288, -5.5576205, -7.9927897, -3.0586374, 9.619859,
6.617722, 8.128207, -3.776938, -7.4223785, -6.1082993, -2.0491183,
6.4035096, 7.3358502, -11.138888, 0.53354853, -1.1108701, -5.6119432,
-5.940566, -3.7257512, 8.605433, -1.4806908, -4.753651, 7.4627585,
-2.5209446, -5.237564, 1.8511611, -6.095644, 6.66154, -9.042099,
9.523529, 5.5466943, -6.518284, -8.704915, 6.410257, 2.2448032},
{-9.156419, 5.3382993, 4.218931, 1.0388873, 5.1188726, 0.28789923,
-0.66403043, -1.1256588, -8.074601, -8.796393, -9.238622, 2.2648208,
-3.4147594, 0.18481727, 7.7022886, -4.0184965, -0.57511175, 5.7962236,
-5.364314, -9.026188, -4.786282, -6.4098144, 8.886832, 7.1435266,
3.1233902, 6.3770556, 6.809805, -6.9672413, 7.5824, 0.961985,
-2.2192073, -5.491305, 5.8291707, -3.0785909, -4.807968, 0.7230418,
-7.1418605, 6.4147243, -8.558336, 10.07829, 5.4900274, -7.408513,
-10.547092, 6.2083673, 3.2903605, -7.185884, 3.884016, 5.101746,
1.2541873, 4.7178125, 0.39448896, 0.09540709, -1.6091995, -10.187662,
-9.635805, -9.505136, 4.417039, -3.1636574, 0.15793155, 7.9673057,
-8.475596, 4.3068485, 5.409865, 2.178005, 4.5031695, -2.2377465,
0.58335125, -0.18882181, -8.624355, -7.69541, -9.715562, 3.293194,
-4.394363, 0.5048643, 8.505158, 5.2808757, 3.5331442, -8.044594,
-3.2885385, -8.964062, 7.6199102, 3.4275236, -3.4782534, -8.061821,
-2.6517587, -2.5758934, 5.740099, 3.2706387, 7.7970586, -1.6770775,
5.7117615, 7.7926583, -4.063252, -9.342857, -4.81335, -0.5654317,
6.816361, 6.4182315, -10.002002, 0.21038114, -1.2326238, -5.0900054,
-8.116614, -3.1902997, 9.17829, -3.6660333, 0.5928117, 4.020767,
-1.8741283, 8.06987, 9.349458, -6.2369394, 0.47676873, -4.048555,
-4.0094633, -9.86698, 3.0869122, 0.78278774, -8.561944, -4.5177326,
-3.8657286, 0.3102671, 4.9884367, -3.6284225, 8.457659, 8.840426,
-4.2374887, -1.1185721, -5.964943, -4.6190014, -8.876042, 3.0314698,
-0.27063248, -9.258424, -3.6193402, -4.8447604, 0.10384789, 4.193676,
-2.4129548, 9.078777, 9.273055, -6.7061834, -0.51580375, -3.6649778,
-4.990796, -9.220972, 2.5040975, -0.18620397, -8.8511, -5.1009836,
-7.2450385, -3.4463844, 0.7517474, -2.0190156, -4.535124, 1.4956084,
-7.2539983, -3.4885075, -3.3627713, -0.52576774, 5.332339, -6.5615354,
0.20976752, 1.1236217, -9.458547, -3.7924962, -0.8168977, 3.413253,
-1.6333858, 9.243847, 9.078577, -5.1742406, 1.2778658, -2.8442626,
-2.7500439, -9.373592, 2.3982239, -1.0080693, -7.9365706, -5.2445164,
-7.1730146, 4.0061684, 5.4796715, 1.20506, 6.2075787, -0.16006368,
-0.7864661, -0.5658346, -10.12563, -8.299808, -9.788379, 3.690315,
-5.2078524, 2.367838, 8.89057, -2.3272796, 9.561563, 3.7740133,
1.04885, -6.514268, -6.67222, -8.662983, 7.5660367, 1.5462829,
4.324029, -9.38316, 8.898151, 7.954895, -5.4215345, -7.197413,
-8.635383, 4.3802285, 5.2513223, 1.346883, 5.590111, -0.28032964,
1.497561, -2.5698125, -10.071566, -7.9144497, -10.521648, 2.6051102,
-2.5498512, 0.18603437, 8.31282, -2.596791, -4.017034, 5.696748,
-2.659443, -5.330917, 1.1804227, -7.206465, 4.9133472, -7.69367,
9.223525, 4.8777184, -5.885288, -9.085511, 5.598153, 4.1803226,
3.475634, -4.233442, 1.09579, 0.8117338, -6.8319325, 9.767249,
4.8737288, 8.809706, 7.890164, 2.7181265, 8.7697735, -8.247669,
-5.5079064, -8.122309, -3.1029263, 6.780397, 7.2863965, -2.5764318,
-7.441407, -4.71989, -1.9183284, 6.958463, 6.450258, -9.237784,
0.3352336, -1.2645041, -6.38076, -8.229241, -2.8296368, 8.19435,
-1.5617663, -5.6124477, 4.7870054, -2.2109096, -5.3381248, 0.6965859,
-8.000595, 4.782553, -8.129963, 10.269147, 5.041338, -7.839416,
-10.271929, 6.5834913, 3.1019087, 6.0970397, 7.6056643, -3.503413,
-8.329759, -6.2327275, -1.0219464, 6.8009663, 6.651591, -9.234209,
-0.22715835, -1.4745643, -4.983603, -7.6262217, -2.929378, 8.496694,
-2.3214858, -0.22392803, 3.6714964, -2.5843356, 8.594137, 8.97197,
-4.742137, -0.28803217, -4.047805, -4.6737227, -10.322391, 2.4164019,
1.2820336, -8.957543, -4.269386, 1.8467497, -6.8059382, -8.543474,
8.642587, 10.19167, 5.541722, -4.0385346, -8.354369, 4.697545,
-1.0593618, -6.836944, -1.1363541, -9.125295, 8.80915, -4.7668014,
-3.396074, 0.5606265, 4.9510264, -2.2146635, 9.637748, 8.090525,
-5.6363864, -0.14092614, -3.9170732, -5.0943327, -7.574087, 3.2527626,
0.4750633, -8.919999, -4.575617, 6.060985, 8.151341, -3.5798054,
-9.352471, -5.6021566, -2.0538287, 6.4829435, 9.304293, -9.603721,
-0.0045247287, -1.0065188, -5.219962, -7.3086267, -2.8229012, 10.297774,
-2.6328206, -4.1610146, 7.7436833, -2.5886633, -5.218428, 1.1651435,
-6.3438735, 5.6170917, -8.602799, 9.74802, 4.895467, -5.57189,
-9.974221, 6.6029015, 3.5159023, -6.237272, 3.404211, 5.5633903,
0.89982665, 5.3451796, 1.7269523, -0.59742266, -1.2715619, -9.164232,
-7.082121, -9.302828, 2.9438436, -3.9870343, 0.3598027, 7.9110947,
-6.4613414, -4.281061, 1.2294351, -1.8541402, -5.16134, 1.1474174,
-6.785916, -5.0534077, -1.4444072, -2.3359509, 6.891039, -5.8588123,
0.21698959, 1.466848, -8.791596, 3.1130009, -3.8158994, 0.37530485,
1.4435463, -6.2665486, 9.904541, 5.446155, 8.845023, 6.4978065,
2.599429, 8.680027, -7.531543, -8.107721, -7.633592, -3.5911062,
-3.21341, 8.164354, 5.4505563, 2.5273335, -6.4427433, -6.4402676,
-8.846901, 6.695445, 2.0753634, 3.6874382, -8.905726, 9.295257,
6.071005, -5.978188, -6.0744486, -4.536719, -0.7512692, 4.7049856,
-5.2564535, -8.110622, -3.8745694, -6.7224054, 9.113831, 6.499982,
1.8017114, 8.0376215, 6.6945453, -6.802899, 7.8777366, 1.2651751,
-7.2666044, 5.5279393, 5.0820684, 1.7293745, 4.5140524, -0.7935935,
0.78534096, -0.4102984, -9.242688, -8.061312, -9.371912, 1.8529228,
-3.2897196, 0.78904694, 7.834866, 2.1245527, -5.8172064, -8.619009,
9.082922, 9.058112, 6.128085, -3.6922634, -9.243675, 2.7409308,
-0.67666525, -7.4396296, -0.22525014, -9.299326, 8.429715, -5.2022324,
-5.605632, -4.2721667, 0.30621618, -2.0462341, -4.4864225, 2.5011683,
-6.6802306, -4.802623, -2.064039, 0.070346184, 5.9929237, -4.692767,
-0.25696367, 0.9770331, -10.316096, 1.7219449, -6.8609085, -8.498862,
9.21183, 9.773622, 7.57509, -4.0315876, -8.605366, 2.719137,
-1.7086011, -7.582424, 1.1597288, -9.674558, 8.34306, -4.8358965,
5.7109065, 5.1997824, -7.667096, -1.7657642, -6.9838014, 6.959934,
2.7485735, -3.3989763, -9.361424, -3.553302, -4.3216615, 5.423499,
2.4259174, 7.8850665, -0.3574, -5.45464, -0.7187321, 4.735962,
-6.6191874, -9.241969, -3.3399029, -6.884703, 8.210152, 6.2744203,
3.019323, 8.211391, 7.087391, -6.520437, 6.9229465, 0.49741924,
-5.714762, -3.920736, 1.5310895, -1.3069419, -4.778316, 3.3032448,
-6.8328857, -4.883179, -2.8060002, -1.491533, 4.7355595, -5.3582,
1.6212804, 0.86929405, -8.676913, -4.308896, -2.2715824, 5.6644607,
-6.803109, -7.8355837, -5.052791, -6.2640395, 8.603944, 5.49465,
2.3831496, 7.9096346, 6.1475677, -5.85951, 9.234689, -0.3081243,
-2.0981023, -4.5837364, 6.980254, -2.781227, -5.062458, 1.6915218,
-7.292486, 6.0248265, -9.162274, 9.427442, 4.8255334, -6.1467485,
-8.691361, 5.348898, 3.007654, 7.8475046, 8.224898, -4.604263,
-8.1943035, -4.7263284, 0.0040657134, 5.250107, 5.8986645, -8.551282,
0.48767596, -2.259585, -5.1836033, -9.447716, -3.2453337, 9.087669,
-3.267849, 0.47469243, 2.5689826, -2.189663, 9.586419, 9.604734,
-2.2159877, -1.513909, -2.7751462, -4.5044036, -9.061131, 2.1594281,
-0.24338497, -9.369308, -4.1964145, -2.6438503, 9.225369, 4.6155806,
1.1550951, -6.0796514, -6.3537564, -8.284605, 6.6869516, 3.0042562,
3.1801558, -9.17751, 10.931516, 5.955477, -6.1496263, -6.2937446,
1.756042, -7.4857306, -8.088248, 9.432852, 9.243217, 7.4605927,
-4.656784, -9.114426, 3.2003248, -1.2288604, -7.388898, -0.26532704,
-9.065791, 7.3103304, -3.813765, 4.8670163, 5.2565265, -8.0483055,
-1.5007681, -7.7754626, 6.579898, 3.240919, -3.4663062, -10.2497015,
-3.1871583, -3.8710632, 4.5287504, 2.9835358, 7.8776045, -0.059085224,
2.551705, -5.6099687, -8.051724, 9.019452, 8.859785, 6.6567035,
-3.6322849, -7.4199224, 4.1292806, -0.46226323, -7.9339, 0.82571405,
-9.17391, 9.639091, -5.306832, -7.1873994, -3.4160562, 0.32690084,
-1.6234742, -3.677745, 2.5480423, -7.462799, -3.345576, -3.4295075,
-0.4474454, 6.11869, -6.223207, 0.5129819, 0.9725119, -8.424172,
0.9882244, -6.288785, -8.553587, 9.167816, 8.418917, 5.411207,
-3.1705177, -8.074246, 4.161711, -1.1771272, -7.538406, 0.5603369,
-9.673461, 8.253693, -5.147993, 2.1266177, -6.425567, -7.613618,
8.278863, 10.001666, 6.0181546, -3.9423492, -7.574184, 2.898755,
-0.9292633, -7.442719, 0.24825409, -9.109812, 9.905118, -5.270818,
-4.7761197, -1.3929124, 5.788752, -5.6559525, -8.8889675, -4.9645762,
-7.3533616, 10.658319, 7.0336833, 1.7223231, 6.5034914, 6.410748,
-5.8855133, 8.235398, 0.6083758, -8.490231, 3.8334427, 4.9530635,
1.0031434, 6.3164945, 0.26629567, -0.323364, -1.2766262, -9.142862,
-7.044067, -8.265038, 2.0176423, -4.280481, -0.7088907, 7.9875154,
-5.7842636, -1.4846907, 5.656278, -5.1035957, -7.288257, -4.208888,
-6.307455, 7.8297625, 5.891438, 3.1549516, 8.023583, 5.867665,
-6.3187184, 6.788668, 0.5369244, -7.1704245, -2.5398476, 0.5198131,
-1.8509074, -4.0256314, 2.1584282, -7.3648014, -3.7271903, -2.1425078,
-1.2499511, 5.3004465, -6.1990604, -1.3266561, 0.7876576, -8.11418,
7.1678786, 8.414981, -3.087591, -7.740283, -4.453975, -1.1710103,
5.7575336, 8.196971, -9.372425, -0.095184766, -1.987425, -5.3502407,
-7.2784634, -3.109592, 9.07611, 3.9478323, -4.111408, -0.6882469,
0.634525, -5.252379, 9.986848, 5.2585998, 8.545499, 7.6714025,
3.4117231, 8.70484, -7.929121, -5.359144, -8.928302, -3.674723,
-2.6288311, -0.12737024, 4.581883, -2.863696, 9.39071, 9.313172,
-4.787786, -0.13639021, -3.8321345, -3.1985075, -8.572446, 2.7996948,
-0.2652972, -9.592894, -3.7583604, -2.486999, -3.8108954, 7.8939605,
-1.784663, -4.7235045, 0.07018946, -7.0828953, 4.806032, -8.28277,
9.634417, 5.11867, -7.1419787, -9.530038, 5.936338, 3.318205,
5.2066703, 5.087264, -8.416352, -1.702131, -7.0548315, 7.074097,
1.8421279, -4.8883104, -9.232241, -3.9281447, -4.1873593, 4.5002437,
2.8049457, 7.586856, -1.0107033, -3.5576825, -0.26096594, 3.4048247,
-3.1948535, 9.640177, 9.117816, -6.531116, -1.53952, -4.4072394,
-3.9828095, -11.185514, 1.8414756, -0.31479302, -7.9985886, -4.696586,
3.1184084, -6.135421, -8.81595, 9.08041, 10.157197, 5.596092,
-3.6496532, -8.321895, 3.704782, -0.3020338, -7.425466, -0.063956216,
-10.264129, 8.708786, -4.3725615, -4.597963, 0.9012179, 3.7690432,
-3.5184565, 9.120633, 10.12895, -5.3390884, 0.19588766, -4.4941034,
-3.7726448, -8.782406, 3.485313, -0.06931429, -8.502367, -4.358423,
5.140103, 4.2939897, -9.256684, -3.5781474, -7.0174036, 8.459497,
2.392848, -3.5002148, -8.679796, -2.9670389, -4.1454806, 4.7589817,
3.4337878, 8.09502, -0.42299438, 4.0261736, -4.4937124, 0.8303022,
0.19681469, -6.739549, 10.731515, 5.369179, 8.942182, 8.505594,
2.304977, 8.542778, -7.974677, -4.397952, -9.135787, -3.352624,
4.4656205, -3.627241, -0.05463218, 0.5954855, -6.527154, 9.688609,
5.8686414, 8.388389, 7.879499, 3.457589, 9.646765, -7.9247236,
-6.0537405, -9.011433, -3.0639307, -2.2531514, 0.94571865, 4.3978257,
-2.4692948, 9.7113, 7.899575, -5.1595726, 0.5365807, -4.0166616,
-5.286823, -9.298944, 3.3841605, 0.92640465, -9.013827, -3.7610562,
-1.8709692, -4.1962123, 7.3255053, -3.1204004, -4.9687567, 0.122555226,
-8.557965, 7.4832845, -9.281233, 9.582861, 5.2511263, -5.8105016,
-9.318542, 6.91156, 3.7289927, -5.216679, -4.273006, -0.19759475,
-1.4491504, -4.1364098, 3.002992, -8.394848, -3.0864217, -2.7833686,
-1.177417, 4.995046, -7.164924, 0.86090815, 1.899614, -9.973965,
-5.4910603, -4.200808, -0.93155855, -2.06676, -5.4849715, 1.9909985,
-7.19723, -2.9836009, -2.443914, -1.0319707, 6.284103, -7.554319,
0.44961897, 2.387897, -10.106002, -2.532675, -4.0673523, 6.313767,
-1.6560588, -4.661055, 1.011201, -6.528702, 5.0510836, -9.741553,
8.66978, 6.3287044, -6.4119873, -8.098818, 5.914255, 4.2663326,
-2.3434393, -4.3752136, 6.400666, -1.7397915, -4.037627, 1.3683361,
-6.717499, 6.865371, -8.382272, 8.829956, 5.724677, -6.481636,
-10.259589, 6.719683, 5.003945, -6.1387596, -0.5676328, 6.0780406,
-6.3286204, -7.9776063, -4.557053, -6.591133, 8.79991, 5.998322,
2.1194391, 6.945833, 5.432107, -6.8508744, 7.8041553, 0.28581285,
-2.7538466, -4.251039, 6.1003494, -1.4555624, -4.285735, 0.59819657,
-7.0522394, 5.100951, -9.1891165, 10.578029, 4.9850693, -6.758524,
-9.513901, 7.1392217, 4.640414, 6.104336, 8.788033, -3.9600418,
-7.828938, -5.400681, -0.8639426, 4.7874727, 6.7899213, -9.713058,
1.0550014, -1.9961116, -6.871431, -7.168895, -3.6924503, 8.025428,
5.177085, 5.76472, -9.111557, -3.2811704, -6.961646, 7.0277257,
2.1834085, -4.0506253, -8.432314, -2.3365862, -4.2436066, 4.609077,
3.739704, 7.6885056, -0.23904124, 2.641114, -6.992464, -8.613562,
10.76977, 9.2453985, 6.9724383, -4.399948, -8.07105, 4.924221,
-1.6358271, -6.290521, 0.39896455, -9.705956, 8.629093, -4.1436124,
3.9859037, -2.9919102, 1.2322817, 1.3813168, -7.1030145, 10.535094,
4.700215, 9.001823, 7.368554, 1.9131027, 8.667619, -8.005199,
-5.7849984, -7.9658566, -3.1759193, -3.0510354, -0.5127189, 5.2521396,
-2.5857778, 10.577942, 8.735823, -3.6917107, 0.48757896, -3.5953057,
-4.139373, -9.436231, 3.031659, 0.15183237, -10.347374, -5.209101,
-2.3849895, -4.3158636, 7.2142596, -3.4271588, -3.336428, 0.66413516,
-7.1964726, 5.5208917, -10.2059555, 10.35657, 5.960686, -6.2226157,
-9.842664, 6.670386, 3.0433648, -6.880737, 4.2118607, 5.6850376,
0.4752439, 4.3317122, 0.44843468, 0.7181433, -2.0806158, -10.100382,
-7.054367, -10.204004, 3.8780792, -4.343315, 0.61828816, 7.9212966,
7.2431927, 7.348926, -2.5672731, -7.4490566, -6.420811, -1.2004898,
4.890277, 7.6524673, -9.241296, -0.19970311, -1.5652965, -4.189934,
-7.9567156, -3.988932, 9.895358, 3.836468, 5.725222, -8.650108,
-2.1406744, -6.8514485, 9.074763, 2.8717058, -3.1539822, -8.592764,
-4.02757, -3.259395, 4.3853426, 2.8690722, 8.666574, -1.2602813,
-2.315514, 9.593314, 4.64898, 2.9906435, -7.064887, -4.9759912,
-8.400361, 6.7235126, 1.2726755, 4.499182, -9.744734, 9.897998,
6.980119, -5.804198, -6.9562564, -1.8065041, -3.5014937, 7.4278836,
-3.0143435, -3.3378015, 0.95798886, -7.4174757, 5.6145577, -8.7207155,
9.466015, 5.5641866, -5.913285, -9.887425, 6.615085, 4.970599,
1.7791991, -7.025716, -9.087802, 8.53154, 10.144952, 7.1623,
-4.307247, -8.629207, 4.0146513, -1.5835062, -7.116183, 0.045584317,
-10.37325, 9.269662, -3.567286, -2.0496101, 8.332009, 5.190838,
2.7841866, -7.454105, -6.205746, -8.549381, 7.898965, 3.3500552,
3.9896798, -10.115926, 8.775537, 6.0777855, -5.807189, -6.1246943,
-5.531282, -3.6782076, 0.2044271, -0.91815233, -2.5859323, 2.3643644,
-7.0363684, -4.4786596, -3.2676542, -0.29736525, 5.1042604, -5.956428,
-0.049671445, 2.1835773, -8.837428, -1.0084368, 9.45803, 3.222279,
2.1036878, -7.342878, -6.2834063, -9.393092, 7.2432075, 2.3757913,
4.7674804, -10.428517, 9.164046, 6.316391, -6.2105484, -5.1276827,
-5.8179398, -1.2191972, 6.1875753, -6.209818, -9.1026535, -3.4926078,
-6.5855274, 8.979986, 6.4008203, 2.9415622, 6.5006814, 6.806564,
-5.449886, 7.6906066, 0.75641096, 2.0933912, -5.8074102, -8.459061,
9.297438, 9.711478, 6.481343, -3.4578187, -7.116151, 3.8222253,
-0.7006475, -7.6220503, 0.91162026, -9.785705, 9.447066, -4.852511,
-5.160675, -1.0412569, 6.7657337, -5.974227, -9.427133, -3.992298,
-6.2704806, 8.432346, 7.17969, 1.7310511, 7.9142547, 5.649339,
-5.0650344, 9.245941, 0.25018522, 5.280551, 4.9559727, -7.544381,
-3.005718, -7.480533, 7.444294, 2.3719463, -2.8144736, -8.173391,
-5.004326, -2.5832953, 3.428379, 3.4739318, 8.5329485, -1.319378,
-5.567855, -3.0428956, -0.9562445, -1.600981, -4.4354258, 1.2517997,
-7.7545943, -4.93451, -1.4461738, -0.22362544, 6.593608, -5.5013537,
-0.5056475, 1.4811271, -8.72843, 5.3780584, 6.3473873, -8.601755,
-4.315984, -8.108094, 8.169965, 2.449955, -4.081551, -9.082175,
-3.1919196, -3.11362, 4.424871, 2.4943724, 7.470024, -1.2013886,
2.13535, -3.457446, 0.38761395, 1.3209486, -6.146171, 10.346591,
5.5903134, 8.489395, 7.9821553, 2.338308, 8.471686, -8.2017355,
-6.571737, -9.5594845, -4.475217, 4.6982017, 5.7347655, -9.282387,
-1.8430327, -7.7516303, 7.2752633, 2.9617126, -3.2187881, -8.061637,
-3.5793667, -3.9250398, 4.845176, 1.9505411, 7.8202467, -0.5789625,
4.0954485, 4.4409847, -8.577281, -3.8839896, -7.1505795, 7.3197765,
1.4456764, -3.598486, -9.2553425, -3.5569315, -2.5580182, 3.2795029,
2.8316677, 7.632162, -0.08576344, 2.9827724, -4.820665, -9.070577,
8.635103, 10.043553, 6.645271, -2.6150296, -7.637808, 3.433156,
-0.78349173, -6.7831426, 0.47787574, -8.957138, 8.933081, -4.005893,
4.729381, 4.8990283, -8.742445, -3.3879035, -6.929414, 7.2769866,
3.796796, -3.4245021, -9.224718, -4.8399534, -4.7585316, 3.4832284,
2.938138, 8.100363, -1.662535, -7.2816644, 4.95603, 5.162535,
1.0016507, 5.525574, -0.7086881, 1.9173337, -2.5745423, -9.362299,
-6.4256473, -9.366656, 2.595181, -3.963092, 0.045139644, 9.112324,
2.5344908, -3.9459422, -0.7666484, 1.2136618, -5.8498735, 9.053462,
6.6044474, 7.931943, 6.8714848, 2.115116, 9.170453, -7.0514007,
-6.4015617, -8.340378, -3.5203493, -3.400066, 0.003962429, 4.3990927,
-3.4990125, 9.912593, 9.534971, -5.17982, -0.64203995, -4.6031103,
-3.928818, -8.401975, 1.7451713, 0.37445116, -10.431344, -4.836303,
-2.2592869, 8.529249, 5.269599, 2.1882794, -6.3106236, -6.439369,
-9.418624, 6.931396, 2.5454059, 4.588711, -9.6029415, 9.480327,
7.543218, -6.167318, -5.9805326, -5.284679, -3.4570992, 0.45621938,
-1.1651216, -4.9632597, 3.9490843, -7.119668, -4.0805306, -2.1647265,
-0.54189384, 5.860238, -6.5598564, 0.6147166, 3.1657085, -8.129198,
-1.5292208, -5.102776, 5.5794826, -1.8139999, -4.607186, 0.6783385,
-6.2517796, 6.4333005, -8.189866, 11.253241, 4.9944324, -5.3761983,
-9.849648, 6.497243, 5.2070746, -6.865968, 6.1966577, 5.490392,
0.86926943, 5.4014416, -1.3624412, -0.031642605, -1.7356774, -9.8584795,
-7.735523, -9.94711, 3.5129297, -3.7127354, 0.1649036, 7.921803,
2.586164, -7.688675, -9.207964, 8.804448, 9.260537, 6.6024175,
-3.783334, -8.981298, 3.950799, -0.76954013, -7.167382, 0.6600847,
-8.728484, 8.507834, -4.8735166, -6.754699, -3.8545423, 0.38615456,
-0.5436522, -3.9973226, 2.4733799, -7.4984365, -4.498431, -2.9755538,
-0.6024838, 5.40883, -5.803682, 1.7374693, 2.4580786, -9.299209,
-2.7360563, -4.701047, 7.5371637, -3.3171084, -4.9407444, 0.5160012,
-7.8488455, 6.129809, -7.371712, 9.963894, 5.268248, -6.2299542,
-10.983791, 6.9274054, 4.0826607, 7.522614, 7.924413, -2.9319065,
-6.859928, -5.9610357, -1.7975583, 7.2219076, 8.394122, -9.466941,
-0.400924, -0.26083717, -5.9295235, -7.8372893, -3.00806, 9.946106,
-3.8821065, -1.7192924, 4.481874, -6.355256, -8.592786, -3.559816,
-6.8648586, 9.651761, 5.1465845, 2.659613, 6.5525365, 6.327984,
-5.6477776, 7.556647, -1.0361049, 5.9643893, 8.397226, -3.8513625,
-8.029575, -4.928987, -1.2233036, 6.5816045, 7.543099, -10.935858,
0.7446981, -1.2230518, -6.269522, -7.773549, -3.2752116, 8.764047,
-3.3841984, -0.39229932, 3.6481724, -3.3134916, 9.010129, 8.87156,
-5.348142, 0.52830386, -4.755838, -4.1488113, -8.40969, 1.8320493,
-0.44896275, -9.131792, -3.601263, -4.2370377, -4.59228, 7.815326,
-1.702052, -4.7012773, 0.43237323, -6.8533735, 5.34507, -8.297732,
10.273995, 6.3037486, -6.095794, -10.032129, 5.6946406, 3.5583305,
-7.5630274, -4.7434363, -0.9323339, -1.5496844, -3.6731377, 3.2887077,
-7.1582565, -3.0170763, -3.6388342, -2.070968, 5.6646357, -5.7376785,
0.26180243, 0.40108192, -9.133376, 5.807761, 7.675481, -4.178928,
-8.7062435, -5.7605815, -1.3308456, 6.776816, 6.8244157, -10.150056,
-0.43374276, -1.6737874, -6.151744, -7.2015033, -4.497803, 8.606453,
-5.288564, -1.0503861, 5.542927, -4.945538, -9.416571, -3.35661,
-6.592699, 8.335459, 6.2627797, 1.4247105, 7.7152686, 5.3528814,
-7.215468, 6.7855287, 1.5657268, -3.6171882, 0.38460144, 5.535729,
-5.552274, -8.086247, -3.5836122, -7.0000415, 9.850601, 6.3234572,
3.0164952, 7.8953586, 6.368552, -5.6813407, 7.4191027, 0.3960336,
2.6575727, -4.459753, -1.1060127, 0.48693237, -7.2290735, 10.541103,
6.2095284, 8.308273, 9.473252, 2.6452353, 8.210103, -9.979734,
-4.476683, -10.068155, -4.6451726, -4.2022843, 0.4300488, 4.240806,
-3.5966403, 9.669565, 9.1402645, -6.2998195, -0.6572994, -4.271958,
-2.9818087, -8.872674, 1.2564496, 0.39380586, -10.053537, -3.669187,
-1.6427262, 7.8981867, 4.2203164, 1.9768403, -6.8467407, -7.1951556,
-8.402333, 6.5761886, 1.9226346, 4.2456584, -9.228203, 9.896327,
5.8616033, -6.827098, -5.4691267, 5.357181, 8.253368, -4.2883973,
-7.410103, -6.0843773, -1.750001, 6.1537323, 7.897683, -9.2180395,
-0.6570561, -1.6133261, -6.0545497, -8.483646, -3.3996086, 8.642256,
-4.677273, 1.2200086, 4.604564, -2.4007382, 10.500346, 8.584704,
-3.7411997, 0.5975042, -4.1480227, -3.508256, -10.036667, 1.6141268,
-0.37243235, -9.34071, -5.1666613, 2.9874296, -7.6557484, -9.090214,
9.241759, 10.408507, 6.121922, -4.2963643, -6.729748, 2.6710508,
-2.7361143, -7.251225, -0.44789976, -10.027093, 8.682258, -4.65374,
-5.398845, -1.19063, 4.840031, -5.4558244, -8.44263, -5.5487423,
-6.7852607, 8.111144, 5.8172216, 3.6786175, 6.549657, 6.642685,
-6.463873, 7.655348, 1.3401052, -7.4223614, 3.8569646, 3.508529,
1.1315485, 6.4152675, 0.5242627, 1.1304349, -0.5838881, -9.429557,
-7.7040505, -9.803773, 2.5069573, -3.2818406, 1.014133, 8.053717,
6.359578, 8.05051, -1.7548069, -7.5541015, -6.144135, -1.5246692,
5.116923, 7.1581917, -10.444097, 0.85571927, -2.036458, -5.63958,
-8.047584, -2.0305007, 8.633324, -6.09929, -4.4390955, 1.580935,
-1.2801274, -3.349909, 2.2843208, -5.767599, -2.9283683, -2.847038,
-0.19850065, 6.1552825, -5.0484824, -0.39075765, 2.3285275, -8.330094,
4.39891, 5.8360925, -8.85154, -2.2207773, -8.6248045, 7.3505344,
3.8232129, -4.0822716, -9.203255, -3.420618, -3.3706262, 4.8375645,
3.0935805, 8.18856, 0.2210884, 6.795792, 7.2120986, -3.5799494,
-8.446759, -5.262528, -2.0845866, 6.0746245, 6.517541, -9.405162,
0.8158618, -1.8160143, -5.5310774, -8.616707, -3.4574702, 8.8230505,
-3.0405905, 9.119562, 4.879108, 3.2864892, -6.2143307, -7.283942,
-9.467218, 7.667866, 1.098137, 5.443473, -8.762702, 9.069774,
5.449659, -4.805507, -6.4436784, -6.788195, -3.8819761, -0.107160725,
-1.6302885, -3.471012, 1.8332336, -6.6251383, -4.9479017, -2.3019001,
0.13049772, 3.9733682, -6.564351, 0.6886393, 1.7061596, -8.81119,
-4.530558, -3.8737023, 0.5048791, -1.3779871, -4.036758, 2.1360056,
-7.611686, -4.5399084, -2.6956904, -1.2589977, 5.2045274, -5.932023,
0.10620472, 2.9010866, -10.926671, -7.621211, 3.9929328, 6.0025797,
1.8888124, 4.8783293, 0.16084853, -0.22550313, -2.4149125, -9.93032,
-7.238496, -8.704228, 3.087368, -3.2053134, 0.5327386, 7.7022924,
-4.8319592, -2.4669728, 4.441017, -5.1833854, -8.494458, -4.182013,
-7.306521, 8.4326725, 5.5154443, 3.291214, 8.153887, 4.78111,
-6.919256, 6.8036094, 0.3318281, 3.966245, -2.0579517, 1.3703514,
1.328942, -5.886583, 9.989083, 6.0339065, 8.986813, 7.9694877,
1.9141843, 7.9097095, -8.426622, -7.2654123, -9.164292, -4.185407,
-2.6507325, 8.861909, 5.409023, 2.550961, -6.3101707, -5.9662743,
-8.823625, 7.80089, 1.8051136, 4.388368, -9.67941, 9.466094,
7.0654626, -6.325972, -4.8988295, 4.4538074, 7.660582, -8.30998,
-3.356939, -7.981069, 8.065981, 2.5452518, -4.388834, -8.085373,
-4.2480545, -2.1850252, 5.3481574, 2.438024, 8.640966, -0.5082026,
7.9833584, 7.1434493, -2.6150064, -7.1011605, -5.244851, -0.22680989,
6.620769, 7.487023, -9.898142, 1.0714443, -0.8842515, -5.9958177,
-8.526166, -3.389783, 8.298167, -1.1469772, -3.1687543, 8.017802,
-2.019077, -3.6644661, 1.2686906, -6.636663, 5.6581097, -9.081726,
9.735376, 5.325766, -6.342946, -9.402086, 6.977942, 4.199032,
4.2925735, 4.6514134, -8.669552, -3.0463095, -7.1368566, 8.179285,
3.4429412, -3.775557, -8.875288, -4.9587603, -4.0604424, 5.2675195,
3.8820574, 6.880211, -0.9704265, -5.8059387, -5.083739, 0.8754808,
-1.8249363, -3.775998, 1.7027764, -8.47354, -5.2963867, -2.6391037,
-0.69679457, 5.0704975, -5.5595098, -0.8783753, 1.8020356, -9.918703,
-5.7752275, -2.5524192, 5.586584, -6.2281923, -7.9939833, -3.881157,
-7.983639, 8.1196575, 6.9982843, 1.9812592, 7.104129, 6.396884,
-5.7202253, 7.6751027, 0.3685877, 6.4730477, 6.9648294, -3.0462086,
-8.661728, -5.054687, -1.0199026, 5.8725553, 7.6220303, -9.678648,
-0.16224325, -2.3577683, -6.9316816, -7.357938, -4.3430586, 8.924935,
3.079536, -3.0909173, 1.2339897, 0.074880116, -5.884731, 9.882513,
5.294362, 9.752974, 7.791508, 2.0459032, 8.316335, -8.219245,
-6.8477354, -10.10349, -2.37724, -2.4518857, 10.634547, 3.3327932,
2.4535518, -8.008529, -7.210462, -8.076062, 7.368519, 1.2678789,
3.660739, -9.112592, 8.88694, 6.800374, -5.7213173, -6.819621,
-7.238465, -4.1502047, 1.6634437, -1.5428135, -5.2276173, 2.0650377,
-7.401029, -6.0449276, -2.7107697, -1.0402545, 6.1908636, -4.712255,
1.0732843, 1.6600693, -9.84556, 4.373307, 5.9132752, -8.752781,
-2.0094981, -7.423869, 7.186957, 2.7793643, -4.4816527, -9.5175705,
-4.6161013, -3.3962734, 5.804884, 4.3130593, 8.19089, -0.20511037,
-5.072562, -2.8070931, 4.465721, -6.1268067, -8.315314, -4.15643,
-7.2807903, 8.717728, 5.7797318, 2.4779165, 8.600628, 7.011764,
-7.1783056, 8.431993, 1.3548224, -2.5225089, 8.312515, 4.6269197,
1.7711086, -6.653724, -7.4591713, -8.474785, 8.3964405, 1.9461682,
4.44265, -9.105209, 9.117343, 6.8057175, -5.744403, -6.2951274,
2.5132585, -7.097538, -8.568232, 8.448943, 8.884578, 5.1832843,
-4.553988, -8.992737, 3.0015492, -0.45940086, -8.223815, 1.7462057,
-8.9669075, 8.315793, -5.425251, -5.343185, -1.6062268, 4.805319,
-5.4703293, -6.990597, -4.377889, -7.0264125, 8.140674, 6.683342,
2.5412183, 6.974651, 6.9983544, -5.2748766, 7.4308834, -0.51973313,
-2.778917, 9.093748, 5.10337, 3.0833814, -7.746098, -5.3869863,
-10.204789, 7.217273, 2.4341223, 4.3581457, -10.0241995, 9.252511,
6.303752, -6.165773, -5.7687793, -7.451848, 4.22647, 5.415589,
1.5903296, 5.8709946, 0.26483506, 0.5173631, -1.5873165, -9.597508,
-7.978607, -8.577777, 3.143698, -5.7711525, 0.63054436, 8.287644,
3.1296065, -3.1472166, 0.85798645, -0.16926892, -5.2693324, 10.357757,
5.0647626, 9.067041, 8.242368, 2.1404712, 8.052271, -8.700286,
-6.098231, -8.274544, -3.1128733, -3.6805692, 0.80296373, 4.1702604,
-3.1379797, 9.592593, 9.749173, -6.399313, 0.75632197, -4.217812,
-4.0053473, -8.424189, 1.2311151, -0.44302824, -9.172409, -4.975698,
4.4079, 4.8351345, -6.999447, -2.9538054, -7.5963755, 7.6481085,
2.4964843, -2.1955037, -9.164687, -3.6441283, -4.0160627, 3.6681077,
2.322911, 7.718329, -0.85621303, -3.4735663, 8.357809, 5.0004034,
2.33282, -6.519094, -4.1831975, -8.4387045, 8.118419, 2.6901014,
4.6174254, -9.808999, 9.929476, 6.1078753, -5.9189906, -6.703255,
2.4779332, -3.6398525, 1.375762, 1.5770272, -7.4022613, 8.698953,
6.161196, 8.102238, 7.7393036, 2.343036, 7.7596436, -8.156387,
-7.0141606, -9.516412, -3.269546, 2.700253, -4.6976414, -0.8839833,
1.2897996, -7.0752673, 7.8846693, 5.7746615, 10.535078, 7.8922973,
2.544943, 8.494765, -8.299373, -5.4369893, -9.298646, -3.3062189,
-6.7873597, -4.256143, 0.08045289, -1.9658931, -4.141452, 1.6553928,
-7.020803, -4.1922736, -2.8400269, -1.5138949, 5.2997794, -5.4777503,
0.6353308, 1.1640027, -9.001459, -6.085931, -1.4323632, 5.511463,
-5.4675703, -7.6733546, -4.438825, -6.38035, 7.880188, 6.145921,
2.5460272, 7.5869274, 5.8147726, -6.3606296, 8.429413, 0.48110986,
2.1483753, -7.400373, -7.6465893, 9.591865, 9.157966, 6.186767,
-3.7618566, -9.475772, 3.5116363, -1.674339, -8.260369, -0.293232,
-8.053849, 8.634998, -5.2242255, -7.4994817, 4.8424644, 4.60871,
0.7648624, 5.2067356, -1.0661978, -0.118823916, -1.7825342, -8.879545,
-7.6583786, -9.235904, 3.3238368, -3.8090405, 0.44473922, 8.079074,
-7.9232473, 4.2652655, 5.6365457, 0.21484558, 3.812017, -0.50946385,
-0.3998421, -1.8048778, -9.595064, -8.159446, -8.354689, 2.9569294,
-3.5025482, 0.6069588, 7.3541465, 4.109814, -4.6125274, 1.5721611,
1.2275189, -6.796419, 9.352654, 5.893485, 8.843183, 8.273676,
1.3135281, 8.556037, -9.21975, -6.158201, -9.728189, -4.0082645,
6.0112767, 7.7777243, -4.162535, -7.9227033, -6.5253296, -1.8016518,
6.1608825, 7.035286, -10.030005, 0.17171112, -1.3161709, -4.945682,
-8.0575285, -4.089937, 8.128763, 6.7465863, 8.212232, -3.3156285,
-7.7685013, -5.050599, -3.2285364, 6.15267, 7.383537, -10.666667,
0.48537236, -1.7949098, -4.329011, -6.501579, -3.5737605, 8.752607,
-2.9227085, 1.4066819, 3.6375928, -2.888092, 8.288003, 8.974216,
-4.2667546, -0.38157666, -3.534098, -4.4365425, -9.035421, 1.3879027,
0.11322708, -11.06422, -4.557898, 4.275303, 5.784581, -8.700789,
-2.313518, -7.2518616, 6.6072617, 3.22615, -3.7572138, -8.163193,
-3.5232525, -2.209605, 4.4356976, 2.5066276, 7.730661, -0.76792735,
-3.5695906, 8.701726, 5.239358, 2.1230352, -7.751644, -6.758883,
-8.568605, 6.7048225, 2.1299078, 4.2021976, -10.38839, 9.648648,
7.041402, -4.995082, -5.625839, -4.8748918, -1.4866246, 5.394029,
-4.558989, -9.210139, -3.7288196, -5.940819, 7.348942, 6.3861637,
2.3151493, 7.372146, 6.316815, -6.6096587, 7.3781843, 0.81075156,
-2.211564, -4.3568363, 7.7663007, -2.6964061, -2.5601315, 1.2497784,
-8.414049, 6.5712786, -8.242176, 10.640566, 5.9161224, -6.1226053,
-10.746567, 6.1629124, 3.5417826, -3.0503044, 9.031444, 4.9884777,
2.9889703, -6.2081375, -5.372882, -9.375471, 7.9341474, 2.1506395,
5.6943135, -10.154119, 8.810391, 6.229278, -7.239945, -6.731529,
-2.9038048, 8.438732, 4.81046, 2.1446462, -7.2344875, -7.2098365,
-8.675893, 6.309864, 1.0370756, 3.6585405, -9.737723, 9.615832,
7.681602, -5.152856, -6.4754577, 3.1207442, -3.856653, 0.43202844,
0.8313042, -5.6281953, 10.939058, 5.1124125, 7.8311167, 7.8347497,
3.763796, 7.8749127, -7.082768, -4.905952, -9.482966, -3.0951052,
3.6651754, -6.804962, -8.545563, 9.152279, 10.416858, 6.10124,
-3.7124095, -7.6210303, 3.8152869, -1.5094537, -7.4233723, 0.65508044,
-10.03079, 8.279487, -5.314485, 1.6060838, -6.452426, -9.383829,
9.263488, 8.120832, 6.8883557, -3.5769064, -7.8673368, 4.3725443,
-0.031118015, -6.8491764, -1.3850738, -10.207933, 7.749035, -4.8061366,
-8.868855, 3.885024, 4.6642523, 0.79095066, 5.301643, -0.4545515,
-0.9308533, -0.92564243, -10.2425375, -7.674699, -7.9195585, 2.084639,
-5.4840817, -0.028789133, 8.922001, 4.390323, -4.031168, 0.25885418,
0.5270289, -7.013241, 8.937199, 4.6458983, 8.813837, 7.357566,
2.1216497, 7.3483577, -7.9985337, -5.4968724, -10.491069, -3.2315538,
-2.8616307, 7.9288216, 4.687873, 1.229557, -6.5481124, -7.5237064,
-7.7533736, 6.7752457, 1.7968571, 4.7309136, -10.449915, 9.557419,
7.563853, -6.878456, -6.2342567, 2.9910157, -3.2255893, -1.5926194,
1.7383416, -7.5207105, 9.137984, 4.7188873, 7.883702, 8.7091255,
1.6306087, 8.6800375, -8.262995, -5.746414, -9.041678, -4.391488,
-7.3057694, 4.8251824, 5.743705, 2.0577834, 5.9151564, 0.121925674,
1.2037617, -1.4677445, -10.10893, -7.9563184, -9.892848, 2.2555835,
-3.8140435, -0.38328013, 7.935756, -7.592389, 4.647879, 4.389132,
2.0055692, 5.9201107, 0.37864193, 0.7615896, -1.4354479, -9.020615,
-7.4279017, -9.619245, 2.326687, -3.641581, 1.2557279, 7.2839546,
6.382505, 8.923782, -2.8826938, -8.717515, -5.005848, -0.52759093,
6.631061, 7.978298, -8.452368, 0.93154275, -1.4772639, -4.826282,
-7.5012712, -3.2309644, 8.612205, -4.3908873, -1.389698, 5.4614887,
-5.389195, -8.455458, -4.667196, -6.2863946, 8.888634, 6.5068207,
2.2998507, 5.9218717, 6.8414483, -6.603785, 7.2472367, 1.2722801,
-1.196635, -4.719546, 6.106399, -2.137, -4.8052406, 2.1321285,
-6.7069674, 5.7024016, -6.9988713, 9.313738, 5.964362, -5.8161817,
-8.978339, 7.4022865, 4.1595497, -2.6859953, 8.140838, 5.7825665,
0.9720707, -7.1876583, -6.7885914, -7.8294363, 6.3184195, 2.836515,
4.168615, -10.275366, 9.721669, 6.7881947, -6.1733694, -6.314639,
-2.679783, 0.19461435, 4.3071327, -3.126654, 8.16122, 9.437986,
-6.3029327, -0.103073716, -4.940254, -2.912097, -8.624307, 2.2653143,
0.93817514, -9.562846, -4.046635, -2.2257104, 8.131667, 5.282382,
3.458679, -6.1569014, -7.9436684, -9.177292, 8.210361, 1.5269316,
4.472125, -9.046066, 8.749346, 6.607185, -8.022105, -7.080572,
2.8967333, -6.9516196, -7.7124248, 10.58694, 9.058654, 5.8560953,
-2.8903556, -6.9408574, 3.3186586, -1.4910809, -7.7564845, -1.0375772,
-9.955286, 7.4835095, -5.361859, 4.4344616, 5.3354306, -9.836401,
-3.214793, -7.617627, 7.373968, 1.7465899, -2.496044, -9.3351555,
-3.1017334, -3.1972976, 4.1397643, 3.9938672, 6.908709, 0.087706424,
-1.9262826, -4.0878453, 6.6982594, -3.1218529, -4.960623, 0.91422236,
-7.932013, 3.9989944, -8.203395, 10.370493, 3.7908428, -6.7324977,
-9.45615, 7.7494755, 4.1517024, 4.1157107, -3.0018613, 0.8277571,
0.16958666, -6.524397, 10.240861, 5.601858, 10.413509, 8.171869,
2.092434, 8.221103, -8.136671, -6.187072, -8.599778, -2.8237019,
-3.1300414, -0.11901236, 4.659268, -1.5727713, 10.185069, 8.738224,
-4.711362, -0.960229, -3.5817606, -3.9877677, -10.571133, 1.3780379,
-0.14461677, -9.181026, -5.2737045, 7.3387733, 8.671906, -3.5064857,
-7.140044, -6.1640635, -1.1792953, 7.6268096, 7.06198, -10.454301,
-0.24151617, -2.4145231, -6.1071773, -7.862048, -4.2315936, 8.869127,
-8.933645, 4.4142013, 5.2165446, 0.6535819, 5.8808155, 0.532211,
-0.67063534, -1.983087, -10.030017, -8.500104, -8.790784, 2.59253,
-3.8980408, -1.2312899, 8.596122, 4.5616593, 5.6215196, -9.08583,
-2.5338428, -8.014306, 7.2519517, 2.8483615, -3.377544, -9.034303,
-3.8570805, -3.5581877, 4.333116, 2.569992, 8.863308, -0.16305788,
-1.1921831, -5.372202, 6.4391885, -3.366709, -5.6878853, 0.7960452,
-7.266739, 7.1033545, -8.067419, 9.020808, 6.74276, -5.170963,
-9.482089, 6.1506896, 3.4655395, 1.1493545, -6.4998446, -9.175704,
9.566161, 8.8558035, 5.8556185, -5.230403, -8.363173, 1.987945,
-2.3056822, -7.026945, 0.45359832, -9.014409, 7.509525, -4.857798,
4.4751496, -4.6364827, -0.08367322, 0.4313205, -5.6764636, 9.185227,
6.3760757, 8.318536, 8.091843, 1.3732567, 9.939089, -9.061469,
-5.863468, -8.651811, -3.203734, -5.1804323, -4.0894804, 0.89871854,
-1.1432245, -2.0202007, 3.0207603, -7.299665, -4.8259854, -3.7972755,
-0.7361758, 5.1740737, -7.002102, -0.16791224, 1.0912077, -7.8899927},
{1000, 1000, 1001, 1001, 1002, 1002, 1003, 1003, 1004, 1004, 1005, 1005, 1006, 1006, 1007, 1007,
1008, 1008, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009,
1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009,
1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009,
1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009,
1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009,
1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009, 1009,
1009, 1009, 1009, 1009, 1009, 1009, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010,
1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010,
1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010,
1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010,
1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010,
1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010,
1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1010, 1011, 1011, 1011, 1011, 1011, 1011,
1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011,
1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011,
1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011,
1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011,
1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011,
1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1011, 1012, 1012,
1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012,
1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012,
1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012,
1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012,
1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012,
1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012, 1012,
1012, 1012, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013,
1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013,
1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013,
1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013,
1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013,
1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013, 1013,
1013, 1013, 1013, 1013, 1013, 1013, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014,
1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014,
1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014,
1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014,
1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014,
1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014,
1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1014, 1015, 1015, 1015, 1015, 1015, 1015,
1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015,
1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015,
1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015,
1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015,
1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015,
1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1015, 1016, 1016,
1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016,
1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016,
1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016,
1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016,
1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016,
1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016, 1016,
1016, 1016, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017,
1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017,
1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017,
1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017,
1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017,
1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017, 1017,
1017, 1017, 1017, 1017, 1017, 1017, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018,
1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018,
1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018,
1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018,
1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018,
1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018,
1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018, 1018},
{1001, 1010, 1002, 1012, 1003, 1016, 1004, 1005, 1006, 1008, 1014, 1015, 1007, 1013, 1011, 1017,
1009, 1018, 8, 23, 33, 34, 38, 48, 52, 53, 57, 82, 100, 101, 110, 120,
139, 144, 145, 146, 148, 182, 184, 218, 245, 255, 256, 260, 276, 280, 291, 293,
332, 341, 342, 349, 359, 363, 369, 392, 412, 432, 443, 463, 471, 481, 497, 500,
503, 512, 518, 530, 538, 552, 553, 559, 596, 602, 603, 634, 639, 640, 662, 671,
677, 681, 690, 695, 706, 722, 726, 741, 745, 747, 755, 773, 782, 789, 792, 800,
823, 825, 840, 845, 857, 873, 878, 917, 919, 924, 925, 934, 936, 944, 946, 950,
956, 969, 970, 974, 979, 981, 12, 18, 20, 25, 27, 36, 55, 89, 107, 123,
128, 134, 168, 169, 179, 188, 195, 197, 200, 211, 215, 221, 231, 237, 239, 265,
278, 287, 302, 305, 308, 334, 345, 355, 358, 373, 379, 386, 387, 390, 403, 404,
405, 436, 438, 442, 451, 468, 490, 493, 499, 511, 515, 526, 536, 546, 547, 583,
586, 589, 594, 608, 613, 614, 616, 618, 635, 645, 648, 653, 682, 683, 684, 697,
716, 719, 765, 772, 781, 805, 809, 829, 841, 848, 865, 866, 876, 877, 879, 888,
896, 898, 911, 918, 942, 947, 964, 977, 987, 995, 3, 9, 16, 19, 35, 51,
61, 75, 78, 85, 99, 102, 109, 112, 122, 124, 142, 164, 186, 198, 204, 206,
228, 229, 236, 241, 251, 252, 273, 274, 282, 284, 303, 309, 311, 314, 317, 319,
322, 325, 327, 352, 367, 381, 415, 419, 430, 450, 455, 461, 464, 475, 478, 484,
489, 491, 501, 506, 508, 545, 550, 572, 575, 597, 610, 619, 652, 672, 678, 685,
696, 701, 708, 709, 711, 724, 730, 736, 763, 784, 788, 799, 826, 852, 861, 867,
869, 886, 889, 894, 903, 928, 929, 935, 937, 940, 983, 986, 991, 993, 2, 4,
32, 50, 56, 65, 67, 71, 80, 83, 115, 117, 129, 141, 147, 151, 155, 166,
171, 178, 190, 202, 214, 219, 224, 225, 227, 232, 243, 254, 259, 267, 275, 289,
298, 316, 326, 338, 343, 351, 360, 378, 398, 431, 434, 452, 454, 462, 466, 473,
502, 532, 544, 551, 556, 563, 584, 606, 624, 625, 632, 646, 651, 657, 659, 665,
675, 679, 689, 712, 721, 728, 749, 774, 776, 778, 807, 816, 836, 855, 856, 862,
887, 892, 905, 913, 915, 920, 938, 943, 952, 960, 965, 966, 973, 980, 988, 989,
992, 996, 11, 29, 49, 64, 73, 74, 79, 86, 91, 126, 130, 131, 152, 158,
160, 170, 173, 185, 191, 205, 223, 230, 249, 269, 321, 333, 336, 337, 350, 366,
374, 380, 385, 389, 407, 409, 413, 418, 433, 435, 437, 441, 457, 458, 460, 470,
479, 480, 485, 495, 509, 519, 525, 541, 542, 543, 554, 558, 564, 566, 571, 578,
592, 620, 642, 643, 647, 654, 658, 663, 669, 693, 713, 731, 734, 746, 757, 761,
793, 798, 812, 839, 851, 854, 868, 872, 874, 880, 900, 902, 907, 908, 912, 923,
933, 941, 948, 949, 958, 968, 15, 37, 40, 41, 46, 47, 66, 87, 106, 108,
113, 135, 138, 140, 149, 150, 193, 203, 207, 212, 217, 234, 235, 240, 242, 244,
271, 290, 296, 299, 328, 347, 356, 376, 382, 383, 391, 395, 397, 416, 417, 424,
426, 429, 453, 476, 492, 496, 505, 516, 517, 521, 534, 537, 569, 576, 582, 590,
593, 600, 601, 604, 607, 611, 621, 622, 655, 660, 668, 691, 727, 739, 758, 760,
771, 780, 783, 791, 796, 808, 810, 813, 820, 821, 830, 831, 834, 842, 844, 875,
884, 890, 901, 926, 939, 951, 955, 978, 985, 994, 26, 39, 44, 45, 59, 60,
69, 104, 132, 133, 153, 159, 165, 167, 172, 176, 180, 181, 183, 189, 209, 238,
250, 253, 257, 261, 262, 263, 279, 281, 292, 295, 318, 323, 346, 348, 353, 362,
370, 393, 394, 396, 399, 406, 421, 439, 440, 444, 448, 469, 474, 487, 498, 523,
528, 539, 549, 555, 560, 562, 567, 570, 573, 579, 580, 599, 617, 627, 629, 636,
649, 661, 664, 686, 700, 707, 717, 744, 764, 766, 767, 785, 801, 802, 822, 833,
846, 849, 864, 881, 897, 904, 922, 927, 953, 961, 967, 972, 997, 998, 5, 13,
21, 24, 31, 54, 84, 97, 111, 121, 156, 162, 194, 196, 201, 213, 220, 247,
264, 266, 300, 310, 313, 320, 329, 330, 331, 344, 371, 372, 377, 410, 422, 425,
427, 428, 449, 456, 472, 477, 483, 494, 507, 524, 529, 533, 535, 540, 548, 557,
591, 595, 598, 605, 609, 626, 628, 630, 633, 644, 674, 676, 680, 687, 698, 702,
703, 704, 705, 715, 720, 737, 738, 742, 750, 751, 754, 762, 770, 775, 779, 786,
790, 806, 814, 815, 818, 824, 835, 838, 843, 860, 882, 883, 909, 916, 945, 971,
982, 984, 1, 6, 10, 28, 30, 42, 43, 58, 62, 72, 88, 90, 94, 105,
114, 116, 119, 125, 136, 157, 161, 174, 199, 210, 216, 246, 258, 270, 277, 285,
286, 288, 297, 312, 339, 340, 361, 368, 384, 401, 408, 411, 423, 447, 459, 465,
467, 482, 504, 514, 520, 527, 568, 574, 581, 585, 588, 631, 638, 650, 666, 667,
670, 673, 688, 692, 694, 714, 718, 723, 725, 735, 743, 752, 756, 769, 794, 795,
804, 811, 819, 832, 850, 858, 859, 863, 871, 885, 891, 893, 899, 914, 921, 931,
954, 957, 959, 975, 990, 999, 0, 7, 14, 17, 22, 63, 68, 70, 76, 77,
81, 92, 93, 95, 96, 98, 103, 118, 127, 137, 143, 154, 163, 175, 177, 187,
192, 208, 222, 226, 233, 248, 268, 272, 283, 294, 301, 304, 306, 307, 315, 324,
335, 354, 357, 364, 365, 375, 388, 400, 402, 414, 420, 445, 446, 486, 488, 510,
513, 522, 531, 561, 565, 577, 587, 612, 615, 623, 637, 641, 656, 699, 710, 729,
732, 733, 740, 748, 753, 759, 768, 777, 787, 797, 803, 817, 827, 828, 837, 847,
853, 870, 895, 906, 910, 930, 932, 962, 963, 976},
{0.0375429, 0.0375429, 0.0380193, 0.0380193, 0.0390152, 0.0390152, 0.0397533, 0.0397533,
0.0413317, 0.0413317, 0.0414699, 0.0414699, 0.0471734, 0.0471734, 0.0497421, 0.0497421,
0.0541098, 0.0541098, 0.347202, 0.325568, 0.428782, 0.396247, 0.351154, 0.376728,
0.428782, 0.346019, 0.37844, 0.319143, 0.266898, 0.38697, 0.428782, 0.428782,
0.406587, 0.324322, 0.300772, 0.334573, 0.404499, 0.374328, 0.379171, 0.344723,
0.286148, 0.315663, 0.418412, 0.335559, 0.350764, 0.341872, 0.398113, 0.355783,
0.336494, 0.38494, 0.369691, 0.41585, 0.309856, 0.348328, 0.317848, 0.298328,
0.347966, 0.401303, 0.390654, 0.403246, 0.340475, 0.428782, 0.427396, 0.369112,
0.397054, 0.33233, 0.402182, 0.425795, 0.32879, 0.355395, 0.360418, 0.418468,
0.334416, 0.428782, 0.290134, 0.408489, 0.297885, 0.345323, 0.385738, 0.255841,
0.428782, 0.287308, 0.37873, 0.403246, 0.338533, 0.406269, 0.345726, 0.337707,
0.332302, 0.323417, 0.300644, 0.330364, 0.375397, 0.351353, 0.428782, 0.306779,
0.292796, 0.384838, 0.351345, 0.36857, 0.379273, 0.356486, 0.364724, 0.349878,
0.351715, 0.428782, 0.295487, 0.318492, 0.409458, 0.428782, 0.40158, 0.336903,
0.396822, 0.428782, 0.357975, 0.364505, 0.279662, 0.396395, 0.382695, 0.321661,
0.426458, 0.404946, 0.374837, 0.369184, 0.371807, 0.341826, 0.342808, 0.396877,
0.350699, 0.340324, 0.349302, 0.366682, 0.376295, 0.387658, 0.328765, 0.377055,
0.422431, 0.366682, 0.377383, 0.325377, 0.346524, 0.361752, 0.332547, 0.342417,
0.426458, 0.344312, 0.341058, 0.331798, 0.261318, 0.317866, 0.352108, 0.305209,
0.324999, 0.354711, 0.390277, 0.368758, 0.406089, 0.409815, 0.376139, 0.3574,
0.341148, 0.426458, 0.394844, 0.420412, 0.379616, 0.426458, 0.407329, 0.320178,
0.370283, 0.361069, 0.420412, 0.305671, 0.316232, 0.328956, 0.371197, 0.381023,
0.409169, 0.373685, 0.334751, 0.279633, 0.323443, 0.40549, 0.426458, 0.408024,
0.318459, 0.426458, 0.3289, 0.341933, 0.308234, 0.345802, 0.356128, 0.387126,
0.426458, 0.345731, 0.38951, 0.294699, 0.415182, 0.338835, 0.368434, 0.356644,
0.426458, 0.426458, 0.284949, 0.357255, 0.39217, 0.349094, 0.340693, 0.426458,
0.344029, 0.339529, 0.383809, 0.326242, 0.379493, 0.328765, 0.397869, 0.409169,
0.408024, 0.395661, 0.440375, 0.356357, 0.292202, 0.374151, 0.438459, 0.339068,
0.335221, 0.440375, 0.413088, 0.40484, 0.317889, 0.351513, 0.40076, 0.411686,
0.383247, 0.322453, 0.294628, 0.316353, 0.293595, 0.364222, 0.371582, 0.358023,
0.375827, 0.307742, 0.302896, 0.440375, 0.390099, 0.357134, 0.389842, 0.440375,
0.347122, 0.264571, 0.378523, 0.437817, 0.36704, 0.373819, 0.365059, 0.440375,
0.357322, 0.354177, 0.403952, 0.361461, 0.440375, 0.323683, 0.317318, 0.352699,
0.374323, 0.37942, 0.360222, 0.28739, 0.349268, 0.349823, 0.360978, 0.340664,
0.407534, 0.336606, 0.382582, 0.431253, 0.352723, 0.440375, 0.352699, 0.393439,
0.386917, 0.43427, 0.354885, 0.433652, 0.385784, 0.288925, 0.364788, 0.440375,
0.359681, 0.440375, 0.345734, 0.291863, 0.35123, 0.304997, 0.307342, 0.342574,
0.440375, 0.306176, 0.408543, 0.327421, 0.372033, 0.378947, 0.376762, 0.292624,
0.411569, 0.396591, 0.371355, 0.344181, 0.354013, 0.355921, 0.385067, 0.322048,
0.344834, 0.363519, 0.294679, 0.389972, 0.323571, 0.432023, 0.398717, 0.386695,
0.378169, 0.309986, 0.373675, 0.355723, 0.326626, 0.378668, 0.332012, 0.377039,
0.398717, 0.333376, 0.304154, 0.375449, 0.348674, 0.275605, 0.342682, 0.391556,
0.38071, 0.354591, 0.343493, 0.342063, 0.276153, 0.330391, 0.346938, 0.382593,
0.392816, 0.312487, 0.398717, 0.357612, 0.398717, 0.307151, 0.367719, 0.323803,
0.31991, 0.345842, 0.354989, 0.342943, 0.339149, 0.325028, 0.378619, 0.384254,
0.344932, 0.392816, 0.329257, 0.38311, 0.285198, 0.318453, 0.352693, 0.336511,
0.290389, 0.328826, 0.314366, 0.393904, 0.333508, 0.392816, 0.310907, 0.370373,
0.355723, 0.398717, 0.326586, 0.333648, 0.322233, 0.393904, 0.334792, 0.365416,
0.358759, 0.344006, 0.389582, 0.359211, 0.347089, 0.389412, 0.357481, 0.398717,
0.346699, 0.398717, 0.264741, 0.324031, 0.398717, 0.398717, 0.291013, 0.390924,
0.377039, 0.362317, 0.324104, 0.398717, 0.356456, 0.318131, 0.315321, 0.321209,
0.373675, 0.362091, 0.373959, 0.369449, 0.370717, 0.393904, 0.350879, 0.355071,
0.322256, 0.357578, 0.377627, 0.437056, 0.396237, 0.437056, 0.357231, 0.297519,
0.387163, 0.389742, 0.413912, 0.374848, 0.437056, 0.365316, 0.385118, 0.363842,
0.30858, 0.381848, 0.354178, 0.32647, 0.299198, 0.356914, 0.35764, 0.405549,
0.370576, 0.425376, 0.435356, 0.318489, 0.324561, 0.295974, 0.437056, 0.33609,
0.383368, 0.374117, 0.421105, 0.437056, 0.381648, 0.352693, 0.437056, 0.3816,
0.42807, 0.268712, 0.322676, 0.318311, 0.307936, 0.437056, 0.32249, 0.383368,
0.405734, 0.286389, 0.358198, 0.367973, 0.325378, 0.355702, 0.367595, 0.402301,
0.432464, 0.415106, 0.333175, 0.359405, 0.404003, 0.382687, 0.355702, 0.304018,
0.405626, 0.308436, 0.353578, 0.339379, 0.437056, 0.383474, 0.349861, 0.343831,
0.40856, 0.385483, 0.35943, 0.409976, 0.356437, 0.367652, 0.369748, 0.370851,
0.336399, 0.326666, 0.372324, 0.374514, 0.317054, 0.38925, 0.377407, 0.349446,
0.415202, 0.394036, 0.420521, 0.437056, 0.437056, 0.418209, 0.365553, 0.355742,
0.371394, 0.339511, 0.370851, 0.377222, 0.369155, 0.31072, 0.357234, 0.412796,
0.404113, 0.368018, 0.330735, 0.368529, 0.357834, 0.316281, 0.339976, 0.33387,
0.387011, 0.354577, 0.412796, 0.357851, 0.329632, 0.32557, 0.32105, 0.297655,
0.338539, 0.403709, 0.35033, 0.352608, 0.387619, 0.412796, 0.333941, 0.347183,
0.302139, 0.412796, 0.412796, 0.349322, 0.368314, 0.317589, 0.335028, 0.407513,
0.366446, 0.345646, 0.315165, 0.339043, 0.412796, 0.351713, 0.343854, 0.333485,
0.356134, 0.306976, 0.272188, 0.348631, 0.407443, 0.296134, 0.397448, 0.374771,
0.341447, 0.404113, 0.342794, 0.318948, 0.369215, 0.349097, 0.362327, 0.332076,
0.365352, 0.397448, 0.379185, 0.332915, 0.387096, 0.389822, 0.386159, 0.314303,
0.343459, 0.332881, 0.303915, 0.385996, 0.365352, 0.317887, 0.272545, 0.379603,
0.35881, 0.309039, 0.400524, 0.343979, 0.378314, 0.366157, 0.33871, 0.346352,
0.38195, 0.412796, 0.365452, 0.380615, 0.412796, 0.325906, 0.33658, 0.373197,
0.355123, 0.28581, 0.412796, 0.323398, 0.360532, 0.412796, 0.346158, 0.368018,
0.341127, 0.350992, 0.371989, 0.360188, 0.2846, 0.281752, 0.368951, 0.402206,
0.360805, 0.382115, 0.310639, 0.408033, 0.373666, 0.383764, 0.345774, 0.400732,
0.337596, 0.342832, 0.289939, 0.413355, 0.405649, 0.388607, 0.416164, 0.334705,
0.371603, 0.33876, 0.362847, 0.38623, 0.279974, 0.352752, 0.410311, 0.294296,
0.399918, 0.408639, 0.396146, 0.324048, 0.348186, 0.338272, 0.41409, 0.316317,
0.313733, 0.416164, 0.416164, 0.375717, 0.413813, 0.365888, 0.33876, 0.376376,
0.348664, 0.396968, 0.402257, 0.31561, 0.402755, 0.354078, 0.348473, 0.314243,
0.299501, 0.329667, 0.393979, 0.31858, 0.410602, 0.416164, 0.395934, 0.348009,
0.357258, 0.322405, 0.416164, 0.416164, 0.356017, 0.416164, 0.397194, 0.416164,
0.398238, 0.360476, 0.416164, 0.330393, 0.39975, 0.38578, 0.373815, 0.388937,
0.27297, 0.310564, 0.357319, 0.370602, 0.369006, 0.376197, 0.308045, 0.391022,
0.397957, 0.329943, 0.416164, 0.40016, 0.309671, 0.318892, 0.352948, 0.384973,
0.350486, 0.33329, 0.344882, 0.32893, 0.39847, 0.359525, 0.381334, 0.330826,
0.388952, 0.345762, 0.382296, 0.384897, 0.327487, 0.357503, 0.41131, 0.357213,
0.336008, 0.41131, 0.332213, 0.41131, 0.372074, 0.373669, 0.38779, 0.377984,
0.341305, 0.38597, 0.378367, 0.295706, 0.325844, 0.361406, 0.393785, 0.281161,
0.360846, 0.345873, 0.301227, 0.395005, 0.32572, 0.319046, 0.299442, 0.346982,
0.332835, 0.287887, 0.398911, 0.379486, 0.39481, 0.328616, 0.385451, 0.41131,
0.394098, 0.41131, 0.360114, 0.37111, 0.37073, 0.41131, 0.41131, 0.369772,
0.305098, 0.299995, 0.363871, 0.366529, 0.296144, 0.310859, 0.370239, 0.382593,
0.408931, 0.396215, 0.37987, 0.348995, 0.342832, 0.279033, 0.276496, 0.294879,
0.348439, 0.351434, 0.334269, 0.41131, 0.40301, 0.286074, 0.41131, 0.378367,
0.374573, 0.3428, 0.332332, 0.306617, 0.319906, 0.402376, 0.357863, 0.331733,
0.288342, 0.394641, 0.386564, 0.305584, 0.35056, 0.400894, 0.41131, 0.348664,
0.2988, 0.363537, 0.392771, 0.401536, 0.40914, 0.394446, 0.360894, 0.378944,
0.321657, 0.322946, 0.294815, 0.392328, 0.317201, 0.401201, 0.333189, 0.306846,
0.304346, 0.401201, 0.322282, 0.401201, 0.375782, 0.358522, 0.332021, 0.337589,
0.297093, 0.335744, 0.362192, 0.372074, 0.343927, 0.355899, 0.401201, 0.355325,
0.368465, 0.331907, 0.395639, 0.307174, 0.289142, 0.331007, 0.290558, 0.385427,
0.392812, 0.305037, 0.292575, 0.332144, 0.322076, 0.376934, 0.34968, 0.316089,
0.391249, 0.359983, 0.32511, 0.335832, 0.326312, 0.338961, 0.338976, 0.401201,
0.378278, 0.323445, 0.359129, 0.365345, 0.39926, 0.361868, 0.277803, 0.386472,
0.372749, 0.373043, 0.357964, 0.321007, 0.362192, 0.356641, 0.362332, 0.380414,
0.39926, 0.363824, 0.319796, 0.32285, 0.359021, 0.350387, 0.401201, 0.37782,
0.292778, 0.336474, 0.392328, 0.375175, 0.309125, 0.294989, 0.328428, 0.393755,
0.303502, 0.346936, 0.295521, 0.327794, 0.401201, 0.28413, 0.335848, 0.401201,
0.313135, 0.336854, 0.36424, 0.330237, 0.327794, 0.35067, 0.277368, 0.331118,
0.401201, 0.341167, 0.395639, 0.401201, 0.33302, 0.319563, 0.426276, 0.43267,
0.328824, 0.432373, 0.43267, 0.37624, 0.418726, 0.365699, 0.398746, 0.402629,
0.329643, 0.322596, 0.310646, 0.408047, 0.316483, 0.345628, 0.344303, 0.361839,
0.376772, 0.424949, 0.43267, 0.361347, 0.360863, 0.394184, 0.393479, 0.355723,
0.43267, 0.357677, 0.31106, 0.419884, 0.334338, 0.423162, 0.428204, 0.33749,
0.356366, 0.376344, 0.308526, 0.367073, 0.353705, 0.43267, 0.353419, 0.350477,
0.367082, 0.43267, 0.429616, 0.367528, 0.384171, 0.315237, 0.393478, 0.429616,
0.339083, 0.341972, 0.295198, 0.348376, 0.343069, 0.424912, 0.378695, 0.358624,
0.330081, 0.382441, 0.399883, 0.387575, 0.309083, 0.361839, 0.305909, 0.330106,
0.411098, 0.36821, 0.332184, 0.345764, 0.343843, 0.360475, 0.35464, 0.382659,
0.381828, 0.34253, 0.34615, 0.354163, 0.43267, 0.43267, 0.407435, 0.429616,
0.387055, 0.369557, 0.43267, 0.43267, 0.298356, 0.40836, 0.403647, 0.411782,
0.41953, 0.356693, 0.429616, 0.424148, 0.383597, 0.33335, 0.42703, 0.401783,
0.416439, 0.28716},
{900, 100, 800, 100, 700, 100, 500, 200, 300, 200, 100, 100, 200, 100, 100, 100, 100, 100, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
Common::CLUSTER_SELECTION_METHOD::EOM,
false,
0.0,
{9, 2, 8, 9, 9, 5, 6, 0, 0, 0, 4, 0, 9, 1, 9, 8, 7, 6, 8, 6, 0, 3, 0, 6, 8, 9, 4, 7, 1,
2, 9, 3, 4, 3, 5, 2, 4, 2, 8, 6, 0, 1, 3, 5, 3, 4, 3, 3, 2, 9, 2, 4, 6, 7, 0, 8, 5, 0,
3, 0, 5, 7, 7, 0, 8, 4, 4, 8, 8, 2, 8, 6, 5, 3, 7, 0, 8, 9, 6, 5, 1, 8, 3, 1, 4, 1, 2,
3, 2, 5, 4, 5, 7, 5, 5, 3, 5, 9, 7, 0, 1, 4, 8, 9, 3, 4, 8, 6, 2, 6, 0, 8, 4, 6, 2, 2,
7, 0, 1, 6, 0, 3, 2, 9, 6, 4, 5, 6, 1, 4, 4, 9, 2, 7, 1, 5, 6, 8, 5, 4, 2, 6, 7, 1, 4,
5, 2, 1, 3, 2, 1, 9, 7, 0, 5, 1, 7, 7, 4, 2, 3, 9, 9, 7, 6, 6, 0, 5, 1, 2, 8, 1, 1, 7,
3, 3, 9, 7, 1, 7, 9, 9, 6, 2, 8, 1, 0, 1, 3, 5, 8, 7, 0, 6, 9, 5, 8, 3, 7, 4},
{0.78846246, 0.7849741, 0.937459, 0.7879698, 0.7145324, 0.69530874, 0.891888,
0.88132507, 0.7182645, 0.8545426, 0.9300125, 0.67259413, 0.6634503, 0.8718482,
0.8682387, 0.88102984, 1.0, 0.8767183, 0.7021334, 1.0, 0.91331875,
0.9947582, 0.8429277, 0.76598513, 0.9316941, 0.8204941, 0.7420175, 0.7653661,
0.96096206, 0.9490603, 0.85148084, 1.0, 0.88101643, 0.9286598, 0.8888738,
0.8281054, 0.8064258, 0.74586207, 0.96311015, 0.6262373, 0.6119416, 0.81256336,
0.8981358, 0.88311315, 0.9368603, 0.96779734, 1.0, 0.8786544, 0.7550972,
0.84879047, 0.92892593, 0.76864564, 0.9608688, 0.8873914, 0.9613044, 0.83862036,
0.8335599, 0.69317526, 0.9724107, 0.9421649, 0.95863754, 0.9092269, 0.8734039,
0.87950224, 0.9222009, 0.787177, 0.69281447, 0.8093117, 0.97554255, 0.81599283,
0.89801544, 0.8387986, 0.9083558, 0.83843285, 0.8811393, 0.8639771, 0.9148821,
0.85135794, 0.7230753, 0.860122, 0.93063366, 0.9793279, 0.9580142, 0.8849203,
0.94008243, 0.7849559, 0.87001556, 0.9347257, 0.7067981, 0.8687189, 0.7648523,
0.8955655, 0.980858, 1.0, 0.862825, 0.775708, 0.8236452, 0.8161936,
0.8614508, 0.94244635, 1.0, 0.73252815, 0.8437488, 0.8448594, 0.9435349,
0.97876644, 0.884773, 0.72953635, 0.7480388, 1.0, 0.9182666, 0.7623391,
0.6656271, 0.9488006, 0.7630241, 0.7011484, 0.649366, 0.7918839, 0.8639346,
0.93697715, 0.7992577, 0.76674926, 0.9192806, 0.8226126, 0.8549355, 0.7315127,
0.9998518, 0.9806381, 0.79453516, 0.8199058, 0.7388057, 0.9623143, 0.90600365,
0.8394823, 0.990545, 0.7378503, 0.7435042, 0.8281979, 0.90617263, 0.7777961,
0.9055647, 0.87161446, 0.8944556, 0.77123886, 0.75549823, 0.78260374, 0.877213,
1.0, 0.9303909, 0.867675, 0.87624264, 0.8818508, 0.923018, 0.8841391,
0.91878325, 0.7453629, 0.8917042, 0.7431318, 1.0, 0.9916206, 0.8906759,
0.9782674, 0.85721254, 0.97126245, 1.0, 0.8641099, 0.8171247, 0.9731305,
1.0, 0.8699122, 0.8259164, 0.7988863, 1.0, 0.7220662, 0.96562165,
0.79851115, 0.6863839, 0.8905847, 0.83507586, 0.80355006, 0.9839154, 0.95146024,
0.89326334, 0.85242677, 0.84625447, 0.86827004, 0.75781137, 0.65571195, 0.8325561,
0.9011331, 0.77075315, 0.9326203, 0.8810431, 0.90232337, 0.8286241, 1.0,
0.83256954, 0.83346933, 0.83974075, 0.6880056}}};
const std::vector<MembershipVectorInputs<float, int>> membership_vector_inputs = {
{1000,
15,
200,
5,
10,
approximate_predict_inputs[0].data,
approximate_predict_inputs[0].points_to_predict,
approximate_predict_inputs[0].condensed_parents,
approximate_predict_inputs[0].condensed_children,
approximate_predict_inputs[0].condensed_lambdas,
approximate_predict_inputs[0].condensed_sizes,
approximate_predict_inputs[0].cluster_selection_method,
approximate_predict_inputs[0].allow_single_cluster,
approximate_predict_inputs[0].cluster_selection_epsilon,
{0.0002530822530388832,
0.00018407567404210567,
0.000182106887223199,
0.00018524177721701562,
0.00019448586681392044,
0.00019815948326140642,
0.0001980908855330199,
0.00016657185915391892,
0.00018871990323532373,
0.7867118716239929,
0.0001816799776861444,
0.00020116902305744588,
0.7831955552101135,
0.00017527365707792342,
0.0002196182613261044,
0.0001871994900284335,
0.00018894199456553906,
0.00019486781093291938,
0.00024115735141094774,
0.0001886177051346749,
2.8098019177742566e-14,
2.644835305626314e-14,
3.68345084251695e-14,
2.696131112692266e-14,
3.520827631720881e-14,
2.8222329733081607e-14,
3.1144950849012684e-14,
2.9790860262609437e-14,
0.937458872795105,
2.9192007968900646e-14,
0.00020727327500935644,
0.00015153847925830632,
0.00014923505659680814,
0.00015247381816152483,
0.0001586345606483519,
0.00016630074242129922,
0.00016396638238802552,
0.00013663238496519625,
0.00015500378503929824,
0.7865288257598877,
0.0023241264279931784,
0.001784613006748259,
0.0017680305754765868,
0.0017621743027120829,
0.0018360188696533442,
0.001873290864750743,
0.0019224723801016808,
0.0015969841042533517,
0.0018349199090152979,
0.6978297233581543,
0.002737677888944745,
0.0026009061839431524,
0.002662775805220008,
0.0026909145526587963,
0.002935544354841113,
0.6706856489181519,
0.00296516390517354,
0.0025849631056189537,
0.0026453109458088875,
0.002799716079607606,
2.096020068620419e-08,
1.9538926920859012e-08,
2.1055260646107854e-08,
1.889239342744986e-08,
2.0816086632180486e-08,
2.4408937093767236e-08,
0.8918877840042114,
2.1953395545892818e-08,
2.317479186331184e-08,
2.30217178653902e-08,
0.8813241124153137,
1.07278836480873e-07,
9.706727155389672e-08,
1.054549940704419e-07,
1.1396699761689888e-07,
1.0535793393273707e-07,
9.917750531940328e-08,
9.856156424348228e-08,
1.0011526541120475e-07,
1.3765574635726807e-07,
0.7033942341804504,
0.0015983362682163715,
0.0014852865133434534,
0.0015967393992468715,
0.001750286784954369,
0.001626663259230554,
0.0015557239530608058,
0.0015090374508872628,
0.001547577790915966,
0.0022005606442689896,
0.8545218110084534,
2.273625113957678e-06,
2.100146730299457e-06,
2.3017485091259005e-06,
2.492304020051961e-06,
2.244888946734136e-06,
2.123563263012329e-06,
2.129831955244299e-06,
2.163373892472009e-06,
2.9658372113772202e-06,
9.203874641594556e-13,
8.227756041583045e-13,
9.656514953979012e-13,
8.450126449260909e-13,
0.9300125241279602,
8.807190003332077e-13,
8.120847744264026e-13,
8.806826795604294e-13,
1.0076335382400159e-12,
8.515964626185091e-13,
0.6337646245956421,
0.004234638065099716,
0.0039377715438604355,
0.004363706335425377,
0.0046120560728013515,
0.004171515814960003,
0.003982673864811659,
0.0040373243391513824,
0.004073547665029764,
0.005416255444288254,
0.006280964706093073,
0.004642014857381582,
0.004681616555899382,
0.004790678154677153,
0.004879931919276714,
0.005056649446487427,
0.005097731947898865,
0.004222454968839884,
0.00492987921461463,
0.6188682913780212,
3.75870172319992e-07,
0.871845006942749,
3.7173532518863794e-07,
3.173210529894277e-07,
3.6222598964741337e-07,
3.5518635854714375e-07,
3.3805454791036027e-07,
3.3008785749188974e-07,
3.3509408581267053e-07,
3.560813013336883e-07,
7.803906214576273e-07,
5.730094017053489e-07,
5.603833415079862e-07,
5.772134272774565e-07,
6.016087468196929e-07,
6.22694244611921e-07,
6.227214157661365e-07,
5.180352218303597e-07,
5.884909342057654e-07,
0.8682332634925842,
1.1367755092805965e-07,
1.0737686295669846e-07,
1.501674233850281e-07,
1.0805556627246915e-07,
1.4312823282125464e-07,
1.1483928119560005e-07,
1.2545275751563167e-07,
1.1863028248626506e-07,
0.8810287117958069,
1.1905306251946968e-07,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
2.0499211927926808e-07,
1.927855066696793e-07,
2.0731974359478045e-07,
1.8649507182999514e-07,
2.0591856753071625e-07,
2.3634844126263488e-07,
0.8767164945602417,
2.1867212751658371e-07,
2.299055239518566e-07,
2.2589235015857412e-07,
0.002511337399482727,
0.002371246926486492,
0.0032633140217512846,
0.0024543318431824446,
0.003203928004950285,
0.0025498317554593086,
0.0027719265781342983,
0.0026563755236566067,
0.6777471899986267,
0.0026039197109639645,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.9133185744285583,
2.1562490681859003e-10,
1.961576595599368e-10,
2.1651716530790566e-10,
2.3134436033522832e-10,
2.16883608294971e-10,
2.0475746098647107e-10,
2.0201645911654964e-10,
2.0264663558311469e-10,
2.8145905051069064e-10,
0.0,
0.0,
0.0,
0.9947580695152283,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.8428654074668884,
6.827289780630963e-06,
6.298644166236045e-06,
6.7981050051457714e-06,
7.413944786094362e-06,
6.728119387844345e-06,
6.40636972093489e-06,
6.387283974618185e-06,
6.465797923738137e-06,
8.976897333923262e-06,
0.00038288370706140995,
0.00036690602428279817,
0.00039562038728035986,
0.0003451861848589033,
0.0003812862851191312,
0.0004427516250871122,
0.7624126076698303,
0.0004066527762915939,
0.00042984599713236094,
0.0004215588851366192,
4.490415743234333e-13,
4.251922978201067e-13,
5.939976700612692e-13,
4.2563638702995676e-13,
5.565416291881953e-13,
4.516668614638897e-13,
4.930322101102946e-13,
4.674474782945248e-13,
0.9316940307617188,
4.727977991753063e-13,
4.423918653628789e-05,
3.1180410587694496e-05,
3.110723628196865e-05,
3.203861342626624e-05,
3.320777614135295e-05,
3.4194719773950055e-05,
3.428362470003776e-05,
2.8662063414230943e-05,
3.301577453385107e-05,
0.8201920986175537,
0.000869117968250066,
0.0007816385477781296,
0.0009427050244994462,
0.0008194441325031221,
0.7343723773956299,
0.0008457642397843301,
0.0007728348718956113,
0.0008526688907295465,
0.000946925429161638,
0.0008139877463690937,
0.0004243399016559124,
0.0003884983598254621,
0.0004493198939599097,
0.00040433465619571507,
0.0004704603343270719,
0.0004156433278694749,
0.00044823603820987046,
0.7615138292312622,
0.0004584285197779536,
0.00039302516961470246,
1.3799692751970065e-22,
0.9609621167182922,
1.372497184702753e-22,
1.160538842276516e-22,
1.323550574344324e-22,
1.2723040242395048e-22,
1.2198066562833672e-22,
1.205310422071281e-22,
1.2286812783582312e-22,
1.315128947755674e-22,
1.8995478157063684e-17,
2.0536343584682564e-17,
0.9490602612495422,
1.835897921931638e-17,
2.3170688842537342e-17,
1.9648957458425476e-17,
1.9703071614098695e-17,
2.0377270135443716e-17,
2.520216998071245e-17,
1.9725183806233463e-17,
4.4916218939761166e-06,
3.371252660144819e-06,
3.3190501653734827e-06,
3.3453750347689493e-06,
3.522138058542623e-06,
3.6460453429754125e-06,
3.720290578712593e-06,
3.046903430004022e-06,
3.4767349461617414e-06,
0.8514488339424133,
0.0,
0.0,
0.0,
0.9879382252693176,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.1883399508860748e-07,
1.0594634858307472e-07,
1.2510740532434284e-07,
1.1163908197886485e-07,
0.8810153603553772,
1.1485160200663813e-07,
1.0367984515369244e-07,
1.152264275106063e-07,
1.2804279947431496e-07,
1.081207656739025e-07,
3.3322667541968443e-12,
2.7847765326355356e-12,
2.8983024747547548e-12,
0.9269185066223145,
3.3396907201527215e-12,
3.2208435137709435e-12,
2.8192749784022864e-12,
2.9375054905883546e-12,
2.989901078456758e-12,
3.182274105686944e-12,
3.7339919600754e-08,
3.5016384458685934e-08,
3.516167623729416e-08,
3.669248727078411e-08,
3.8688138914722003e-08,
0.8888734579086304,
4.062863112608284e-08,
3.402507431360391e-08,
3.5493389560770083e-08,
3.9204945068149755e-08,
1.8074357285513543e-05,
1.9653665731311776e-05,
0.8279287219047546,
1.7290078176301904e-05,
2.2274352886597626e-05,
1.8666025425773114e-05,
1.8648825061973184e-05,
1.9387884094612673e-05,
2.3966404114617035e-05,
1.859080293797888e-05,
7.71554114180617e-05,
6.764035060768947e-05,
7.800028106430545e-05,
7.031043787719682e-05,
0.8057695627212524,
7.395906141027808e-05,
6.606870010728016e-05,
7.236905366880819e-05,
8.04358787718229e-05,
7.033678411971778e-05,
0.0006914456025697291,
0.0007456588209606707,
0.7390852570533752,
0.000668209744617343,
0.0008583673043176532,
0.0007160619716159999,
0.0007139441440813243,
0.000733432243578136,
0.0009341223048977554,
0.000715571572072804,
6.589148065323491e-24,
6.1983526837642436e-24,
8.479431949201731e-24,
6.170803688801663e-24,
8.224413728927314e-24,
6.578555635518636e-24,
7.185272502028642e-24,
6.836412177330039e-24,
0.9631101489067078,
6.82788774638822e-24,
0.00802935566753149,
0.007491857744753361,
0.00804034061729908,
0.0073533738031983376,
0.007928348146378994,
0.009356213733553886,
0.552039623260498,
0.008452903479337692,
0.008814538829028606,
0.008730733767151833,
0.5255595445632935,
0.009215196594595909,
0.008578543551266193,
0.009703218005597591,
0.01015243586152792,
0.009502007625997066,
0.009033103473484516,
0.009024934843182564,
0.008827847428619862,
0.012344719842076302,
5.160346699994989e-05,
0.8121354579925537,
5.11777943756897e-05,
4.3404590542195365e-05,
4.8865535063669086e-05,
4.79418522445485e-05,
4.569876546156593e-05,
4.491528670769185e-05,
4.5268894609762356e-05,
4.898860424873419e-05,
7.827212655797666e-09,
6.529231377783162e-09,
6.789384610073057e-09,
0.898135781288147,
7.909614296863765e-09,
7.498669241101652e-09,
6.61939747459428e-09,
6.950873210342934e-09,
7.0602044210943404e-09,
7.461514073270337e-09,
8.789705674416837e-08,
8.293388020774728e-08,
8.341675084011513e-08,
8.616299851382792e-08,
9.254695498839283e-08,
0.8831124305725098,
9.73229248302232e-08,
8.095203440916521e-08,
8.480373026031884e-08,
9.302381442921615e-08,
4.477976217370956e-14,
3.8338191806321434e-14,
3.9949035019696405e-14,
0.9368603825569153,
4.548604535018451e-14,
4.4101085495051526e-14,
3.8511803067322464e-14,
4.020692267081745e-14,
4.083773521169204e-14,
4.2893833152252495e-14,
2.3712074512384925e-27,
2.0862347233083074e-27,
2.4614228246584516e-27,
2.1703639655110084e-27,
0.9677971601486206,
2.255406483693338e-27,
2.0744538098384907e-27,
2.259566877559203e-27,
2.5860391587450555e-27,
2.1827798580834896e-27,
0.0,
0.0,
0.0,
0.9999999403953552,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.6267847513518063e-07,
1.3730826253777195e-07,
1.4391935110324994e-07,
0.8786529898643494,
1.6530978541595687e-07,
1.5830256927529263e-07,
1.3894002393044502e-07,
1.4394387903848838e-07,
1.4852390961550554e-07,
1.5595823299463518e-07,
0.0005881970282644033,
0.0006503592012450099,
0.7493658065795898,
0.0005644297343678772,
0.0007115041371434927,
0.0006024637259542942,
0.0006059486768208444,
0.0006264440016821027,
0.0007730901706963778,
0.0006090899696573615,
5.486292138812132e-06,
3.881987595377723e-06,
3.887108960043406e-06,
3.968424607592169e-06,
4.1731746023288e-06,
4.170396096014883e-06,
4.274667844583746e-06,
3.5850184758601245e-06,
4.129350600123871e-06,
0.8487528562545776,
1.2150328486393303e-12,
1.307158157756727e-12,
0.9289259910583496,
1.1627126128219145e-12,
1.5062066177629063e-12,
1.231432924381215e-12,
1.2400850745580838e-12,
1.2953713622587681e-12,
1.6365387777578233e-12,
1.2563756458805475e-12,
0.0004198023525532335,
0.00036896925303153694,
0.00043996868771500885,
0.0003868935164064169,
0.7650184631347656,
0.0003975082072429359,
0.000368771783541888,
0.00040135433664545417,
0.0004548841679934412,
0.00038901885272935033,
1.393721582021033e-22,
1.3178899609239476e-22,
1.3987774862259031e-22,
1.2515896728281046e-22,
1.3724538920162746e-22,
1.6042488676563268e-22,
0.960868775844574,
1.4600646579323753e-22,
1.5312905935204184e-22,
1.5278299553925224e-22,
4.310436096943704e-08,
3.9372103799451e-08,
4.456979141309603e-08,
4.0635271147948515e-08,
4.723127489114631e-08,
4.1775461312454354e-08,
4.475796799852105e-08,
0.8873909711837769,
4.514668106025965e-08,
3.921411817486842e-08,
0.9613043665885925,
8.415180648421109e-23,
7.809221246152296e-23,
8.490511185071198e-23,
9.11324177018501e-23,
8.386442130099546e-23,
8.077697749014146e-23,
8.047453452996697e-23,
8.106459617618504e-23,
1.101323787287006e-22,
8.843171599437483e-06,
8.379941391467582e-06,
1.1507891031214967e-05,
8.39383210404776e-06,
1.1120961062260903e-05,
8.80366496858187e-06,
9.667754056863487e-06,
9.20082220545737e-06,
0.8385351300239563,
9.29360612644814e-06,
1.4052963706490118e-05,
1.307265574723715e-05,
1.3084992133371998e-05,
1.3902407772548031e-05,
1.475775661674561e-05,
0.8334353566169739,
1.4998437109170482e-05,
1.2722061001113616e-05,
1.3225289876572788e-05,
1.4600891518057324e-05,
0.6655100584030151,
0.0029969397000968456,
0.0027648131363093853,
0.003118096385151148,
0.003279529046267271,
0.0030224090442061424,
0.002849841956049204,
0.0027967423666268587,
0.002880786545574665,
0.0039560492150485516,
8.414162702310216e-32,
7.165489713919013e-32,
7.488791214950246e-32,
0.9724105596542358,
8.525688904338831e-32,
8.223137229588314e-32,
7.317482896721861e-32,
7.591384247659787e-32,
7.734054584766263e-32,
8.106926094824496e-32,
0.9421651363372803,
2.2987978674207567e-15,
2.135355660467988e-15,
2.302646996891317e-15,
2.4937179362758636e-15,
2.3119533478828e-15,
2.1869935417896886e-15,
2.161611564250503e-15,
2.2035128018435155e-15,
3.0309642531814278e-15,
2.3697327066773426e-21,
2.1960494157449986e-21,
2.2261268014166778e-21,
2.328142243192761e-21,
2.470902388588833e-21,
0.958637535572052,
2.5442215697443237e-21,
2.1712212746197284e-21,
2.2453036187475916e-21,
2.4308360296166876e-21,
6.253880191486871e-10,
5.740372066576072e-10,
6.49229003890639e-10,
5.829880467267401e-10,
6.815960573725022e-10,
6.05642536122275e-10,
6.453548251350583e-10,
0.9092268943786621,
6.502650085060679e-10,
5.694551497015254e-10,
3.1579710935147887e-07,
2.921148620771419e-07,
3.3325909498671535e-07,
2.991846201894077e-07,
3.4792745395861857e-07,
3.1037487246976525e-07,
3.3504409202578245e-07,
0.8734010457992554,
3.3658724873930623e-07,
2.9093484954501037e-07,
0.8795010447502136,
1.3401542275914835e-07,
1.2183954822830856e-07,
1.321718627877999e-07,
1.4097884104558034e-07,
1.3231928619461542e-07,
1.264212414753274e-07,
1.2375681990306475e-07,
1.2493039491801028e-07,
1.7694318898975325e-07,
1.6127115268216308e-11,
1.5460755939389337e-11,
2.159463753648172e-11,
1.5234990352608335e-11,
1.9864427810989405e-11,
1.619614685421933e-11,
1.777253517964983e-11,
1.6660367530008102e-11,
0.9222009181976318,
1.6877664196779385e-11,
0.00020575900271069258,
0.0001817716402001679,
0.000211698716157116,
0.00019274622900411487,
0.7854013442993164,
0.00019799722940661013,
0.0001795708667486906,
0.00019765806791838259,
0.00022156687919050455,
0.0001868119725259021,
0.0030260561034083366,
0.0027391742914915085,
0.0032676581759005785,
0.0028837528079748154,
0.6659250259399414,
0.003014005022123456,
0.002749745501205325,
0.0030349809676408768,
0.0033713809680193663,
0.002802700735628605,
6.155051232781261e-05,
5.773942393716425e-05,
7.929089770186692e-05,
5.818824502057396e-05,
7.792837277520448e-05,
6.242646486498415e-05,
6.811827188357711e-05,
6.389007467078045e-05,
0.8087178468704224,
6.49222347419709e-05,
6.802893026641819e-36,
6.373562799568024e-36,
8.95568566081636e-36,
6.508534432722163e-36,
8.360054982031904e-36,
6.844305813155377e-36,
7.559837776681483e-36,
7.153923448733534e-36,
0.9755426645278931,
7.183106330032177e-36,
4.0245951822726056e-05,
4.3522326450329274e-05,
0.8156006932258606,
3.837023541564122e-05,
4.9416430556448177e-05,
4.094480027561076e-05,
4.139017255511135e-05,
4.2578656575642526e-05,
5.3881121857557446e-05,
4.183354394626804e-05,
6.906796468086895e-09,
6.52547571533546e-09,
8.935008288801782e-09,
6.650829220689047e-09,
8.588117772490023e-09,
6.984333555948297e-09,
7.618473851778162e-09,
7.13407377617159e-09,
0.8980153799057007,
7.302092708272312e-09,
8.738015822018497e-06,
8.260999493359122e-06,
8.710845577297732e-06,
7.846711923775729e-06,
8.743520993448328e-06,
1.0090280738950241e-05,
0.8387179374694824,
9.184515874949284e-06,
9.682076779427007e-06,
9.472021702094935e-06,
7.576928529040572e-10,
7.141871538607347e-10,
7.146616076703083e-10,
7.424448833504016e-10,
7.914729982516633e-10,
0.9083556532859802,
8.230387482655033e-10,
6.920529149745391e-10,
7.182910932712616e-10,
7.837293591883565e-10,
8.764655831328128e-06,
7.497745173168369e-06,
7.86956661613658e-06,
0.8383593559265137,
8.976381650427356e-06,
8.569822966819629e-06,
7.572063168481691e-06,
8.02258637122577e-06,
8.055099897319451e-06,
8.429727131442633e-06,
1.1287383472335932e-07,
1.0407764960973509e-07,
1.1875183503207154e-07,
1.0603702094158507e-07,
1.2449604014364013e-07,
1.1059880478114792e-07,
1.1837087754429376e-07,
0.8811383843421936,
1.1983460979081428e-07,
1.0391529770004126e-07,
0.8639678359031677,
1.008576305139286e-06,
9.264950904253055e-07,
1.0105918590852525e-06,
1.0757661357274628e-06,
9.891142553897225e-07,
9.565759455654188e-07,
9.6024439244502e-07,
9.54390998231247e-07,
1.3053422662778758e-06,
1.4441015050437045e-10,
1.3511301799606912e-10,
1.856359232998983e-10,
1.3656047126442417e-10,
1.7916350347757515e-10,
1.440548930142782e-10,
1.5756891624807423e-10,
1.4786717683623607e-10,
0.9148818850517273,
1.5072117165448873e-10,
3.771382353079389e-06,
2.772385414573364e-06,
2.742514425335685e-06,
2.791339966279338e-06,
2.9010211619606707e-06,
3.0431767754635075e-06,
3.0466819680441404e-06,
2.505725888113375e-06,
2.8902729809487937e-06,
0.8513314723968506,
0.0015077501302585006,
0.001440059975720942,
0.0015407745959237218,
0.0013567212736234069,
0.001495639095082879,
0.0017410119762644172,
0.709049642086029,
0.0015986631624400616,
0.001684310962446034,
0.001660698908381164,
1.443223140995542e-06,
1.3326495036380948e-06,
1.356645498162834e-06,
1.4197438531482476e-06,
1.5184647281785146e-06,
0.8601090312004089,
1.5381515368062537e-06,
1.3261542335385457e-06,
1.3645659464600612e-06,
1.4915161727913073e-06,
7.241417751893686e-13,
0.9306337833404541,
7.059784614543707e-13,
6.109497131291663e-13,
6.898317637601958e-13,
6.812408710060724e-13,
6.433208458132833e-13,
6.328905498634296e-13,
6.409212353550298e-13,
6.866245311036578e-13,
2.2238606628834847e-42,
2.0823295179866782e-42,
2.902089119616696e-42,
2.1047502934158752e-42,
2.712913826932846e-42,
2.221058065954835e-42,
2.450871014104105e-42,
2.282715198385127e-42,
0.9793277978897095,
2.3457736292797438e-42,
5.218588780543286e-21,
4.3951890457803504e-21,
4.641714717715295e-21,
0.9580142498016357,
5.2907542305367854e-21,
5.0882399794065605e-21,
4.50034276556402e-21,
4.688130939968814e-21,
4.78324257402499e-21,
5.029653135376645e-21,
6.838761379412972e-08,
0.8849197030067444,
6.868049950981003e-08,
5.8331504959596714e-08,
6.567564270199e-08,
6.38908304040342e-08,
6.153631915140068e-08,
6.112195194418746e-08,
6.149467424165778e-08,
6.520041040403157e-08,
7.956658200141895e-15,
6.930070785212154e-15,
8.124077650298496e-15,
7.518243264005488e-15,
0.9400823712348938,
7.628769205160913e-15,
6.9071081455285635e-15,
7.461688568183213e-15,
8.599279227334156e-15,
7.316203036195762e-15,
0.00021303852554410696,
0.7831655144691467,
0.0002114817179972306,
0.0001822729391278699,
0.00020357394532766193,
0.0002021284744841978,
0.00019437435548752546,
0.000188833408174105,
0.0001912984298542142,
0.00020341463095974177,
4.4599653392651817e-07,
4.898585643786646e-07,
0.8700112104415894,
4.239646784753859e-07,
5.427907012744981e-07,
4.5675955107071786e-07,
4.5577090190818126e-07,
4.7215917220455594e-07,
5.888216492166976e-07,
4.6110730522741505e-07,
1.1922724634828852e-13,
1.0151839628267864e-13,
1.0617196818983946e-13,
0.9347257614135742,
1.2039220803009704e-13,
1.1665000291660038e-13,
1.0262576650033745e-13,
1.0689546985206619e-13,
1.0877546964289247e-13,
1.1469773427471436e-13,
0.0020626746118068695,
0.0022514122538268566,
0.6865984797477722,
0.00198568357154727,
0.002561625326052308,
0.002139321994036436,
0.0021208799444139004,
0.002212588209658861,
0.0027268019039183855,
0.0021386141888797283,
5.635719162455644e-07,
5.2551541784851e-07,
5.388453132582072e-07,
5.513168730431062e-07,
6.038525839358044e-07,
0.8687138557434082,
6.121398996583594e-07,
5.242313250164443e-07,
5.392109301283199e-07,
5.818334898322064e-07,
0.0003737060178536922,
0.00033870991319417953,
0.00040540844202041626,
0.0003588051476981491,
0.7615484595298767,
0.0003674703184515238,
0.0003349876787979156,
0.0003662060189526528,
0.00041163366404362023,
0.00034683974809013307,
1.0556798102356879e-08,
9.874362660866609e-09,
1.001590366200844e-08,
1.024814455519163e-08,
1.1130394383940256e-08,
0.895565390586853,
1.1604468497239395e-08,
9.775446230264606e-09,
1.0088111679351641e-08,
1.0855812249133123e-08,
1.401298464324817e-45,
1.401298464324817e-45,
1.401298464324817e-45,
1.401298464324817e-45,
1.401298464324817e-45,
1.401298464324817e-45,
1.401298464324817e-45,
0.9808579087257385,
1.401298464324817e-45,
1.401298464324817e-45,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.129949282585585e-06,
1.0416553095637937e-06,
1.0728659844971844e-06,
1.100299982681463e-06,
1.212043343912228e-06,
0.8628149032592773,
1.2363406085569295e-06,
1.0379121704318095e-06,
1.0894523256865796e-06,
1.17115780540189e-06,
0.0003253588220104575,
0.00027499315910972655,
0.00028717683744616807,
0.772987961769104,
0.0003274757764302194,
0.00032236994593404233,
0.00028217388899065554,
0.0002920878760050982,
0.0002952611248474568,
0.00031308678444474936,
2.6724068447947502e-05,
2.4618164388812147e-05,
2.4834973373799585e-05,
2.613231299619656e-05,
2.7921598302782513e-05,
0.8234087228775024,
2.8974545784876682e-05,
2.4416465748799965e-05,
2.507374665583484e-05,
2.772476727841422e-05,
5.6379132729489356e-05,
4.17823794123251e-05,
4.1476065234746784e-05,
4.2309264244977385e-05,
4.391720722196624e-05,
4.560843444778584e-05,
4.683551378548145e-05,
3.828674743999727e-05,
4.3823831219924614e-05,
0.8157931566238403,
1.2169584806542844e-06,
1.1137905175928609e-06,
1.2766719237333746e-06,
1.1562034387679887e-06,
1.3531788454201887e-06,
1.1956110483879456e-06,
1.2823479664803017e-06,
0.8614397048950195,
1.2865907592640724e-06,
1.125888388742169e-06,
0.9424465298652649,
1.679152397164531e-15,
1.5559247734485545e-15,
1.693863983029799e-15,
1.8503290737169158e-15,
1.6793280506219679e-15,
1.6172573703680544e-15,
1.614344741699802e-15,
1.6210827829160916e-15,
2.187340613539826e-15,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0011390125146135688,
0.0010132569586858153,
0.0012148814275860786,
0.001085218507796526,
0.7224857211112976,
0.001134143560193479,
0.0010155646596103907,
0.0011173048987984657,
0.0012661636574193835,
0.0010568059515208006,
6.267162916628877e-06,
5.96749123360496e-06,
8.308308679261245e-06,
6.046620455890661e-06,
7.646014637430198e-06,
6.365760782500729e-06,
7.014048151177121e-06,
6.613652203668607e-06,
0.8436877727508545,
6.5886383708857466e-06,
7.452761110471329e-06,
5.583225174632389e-06,
5.431343197415117e-06,
5.497108304552967e-06,
5.7301308515889104e-06,
5.969879566691816e-06,
6.052291155356215e-06,
4.944880402035778e-06,
5.667175628332188e-06,
0.8448071479797363,
9.706689467299718e-16,
8.250320605427579e-16,
8.604393082874086e-16,
0.9435349106788635,
9.920011538692161e-16,
9.518180167305905e-16,
8.331404422490273e-16,
8.742566391186128e-16,
8.841363255362685e-16,
9.257992821733066e-16,
3.2561972415515774e-41,
2.9062930150096706e-41,
3.381193064569351e-41,
3.0630983131676176e-41,
0.9787665009498596,
3.159507647513165e-41,
2.823476275768074e-41,
3.073748181496486e-41,
3.4965199281832836e-41,
3.0057852059767326e-41,
5.670522540413003e-08,
5.38855502441038e-08,
7.692833747796612e-08,
5.427816418546172e-08,
7.014021718987351e-08,
5.6596075381776245e-08,
6.208095726378815e-08,
5.9418223230522926e-08,
0.8847724199295044,
5.943314462797389e-08,
0.001061184098944068,
0.0010145955020561814,
0.0010924838716164231,
0.0009687413112260401,
0.0010662769200280309,
0.0012369186151772738,
0.7195754051208496,
0.0011526953894644976,
0.001196487108245492,
0.001171511015854776,
0.0006623931112699211,
0.0007242010324262083,
0.7415693402290344,
0.0006331429467536509,
0.0008170953951776028,
0.0006854024832136929,
0.0006821154966019094,
0.0007062596851028502,
0.0008823449024930596,
0.0006765093421563506,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.9182665944099426,
5.0304121979038996e-11,
4.713373441544988e-11,
5.1051905758381366e-11,
5.559832108592566e-11,
5.076468065356998e-11,
4.9047127470558394e-11,
4.8359476145787283e-11,
4.9211738850640785e-11,
6.738603425260337e-11,
0.00041929585859179497,
0.0003971457772422582,
0.0005559647106565535,
0.0003974952269345522,
0.0005265120998956263,
0.0004149224259890616,
0.00045300336205400527,
0.00043863116297870874,
0.7582964301109314,
0.00043983160867355764,
0.0052673425525426865,
0.004537391941994429,
0.005313797388225794,
0.00481824018061161,
0.6209046244621277,
0.004972491879016161,
0.004527865443378687,
0.004970768000930548,
0.005522898864001036,
0.00479167653247714,
2.581303955436041e-17,
2.4269667769485494e-17,
2.5706594644414635e-17,
2.3133732067129698e-17,
2.5734091782337123e-17,
2.984703231129779e-17,
0.9488005638122559,
2.7243832057866857e-17,
2.8225579639681544e-17,
2.818853187440652e-17,
0.0004270136123523116,
0.00045935602975077927,
0.7588334679603577,
0.00041008915286511183,
0.0005272935959510505,
0.000439519586507231,
0.0004441268683876842,
0.0004635911318473518,
0.0005746004171669483,
0.0004449911939445883,
0.0021648146212100983,
0.00238444353453815,
0.6800108551979065,
0.0020633896347135305,
0.0026305066421628,
0.002230418147519231,
0.0022443304769694805,
0.002327741589397192,
0.0028689440805464983,
0.0022231165785342455,
0.006049368996173143,
0.005571983754634857,
0.00621400261297822,
0.005707009695470333,
0.0066705928184092045,
0.005870106630027294,
0.006237417925149202,
0.5953031182289124,
0.006233432795852423,
0.005508860107511282,
0.7905651330947876,
0.000143751225550659,
0.00013270530325826257,
0.00014356210886035115,
0.00015722411626484245,
0.00014192122034728527,
0.00013732603110838681,
0.00013610343739856035,
0.00013876723824068904,
0.00018741784151643515,
9.272175134356075e-07,
0.8639268279075623,
9.248606716028007e-07,
7.861579547352449e-07,
8.875958315002208e-07,
8.678379117554869e-07,
8.365941539523192e-07,
8.177325412361824e-07,
8.292303732559958e-07,
8.857466582412599e-07,
3.3218406188728275e-14,
3.159994984322338e-14,
3.383386032820325e-14,
3.014556039146986e-14,
3.332183907598339e-14,
3.854191678266325e-14,
0.9369771480560303,
3.549107012511092e-14,
3.6946357431788535e-14,
3.661873186405415e-14,
0.7983226776123047,
0.000102168349258136,
9.367313759867102e-05,
0.00010247694444842637,
0.00010975940676871687,
0.00010048323747469112,
9.75712391664274e-05,
9.654470341047272e-05,
9.730223973747343e-05,
0.00013495884195435792,
0.0004232035716995597,
0.0003552534617483616,
0.00037263764534145594,
0.763225257396698,
0.0004270242643542588,
0.0004103607207071036,
0.0003647382545750588,
0.00037915012217126787,
0.00038557412335649133,
0.00040608123526908457,
3.6380436857497145e-11,
4.008951157152829e-11,
0.9192806482315063,
3.5115417518216674e-11,
4.457970451121618e-11,
3.74578042194873e-11,
3.773666101825057e-11,
3.838310919102028e-11,
4.9122126505318775e-11,
3.797544570471878e-11,
3.766634836210869e-05,
2.7020705601898953e-05,
2.6856769181904383e-05,
2.7971773306489922e-05,
2.8656755603151396e-05,
2.9932121833553538e-05,
3.010015279869549e-05,
2.486624362063594e-05,
2.8290420232224278e-05,
0.8223510980606079,
2.3036641323415097e-06,
2.198754827986704e-06,
2.355473270654329e-06,
2.078902753055445e-06,
2.3141310521168634e-06,
2.680374564079102e-06,
0.8549140691757202,
2.414106347714551e-06,
2.6329548745707143e-06,
2.5600991193641676e-06,
0.0013438343303278089,
0.0011852685129269958,
0.0014419370563700795,
0.0012561626499518752,
0.7197446823120117,
0.0012919692089781165,
0.001184143591672182,
0.0013029715046286583,
0.0015100430464372039,
0.0012517330469563603,
0.0,
0.0,
0.0,
0.0,
0.0,
0.9998517632484436,
0.0,
0.0,
0.0,
0.0,
2.802596928649634e-45,
2.802596928649634e-45,
2.802596928649634e-45,
2.802596928649634e-45,
2.802596928649634e-45,
4.203895392974451e-45,
0.9806379079818726,
2.802596928649634e-45,
4.203895392974451e-45,
4.203895392974451e-45,
0.00013518209743779153,
0.7933940291404724,
0.00013711249630432576,
0.00011572794028325006,
0.00013132538879290223,
0.00012641851208172739,
0.00012203707592561841,
0.0001193817806779407,
0.0001238906552316621,
0.00012998496822547168,
3.705951166921295e-05,
3.2651707442710176e-05,
3.7891993997618556e-05,
3.478688086033799e-05,
0.8195874094963074,
3.545836443663575e-05,
3.2164469303097576e-05,
3.480994200799614e-05,
3.9527774788439274e-05,
3.4080192563124e-05,
0.0010709901107475162,
0.0009553811396472156,
0.0011183643946424127,
0.0010245833545923233,
0.729491114616394,
0.0010488491971045732,
0.0009379786788485944,
0.0010230764746665955,
0.0011532190255820751,
0.0009820930426940322,
2.5394764136272383e-23,
1.8586118285084314e-23,
1.8386959305508596e-23,
1.873352010066848e-23,
1.9753264815629578e-23,
2.0148241159986497e-23,
2.0019017855102243e-23,
1.6782316827815e-23,
1.940697381089928e-23,
0.9623143076896667,
1.179428110731351e-09,
1.2685132944056932e-09,
0.9060037136077881,
1.1413423539607948e-09,
1.4865306763311992e-09,
1.2091724288509909e-09,
1.2228977830375243e-09,
1.2827880979671136e-09,
1.6382253331670427e-09,
1.2131949889138127e-09,
8.6527588791796e-06,
7.933227607281879e-06,
8.910964425012935e-06,
8.112777322821785e-06,
9.358982424600981e-06,
8.345817150257062e-06,
9.061023774847854e-06,
0.8394049406051636,
9.00478789844783e-06,
7.943353011796717e-06,
0.0,
0.990544855594635,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0010038857581093907,
0.0009473990648984909,
0.0009698631474748254,
0.0009850271744653583,
0.001063613803125918,
0.7288420796394348,
0.0010968573624268174,
0.0009310253080911934,
0.0009612368885427713,
0.0010493533918634057,
0.0008616790873929858,
0.0008098686230368912,
0.0008701035985723138,
0.0007828595116734505,
0.0008618078427389264,
0.00099581154063344,
0.7354813814163208,
0.0009323902777396142,
0.0009727329015731812,
0.0009355669026263058,
1.873915607575327e-05,
1.7621654478716664e-05,
2.4143144401023164e-05,
1.7761063645593822e-05,
2.2934524167794734e-05,
1.8835609807865694e-05,
2.0691753888968378e-05,
1.9355315089342184e-05,
0.8280181884765625,
1.975242412299849e-05,
1.2898204726496942e-09,
1.1871887917180857e-09,
1.1901358787369531e-09,
1.2426338846793783e-09,
1.3304702894956222e-09,
0.9061726331710815,
1.3639276374988185e-09,
1.1688741086146592e-09,
1.1890000095604591e-09,
1.3293177669737588e-09,
0.0002906608860939741,
0.000256698316661641,
0.0003033126122318208,
0.00027610393590293825,
0.7752863168716431,
0.0002763342054095119,
0.0002513695217203349,
0.0002736095921136439,
0.0003150873526465148,
0.0002665693173184991,
1.3453030911492192e-09,
1.4515864066311224e-09,
0.9055647850036621,
1.2921472780647036e-09,
1.6613133091425425e-09,
1.362399193460817e-09,
1.3526854081291617e-09,
1.414280026423853e-09,
1.7644169458819192e-09,
1.3815186772347943e-09,
4.0776686205390433e-07,
3.813260605056712e-07,
4.0468054862685676e-07,
3.640377315150545e-07,
4.036292295950261e-07,
4.6160798206074105e-07,
0.8716108202934265,
4.30227572678632e-07,
4.489644425120787e-07,
4.4368567841956974e-07,
1.3021329792195502e-08,
1.1893436457910411e-08,
1.3491399109000213e-08,
1.2032384866245138e-08,
1.3979353674642425e-08,
1.2435037888280931e-08,
1.3664777753774615e-08,
0.8944553732872009,
1.3661365372286127e-08,
1.195089804895133e-08,
0.00032250056392513216,
0.7685054540634155,
0.0003231315058656037,
0.00027580931782722473,
0.0003142245695926249,
0.0003104670613538474,
0.0002969441993627697,
0.00028817669954150915,
0.00029121528496034443,
0.0003109250101260841,
0.0005915526999160647,
0.0005324966041371226,
0.000624816631898284,
0.0005520994309335947,
0.7503673434257507,
0.0005734334699809551,
0.0005133048980496824,
0.0005603769095614552,
0.0006345365545712411,
0.00054822803940624,
0.0001950367441168055,
0.0001836341107264161,
0.00018405748414807022,
0.00019076211901847273,
0.00020404005772434175,
0.7808713316917419,
0.00020892191969323903,
0.00017728726379573345,
0.0001836581650422886,
0.00020496748038567603,
1.608942739039776e-07,
1.7199380408783327e-07,
0.8772114515304565,
1.565929892421991e-07,
1.9818507723812218e-07,
1.6638973932003864e-07,
1.6638291810977535e-07,
1.7182102851620584e-07,
2.1861904997422243e-07,
1.668968394596959e-07,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
7.765452235235226e-13,
6.634368076609276e-13,
6.854417207435848e-13,
0.9303909540176392,
7.878320933997485e-13,
7.569853489701461e-13,
6.618272011156556e-13,
6.845095779257904e-13,
7.028499418755552e-13,
7.419190587408531e-13,
5.680636263605265e-07,
6.192842079144611e-07,
0.8676695823669434,
5.454801907944784e-07,
6.997564128141676e-07,
5.766644903815177e-07,
5.704857812816044e-07,
5.950575427959848e-07,
7.348615440605499e-07,
5.820151045554667e-07,
2.2576787728212366e-07,
0.8762407898902893,
2.2449019354553457e-07,
1.9169640097516094e-07,
2.1647123560342152e-07,
2.1035026520621614e-07,
2.0143023959917628e-07,
1.9916797100449912e-07,
2.0261472855054308e-07,
2.1492077451057412e-07,
1.2652937186885538e-07,
9.182775073668381e-08,
9.278662815859207e-08,
9.527293087785438e-08,
9.81186687454283e-08,
1.0048999854461726e-07,
1.0236620084924652e-07,
8.515724658764157e-08,
9.807610013012891e-08,
0.881850004196167,
1.2506547880652974e-11,
1.1351542102133738e-11,
1.296866780631234e-11,
1.1666118591990848e-11,
1.360329644067848e-11,
1.201308497122966e-11,
1.2980752757407732e-11,
0.9230177998542786,
1.3100937869270357e-11,
1.1419108714161297e-11,
0.8841384053230286,
7.432342385982338e-08,
6.91810129183068e-08,
7.497314413740241e-08,
8.193989486926512e-08,
7.38656282806005e-08,
7.120073775013225e-08,
7.120899425672178e-08,
7.211860975075979e-08,
9.63558264288622e-08,
4.9865299389662e-11,
4.6287876309669684e-11,
4.707735590248063e-11,
4.8085990050905636e-11,
5.2480981366231916e-11,
0.91878342628479,
5.456597673703101e-11,
4.578288442802503e-11,
4.7785719825554906e-11,
5.1922473265353375e-11,
0.0008959394181147218,
0.7379651069641113,
0.0008811400039121509,
0.0007502483204007149,
0.0008462243713438511,
0.0008234995766542852,
0.0007848272216506302,
0.0007843237835913897,
0.0007906978134997189,
0.0008408142603002489,
2.1772191161062437e-08,
2.016489020206791e-08,
2.281753452848534e-08,
2.0470334760602782e-08,
2.3998758535981324e-08,
2.1082650292214566e-08,
2.2921506470652275e-08,
0.8917040228843689,
2.3075099164771018e-08,
2.019423384069796e-08,
0.0008367212722077966,
0.0007848034729249775,
0.0008906977018341422,
0.000790073536336422,
0.0009255832992494106,
0.0008173509268090129,
0.000888096634298563,
0.7355328798294067,
0.000891038856934756,
0.0007747504278086126,
0.0,
0.0,
0.0,
0.0,
0.9999973177909851,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.9916204810142517,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
2.8017247188927286e-08,
2.3665162274255636e-08,
2.4420254263191055e-08,
0.8906757235527039,
2.8490969583572223e-08,
2.7117891931993654e-08,
2.3739969989833298e-08,
2.4824876376783322e-08,
2.5206301046409862e-08,
2.6808491426777437e-08,
3.4076495795758037e-40,
2.524551287358304e-40,
2.4984871359218623e-40,
2.534934908978951e-40,
2.6391915147247172e-40,
2.7248809158181798e-40,
2.7454099383205384e-40,
2.2658435648746562e-40,
2.6011602744029417e-40,
0.9782671928405762,
2.4581504476373084e-06,
1.8203553509010817e-06,
1.8167726238971227e-06,
1.8067064502247376e-06,
1.9375813735678094e-06,
1.9780225102294935e-06,
1.9768651782214874e-06,
1.6384424270654563e-06,
1.8983608924827422e-06,
0.857195258140564,
1.3694144313633154e-30,
1.2598518691378617e-30,
1.4210321750593353e-30,
1.2876495833879149e-30,
1.5012856194182226e-30,
1.3207292169341789e-30,
1.4272743321808459e-30,
0.9712626338005066,
1.4336048864775382e-30,
1.2593295734879043e-30,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
9.132087939178746e-07,
8.776196978033113e-07,
9.209322229253303e-07,
8.191689744307951e-07,
9.05868375866703e-07,
1.0584705023575225e-06,
0.8641015291213989,
9.451753157918574e-07,
1.0056701285066083e-06,
1.0093104947372922e-06,
0.8167849779129028,
3.719458982232027e-05,
3.392110374988988e-05,
3.6845827708020806e-05,
4.0184622775996104e-05,
3.7005844205850735e-05,
3.5819564800476655e-05,
3.565774386515841e-05,
3.5018161725020036e-05,
4.800082024303265e-05,
1.1688827828460876e-32,
1.0995284692107787e-32,
1.1241003419047955e-32,
1.154579367585282e-32,
1.2511332980674738e-32,
0.9731304049491882,
1.2869081462036055e-32,
1.090015119961177e-32,
1.1222980151913972e-32,
1.2204715564838314e-32,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
3.9510058513769764e-07,
4.287217905130092e-07,
0.869908332824707,
3.826898478109797e-07,
4.904668458038941e-07,
4.0934540379566897e-07,
4.07774962241092e-07,
4.2577028125379e-07,
5.277668719827489e-07,
4.1092604874393146e-07,
2.1730153093812987e-05,
2.0339077309472486e-05,
2.8269920221646316e-05,
2.0598976334440522e-05,
2.647225119289942e-05,
2.1251820726320148e-05,
2.3315456928685308e-05,
2.2439493477577344e-05,
0.8257094025611877,
2.247175871161744e-05,
0.00011114540393464267,
0.7979688048362732,
0.00010800695599755272,
9.322314872406423e-05,
0.00010502710210857913,
0.00010136813943972811,
9.809343464439735e-05,
9.753632912179455e-05,
9.864265302894637e-05,
0.00010433561692479998,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0015642372891306877,
0.001421081949956715,
0.0015952099347487092,
0.0014529804466292262,
0.0016924156807363033,
0.0014969457406550646,
0.0015946050407364964,
0.7082170248031616,
0.0016168226720765233,
0.0014148791087791324,
1.3315514787324684e-25,
1.1263332585437483e-25,
1.1739103225542426e-25,
0.9656217098236084,
1.3362040324400422e-25,
1.2890291642316942e-25,
1.1431705084895593e-25,
1.1947678048882861e-25,
1.2046259777537035e-25,
1.269194982403142e-25,
0.00011380860087228939,
9.693664469523355e-05,
0.00010178978118347004,
0.7975506782531738,
0.00011809397983597592,
0.00011155437823617831,
9.908204810926691e-05,
0.00010503666271688417,
0.00010528507118579,
0.00010874479630729184,
0.004316215869039297,
0.00315556931309402,
0.0032063601538538933,
0.0032227609772235155,
0.0033998535946011543,
0.003417698200792074,
0.0035319444723427296,
0.0029184026643633842,
0.003419804386794567,
0.6557953357696533,
2.6933218322255925e-08,
2.4846100288300477e-08,
2.7728328078069353e-08,
2.5001181569450637e-08,
2.9258462319603495e-08,
2.5961169214383517e-08,
2.8228081205838862e-08,
0.890584409236908,
2.8010262553834764e-08,
2.4571630063974226e-08,
1.0735547220974695e-05,
0.8349867463111877,
1.0537737580307294e-05,
9.020078323374037e-06,
1.0212772394879721e-05,
9.97689858195372e-06,
9.588460670784116e-06,
9.3764847406419e-06,
9.507299182587303e-06,
1.0217384442512412e-05,
8.441451063845307e-05,
7.855095464037731e-05,
8.880812674760818e-05,
8.062156848609447e-05,
9.48576707742177e-05,
8.309299300890416e-05,
8.803455421002582e-05,
0.8027859330177307,
8.810903818812221e-05,
7.755835395073518e-05,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.9839153289794922,
4.27875343224029e-18,
3.0984866393159395e-18,
3.066243759424082e-18,
3.1957940986251463e-18,
3.2903052738736175e-18,
3.3983474695822312e-18,
3.390999830996076e-18,
2.82884602554866e-18,
3.225121994038367e-18,
0.9514604806900024,
1.5477441550615367e-08,
1.4695585193180705e-08,
1.5721020929504448e-08,
1.386768033739827e-08,
1.5296471644887788e-08,
1.7853613698548543e-08,
0.8932632207870483,
1.6449938300411304e-08,
1.7083120695815524e-08,
1.6836120053653758e-08,
2.7534449600352673e-06,
3.0195972158253426e-06,
0.8523998856544495,
2.6451232315594098e-06,
3.321782514831284e-06,
2.824588364092051e-06,
2.847190671673161e-06,
2.924192585851415e-06,
3.6371143323776778e-06,
2.8669051062024664e-06,
5.02992998008267e-06,
4.746409103972837e-06,
6.768864295736421e-06,
4.8621941459714435e-06,
6.3149673223961145e-06,
5.142868758412078e-06,
5.53791323909536e-06,
5.344853434507968e-06,
0.846205472946167,
5.242775841907132e-06,
5.703771535081614e-07,
0.8682653307914734,
5.675448733200028e-07,
4.805768867299776e-07,
5.451105380416266e-07,
5.313465294420894e-07,
5.083468863631424e-07,
5.006339165447571e-07,
5.065492132416693e-07,
5.430979399534408e-07,
0.7527142763137817,
0.0005605221958830953,
0.0005126854521222413,
0.0005502335843630135,
0.000607573427259922,
0.0005563863087445498,
0.0005308641120791435,
0.0005258470773696899,
0.0005358700873330235,
0.0007171318284235895,
0.00541509548202157,
0.61064213514328,
0.005327322985976934,
0.004565962590277195,
0.005180850625038147,
0.004955968353897333,
0.004841678775846958,
0.004804892465472221,
0.004848755896091461,
0.005129228346049786,
1.5511597666773014e-05,
1.3114697139826603e-05,
1.3609484085463919e-05,
0.8324266672134399,
1.5721443560323678e-05,
1.4951362572901417e-05,
1.3358388059714343e-05,
1.40782449307153e-05,
1.411155335517833e-05,
1.4863396245345939e-05,
3.836663431400211e-09,
3.6057765662178554e-09,
3.6314469209486333e-09,
3.7095266858244713e-09,
4.039271139788525e-09,
0.9011329412460327,
4.223556171467635e-09,
3.537249160245892e-09,
3.6619716148322823e-09,
3.9635743576127425e-09,
0.00035357530578039587,
0.000334842421580106,
0.0004608502786140889,
0.00034079563920386136,
0.0004410339461173862,
0.0003586095117498189,
0.0003872338274959475,
0.0003618380578700453,
0.7673423886299133,
0.0003718887164723128,
2.998474407135687e-13,
2.766835968236875e-13,
3.151620674504696e-13,
2.8003993438399655e-13,
3.2409274916544994e-13,
2.918974198636015e-13,
3.173273276091404e-13,
0.9326202869415283,
3.163059278474961e-13,
2.758796609127895e-13,
0.881041944026947,
1.1214639528134285e-07,
1.0263241989605376e-07,
1.1562857338276444e-07,
1.2180794328742195e-07,
1.1103966812697763e-07,
1.0634828129241214e-07,
1.062341112856302e-07,
1.0677788253588005e-07,
1.4665859282558813e-07,
2.8454150147183555e-09,
2.69695465959785e-09,
2.8290059184143956e-09,
2.573463664390374e-09,
2.822698519366895e-09,
3.311747764911388e-09,
0.902323305606842,
3.01807645541885e-09,
3.1054809834785146e-09,
3.127528236390731e-09,
2.5579065550118685e-05,
1.8233697119285353e-05,
1.8092609025188722e-05,
1.84242453542538e-05,
1.9427001461735927e-05,
1.966076706594322e-05,
1.994954072870314e-05,
1.669095036049839e-05,
1.909297498059459e-05,
0.8284488916397095,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.3983441931486595e-05,
1.3339034012460615e-05,
1.921210059663281e-05,
1.3310981557879131e-05,
1.7528034732094966e-05,
1.4201437807059847e-05,
1.5541521861450747e-05,
1.4943253518140409e-05,
0.8324329257011414,
1.4516761439153925e-05,
1.3643780221173074e-05,
1.1465051102277357e-05,
1.187488578580087e-05,
0.833356499671936,
1.3916906027588993e-05,
1.3205932191340253e-05,
1.1564085980353411e-05,
1.198074642161373e-05,
1.2260265975783113e-05,
1.3074649359623436e-05,
8.39760195958661e-06,
7.680695489398204e-06,
8.798925591690931e-06,
7.992873179318849e-06,
9.287216016673483e-06,
8.229191735154018e-06,
8.880838322511408e-06,
0.839664876461029,
8.86927136889426e-06,
7.75681110098958e-06,
0.0034012040123343468,
0.0028817434795200825,
0.003361854236572981,
0.003149382770061493,
0.6591947078704834,
0.0031805469188839197,
0.0029254567343741655,
0.003160503227263689,
0.003645882708951831,
0.0031043440103530884}}};
}; // namespace HDBSCAN
}; // namespace ML | 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/tsvd_test.cu | /*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/decomposition/params.hpp>
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/random/rng.cuh>
#include <raft/util/cudart_utils.hpp>
#include <test_utils.h>
#include <tsvd/tsvd.cuh>
#include <vector>
namespace ML {
// Parameter pack for the TSVD tests; an instance drives both the small
// hard-coded fit check (basicTest) and the larger random round-trip
// (advancedTest).
template <typename T>
struct TsvdInputs {
  T tolerance;       // max element-wise error accepted by the approx comparators
  int n_row;         // rows of the small hard-coded matrix (basicTest)
  int n_col;         // cols of the small hard-coded matrix
  int n_row2;        // rows of the random matrix (advancedTest)
  int n_col2;        // cols of the random matrix
  float redundancy;  // fraction of n_col2 columns duplicated from the informative block
  unsigned long long int seed;  // RNG seed for the random data
  int algo;  // 0: COV_EIG_DQ, 1: COV_EIG_JACOBI, 2: reduced n_components (see advancedTest)
};
template <typename T>
::std::ostream& operator<<(::std::ostream& os, const TsvdInputs<T>& dims)
{
return os;
}
// Fixture: runs both TSVD workloads at construction so each TEST_P body only
// compares the buffers filled here.
template <typename T>
class TsvdTest : public ::testing::TestWithParam<TsvdInputs<T>> {
 public:
  TsvdTest()
    : params(::testing::TestWithParam<TsvdInputs<T>>::GetParam()),
      stream(handle.get_stream()),
      components(0, stream),
      components_ref(0, stream),
      data2(0, stream),
      data2_back(0, stream)
  {
    basicTest();
    advancedTest();
  }

 protected:
  // Fits TSVD on a tiny hard-coded n_row x n_col matrix and stores the fitted
  // components alongside a precomputed reference (compared up to sign by the
  // tests via CompareApproxAbs).
  void basicTest()
  {
    raft::random::Rng r(params.seed, raft::random::GenPC);
    int len = params.n_row * params.n_col;
    rmm::device_uvector<T> data(len, stream);
    // Column-major 4x3 input used by all parameter sets.
    std::vector<T> data_h = {1.0, 2.0, 4.0, 2.0, 4.0, 5.0, 5.0, 4.0, 2.0, 1.0, 6.0, 4.0};
    data_h.resize(len);
    raft::update_device(data.data(), data_h.data(), len, stream);
    int len_comp = params.n_col * params.n_col;
    components.resize(len_comp, stream);
    rmm::device_uvector<T> singular_vals(params.n_col, stream);
    // Expected right singular vectors for the matrix above.
    std::vector<T> components_ref_h = {
      -0.3951, 0.1532, 0.9058, -0.7111, -0.6752, -0.1959, -0.5816, 0.7215, -0.3757};
    components_ref_h.resize(len_comp);
    components_ref.resize(len_comp, stream);
    raft::update_device(components_ref.data(), components_ref_h.data(), len_comp, stream);
    paramsTSVD prms;
    prms.n_cols       = params.n_col;
    prms.n_rows       = params.n_row;
    prms.n_components = params.n_col;
    if (params.algo == 0)
      prms.algorithm = solver::COV_EIG_DQ;
    else
      prms.algorithm = solver::COV_EIG_JACOBI;
    tsvdFit(handle, data.data(), components.data(), singular_vals.data(), prms, stream);
  }

  // Fit-transform a larger random matrix containing some redundant columns,
  // then inverse-transform, so the tests can check the round trip
  // approximately reconstructs data2.
  void advancedTest()
  {
    raft::random::Rng r(params.seed, raft::random::GenPC);
    int len = params.n_row2 * params.n_col2;
    paramsTSVD prms;
    prms.n_cols       = params.n_col2;
    prms.n_rows       = params.n_row2;
    prms.n_components = params.n_col2;
    if (params.algo == 0)
      prms.algorithm = solver::COV_EIG_DQ;
    else if (params.algo == 1)
      prms.algorithm = solver::COV_EIG_JACOBI;
    else
      // algo == 2: default algorithm, but drop 15 components so reconstruction
      // relies on the redundant columns being linearly dependent.
      prms.n_components = params.n_col2 - 15;
    data2.resize(len, stream);
    // Fill the informative prefix with uniform noise, then duplicate its
    // start into the trailing redundant columns (device-to-device copy).
    int redundant_cols   = int(params.redundancy * params.n_col2);
    int redundant_len    = params.n_row2 * redundant_cols;
    int informative_cols = params.n_col2 - redundant_cols;
    int informative_len  = params.n_row2 * informative_cols;
    r.uniform(data2.data(), informative_len, T(-1.0), T(1.0), stream);
    RAFT_CUDA_TRY(cudaMemcpyAsync(data2.data() + informative_len,
                                  data2.data(),
                                  redundant_len * sizeof(T),
                                  cudaMemcpyDeviceToDevice,
                                  stream));
    rmm::device_uvector<T> data2_trans(prms.n_rows * prms.n_components, stream);
    int len_comp = params.n_col2 * prms.n_components;
    rmm::device_uvector<T> components2(len_comp, stream);
    rmm::device_uvector<T> explained_vars2(prms.n_components, stream);
    rmm::device_uvector<T> explained_var_ratio2(prms.n_components, stream);
    rmm::device_uvector<T> singular_vals2(prms.n_components, stream);
    tsvdFitTransform(handle,
                     data2.data(),
                     data2_trans.data(),
                     components2.data(),
                     explained_vars2.data(),
                     explained_var_ratio2.data(),
                     singular_vals2.data(),
                     prms,
                     stream);
    data2_back.resize(len, stream);
    tsvdInverseTransform(
      handle, data2_trans.data(), components2.data(), data2_back.data(), prms, stream);
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  TsvdInputs<T> params;
  // components/components_ref: filled by basicTest; data2/data2_back: filled
  // by advancedTest (original vs reconstructed).
  rmm::device_uvector<T> components, components_ref, data2, data2_back;
};
// Parameter sets: {tolerance, n_row, n_col, n_row2, n_col2, redundancy, seed, algo}.
// algo 2 (reduced-rank) runs on smaller matrices with a looser tolerance.
const std::vector<TsvdInputs<float>> inputsf2 = {{0.01f, 4, 3, 1024, 128, 0.25f, 1234ULL, 0},
                                                 {0.01f, 4, 3, 1024, 128, 0.25f, 1234ULL, 1},
                                                 {0.04f, 4, 3, 512, 64, 0.25f, 1234ULL, 2},
                                                 {0.04f, 4, 3, 512, 64, 0.25f, 1234ULL, 2}};
// Double-precision mirror of inputsf2.
const std::vector<TsvdInputs<double>> inputsd2 = {{0.01, 4, 3, 1024, 128, 0.25f, 1234ULL, 0},
                                                  {0.01, 4, 3, 1024, 128, 0.25f, 1234ULL, 1},
                                                  {0.05, 4, 3, 512, 64, 0.25f, 1234ULL, 2},
                                                  {0.05, 4, 3, 512, 64, 0.25f, 1234ULL, 2}};
typedef TsvdTest<float> TsvdTestLeftVecF;
// Compare the components fitted in basicTest() against the hard-coded
// reference; CompareApproxAbs makes the check sign-insensitive.
TEST_P(TsvdTestLeftVecF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(components.data(),
                                    components_ref.data(),
                                    (params.n_col * params.n_col),
                                    MLCommon::CompareApproxAbs<float>(params.tolerance),
                                    handle.get_stream()));
}

typedef TsvdTest<double> TsvdTestLeftVecD;
// Double-precision variant of the components check above.
TEST_P(TsvdTestLeftVecD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(components.data(),
                                    components_ref.data(),
                                    (params.n_col * params.n_col),
                                    MLCommon::CompareApproxAbs<double>(params.tolerance),
                                    handle.get_stream()));
}
typedef TsvdTest<float> TsvdTestDataVecF;
// Round-trip check from advancedTest(): transform + inverse_transform should
// approximately reproduce data2.
// NOTE(review): only the first n_col2 * n_col2 elements are compared, not the
// full n_row2 * n_col2 buffer — presumably intentional to bound test cost,
// but confirm it is not meant to be n_row2 * n_col2.
TEST_P(TsvdTestDataVecF, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(data2.data(),
                                    data2_back.data(),
                                    (params.n_col2 * params.n_col2),
                                    MLCommon::CompareApproxAbs<float>(params.tolerance),
                                    handle.get_stream()));
}

typedef TsvdTest<double> TsvdTestDataVecD;
// Double-precision variant of the round-trip check above.
TEST_P(TsvdTestDataVecD, Result)
{
  ASSERT_TRUE(MLCommon::devArrMatch(data2.data(),
                                    data2_back.data(),
                                    (params.n_col2 * params.n_col2),
                                    MLCommon::CompareApproxAbs<double>(params.tolerance),
                                    handle.get_stream()));
}
// Instantiate each fixture over the float and double parameter sets.
INSTANTIATE_TEST_CASE_P(TsvdTests, TsvdTestLeftVecF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(TsvdTests, TsvdTestLeftVecD, ::testing::ValuesIn(inputsd2));
INSTANTIATE_TEST_CASE_P(TsvdTests, TsvdTestDataVecF, ::testing::ValuesIn(inputsf2));
INSTANTIATE_TEST_CASE_P(TsvdTests, TsvdTestDataVecD, ::testing::ValuesIn(inputsd2));
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/quasi_newton.cu | /*
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/linear_model/glm.hpp>
#include <glm/qn/glm_linear.cuh>
#include <glm/qn/glm_logistic.cuh>
#include <glm/qn/glm_softmax.cuh>
#include <glm/qn/qn.cuh>
#include <gtest/gtest.h>
#include <raft/core/handle.hpp>
#include <raft/linalg/transpose.cuh>
#include <raft/util/cudart_utils.hpp>
#include <test_utils.h>
#include <vector>
namespace ML {
namespace GLM {
using detail::GLMDims;
using detail::LogisticLoss;
using detail::Softmax;
using detail::SquaredLoss;
// Fixture providing a fixed N x D row-major input matrix on the device plus an
// uninitialized device label vector that each test fills in with its own y.
struct QuasiNewtonTest : ::testing::Test {
  static constexpr int N = 10;  // number of samples
  static constexpr int D = 2;   // number of features
  const static double* nobptr;  // null bias pointer passed for no-intercept param checks
  const static double tol;      // tolerance used when comparing against sklearn references
  const static double X[N][D];  // host copy of the shared input matrix
  raft::handle_t cuml_handle;
  const raft::handle_t& handle;
  cudaStream_t stream = 0;
  std::shared_ptr<SimpleMatOwning<double>> Xdev;  // X uploaded to device (row-major)
  std::shared_ptr<SimpleVecOwning<double>> ydev;  // per-test labels/targets on device
  QuasiNewtonTest() : handle(cuml_handle) {}
  void SetUp()
  {
    stream = cuml_handle.get_stream();
    Xdev.reset(new SimpleMatOwning<double>(N, D, stream, ROW_MAJOR));
    raft::update_device(Xdev->data, &X[0][0], Xdev->len, stream);
    ydev.reset(new SimpleVecOwning<double>(N, stream));
    // Ensure the uploads finished before any test body runs.
    handle.sync_stream(stream);
  }
  void TearDown() {}
};
// Static fixture data. tol matches the precision of the sklearn-derived
// reference values used throughout the tests below; X presumably comes from
// the same python script that produced those references (confirm).
const double* QuasiNewtonTest::nobptr                                      = 0;
const double QuasiNewtonTest::tol                                          = 5e-6;
const double QuasiNewtonTest::X[QuasiNewtonTest::N][QuasiNewtonTest::D] = {
  {-0.2047076594847130, 0.4789433380575482},
  {-0.5194387150567381, -0.5557303043474900},
  {1.9657805725027142, 1.3934058329729904},
  {0.0929078767437177, 0.2817461528302025},
  {0.7690225676118387, 1.2464347363862822},
  {1.0071893575830049, -1.2962211091122635},
  {0.2749916334321240, 0.2289128789353159},
  {1.3529168351654497, 0.8864293405915888},
  {-2.0016373096603974, -0.3718425371402544},
  {1.6690253095248706, -0.4385697358355719}};
// Compares the fitted parameter vector `w` (device) against reference weights
// and bias given on the host. The host weights are row-major (class-major);
// they are repacked into the column-major layout the solver uses before the
// device-side comparison.
template <typename T, class Comp>
::testing::AssertionResult checkParamsEqual(const raft::handle_t& handle,
                                            const T* host_weights,
                                            const T* host_bias,
                                            const T* w,
                                            const GLMDims& dims,
                                            Comp& comp,
                                            cudaStream_t stream)
{
  const int n_classes = dims.C;
  const int n_feats   = dims.D;
  const bool has_bias = dims.fit_intercept;

  // Transpose host_weights[class][feature] into column-major order.
  std::vector<T> expected(n_classes * n_feats);
  for (int feat = 0; feat < n_feats; feat++) {
    for (int cls = 0; cls < n_classes; cls++) {
      expected[feat * n_classes + cls] = host_weights[cls * n_feats + feat];
    }
  }

  SimpleVecOwning<T> w_ref(dims.n_param, stream);
  raft::update_device(w_ref.data, expected.data(), n_classes * n_feats, stream);
  // The bias terms, when present, sit after all the weights.
  if (has_bias) {
    raft::update_device(&w_ref.data[n_classes * n_feats], host_bias, n_classes, stream);
  }
  handle.sync_stream(stream);
  return MLCommon::devArrMatch(w_ref.data, w, w_ref.len, comp);
}
// Fits the given loss with the internal detail::qn_fit entry point and returns
// the final objective value. Tolerances are set extremely tight so the solver
// effectively runs until max_iter or numerical convergence.
template <typename T, class LossFunction>
T run(const raft::handle_t& handle,
      LossFunction& loss,
      const SimpleMat<T>& X,
      const SimpleVec<T>& y,
      T l1,
      T l2,
      T* w,
      SimpleDenseMat<T>& z,
      int verbosity,
      cudaStream_t stream)
{
  qn_params opt;
  opt.max_iter            = 100;
  opt.grad_tol            = 1e-16;
  opt.change_tol          = 1e-16;
  opt.linesearch_max_iter = 50;
  opt.lbfgs_memory        = 5;
  opt.penalty_l1          = l1;
  opt.penalty_l2          = l2;
  opt.verbose             = verbosity;

  T objective;
  int iterations = 0;
  detail::qn_fit<T, LossFunction>(handle, opt, loss, X, y, z, w, &objective, &iterations);
  return objective;
}
// Same fit as run(), but through the public qnFit/qnFitSparse C++ API.
// Dispatches on the concrete SimpleMat subtype; returns the final objective.
template <typename T>
T run_api(const raft::handle_t& cuml_handle,
          qn_loss_type loss_type,
          int C,
          bool fit_intercept,
          const SimpleMat<T>& X,
          const SimpleVec<T>& y,
          T l1,
          T l2,
          T* w,
          SimpleDenseMat<T>& z,
          int verbosity,
          cudaStream_t stream)
{
  qn_params pams;
  pams.max_iter = 100;
  // NOTE(review): looser tolerances (1e-8) than run()'s 1e-16 — presumably
  // fine because only objective values are compared; confirm.
  pams.grad_tol            = 1e-8;
  pams.change_tol          = 1e-8;
  pams.linesearch_max_iter = 50;
  pams.lbfgs_memory        = 5;
  pams.penalty_l1          = l1;
  pams.penalty_l2          = l2;
  pams.verbose             = verbosity;
  pams.fit_intercept       = fit_intercept;
  pams.loss                = loss_type;
  int num_iters            = 0;
  // The API path always starts from w0 = 0.
  SimpleVec<T> w0(w, X.n + fit_intercept);
  w0.fill(T(0), stream);
  T fx;
  if (auto X_dense = dynamic_cast<const SimpleDenseMat<T>*>(&X)) {
    qnFit(cuml_handle,
          pams,
          X_dense->data,
          X_dense->ord == COL_MAJOR,
          y.data,
          X_dense->m,
          X_dense->n,
          C,
          w,
          &fx,
          &num_iters);
  } else if (auto X_sparse = dynamic_cast<const SimpleSparseMat<T>*>(&X)) {
    qnFitSparse(cuml_handle,
                pams,
                X_sparse->values,
                X_sparse->cols,
                X_sparse->row_ids,
                X_sparse->nnz,
                y.data,
                X_sparse->m,
                X_sparse->n,
                C,
                w,
                &fx,
                &num_iters);
  } else {
    // Unknown matrix subtype: fail the test rather than silently skipping.
    ADD_FAILURE();
  }

  return fx;
}
// Binary logistic regression: objective values and fitted parameters are
// compared against references solved with sklearn on the same data, for all
// four combinations of {L1, L2} x {with, without intercept}. Each case is
// also re-run through the public API (run_api), checking the objective only.
TEST_F(QuasiNewtonTest, binary_logistic_vs_sklearn)
{
// NOTE(review): skipped on CUDA >= 11.2 — reason not visible here; confirm.
#if CUDART_VERSION >= 11020
  GTEST_SKIP();
#endif
  MLCommon::CompareApprox<double> compApprox(tol);
  // Test case generated in python and solved with sklearn
  double y[N] = {1, 1, 1, 0, 1, 0, 1, 0, 1, 0};
  raft::update_device(ydev->data, &y[0], ydev->len, stream);
  handle.sync_stream(stream);
  double alpha = 0.01 * N;
  LogisticLoss<double> loss_b(handle, D, true);
  LogisticLoss<double> loss_no_b(handle, D, false);
  SimpleVecOwning<double> w0(D + 1, stream);
  SimpleMatOwning<double> z(1, N, stream);
  double l1, l2, fx;

  // Case 1: L1 penalty, with intercept.
  double w_l1_b[2] = {-1.6899370396155091, 1.9021577534928300};
  double b_l1_b    = 0.8057670813749118;
  double obj_l1_b  = 0.44295941481024703;
  l1 = alpha;
  l2 = 0.0;
  fx = run(handle, loss_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l1_b, fx));
  ASSERT_TRUE(checkParamsEqual(handle, &w_l1_b[0], &b_l1_b, w0.data, loss_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_LOGISTIC,
               2,
               loss_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l1_b, fx));

  // Case 2: L2 penalty, with intercept.
  double w_l2_b[2] = {-1.5339880402781370, 1.6788639581350926};
  double b_l2_b    = 0.806087868102401;
  double obj_l2_b  = 0.4378085369889721;
  l1 = 0;
  l2 = alpha;
  fx = run(handle, loss_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l2_b, fx));
  ASSERT_TRUE(checkParamsEqual(handle, &w_l2_b[0], &b_l2_b, w0.data, loss_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_LOGISTIC,
               2,
               loss_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l2_b, fx));

  // Case 3: L1 penalty, no intercept (nobptr == nullptr bias reference).
  double w_l1_no_b[2] = {-1.6215035298864591, 2.3650868394981086};
  double obj_l1_no_b  = 0.4769896009200278;
  l1 = alpha;
  l2 = 0.0;
  fx = run(handle, loss_no_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l1_no_b, fx));
  ASSERT_TRUE(
    checkParamsEqual(handle, &w_l1_no_b[0], nobptr, w0.data, loss_no_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_LOGISTIC,
               2,
               loss_no_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l1_no_b, fx));

  // Case 4: L2 penalty, no intercept.
  double w_l2_no_b[2] = {-1.3931049893764620, 2.0140103094119621};
  double obj_l2_no_b  = 0.47502098062114273;
  l1 = 0;
  l2 = alpha;
  fx = run(handle, loss_no_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l2_no_b, fx));
  ASSERT_TRUE(
    checkParamsEqual(handle, &w_l2_no_b[0], nobptr, w0.data, loss_no_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_LOGISTIC,
               2,
               loss_no_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l2_no_b, fx));
}
// Multiclass (softmax) logistic regression vs sklearn reference objectives,
// for all four {L1, L2} x {with, without intercept} combinations; each case
// is re-run through the public API as well.
TEST_F(QuasiNewtonTest, multiclass_logistic_vs_sklearn)
{
// NOTE(review): skipped on CUDA >= 11.2 — reason not visible here; confirm.
#if CUDART_VERSION >= 11020
  GTEST_SKIP();
#endif
  // The data seems to small for the objective to be strongly convex
  // leaving out exact param checks
  MLCommon::CompareApprox<double> compApprox(tol);
  double y[N] = {2, 2, 0, 3, 3, 0, 0, 0, 1, 0};
  raft::update_device(ydev->data, &y[0], ydev->len, stream);
  handle.sync_stream(stream);
  double fx, l1, l2;
  int C        = 4;
  double alpha = 0.016 * N;
  SimpleMatOwning<double> z(C, N, stream);
  SimpleVecOwning<double> w0(C * (D + 1), stream);
  Softmax<double> loss_b(handle, D, C, true);
  Softmax<double> loss_no_b(handle, D, C, false);

  // Case 1: L1 penalty, with intercept.
  l1              = alpha;
  l2              = 0.0;
  double obj_l1_b = 0.5407911382311313;
  fx = run(handle, loss_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l1_b, fx));
  fx = run_api(cuml_handle,
               QN_LOSS_SOFTMAX,
               C,
               loss_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l1_b, fx));

  // Case 2: L2 penalty, with intercept.
  l1              = 0.0;
  l2              = alpha;
  double obj_l2_b = 0.5721784062720949;
  fx = run(handle, loss_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l2_b, fx));
  fx = run_api(cuml_handle,
               QN_LOSS_SOFTMAX,
               C,
               loss_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l2_b, fx));

  // Case 3: L1 penalty, no intercept.
  l1                 = alpha;
  l2                 = 0.0;
  double obj_l1_no_b = 0.6606929813245878;
  fx = run(handle, loss_no_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l1_no_b, fx));
  fx = run_api(cuml_handle,
               QN_LOSS_SOFTMAX,
               C,
               loss_no_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l1_no_b, fx));

  // Case 4: L2 penalty, no intercept.
  l1                 = 0.0;
  l2                 = alpha;
  double obj_l2_no_b = 0.6597171282106854;
  fx = run(handle, loss_no_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l2_no_b, fx));
  fx = run_api(cuml_handle,
               QN_LOSS_SOFTMAX,
               C,
               loss_no_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l2_no_b, fx));
}
// Linear regression (squared loss): objectives and fitted parameters compared
// against sklearn references, for all four {L1, L2} x {with, without
// intercept} combinations; each case is re-run through the public API too.
TEST_F(QuasiNewtonTest, linear_regression_vs_sklearn)
{
  MLCommon::CompareApprox<double> compApprox(tol);
  double y[N] = {0.2675836026202781,
                 -0.0678277759663704,
                 -0.6334027174275105,
                 -0.1018336189077367,
                 0.0933815935886932,
                 -1.1058853496996381,
                 -0.1658298189619160,
                 -0.2954290675648911,
                 0.7966520536712608,
                 -1.0767450516284769};
  raft::update_device(ydev->data, &y[0], ydev->len, stream);
  handle.sync_stream(stream);
  double fx, l1, l2;
  double alpha = 0.01 * N;
  SimpleVecOwning<double> w0(D + 1, stream);
  SimpleMatOwning<double> z(1, N, stream);
  SquaredLoss<double> loss_b(handle, D, true);
  SquaredLoss<double> loss_no_b(handle, D, false);

  // Case 1: L1 penalty, with intercept.
  l1               = alpha;
  l2               = 0.0;
  double w_l1_b[2] = {-0.4952397281519840, 0.3813315300180231};
  double b_l1_b    = -0.08140861819001188;
  double obj_l1_b  = 0.011136986298775138;
  fx = run(handle, loss_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l1_b, fx));
  ASSERT_TRUE(checkParamsEqual(handle, &w_l1_b[0], &b_l1_b, w0.data, loss_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_SQUARED,
               1,
               loss_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l1_b, fx));

  // Case 2: L2 penalty, with intercept.
  l1               = 0.0;
  l2               = alpha;
  double w_l2_b[2] = {-0.5022384743587150, 0.3937352417485087};
  double b_l2_b    = -0.08062397391797513;
  double obj_l2_b  = 0.004268621967866347;
  fx = run(handle, loss_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l2_b, fx));
  ASSERT_TRUE(checkParamsEqual(handle, &w_l2_b[0], &b_l2_b, w0.data, loss_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_SQUARED,
               1,
               loss_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l2_b, fx));

  // Case 3: L1 penalty, no intercept.
  l1                  = alpha;
  l2                  = 0.0;
  double w_l1_no_b[2] = {-0.5175178128147135, 0.3720844589831813};
  double obj_l1_no_b  = 0.013981355746112447;
  fx = run(handle, loss_no_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l1_no_b, fx));
  ASSERT_TRUE(
    checkParamsEqual(handle, &w_l1_no_b[0], nobptr, w0.data, loss_no_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_SQUARED,
               1,
               loss_no_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l1_no_b, fx));

  // Case 4: L2 penalty, no intercept.
  l1                  = 0.0;
  l2                  = alpha;
  double w_l2_no_b[2] = {-0.5241651041233270, 0.3846317886627560};
  double obj_l2_no_b  = 0.007061261366969662;
  fx = run(handle, loss_no_b, *Xdev, *ydev, l1, l2, w0.data, z, 0, stream);
  ASSERT_TRUE(compApprox(obj_l2_no_b, fx));
  ASSERT_TRUE(
    checkParamsEqual(handle, &w_l2_no_b[0], nobptr, w0.data, loss_no_b, compApprox, stream));
  fx = run_api(cuml_handle,
               QN_LOSS_SQUARED,
               1,
               loss_no_b.fit_intercept,
               *Xdev,
               *ydev,
               l1,
               l2,
               w0.data,
               z,
               0,
               stream);
  ASSERT_TRUE(compApprox(obj_l2_no_b, fx));
}
// Sanity-check qnPredict with hand-set weights w = (1, 0): the linear score
// is then exactly X[:,0], so logistic predictions are its sign and squared
// loss predictions are the score itself.
TEST_F(QuasiNewtonTest, predict)
{
  MLCommon::CompareApprox<double> compApprox(1e-8);
  std::vector<double> w_host(D);
  w_host[0] = 1;
  std::vector<double> preds_host(N);
  SimpleVecOwning<double> w(D, stream);
  SimpleVecOwning<double> preds(N, stream);
  raft::update_device(w.data, &w_host[0], w.len, stream);
  qn_params pams;
  pams.loss          = QN_LOSS_LOGISTIC;
  pams.fit_intercept = false;
  // Logistic: predicted class is 1 iff the score X[it][0] is positive.
  qnPredict(handle, pams, Xdev->data, false, N, D, 2, w.data, preds.data);
  raft::update_host(&preds_host[0], preds.data, preds.len, stream);
  handle.sync_stream(stream);
  for (int it = 0; it < N; it++) {
    ASSERT_TRUE(X[it][0] > 0 ? compApprox(preds_host[it], 1) : compApprox(preds_host[it], 0));
  }
  // Squared loss: prediction equals the raw linear score X[it][0].
  pams.loss          = QN_LOSS_SQUARED;
  pams.fit_intercept = false;
  qnPredict(handle, pams, Xdev->data, false, N, D, 1, w.data, preds.data);
  raft::update_host(&preds_host[0], preds.data, preds.len, stream);
  handle.sync_stream(stream);
  for (int it = 0; it < N; it++) {
    ASSERT_TRUE(compApprox(X[it][0], preds_host[it]));
  }
}
// Softmax prediction with hand-set weights: class 0 scores X[:,0], the last
// class (C-1) scores X[:,1], and the middle classes all score 0.
TEST_F(QuasiNewtonTest, predict_softmax)
{
  MLCommon::CompareApprox<double> compApprox(1e-8);
  int C = 4;
  std::vector<double> w_host(C * D);
  w_host[0]         = 1;
  w_host[D * C - 1] = 1;
  std::vector<double> preds_host(N);
  SimpleVecOwning<double> w(w_host.size(), stream);
  SimpleVecOwning<double> preds(N, stream);
  raft::update_device(w.data, &w_host[0], w.len, stream);
  qn_params pams;
  pams.loss          = QN_LOSS_SOFTMAX;
  pams.fit_intercept = false;
  qnPredict(handle, pams, Xdev->data, false, N, D, C, w.data, preds.data);
  raft::update_host(&preds_host[0], preds.data, preds.len, stream);
  handle.sync_stream(stream);
  for (int it = 0; it < N; it++) {
    if (X[it][0] < 0 && X[it][1] < 0) {
      // Both scored classes negative: a zero-scoring middle class (index 1)
      // is expected to win the argmax.
      ASSERT_TRUE(compApprox(1, preds_host[it]));
    } else if (X[it][0] > X[it][1]) {
      ASSERT_TRUE(compApprox(0, preds_host[it]));
    } else {
      ASSERT_TRUE(compApprox(C - 1, preds_host[it]));
    }
  }
}
// Fit and predict a softmax model on the same data twice — once through the
// dense code path and once through the CSR (sparse) code path — and require
// identical objective values and predictions from both.
TEST_F(QuasiNewtonTest, dense_vs_sparse_logistic)
{
// NOTE(review): skipped on CUDA >= 11.2 — presumably a workaround for a
// toolkit/cusparse issue; confirm whether this skip is still required.
#if CUDART_VERSION >= 11020
  GTEST_SKIP();
#endif
  // Prepare a sparse input matrix from the dense matrix X.
  // Yes, it's not sparse at all, yet the test does check whether the behaviour
  // of dense and sparse variants is the same.
  rmm::device_uvector<int> mem_X_cols(N * D, stream);
  rmm::device_uvector<int> mem_X_row_ids(N + 1, stream);
  int host_X_cols[N][D];
  int host_X_row_ids[N + 1];
  // Every row stores all D columns (fully dense CSR: col indices 0..D-1).
  for (int i = 0; i < N; i++) {
    for (int j = 0; j < D; j++) {
      host_X_cols[i][j] = j;
    }
  }
  // Row pointer i*D: each row holds exactly D entries.
  for (int i = 0; i < N + 1; i++) {
    host_X_row_ids[i] = i * D;
  }
  raft::update_device(mem_X_cols.data(), &host_X_cols[0][0], mem_X_cols.size(), stream);
  raft::update_device(mem_X_row_ids.data(), &host_X_row_ids[0], mem_X_row_ids.size(), stream);
  // Reuses the dense value buffer Xdev->data as the CSR value array.
  SimpleSparseMat<double> X_sparse(
    Xdev->data, mem_X_cols.data(), mem_X_row_ids.data(), N * D, N, D);
  MLCommon::CompareApprox<double> compApprox(tol);
  double y[N] = {2, 2, 0, 3, 3, 0, 0, 0, 1, 0};
  raft::update_device(ydev->data, &y[0], ydev->len, stream);
  handle.sync_stream(stream);
  int C               = 4;
  qn_loss_type loss_type = QN_LOSS_SOFTMAX;  // Softmax (loss_b, loss_no_b)
  double alpha        = 0.016 * N;
  // Loss objects with and without intercept term.
  Softmax<double> loss_b(handle, D, C, true);
  Softmax<double> loss_no_b(handle, D, C, false);
  SimpleMatOwning<double> z_dense(C, N, stream);
  SimpleMatOwning<double> z_sparse(C, N, stream);
  SimpleVecOwning<double> w0_dense(C * (D + 1), stream);
  SimpleVecOwning<double> w0_sparse(C * (D + 1), stream);
  std::vector<double> preds_dense_host(N);
  std::vector<double> preds_sparse_host(N);
  SimpleVecOwning<double> preds_dense(N, stream);
  SimpleVecOwning<double> preds_sparse(N, stream);
  // Runs one dense fit + one sparse fit with the given penalties and checks
  // that objective values and per-sample predictions agree.
  auto test_run = [&](double l1, double l2, Softmax<double> loss) {
    qn_params pams;
    pams.penalty_l1    = l1;
    pams.penalty_l2    = l2;
    pams.loss          = loss_type;
    pams.fit_intercept = loss.fit_intercept;
    double f_dense, f_sparse;
    f_dense  = run(handle, loss, *Xdev, *ydev, l1, l2, w0_dense.data, z_dense, 0, stream);
    f_sparse = run(handle, loss, X_sparse, *ydev, l1, l2, w0_sparse.data, z_sparse, 0, stream);
    ASSERT_TRUE(compApprox(f_dense, f_sparse));
    qnPredict(
      handle, pams, Xdev->data, Xdev->ord == COL_MAJOR, N, D, C, w0_dense.data, preds_dense.data);
    qnPredictSparse(handle,
                    pams,
                    X_sparse.values,
                    X_sparse.cols,
                    X_sparse.row_ids,
                    X_sparse.nnz,
                    N,
                    D,
                    C,
                    w0_sparse.data,
                    preds_sparse.data);
    raft::update_host(&preds_dense_host[0], preds_dense.data, preds_dense.len, stream);
    raft::update_host(&preds_sparse_host[0], preds_sparse.data, preds_sparse.len, stream);
    handle.sync_stream(stream);
    for (int i = 0; i < N; i++) {
      ASSERT_TRUE(compApprox(preds_dense_host[i], preds_sparse_host[i]));
    }
    // Repeat the comparison through the public C++ API entry point.
    f_dense  = run_api(cuml_handle,
                      QN_LOSS_SOFTMAX,
                      C,
                      loss.fit_intercept,
                      *Xdev,
                      *ydev,
                      l1,
                      l2,
                      w0_dense.data,
                      z_dense,
                      0,
                      stream);
    f_sparse = run_api(cuml_handle,
                       QN_LOSS_SOFTMAX,
                       C,
                       loss.fit_intercept,
                       X_sparse,
                       *ydev,
                       l1,
                       l2,
                       w0_sparse.data,
                       z_sparse,
                       0,
                       stream);
    ASSERT_TRUE(compApprox(f_dense, f_sparse));
  };
  // L1-only and L2-only regularization, with and without intercept.
  test_run(alpha, 0.0, loss_b);
  test_run(0.0, alpha, loss_b);
  test_run(alpha, 0.0, loss_no_b);
  test_run(0.0, alpha, loss_no_b);
}
} // namespace GLM
} // end namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/svc_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cub/cub.cuh>
#include <cuml/common/logger.hpp>
#include <cuml/datasets/make_blobs.hpp>
#include <cuml/svm/svc.hpp>
#include <cuml/svm/svm_model.h>
#include <cuml/svm/svm_parameter.h>
#include <cuml/svm/svr.hpp>
#include <gtest/gtest.h>
#include <iostream>
#include <raft/core/math.hpp>
#include <raft/distance/kernels.cuh>
#include <raft/linalg/add.cuh>
#include <raft/linalg/map_then_reduce.cuh>
#include <raft/linalg/transpose.cuh>
#include <raft/random/rng.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <rmm/device_uvector.hpp>
#include <string>
#include <svm/smoblocksolve.cuh>
#include <svm/smosolver.cuh>
#include <svm/workingset.cuh>
#include <test_utils.h>
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
#include <thrust/fill.h>
#include <thrust/iterator/zip_iterator.h>
#include <thrust/reduce.h>
#include <thrust/transform.h>
#include <thrust/tuple.h>
#include <type_traits>
#include <vector>
namespace ML {
namespace SVM {
using namespace raft::distance::kernels;
// Fill the device array C_vec (length n) with the scalar penalty value C,
// asynchronously on the given stream.
template <typename math_t>
void init_C(math_t C, math_t* C_vec, int n, cudaStream_t stream)
{
  auto first = thrust::device_pointer_cast(C_vec);
  thrust::fill_n(thrust::cuda::par.on(stream), first, n, C);
}
// Fixture for WorkingSet selection tests: uploads a small fixed problem
// (10 samples) of objective gradients f, labels y, box constraints C and
// dual coefficients alpha to the device.
template <typename math_t>
class WorkingSetTest : public ::testing::Test {
 public:
  WorkingSetTest()
    : stream(handle.get_stream()),
      f_dev(10, stream),
      y_dev(10, stream),
      C_dev(10, stream),
      alpha_dev(10, stream)
  {
    init_C(C, C_dev.data(), 10, stream);
    raft::update_device(f_dev.data(), f_host, 10, stream);
    raft::update_device(y_dev.data(), y_host, 10, stream);
    raft::update_device(alpha_dev.data(), alpha_host, 10, stream);
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  // Allocated per-test (see Init/Select tests); not owned by the fixture.
  WorkingSet<math_t>* ws;
  math_t f_host[10] = {1, 3, 10, 4, 2, 8, 6, 5, 9, 7};
  rmm::device_uvector<math_t> f_dev;
  math_t y_host[10] = {-1, -1, -1, -1, -1, 1, 1, 1, 1, 1};
  rmm::device_uvector<math_t> y_dev;
  rmm::device_uvector<math_t> C_dev;
  math_t C = 1.5;
  math_t alpha_host[10] = {0, 0, 0.1, 0.2, 1.5, 0, 0.2, 0.4, 1.5, 1.5};
  // lower/upper bound status of each alpha value:
  rmm::device_uvector<math_t> alpha_dev;  // l l l/u l/u u u l/u l/u l l
  // Expected working-set indices for the first and second Select call.
  int expected_idx[4]  = {4, 3, 8, 2};
  int expected_idx2[4] = {8, 2, 4, 9};
};
// All typed tests in this file run for both float and double.
typedef ::testing::Types<float, double> FloatTypes;
TYPED_TEST_CASE(WorkingSetTest, FloatTypes);
// The working set size defaults to min(n_rows, 1024).
TYPED_TEST(WorkingSetTest, Init)
{
  auto stream = this->handle.get_stream();
  this->ws    = new WorkingSet<TypeParam>(this->handle, stream, 10);
  EXPECT_EQ(this->ws->GetSize(), 10);
  delete this->ws;
  this->ws = new WorkingSet<TypeParam>(this->handle, stream, 100000);
  EXPECT_EQ(this->ws->GetSize(), 1024);
  delete this->ws;
}
// Working-set selection: the first (simple) selection and the first full
// Select should pick the same indices; a repeated Select should advance to
// the second expected index set.
TYPED_TEST(WorkingSetTest, Select)
{
  auto stream = this->handle.get_stream();
  this->ws    = new WorkingSet<TypeParam>(this->handle, stream, 10, 4);
  EXPECT_EQ(this->ws->GetSize(), 4);
  this->ws->SimpleSelect(
    this->f_dev.data(), this->alpha_dev.data(), this->y_dev.data(), this->C_dev.data());
  ASSERT_TRUE(devArrMatchHost(this->expected_idx,
                              this->ws->GetIndices(),
                              this->ws->GetSize(),
                              MLCommon::Compare<int>(),
                              stream));
  this->ws->Select(
    this->f_dev.data(), this->alpha_dev.data(), this->y_dev.data(), this->C_dev.data());
  ASSERT_TRUE(devArrMatchHost(this->expected_idx,
                              this->ws->GetIndices(),
                              this->ws->GetSize(),
                              MLCommon::Compare<int>(),
                              stream));
  this->ws->Select(
    this->f_dev.data(), this->alpha_dev.data(), this->y_dev.data(), this->C_dev.data());
  ASSERT_TRUE(devArrMatchHost(this->expected_idx2,
                              this->ws->GetIndices(),
                              this->ws->GetSize(),
                              MLCommon::Compare<int>(),
                              stream));
  delete this->ws;
}

// TYPED_TEST(WorkingSetTest, Priority) {
// See Issue #946
//}
// Parameter combination for the KernelCache tests:
//   sparse         - input matrix is given in CSR format (vs dense)
//   batching       - kernel tile is computed in several small batches
//   sparse_compute - force the sparse kernel-computation code path
struct KernelCacheTestInput {
  bool sparse;
  bool batching;
  bool sparse_compute;
};
// Pretty-printer so SCOPED_TRACE shows the active parameter combination.
std::ostream& operator<<(std::ostream& os, const KernelCacheTestInput& b)
{
  os << "sparse=" << b.sparse;
  os << ", batching=" << b.batching;
  os << ", sparse_compute=" << b.sparse_compute;
  return os;
}
// Fixture for KernelCache tests: a tiny 4x2 matrix stored both dense
// (column major) and as CSR, plus a 3-element working set. The expected
// kernel tiles are precomputed on the host.
template <typename math_t>
class KernelCacheTest : public ::testing::Test {
 public:
  KernelCacheTest()
    : stream(handle.get_stream()),
      n_rows(4),
      n_cols(2),
      n_ws(3),
      x_dev(n_rows * n_cols, stream),
      x_indptr_dev(n_rows + 1, stream),
      x_indices_dev(n_rows * n_cols, stream),
      x_data_dev(n_rows * n_cols, stream),
      ws_idx_dev(2 * n_ws, stream)
  {
    raft::update_device(x_dev.data(), x_host, n_rows * n_cols, stream);
    raft::update_device(x_indptr_dev.data(), x_host_indptr, n_rows + 1, stream);
    raft::update_device(x_indices_dev.data(), x_host_indices, n_rows * n_cols, stream);
    raft::update_device(x_data_dev.data(), x_host_data, n_rows * n_cols, stream);
    raft::update_device(ws_idx_dev.data(), ws_idx_host, n_ws, stream);
  }

 protected:
  // Naive host side kernel implementation used for comparison
  // (applies the chosen nonlinearity to the precomputed linear tile in place).
  void ApplyNonlin(KernelParams params)
  {
    switch (params.kernel) {
      case LINEAR: break;
      case POLYNOMIAL:
        for (int z = 0; z < n_rows * n_ws; z++) {
          math_t val            = params.gamma * tile_host_expected[z] + params.coef0;
          tile_host_expected[z] = pow(val, params.degree);
        }
        break;
      case TANH:
        for (int z = 0; z < n_rows * n_ws; z++) {
          math_t val            = params.gamma * tile_host_expected[z] + params.coef0;
          tile_host_expected[z] = tanh(val);
        }
        break;
      case RBF:
        for (int i = 0; i < n_ws; i++) {
          for (int j = 0; j < n_rows; j++) {
            math_t d = 0;
            for (int k = 0; k < n_cols; k++) {
              int idx_i   = ws_idx_host[i];
              math_t diff = x_host[idx_i + k * n_rows] - x_host[j + k * n_rows];
              d += diff * diff;
            }
            tile_host_expected[i * n_rows + j] = exp(-params.gamma * d);
          }
        }
        break;
    }
  }
  // Compare one batch of cached kernel rows against the host reference
  // tile_host_all, honoring the (possibly permuted) working-set order.
  void check(math_t* kernel_data, int* nz_da_idx, int nnz_da, int batch_size, int offset)
  {
    auto stream = this->handle.get_stream();
    std::vector<int> ws_idx_h(nnz_da);
    raft::update_host(ws_idx_h.data(), nz_da_idx, nnz_da, stream);
    handle.sync_stream(stream);
    // Note: kernel cache can permute the working set, so we have to look
    // up which rows we compare
    for (int i = 0; i < nnz_da; i++) {
      SCOPED_TRACE(i);
      const math_t* cache_row = kernel_data + i * batch_size;
      const math_t* row_exp   = tile_host_all + ws_idx_h[i] * this->n_rows + offset;
      EXPECT_TRUE(devArrMatchHost(
        row_exp, cache_row, batch_size, MLCommon::CompareApprox<math_t>(1e-6f), stream));
    }
  }
  raft::handle_t handle;
  cudaStream_t stream = 0;
  int n_rows;  // =4
  int n_cols;  // =2
  int n_ws;
  rmm::device_uvector<math_t> x_dev;
  rmm::device_uvector<int> ws_idx_dev;
  math_t x_host[8] = {1, 2, 3, 4, 5, 6, 7, 8};
  // csr representation
  int x_host_indptr[5]   = {0, 2, 4, 6, 8};
  int x_host_indices[8]  = {0, 1, 0, 1, 0, 1, 0, 1};
  math_t x_host_data[8]  = {1, 5, 2, 6, 3, 7, 4, 8};
  rmm::device_uvector<int> x_indptr_dev;
  rmm::device_uvector<int> x_indices_dev;
  rmm::device_uvector<math_t> x_data_dev;
  int ws_idx_host[4] = {0, 1, 3};
  // Linear-kernel tile for the working set (3 cols) and for all rows (4 cols).
  math_t tile_host_expected[12] = {26, 32, 38, 44, 32, 40, 48, 56, 44, 56, 68, 80};
  math_t tile_host_all[16] = {26, 32, 38, 44, 32, 40, 48, 56, 38, 48, 58, 68, 44, 56, 68, 80};
};
TYPED_TEST_CASE_P(KernelCacheTest);
// Evaluate the dense kernel tile for each kernel type and compare against
// the naive host implementation (ApplyNonlin).
TYPED_TEST_P(KernelCacheTest, EvalTest)
{
  auto stream = this->handle.get_stream();
  std::vector<KernelParams> param_vec{KernelParams{LINEAR, 3, 1, 0},
                                      KernelParams{POLYNOMIAL, 2, 1.3, 1},
                                      KernelParams{TANH, 2, 0.5, 2.4},
                                      KernelParams{RBF, 2, 0.5, 0}};
  // cache_size = 0: no caching, every tile is computed from scratch.
  float cache_size = 0;
  auto dense_view =
    raft::make_device_strided_matrix_view<TypeParam, int, raft::layout_f_contiguous>(
      this->x_dev.data(), this->n_rows, this->n_cols, 0);
  for (auto params : param_vec) {
    GramMatrixBase<TypeParam>* kernel = KernelFactory<TypeParam>::create(params);
    KernelCache<TypeParam, raft::device_matrix_view<TypeParam, int, raft::layout_stride>> cache(
      this->handle,
      dense_view,
      this->n_rows,
      this->n_cols,
      this->n_ws,
      kernel,
      params.kernel,
      cache_size,
      C_SVC);
    cache.InitWorkingSet(this->ws_idx_dev.data());
    auto batch_descriptor = cache.InitFullTileBatching(cache.getKernelIndices(false), this->n_ws);
    // there should be only one batch for this test that contains the full n_rows x n_ws tile
    ASSERT_TRUE(cache.getNextBatchKernel(batch_descriptor));
    // apply nonlinearity on tile_host_expected
    this->ApplyNonlin(params);
    ASSERT_TRUE(devArrMatchHost(this->tile_host_expected,
                                batch_descriptor.kernel_data,
                                this->n_rows * this->n_ws,
                                MLCommon::CompareApprox<TypeParam>(1e-6f),
                                stream));
    ASSERT_FALSE(cache.getNextBatchKernel(batch_descriptor));
    delete kernel;
  }
}
// Exercise the kernel cache in C-SVC mode for all dense/sparse x
// batching/sparse-compute combinations; each tile is requested twice so the
// second pass hits the cache lookup path.
TYPED_TEST_P(KernelCacheTest, SvcCacheEvalTest)
{
  KernelParams param{LINEAR, 3, 1, 0};
  float cache_size = sizeof(TypeParam) * this->n_rows * 32 / (1024.0 * 1024);
  std::vector<KernelCacheTestInput> data{{KernelCacheTestInput{false, false, false}},
                                         {KernelCacheTestInput{false, true, false}},
                                         {KernelCacheTestInput{true, false, false}},
                                         {KernelCacheTestInput{true, true, false}},
                                         {KernelCacheTestInput{true, false, true}},
                                         {KernelCacheTestInput{true, true, true}}};
  for (auto input : data) {
    SCOPED_TRACE(input);
    // Tiny limits force batching / the sparse compute path when requested.
    size_t tile_byte_limit   = input.batching ? (2 * this->n_ws * sizeof(TypeParam)) : (1 << 30);
    size_t sparse_byte_limit = input.sparse_compute ? 1 : (1 << 30);
    GramMatrixBase<TypeParam>* kernel = KernelFactory<TypeParam>::create(param);
    if (input.sparse) {
      auto csr_structure =
        raft::make_device_compressed_structure_view<int, int, int>(this->x_indptr_dev.data(),
                                                                   this->x_indices_dev.data(),
                                                                   this->n_rows,
                                                                   this->n_cols,
                                                                   this->n_rows * this->n_cols);
      auto csr_view = raft::make_device_csr_matrix_view(this->x_data_dev.data(), csr_structure);
      KernelCache<TypeParam, raft::device_csr_matrix_view<TypeParam, int, int, int>> cache(
        this->handle,
        csr_view,
        this->n_rows,
        this->n_cols,
        this->n_ws,
        kernel,
        param.kernel,
        cache_size,
        C_SVC,
        tile_byte_limit,
        sparse_byte_limit);
      for (int i = 0; i < 2; i++) {
        // We calculate cache tile multiple times to see if cache lookup works
        cache.InitWorkingSet(this->ws_idx_dev.data());
        auto batch_descriptor =
          cache.InitFullTileBatching(cache.getKernelIndices(false), this->n_ws);
        while (cache.getNextBatchKernel(batch_descriptor)) {
          this->check(batch_descriptor.kernel_data,
                      batch_descriptor.nz_da_idx,
                      batch_descriptor.nnz_da,
                      batch_descriptor.batch_size,
                      batch_descriptor.offset);
        }
      }
    } else {
      auto dense_view =
        raft::make_device_strided_matrix_view<TypeParam, int, raft::layout_f_contiguous>(
          this->x_dev.data(), this->n_rows, this->n_cols, 0);
      KernelCache<TypeParam, raft::device_matrix_view<TypeParam, int, raft::layout_stride>> cache(
        this->handle,
        dense_view,
        this->n_rows,
        this->n_cols,
        this->n_ws,
        kernel,
        param.kernel,
        cache_size,
        C_SVC,
        tile_byte_limit,
        sparse_byte_limit);
      for (int i = 0; i < 2; i++) {
        // We calculate cache tile multiple times to see if cache lookup works
        cache.InitWorkingSet(this->ws_idx_dev.data());
        auto batch_descriptor =
          cache.InitFullTileBatching(cache.getKernelIndices(false), this->n_ws);
        while (cache.getNextBatchKernel(batch_descriptor)) {
          this->check(batch_descriptor.kernel_data,
                      batch_descriptor.nz_da_idx,
                      batch_descriptor.nnz_da,
                      batch_descriptor.batch_size,
                      batch_descriptor.offset);
        }
      }
    }
    delete kernel;
  }
}
// Same as SvcCacheEvalTest, but in EPSILON_SVR mode where the working set
// holds 2*n_rows duplicated indices (lower/upper dual variables).
TYPED_TEST_P(KernelCacheTest, SvrCacheEvalTest)
{
  KernelParams param{LINEAR, 3, 1, 0};
  float cache_size = sizeof(TypeParam) * this->n_rows * 32 / (1024.0 * 1024);
  // SVR duplicates the samples: ws indices >= n_rows refer to row idx - n_rows.
  this->n_ws        = 6;
  int ws_idx_svr[6] = {0, 5, 1, 4, 3, 7};
  raft::update_device(this->ws_idx_dev.data(), ws_idx_svr, 6, this->stream);
  std::vector<KernelCacheTestInput> data{{KernelCacheTestInput{false, false, false}},
                                         {KernelCacheTestInput{false, true, false}},
                                         {KernelCacheTestInput{true, false, false}},
                                         {KernelCacheTestInput{true, true, false}},
                                         {KernelCacheTestInput{true, false, true}},
                                         {KernelCacheTestInput{true, true, true}}};
  for (auto input : data) {
    SCOPED_TRACE(input);
    // Tiny limits force batching / the sparse compute path when requested.
    size_t tile_byte_limit   = input.batching ? (2 * this->n_ws * sizeof(TypeParam)) : (1 << 30);
    size_t sparse_byte_limit = input.sparse_compute ? 1 : (1 << 30);
    GramMatrixBase<TypeParam>* kernel = KernelFactory<TypeParam>::create(param);
    if (input.sparse) {
      auto csr_structure =
        raft::make_device_compressed_structure_view<int, int, int>(this->x_indptr_dev.data(),
                                                                   this->x_indices_dev.data(),
                                                                   this->n_rows,
                                                                   this->n_cols,
                                                                   this->n_rows * this->n_cols);
      auto csr_view = raft::make_device_csr_matrix_view(this->x_data_dev.data(), csr_structure);
      KernelCache<TypeParam, raft::device_csr_matrix_view<TypeParam, int, int, int>> cache(
        this->handle,
        csr_view,
        this->n_rows,
        this->n_cols,
        this->n_ws,
        kernel,
        param.kernel,
        cache_size,
        EPSILON_SVR,
        tile_byte_limit,
        sparse_byte_limit);
      for (int i = 0; i < 2; i++) {
        // We calculate cache tile multiple times to see if cache lookup works
        cache.InitWorkingSet(this->ws_idx_dev.data());
        auto batch_descriptor =
          cache.InitFullTileBatching(cache.getKernelIndices(false), this->n_ws);
        while (cache.getNextBatchKernel(batch_descriptor)) {
          this->check(batch_descriptor.kernel_data,
                      batch_descriptor.nz_da_idx,
                      batch_descriptor.nnz_da,
                      batch_descriptor.batch_size,
                      batch_descriptor.offset);
        }
      }
    } else {
      auto dense_view =
        raft::make_device_strided_matrix_view<TypeParam, int, raft::layout_f_contiguous>(
          this->x_dev.data(), this->n_rows, this->n_cols, 0);
      KernelCache<TypeParam, raft::device_matrix_view<TypeParam, int, raft::layout_stride>> cache(
        this->handle,
        dense_view,
        this->n_rows,
        this->n_cols,
        this->n_ws,
        kernel,
        param.kernel,
        cache_size,
        EPSILON_SVR,
        tile_byte_limit,
        sparse_byte_limit);
      for (int i = 0; i < 2; i++) {
        // We calculate cache tile multiple times to see if cache lookup works
        cache.InitWorkingSet(this->ws_idx_dev.data());
        auto batch_descriptor =
          cache.InitFullTileBatching(cache.getKernelIndices(false), this->n_ws);
        while (cache.getNextBatchKernel(batch_descriptor)) {
          this->check(batch_descriptor.kernel_data,
                      batch_descriptor.nz_da_idx,
                      batch_descriptor.nnz_da,
                      batch_descriptor.batch_size,
                      batch_descriptor.offset);
        }
      }
    }
    delete kernel;
  }
}
REGISTER_TYPED_TEST_CASE_P(KernelCacheTest, EvalTest, SvcCacheEvalTest, SvrCacheEvalTest);
INSTANTIATE_TYPED_TEST_CASE_P(My, KernelCacheTest, FloatTypes);
// Tests Results::Get(): extraction of dual coefficients, support-vector
// indices, the support matrix and the bias b from a solved dual problem.
template <typename math_t>
class GetResultsTest : public ::testing::Test {
 public:
  GetResultsTest() : stream(handle.get_stream()) {}

 protected:
  // Releases the dense support matrix allocated by Results::Get (it is
  // allocated with the current device memory resource, not RAII-owned).
  void FreeDenseSupport()
  {
    rmm::mr::device_memory_resource* rmm_alloc = rmm::mr::get_current_device_resource();
    auto stream                                = this->handle.get_stream();
    rmm_alloc->deallocate(support_matrix.data, n_coefs * n_cols * sizeof(math_t), stream);
    support_matrix.data = nullptr;
  }
  void TestResults()
  {
    auto stream = this->handle.get_stream();
    rmm::device_uvector<math_t> x_dev(n_rows * n_cols, stream);
    raft::update_device(x_dev.data(), x_host, n_rows * n_cols, stream);
    rmm::device_uvector<math_t> f_dev(n_rows, stream);
    raft::update_device(f_dev.data(), f_host, n_rows, stream);
    rmm::device_uvector<math_t> y_dev(n_rows, stream);
    raft::update_device(y_dev.data(), y_host, n_rows, stream);
    rmm::device_uvector<math_t> alpha_dev(n_rows, stream);
    raft::update_device(alpha_dev.data(), alpha_host, n_rows, stream);
    rmm::device_uvector<math_t> C_dev(n_rows, stream);
    init_C(C, C_dev.data(), n_rows, stream);
    auto dense_view = raft::make_device_strided_matrix_view<math_t, int, raft::layout_f_contiguous>(
      x_dev.data(), n_rows, n_cols, 0);
    Results<math_t, raft::device_matrix_view<math_t, int, raft::layout_stride>> res(
      handle, dense_view, n_rows, n_cols, y_dev.data(), C_dev.data(), C_SVC);
    res.Get(alpha_dev.data(), f_dev.data(), &dual_coefs, &n_coefs, &idx, &support_matrix, &b);
    // Rows with alpha > 0 become support vectors (7 of 10 here).
    ASSERT_EQ(n_coefs, 7);
    math_t dual_coefs_exp[] = {-0.1, -0.2, -1.5, 0.2, 0.4, 1.5, 1.5};
    EXPECT_TRUE(devArrMatchHost(
      dual_coefs_exp, dual_coefs, n_coefs, MLCommon::CompareApprox<math_t>(1e-6f), stream));
    int idx_exp[] = {2, 3, 4, 6, 7, 8, 9};
    EXPECT_TRUE(devArrMatchHost(idx_exp, idx, n_coefs, MLCommon::Compare<int>(), stream));
    math_t x_support_exp[] = {3, 4, 5, 7, 8, 9, 10, 13, 14, 15, 17, 18, 19, 20};
    EXPECT_TRUE(devArrMatchHost(x_support_exp,
                                support_matrix.data,
                                n_coefs * n_cols,
                                MLCommon::CompareApprox<math_t>(1e-6f),
                                stream));
    EXPECT_FLOAT_EQ(b, -6.25f);
    // Modify the test by setting all SVs bound, then b is calculated differently
    math_t alpha_host2[10] = {0, 0, 1.5, 1.5, 1.5, 0, 1.5, 1.5, 1.5, 1.5};
    raft::update_device(alpha_dev.data(), alpha_host2, n_rows, stream);
    FreeDenseSupport();
    res.Get(alpha_dev.data(), f_dev.data(), &dual_coefs, &n_coefs, &idx, &support_matrix, &b);
    FreeDenseSupport();
    EXPECT_FLOAT_EQ(b, -5.5f);
  }
  raft::handle_t handle;
  cudaStream_t stream = 0;
  int n_rows = 10;
  int n_cols = 2;
  math_t x_host[20] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
  math_t f_host[10] = {1, 3, 10, 4, 2, 8, 6, 5, 9, 7};
  math_t y_host[10] = {-1, -1, -1, -1, -1, 1, 1, 1, 1, 1};
  math_t alpha_host[10] = {0, 0, 0.1, 0.2, 1.5, 0, 0.2, 0.4, 1.5, 1.5};
  //                       l  l  l/u  l/u  u    u  l/u  l/u  l    l
  math_t C = 1.5;
  // Outputs of Results::Get (device pointers owned via FreeDenseSupport).
  math_t* dual_coefs;
  int n_coefs;
  int* idx;
  SupportStorage<math_t> support_matrix;
  math_t b;
};

TYPED_TEST_CASE(GetResultsTest, FloatTypes);

TYPED_TEST(GetResultsTest, Results) { this->TestResults(); }
// Build the SvmParameter configuration shared by the solver tests below.
SvmParameter getDefaultSvmParameter()
{
  SvmParameter p;
  p.C              = 1;
  p.tol            = 0.001;
  p.cache_size     = 200;
  p.max_iter       = -1;
  p.nochange_steps = 1000;
  p.verbosity      = CUML_LEVEL_INFO;
  p.epsilon        = 0.1;
  p.svmType        = C_SVC;
  return p;
}
// Tests SmoSolver::UpdateF: f += K * delta_alpha for a 6x2 kernel tile.
// NOTE(review): although templated on math_t, the fixture and solver are
// hardwired to float; only the comparison tolerance uses math_t — confirm
// whether the double instantiation is intentional.
template <typename math_t>
class SmoUpdateTest : public ::testing::Test {
 public:
  SmoUpdateTest()
    : stream(handle.get_stream()),
      n_rows(6),
      n_ws(2),
      f_dev(n_rows, stream),
      kernel_dev(n_rows * n_ws, stream),
      delta_alpha_dev(n_ws, stream)
  {
    RAFT_CUDA_TRY(cudaMemsetAsync(f_dev.data(), 0, f_dev.size() * sizeof(float), stream));
    raft::update_device(kernel_dev.data(), kernel_host, n_ws * n_rows, stream);
    raft::update_device(delta_alpha_dev.data(), delta_alpha_host, n_ws, stream);
  }

 protected:
  void RunTest()
  {
    SvmParameter param = getDefaultSvmParameter();
    SmoSolver<float> smo(handle, param, LINEAR, nullptr);
    smo.UpdateF(f_dev.data(), n_rows, delta_alpha_dev.data(), n_ws, kernel_dev.data());
    float f_host_expected[] = {0.1f, 7.4505806e-9f, 0.3f, 0.2f, 0.5f, 0.4f};
    devArrMatchHost(f_host_expected, f_dev.data(), n_rows, MLCommon::CompareApprox<math_t>(1e-6));
  }
  raft::handle_t handle;
  cudaStream_t stream = 0;
  int n_rows;
  int n_ws;
  rmm::device_uvector<float> kernel_dev;
  rmm::device_uvector<float> f_dev;
  rmm::device_uvector<float> delta_alpha_dev;
  float kernel_host[12] = {3, 5, 4, 6, 5, 7, 4, 5, 7, 8, 10, 11};
  float delta_alpha_host[2] = {-0.1f, 0.1f};
};

TYPED_TEST_CASE(SmoUpdateTest, FloatTypes);

TYPED_TEST(SmoUpdateTest, Update) { this->RunTest(); }
// Single-iteration test of the SmoBlockSolve kernel on a 4-sample problem
// where the working set covers all samples.
template <typename math_t>
class SmoBlockSolverTest : public ::testing::Test {
 public:
  SmoBlockSolverTest()
    : stream(handle.get_stream()),
      n_rows(4),
      n_cols(2),
      n_ws(4),
      ws_idx_dev(n_ws, stream),
      y_dev(n_rows, stream),
      C_dev(n_rows, stream),
      f_dev(n_rows, stream),
      alpha_dev(n_rows, stream),
      delta_alpha_dev(n_ws, stream),
      kernel_dev(n_ws * n_rows, stream),
      return_buff_dev(2, stream)
  {
    RAFT_CUDA_TRY(cudaMemsetAsync(alpha_dev.data(), 0, alpha_dev.size() * sizeof(math_t), stream));
    RAFT_CUDA_TRY(
      cudaMemsetAsync(delta_alpha_dev.data(), 0, delta_alpha_dev.size() * sizeof(math_t), stream));
    init_C(C, C_dev.data(), n_rows, stream);
    raft::update_device(ws_idx_dev.data(), ws_idx_host, n_ws, stream);
    raft::update_device(y_dev.data(), y_host, n_rows, stream);
    raft::update_device(f_dev.data(), f_host, n_rows, stream);
    raft::update_device(kernel_dev.data(), kernel_host, n_ws * n_rows, stream);
  }

 public:  // because of the device lambda
  // Run one block-solver iteration and verify alpha, delta_alpha and the
  // returned (diff, iteration count) buffer.
  void testBlockSolve()
  {
    SmoBlockSolve<math_t, 1024><<<1, n_ws, 0, stream>>>(y_dev.data(),
                                                        n_rows,
                                                        alpha_dev.data(),
                                                        n_ws,
                                                        delta_alpha_dev.data(),
                                                        f_dev.data(),
                                                        kernel_dev.data(),
                                                        ws_idx_dev.data(),
                                                        C_dev.data(),
                                                        1e-3f,
                                                        return_buff_dev.data(),
                                                        1);
    RAFT_CUDA_TRY(cudaPeekAtLastError());
    math_t return_buff_exp[2] = {0.2, 1};
    devArrMatchHost(
      return_buff_exp, return_buff_dev.data(), 2, MLCommon::CompareApprox<math_t>(1e-6), stream);
    // delta_alpha should equal y * alpha elementwise (one pass from alpha=0).
    rmm::device_uvector<math_t> delta_alpha_calc(n_rows, stream);
    raft::linalg::binaryOp(
      delta_alpha_calc.data(),
      y_dev.data(),
      alpha_dev.data(),
      n_rows,
      [] __device__(math_t a, math_t b) { return a * b; },
      stream);
    MLCommon::devArrMatch(delta_alpha_dev.data(),
                          delta_alpha_calc.data(),
                          n_rows,
                          MLCommon::CompareApprox<math_t>(1e-6),
                          stream);
    math_t alpha_expected[] = {0, 0.1f, 0.1f, 0};
    MLCommon::devArrMatch(
      alpha_expected, alpha_dev.data(), n_rows, MLCommon::CompareApprox<math_t>(1e-6), stream);
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  int n_rows;
  int n_cols;
  int n_ws;
  rmm::device_uvector<int> ws_idx_dev;
  rmm::device_uvector<math_t> y_dev;
  rmm::device_uvector<math_t> f_dev;
  rmm::device_uvector<math_t> C_dev;
  rmm::device_uvector<math_t> alpha_dev;
  rmm::device_uvector<math_t> delta_alpha_dev;
  rmm::device_uvector<math_t> kernel_dev;
  rmm::device_uvector<math_t> return_buff_dev;
  int ws_idx_host[4] = {0, 1, 2, 3};
  math_t y_host[4]   = {1, 1, -1, -1};
  math_t C           = 1.5;
  math_t f_host[4]   = {0.4, 0.3, 0.5, 0.1};
  math_t kernel_host[16] = {26, 32, 38, 44, 32, 40, 48, 56, 38, 48, 58, 68, 44, 56, 68, 80};
};

TYPED_TEST_CASE(SmoBlockSolverTest, FloatTypes);

// test a single iteration of the block solver
TYPED_TEST(SmoBlockSolverTest, SolveSingleTest) { this->testBlockSolve(); }
// Input parameters for an SMO solver test case.
template <typename math_t>
struct smoInput {
  math_t C;              // penalty parameter
  math_t tol;            // solver tolerance
  KernelParams kernel_params;
  int max_iter;          // outer iteration limit
  int max_inner_iter;    // inner (block solver) iteration limit
};
// Input parameters for an SVC fit/predict test case.
template <typename math_t>
struct svcInput {
  math_t C;
  math_t tol;
  KernelParams kernel_params;
  int n_rows;
  int n_cols;
  math_t* x_dev;   // device feature matrix (n_rows x n_cols)
  math_t* y_dev;   // device labels
  bool predict;    // whether to also run prediction
};
// Expected solver output used by checkResults (empty vectors mean
// "do not check this field").
template <typename math_t>
struct smoOutput {
  int n_support;                  // expected number of support vectors
  std::vector<math_t> dual_coefs;
  math_t b;                       // expected bias
  std::vector<math_t> w;          // expected primal weights (direction only)
  std::vector<math_t> x_support;  // expected support vectors (column major)
  std::vector<int> idx;           // expected support-vector row indices
};
// If we want to compare decision function values too
// (same fields as smoOutput plus the expected decision-function values).
template <typename math_t>
struct smoOutput2 {  //: smoOutput<math_t> {
  int n_support;
  std::vector<math_t> dual_coefs;
  math_t b;
  std::vector<math_t> w;
  std::vector<math_t> x_support;
  std::vector<int> idx;
  std::vector<math_t> decision_function;
};
/**
 * @brief Project a smoOutput2 down to a smoOutput (drops decision_function).
 *
 * Takes the argument by const reference to avoid copying its five vectors
 * twice (once into the parameter, once into the result).
 */
template <typename math_t>
smoOutput<math_t> toSmoOutput(const smoOutput2<math_t>& x)
{
  return smoOutput<math_t>{x.n_support, x.dual_coefs, x.b, x.w, x.x_support, x.idx};
}
// Tolerances for checkResults.
template <typename math_t>
struct svmTol {
  math_t b;   // absolute tolerance for the bias
  math_t cs;  // minimum cosine similarity between computed and expected w
  int n_sv;   // allowed deviation in support-vector count (-1: derive from size)
};
/**
 * @brief Compare a trained SVM model against the expected SMO output.
 *
 * Checks (each only if the corresponding expected field is non-empty):
 * support-vector count, dual coefficients, sum(alpha_i * y_i) == 0,
 * support vectors, support indices, the direction of the primal weight
 * vector w (via cosine similarity) and the bias b.
 *
 * @param model    trained model (device pointers; not modified)
 * @param expected expected values; empty vectors disable individual checks
 * @param stream   CUDA stream the model's data is valid on
 * @param tol      tolerances; tol.n_sv == -1 derives the SV-count slack
 */
template <typename math_t>
void checkResults(SvmModel<math_t> model,
                  smoOutput<math_t> expected,
                  cudaStream_t stream,
                  svmTol<math_t> tol = svmTol<math_t>{0.001, 0.99999, -1})
{
  math_t* dcoef_exp     = expected.dual_coefs.size() > 0 ? expected.dual_coefs.data() : nullptr;
  math_t* w_exp         = expected.w.size() > 0 ? expected.w.data() : nullptr;
  math_t* x_support_exp = expected.x_support.size() > 0 ? expected.x_support.data() : nullptr;
  int* idx_exp          = expected.idx.size() > 0 ? expected.idx.data() : nullptr;

  math_t ay_tol = 1e-5;

  if (tol.n_sv == -1) {
    tol.n_sv = expected.n_support * 0.01;
    if (expected.n_support > 10 && tol.n_sv < 3) tol.n_sv = 3;
  }
  EXPECT_LE(abs(model.n_support - expected.n_support), tol.n_sv);
  if (dcoef_exp) {
    EXPECT_TRUE(devArrMatchHost(
      dcoef_exp, model.dual_coefs, model.n_support, MLCommon::CompareApprox<math_t>(1e-3f)));
  }
  // Host copy of the dual coefficients (std::vector instead of raw new[]
  // so the buffer is released even if a raft call throws).
  std::vector<math_t> dual_coefs_host(model.n_support);
  raft::update_host(dual_coefs_host.data(), model.dual_coefs, model.n_support, stream);
  raft::interruptible::synchronize(stream);
  math_t ay = 0;
  for (int i = 0; i < model.n_support; i++) {
    ay += dual_coefs_host[i];
  }
  // Test if \sum \alpha_i y_i = 0
  EXPECT_LT(raft::abs(ay), ay_tol);

  if (x_support_exp) {
    EXPECT_TRUE(model.support_matrix.data != nullptr && model.support_matrix.nnz == -1);
    EXPECT_TRUE(devArrMatchHost(x_support_exp,
                                model.support_matrix.data,
                                model.n_support * model.n_cols,
                                MLCommon::CompareApprox<math_t>(1e-6f),
                                stream));
  }

  if (idx_exp) {
    EXPECT_TRUE(devArrMatchHost(
      idx_exp, model.support_idx, model.n_support, MLCommon::Compare<int>(), stream));
  }

  // Host copy of the (dense) support matrix, column major.
  std::vector<math_t> x_support_host(model.n_support * model.n_cols);
  if (model.n_support * model.n_cols > 0) {
    EXPECT_TRUE(model.support_matrix.data != nullptr && model.support_matrix.nnz == -1);
    raft::update_host(
      x_support_host.data(), model.support_matrix.data, model.n_support * model.n_cols, stream);
  }
  raft::interruptible::synchronize(stream);

  if (w_exp) {
    // w = sum_i dual_coef_i * x_support_i, then compare directions only.
    std::vector<math_t> w(model.n_cols, 0);
    for (int i = 0; i < model.n_support; i++) {
      for (int j = 0; j < model.n_cols; j++)
        w[j] += x_support_host[i + model.n_support * j] * dual_coefs_host[i];
    }
    // Calculate the cosine similarity between w and w_exp
    math_t abs_w     = 0;
    math_t abs_w_exp = 0;
    math_t cs        = 0;
    for (int i = 0; i < model.n_cols; i++) {
      abs_w += w[i] * w[i];
      abs_w_exp += w_exp[i] * w_exp[i];
      cs += w[i] * w_exp[i];
    }
    cs /= sqrt(abs_w * abs_w_exp);
    EXPECT_GT(cs, tol.cs);
  }

  EXPECT_LT(raft::abs(model.b - expected.b), tol.b);
}
template <typename math_t>
class SmoSolverTest : public ::testing::Test {
public:
SmoSolverTest()
: stream(handle.get_stream()),
x_dev(n_rows * n_cols, stream),
x_dev_indptr(n_rows + 1, stream),
x_dev_indices(n_nnz, stream),
x_dev_data(n_nnz, stream),
ws_idx_dev(n_ws, stream),
y_dev(n_rows, stream),
C_dev(n_rows, stream),
y_pred(n_rows, stream),
f_dev(n_rows, stream),
alpha_dev(n_rows, stream),
delta_alpha_dev(n_ws, stream),
kernel_dev(n_ws * n_rows, stream),
return_buff_dev(2, stream),
sample_weights_dev(n_rows, stream)
{
RAFT_CUDA_TRY(cudaMemsetAsync(alpha_dev.data(), 0, alpha_dev.size() * sizeof(math_t), stream));
RAFT_CUDA_TRY(
cudaMemsetAsync(delta_alpha_dev.data(), 0, delta_alpha_dev.size() * sizeof(math_t), stream));
}
protected:
void SetUp() override
{
raft::linalg::range(sample_weights_dev.data(), 1, n_rows + 1, stream);
raft::update_device(x_dev.data(), x_host, n_rows * n_cols, stream);
raft::update_device(x_dev_indptr.data(), x_host_indptr, n_rows + 1, stream);
raft::update_device(x_dev_indices.data(), x_host_indices, n_nnz, stream);
raft::update_device(x_dev_data.data(), x_host_data, n_nnz, stream);
raft::update_device(ws_idx_dev.data(), ws_idx_host, n_ws, stream);
raft::update_device(y_dev.data(), y_host, n_rows, stream);
init_C(C, C_dev.data(), n_rows, stream);
raft::update_device(f_dev.data(), f_host, n_rows, stream);
raft::update_device(kernel_dev.data(), kernel_host, n_ws * n_rows, stream);
RAFT_CUDA_TRY(cudaMemsetAsync(delta_alpha_dev.data(), 0, n_ws * sizeof(math_t), stream));
kernel = std::make_unique<GramMatrixBase<math_t>>();
}
public:
void blockSolveTest()
{
SmoBlockSolve<math_t, 1024><<<1, n_ws, 0, stream>>>(y_dev.data(),
n_rows,
alpha_dev.data(),
n_ws,
delta_alpha_dev.data(),
f_dev.data(),
kernel_dev.data(),
ws_idx_dev.data(),
C_dev.data(),
1e-3,
return_buff_dev.data());
RAFT_CUDA_TRY(cudaPeekAtLastError());
math_t return_buff[2];
raft::update_host(return_buff, return_buff_dev.data(), 2, stream);
handle.sync_stream(stream);
EXPECT_FLOAT_EQ(return_buff[0], 2.0f) << return_buff[0];
EXPECT_LT(return_buff[1], 100) << return_buff[1];
// check results won't work, because it expects that GetResults was called
rmm::device_uvector<math_t> delta_alpha_calc(n_rows, stream);
raft::linalg::binaryOp(
delta_alpha_calc.data(),
y_dev.data(),
alpha_dev.data(),
n_rows,
[] __device__(math_t a, math_t b) { return a * b; },
stream);
MLCommon::devArrMatch(delta_alpha_dev.data(),
delta_alpha_calc.data(),
n_rows,
MLCommon::CompareApprox<math_t>(1e-6),
stream);
math_t alpha_expected[] = {0.6f, 0, 1, 1, 0, 0.6f};
// for C=10: {0.25f, 0, 2.25f, 3.75f, 0, 1.75f};
MLCommon::devArrMatch(
alpha_expected, alpha_dev.data(), n_rows, MLCommon::CompareApprox<math_t>(1e-6), stream);
math_t host_alpha[6];
raft::update_host(host_alpha, alpha_dev.data(), n_rows, stream);
math_t w[] = {0, 0};
math_t ay = 0;
for (int i = 0; i < n_rows; i++) {
EXPECT_FLOAT_EQ(host_alpha[i], alpha_expected[i]) << "alpha " << i;
w[0] += x_host[i] * host_alpha[i] * y_host[i];
w[1] += x_host[i + n_rows] * host_alpha[i] * y_host[i];
ay += host_alpha[i] * y_host[i];
}
EXPECT_FLOAT_EQ(ay, 0.0);
EXPECT_FLOAT_EQ(w[0], -0.4);
EXPECT_FLOAT_EQ(w[1], 1.2);
// for C=10
// EXPECT_FLOAT_EQ(w[0], -2.0);
// EXPECT_FLOAT_EQ(w[1], 2.0);
}
void svrBlockSolveTest()
{
auto stream = this->handle.get_stream();
int n_ws = 4;
int n_rows = 2;
// int n_cols = 1;
// math_t x[2] = {1, 2};
// yr = {2, 3}
math_t f[4] = {-1.9, -2.9, -2.1 - 3.1};
math_t kernel[4] = {1, 2, 2, 4};
// ws_idx is defined as {0, 1, 2, 3}
int kColIdx[4] = {0, 1, 0, 1};
rmm::device_uvector<int> kColIdx_dev(4, stream);
raft::update_device(f_dev.data(), f, 4, stream);
raft::update_device(kernel_dev.data(), kernel, 4, stream);
raft::update_device(kColIdx_dev.data(), kColIdx, 4, stream);
SmoBlockSolve<math_t, 1024><<<1, n_ws, 0, stream>>>(y_dev.data(),
2 * n_rows,
alpha_dev.data(),
n_ws,
delta_alpha_dev.data(),
f_dev.data(),
kernel_dev.data(),
ws_idx_dev.data(),
C_dev.data(),
1e-3,
return_buff_dev.data(),
10,
EPSILON_SVR);
RAFT_CUDA_TRY(cudaPeekAtLastError());
math_t return_buff[2];
raft::update_host(return_buff, return_buff_dev.data(), 2, stream);
handle.sync_stream(stream);
EXPECT_LT(return_buff[1], 10) << return_buff[1];
math_t alpha_exp[] = {0, 0.8, 0.8, 0};
MLCommon::devArrMatch(
alpha_exp, alpha_dev.data(), 4, MLCommon::CompareApprox<math_t>(1e-6), stream);
math_t dalpha_exp[] = {-0.8, 0.8};
MLCommon::devArrMatch(
dalpha_exp, delta_alpha_dev.data(), 2, MLCommon::CompareApprox<math_t>(1e-6), stream);
}
protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  // Kernel (Gram matrix) evaluator shared by the tests.
  std::unique_ptr<GramMatrixBase<math_t>> kernel;
  int n_rows = 6;         // number of training vectors
  const int n_cols = 2;   // number of features
  int n_nnz = 12;         // non-zeros in the CSR representation of x
  int n_ws = 6;           // working-set size

  // Device buffers; sized and filled in SetUp (outside this view).
  rmm::device_uvector<math_t> x_dev;          // dense input, column major
  rmm::device_uvector<int> x_dev_indptr;      // CSR row offsets
  rmm::device_uvector<int> x_dev_indices;     // CSR column indices
  rmm::device_uvector<math_t> x_dev_data;     // CSR values
  rmm::device_uvector<int> ws_idx_dev;        // working-set indices
  rmm::device_uvector<math_t> y_dev;          // labels
  rmm::device_uvector<math_t> C_dev;          // per-sample penalty
  rmm::device_uvector<math_t> y_pred;
  rmm::device_uvector<math_t> f_dev;          // gradient / optimality vector
  rmm::device_uvector<math_t> alpha_dev;      // dual coefficients
  rmm::device_uvector<math_t> delta_alpha_dev;
  rmm::device_uvector<math_t> kernel_dev;     // kernel tile
  rmm::device_uvector<math_t> return_buff_dev;
  rmm::device_uvector<math_t> sample_weights_dev;

  // Host-side reference data. x is stored column major (6 x 2).
  math_t x_host[12] = {1, 2, 1, 2, 1, 2, 1, 1, 2, 2, 3, 3};
  // csr representation
  int x_host_indptr[7] = {0, 2, 4, 6, 8, 10, 12};
  int x_host_indices[12] = {0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1};
  math_t x_host_data[12] = {1, 1, 2, 1, 1, 2, 2, 2, 1, 3, 2, 3};
  int ws_idx_host[6] = {0, 1, 2, 3, 4, 5};
  math_t y_host[6] = {-1, -1, 1, -1, 1, 1};
  math_t C = 1;
  math_t f_host[6] = {1, 1, -1, 1, -1, -1};
  // Precomputed 6x6 linear kernel tile for x_host.
  math_t kernel_host[36] = {2, 3, 3, 4, 4, 5, 3, 5, 4, 6, 5, 7, 3, 4, 5, 6, 7, 8,
                            4, 6, 6, 8, 8, 10, 4, 5, 7, 8, 10, 11, 5, 7, 8, 10, 11, 13};
  int n_coefs;
  math_t b;
};
// Instantiate the SMO solver tests for every floating point type in FloatTypes.
TYPED_TEST_CASE(SmoSolverTest, FloatTypes);

TYPED_TEST(SmoSolverTest, BlockSolveTest) { this->blockSolveTest(); }
TYPED_TEST(SmoSolverTest, SvrBlockSolveTest) { this->svrBlockSolveTest(); }
// Map a KernelParams::kernel enum value to a human-readable name; used to
// label SCOPED_TRACE output in the parameterized tests.
std::string kernelName(KernelParams k)
{
  const std::vector<std::string> kernel_names{"linear", "poly", "rbf", "tanh"};
  return kernel_names[k.kernel];
}
// Pretty-print an smoInput configuration (kernel, C, tolerance) for traces.
template <typename math_t>
std::ostream& operator<<(std::ostream& os, const smoInput<math_t>& b)
{
  return os << kernelName(b.kernel_params) << ", C=" << b.C << ", tol=" << b.tol;
}
// End-to-end SMO solve on the 6x2 reference dataset, checked against
// precomputed solutions for linear and polynomial kernels, for both dense and
// CSR input paths.
TYPED_TEST(SmoSolverTest, SmoSolveTest)
{
  auto stream = this->handle.get_stream();
  std::vector<std::pair<smoInput<TypeParam>, smoOutput<TypeParam>>> data{
    {smoInput<TypeParam>{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 100, 1},
     smoOutput<TypeParam>{4,                         // n_sv
                          {-0.6, 1, -1, 0.6},        // dual_coefs
                          -1.8,                      // b
                          {-0.4, 1.2},               // w
                          {1, 1, 2, 2, 1, 2, 2, 3},  // x_support
                          {0, 2, 3, 5}}},            // support idx
    {smoInput<TypeParam>{10, 0.001, KernelParams{LINEAR, 3, 1, 0}, 100, 1},
     smoOutput<TypeParam>{3, {-2, 4, -2, 0, 0}, -1.0, {-2, 2}, {}, {}}},
    {smoInput<TypeParam>{1, 1e-6, KernelParams{POLYNOMIAL, 3, 1, 1}, 100, 1},
     smoOutput<TypeParam>{
       3, {-0.02556136, 0.03979708, -0.01423571}, -1.07739149, {}, {1, 1, 2, 1, 2, 2}, {0, 2, 3}}}};
  for (auto d : data) {
    auto p   = d.first;
    auto exp = d.second;
    SCOPED_TRACE(p);
    SvmParameter param = getDefaultSvmParameter();
    param.C            = p.C;
    param.tol          = p.tol;
    // param.max_iter = p.max_iter;
    GramMatrixBase<TypeParam>* kernel = KernelFactory<TypeParam>::create(p.kernel_params);
    SmoSolver<TypeParam> smo(this->handle, param, p.kernel_params.kernel, kernel);
    // Dense input path.
    {
      SvmModel<TypeParam> model1{0, this->n_cols, 0, nullptr, {}, nullptr, 0, nullptr};
      auto dense_view =
        raft::make_device_strided_matrix_view<TypeParam, int, raft::layout_f_contiguous>(
          this->x_dev.data(), this->n_rows, this->n_cols, 0);
      smo.Solve(dense_view,
                this->n_rows,
                this->n_cols,
                this->y_dev.data(),
                nullptr,
                &model1.dual_coefs,
                &model1.n_support,
                &model1.support_matrix,
                &model1.support_idx,
                &model1.b,
                p.max_iter,
                p.max_inner_iter);
      checkResults(model1, exp, stream);
      svmFreeBuffers(this->handle, model1);
    }
    // also check sparse input
    {
      SvmModel<TypeParam> model2{0, this->n_cols, 0, nullptr, {}, nullptr, 0, nullptr};
      auto csr_structure =
        raft::make_device_compressed_structure_view<int, int, int>(this->x_dev_indptr.data(),
                                                                   this->x_dev_indices.data(),
                                                                   this->n_rows,
                                                                   this->n_cols,
                                                                   this->n_nnz);
      auto csr_view = raft::make_device_csr_matrix_view(this->x_dev_data.data(), csr_structure);
      smo.Solve(csr_view,
                this->n_rows,
                this->n_cols,
                this->y_dev.data(),
                nullptr,
                &model2.dual_coefs,
                &model2.n_support,
                &model2.support_matrix,
                &model2.support_idx,
                &model2.b,
                p.max_iter,
                p.max_inner_iter);
      checkResults(model2, exp, stream);
      svmFreeBuffers(this->handle, model2);
    }
  }
}
// Full SVC fit / predict / decisionFunction round trip on the reference
// dataset for linear, polynomial, tanh and RBF kernels, including one case
// with per-sample weights (marked by C == 0 in the input).
TYPED_TEST(SmoSolverTest, SvcTest)
{
  auto stream = this->handle.get_stream();
  std::vector<std::pair<svcInput<TypeParam>, smoOutput2<TypeParam>>> data{
    {svcInput<TypeParam>{1,
                         0.001,
                         KernelParams{LINEAR, 3, 1, 0},
                         this->n_rows,
                         this->n_cols,
                         this->x_dev.data(),
                         this->y_dev.data(),
                         true},
     smoOutput2<TypeParam>{4,
                           {-0.6, 1, -1, 0.6},
                           -1.8f,
                           {-0.4, 1.2},
                           {1, 1, 2, 2, 1, 2, 2, 3},
                           {0, 2, 3, 5},
                           {-1.0, -1.4, 0.2, -0.2, 1.4, 1.0}}},
    {// C == 0 marks a special test case with sample weights
     svcInput<TypeParam>{0,
                         0.001,
                         KernelParams{LINEAR, 3, 1, 0},
                         this->n_rows,
                         this->n_cols,
                         this->x_dev.data(),
                         this->y_dev.data(),
                         true},
     smoOutput2<TypeParam>{4,
                           {},
                           -1.0f,
                           {-2, 2},
                           {1, 1, 2, 2, 1, 2, 2, 3},
                           {0, 2, 3, 5},
                           {-1.0, -3.0, 1.0, -1.0, 3.0, 1.0}}},
    {svcInput<TypeParam>{1,
                         1e-6,
                         KernelParams{POLYNOMIAL, 3, 1, 0},
                         this->n_rows,
                         this->n_cols,
                         this->x_dev.data(),
                         this->y_dev.data(),
                         true},
     smoOutput2<TypeParam>{
       3,
       {-0.03900895, 0.05904058, -0.02003163},
       -0.99999959,
       {},
       {1, 1, 2, 1, 2, 2},
       {0, 2, 3},
       {-0.9996812, -2.60106647, 0.9998406, -1.0001594, 6.49681105, 4.31951232}}},
    {svcInput<TypeParam>{10,
                         1e-6,
                         KernelParams{TANH, 3, 0.3, 1.0},
                         this->n_rows,
                         this->n_cols,
                         this->x_dev.data(),
                         this->y_dev.data(),
                         false},
     smoOutput2<TypeParam>{
       6,
       {-10., -10., 10., -10., 10., 10.},
       -0.3927505,
       {},
       {1, 2, 1, 2, 1, 2, 1, 1, 2, 2, 3, 3},
       {0, 1, 2, 3, 4, 5},
       {0.25670694, -0.16451539, 0.16451427, -0.1568888, -0.04496891, -0.2387212}}},
    {svcInput<TypeParam>{1,
                         1.0e-6,
                         KernelParams{RBF, 0, 0.15, 0},
                         this->n_rows,
                         this->n_cols,
                         this->x_dev.data(),
                         this->y_dev.data(),
                         true},
     smoOutput2<TypeParam>{
       6,
       {-1., -1, 1., -1., 1, 1.},
       0,
       {},
       {1, 2, 1, 2, 1, 2, 1, 1, 2, 2, 3, 3},
       {0, 1, 2, 3, 4, 5},
       {-0.71964003, -0.95941954, 0.13929202, -0.13929202, 0.95941954, 0.71964003}}}};
  for (auto d : data) {
    auto p   = d.first;
    auto exp = d.second;
    SCOPED_TRACE(kernelName(p.kernel_params));
    TypeParam* sample_weights = nullptr;
    if (p.C == 0) {
      // Sentinel: restore a valid C and enable per-sample weights instead.
      p.C            = 1;
      sample_weights = this->sample_weights_dev.data();
    }
    SVC<TypeParam> svc(this->handle, p.C, p.tol, p.kernel_params);
    svc.fit(p.x_dev, p.n_rows, p.n_cols, p.y_dev, sample_weights);
    checkResults(svc.model, toSmoOutput(exp), stream);
    rmm::device_uvector<TypeParam> y_pred(p.n_rows, stream);
    if (p.predict) {
      // Predictions on the training set should reproduce the labels exactly.
      svc.predict(p.x_dev, p.n_rows, p.n_cols, y_pred.data());
      EXPECT_TRUE(MLCommon::devArrMatch(this->y_dev.data(),
                                        y_pred.data(),
                                        p.n_rows,
                                        MLCommon::CompareApprox<TypeParam>(1e-6f),
                                        stream));
    }
    if (exp.decision_function.size() > 0) {
      svc.decisionFunction(p.x_dev, p.n_rows, p.n_cols, y_pred.data());
      EXPECT_TRUE(devArrMatchHost(exp.decision_function.data(),
                                  y_pred.data(),
                                  p.n_rows,
                                  MLCommon::CompareApprox<TypeParam>(1e-3f),
                                  stream));
    }
  }
}
// Parameters for tests that train on synthetic (make_blobs) data.
struct blobInput {
  double C;                    // SVM regularization parameter
  double tol;                  // solver tolerance
  KernelParams kernel_params;  // kernel type and its hyper-parameters
  int n_rows;                  // number of samples
  int n_cols;                  // number of features
};
// Pretty-print a blobInput configuration (kernel and problem size) for traces.
std::ostream& operator<<(std::ostream& os, const blobInput& b)
{
  return os << kernelName(b.kernel_params) << " " << b.n_rows << "x" << b.n_cols;
}
// until there is progress with Issue #935
//
// Element-wise copy-with-cast: out[i] = (outType)in[i] for i in [0, n).
// One thread per element; the launch must supply ceildiv(n, TPB) blocks.
// The input pointer is const-qualified: the kernel never writes through it.
template <typename inType, typename outType>
__global__ void cast(outType* out, int n, const inType* in)
{
  int tid = threadIdx.x + blockIdx.x * blockDim.x;
  if (tid < n) out[tid] = in[tid];
}
// To have the same input data for both single and double precision,
// we generate the blobs in single precision only, and cast to dp if needed.
//
// The blobs are produced row major by Datasets::make_blobs and transposed
// into x (column major, n_rows x n_cols); integer labels are cast to math_t.
// Cleanup: removed a leftover cudaMemGetInfo call whose results (free1,
// total) were never read, and a redundant nested scope.
template <typename math_t>
void make_blobs(const raft::handle_t& handle,
                math_t* x,
                math_t* y,
                int n_rows,
                int n_cols,
                int n_cluster,
                float* centers = nullptr)
{
  auto stream = handle.get_stream();
  rmm::device_uvector<float> x_float(n_rows * n_cols, stream);
  rmm::device_uvector<int> y_int(n_rows, stream);
  Datasets::make_blobs(handle,
                       x_float.data(),
                       y_int.data(),
                       n_rows,
                       n_cols,
                       n_cluster,
                       true,
                       centers,
                       (float*)nullptr,
                       1.0f,
                       true,
                       -2.0f,
                       2.0f,
                       0);
  int TPB = 256;
  if (std::is_same<float, math_t>::value) {
    // Single precision: transpose directly into the output buffer.
    raft::linalg::transpose(handle, x_float.data(), (float*)x, n_cols, n_rows, stream);
  } else {
    // Other precisions: cast first, then transpose the converted copy.
    rmm::device_uvector<math_t> x2(n_rows * n_cols, stream);
    cast<<<raft::ceildiv(n_rows * n_cols, TPB), TPB, 0, stream>>>(
      x2.data(), n_rows * n_cols, x_float.data());
    RAFT_CUDA_TRY(cudaPeekAtLastError());
    raft::linalg::transpose(handle, x2.data(), x, n_cols, n_rows, stream);
  }
  cast<<<raft::ceildiv(n_rows, TPB), TPB, 0, stream>>>(y, n_rows, y_int.data());
  RAFT_CUDA_TRY(cudaPeekAtLastError());
}
// Functor for zipped iterators: yields 1 when the two tuple elements compare
// equal and 0 otherwise, so matches can be counted with thrust::reduce.
struct is_same_functor {
  template <typename Tuple>
  __host__ __device__ int operator()(Tuple t)
  {
    const bool equal = thrust::get<0>(t) == thrust::get<1>(t);
    return equal ? 1 : 0;
  }
};
// Train on a two-cluster blob dataset and require a minimum prediction
// accuracy on a freshly generated dataset drawn from the same centers.
TYPED_TEST(SmoSolverTest, BlobPredict)
{
  auto stream = this->handle.get_stream();
  // Pair.second is the expected accuracy. It might change if the Rng changes.
  std::vector<std::pair<blobInput, TypeParam>> data{
    {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 200, 10}, 98},
    {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 200, 10}, 98},
    {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 200, 2}, 98},
    {blobInput{1, 0.009, KernelParams{TANH, 3, 0.1, 0}, 200, 10}, 98}};

  // This should be larger then N_PRED_BATCH in svcPredict
  const int n_pred = 5000;

  for (auto d : data) {
    auto p = d.first;
    SCOPED_TRACE(p);
    // explicit centers for the blobs: one cluster at -5, one at +5 per feature
    rmm::device_uvector<float> centers(2 * p.n_cols, stream);
    thrust::device_ptr<float> thrust_ptr(centers.data());
    thrust::fill(thrust::cuda::par.on(stream), thrust_ptr, thrust_ptr + p.n_cols, -5.0f);
    thrust::fill(
      thrust::cuda::par.on(stream), thrust_ptr + p.n_cols, thrust_ptr + 2 * p.n_cols, +5.0f);

    rmm::device_uvector<TypeParam> x(p.n_rows * p.n_cols, stream);
    rmm::device_uvector<TypeParam> y(p.n_rows, stream);
    rmm::device_uvector<TypeParam> x_pred(n_pred * p.n_cols, stream);
    rmm::device_uvector<TypeParam> y_pred(n_pred, stream);

    make_blobs(this->handle, x.data(), y.data(), p.n_rows, p.n_cols, 2, centers.data());
    SVC<TypeParam> svc(this->handle, p.C, p.tol, p.kernel_params, 0, -1, 50, CUML_LEVEL_INFO);
    svc.fit(x.data(), p.n_rows, p.n_cols, y.data());

    // Create a different dataset for prediction
    make_blobs(this->handle, x_pred.data(), y_pred.data(), n_pred, p.n_cols, 2, centers.data());
    rmm::device_uvector<TypeParam> y_pred2(n_pred, stream);
    svc.predict(x_pred.data(), n_pred, p.n_cols, y_pred2.data());

    // Count the number of correct predictions
    rmm::device_uvector<int> is_correct(n_pred, stream);
    thrust::device_ptr<TypeParam> ptr1(y_pred.data());
    thrust::device_ptr<TypeParam> ptr2(y_pred2.data());
    thrust::device_ptr<int> ptr3(is_correct.data());
    auto first = thrust::make_zip_iterator(thrust::make_tuple(ptr1, ptr2));
    auto last  = thrust::make_zip_iterator(thrust::make_tuple(ptr1 + n_pred, ptr2 + n_pred));
    thrust::transform(thrust::cuda::par.on(stream), first, last, ptr3, is_same_functor());
    int n_correct = thrust::reduce(thrust::cuda::par.on(stream), ptr3, ptr3 + n_pred);

    TypeParam accuracy     = 100 * n_correct / n_pred;
    TypeParam accuracy_exp = d.second;
    EXPECT_GE(accuracy, accuracy_exp);
  }
}
TYPED_TEST(SmoSolverTest, MemoryLeak)
{
  auto stream = this->handle.get_stream();
  // We measure that we have the same amount of free memory available on the GPU
  // before and after we call SVM. This can help catch memory leaks, but it is
  // not 100% sure. Small allocations might be pooled together by cudaMalloc,
  // and some of those would be missed by this method.
  enum class ThrowException { Yes, No };
  std::vector<std::pair<blobInput, ThrowException>> data{
    {blobInput{1, 0.001, KernelParams{LINEAR, 3, 0.01, 0}, 1000, 1000}, ThrowException::No},
    {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 400, 5, 10}, 1000, 1000}, ThrowException::Yes}};
  // For the second set of input parameters training will fail, some kernel
  // function values would be 1e400 or larger, which does not fit fp64.
  // This will lead to NaN diff in SmoSolver, which will throw an exception
  // to stop fitting.
  {
    // the first time transpose is called it leaks 8MB -- probably for cuBlas init
    rmm::device_uvector<TypeParam> in(100, stream);
    rmm::device_uvector<TypeParam> out(100, stream);
    raft::linalg::transpose(this->handle, in.data(), out.data(), 10, 10, stream);
  }
  size_t free1, total, free2;
  RAFT_CUDA_TRY(cudaMemGetInfo(&free1, &total));
  for (auto d : data) {
    auto p = d.first;
    SCOPED_TRACE(p);
    {
      rmm::device_uvector<TypeParam> x(p.n_rows * p.n_cols, stream);
      rmm::device_uvector<TypeParam> y(p.n_rows, stream);
      make_blobs(this->handle, x.data(), y.data(), p.n_rows, p.n_cols, 2);
      SVC<TypeParam> svc(this->handle, p.C, p.tol, p.kernel_params);
      if (d.second == ThrowException::Yes) {
        // We want to check whether we leak any memory while we unwind the stack
        EXPECT_THROW(svc.fit(x.data(), p.n_rows, p.n_cols, y.data()), raft::exception);
      } else {
        svc.fit(x.data(), p.n_rows, p.n_cols, y.data());
        rmm::device_uvector<TypeParam> y_pred(p.n_rows, stream);
        raft::interruptible::synchronize(stream);
        RAFT_CUDA_TRY(cudaMemGetInfo(&free2, &total));
        // NOTE(review): free1 - free2 is size_t arithmetic; if free2 > free1
        // this underflows before the float conversion. Works here because the
        // model buffers are still allocated at this point.
        float delta = (free1 - free2);
        // Just to make sure that we measure any mem consumption at all:
        // we check if we see the memory consumption of x[n_rows*n_cols].
        // If this error is triggered, increasing the test size might help to fix
        // it (one could additionally control the exec time by the max_iter arg to
        // SVC).
        EXPECT_GT(delta, p.n_rows * p.n_cols * 4);
        svc.predict(x.data(), p.n_rows, p.n_cols, y_pred.data());
      }
    }
  }
  // After all models and buffers are destroyed the free memory must match the
  // baseline exactly.
  RAFT_CUDA_TRY(cudaMemGetInfo(&free2, &total));
  float delta = (free1 - free2);
  EXPECT_EQ(delta, 0);
}
TYPED_TEST(SmoSolverTest, DISABLED_MillionRows)
{
  auto stream = this->handle.get_stream();
  if (sizeof(TypeParam) == 8) {
    GTEST_SKIP();  // Skip the test for double input
  } else {
    // Stress test the kernel matrix calculation by calculating a kernel tile
    // with more the 2.8B elements. This would fail with int32 addressing. The test
    // is currently disabled because the memory usage might be prohibitive on CI
    // The test will be enabled once https://github.com/rapidsai/cuml/pull/2449
    // is merged, that PR would reduce the kernel tile memory size.
    std::vector<std::pair<blobInput, TypeParam>> data{
      {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 2800000, 4}, 98},
      {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 2800000, 4}, 98},
      {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 2800000, 4}, 98},
      {blobInput{1, 0.001, KernelParams{TANH, 3, 1, 0}, 2800000, 4}, 98}};
    for (auto d : data) {
      auto p = d.first;
      SCOPED_TRACE(p);
      // explicit centers for the blobs: -5 / +5 per feature
      rmm::device_uvector<float> centers(2 * p.n_cols, stream);
      thrust::device_ptr<float> thrust_ptr(centers.data());
      thrust::fill(thrust::cuda::par.on(stream), thrust_ptr, thrust_ptr + p.n_cols, -5.0f);
      thrust::fill(
        thrust::cuda::par.on(stream), thrust_ptr + p.n_cols, thrust_ptr + 2 * p.n_cols, +5.0f);
      rmm::device_uvector<TypeParam> x(p.n_rows * p.n_cols, stream);
      rmm::device_uvector<TypeParam> y(p.n_rows, stream);
      rmm::device_uvector<TypeParam> y_pred(p.n_rows, stream);
      make_blobs(this->handle, x.data(), y.data(), p.n_rows, p.n_cols, 2, centers.data());
      // Two iterations are enough to exercise the large kernel tile path.
      const int max_iter = 2;
      SVC<TypeParam> svc(
        this->handle, p.C, p.tol, p.kernel_params, 0, max_iter, 50, CUML_LEVEL_DEBUG);
      svc.fit(x.data(), p.n_rows, p.n_cols, y.data());
      // predict on the same dataset
      svc.predict(x.data(), p.n_rows, p.n_cols, y_pred.data());
    }
  }
}
// Fill a column-major dense matrix with a round-robin identity-like pattern
// (see the ASCII sketch below) and derive binary labels from it. The pattern
// guarantees a deterministic, well-conditioned input for the batching tests.
template <typename math_t>
void initializeTestMatrix(
  const raft::handle_t& handle, math_t* dense_matrix, int n_rows, int n_cols, math_t* y)
{
  auto stream = handle.get_stream();
  // Left-to-right evaluation: ((n_cols % n_rows) * n_rows) % n_cols, which is
  // 0 exactly when one dimension divides the other -- the pattern below
  // requires that.
  assert(n_cols % n_rows * n_rows % n_cols == 0);
  /*
     1 0 0 1 0 0
     0 1 0 0 1 0
     0 0 1 0 0 1

     1 0 0
     0 1 0
     0 0 1
     1 0 0
     0 1 0
     0 0 1
  */
  // fill col-major
  thrust::device_ptr<math_t> data_ptr(dense_matrix);
  // Element at flat index a (col-major) is 1 on the wrapped diagonal.
  auto one_or_zero = [n_rows, n_cols] __device__(const int& a) {
    int cycle  = min(n_rows, n_cols);
    int row_id = a % n_rows;
    int col_id = a / n_rows;
    return (row_id % cycle == col_id % cycle) ? (math_t)1 : (math_t)0;
  };
  thrust::transform(thrust::cuda::par.on(stream),
                    thrust::make_counting_iterator<int>(0),
                    thrust::make_counting_iterator<int>(n_rows * n_cols),
                    data_ptr,
                    one_or_zero);
  // init y label to 1 for all that contain the first half of the features
  {
    thrust::device_ptr<math_t> label_ptr(y);
    auto lable_hit = [n_rows, n_cols] __device__(const int& row) {
      int cycle     = min(n_rows, n_cols);
      int first_col = row % cycle;
      return (first_col < cycle / 2) ? (math_t)1 : (math_t)0;
    };
    thrust::transform(thrust::cuda::par.on(stream),
                      thrust::make_counting_iterator<int>(0),
                      thrust::make_counting_iterator<int>(n_rows),
                      label_ptr,
                      lable_hit);
  }
  handle.sync_stream(stream);
}
// CSR variant of initializeTestMatrix: builds the same round-robin pattern as
// the dense overload directly in CSR form (uniform nnz per row, all values 1)
// and derives the same binary labels.
template <typename math_t>
void initializeTestMatrix(const raft::handle_t& handle,
                          raft::device_csr_matrix<math_t, int, int, int>& csr_matrix,
                          int n_rows,
                          int n_cols,
                          math_t* y)
{
  auto stream = handle.get_stream();
  // Same divisibility requirement as the dense overload (see its comment).
  assert(n_cols % n_rows * n_rows % n_cols == 0);
  /*
     1 0 0 1 0 0
     0 1 0 0 1 0
     0 0 1 0 0 1

     1 0 0
     0 1 0
     0 0 1
     1 0 0
     0 1 0
     0 0 1
  */
  int nnz_per_row = std::max(n_cols / n_rows, 1);
  int nnz         = n_rows * nnz_per_row;
  csr_matrix.initialize_sparsity(nnz);
  auto csr_structure = csr_matrix.structure_view();
  {
    // init indptr with nnz_per_row: indptr[i] = i * nnz_per_row
    thrust::device_ptr<int> indptr_ptr(csr_structure.get_indptr().data());
    auto mul_x = [] __device__(const int& a, const int& b) { return a * b; };
    thrust::transform(thrust::cuda::par.on(stream),
                      thrust::make_counting_iterator<int>(0),
                      thrust::make_counting_iterator<int>(n_rows + 1),
                      thrust::make_constant_iterator<int>(nnz_per_row),
                      indptr_ptr,
                      mul_x);
  }
  // init indices/ data round-robin
  {
    thrust::device_ptr<int> indices_ptr(csr_structure.get_indices().data());
    // Flat nnz index a -> (row, i-th non-zero) -> column of the pattern.
    auto one_or_zero = [n_rows, n_cols, nnz_per_row] __device__(const int& a) {
      int cycle  = min(n_rows, n_cols);
      int row_id = a / nnz_per_row;
      int ith1   = a % nnz_per_row;
      int col_id = ith1 * cycle + row_id % cycle;
      return col_id;
    };
    thrust::transform(thrust::cuda::par.on(stream),
                      thrust::make_counting_iterator<int>(0),
                      thrust::make_counting_iterator<int>(nnz),
                      indices_ptr,
                      one_or_zero);
  }
  // init data to 1
  {
    thrust::device_ptr<math_t> data_ptr(csr_matrix.get_elements().data());
    thrust::fill(thrust::cuda::par.on(stream), data_ptr, data_ptr + nnz, (math_t)1);
  }
  // init y label to 1 for all that contain the first half of the features
  {
    thrust::device_ptr<math_t> label_ptr(y);
    auto lable_hit = [n_rows, n_cols] __device__(const int& row) {
      int cycle     = min(n_rows, n_cols);
      int first_col = row % cycle;
      return (first_col < cycle / 2) ? (math_t)1 : (math_t)0;
    };
    thrust::transform(thrust::cuda::par.on(stream),
                      thrust::make_counting_iterator<int>(0),
                      thrust::make_counting_iterator<int>(n_rows),
                      label_ptr,
                      lable_hit);
  }
  handle.sync_stream(stream);
}
// Exercise the dense-input batching code path: the input is large enough that
// the kernel tile (~4GB) cannot be computed in one piece.
TYPED_TEST(SmoSolverTest, DenseBatching)
{
  auto stream = this->handle.get_stream();
  if (sizeof(TypeParam) == 8) {
    GTEST_SKIP();  // Skip the test for double input
  } else {
    std::vector<blobInput> data{
      {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 1000000, 4}},
      {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 1000000, 4}},
      {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 1000000, 4}},
      {blobInput{1, 0.001, KernelParams{TANH, 3, 1, 0}, 1000000, 4}}};
    for (auto input : data) {
      SCOPED_TRACE(input);
      // this will result in a big kernel tile of ~4GB which will result in batching
      rmm::device_uvector<TypeParam> y(input.n_rows, stream);
      rmm::device_uvector<TypeParam> dense_input(input.n_rows * input.n_cols, stream);
      initializeTestMatrix(this->handle, dense_input.data(), input.n_rows, input.n_cols, y.data());
      SvmParameter param = getDefaultSvmParameter();
      param.max_iter     = 2;  // a couple of iterations suffice to hit batching
      SvmModel<TypeParam> model;
      TypeParam* sample_weights = nullptr;
      svcFit(this->handle,
             dense_input.data(),
             input.n_rows,
             input.n_cols,
             y.data(),
             param,
             input.kernel_params,
             model,
             sample_weights);
      // TODO predict with subset csr & dense
      rmm::device_uvector<TypeParam> y_pred(input.n_rows, stream);
      svcPredict(this->handle,
                 dense_input.data(),
                 input.n_rows,
                 input.n_cols,
                 input.kernel_params,
                 model,
                 y_pred.data(),
                 (TypeParam)200.0,
                 false);
      svmFreeBuffers(this->handle, model);
    }
  }
}
// Exercise the sparse-input code paths: batched kernel tiles, sparse row
// extraction, sparse support vectors, and consistency of predictions between
// CSR and dense representations of the same subset.
TYPED_TEST(SmoSolverTest, SparseBatching)
{
  auto stream = this->handle.get_stream();
  if (sizeof(TypeParam) == 8) {
    GTEST_SKIP();  // Skip the test for double input
  } else {
    std::vector<blobInput> data{
      // sparse input with batching
      {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 1000000, 4}},
      {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 1000000, 4}},
      {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 1000000, 4}},
      {blobInput{1, 0.001, KernelParams{TANH, 3, 1, 0}, 1000000, 4}},
      // sparse input with sparse row extraction (also sparse support)
      {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 1000, 300000}},
      {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 1000, 300000}},
      {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 1000, 300000}},
      {blobInput{1, 0.001, KernelParams{TANH, 3, 1, 0}, 1000, 300000}},
      // sparse input with batching AND sparse row extraction (also sparse support)
      {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 290000, 290000}},
      {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 290000, 290000}},
      {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 290000, 290000}},
      {blobInput{1, 0.001, KernelParams{TANH, 3, 1, 0}, 290000, 290000}},
      // sparse input with sparse support
      {blobInput{1, 0.001, KernelParams{RBF, 3, 1, 0}, 100000, 10000}},
      {blobInput{1, 0.001, KernelParams{LINEAR, 3, 1, 0}, 100000, 10000}},
      {blobInput{1, 0.001, KernelParams{POLYNOMIAL, 3, 1, 0}, 100000, 10000}},
      {blobInput{1, 0.001, KernelParams{TANH, 3, 1, 0}, 100000, 10000}}};
    for (auto input : data) {
      SCOPED_TRACE(input);
      // this will result in a big kernel tile of ~4GB which will result in batching
      auto csr_input = raft::make_device_csr_matrix<TypeParam, int, int, int>(
        this->handle, input.n_rows, input.n_cols);
      rmm::device_uvector<TypeParam> y(input.n_rows, stream);
      initializeTestMatrix(this->handle, csr_input, input.n_rows, input.n_cols, y.data());
      auto csr_structure = csr_input.structure_view();
      SvmParameter param = getDefaultSvmParameter();
      param.max_iter     = 2;
      SvmModel<TypeParam> model;
      TypeParam* sample_weights = nullptr;
      svcFitSparse(this->handle,
                   csr_structure.get_indptr().data(),
                   csr_structure.get_indices().data(),
                   csr_input.get_elements().data(),
                   csr_structure.get_n_rows(),
                   csr_structure.get_n_cols(),
                   csr_structure.get_nnz(),
                   y.data(),
                   param,
                   input.kernel_params,
                   model,
                   sample_weights);
      // predict with full input
      rmm::device_uvector<TypeParam> y_pred(input.n_rows, stream);
      svcPredictSparse(this->handle,
                       csr_structure.get_indptr().data(),
                       csr_structure.get_indices().data(),
                       csr_input.get_elements().data(),
                       csr_structure.get_n_rows(),
                       csr_structure.get_n_cols(),
                       csr_structure.get_nnz(),
                       input.kernel_params,
                       model,
                       y_pred.data(),
                       (TypeParam)200.0,
                       false);
      MLCommon::devArrMatch(
        y.data(), y_pred.data(), input.n_rows, MLCommon::CompareApprox<TypeParam>(1e-6), stream);

      // predict with subset csr & dense for all edge cases
      // (support_matrix.nnz >= 0 indicates sparse support vectors)
      if (model.support_matrix.nnz >= 0) {
        int n_extract = 100;
        rmm::device_uvector<int> sequence(n_extract, stream);
        auto csr_subset = raft::make_device_csr_matrix<TypeParam, int, int, int>(
          this->handle, n_extract, input.n_cols);
        csr_subset.initialize_sparsity(10);  //! otherwise structure_view() call will fail
        rmm::device_uvector<TypeParam> dense_subset(n_extract * input.n_cols, stream);
        {
          // Extract the first n_extract rows in both CSR and dense form.
          thrust::device_ptr<int> sequence_ptr(sequence.data());
          thrust::sequence(
            thrust::cuda::par.on(stream), sequence_ptr, sequence_ptr + n_extract, (int)0);
          ML::SVM::extractRows(
            csr_input.view(), csr_subset, sequence.data(), n_extract, this->handle);
          ML::SVM::extractRows(
            csr_input.view(), dense_subset.data(), sequence.data(), n_extract, this->handle);
        }
        rmm::device_uvector<TypeParam> y_pred_csr(n_extract, stream);
        rmm::device_uvector<TypeParam> y_pred_dense(n_extract, stream);
        // also reduce buffer memory to ensure batching
        svcPredictSparse(this->handle,
                         csr_subset.structure_view().get_indptr().data(),
                         csr_subset.structure_view().get_indices().data(),
                         csr_subset.get_elements().data(),
                         csr_subset.structure_view().get_n_rows(),
                         csr_subset.structure_view().get_n_cols(),
                         csr_subset.structure_view().get_nnz(),
                         input.kernel_params,
                         model,
                         y_pred_csr.data(),
                         (TypeParam)50.0,
                         false);
        svcPredict(this->handle,
                   dense_subset.data(),
                   n_extract,
                   input.n_cols,
                   input.kernel_params,
                   model,
                   y_pred_dense.data(),
                   (TypeParam)50.0,
                   false);
        // CSR and dense predictions over the same rows must agree.
        MLCommon::devArrMatch(y_pred_csr.data(),
                              y_pred_dense.data(),
                              n_extract,
                              MLCommon::CompareApprox<TypeParam>(1e-6),
                              stream);
      }
      svmFreeBuffers(this->handle, model);
    }
  }
}
// Input description for the epsilon-SVR fit/predict tests.
template <typename math_t>
struct SvrInput {
  SvmParameter param;           // solver parameters (C, eps, max_iter, ...)
  KernelParams kernel;          // kernel type and hyper-parameters
  int n_rows;                   // number of samples
  int n_cols;                   // number of features
  std::vector<math_t> x;        // input matrix, column major (n_rows x n_cols)
  std::vector<math_t> y;        // regression targets
  std::vector<math_t> sample_weighs;  // optional per-sample weights; empty =
                                      // unweighted (field name keeps its
                                      // historical typo, referenced elsewhere)
};
// Pretty-print an SvrInput configuration for SCOPED_TRACE output.
template <typename math_t>
std::ostream& operator<<(std::ostream& os, const SvrInput<math_t>& b)
{
  return os << kernelName(b.kernel) << " " << b.n_rows << "x" << b.n_cols << ", C=" << b.param.C
            << ", tol=" << b.param.tol;
}
// Test fixture for epsilon-SVR: covers initialization of the doubled training
// set (yc, f), working-set selection, result extraction, and full fit/predict.
template <typename math_t>
class SvrTest : public ::testing::Test {
 public:
  SvrTest()
    : stream(handle.get_stream()),
      x_dev(n_rows * n_cols, stream),
      y_dev(n_rows, stream),
      C_dev(2 * n_rows, stream),
      yc(n_train, stream),
      f(n_train, stream),
      alpha(n_train, stream)
  {
  }

 protected:
  void SetUp() override
  {
    raft::update_device(x_dev.data(), x_host, n_rows * n_cols, stream);
    raft::update_device(y_dev.data(), y_host, n_rows, stream);
    // Start from an empty model; TearDown frees whatever the tests allocate.
    model.n_support      = 0;
    model.dual_coefs     = nullptr;
    model.support_matrix = {};
    model.support_idx    = nullptr;
    model.n_classes      = 0;
    model.unique_labels  = nullptr;
  }

  void TearDown() override { svmFreeBuffers(handle, model); }

 public:
  // Verify that SvrInit produces the doubled label vector yc and the initial
  // gradient f for the 2*n_rows SVR dual problem.
  void TestSvrInit()
  {
    auto stream        = this->handle.get_stream();
    SvmParameter param = getDefaultSvmParameter();
    param.svmType      = EPSILON_SVR;
    SmoSolver<math_t> smo(handle, param, LINEAR, nullptr);
    smo.SvrInit(y_dev.data(), n_rows, yc.data(), f.data());
    EXPECT_TRUE(
      devArrMatchHost(yc_exp, yc.data(), n_train, MLCommon::CompareApprox<math_t>(1.0e-9), stream));
    EXPECT_TRUE(devArrMatchHost(f_exp, f.data(), n_train, MLCommon::Compare<math_t>(), stream));
  }

  // Verify working-set selection on the doubled SVR problem, both when the
  // working set covers all dual variables and when it has to select a subset.
  void TestSvrWorkingSet()
  {
    init_C((math_t)1.0, C_dev.data(), 2 * n_rows, stream);
    WorkingSet<math_t>* ws;
    ws = new WorkingSet<math_t>(handle, stream, n_rows, 20, EPSILON_SVR);
    EXPECT_EQ(ws->GetSize(), 2 * n_rows);
    raft::update_device(alpha.data(), alpha_host, n_train, stream);
    raft::update_device(f.data(), f_exp, n_train, stream);
    raft::update_device(yc.data(), yc_exp, n_train, stream);
    ws->Select(f.data(), alpha.data(), yc.data(), C_dev.data());
    int exp_idx[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13};
    ASSERT_TRUE(
      devArrMatchHost(exp_idx, ws->GetIndices(), ws->GetSize(), MLCommon::Compare<int>(), stream));
    delete ws;
    ws = new WorkingSet<math_t>(handle, stream, n_rows, 10, EPSILON_SVR);
    EXPECT_EQ(ws->GetSize(), 10);
    ws->Select(f.data(), alpha.data(), yc.data(), C_dev.data());
    int exp_idx2[] = {6, 12, 5, 11, 3, 9, 8, 1, 7, 0};
    ASSERT_TRUE(
      devArrMatchHost(exp_idx2, ws->GetIndices(), ws->GetSize(), MLCommon::Compare<int>(), stream));
    delete ws;
  }

  // Verify that Results::Get collapses the doubled alpha vector into dual
  // coefficients, support indices, dense support matrix and intercept.
  void TestSvrResults()
  {
    raft::update_device(yc.data(), yc_exp, n_train, stream);
    init_C((math_t)0.001, C_dev.data(), n_rows * 2, stream);
    auto dense_view = raft::make_device_strided_matrix_view<math_t, int, raft::layout_f_contiguous>(
      x_dev.data(), n_rows, n_cols, 0);
    Results<math_t, raft::device_matrix_view<math_t, int, raft::layout_stride>> res(
      handle, dense_view, n_rows, n_cols, yc.data(), C_dev.data(), EPSILON_SVR);
    model.n_cols = n_cols;
    raft::update_device(alpha.data(), alpha_host, n_train, stream);
    raft::update_device(f.data(), f_exp, n_train, stream);
    res.Get(alpha.data(),
            f.data(),
            &model.dual_coefs,
            &model.n_support,
            &model.support_idx,
            &model.support_matrix,
            &model.b);
    ASSERT_EQ(model.n_support, 5);
    math_t dc_exp[] = {0.1, 0.3, -0.4, 0.9, -0.9};
    EXPECT_TRUE(devArrMatchHost(
      dc_exp, model.dual_coefs, model.n_support, MLCommon::CompareApprox<math_t>(1.0e-6), stream));
    // nnz == -1 marks a dense support matrix.
    EXPECT_TRUE(model.support_matrix.nnz == -1);
    math_t x_exp[] = {1, 2, 3, 5, 6};
    EXPECT_TRUE(devArrMatchHost(x_exp,
                                model.support_matrix.data,
                                model.n_support * n_cols,
                                MLCommon::CompareApprox<math_t>(1.0e-6),
                                stream));
    int idx_exp[] = {0, 1, 2, 4, 5};
    EXPECT_TRUE(devArrMatchHost(idx_exp,
                                model.support_idx,
                                model.n_support,
                                MLCommon::CompareApprox<math_t>(1.0e-6),
                                stream));
  }

  // End-to-end svrFit + svcPredict on a set of tiny regression problems with
  // known solutions, including a weighted case and a degenerate constant-y
  // case that yields no support vectors.
  void TestSvrFitPredict()
  {
    auto stream = this->handle.get_stream();
    std::vector<std::pair<SvrInput<math_t>, smoOutput2<math_t>>> data{
      {SvrInput<math_t>{
         SvmParameter{1, 0, 1, 10, 1e-3, CUML_LEVEL_INFO, 0.1, EPSILON_SVR},
         KernelParams{LINEAR, 3, 1, 0},
         2,       // n_rows
         1,       // n_cols
         {0, 1},  // x
         {2, 3}   // y
       },
       smoOutput2<math_t>{2, {-0.8, 0.8}, 2.1, {0.8}, {0, 1}, {0, 1}, {2.1, 2.9}}},
      {SvrInput<math_t>{
         SvmParameter{1, 10, 1, 1, 1e-3, CUML_LEVEL_INFO, 0.1, EPSILON_SVR},
         KernelParams{LINEAR, 3, 1, 0},
         2,       // n_rows
         1,       // n_cols
         {1, 2},  // x
         {2, 3}   // y
       },
       smoOutput2<math_t>{2, {-0.8, 0.8}, 1.3, {0.8}, {1, 2}, {0, 1}, {2.1, 2.9}}},
      {SvrInput<math_t>{
         SvmParameter{1, 0, 1, 1, 1e-3, CUML_LEVEL_INFO, 0.1, EPSILON_SVR},
         KernelParams{LINEAR, 3, 1, 0},
         2,             // n_rows
         2,             // n_cols
         {1, 2, 5, 5},  // x
         {2, 3}         // y
       },
       smoOutput2<math_t>{2, {-0.8, 0.8}, 1.3, {0.8, 0.0}, {1, 2, 5, 5}, {0, 1}, {2.1, 2.9}}},
      {SvrInput<math_t>{
         SvmParameter{1, 0, 100, 10, 1e-6, CUML_LEVEL_INFO, 0.1, EPSILON_SVR},
         KernelParams{LINEAR, 3, 1, 0},
         7,                      // n_rows
         1,                      // n_cols
         {1, 2, 3, 4, 5, 6, 7},  // x
         {0, 2, 3, 4, 5, 6, 8}   // y
       },
       smoOutput2<math_t>{6,
                          {-1, 1, 0.45, -0.45, -1, 1},
                          -0.4,
                          {1.1},
                          {1.0, 2.0, 3.0, 5.0, 6.0, 7.0},
                          {0, 1, 2, 4, 5, 6},
                          {0.7, 1.8, 2.9, 4, 5.1, 6.2, 7.3}}},
      // Almost same as above, but with sample weights
      {SvrInput<math_t>{
         SvmParameter{1, 0, 100, 10, 1e-3, CUML_LEVEL_INFO, 0.1, EPSILON_SVR},
         KernelParams{LINEAR, 3, 1, 0},
         7,                       // n_rows
         1,                       // n_cols
         {1, 2, 3, 4, 5, 6, 7},   // x
         {0, 2, 3, 0, 4, 8, 12},  // y
         {1, 1, 1, 10, 2, 10, 1}  // sample weights
       },
       smoOutput2<math_t>{
         6, {}, -15.5, {3.9}, {1.0, 2.0, 3.0, 4.0, 6.0, 7.0}, {0, 1, 2, 3, 5, 6}, {}}},
      {SvrInput<math_t>{
         SvmParameter{1, 0, 100, 10, 1e-6, CUML_LEVEL_INFO, 0.1, EPSILON_SVR},
         KernelParams{LINEAR, 3, 1, 0},
         7,                      // n_rows
         1,                      // n_cols
         {1, 2, 3, 4, 5, 6, 7},  // x
         {2, 2, 2, 2, 2, 2, 2}   // y
       },
       smoOutput2<math_t>{0, {}, 2, {}, {}, {}, {}}}};
    for (auto d : data) {
      auto p   = d.first;
      auto exp = d.second;
      SCOPED_TRACE(p);
      raft::update_device(x_dev.data(), p.x.data(), p.n_rows * p.n_cols, stream);
      raft::update_device(y_dev.data(), p.y.data(), p.n_rows, stream);
      rmm::device_uvector<math_t> sample_weights_dev(0, stream);
      math_t* sample_weights = nullptr;
      if (!p.sample_weighs.empty()) {
        sample_weights_dev.resize(p.n_rows, stream);
        sample_weights = sample_weights_dev.data();
        raft::update_device(sample_weights_dev.data(), p.sample_weighs.data(), p.n_rows, stream);
      }
      svrFit(handle,
             x_dev.data(),
             p.n_rows,
             p.n_cols,
             y_dev.data(),
             p.param,
             p.kernel,
             model,
             sample_weights);
      checkResults(model, toSmoOutput(exp), stream);
      rmm::device_uvector<math_t> preds(p.n_rows, stream);
      svcPredict(handle,
                 x_dev.data(),
                 p.n_rows,
                 p.n_cols,
                 p.kernel,
                 model,
                 preds.data(),
                 (math_t)200.0,
                 false);
      if (!exp.decision_function.empty()) {
        EXPECT_TRUE(devArrMatchHost(exp.decision_function.data(),
                                    preds.data(),
                                    p.n_rows,
                                    MLCommon::CompareApprox<math_t>(1.0e-5),
                                    stream));
      }
    }
  }

 protected:
  raft::handle_t handle;
  cudaStream_t stream = 0;
  int n_rows = 7;
  int n_train = 2 * n_rows;  // SVR doubles each sample (upper/lower pair)
  const int n_cols = 1;
  SvmModel<math_t> model;
  rmm::device_uvector<math_t> x_dev;
  rmm::device_uvector<math_t> y_dev;
  rmm::device_uvector<math_t> C_dev;
  rmm::device_uvector<math_t> yc;     // doubled labels (+1/-1)
  rmm::device_uvector<math_t> f;      // gradient for the doubled problem
  rmm::device_uvector<math_t> alpha;  // dual variables for the doubled problem
  math_t x_host[7] = {1, 2, 3, 4, 5, 6, 7};
  math_t y_host[7] = {0, 2, 3, 4, 5, 6, 8};
  // Expected yc and f after SvrInit (eps = 0.1).
  math_t yc_exp[14] = {1, 1, 1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1};
  math_t f_exp[14] = {
    0.1, -1.9, -2.9, -3.9, -4.9, -5.9, -7.9, -0.1, -2.1, -3.1, -4.1, -5.1, -6.1, -8.1};
  math_t alpha_host[14] = {0.2, 0.3, 0, 0, 1, 0.1, 0, 0.1, 0, 0.4, 0, 0.1, 1, 0};
};  // class SvrTest
// NOTE(review): OnlyFp32 is declared but unused below; the suite is
// instantiated with FloatTypes instead.
typedef ::testing::Types<float> OnlyFp32;
TYPED_TEST_CASE(SvrTest, FloatTypes);
TYPED_TEST(SvrTest, Init) { this->TestSvrInit(); }
TYPED_TEST(SvrTest, WorkingSet) { this->TestSvrWorkingSet(); }
TYPED_TEST(SvrTest, Results) { this->TestSvrResults(); }
TYPED_TEST(SvrTest, FitPredict) { this->TestSvrFitPredict(); }
}; // namespace SVM
}; // namespace ML
| 0 |
rapidsai_public_repos/cuml/cpp/test | rapidsai_public_repos/cuml/cpp/test/sg/rf_test.cu | /*
* Copyright (c) 2019-2023, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cuml/common/logger.hpp>
#include <test_utils.h>
#include <decisiontree/batched-levelalgo/kernels/builder_kernels.cuh>
#include <decisiontree/batched-levelalgo/quantiles.cuh>
#include <raft/core/handle.hpp>
#include <cuml/datasets/make_blobs.hpp>
#include <cuml/ensemble/randomforest.hpp>
#include <cuml/fil/fil.h>
#include <cuml/tree/algo_helper.h>
#include <raft/random/rng.cuh>
#include <raft/core/handle.hpp>
#include <raft/linalg/transpose.cuh>
#include <raft/util/cuda_utils.cuh>
#include <raft/util/cudart_utils.hpp>
#include <thrust/binary_search.h>
#include <thrust/copy.h>
#include <thrust/device_vector.h>
#include <thrust/execution_policy.h>
#include <thrust/for_each.h>
#include <thrust/functional.h>
#include <thrust/host_vector.h>
#include <thrust/iterator/counting_iterator.h>
#include <thrust/logical.h>
#include <thrust/random.h>
#include <thrust/shuffle.h>
#include <thrust/transform.h>
#include <gtest/gtest.h>
#include <cstddef>
#include <memory>
#include <random>
#include <tuple>
#include <type_traits>
namespace ML {
// Utils for changing tuple into struct
namespace detail {
// Expand the tuple elements in index order and aggregate-initialize the target
// struct type from them.
template <typename result_type, typename... types, std::size_t... indices>
result_type make_struct(std::tuple<types...> tup, std::index_sequence<indices...>)
{
  result_type out{std::get<indices>(tup)...};
  return out;
}
}  // namespace detail

// Convert a std::tuple into a plain aggregate whose fields match the tuple
// element types, in declaration order. std::index_sequence_for handles
// repeated element types transparently.
template <typename result_type, typename... types>
result_type make_struct(std::tuple<types...> tup)
{
  using index_seq = std::index_sequence_for<types...>;
  return detail::make_struct<result_type, types...>(tup, index_seq{});
}
// Fill tuple slot I of every entry of `sample` with values drawn from `x`:
// shuffle `x`, cycle through it to cover sample.size() entries, shuffle the
// resulting draw, then scatter into the I-th tuple element.
// (Note: despite the name, values necessarily repeat once sample.size()
// exceeds x.size().)
template <int I, typename RandomGenT, typename ParamT, typename T>
void SampleWithoutReplacemment(RandomGenT& gen, std::vector<ParamT>& sample, std::vector<T> x)
{
  std::vector<T> drawn(sample.size());
  std::shuffle(x.begin(), x.end(), gen);
  for (std::size_t k = 0; k < sample.size(); ++k) {
    drawn[k] = x[k % x.size()];
  }
  std::shuffle(drawn.begin(), drawn.end(), gen);
  for (std::size_t k = 0; k < sample.size(); ++k) {
    std::get<I>(sample[k]) = drawn[k];
  }
}
// Recursively assign one value list per tuple slot: slot I is filled from `x`,
// then the remaining lists are applied to slots I+1, I+2, ... until the
// parameter pack is exhausted.
template <int I, typename RandomGenT, typename ParamT, typename T, typename... Args>
void AddParameters(RandomGenT& gen, std::vector<ParamT>& sample, std::vector<T> x, Args... args)
{
  SampleWithoutReplacemment<I>(gen, sample, x);
  if constexpr (sizeof...(args) > 0) { AddParameters<I + 1>(gen, sample, args...); }
}
// Draw `num_samples` random parameter combinations from the given per-field
// value lists (`args`, one std::vector per field of ParamT) and return them as
// ParamT structs. Deterministic for a given seed.
template <typename ParamT, typename... Args>
std::vector<ParamT> SampleParameters(int num_samples, size_t seed, Args... args)
{
  std::vector<typename ParamT::types> tuples(num_samples);
  std::default_random_engine rng(seed);
  AddParameters<0>(rng, tuples, args...);
  std::vector<ParamT> out;
  out.reserve(num_samples);
  for (auto const& tup : tuples) {
    out.push_back(make_struct<ParamT>(tup));
  }
  return out;
}
// One sampled configuration for the property-based random-forest tests.
// Field order must match the `types` tuple below and the argument order
// passed to SampleParameters<RfTestParams>.
struct RfTestParams {
  std::size_t n_rows;           // dataset rows
  std::size_t n_cols;           // dataset columns (features)
  int n_trees;                  // number of trees in the forest
  float max_features;           // fraction of features considered per split
  float max_samples;            // fraction of rows sampled per tree
  int max_depth;                // tree depth limit
  int max_leaves;               // leaf-count limit (-1 = unlimited)
  bool bootstrap;               // sample rows with replacement
  int max_n_bins;               // quantile bins per feature
  int min_samples_leaf;         // minimum rows per leaf
  int min_samples_split;        // minimum rows required to split a node
  float min_impurity_decrease;  // minimum gain required to split
  int n_streams;                // CUDA streams used for training
  CRITERION split_criterion;    // GINI/ENTROPY => classification, else regression
  int seed;                     // dataset generation seed
  int n_labels;                 // number of classes (classification)
  bool double_precision;        // train with double instead of float
  // c++ has no reflection, so we enumerate the types here
  // This must be updated if new fields are added
  using types = std::tuple<std::size_t,
                           std::size_t,
                           int,
                           float,
                           float,
                           int,
                           int,
                           bool,
                           int,
                           int,
                           int,
                           float,
                           int,
                           CRITERION,
                           int,
                           int,
                           bool>;
};
// Stream a human-readable dump of every RfTestParams field, used by gtest to
// label failures of parameterized tests.
std::ostream& operator<<(std::ostream& os, const RfTestParams& ps)
{
  os << "n_rows = " << ps.n_rows;
  os << ", n_cols = " << ps.n_cols;
  os << ", n_trees = " << ps.n_trees;
  os << ", max_features = " << ps.max_features;
  os << ", max_samples = " << ps.max_samples;
  os << ", max_depth = " << ps.max_depth;
  os << ", max_leaves = " << ps.max_leaves;
  os << ", bootstrap = " << ps.bootstrap;
  os << ", max_n_bins = " << ps.max_n_bins;
  os << ", min_samples_leaf = " << ps.min_samples_leaf;
  os << ", min_samples_split = " << ps.min_samples_split;
  os << ", min_impurity_decrease = " << ps.min_impurity_decrease;
  os << ", n_streams = " << ps.n_streams;
  os << ", split_criterion = " << ps.split_criterion;
  os << ", seed = " << ps.seed;
  os << ", n_labels = " << ps.n_labels;
  os << ", double_precision = " << ps.double_precision;
  return os;
}
// Convert the trained forest to a treelite model, load it into FIL and return
// per-row predictions (class id for classification, value for regression).
// FIL is float-only, hence the float output vector.
template <typename DataT, typename LabelT>
auto FilPredict(const raft::handle_t& handle,
                RfTestParams params,
                DataT* X_transpose,
                RandomForestMetaData<DataT, LabelT>* forest)
{
  auto pred = std::make_shared<thrust::device_vector<float>>(params.n_rows);
  ModelHandle model;
  std::size_t num_outputs = 1;
  // Classification forests emit one score per class; regression emits a single value.
  if constexpr (std::is_integral_v<LabelT>) { num_outputs = params.n_labels; }
  build_treelite_forest(&model, forest, params.n_cols);
  fil::treelite_params_t tl_params{fil::algo_t::ALGO_AUTO,
                                   num_outputs > 1,
                                   1.f / num_outputs,
                                   fil::storage_type_t::AUTO,
                                   8,
                                   1,
                                   0,
                                   nullptr};
  fil::forest_variant forest_variant;
  fil::from_treelite(handle, &forest_variant, model, &tl_params);
  fil::forest_t<float> fil_forest = std::get<fil::forest_t<float>>(forest_variant);
  // Last argument false => emit class labels / raw values, not probabilities.
  fil::predict(handle, fil_forest, pred->data().get(), X_transpose, params.n_rows, false);
  return pred;
}
// Like FilPredict, but returns per-class probabilities (n_rows * n_labels
// values). Classification only — enforced by the static_assert.
template <typename DataT, typename LabelT>
auto FilPredictProba(const raft::handle_t& handle,
                     RfTestParams params,
                     DataT* X_transpose,
                     RandomForestMetaData<DataT, LabelT>* forest)
{
  std::size_t num_outputs = params.n_labels;
  auto pred = std::make_shared<thrust::device_vector<float>>(params.n_rows * num_outputs);
  ModelHandle model;
  static_assert(std::is_integral_v<LabelT>, "Must be classification");
  build_treelite_forest(&model, forest, params.n_cols);
  fil::treelite_params_t tl_params{
    fil::algo_t::ALGO_AUTO, 0, 0.0f, fil::storage_type_t::AUTO, 8, 1, 0, nullptr};
  fil::forest_variant forest_variant;
  fil::from_treelite(handle, &forest_variant, model, &tl_params);
  fil::forest_t<float> fil_forest = std::get<fil::forest_t<float>>(forest_variant);
  // Last argument true => emit per-class probabilities.
  fil::predict(handle, fil_forest, pred->data().get(), X_transpose, params.n_rows, true);
  return pred;
}
// Train a forest on (X, y), predict on the same rows and score the result.
// X is column-major for fit, X_transpose row-major for predict.
// Returns (forest, predictions, metrics).
template <typename DataT, typename LabelT>
auto TrainScore(
  const raft::handle_t& handle, RfTestParams params, DataT* X, DataT* X_transpose, LabelT* y)
{
  RF_params rf_params = set_rf_params(params.max_depth,
                                      params.max_leaves,
                                      params.max_features,
                                      params.max_n_bins,
                                      params.min_samples_leaf,
                                      params.min_samples_split,
                                      params.min_impurity_decrease,
                                      params.bootstrap,
                                      params.n_trees,
                                      params.max_samples,
                                      0,
                                      params.split_criterion,
                                      params.n_streams,
                                      128);
  auto forest     = std::make_shared<RandomForestMetaData<DataT, LabelT>>();
  auto forest_ptr = forest.get();
  // Integral labels select the classification overload of fit (needs n_labels).
  if constexpr (std::is_integral_v<LabelT>) {
    fit(handle, forest_ptr, X, params.n_rows, params.n_cols, y, params.n_labels, rf_params);
  } else {
    fit(handle, forest_ptr, X, params.n_rows, params.n_cols, y, rf_params);
  }
  auto pred = std::make_shared<thrust::device_vector<LabelT>>(params.n_rows);
  predict(handle, forest_ptr, X_transpose, params.n_rows, params.n_cols, pred->data().get());
  // Predict and compare against known labels
  RF_metrics metrics = score(handle, forest_ptr, y, params.n_rows, pred->data().get());
  return std::make_tuple(forest, pred, metrics);
}
// Trains one random forest for a concrete (DataT, LabelT) combination and runs
// a suite of property-based checks on the resulting model (accuracy
// monotonicity, tree-size limits, determinism, instance counts, FIL parity).
template <typename DataT, typename LabelT>
class RfSpecialisedTest {
 public:
  // Generates blob data (classification) or normally-perturbed blob labels
  // (regression), trains the forest and immediately runs all checks.
  RfSpecialisedTest(RfTestParams params) : params(params)
  {
    auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(params.n_streams);
    raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
    X.resize(params.n_rows * params.n_cols);
    X_transpose.resize(params.n_rows * params.n_cols);
    y.resize(params.n_rows);
    // Make data
    if constexpr (std::is_integral<LabelT>::value) {
      Datasets::make_blobs(handle,
                           X.data().get(),
                           y.data().get(),
                           params.n_rows,
                           params.n_cols,
                           params.n_labels,
                           false,
                           nullptr,
                           nullptr,
                           5.0,
                           false,
                           -10.0f,
                           10.0f,
                           params.seed);
    } else {
      thrust::device_vector<int> y_temp(params.n_rows);
      Datasets::make_blobs(handle,
                           X.data().get(),
                           y_temp.data().get(),
                           params.n_rows,
                           params.n_cols,
                           params.n_labels,
                           false,
                           nullptr,
                           nullptr,
                           5.0,
                           false,
                           -10.0f,
                           10.0f,
                           params.seed);
      // if regression, make the labels normally distributed around the blob id
      raft::random::Rng r(4);
      thrust::device_vector<double> normal(params.n_rows);
      r.normal(normal.data().get(), normal.size(), 0.0, 2.0, nullptr);
      thrust::transform(
        normal.begin(), normal.end(), y_temp.begin(), y.begin(), thrust::plus<LabelT>());
    }
    raft::linalg::transpose(
      handle, X.data().get(), X_transpose.data().get(), params.n_rows, params.n_cols, nullptr);
    // NOTE(review): this freshly allocated forest is immediately replaced by
    // the one returned from TrainScore below — the reset looks redundant.
    forest.reset(new typename ML::RandomForestMetaData<DataT, LabelT>);
    std::tie(forest, predictions, training_metrics) =
      TrainScore(handle, params, X.data().get(), X_transpose.data().get(), y.data().get());
    Test();
  }
  // Current model should be at least as accurate as a model with depth - 1.
  // Only checked for single-tree, non-bootstrap configurations where the
  // monotonicity argument actually holds.
  void TestAccuracyImprovement()
  {
    if (params.max_depth <= 1) { return; }
    // averaging between models can introduce variance
    if (params.n_trees > 1) { return; }
    // accuracy is not guaranteed to improve with bootstrapping
    if (params.bootstrap) { return; }
    auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(params.n_streams);
    raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
    RfTestParams alt_params = params;
    alt_params.max_depth--;
    auto [alt_forest, alt_predictions, alt_metrics] =
      TrainScore(handle, alt_params, X.data().get(), X_transpose.data().get(), y.data().get());
    // eps absorbs floating-point noise in the error metrics
    double eps = 1e-8;
    if (params.split_criterion == MSE) {
      EXPECT_LE(training_metrics.mean_squared_error, alt_metrics.mean_squared_error + eps);
    } else if (params.split_criterion == MAE) {
      EXPECT_LE(training_metrics.mean_abs_error, alt_metrics.mean_abs_error + eps);
    } else {
      EXPECT_GE(training_metrics.accuracy, alt_metrics.accuracy);
    }
  }
  // Regularisation parameters (max_depth, max_leaves, min_samples_leaf) are
  // respected by every tree, and the stored leaf counter matches the tree.
  void TestTreeSize()
  {
    for (int i = 0u; i < forest->rf_params.n_trees; i++) {
      // Check we have actually built something, otherwise these tests can all pass when the tree
      // algorithm produces only stumps
      size_t effective_rows = params.n_rows * params.max_samples;
      if (params.max_depth > 0 && params.min_impurity_decrease == 0 && effective_rows >= 100) {
        EXPECT_GT(forest->trees[i]->leaf_counter, 1);
      }
      // Check number of leaves is accurate
      int num_leaves = 0;
      for (auto n : forest->trees[i]->sparsetree) {
        num_leaves += n.IsLeaf();
      }
      EXPECT_EQ(num_leaves, forest->trees[i]->leaf_counter);
      if (params.max_leaves > 0) { EXPECT_LE(forest->trees[i]->leaf_counter, params.max_leaves); }
      EXPECT_LE(forest->trees[i]->depth_counter, params.max_depth);
      EXPECT_LE(forest->trees[i]->leaf_counter,
                raft::ceildiv(int(params.n_rows), params.min_samples_leaf));
    }
  }
  // Every realized split must have beaten the min_impurity_decrease threshold.
  void TestMinImpurity()
  {
    for (int i = 0u; i < forest->rf_params.n_trees; i++) {
      for (auto n : forest->trees[i]->sparsetree) {
        if (!n.IsLeaf()) { EXPECT_GT(n.BestMetric(), params.min_impurity_decrease); }
      }
    }
  }
  // Training twice with identical parameters must produce identical trees
  // (classification only).
  void TestDeterminism()
  {
    // Regression models use floating point atomics, so are not bitwise reproducible
    bool is_regression = params.split_criterion != GINI and params.split_criterion != ENTROPY;
    if (is_regression) return;
    // Repeat training
    auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(params.n_streams);
    raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
    auto [alt_forest, alt_predictions, alt_metrics] =
      TrainScore(handle, params, X.data().get(), X_transpose.data().get(), y.data().get());
    for (int i = 0u; i < forest->rf_params.n_trees; i++) {
      EXPECT_EQ(forest->trees[i]->sparsetree, alt_forest->trees[i]->sparsetree);
    }
  }
  // Instance counts in children sums up to parent.
  void TestInstanceCounts()
  {
    for (int i = 0u; i < forest->rf_params.n_trees; i++) {
      const auto& tree = forest->trees[i]->sparsetree;
      for (auto n : tree) {
        if (!n.IsLeaf()) {
          auto sum = tree[n.LeftChildId()].InstanceCount() + tree[n.RightChildId()].InstanceCount();
          EXPECT_EQ(sum, n.InstanceCount());
        }
      }
    }
  }
  // Difference between the largest element and second largest
  DataT MinDifference(DataT* begin, std::size_t len)
  {
    std::size_t max_element_index = 0;
    DataT max_element             = 0.0;
    for (std::size_t i = 0; i < len; i++) {
      if (begin[i] > max_element) {
        max_element_index = i;
        max_element       = begin[i];
      }
    }
    DataT second_max_element = 0.0;
    for (std::size_t i = 0; i < len; i++) {
      if (begin[i] > second_max_element && i != max_element_index) {
        second_max_element = begin[i];
      }
    }
    return std::abs(max_element - second_max_element);
  }
  // Compare fil against native rf predictions
  // Only for single precision models
  void TestFilPredict()
  {
    if constexpr (std::is_same_v<DataT, double>) {
      return;
    } else {
      auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(params.n_streams);
      raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
      auto fil_pred = FilPredict(handle, params, X_transpose.data().get(), forest.get());
      thrust::host_vector<float> h_fil_pred(*fil_pred);
      thrust::host_vector<float> h_pred(*predictions);
      thrust::host_vector<float> h_fil_pred_prob;
      if constexpr (std::is_integral_v<LabelT>) {
        h_fil_pred_prob = *FilPredictProba(handle, params, X_transpose.data().get(), forest.get());
      }
      float tol = 1e-2;
      for (std::size_t i = 0; i < h_fil_pred.size(); i++) {
        // If the output probabilities are very similar for different classes
        // FIL may output a different class due to numerical differences
        // Skip these cases
        if constexpr (std::is_integral_v<LabelT>) {
          int num_outputs = forest->trees[0]->num_outputs;
          auto min_diff   = MinDifference(&h_fil_pred_prob[i * num_outputs], num_outputs);
          if (min_diff < tol) continue;
        }
        EXPECT_LE(abs(h_fil_pred[i] - h_pred[i]), tol);
      }
    }
  }
  // Run the full check suite; invoked from the constructor.
  void Test()
  {
    TestAccuracyImprovement();
    TestDeterminism();
    TestMinImpurity();
    TestTreeSize();
    TestInstanceCounts();
    TestFilPredict();
  }
  RF_metrics training_metrics;            // metrics from the initial training run
  thrust::device_vector<DataT> X;         // column-major training data
  thrust::device_vector<DataT> X_transpose;  // row-major copy for predict/FIL
  thrust::device_vector<LabelT> y;        // labels (class id or regression target)
  RfTestParams params;
  std::shared_ptr<RandomForestMetaData<DataT, LabelT>> forest;
  std::shared_ptr<thrust::device_vector<LabelT>> predictions;
};
// Dispatch tests based on any template parameters
class RfTest : public ::testing::TestWithParam<RfTestParams> {
public:
void SetUp() override
{
RfTestParams params = ::testing::TestWithParam<RfTestParams>::GetParam();
bool is_regression = params.split_criterion != GINI and params.split_criterion != ENTROPY;
if (params.double_precision) {
if (is_regression) {
RfSpecialisedTest<double, double> test(params);
} else {
RfSpecialisedTest<double, int> test(params);
}
} else {
if (is_regression) {
RfSpecialisedTest<float, float> test(params);
} else {
RfSpecialisedTest<float, int> test(params);
}
}
}
};
// Body is empty: all checks run in RfTest::SetUp via RfSpecialisedTest.
TEST_P(RfTest, PropertyBasedTest) {}
// Parameter ranges to test; n_tests random combinations are drawn from the
// cross product below (seeded with 0 for reproducibility).
std::vector<int> n_rows                 = {10, 100, 1452};
std::vector<int> n_cols                 = {1, 5, 152, 1014};
std::vector<int> n_trees                = {1, 5, 17};
std::vector<float> max_features         = {0.1f, 0.5f, 1.0f};
std::vector<float> max_samples          = {0.1f, 0.5f, 1.0f};
std::vector<int> max_depth              = {1, 10, 30};
std::vector<int> max_leaves             = {-1, 16, 50};
std::vector<bool> bootstrap             = {false, true};
std::vector<int> max_n_bins             = {2, 57, 128, 256};
std::vector<int> min_samples_leaf       = {1, 10, 30};
std::vector<int> min_samples_split      = {2, 10};
std::vector<float> min_impurity_decrease = {0.0f, 1.0f, 10.0f};
std::vector<int> n_streams              = {1, 2, 10};
std::vector<CRITERION> split_criterion  = {CRITERION::INVERSE_GAUSSIAN,
                                           CRITERION::GAMMA,
                                           CRITERION::POISSON,
                                           CRITERION::MSE,
                                           CRITERION::GINI,
                                           CRITERION::ENTROPY};
std::vector<int> seed                   = {0, 17};
std::vector<int> n_labels               = {2, 10, 20};
std::vector<bool> double_precision      = {false, true};
int n_tests                             = 100;
INSTANTIATE_TEST_CASE_P(RfTests,
                        RfTest,
                        ::testing::ValuesIn(SampleParameters<RfTestParams>(n_tests,
                                                                           0,
                                                                           n_rows,
                                                                           n_cols,
                                                                           n_trees,
                                                                           max_features,
                                                                           max_samples,
                                                                           max_depth,
                                                                           max_leaves,
                                                                           bootstrap,
                                                                           max_n_bins,
                                                                           min_samples_leaf,
                                                                           min_samples_split,
                                                                           min_impurity_decrease,
                                                                           n_streams,
                                                                           split_criterion,
                                                                           seed,
                                                                           n_labels,
                                                                           double_precision)));
// Regression test: datasets with more than 2^31 elements must not overflow
// 32-bit index arithmetic in either training or FIL inference.
TEST(RfTests, IntegerOverflow)
{
  std::size_t m = 1000000;
  std::size_t n = 2150;
  // Sanity-check that the element count really exceeds the 32-bit range.
  EXPECT_GE(m * n, 1ull << 31);
  thrust::device_vector<float> X(m * n);
  thrust::device_vector<float> y(m);
  raft::random::Rng r(4);
  r.normal(X.data().get(), X.size(), 0.0f, 2.0f, nullptr);
  r.normal(y.data().get(), y.size(), 0.0f, 2.0f, nullptr);
  auto forest      = std::make_shared<RandomForestMetaData<float, float>>();
  auto forest_ptr  = forest.get();
  auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(4);
  raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
  RF_params rf_params =
    set_rf_params(3, 100, 1.0, 256, 1, 2, 0.0, false, 1, 1.0, 0, CRITERION::MSE, 4, 128);
  fit(handle, forest_ptr, X.data().get(), m, n, y.data().get(), rf_params);
  // Check we have actually learned something
  EXPECT_GT(forest->trees[0]->leaf_counter, 1);
  // See if fil overflows
  thrust::device_vector<float> pred(m);
  ModelHandle model;
  build_treelite_forest(&model, forest_ptr, n);
  std::size_t num_outputs = 1;
  fil::treelite_params_t tl_params{fil::algo_t::ALGO_AUTO,
                                   num_outputs > 1,
                                   1.f / num_outputs,
                                   fil::storage_type_t::AUTO,
                                   8,
                                   1,
                                   0,
                                   nullptr};
  fil::forest_variant forest_variant;
  fil::from_treelite(handle, &forest_variant, model, &tl_params);
  fil::forest_t<float> fil_forest = std::get<fil::forest_t<float>>(forest_variant);
  fil::predict(handle, fil_forest, pred.data().get(), X.data().get(), m, false);
}
//-------------------------------------------------------------------------------------------------------------------------------------
// Configuration for the quantile-computation tests below.
struct QuantileTestParameters {
  int n_rows;       // number of samples to bin
  int max_n_bins;   // requested number of quantile bins
  uint64_t seed;    // RNG seed (used by the variable-binning test)
};
// Checks that the decision-tree custom lower_bound implementation agrees with
// thrust::lower_bound on the computed quantile array for every data point.
template <typename T>
class RFQuantileBinsLowerBoundTest : public ::testing::TestWithParam<QuantileTestParameters> {
 public:
  void SetUp() override
  {
    auto params = ::testing::TestWithParam<QuantileTestParameters>::GetParam();
    thrust::device_vector<T> data(params.n_rows);
    thrust::host_vector<T> h_data(params.n_rows);
    thrust::host_vector<T> h_quantiles(params.max_n_bins);
    raft::random::Rng r(8);
    r.normal(data.data().get(), data.size(), T(0.0), T(2.0), nullptr);
    auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(1);
    raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
    // computing the quantiles
    auto [quantiles, quantiles_array, n_bins_array] =
      DT::computeQuantiles(handle, data.data().get(), params.max_n_bins, params.n_rows, 1);
    raft::update_host(
      h_quantiles.data(), quantiles.quantiles_array, params.max_n_bins, handle.get_stream());
    int n_unique_bins;
    raft::copy(&n_unique_bins, quantiles.n_bins_array, 1, handle.get_stream());
    if (n_unique_bins < params.max_n_bins) {
      return;  // almost impossible that this happens, skip if so
    }
    h_data = data;
    for (std::size_t i = 0; i < h_data.size(); ++i) {
      auto d = h_data[i];
      // golden lower bound from thrust
      auto golden_lb =
        thrust::lower_bound(
          thrust::seq, h_quantiles.data(), h_quantiles.data() + params.max_n_bins, d) -
        h_quantiles.data();
      // lower bound from custom lower_bound impl
      auto lb = DT::lower_bound(h_quantiles.data(), params.max_n_bins, d);
      ASSERT_EQ(golden_lb, lb)
        << "custom lower_bound method is inconsistent with thrust::lower_bound" << std::endl;
    }
  }
};
// Checks that computeQuantiles splits normally-distributed data into bins of
// (near-)equal population: every bin holds ceil(n/bins) or one fewer samples,
// and no sample is missed or double-counted.
template <typename T>
class RFQuantileTest : public ::testing::TestWithParam<QuantileTestParameters> {
 public:
  void SetUp() override
  {
    auto params = ::testing::TestWithParam<QuantileTestParameters>::GetParam();
    thrust::device_vector<T> data(params.n_rows);
    thrust::device_vector<int> histogram(params.max_n_bins);
    thrust::host_vector<int> h_histogram(params.max_n_bins);
    raft::random::Rng r(8);
    r.normal(data.data().get(), data.size(), T(0.0), T(2.0), nullptr);
    auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(1);
    raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
    // computing the quantiles
    auto [quantiles, quantiles_array, n_bins_array] =
      DT::computeQuantiles(handle, data.data().get(), params.max_n_bins, params.n_rows, 1);
    int n_unique_bins;
    raft::copy(&n_unique_bins, quantiles.n_bins_array, 1, handle.get_stream());
    if (n_unique_bins < params.max_n_bins) {
      return;  // almost impossible that this happens, skip if so
    }
    auto d_quantiles = quantiles.quantiles_array;
    auto d_histogram = histogram.data().get();
    // Bin each sample into the first quantile that bounds it from above.
    thrust::for_each(data.begin(), data.end(), [=] __device__(T x) {
      for (int j = 0; j < params.max_n_bins; j++) {
        if (x <= d_quantiles[j]) {
          atomicAdd(&d_histogram[j], 1);
          break;
        }
      }
    });
    h_histogram           = histogram;
    int max_items_per_bin = raft::ceildiv(params.n_rows, params.max_n_bins);
    int min_items_per_bin = max_items_per_bin - 1;
    int total_items       = 0;
    for (int b = 0; b < params.max_n_bins; b++) {
      ASSERT_TRUE(h_histogram[b] == max_items_per_bin or h_histogram[b] == min_items_per_bin)
        << "No. samples in bin[" << b << "] = " << h_histogram[b] << " Expected "
        << max_items_per_bin << " or " << min_items_per_bin << std::endl;
      total_items += h_histogram[b];
    }
    ASSERT_EQ(params.n_rows, total_items)
      << "Some samples from dataset are either missed of double counted in quantile bins"
      << std::endl;
  }
};
// test to make sure that the quantiles and offsets calculated implement
// variable binning properly for categorical data, with unique values less than the `max_n_bins`
// test to make sure that the quantiles and offsets calculated implement
// variable binning properly for categorical data, with unique values less than the `max_n_bins`
template <typename T>
class RFQuantileVariableBinsTest : public ::testing::TestWithParam<QuantileTestParameters> {
 public:
  void SetUp() override
  {
    auto params = ::testing::TestWithParam<QuantileTestParameters>::GetParam();
    srand(params.seed);
    auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(1);
    raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
    thrust::device_vector<T> data(params.n_rows);
    // n_uniques guaranteed to be non-zero and smaller than `max_n_bins`
    int n_uniques;
    while ((n_uniques = rand() % params.max_n_bins) == 0) {}
    // populating random elements in data in [0, n_uniques)
    thrust::counting_iterator<float> first(0);
    thrust::copy(first, first + data.size(), data.begin());
    thrust::transform(data.begin(), data.end(), data.begin(), [=] __device__(auto& x) {
      x = T(int(x) % n_uniques);
      return x;
    });
    thrust::shuffle(data.begin(), data.end(), thrust::default_random_engine(n_uniques));
    // calling computeQuantiles
    auto [quantiles, quantiles_array, n_bins_array] =
      DT::computeQuantiles(handle, data.data().get(), params.max_n_bins, params.n_rows, 1);
    int n_uniques_obtained;
    raft::copy(&n_uniques_obtained, n_bins_array->data(), 1, handle.get_stream());
    // computeQuantiles must detect exactly the number of distinct values.
    ASSERT_EQ(n_uniques_obtained, n_uniques) << "No. of unique bins is supposed to be " << n_uniques
                                             << ", but got " << n_uniques_obtained << std::endl;
    thrust::device_vector<int> histogram(n_uniques);
    thrust::host_vector<int> h_histogram(n_uniques);
    auto d_quantiles = quantiles.quantiles_array;
    auto d_histogram = histogram.data().get();
    // creating a cumulative histogram from data based on the quantiles
    // where histogram[i] has number of elements that are less-than-equal quantiles[i]
    thrust::for_each(data.begin(), data.end(), [=] __device__(T x) {
      for (int j = 0; j < n_uniques; j++) {
        if (x <= d_quantiles[j]) {
          atomicAdd(&d_histogram[j], 1);
          break;
        }
      }
    });
    // since the elements are randomly and equally distributed, we verify the calculated histogram
    h_histogram           = histogram;
    int max_items_per_bin = raft::ceildiv(params.n_rows, n_uniques);
    int min_items_per_bin = max_items_per_bin - 1;
    int total_items       = 0;
    for (int b = 0; b < n_uniques; b++) {
      ASSERT_TRUE(h_histogram[b] == max_items_per_bin or h_histogram[b] == min_items_per_bin)
        << "No. samples in bin[" << b << "] = " << h_histogram[b] << " Expected "
        << max_items_per_bin << " or " << min_items_per_bin << std::endl;
      total_items += h_histogram[b];
    }
    // recalculate the items for checking proper counting
    ASSERT_EQ(params.n_rows, total_items)
      << "Some samples from dataset are either missed of double counted in quantile bins"
      << std::endl;
  }
};
// Shared parameter sets for all quantile tests below.
const std::vector<QuantileTestParameters> inputs = {{1000, 16, 6078587519764079670LLU},
                                                    {1130, 32, 4884670006177930266LLU},
                                                    {1752, 67, 9175325892580481371LLU},
                                                    {2307, 99, 9507819643927052255LLU},
                                                    {5000, 128, 9507819643927052255LLU}};
// float type quantile test
typedef RFQuantileTest<float> RFQuantileTestF;
TEST_P(RFQuantileTestF, test) {}
INSTANTIATE_TEST_CASE_P(RfTests, RFQuantileTestF, ::testing::ValuesIn(inputs));
// double type quantile test
typedef RFQuantileTest<double> RFQuantileTestD;
TEST_P(RFQuantileTestD, test) {}
INSTANTIATE_TEST_CASE_P(RfTests, RFQuantileTestD, ::testing::ValuesIn(inputs));
// float type quantile bins lower bounds test
typedef RFQuantileBinsLowerBoundTest<float> RFQuantileBinsLowerBoundTestF;
TEST_P(RFQuantileBinsLowerBoundTestF, test) {}
INSTANTIATE_TEST_CASE_P(RfTests, RFQuantileBinsLowerBoundTestF, ::testing::ValuesIn(inputs));
// double type quantile bins lower bounds test
typedef RFQuantileBinsLowerBoundTest<double> RFQuantileBinsLowerBoundTestD;
TEST_P(RFQuantileBinsLowerBoundTestD, test) {}
INSTANTIATE_TEST_CASE_P(RfTests, RFQuantileBinsLowerBoundTestD, ::testing::ValuesIn(inputs));
// float type quantile variable binning test
typedef RFQuantileVariableBinsTest<float> RFQuantileVariableBinsTestF;
TEST_P(RFQuantileVariableBinsTestF, test) {}
INSTANTIATE_TEST_CASE_P(RfTests, RFQuantileVariableBinsTestF, ::testing::ValuesIn(inputs));
// double type quantile variable binning test
typedef RFQuantileVariableBinsTest<double> RFQuantileVariableBinsTestD;
TEST_P(RFQuantileVariableBinsTestD, test) {}
INSTANTIATE_TEST_CASE_P(RfTests, RFQuantileVariableBinsTestD, ::testing::ValuesIn(inputs));
//------------------------------------------------------------------------------------------------------
// Trains a tiny depth-2 stump on 6 points and pins the exact text and JSON
// model dumps (golden strings).
TEST(RfTest, TextDump)
{
  RF_params rf_params = set_rf_params(2, 2, 1.0, 2, 1, 2, 0.0, false, 1, 1.0, 0, GINI, 1, 128);
  auto forest         = std::make_shared<RandomForestMetaData<float, int>>();
  std::vector<float> X_host = {1, 2, 3, 6, 7, 8};
  thrust::device_vector<float> X = X_host;
  std::vector<int> y_host = {0, 0, 1, 1, 1, 0};
  thrust::device_vector<int> y = y_host;
  auto stream_pool = std::make_shared<rmm::cuda_stream_pool>(1);
  raft::handle_t handle(rmm::cuda_stream_per_thread, stream_pool);
  auto forest_ptr = forest.get();
  fit(handle, forest_ptr, X.data().get(), y.size(), 1, y.data().get(), 2, rf_params);
  // The timing line in the dump is nondeterministic, so only match the prefix
  // and the tree structure, not the whole string.
  std::string expected_start_text = R"(Forest has 1 trees, max_depth 2, and max_leaves 2
Tree #0
 Decision Tree depth --> 1 and n_leaves --> 2
 Tree Fitting - Overall time -->)";
  std::string expected_end_text = R"(└(colid: 0, quesval: 3, best_metric_val: 0.0555556)
    ├(leaf, prediction: [0.666667, 0.333333], best_metric_val: 0)
    └(leaf, prediction: [0.333333, 0.666667], best_metric_val: 0))";
  EXPECT_TRUE(get_rf_detailed_text(forest_ptr).find(expected_start_text) != std::string::npos);
  EXPECT_TRUE(get_rf_detailed_text(forest_ptr).find(expected_end_text) != std::string::npos);
  std::string expected_json = R"([
{"nodeid": 0, "split_feature": 0, "split_threshold": 3, "gain": 0.055555582, "instance_count": 6, "yes": 1, "no": 2, "children": [
  {"nodeid": 1, "leaf_value": [0.666666687, 0.333333343], "instance_count": 3},
  {"nodeid": 2, "leaf_value": [0.333333343, 0.666666687], "instance_count": 3}
]}
])";
  EXPECT_EQ(get_rf_json(forest_ptr), expected_json);
}
//-------------------------------------------------------------------------------------------------------------------------------------
namespace DT {
// Configuration for the split-objective (gain-function) tests.
struct ObjectiveTestParameters {
  uint64_t seed;         // RNG seed for the synthetic labels
  int n_rows;            // number of labels generated
  int max_n_bins;        // number of histogram bins
  int n_classes;         // classes (classification) / output dimension
  int min_samples_leaf;  // minimum rows per child for a valid split
  double tolerance;      // allowed difference vs. ground-truth gain
};
template <typename ObjectiveT>
class ObjectiveTest : public ::testing::TestWithParam<ObjectiveTestParameters> {
typedef typename ObjectiveT::DataT DataT;
typedef typename ObjectiveT::LabelT LabelT;
typedef typename ObjectiveT::IdxT IdxT;
typedef typename ObjectiveT::BinT BinT;
ObjectiveTestParameters params;
public:
  // Uniform-ish pseudo-random integer in [0, end) via rand(); seeding is the
  // caller's responsibility.
  auto RandUnder(int const end = 10000) { return rand() % end; }
  // Generate params.n_rows random labels: class ids in [0, n_classes) for
  // classification (BinT == CountBin); strictly positive normal draws
  // (mean 1, sd 2, rejection-sampled) for regression, since the Poisson /
  // Gamma / inverse-Gaussian deviances require positive targets.
  auto GenRandomData()
  {
    std::default_random_engine rng;
    std::vector<DataT> data(params.n_rows);
    if constexpr (std::is_same<BinT, CountBin>::value)  // classification case
    {
      for (auto& d : data) {
        d = RandUnder(params.n_classes);
      }
    } else {
      std::normal_distribution<DataT> normal(1.0, 2.0);
      for (auto& d : data) {
        auto rand_element{DataT(0)};
        while (1) {
          rand_element = normal(rng);
          if (rand_element > 0) break;  // only positive random numbers
        }
        d = rand_element;
      }
    }
    return data;
  }
  // Build reference (cdf, pdf) histograms over max_n_bins contiguous,
  // equal-row-count bins. For classification each pdf bin counts labels equal
  // to the class; for regression it accumulates (label_sum, bin_width).
  // The cdf running sum restarts at bin 0 of every class.
  auto GenHist(std::vector<DataT> data)
  {
    std::vector<BinT> cdf_hist, pdf_hist;
    for (auto c = 0; c < params.n_classes; ++c) {
      for (auto b = 0; b < params.max_n_bins; ++b) {
        IdxT bin_width  = raft::ceildiv(params.n_rows, params.max_n_bins);
        auto data_begin = data.begin() + b * bin_width;
        auto data_end   = data_begin + bin_width;
        if constexpr (std::is_same<BinT, CountBin>::value) {  // classification case
          auto count{IdxT(0)};
          std::for_each(data_begin, data_end, [&](auto d) {
            if (d == c) ++count;
          });
          pdf_hist.emplace_back(count);
        } else {  // regression case
          auto label_sum{DataT(0)};
          label_sum = std::accumulate(data_begin, data_end, DataT(0));
          pdf_hist.emplace_back(label_sum, bin_width);
        }
        // Accumulate within the current class only.
        auto cumulative = b > 0 ? cdf_hist.back() : BinT();
        cdf_hist.emplace_back(pdf_hist.empty() ? BinT() : pdf_hist.back());
        cdf_hist.back() += cumulative;
      }
    }
    return std::make_pair(cdf_hist, pdf_hist);
  }
  // Half mean squared error around the sample mean: 1/n * 1/2 * sum((y - mean)^2).
  // Returns (mse, label_sum, n) so callers can form weighted gains.
  auto MSE(std::vector<DataT> const& data)  // 1/n * 1/2 * sum((y - y_pred) * (y - y_pred))
  {
    DataT sum        = std::accumulate(data.begin(), data.end(), DataT(0));
    DataT const mean = sum / data.size();
    auto mse{DataT(0.0)};  // mse: mean squared error
    std::for_each(data.begin(), data.end(), [&](auto d) {
      mse += (d - mean) * (d - mean);  // unit deviance
    });
    mse /= 2 * data.size();
    return std::make_tuple(mse, sum, DataT(data.size()));
  }
  // Ground-truth MSE gain for splitting after bin `split_bin_index`:
  // parent_mse - weighted child mses. Returns -DataT max (i.e. "invalid
  // split") when either child would violate min_samples_leaf.
  auto MSEGroundTruthGain(std::vector<DataT> const& data, std::size_t split_bin_index)
  {
    auto bin_width = raft::ceildiv(params.n_rows, params.max_n_bins);
    std::vector<DataT> left_data(data.begin(), data.begin() + (split_bin_index + 1) * bin_width);
    std::vector<DataT> right_data(data.begin() + (split_bin_index + 1) * bin_width, data.end());
    auto [parent_mse, label_sum, n]          = MSE(data);
    auto [left_mse, label_sum_left, n_left]  = MSE(left_data);
    auto [right_mse, label_sum_right, n_right] = MSE(right_data);
    auto gain =
      parent_mse - ((n_left / n) * left_mse +  // the minimizing objective function is half deviance
                    (n_right / n) * right_mse);  // gain in long form without proxy
    // edge cases
    if (n_left < params.min_samples_leaf or n_right < params.min_samples_leaf)
      return -std::numeric_limits<DataT>::max();
    else
      return gain;
  }
  // Inverse-Gaussian half deviance around the sample mean:
  // 1/(2n) * sum((y - mean)^2 / (y * mean^2)). Returns (deviance, label_sum, n).
  auto InverseGaussianHalfDeviance(
    std::vector<DataT> const&
      data)  // 1/n * 2 * sum((y - y_pred) * (y - y_pred)/(y * (y_pred) * (y_pred)))
  {
    DataT sum        = std::accumulate(data.begin(), data.end(), DataT(0));
    DataT const mean = sum / data.size();
    auto ighd{DataT(0.0)};  // ighd: inverse gaussian half deviance
    std::for_each(data.begin(), data.end(), [&](auto d) {
      ighd += (d - mean) * (d - mean) / (d * mean * mean);  // unit deviance
    });
    ighd /= 2 * data.size();
    return std::make_tuple(ighd, sum, DataT(data.size()));
  }
  // Ground-truth inverse-Gaussian gain for a split after bin `split_bin_index`.
  // Invalid (-DataT max) when a child violates min_samples_leaf or any
  // partition's label sum falls below the objective's epsilon (the deviance
  // needs strictly positive means).
  auto InverseGaussianGroundTruthGain(std::vector<DataT> const& data, std::size_t split_bin_index)
  {
    auto bin_width = raft::ceildiv(params.n_rows, params.max_n_bins);
    std::vector<DataT> left_data(data.begin(), data.begin() + (split_bin_index + 1) * bin_width);
    std::vector<DataT> right_data(data.begin() + (split_bin_index + 1) * bin_width, data.end());
    auto [parent_ighd, label_sum, n]           = InverseGaussianHalfDeviance(data);
    auto [left_ighd, label_sum_left, n_left]   = InverseGaussianHalfDeviance(left_data);
    auto [right_ighd, label_sum_right, n_right] = InverseGaussianHalfDeviance(right_data);
    auto gain = parent_ighd -
                ((n_left / n) * left_ighd +  // the minimizing objective function is half deviance
                 (n_right / n) * right_ighd);  // gain in long form without proxy
    // edge cases
    if (n_left < params.min_samples_leaf or n_right < params.min_samples_leaf or
        label_sum < ObjectiveT::eps_ or label_sum_right < ObjectiveT::eps_ or
        label_sum_left < ObjectiveT::eps_)
      return -std::numeric_limits<DataT>::max();
    else
      return gain;
  }
  // Gamma half deviance around the sample mean:
  // 1/n * sum(log(mean/y) + y/mean - 1), with log(y) clamped via y ? y : 1 to
  // avoid log(0). Returns (deviance, label_sum, n).
  auto GammaHalfDeviance(
    std::vector<DataT> const& data)  // 1/n * 2 * sum(log(y_pred/y_true) + y_true/y_pred - 1)
  {
    DataT sum(0);
    sum              = std::accumulate(data.begin(), data.end(), DataT(0));
    DataT const mean = sum / data.size();
    DataT ghd(0);  // gamma half deviance
    std::for_each(data.begin(), data.end(), [&](auto& element) {
      auto log_y = raft::myLog(element ? element : DataT(1.0));
      ghd += raft::myLog(mean) - log_y + element / mean - 1;
    });
    ghd /= data.size();
    return std::make_tuple(ghd, sum, DataT(data.size()));
  }
auto GammaGroundTruthGain(std::vector<DataT> const& data, std::size_t split_bin_index)
{
auto bin_width = raft::ceildiv(params.n_rows, params.max_n_bins);
std::vector<DataT> left_data(data.begin(), data.begin() + (split_bin_index + 1) * bin_width);
std::vector<DataT> right_data(data.begin() + (split_bin_index + 1) * bin_width, data.end());
auto [parent_ghd, label_sum, n] = GammaHalfDeviance(data);
auto [left_ghd, label_sum_left, n_left] = GammaHalfDeviance(left_data);
auto [right_ghd, label_sum_right, n_right] = GammaHalfDeviance(right_data);
auto gain =
parent_ghd - ((n_left / n) * left_ghd + // the minimizing objective function is half deviance
(n_right / n) * right_ghd); // gain in long form without proxy
// edge cases
if (n_left < params.min_samples_leaf or n_right < params.min_samples_leaf or
label_sum < ObjectiveT::eps_ or label_sum_right < ObjectiveT::eps_ or
label_sum_left < ObjectiveT::eps_)
return -std::numeric_limits<DataT>::max();
else
return gain;
}
auto PoissonHalfDeviance(
std::vector<DataT> const& data) // 1/n * sum(y_true * log(y_true/y_pred) + y_pred - y_true)
{
DataT sum = std::accumulate(data.begin(), data.end(), DataT(0));
auto const mean = sum / data.size();
auto poisson_half_deviance{DataT(0.0)};
std::for_each(data.begin(), data.end(), [&](auto d) {
auto log_y = raft::myLog(d ? d : DataT(1.0)); // we don't want nans
poisson_half_deviance += d * (log_y - raft::myLog(mean)) + mean - d;
});
poisson_half_deviance /= data.size();
return std::make_tuple(poisson_half_deviance, sum, DataT(data.size()));
}
auto PoissonGroundTruthGain(std::vector<DataT> const& data, std::size_t split_bin_index)
{
auto bin_width = raft::ceildiv(params.n_rows, params.max_n_bins);
std::vector<DataT> left_data(data.begin(), data.begin() + (split_bin_index + 1) * bin_width);
std::vector<DataT> right_data(data.begin() + (split_bin_index + 1) * bin_width, data.end());
auto [parent_phd, label_sum, n] = PoissonHalfDeviance(data);
auto [left_phd, label_sum_left, n_left] = PoissonHalfDeviance(left_data);
auto [right_phd, label_sum_right, n_right] = PoissonHalfDeviance(right_data);
auto gain = parent_phd - ((n_left / n) * left_phd +
(n_right / n) * right_phd); // gain in long form without proxy
// edge cases
if (n_left < params.min_samples_leaf or n_right < params.min_samples_leaf or
label_sum < ObjectiveT::eps_ or label_sum_right < ObjectiveT::eps_ or
label_sum_left < ObjectiveT::eps_)
return -std::numeric_limits<DataT>::max();
else
return gain;
}
auto Entropy(std::vector<DataT> const& data)
{ // sum((n_c/n_total)*(log(n_c/n_total)))
DataT entropy(0);
for (auto c = 0; c < params.n_classes; ++c) {
IdxT sum(0);
std::for_each(data.begin(), data.end(), [&](auto d) {
if (d == DataT(c)) ++sum;
});
DataT class_proba = DataT(sum) / data.size();
entropy += -class_proba * raft::myLog(class_proba ? class_proba : DataT(1)) /
raft::myLog(DataT(2)); // adding gain
}
return entropy;
}
auto EntropyGroundTruthGain(std::vector<DataT> const& data, std::size_t const split_bin_index)
{
auto bin_width = raft::ceildiv(params.n_rows, params.max_n_bins);
std::vector<DataT> left_data(data.begin(), data.begin() + (split_bin_index + 1) * bin_width);
std::vector<DataT> right_data(data.begin() + (split_bin_index + 1) * bin_width, data.end());
auto parent_entropy = Entropy(data);
auto left_entropy = Entropy(left_data);
auto right_entropy = Entropy(right_data);
DataT n = data.size();
DataT left_n = left_data.size();
DataT right_n = right_data.size();
auto gain = parent_entropy - ((left_n / n) * left_entropy + (right_n / n) * right_entropy);
// edge cases
if (left_n < params.min_samples_leaf or right_n < params.min_samples_leaf) {
return -std::numeric_limits<DataT>::max();
} else {
return gain;
}
}
auto GiniImpurity(std::vector<DataT> const& data)
{ // sum((n_c/n_total)(1-(n_c/n_total)))
DataT gini(0);
for (auto c = 0; c < params.n_classes; ++c) {
IdxT sum(0);
std::for_each(data.begin(), data.end(), [&](auto d) {
if (d == DataT(c)) ++sum;
});
DataT class_proba = DataT(sum) / data.size();
gini += class_proba * (1 - class_proba); // adding gain
}
return gini;
}
auto GiniGroundTruthGain(std::vector<DataT> const& data, std::size_t const split_bin_index)
{
auto bin_width = raft::ceildiv(params.n_rows, params.max_n_bins);
std::vector<DataT> left_data(data.begin(), data.begin() + (split_bin_index + 1) * bin_width);
std::vector<DataT> right_data(data.begin() + (split_bin_index + 1) * bin_width, data.end());
auto parent_gini = GiniImpurity(data);
auto left_gini = GiniImpurity(left_data);
auto right_gini = GiniImpurity(right_data);
DataT n = data.size();
DataT left_n = left_data.size();
DataT right_n = right_data.size();
auto gain = parent_gini - ((left_n / n) * left_gini + (right_n / n) * right_gini);
// edge cases
if (left_n < params.min_samples_leaf or right_n < params.min_samples_leaf) {
return -std::numeric_limits<DataT>::max();
} else {
return gain;
}
}
auto GroundTruthGain(std::vector<DataT> const& data, std::size_t const split_bin_index)
{
if constexpr (std::is_same<ObjectiveT, MSEObjectiveFunction<DataT, LabelT, IdxT>>::
value) // mean squared error
{
return MSEGroundTruthGain(data, split_bin_index);
} else if constexpr (std::is_same<ObjectiveT, PoissonObjectiveFunction<DataT, LabelT, IdxT>>::
value) // poisson
{
return PoissonGroundTruthGain(data, split_bin_index);
} else if constexpr (std::is_same<ObjectiveT,
GammaObjectiveFunction<DataT, LabelT, IdxT>>::value) // gamma
{
return GammaGroundTruthGain(data, split_bin_index);
} else if constexpr (std::is_same<ObjectiveT,
InverseGaussianObjectiveFunction<DataT, LabelT, IdxT>>::
value) // inverse gaussian
{
return InverseGaussianGroundTruthGain(data, split_bin_index);
} else if constexpr (std::is_same<ObjectiveT, EntropyObjectiveFunction<DataT, LabelT, IdxT>>::
value) // entropy
{
return EntropyGroundTruthGain(data, split_bin_index);
} else if constexpr (std::is_same<ObjectiveT,
GiniObjectiveFunction<DataT, LabelT, IdxT>>::value) // gini
{
return GiniGroundTruthGain(data, split_bin_index);
}
return DataT(0.0);
}
auto NumLeftOfBin(std::vector<BinT> const& cdf_hist, IdxT idx)
{
auto count{IdxT(0)};
for (auto c = 0; c < params.n_classes; ++c) {
if constexpr (std::is_same<BinT, CountBin>::value) // countbin
{
count += cdf_hist[params.max_n_bins * c + idx].x;
} else // aggregatebin
{
count += cdf_hist[params.max_n_bins * c + idx].count;
}
}
return count;
}
void SetUp() override
{
srand(params.seed);
params = ::testing::TestWithParam<ObjectiveTestParameters>::GetParam();
ObjectiveT objective(params.n_classes, params.min_samples_leaf);
auto data = GenRandomData();
auto [cdf_hist, pdf_hist] = GenHist(data);
auto split_bin_index = RandUnder(params.max_n_bins);
auto ground_truth_gain = GroundTruthGain(data, split_bin_index);
auto hypothesis_gain = objective.GainPerSplit(&cdf_hist[0],
split_bin_index,
params.max_n_bins,
NumLeftOfBin(cdf_hist, params.max_n_bins - 1),
NumLeftOfBin(cdf_hist, split_bin_index));
ASSERT_NEAR(ground_truth_gain, hypothesis_gain, params.tolerance);
}
};
// Parameter tables for the objective tests. Each row presumably lists
// {seed, n_rows, max_n_bins, n_classes, min_samples_leaf, tolerance}, matching
// the members read in SetUp()/the gain helpers — TODO confirm against the
// ObjectiveTestParameters declaration.
// Regression objectives use n_classes = 1; classification tables use 2-100.
const std::vector<ObjectiveTestParameters> mse_objective_test_parameters = {
  {9507819643927052255LLU, 2048, 64, 1, 0, 0.00001},
  {9507819643927052259LLU, 2048, 128, 1, 1, 0.00001},
  {9507819643927052251LLU, 2048, 256, 1, 1, 0.00001},
  {9507819643927052258LLU, 2048, 512, 1, 5, 0.00001},
};
const std::vector<ObjectiveTestParameters> poisson_objective_test_parameters = {
  {9507819643927052255LLU, 2048, 64, 1, 0, 0.00001},
  {9507819643927052259LLU, 2048, 128, 1, 1, 0.00001},
  {9507819643927052251LLU, 2048, 256, 1, 1, 0.00001},
  {9507819643927052258LLU, 2048, 512, 1, 5, 0.00001},
};
const std::vector<ObjectiveTestParameters> gamma_objective_test_parameters = {
  {9507819643927052255LLU, 2048, 64, 1, 0, 0.00001},
  {9507819643927052259LLU, 2048, 128, 1, 1, 0.00001},
  {9507819643927052251LLU, 2048, 256, 1, 1, 0.00001},
  {9507819643927052258LLU, 2048, 512, 1, 5, 0.00001},
};
const std::vector<ObjectiveTestParameters> invgauss_objective_test_parameters = {
  {9507819643927052255LLU, 2048, 64, 1, 0, 0.00001},
  {9507819643927052259LLU, 2048, 128, 1, 1, 0.00001},
  {9507819643927052251LLU, 2048, 256, 1, 1, 0.00001},
  {9507819643927052258LLU, 2048, 512, 1, 5, 0.00001},
};
// Classification: n_classes spans 2, 10, and 100.
const std::vector<ObjectiveTestParameters> entropy_objective_test_parameters = {
  {9507819643927052255LLU, 2048, 64, 2, 0, 0.00001},
  {9507819643927052256LLU, 2048, 128, 10, 1, 0.00001},
  {9507819643927052257LLU, 2048, 256, 100, 1, 0.00001},
  {9507819643927052258LLU, 2048, 512, 100, 5, 0.00001},
};
const std::vector<ObjectiveTestParameters> gini_objective_test_parameters = {
  {9507819643927052255LLU, 2048, 64, 2, 0, 0.00001},
  {9507819643927052256LLU, 2048, 128, 10, 1, 0.00001},
  {9507819643927052257LLU, 2048, 256, 100, 1, 0.00001},
  {9507819643927052258LLU, 2048, 512, 100, 5, 0.00001},
};
// Test instantiations: one double and one float specialization per objective.
// Modernized: `using` alias declarations instead of `typedef` (the file already
// relies on C++17 features); alias names and test names are unchanged.
// mse objective test
using MSEObjectiveTestD = ObjectiveTest<MSEObjectiveFunction<double, double, int>>;
TEST_P(MSEObjectiveTestD, MSEObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        MSEObjectiveTestD,
                        ::testing::ValuesIn(mse_objective_test_parameters));
using MSEObjectiveTestF = ObjectiveTest<MSEObjectiveFunction<float, float, int>>;
TEST_P(MSEObjectiveTestF, MSEObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        MSEObjectiveTestF,
                        ::testing::ValuesIn(mse_objective_test_parameters));
// poisson objective test
using PoissonObjectiveTestD = ObjectiveTest<PoissonObjectiveFunction<double, double, int>>;
TEST_P(PoissonObjectiveTestD, poissonObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        PoissonObjectiveTestD,
                        ::testing::ValuesIn(poisson_objective_test_parameters));
using PoissonObjectiveTestF = ObjectiveTest<PoissonObjectiveFunction<float, float, int>>;
TEST_P(PoissonObjectiveTestF, poissonObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        PoissonObjectiveTestF,
                        ::testing::ValuesIn(poisson_objective_test_parameters));
// gamma objective test
using GammaObjectiveTestD = ObjectiveTest<GammaObjectiveFunction<double, double, int>>;
TEST_P(GammaObjectiveTestD, GammaObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        GammaObjectiveTestD,
                        ::testing::ValuesIn(gamma_objective_test_parameters));
using GammaObjectiveTestF = ObjectiveTest<GammaObjectiveFunction<float, float, int>>;
TEST_P(GammaObjectiveTestF, GammaObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        GammaObjectiveTestF,
                        ::testing::ValuesIn(gamma_objective_test_parameters));
// InvGauss objective test
using InverseGaussianObjectiveTestD =
  ObjectiveTest<InverseGaussianObjectiveFunction<double, double, int>>;
TEST_P(InverseGaussianObjectiveTestD, InverseGaussianObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        InverseGaussianObjectiveTestD,
                        ::testing::ValuesIn(invgauss_objective_test_parameters));
using InverseGaussianObjectiveTestF =
  ObjectiveTest<InverseGaussianObjectiveFunction<float, float, int>>;
TEST_P(InverseGaussianObjectiveTestF, InverseGaussianObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        InverseGaussianObjectiveTestF,
                        ::testing::ValuesIn(invgauss_objective_test_parameters));
// entropy objective test
using EntropyObjectiveTestD = ObjectiveTest<EntropyObjectiveFunction<double, int, int>>;
TEST_P(EntropyObjectiveTestD, entropyObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        EntropyObjectiveTestD,
                        ::testing::ValuesIn(entropy_objective_test_parameters));
using EntropyObjectiveTestF = ObjectiveTest<EntropyObjectiveFunction<float, int, int>>;
TEST_P(EntropyObjectiveTestF, entropyObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        EntropyObjectiveTestF,
                        ::testing::ValuesIn(entropy_objective_test_parameters));
// gini objective test
using GiniObjectiveTestD = ObjectiveTest<GiniObjectiveFunction<double, int, int>>;
TEST_P(GiniObjectiveTestD, giniObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        GiniObjectiveTestD,
                        ::testing::ValuesIn(gini_objective_test_parameters));
using GiniObjectiveTestF = ObjectiveTest<GiniObjectiveFunction<float, int, int>>;
TEST_P(GiniObjectiveTestF, giniObjectiveTest) {}
INSTANTIATE_TEST_CASE_P(RfTests,
                        GiniObjectiveTestF,
                        ::testing::ValuesIn(gini_objective_test_parameters));
} // end namespace DT
} // end namespace ML
| 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.